From c2c6af2465e21cd706c8cfd0dc4f95f7746d86f0 Mon Sep 17 00:00:00 2001 From: Walter Ray-Dulany Date: Wed, 28 Sep 2016 11:13:12 -0400 Subject: [PATCH 01/11] Initial start on moving away from Java object serialization. --- .../org/apache/pirk/serialization/HadoopFileSystemStore.java | 2 +- .../org/apache/pirk/serialization/LocalFileSystemStore.java | 2 +- src/main/java/org/apache/pirk/serialization/StorageService.java | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/apache/pirk/serialization/HadoopFileSystemStore.java b/src/main/java/org/apache/pirk/serialization/HadoopFileSystemStore.java index 38239cc9..667218ba 100644 --- a/src/main/java/org/apache/pirk/serialization/HadoopFileSystemStore.java +++ b/src/main/java/org/apache/pirk/serialization/HadoopFileSystemStore.java @@ -37,7 +37,7 @@ public class HadoopFileSystemStore extends StorageService } /** - * Creates a new storage service on the given HDFS file system using default Java serialization. + * Creates a new storage service on the given HDFS file system using default Json serialization. */ public HadoopFileSystemStore(FileSystem fs) { diff --git a/src/main/java/org/apache/pirk/serialization/LocalFileSystemStore.java b/src/main/java/org/apache/pirk/serialization/LocalFileSystemStore.java index ac9cf2ca..741f9448 100644 --- a/src/main/java/org/apache/pirk/serialization/LocalFileSystemStore.java +++ b/src/main/java/org/apache/pirk/serialization/LocalFileSystemStore.java @@ -26,7 +26,7 @@ public class LocalFileSystemStore extends StorageService { /** - * Creates a new storage service on the local file system using default Java serialization. + * Creates a new storage service on the local file system using default Json serialization. */ public LocalFileSystemStore() { diff --git a/src/main/java/org/apache/pirk/serialization/StorageService.java b/src/main/java/org/apache/pirk/serialization/StorageService.java index a4910dfd..94c5921d 100644 --- a/src/main/java/org/apache/pirk/serialization/StorageService.java +++ b/src/main/java/org/apache/pirk/serialization/StorageService.java @@ -27,7 +27,7 @@ abstract class StorageService StorageService() { - this.setSerializer(new JavaSerializer()); + this.setSerializer(new JsonSerializer()); } StorageService(SerializationService service) From 66235a9c6ac07e3cec6a94c0874b40bf6624192e Mon Sep 17 00:00:00 2001 From: Walter Ray-Dulany Date: Thu, 29 Sep 2016 13:02:17 -0400 Subject: [PATCH 02/11] Initial work. 
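For orientation, the pattern this series starts to adopt here is Jackson's annotation-driven custom deserialization: a class is tagged with @JsonDeserialize(using = ...) and a companion StdDeserializer rebuilds it field by field from the JSON tree. The following is only a minimal, self-contained sketch of that pattern, not Pirk code — Widget and WidgetDeserializer are illustrative names, and it assumes the jackson-databind dependency this patch adds to the pom (2.7.0).

  import java.io.IOException;

  import com.fasterxml.jackson.core.JsonParser;
  import com.fasterxml.jackson.databind.DeserializationContext;
  import com.fasterxml.jackson.databind.JsonNode;
  import com.fasterxml.jackson.databind.ObjectMapper;
  import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
  import com.fasterxml.jackson.databind.deser.std.StdDeserializer;

  // Illustrative type, not part of Pirk.
  @JsonDeserialize(using = WidgetDeserializer.class)
  class Widget
  {
    final String name;
    final int size;

    Widget(String name, int size)
    {
      this.name = name;
      this.size = size;
    }

    public static void main(String[] args) throws IOException
    {
      // The class-level annotation routes this read through WidgetDeserializer.
      Widget w = new ObjectMapper().readValue("{\"name\":\"q\",\"size\":3}", Widget.class);
      System.out.println(w.name + " " + w.size); // prints: q 3
    }
  }

  // Invoked by Jackson whenever it reads a Widget, because of the annotation above.
  class WidgetDeserializer extends StdDeserializer<Widget>
  {
    public WidgetDeserializer()
    {
      super(Widget.class);
    }

    @Override
    public Widget deserialize(JsonParser parser, DeserializationContext context) throws IOException
    {
      JsonNode node = parser.getCodec().readTree(parser);
      return new Widget(node.get("name").asText(), node.get("size").asInt());
    }
  }
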
--- pom.xml | 8 ++++ .../apache/pirk/query/wideskies/Query.java | 19 ++++++++- .../query/wideskies/QueryDeserializer.java | 40 +++++++++++++++++++ .../pirk/query/wideskies/QueryInfo.java | 19 ++++++++- .../wideskies/QueryInfoDeserializer.java | 27 +++++++++++++ .../pirk/serialization/JsonSerializer.java | 3 +- 6 files changed, 113 insertions(+), 3 deletions(-) create mode 100644 src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java create mode 100644 src/main/java/org/apache/pirk/query/wideskies/QueryInfoDeserializer.java diff --git a/pom.xml b/pom.xml index 5c46283f..3fe281bb 100644 --- a/pom.xml +++ b/pom.xml @@ -93,6 +93,8 @@ 2.0.0 1C true + 2.7.0 + @@ -114,6 +116,12 @@ 1.1.1 + + com.fasterxml.jackson.core + jackson-databind + ${jackson.version} + + commons-net commons-net diff --git a/src/main/java/org/apache/pirk/query/wideskies/Query.java b/src/main/java/org/apache/pirk/query/wideskies/Query.java index 51e6bb48..dcd44d2a 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/Query.java +++ b/src/main/java/org/apache/pirk/query/wideskies/Query.java @@ -26,6 +26,9 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.function.Consumer; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; import org.apache.pirk.encryption.ModPowAbstraction; import org.apache.pirk.serialization.Storable; import org.slf4j.Logger; @@ -35,32 +38,46 @@ * Class to hold the PIR query vectors * */ +@JsonDeserialize(using = QueryDeserializer.class) public class Query implements Serializable, Storable { + @JsonSerialize private static final long serialVersionUID = 1L; + @JsonIgnore private static final Logger logger = LoggerFactory.getLogger(Query.class); + @JsonSerialize private final QueryInfo queryInfo; // holds all query info + @JsonSerialize private final SortedMap queryElements; // query elements - ordered on insertion // lookup table for exponentiation of query vectors - based on dataPartitionBitSize // element -> + @JsonIgnore private Map> expTable = new ConcurrentHashMap<>(); // File based lookup table for modular exponentiation // element hash -> filename containing it's modular exponentiations + @JsonIgnore private Map expFileBasedLookup = new HashMap<>(); + @JsonSerialize private final BigInteger N; // N=pq, RSA modulus for the Paillier encryption associated with the queryElements + @JsonSerialize private final BigInteger NSquared; public Query(QueryInfo queryInfo, BigInteger N, SortedMap queryElements) + { + this(queryInfo, N, N.pow(2), queryElements); + } + + public Query(QueryInfo queryInfo, BigInteger N, BigInteger NSquared, SortedMap queryElements) { this.queryInfo = queryInfo; this.N = N; - NSquared = N.pow(2); + this.NSquared = NSquared; this.queryElements = queryElements; } diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java new file mode 100644 index 00000000..d052590d --- /dev/null +++ b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java @@ -0,0 +1,40 @@ +package org.apache.pirk.query.wideskies; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; +import 
org.apache.pirk.serialization.JsonSerializer; + +import java.io.IOException; +import java.math.BigInteger; +import java.util.SortedMap; + +/** + * Created by walter on 9/28/16. + */ +public class QueryDeserializer extends StdDeserializer { + + public QueryDeserializer(){ + this(null); + } + + public QueryDeserializer(Class vc) { + super(vc); + } + + @Override + public Query deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { + JsonNode node = jsonParser.getCodec().readTree(jsonParser); + QueryInfo queryInfo = JsonSerializer.objectMapper.convertValue(node.get("queryInfo"), QueryInfo.class); + SortedMap queryElements = JsonSerializer.objectMapper.convertValue(node.get("queryElements"), SortedMap.class); + BigInteger N = new BigInteger(node.get("N").asText()); + BigInteger NSquared = new BigInteger(node.get("NSquared").asText()); + + + Query query = new Query(queryInfo, N, NSquared, queryElements); + + return query; + } +} diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java index 06bfa28b..e63ad036 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java +++ b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java @@ -24,6 +24,9 @@ import java.util.Map; import java.util.UUID; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; import org.apache.pirk.schema.query.QuerySchema; import org.apache.pirk.schema.query.QuerySchemaRegistry; import org.slf4j.Logger; @@ -35,34 +38,48 @@ * Note that the hash key is specific to the query. If we have hash collisions over our selector set, we will append integers to the key starting with 0 until * we no longer have collisions */ +@JsonDeserialize(using = QueryInfoDeserializer.class) public class QueryInfo implements Serializable, Cloneable { + @JsonSerialize private static final long serialVersionUID = 1L; + @JsonIgnore private static final Logger logger = LoggerFactory.getLogger(QueryInfo.class); + @JsonSerialize private UUID identifier; // the identifier of the query + @JsonSerialize private int numSelectors = 0; // the number of selectors in the query, given by \floor{paillerBitSize/dataPartitionBitSize} + @JsonSerialize private String queryType = null; // QueryType string const + @JsonSerialize private int hashBitSize = 0; // Bit size of the keyed hash function + @JsonSerialize private String hashKey; // Key for the keyed hash function + @JsonSerialize private int numBitsPerDataElement = 0; // total num bits per returned data value - defined relative to query type + @JsonSerialize private int dataPartitionBitSize = 0; // num of bits for each partition of an incoming data element, must be < 32 right now + @JsonSerialize private int numPartitionsPerDataElement = 0; // num partitions of size dataPartitionBitSize per data element + @JsonSerialize private boolean useExpLookupTable = false; // whether or not to generate and use the expLookupTable for encryption, it is very expensive to compute + @JsonSerialize private boolean useHDFSExpLookupTable = false; // whether or not to use the expLookupTable stored in HDFS // if it doesn't yet exist, it will be created within the cluster and stored in HDFS + @JsonSerialize private boolean embedSelector = true; // whether or not to embed the selector in the results - results in a very low // false positive 
rate for variable length selectors and a zero false positive rate // for selectors of fixed size < 32 bits - + @JsonSerialize private QuerySchema qSchema = null; public QueryInfo(int numSelectorsInput, int hashBitSizeInput, String hashKeyInput, int dataPartitionBitSizeInput, String queryTypeInput, diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryInfoDeserializer.java b/src/main/java/org/apache/pirk/query/wideskies/QueryInfoDeserializer.java new file mode 100644 index 00000000..0be8ca55 --- /dev/null +++ b/src/main/java/org/apache/pirk/query/wideskies/QueryInfoDeserializer.java @@ -0,0 +1,27 @@ +package org.apache.pirk.query.wideskies; + +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; + +import java.io.IOException; + +/** + * Created by walter on 9/28/16. + */ +public class QueryInfoDeserializer extends StdDeserializer { + protected QueryInfoDeserializer(){ + this(null); + } + + protected QueryInfoDeserializer(Class vc) { + super(vc); + } + + @Override + public QueryInfo deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { + return null; + } +} diff --git a/src/main/java/org/apache/pirk/serialization/JsonSerializer.java b/src/main/java/org/apache/pirk/serialization/JsonSerializer.java index 6071c60b..718c63fe 100644 --- a/src/main/java/org/apache/pirk/serialization/JsonSerializer.java +++ b/src/main/java/org/apache/pirk/serialization/JsonSerializer.java @@ -26,7 +26,8 @@ public class JsonSerializer extends SerializationService { - private ObjectMapper objectMapper = new ObjectMapper(); + // We really only need the one objectMapper, I think. + public static final ObjectMapper objectMapper = new ObjectMapper(); /** * Stores the given object on the output stream as JSON. From 7b221de870f7efc8b41ec52fd5d05af06360c405 Mon Sep 17 00:00:00 2001 From: Walter Ray-Dulany Date: Tue, 4 Oct 2016 14:24:18 -0400 Subject: [PATCH 03/11] Added deserializer for query; I think I should delete the serializer for QueryInfo. 
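One detail worth noting before the diff: the deserializer below rebuilds generic containers (the query elements, element-name lists, and additional-field maps) with Jackson TypeReference tokens, because without an explicit type token the values come back as untyped strings and plain maps rather than the original Integer/BigInteger pairs. A small stand-alone sketch of that behaviour, using made-up data:

  import java.math.BigInteger;
  import java.util.SortedMap;
  import java.util.TreeMap;

  import com.fasterxml.jackson.core.type.TypeReference;
  import com.fasterxml.jackson.databind.ObjectMapper;

  class TypeReferenceDemo
  {
    public static void main(String[] args) throws Exception
    {
      ObjectMapper mapper = new ObjectMapper();

      SortedMap<Integer,BigInteger> elements = new TreeMap<>();
      elements.put(0, BigInteger.TEN);

      String json = mapper.writeValueAsString(elements); // {"0":10}

      // Without the TypeReference, Jackson would hand back string keys and plain
      // Integer values; with it, the Integer -> BigInteger map comes back intact.
      SortedMap<Integer,BigInteger> restored =
          mapper.readValue(json, new TypeReference<TreeMap<Integer,BigInteger>>() {});

      System.out.println(restored.get(0)); // prints: 10
    }
  }
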
--- .../apache/pirk/query/wideskies/Query.java | 2 +- .../query/wideskies/QueryDeserializer.java | 93 ++++++++++++++++++- .../pirk/query/wideskies/QueryInfo.java | 2 +- .../wideskies/QueryInfoDeserializer.java | 21 ++++- .../apache/pirk/schema/query/QuerySchema.java | 17 +++- 5 files changed, 123 insertions(+), 12 deletions(-) diff --git a/src/main/java/org/apache/pirk/query/wideskies/Query.java b/src/main/java/org/apache/pirk/query/wideskies/Query.java index dcd44d2a..54598a7e 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/Query.java +++ b/src/main/java/org/apache/pirk/query/wideskies/Query.java @@ -42,7 +42,7 @@ public class Query implements Serializable, Storable { @JsonSerialize - private static final long serialVersionUID = 1L; + public static final long querySerialVersionUID = 1L; @JsonIgnore private static final Logger logger = LoggerFactory.getLogger(Query.class); diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java index d052590d..499e0636 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java +++ b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java @@ -1,21 +1,30 @@ package org.apache.pirk.query.wideskies; +import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.deser.std.StdDeserializer; -import org.apache.pirk.serialization.JsonSerializer; +import org.apache.pirk.schema.query.QuerySchema; +import org.apache.pirk.schema.query.filter.DataFilter; +import org.apache.pirk.schema.query.filter.FilterFactory; +import org.apache.pirk.utils.PIRException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.math.BigInteger; -import java.util.SortedMap; +import java.util.*; /** - * Created by walter on 9/28/16. + * Custom deserializer for Query class for Jackson. */ public class QueryDeserializer extends StdDeserializer { + private static final Logger logger = LoggerFactory.getLogger(QueryDeserializer.class); + public QueryDeserializer(){ this(null); } @@ -24,11 +33,20 @@ public QueryDeserializer(Class vc) { super(vc); } + private static ObjectMapper objectMapper = new ObjectMapper(); + @Override public Query deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { JsonNode node = jsonParser.getCodec().readTree(jsonParser); - QueryInfo queryInfo = JsonSerializer.objectMapper.convertValue(node.get("queryInfo"), QueryInfo.class); - SortedMap queryElements = JsonSerializer.objectMapper.convertValue(node.get("queryElements"), SortedMap.class); + // Check the version number. + long serialVersionUID = node.get("querySerialVersionUID").asLong(); + if (node.get("querySerialVersionUID").asLong() != Query.querySerialVersionUID) { + throw new IOException("Attempt to deserialize unsupported query version. 
Supported: " + + Query.querySerialVersionUID + "; Received: " + serialVersionUID); + } + // Then deserialize the Query Info + QueryInfo queryInfo = deserializeInfo(node.get("queryInfo")); + SortedMap queryElements = objectMapper.readValue(node.get("queryElements").toString(), new TypeReference>(){}); BigInteger N = new BigInteger(node.get("N").asText()); BigInteger NSquared = new BigInteger(node.get("NSquared").asText()); @@ -37,4 +55,69 @@ public Query deserialize(JsonParser jsonParser, DeserializationContext deseriali return query; } + + private QueryInfo deserializeInfo(JsonNode infoNode) throws IOException { + // Deserialize The Query Schema First. + long infoSerialVersionUID = infoNode.get("queryInfoSerialVersionUID").asLong(); + if (infoSerialVersionUID != QueryInfo.queryInfoSerialVersionUID) { + throw new IOException("Attempt to deserialize unsupported query info version. Supported: " + + QueryInfo.queryInfoSerialVersionUID + "; Received: " + infoSerialVersionUID); + } + QuerySchema querySchema = deserializeSchema(infoNode.get("qSchema")); + QueryInfo info = new QueryInfo( + UUID.fromString(infoNode.get("identifier").asText()), + infoNode.get("numSelectors").asInt(), + infoNode.get("hashBitSize").asInt(), + infoNode.get("hashKey").asText(), + infoNode.get("dataPartitionBitSize").asInt(), + infoNode.get("queryType").asText(), + infoNode.get("useExpLookupTable").asBoolean(), + infoNode.get("embedSelector").asBoolean(), + infoNode.get("useHDFSExpLookupTable").asBoolean() + ); + info.addQuerySchema(querySchema); + return info; + } + + private QuerySchema deserializeSchema(JsonNode schemaNode) throws IOException { + // Deserialize The Query Schema First. + long infoSerialVersionUID = schemaNode.get("querySchemaSerialVersionUID").asLong(); + if (infoSerialVersionUID != QuerySchema.querySchemaSerialVersionUID) { + throw new IOException("Attempt to deserialize unsupported query info version. 
Supported: " + + QueryInfo.queryInfoSerialVersionUID + "; Received: " + infoSerialVersionUID); + } + String dataFilterName = schemaNode.get("filterTypeName").asText(); + Set filteredElementNames; + try { + filteredElementNames = objectMapper.readValue(schemaNode.get("filteredElementNames").toString(), new TypeReference>() { + }); + } catch (Exception e) { + logger.warn("No filtered element names for Query Schema deserialization."); + filteredElementNames = null; + } + // Set up the data filter + DataFilter dataFilter; + try { + dataFilter = FilterFactory.getFilter(dataFilterName, filteredElementNames); + } catch (PIRException e) { + logger.error("Error trying to create data filter from JSON.", e); + throw new IOException(e); + } + + QuerySchema querySchema = new QuerySchema( + schemaNode.get("schemaName").asText(), + schemaNode.get("dataSchemaName").asText(), + schemaNode.get("selectorName").asText(), + dataFilterName, + dataFilter, + schemaNode.get("dataElementSize").asInt() + ); + List elementNames = objectMapper.readValue(schemaNode.get("elementNames").toString(), new TypeReference>(){}); + querySchema.getElementNames().addAll(elementNames); + HashMap additionalFields = objectMapper.readValue(schemaNode.get("additionalFields").toString(), new TypeReference>(){}); + querySchema.getAdditionalFields().putAll(additionalFields); + return querySchema; + } + + } diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java index e63ad036..fd13206a 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java +++ b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java @@ -42,7 +42,7 @@ public class QueryInfo implements Serializable, Cloneable { @JsonSerialize - private static final long serialVersionUID = 1L; + public static final long queryInfoSerialVersionUID = 1L; @JsonIgnore private static final Logger logger = LoggerFactory.getLogger(QueryInfo.class); diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryInfoDeserializer.java b/src/main/java/org/apache/pirk/query/wideskies/QueryInfoDeserializer.java index 0be8ca55..c9e17a59 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/QueryInfoDeserializer.java +++ b/src/main/java/org/apache/pirk/query/wideskies/QueryInfoDeserializer.java @@ -3,25 +3,40 @@ import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.deser.std.StdDeserializer; import java.io.IOException; +import java.util.UUID; /** * Created by walter on 9/28/16. */ public class QueryInfoDeserializer extends StdDeserializer { - protected QueryInfoDeserializer(){ + public QueryInfoDeserializer(){ this(null); } - protected QueryInfoDeserializer(Class vc) { + public QueryInfoDeserializer(Class vc) { super(vc); } @Override public QueryInfo deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { + JsonNode node = jsonParser.getCodec().readTree(jsonParser); + long serialVersionUID = node.get("querySerialVersionUID").asLong(); + if (serialVersionUID != QueryInfo.queryInfoSerialVersionUID) { + throw new IOException("Attempt to deserialize unsupported query version. 
Supported: " + + QueryInfo.queryInfoSerialVersionUID + "; Received: " + serialVersionUID); + } + //Map infoMap = JsonSerializer.objectMapper.convertValue(node.get()); + UUID identifier = UUID.fromString(node.get("identifier").asText()); + String queryType = node.get("queryType").asText(); + int numSelectors = node.get("numSelectors").asInt(); + int hasBitSize = node.get("hashBitSize").asInt(); + String hashKey = node.get("hashKey").asText(); + int dataPartitionBitSize = node.get("dataPartitionBitSize").asInt(); + return null; } } diff --git a/src/main/java/org/apache/pirk/schema/query/QuerySchema.java b/src/main/java/org/apache/pirk/schema/query/QuerySchema.java index 6fa4dd5c..435edbc9 100644 --- a/src/main/java/org/apache/pirk/schema/query/QuerySchema.java +++ b/src/main/java/org/apache/pirk/schema/query/QuerySchema.java @@ -25,6 +25,8 @@ import java.util.List; import java.util.Set; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; import org.apache.pirk.schema.query.filter.DataFilter; /** @@ -33,37 +35,48 @@ */ public class QuerySchema implements Serializable { - private static final long serialVersionUID = 1L; + + @JsonSerialize + public static final long querySchemaSerialVersionUID = 1L; // This schema's name. + @JsonSerialize private final String schemaName; // Name of the data schema associated with this query schema. + @JsonSerialize private final String dataSchemaName; // Name of element in the dataSchema to be used as the selector. + @JsonSerialize private final String selectorName; // Element names from the data schema to include in the response. // Order matters for packing/unpacking. + @JsonSerialize private final List elementNames = new ArrayList<>(); // Name of class to use in data filtering. + @JsonSerialize private final String filterTypeName; // Instance of the filterTypeName. + @JsonIgnore private final DataFilter filter; // Set of data schema element names on which to apply filtering. + @JsonSerialize private final Set filteredElementNames = new HashSet<>(); // Total number of bits to be returned for each data element hit. + @JsonSerialize private final int dataElementSize; // Additional fields by key,value + @JsonSerialize private final HashMap additionalFields = new HashMap<>(); - QuerySchema(String schemaName, String dataSchemaName, String selectorName, String filterTypeName, DataFilter filter, int dataElementSize) + public QuerySchema(String schemaName, String dataSchemaName, String selectorName, String filterTypeName, DataFilter filter, int dataElementSize) { this.schemaName = schemaName; this.dataSchemaName = dataSchemaName; From fdeb7d77117691f7dda63a2a180d805861216893 Mon Sep 17 00:00:00 2001 From: Walter Ray-Dulany Date: Tue, 4 Oct 2016 15:21:22 -0400 Subject: [PATCH 04/11] Query serialization seems to be working now; at least it doesn't fail to get recalled. 
--- .../apache/pirk/query/wideskies/Query.java | 5 ++- .../query/wideskies/QueryDeserializer.java | 36 ++++++++++++---- .../pirk/query/wideskies/QueryInfo.java | 7 +++- .../wideskies/QueryInfoDeserializer.java | 42 ------------------- .../apache/pirk/schema/query/QuerySchema.java | 4 +- 5 files changed, 39 insertions(+), 55 deletions(-) delete mode 100644 src/main/java/org/apache/pirk/query/wideskies/QueryInfoDeserializer.java diff --git a/src/main/java/org/apache/pirk/query/wideskies/Query.java b/src/main/java/org/apache/pirk/query/wideskies/Query.java index 54598a7e..914af9fd 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/Query.java +++ b/src/main/java/org/apache/pirk/query/wideskies/Query.java @@ -41,9 +41,12 @@ @JsonDeserialize(using = QueryDeserializer.class) public class Query implements Serializable, Storable { - @JsonSerialize public static final long querySerialVersionUID = 1L; + // So that we can serialize the version number in jackson. + @JsonSerialize + public final long queryVersion = querySerialVersionUID; + @JsonIgnore private static final Logger logger = LoggerFactory.getLogger(Query.class); diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java index 499e0636..3f655fa9 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java +++ b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java @@ -1,3 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ package org.apache.pirk.query.wideskies; import com.fasterxml.jackson.core.type.TypeReference; @@ -39,10 +57,10 @@ public QueryDeserializer(Class vc) { public Query deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { JsonNode node = jsonParser.getCodec().readTree(jsonParser); // Check the version number. - long serialVersionUID = node.get("querySerialVersionUID").asLong(); - if (node.get("querySerialVersionUID").asLong() != Query.querySerialVersionUID) { + long queryVersion = node.get("queryVersion").asLong(); + if (queryVersion != Query.querySerialVersionUID) { throw new IOException("Attempt to deserialize unsupported query version. Supported: " - + Query.querySerialVersionUID + "; Received: " + serialVersionUID); + + Query.querySerialVersionUID + "; Received: " + queryVersion); } // Then deserialize the Query Info QueryInfo queryInfo = deserializeInfo(node.get("queryInfo")); @@ -58,10 +76,10 @@ public Query deserialize(JsonParser jsonParser, DeserializationContext deseriali private QueryInfo deserializeInfo(JsonNode infoNode) throws IOException { // Deserialize The Query Schema First. 
- long infoSerialVersionUID = infoNode.get("queryInfoSerialVersionUID").asLong(); - if (infoSerialVersionUID != QueryInfo.queryInfoSerialVersionUID) { + long infoVersion = infoNode.get("queryInfoVersion").asLong(); + if (infoVersion != QueryInfo.queryInfoSerialVersionUID) { throw new IOException("Attempt to deserialize unsupported query info version. Supported: " - + QueryInfo.queryInfoSerialVersionUID + "; Received: " + infoSerialVersionUID); + + QueryInfo.queryInfoSerialVersionUID + "; Received: " + infoVersion); } QuerySchema querySchema = deserializeSchema(infoNode.get("qSchema")); QueryInfo info = new QueryInfo( @@ -81,10 +99,10 @@ private QueryInfo deserializeInfo(JsonNode infoNode) throws IOException { private QuerySchema deserializeSchema(JsonNode schemaNode) throws IOException { // Deserialize The Query Schema First. - long infoSerialVersionUID = schemaNode.get("querySchemaSerialVersionUID").asLong(); - if (infoSerialVersionUID != QuerySchema.querySchemaSerialVersionUID) { + long schemaVersion = schemaNode.get("querySchemaVersion").asLong(); + if (schemaVersion!= QuerySchema.querySchemaSerialVersionUID) { throw new IOException("Attempt to deserialize unsupported query info version. Supported: " - + QueryInfo.queryInfoSerialVersionUID + "; Received: " + infoSerialVersionUID); + + QueryInfo.queryInfoSerialVersionUID + "; Received: " + schemaVersion); } String dataFilterName = schemaNode.get("filterTypeName").asText(); Set filteredElementNames; diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java index fd13206a..ef607e0a 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java +++ b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java @@ -38,12 +38,15 @@ * Note that the hash key is specific to the query. If we have hash collisions over our selector set, we will append integers to the key starting with 0 until * we no longer have collisions */ -@JsonDeserialize(using = QueryInfoDeserializer.class) +//@JsonDeserialize(using = QueryInfoDeserializer.class) public class QueryInfo implements Serializable, Cloneable { - @JsonSerialize public static final long queryInfoSerialVersionUID = 1L; + // So that we can serialize the version number in jackson. + @JsonSerialize + public final long queryInfoVersion = queryInfoSerialVersionUID; + @JsonIgnore private static final Logger logger = LoggerFactory.getLogger(QueryInfo.class); diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryInfoDeserializer.java b/src/main/java/org/apache/pirk/query/wideskies/QueryInfoDeserializer.java deleted file mode 100644 index c9e17a59..00000000 --- a/src/main/java/org/apache/pirk/query/wideskies/QueryInfoDeserializer.java +++ /dev/null @@ -1,42 +0,0 @@ -package org.apache.pirk.query.wideskies; - -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.deser.std.StdDeserializer; - -import java.io.IOException; -import java.util.UUID; - -/** - * Created by walter on 9/28/16. 
- */ -public class QueryInfoDeserializer extends StdDeserializer { - public QueryInfoDeserializer(){ - this(null); - } - - public QueryInfoDeserializer(Class vc) { - super(vc); - } - - @Override - public QueryInfo deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { - JsonNode node = jsonParser.getCodec().readTree(jsonParser); - long serialVersionUID = node.get("querySerialVersionUID").asLong(); - if (serialVersionUID != QueryInfo.queryInfoSerialVersionUID) { - throw new IOException("Attempt to deserialize unsupported query version. Supported: " - + QueryInfo.queryInfoSerialVersionUID + "; Received: " + serialVersionUID); - } - //Map infoMap = JsonSerializer.objectMapper.convertValue(node.get()); - UUID identifier = UUID.fromString(node.get("identifier").asText()); - String queryType = node.get("queryType").asText(); - int numSelectors = node.get("numSelectors").asInt(); - int hasBitSize = node.get("hashBitSize").asInt(); - String hashKey = node.get("hashKey").asText(); - int dataPartitionBitSize = node.get("dataPartitionBitSize").asInt(); - - return null; - } -} diff --git a/src/main/java/org/apache/pirk/schema/query/QuerySchema.java b/src/main/java/org/apache/pirk/schema/query/QuerySchema.java index 435edbc9..e9873da4 100644 --- a/src/main/java/org/apache/pirk/schema/query/QuerySchema.java +++ b/src/main/java/org/apache/pirk/schema/query/QuerySchema.java @@ -35,9 +35,11 @@ */ public class QuerySchema implements Serializable { + public static final long querySchemaSerialVersionUID = 1L; + // So that we can serialize the version number in jackson. @JsonSerialize - public static final long querySchemaSerialVersionUID = 1L; + public final long querySchemaVersion = querySchemaSerialVersionUID; // This schema's name. @JsonSerialize From b31afcfe7f01ebda6a19e27b5e651f8dc8763f16 Mon Sep 17 00:00:00 2001 From: Walter Ray-Dulany Date: Tue, 4 Oct 2016 15:42:20 -0400 Subject: [PATCH 05/11] Response deserializer compiles. --- .../query/wideskies/QueryDeserializer.java | 4 +- .../pirk/response/wideskies/Response.java | 6 +- .../wideskies/ResponseDeserializer.java | 68 +++++++++++++++++++ 3 files changed, 75 insertions(+), 3 deletions(-) create mode 100644 src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java index 3f655fa9..48c2b439 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java +++ b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java @@ -74,7 +74,7 @@ public Query deserialize(JsonParser jsonParser, DeserializationContext deseriali return query; } - private QueryInfo deserializeInfo(JsonNode infoNode) throws IOException { + public static QueryInfo deserializeInfo(JsonNode infoNode) throws IOException { // Deserialize The Query Schema First. long infoVersion = infoNode.get("queryInfoVersion").asLong(); if (infoVersion != QueryInfo.queryInfoSerialVersionUID) { @@ -97,7 +97,7 @@ private QueryInfo deserializeInfo(JsonNode infoNode) throws IOException { return info; } - private QuerySchema deserializeSchema(JsonNode schemaNode) throws IOException { + public static QuerySchema deserializeSchema(JsonNode schemaNode) throws IOException { // Deserialize The Query Schema First. 
long schemaVersion = schemaNode.get("querySchemaVersion").asLong(); if (schemaVersion!= QuerySchema.querySchemaSerialVersionUID) { diff --git a/src/main/java/org/apache/pirk/response/wideskies/Response.java b/src/main/java/org/apache/pirk/response/wideskies/Response.java index e3fdad12..52b47e3f 100644 --- a/src/main/java/org/apache/pirk/response/wideskies/Response.java +++ b/src/main/java/org/apache/pirk/response/wideskies/Response.java @@ -22,6 +22,7 @@ import java.math.BigInteger; import java.util.TreeMap; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import org.apache.pirk.query.wideskies.QueryInfo; import org.apache.pirk.serialization.Storable; @@ -31,9 +32,12 @@ * Serialized and returned to the querier for decryption * */ +@JsonDeserialize(using = ResponseDeserializer.class) public class Response implements Serializable, Storable { - private static final long serialVersionUID = 1L; + public static final long responseSerialVersionUID = 1L; + + public final long responseVersion = responseSerialVersionUID; private QueryInfo queryInfo = null; // holds all query info diff --git a/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java b/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java new file mode 100644 index 00000000..2562852a --- /dev/null +++ b/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.pirk.response.wideskies; + +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; +import org.apache.pirk.query.wideskies.QueryDeserializer; +import org.apache.pirk.query.wideskies.QueryInfo; +import org.apache.pirk.response.wideskies.Response; + +import java.io.IOException; +import java.math.BigInteger; +import java.util.TreeMap; + +/** + * Custom deserializer for Response class for Jackson. + */ +public class ResponseDeserializer extends StdDeserializer { + + public ResponseDeserializer() { this(null); } + + public ResponseDeserializer(Class vc) { super(vc);} + + private static ObjectMapper objectMapper = new ObjectMapper(); + + + @Override + public Response deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { + JsonNode node = jsonParser.getCodec().readTree(jsonParser); + // Check the version number. 
+ long responseVersion = node.get("responseVersion").asLong(); + if (responseVersion != Response.responseSerialVersionUID) { + throw new IOException("Attempt to deserialize unsupported query version. Supported: " + + Response.responseSerialVersionUID + "; Received: " + responseVersion); + } + // Then deserialize the Query Info + QueryInfo queryInfo = QueryDeserializer.deserializeInfo(node.get("queryInfo")); + // Form the initial response object + Response response = new Response(queryInfo); + // Get the response elements + TreeMap responseElements = objectMapper.readValue(node.get("queryElements").toString(), new TypeReference>(){}); + response.setResponseElements(responseElements); + + return response; + } +} From 42e2bb6082a601e5e503e76170499baff37fc478 Mon Sep 17 00:00:00 2001 From: Walter Ray-Dulany Date: Wed, 5 Oct 2016 08:31:51 -0400 Subject: [PATCH 06/11] Custom deserializers complete, as are class amendments to make them work. --- .../org/apache/pirk/encryption/Paillier.java | 165 +++++++----------- .../pirk/querier/wideskies/Querier.java | 26 ++- .../wideskies/QuerierDeserializer.java | 87 +++++++++ .../apache/pirk/query/wideskies/Query.java | 69 +++----- .../query/wideskies/QueryDeserializer.java | 40 ++++- .../pirk/query/wideskies/QueryInfo.java | 101 ++++------- .../pirk/response/wideskies/Response.java | 21 +-- .../wideskies/ResponseDeserializer.java | 13 +- .../apache/pirk/schema/query/QuerySchema.java | 65 +++---- .../pirk/serialization/JsonSerializer.java | 27 +-- .../serialization/SerializationService.java | 14 +- .../pirk/serialization/StorageService.java | 2 +- 12 files changed, 302 insertions(+), 328 deletions(-) create mode 100644 src/main/java/org/apache/pirk/querier/wideskies/QuerierDeserializer.java diff --git a/src/main/java/org/apache/pirk/encryption/Paillier.java b/src/main/java/org/apache/pirk/encryption/Paillier.java index 72f59b23..0ccf4516 100644 --- a/src/main/java/org/apache/pirk/encryption/Paillier.java +++ b/src/main/java/org/apache/pirk/encryption/Paillier.java @@ -23,6 +23,7 @@ import java.security.GeneralSecurityException; import java.security.SecureRandom; +import com.fasterxml.jackson.annotation.JsonIgnore; import org.apache.pirk.utils.PIRException; import org.apache.pirk.utils.SystemConfiguration; import org.slf4j.Logger; @@ -65,31 +66,24 @@ *
* Ref: Paillier, Pascal. "Public-Key Cryptosystems Based on Composite Degree Residuosity Classes." EUROCRYPT'99. */ -public final class Paillier implements Serializable -{ +public final class Paillier implements Serializable { private static final long serialVersionUID = 1L; private static final Logger logger = LoggerFactory.getLogger(Paillier.class); private static final SecureRandom secureRandom; - static - { - try - { + static { + try { String alg = SystemConfiguration.getProperty("pallier.secureRandom.algorithm"); - if (alg == null) - { + if (alg == null) { secureRandom = new SecureRandom(); - } - else - { + } else { String provider = SystemConfiguration.getProperty("pallier.secureRandom.provider"); secureRandom = (provider == null) ? SecureRandom.getInstance(alg) : SecureRandom.getInstance(alg, provider); } logger.info("Using secure random from " + secureRandom.getProvider().getName() + ":" + secureRandom.getAlgorithm()); - } catch (GeneralSecurityException e) - { + } catch (GeneralSecurityException e) { logger.error("Unable to instantiate a SecureRandom object with the requested algorithm.", e); throw new RuntimeException("Unable to instantiate a SecureRandom object with the requested algorithm.", e); } @@ -99,33 +93,30 @@ public final class Paillier implements Serializable private BigInteger q; // large prime private BigInteger N; // N=pq, RSA modulus + @JsonIgnore private BigInteger NSquared; // NSquared = N^2 + @JsonIgnore private BigInteger lambdaN; // lambda(N) = lcm(p-1,q-1), Carmichael function of N + @JsonIgnore private BigInteger w; // lambda(N)^-1 mod N private final int bitLength; // bit length of the modulus N /** * Creates a Paillier algorithm with all parameters specified. - * - * @param p - * First large prime. - * @param q - * Second large prime. - * @param bitLength - * Bit length of the modulus {@code N}. - * @throws IllegalArgumentException - * If {@code p} or {@code q} do not satisfy primality constraints. + * + * @param p First large prime. + * @param q Second large prime. + * @param bitLength Bit length of the modulus {@code N}. + * @throws IllegalArgumentException If {@code p} or {@code q} do not satisfy primality constraints. */ - public Paillier(BigInteger p, BigInteger q, int bitLength) - { + public Paillier(BigInteger p, BigInteger q, int bitLength) { this.bitLength = bitLength; // Verify the prime conditions are satisfied int primeCertainty = SystemConfiguration.getIntProperty("pir.primeCertainty", 128); BigInteger three = BigInteger.valueOf(3); - if ((p.compareTo(three) < 0) || (q.compareTo(three) < 0) || p.equals(q) || !p.isProbablePrime(primeCertainty) || !q.isProbablePrime(primeCertainty)) - { + if ((p.compareTo(three) < 0) || (q.compareTo(three) < 0) || p.equals(q) || !p.isProbablePrime(primeCertainty) || !q.isProbablePrime(primeCertainty)) { throw new IllegalArgumentException("p = " + p + " q = " + q + " do not satisfy primality constraints"); } @@ -146,16 +137,12 @@ public Paillier(BigInteger p, BigInteger q, int bitLength) *
* The probability that the generated keys represent primes will exceed (1 - (1/2){@code certainty}). The execution time of this constructor is * proportional to the value of this parameter. - * - * @param bitLength - * The bit length of the resulting modulus {@code N}. - * @param certainty - * The probability that the new {@code p} and {@code q} represent prime numbers. - * @throws IllegalArgumentException - * If the {@code certainty} is less than the system allowed lower bound. + * + * @param bitLength The bit length of the resulting modulus {@code N}. + * @param certainty The probability that the new {@code p} and {@code q} represent prime numbers. + * @throws IllegalArgumentException If the {@code certainty} is less than the system allowed lower bound. */ - public Paillier(int bitLength, int certainty) - { + public Paillier(int bitLength, int certainty) { this(bitLength, certainty, -1); } @@ -168,25 +155,18 @@ public Paillier(int bitLength, int certainty) * proportional to the value of this parameter. *
* When ensureBitSet > -1 the value of bit "{@code ensureBitSet}" in modulus {@code N} will be set. - * - * @param bitLength - * The bit length of the resulting modulus {@code N}. - * @param certainty - * The probability that the new {@code p} and {@code q} represent prime numbers. - * @param ensureBitSet - * index of bit in {@code N} to ensure is set. - * @throws IllegalArgumentException - * If the {@code certainty} is less than the system allowed lower bound, or the index of {@code ensureBitSet} is greater than the {@code bitLength}. + * + * @param bitLength The bit length of the resulting modulus {@code N}. + * @param certainty The probability that the new {@code p} and {@code q} represent prime numbers. + * @param ensureBitSet index of bit in {@code N} to ensure is set. + * @throws IllegalArgumentException If the {@code certainty} is less than the system allowed lower bound, or the index of {@code ensureBitSet} is greater than the {@code bitLength}. */ - public Paillier(int bitLength, int certainty, int ensureBitSet) - { + public Paillier(int bitLength, int certainty, int ensureBitSet) { int systemPrimeCertainty = SystemConfiguration.getIntProperty("pir.primeCertainty", 128); - if (certainty < systemPrimeCertainty) - { + if (certainty < systemPrimeCertainty) { throw new IllegalArgumentException("Input certainty = " + certainty + " is less than allowed system lower bound = " + systemPrimeCertainty); } - if (ensureBitSet >= bitLength) - { + if (ensureBitSet >= bitLength) { throw new IllegalArgumentException("ensureBitSet = " + ensureBitSet + " must be less than bitLengthInput = " + bitLength); } this.bitLength = bitLength; @@ -198,41 +178,37 @@ public Paillier(int bitLength, int certainty, int ensureBitSet) /** * Returns the value of the large prime {@code p}. - * + * * @return p. */ - public BigInteger getP() - { + public BigInteger getP() { return p; } /** * Returns the value of the large prime {@code q}. - * + * * @return q. */ - public BigInteger getQ() - { + public BigInteger getQ() { return q; } /** * Returns the RSA modulus value {@code N}. - * + * * @return N, the product of {@code p} and {@code q}. */ - public BigInteger getN() - { + public BigInteger getN() { return N; } /** * Returns the value of {@code N}2. - * + * * @return N squared. */ - public BigInteger getNSquared() - { + public BigInteger getNSquared() { return NSquared; } @@ -240,32 +216,27 @@ public BigInteger getNSquared() * Returns the value of Carmichael's function at {@code N}. *
* The Carmichael function of {@code N} is the least common multiple of {@code p-1} and {@code q-1}, - * + * * @return Carmichael's function at {@code N}. */ - public BigInteger getLambdaN() - { + public BigInteger getLambdaN() { return lambdaN; } /** * Returns the bit length of the modulus {@code N}. - * + * * @return the bit length, as an integer. */ - public int getBitLength() - { + public int getBitLength() { return bitLength; } - private void generateKeys(int bitLength, int certainty, final int ensureBitSet) - { + private void generateKeys(int bitLength, int certainty, final int ensureBitSet) { getKeys(bitLength, certainty); - if (ensureBitSet > -1) - { - while (!N.testBit(ensureBitSet)) - { + if (ensureBitSet > -1) { + while (!N.testBit(ensureBitSet)) { logger.info("testBit false\n N = " + N.toString(2)); getKeys(bitLength, certainty); } @@ -273,8 +244,7 @@ private void generateKeys(int bitLength, int certainty, final int ensureBitSet) } } - private void getKeys(int bitLength, int certainty) - { + private void getKeys(int bitLength, int certainty) { // Generate the primes BigInteger[] pq = PrimeGenerator.getPrimePair(bitLength, certainty, secureRandom); p = pq[0]; @@ -283,8 +253,7 @@ private void getKeys(int bitLength, int certainty) N = p.multiply(q); } - private void setDerivativeElements() - { + private void setDerivativeElements() { NSquared = N.multiply(N); // lambda(N) = lcm(p-1,q-1) @@ -296,18 +265,14 @@ private void setDerivativeElements() /** * Returns the encrypted value of {@code m} using a generated random value. * - * @param m - * the value to be encrypted. + * @param m the value to be encrypted. * @return the encrypted value - * @throws PIRException - * If {@code m} is not less than @{code N}. + * @throws PIRException If {@code m} is not less than @{code N}. */ - public BigInteger encrypt(BigInteger m) throws PIRException - { + public BigInteger encrypt(BigInteger m) throws PIRException { // Generate a random value r in (Z/NZ)* BigInteger r = (new BigInteger(bitLength, secureRandom)).mod(N); - while (r.equals(BigInteger.ZERO) || r.equals(BigInteger.ONE) || r.mod(p).equals(BigInteger.ZERO) || r.mod(q).equals(BigInteger.ZERO)) - { + while (r.equals(BigInteger.ZERO) || r.equals(BigInteger.ONE) || r.mod(p).equals(BigInteger.ZERO) || r.mod(q).equals(BigInteger.ZERO)) { r = (new BigInteger(bitLength, secureRandom)).mod(N); } @@ -316,19 +281,14 @@ public BigInteger encrypt(BigInteger m) throws PIRException /** * Returns the ciphertext of a message using the given random value. - * - * @param m - * the value to be encrypted. - * @param r - * the random value to use in the Pailler encryption. + * + * @param m the value to be encrypted. + * @param r the random value to use in the Pailler encryption. * @return the encrypted value. - * @throws PIRException - * If {@code m} is not less than @{code N}. + * @throws PIRException If {@code m} is not less than @{code N}. */ - public BigInteger encrypt(BigInteger m, BigInteger r) throws PIRException - { - if (m.compareTo(N) >= 0) - { + public BigInteger encrypt(BigInteger m, BigInteger r) throws PIRException { + if (m.compareTo(N) >= 0) { throw new PIRException("m = " + m.toString(2) + " is greater than or equal to N = " + N.toString(2)); } @@ -341,13 +301,11 @@ public BigInteger encrypt(BigInteger m, BigInteger r) throws PIRException /** * Returns the plaintext message for a given ciphertext. - * - * @param c - * an encrypted value. + * + * @param c an encrypted value. * @return the corresponding plaintext value. 
*/ - public BigInteger decrypt(BigInteger c) - { + public BigInteger decrypt(BigInteger c) { // w = lambda(N)^-1 mod N; x = c^(lambda(N)) mod N^2; y = (x-1)/N; d = yw mod N BigInteger x = ModPowAbstraction.modPow(c, lambdaN, NSquared); BigInteger y = (x.subtract(BigInteger.ONE)).divide(N); @@ -355,8 +313,7 @@ public BigInteger decrypt(BigInteger c) return (y.multiply(w)).mod(N); } - private String parametersToString() - { + private String parametersToString() { return "p = " + p.intValue() + " q = " + q.intValue() + " N = " + N.intValue() + " NSquared = " + NSquared.intValue() + " lambdaN = " + lambdaN.intValue() + " bitLength = " + bitLength; } diff --git a/src/main/java/org/apache/pirk/querier/wideskies/Querier.java b/src/main/java/org/apache/pirk/querier/wideskies/Querier.java index b63e06ef..d075601c 100644 --- a/src/main/java/org/apache/pirk/querier/wideskies/Querier.java +++ b/src/main/java/org/apache/pirk/querier/wideskies/Querier.java @@ -22,6 +22,7 @@ import java.util.List; import java.util.Map; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import org.apache.pirk.encryption.Paillier; import org.apache.pirk.query.wideskies.Query; import org.apache.pirk.serialization.Storable; @@ -29,9 +30,11 @@ /** * Class to hold the information necessary for the PIR querier to perform decryption */ -public class Querier implements Serializable, Storable -{ - private static final long serialVersionUID = 1L; +@JsonDeserialize(using = QuerierDeserializer.class) +public class Querier implements Serializable, Storable { + public static final long querierSerialVersionUID = 1L; + + public final long querierVersion = querierSerialVersionUID; private Query query = null; // contains the query vectors and functionality @@ -42,10 +45,9 @@ public class Querier implements Serializable, Storable // map to check the embedded selectors in the results for false positives; // if the selector is a fixed size < 32 bits, it is included as is // if the selector is of variable lengths - private Map embedSelectorMap = null; + private Map embedSelectorMap = null; - public Querier(List selectorsInput, Paillier paillierInput, Query queryInput, Map embedSelectorMapInput) - { + public Querier(List selectorsInput, Paillier paillierInput, Query queryInput, Map embedSelectorMapInput) { selectors = selectorsInput; paillier = paillierInput; @@ -55,23 +57,19 @@ public Querier(List selectorsInput, Paillier paillierInput, Query queryI embedSelectorMap = embedSelectorMapInput; } - public Query getQuery() - { + public Query getQuery() { return query; } - public Paillier getPaillier() - { + public Paillier getPaillier() { return paillier; } - public List getSelectors() - { + public List getSelectors() { return selectors; } - public Map getEmbedSelectorMap() - { + public Map getEmbedSelectorMap() { return embedSelectorMap; } } diff --git a/src/main/java/org/apache/pirk/querier/wideskies/QuerierDeserializer.java b/src/main/java/org/apache/pirk/querier/wideskies/QuerierDeserializer.java new file mode 100644 index 00000000..86e62858 --- /dev/null +++ b/src/main/java/org/apache/pirk/querier/wideskies/QuerierDeserializer.java @@ -0,0 +1,87 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.pirk.querier.wideskies; + +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; +import org.apache.pirk.encryption.Paillier; +import org.apache.pirk.query.wideskies.Query; + +import java.io.IOException; +import java.math.BigInteger; +import java.util.List; +import java.util.Map; + +/** + * Custom deserializer for Querier class for Jackson. + */ +public class QuerierDeserializer extends StdDeserializer { + + public QuerierDeserializer() { + this(null); + } + + public QuerierDeserializer(Class vc) { + super(vc); + } + + private static ObjectMapper objectMapper = new ObjectMapper(); + + + @Override + public Querier deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { + JsonNode node = jsonParser.getCodec().readTree(jsonParser); + // Check the version number. + long querierVersion = node.get("querierVersion").asLong(); + if (querierVersion != Querier.querierSerialVersionUID) { + throw new IOException("Attempt to deserialize unsupported query version. Supported: " + + Querier.querierSerialVersionUID + "; Received: " + querierVersion); + } + // Then deserialize the Query Info + Query query = objectMapper.readValue(node.get("query").toString(), Query.class); + + // Now Paillier + Paillier paillier = deserializePaillier(node.get("paillier")); + + List selectors = objectMapper.readValue(node.get("selectors").toString(), new TypeReference>() { + }); + Map embedSelectorMap = objectMapper.readValue(node.get("embedSelectorMap").toString(), new TypeReference>() { + }); + + return new Querier(selectors, paillier, query, embedSelectorMap); + } + + /** + * Deserializes a Paillier JsonNode. + * + * @param paillier A JsonNode at the root of a serialied Paillier object. + * @return A Paillier object of the deserialized Json. 
+ */ + private Paillier deserializePaillier(JsonNode paillier) { + BigInteger p = new BigInteger(paillier.get("p").asText()); + BigInteger q = new BigInteger(paillier.get("q").asText()); + int bitLength = paillier.get("bitLength").asInt(); + return new Paillier(p, q, bitLength); + } +} diff --git a/src/main/java/org/apache/pirk/query/wideskies/Query.java b/src/main/java/org/apache/pirk/query/wideskies/Query.java index 914af9fd..3e6446f7 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/Query.java +++ b/src/main/java/org/apache/pirk/query/wideskies/Query.java @@ -28,7 +28,6 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; import org.apache.pirk.encryption.ModPowAbstraction; import org.apache.pirk.serialization.Storable; import org.slf4j.Logger; @@ -36,91 +35,76 @@ /** * Class to hold the PIR query vectors - * */ @JsonDeserialize(using = QueryDeserializer.class) -public class Query implements Serializable, Storable -{ +public class Query implements Serializable, Storable { public static final long querySerialVersionUID = 1L; // So that we can serialize the version number in jackson. - @JsonSerialize public final long queryVersion = querySerialVersionUID; - @JsonIgnore private static final Logger logger = LoggerFactory.getLogger(Query.class); - @JsonSerialize + private final QueryInfo queryInfo; // holds all query info - @JsonSerialize - private final SortedMap queryElements; // query elements - ordered on insertion + + private final SortedMap queryElements; // query elements - ordered on insertion // lookup table for exponentiation of query vectors - based on dataPartitionBitSize // element -> @JsonIgnore - private Map> expTable = new ConcurrentHashMap<>(); + private Map> expTable = new ConcurrentHashMap<>(); // File based lookup table for modular exponentiation // element hash -> filename containing it's modular exponentiations @JsonIgnore - private Map expFileBasedLookup = new HashMap<>(); + private Map expFileBasedLookup = new HashMap<>(); - @JsonSerialize private final BigInteger N; // N=pq, RSA modulus for the Paillier encryption associated with the queryElements - @JsonSerialize + private final BigInteger NSquared; - public Query(QueryInfo queryInfo, BigInteger N, SortedMap queryElements) - { + public Query(QueryInfo queryInfo, BigInteger N, SortedMap queryElements) { this(queryInfo, N, N.pow(2), queryElements); } - public Query(QueryInfo queryInfo, BigInteger N, BigInteger NSquared, SortedMap queryElements) - { + public Query(QueryInfo queryInfo, BigInteger N, BigInteger NSquared, SortedMap queryElements) { this.queryInfo = queryInfo; this.N = N; this.NSquared = NSquared; this.queryElements = queryElements; } - public QueryInfo getQueryInfo() - { + public QueryInfo getQueryInfo() { return queryInfo; } - public SortedMap getQueryElements() - { + public SortedMap getQueryElements() { return queryElements; } - public BigInteger getQueryElement(int index) - { + public BigInteger getQueryElement(int index) { return queryElements.get(index); } - public BigInteger getN() - { + public BigInteger getN() { return N; } - public BigInteger getNSquared() - { + public BigInteger getNSquared() { return NSquared; } - public Map getExpFileBasedLookup() - { + public Map getExpFileBasedLookup() { return expFileBasedLookup; } - public String getExpFile(int i) - { + public String getExpFile(int i) { return expFileBasedLookup.get(i); } - public void 
setExpFileBasedLookup(Map expInput) - { + public void setExpFileBasedLookup(Map expInput) { expFileBasedLookup = expInput; } @@ -128,18 +112,14 @@ public void setExpFileBasedLookup(Map expInput) * This should be called after all query elements have been added in order to generate the expTable. For int exponentiation with BigIntegers, assumes that * dataPartitionBitSize < 32. */ - public void generateExpTable() - { + public void generateExpTable() { int maxValue = (1 << queryInfo.getDataPartitionBitSize()) - 1; // 2^partitionBitSize - 1 - queryElements.values().parallelStream().forEach(new Consumer() - { + queryElements.values().parallelStream().forEach(new Consumer() { @Override - public void accept(BigInteger element) - { - Map powMap = new HashMap<>(maxValue); // - for (int i = 0; i <= maxValue; ++i) - { + public void accept(BigInteger element) { + Map powMap = new HashMap<>(maxValue); // + for (int i = 0; i <= maxValue; ++i) { BigInteger value = ModPowAbstraction.modPow(element, BigInteger.valueOf(i), NSquared); powMap.put(i, value); } @@ -149,9 +129,8 @@ public void accept(BigInteger element) logger.debug("expTable.size() = " + expTable.keySet().size() + " NSquared = " + NSquared.intValue() + " = " + NSquared.toString()); } - public BigInteger getExp(BigInteger value, int power) - { - Map powerMap = expTable.get(value); + public BigInteger getExp(BigInteger value, int power) { + Map powerMap = expTable.get(value); return (powerMap == null) ? null : powerMap.get(power); } } diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java index 48c2b439..cffd1448 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java +++ b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java @@ -43,7 +43,7 @@ public class QueryDeserializer extends StdDeserializer { private static final Logger logger = LoggerFactory.getLogger(QueryDeserializer.class); - public QueryDeserializer(){ + public QueryDeserializer() { this(null); } @@ -60,13 +60,14 @@ public Query deserialize(JsonParser jsonParser, DeserializationContext deseriali long queryVersion = node.get("queryVersion").asLong(); if (queryVersion != Query.querySerialVersionUID) { throw new IOException("Attempt to deserialize unsupported query version. Supported: " - + Query.querySerialVersionUID + "; Received: " + queryVersion); + + Query.querySerialVersionUID + "; Received: " + queryVersion); } // Then deserialize the Query Info QueryInfo queryInfo = deserializeInfo(node.get("queryInfo")); - SortedMap queryElements = objectMapper.readValue(node.get("queryElements").toString(), new TypeReference>(){}); - BigInteger N = new BigInteger(node.get("N").asText()); - BigInteger NSquared = new BigInteger(node.get("NSquared").asText()); + SortedMap queryElements = objectMapper.readValue(node.get("queryElements").toString(), new TypeReference>() { + }); + BigInteger N = new BigInteger(node.get("n").asText()); + BigInteger NSquared = new BigInteger(node.get("nsquared").asText()); Query query = new Query(queryInfo, N, NSquared, queryElements); @@ -74,6 +75,13 @@ public Query deserialize(JsonParser jsonParser, DeserializationContext deseriali return query; } + /** + * Deserializes a QueryInfo JsonNode + * + * @param infoNode A JsonNode at the root of a serialied QueryInfo object. + * @return A QueryInfo object of the deserialized Json. 
+ * @throws IOException + */ public static QueryInfo deserializeInfo(JsonNode infoNode) throws IOException { // Deserialize The Query Schema First. long infoVersion = infoNode.get("queryInfoVersion").asLong(); @@ -81,7 +89,12 @@ public static QueryInfo deserializeInfo(JsonNode infoNode) throws IOException { throw new IOException("Attempt to deserialize unsupported query info version. Supported: " + QueryInfo.queryInfoSerialVersionUID + "; Received: " + infoVersion); } - QuerySchema querySchema = deserializeSchema(infoNode.get("qSchema")); + QuerySchema querySchema; + if (infoNode.get("querySchema").isNull()) { + querySchema = null; + } else { + querySchema = deserializeSchema(infoNode.get("querySchema")); + } QueryInfo info = new QueryInfo( UUID.fromString(infoNode.get("identifier").asText()), infoNode.get("numSelectors").asInt(), @@ -97,10 +110,17 @@ public static QueryInfo deserializeInfo(JsonNode infoNode) throws IOException { return info; } + /** + * Deserializes a QuerySchema JsonNode + * + * @param schemaNode A JsonNode at the root of a serialized QuerySchema object. + * @return A QuerySchema object of the deserialized Json. + * @throws IOException + */ public static QuerySchema deserializeSchema(JsonNode schemaNode) throws IOException { // Deserialize The Query Schema First. long schemaVersion = schemaNode.get("querySchemaVersion").asLong(); - if (schemaVersion!= QuerySchema.querySchemaSerialVersionUID) { + if (schemaVersion != QuerySchema.querySchemaSerialVersionUID) { throw new IOException("Attempt to deserialize unsupported query info version. Supported: " + QueryInfo.queryInfoSerialVersionUID + "; Received: " + schemaVersion); } @@ -130,9 +150,11 @@ public static QuerySchema deserializeSchema(JsonNode schemaNode) throws IOExcept dataFilter, schemaNode.get("dataElementSize").asInt() ); - List elementNames = objectMapper.readValue(schemaNode.get("elementNames").toString(), new TypeReference>(){}); + List elementNames = objectMapper.readValue(schemaNode.get("elementNames").toString(), new TypeReference>() { + }); querySchema.getElementNames().addAll(elementNames); - HashMap additionalFields = objectMapper.readValue(schemaNode.get("additionalFields").toString(), new TypeReference>(){}); + HashMap additionalFields = objectMapper.readValue(schemaNode.get("additionalFields").toString(), new TypeReference>() { + }); querySchema.getAdditionalFields().putAll(additionalFields); return querySchema; } diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java index ef607e0a..12ebf271 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java +++ b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java @@ -24,8 +24,6 @@ import java.util.Map; import java.util.UUID; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; import org.apache.pirk.schema.query.QuerySchema; import org.apache.pirk.schema.query.QuerySchemaRegistry; @@ -38,36 +36,31 @@ * Note that the hash key is specific to the query. 
If we have hash collisions over our selector set, we will append integers to the key starting with 0 until * we no longer have collisions */ -//@JsonDeserialize(using = QueryInfoDeserializer.class) -public class QueryInfo implements Serializable, Cloneable -{ +public class QueryInfo implements Serializable, Cloneable { public static final long queryInfoSerialVersionUID = 1L; // So that we can serialize the version number in jackson. - @JsonSerialize public final long queryInfoVersion = queryInfoSerialVersionUID; - @JsonIgnore private static final Logger logger = LoggerFactory.getLogger(QueryInfo.class); - @JsonSerialize private UUID identifier; // the identifier of the query - @JsonSerialize + private int numSelectors = 0; // the number of selectors in the query, given by \floor{paillerBitSize/dataPartitionBitSize} - @JsonSerialize + private String queryType = null; // QueryType string const - @JsonSerialize + private int hashBitSize = 0; // Bit size of the keyed hash function - @JsonSerialize + private String hashKey; // Key for the keyed hash function - @JsonSerialize + private int numBitsPerDataElement = 0; // total num bits per returned data value - defined relative to query type - @JsonSerialize + private int dataPartitionBitSize = 0; // num of bits for each partition of an incoming data element, must be < 32 right now - @JsonSerialize + private int numPartitionsPerDataElement = 0; // num partitions of size dataPartitionBitSize per data element @JsonSerialize @@ -77,24 +70,22 @@ public class QueryInfo implements Serializable, Cloneable private boolean useHDFSExpLookupTable = false; // whether or not to use the expLookupTable stored in HDFS // if it doesn't yet exist, it will be created within the cluster and stored in HDFS - @JsonSerialize + private boolean embedSelector = true; // whether or not to embed the selector in the results - results in a very low // false positive rate for variable length selectors and a zero false positive rate // for selectors of fixed size < 32 bits - @JsonSerialize + private QuerySchema qSchema = null; public QueryInfo(int numSelectorsInput, int hashBitSizeInput, String hashKeyInput, int dataPartitionBitSizeInput, String queryTypeInput, - boolean useExpLookupTableInput, boolean embedSelectorInput, boolean useHDFSExpLookupTableInput) - { + boolean useExpLookupTableInput, boolean embedSelectorInput, boolean useHDFSExpLookupTableInput) { this(UUID.randomUUID(), numSelectorsInput, hashBitSizeInput, hashKeyInput, dataPartitionBitSizeInput, queryTypeInput, useExpLookupTableInput, embedSelectorInput, useHDFSExpLookupTableInput); } public QueryInfo(UUID identifierInput, int numSelectorsInput, int hashBitSizeInput, String hashKeyInput, int dataPartitionBitSizeInput, String queryTypeInput, - boolean useExpLookupTableInput, boolean embedSelectorInput, boolean useHDFSExpLookupTableInput) - { + boolean useExpLookupTableInput, boolean embedSelectorInput, boolean useHDFSExpLookupTableInput) { identifier = identifierInput; queryType = queryTypeInput; @@ -111,16 +102,14 @@ public QueryInfo(UUID identifierInput, int numSelectorsInput, int hashBitSizeInp dataPartitionBitSize = dataPartitionBitSizeInput; numPartitionsPerDataElement = numBitsPerDataElement / dataPartitionBitSizeInput; - if (embedSelectorInput) - { + if (embedSelectorInput) { numPartitionsPerDataElement += 4; // using a 8-bit partition size and a 32-bit embedded selector } printQueryInfo(); } - public QueryInfo(Map queryInfoMap) - { + public QueryInfo(Map queryInfoMap) { // The Storm Config serializes the map 
as a json and reads back in with numeric values as longs. // So numerics need to be cast as a long and call .intValue. However, in PirkHashScheme the map contains ints. identifier = UUID.fromString((String) queryInfoMap.get("uuid")); @@ -129,15 +118,13 @@ public QueryInfo(Map queryInfoMap) useExpLookupTable = (boolean) queryInfoMap.get("useExpLookupTable"); useHDFSExpLookupTable = (boolean) queryInfoMap.get("useHDFSExpLookupTable"); embedSelector = (boolean) queryInfoMap.get("embedSelector"); - try - { + try { numSelectors = ((Long) queryInfoMap.get("numSelectors")).intValue(); hashBitSize = ((Long) queryInfoMap.get("hashBitSize")).intValue(); numBitsPerDataElement = ((Long) queryInfoMap.get("numBitsPerDataElement")).intValue(); numPartitionsPerDataElement = ((Long) queryInfoMap.get("numPartitionsPerDataElement")).intValue(); dataPartitionBitSize = ((Long) queryInfoMap.get("dataPartitionsBitSize")).intValue(); - } catch (ClassCastException e) - { + } catch (ClassCastException e) { numSelectors = (int) queryInfoMap.get("numSelectors"); hashBitSize = (int) queryInfoMap.get("hashBitSize"); numBitsPerDataElement = (int) queryInfoMap.get("numBitsPerDataElement"); @@ -146,64 +133,52 @@ public QueryInfo(Map queryInfoMap) } } - public UUID getIdentifier() - { + public UUID getIdentifier() { return identifier; } - public String getQueryType() - { + public String getQueryType() { return queryType; } - public int getNumSelectors() - { + public int getNumSelectors() { return numSelectors; } - public int getHashBitSize() - { + public int getHashBitSize() { return hashBitSize; } - public String getHashKey() - { + public String getHashKey() { return hashKey; } - public int getNumBitsPerDataElement() - { + public int getNumBitsPerDataElement() { return numBitsPerDataElement; } - public int getNumPartitionsPerDataElement() - { + public int getNumPartitionsPerDataElement() { return numPartitionsPerDataElement; } - public int getDataPartitionBitSize() - { + public int getDataPartitionBitSize() { return dataPartitionBitSize; } - public boolean useExpLookupTable() - { + public boolean useExpLookupTable() { return useExpLookupTable; } - public boolean useHDFSExpLookupTable() - { + public boolean useHDFSExpLookupTable() { return useHDFSExpLookupTable; } - public boolean getEmbedSelector() - { + public boolean getEmbedSelector() { return embedSelector; } - public Map toMap() - { - Map queryInfo = new HashMap(); + public Map toMap() { + Map queryInfo = new HashMap(); queryInfo.put("uuid", identifier.toString()); queryInfo.put("queryType", queryType); queryInfo.put("numSelectors", numSelectors); @@ -219,18 +194,15 @@ public Map toMap() return queryInfo; } - public void addQuerySchema(QuerySchema qSchemaIn) - { + public void addQuerySchema(QuerySchema qSchemaIn) { qSchema = qSchemaIn; } - public QuerySchema getQuerySchema() - { + public QuerySchema getQuerySchema() { return qSchema; } - public void printQueryInfo() - { + public void printQueryInfo() { logger.info("identifier = " + identifier + " numSelectors = " + numSelectors + " hashBitSize = " + hashBitSize + " hashKey = " + hashKey + " dataPartitionBitSize = " + dataPartitionBitSize + " numBitsPerDataElement = " + numBitsPerDataElement + " numPartitionsPerDataElement = " + numPartitionsPerDataElement + " queryType = " + queryType + " useExpLookupTable = " + useExpLookupTable + " useHDFSExpLookupTable = " @@ -238,13 +210,10 @@ public void printQueryInfo() } @Override - public QueryInfo clone() - { - try - { + public QueryInfo clone() { + try { return (QueryInfo) 
super.clone(); - } catch (CloneNotSupportedException e) - { + } catch (CloneNotSupportedException e) { throw new RuntimeException(e); } } diff --git a/src/main/java/org/apache/pirk/response/wideskies/Response.java b/src/main/java/org/apache/pirk/response/wideskies/Response.java index 52b47e3f..e6cd5d02 100644 --- a/src/main/java/org/apache/pirk/response/wideskies/Response.java +++ b/src/main/java/org/apache/pirk/response/wideskies/Response.java @@ -30,42 +30,35 @@ * Class to hold the encrypted response elements for the PIR query *
<p>
* Serialized and returned to the querier for decryption - * */ @JsonDeserialize(using = ResponseDeserializer.class) -public class Response implements Serializable, Storable -{ +public class Response implements Serializable, Storable { public static final long responseSerialVersionUID = 1L; public final long responseVersion = responseSerialVersionUID; private QueryInfo queryInfo = null; // holds all query info - private TreeMap responseElements = null; // encrypted response columns, colNum -> column + private TreeMap responseElements = null; // encrypted response columns, colNum -> column - public Response(QueryInfo queryInfoInput) - { + public Response(QueryInfo queryInfoInput) { queryInfo = queryInfoInput; responseElements = new TreeMap<>(); } - public TreeMap getResponseElements() - { + public TreeMap getResponseElements() { return responseElements; } - public void setResponseElements(TreeMap elements) - { + public void setResponseElements(TreeMap elements) { responseElements = elements; } - public QueryInfo getQueryInfo() - { + public QueryInfo getQueryInfo() { return queryInfo; } - public void addElement(int position, BigInteger element) - { + public void addElement(int position, BigInteger element) { responseElements.put(position, element); } } diff --git a/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java b/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java index 2562852a..5b974473 100644 --- a/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java +++ b/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java @@ -22,13 +22,11 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JsonDeserializer; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.deser.std.StdDeserializer; import org.apache.pirk.query.wideskies.QueryDeserializer; import org.apache.pirk.query.wideskies.QueryInfo; -import org.apache.pirk.response.wideskies.Response; import java.io.IOException; import java.math.BigInteger; @@ -39,9 +37,13 @@ */ public class ResponseDeserializer extends StdDeserializer { - public ResponseDeserializer() { this(null); } + public ResponseDeserializer() { + this(null); + } - public ResponseDeserializer(Class vc) { super(vc);} + public ResponseDeserializer(Class vc) { + super(vc); + } private static ObjectMapper objectMapper = new ObjectMapper(); @@ -60,7 +62,8 @@ public Response deserialize(JsonParser jsonParser, DeserializationContext deseri // Form the initial response object Response response = new Response(queryInfo); // Get the response elements - TreeMap responseElements = objectMapper.readValue(node.get("queryElements").toString(), new TypeReference>(){}); + TreeMap responseElements = objectMapper.readValue(node.get("responseElements").toString(), new TypeReference>() { + }); response.setResponseElements(responseElements); return response; diff --git a/src/main/java/org/apache/pirk/schema/query/QuerySchema.java b/src/main/java/org/apache/pirk/schema/query/QuerySchema.java index e9873da4..cc7313f7 100644 --- a/src/main/java/org/apache/pirk/schema/query/QuerySchema.java +++ b/src/main/java/org/apache/pirk/schema/query/QuerySchema.java @@ -31,10 +31,8 @@ /** * Class to hold a query schema - * */ -public class QuerySchema implements Serializable -{ +public 
class QuerySchema implements Serializable { public static final long querySchemaSerialVersionUID = 1L; // So that we can serialize the version number in jackson. @@ -42,24 +40,19 @@ public class QuerySchema implements Serializable public final long querySchemaVersion = querySchemaSerialVersionUID; // This schema's name. - @JsonSerialize private final String schemaName; // Name of the data schema associated with this query schema. - @JsonSerialize private final String dataSchemaName; // Name of element in the dataSchema to be used as the selector. - @JsonSerialize private final String selectorName; // Element names from the data schema to include in the response. // Order matters for packing/unpacking. - @JsonSerialize private final List elementNames = new ArrayList<>(); // Name of class to use in data filtering. - @JsonSerialize private final String filterTypeName; // Instance of the filterTypeName. @@ -67,19 +60,15 @@ public class QuerySchema implements Serializable private final DataFilter filter; // Set of data schema element names on which to apply filtering. - @JsonSerialize private final Set filteredElementNames = new HashSet<>(); // Total number of bits to be returned for each data element hit. - @JsonSerialize private final int dataElementSize; // Additional fields by key,value - @JsonSerialize - private final HashMap additionalFields = new HashMap<>(); + private final HashMap additionalFields = new HashMap<>(); - public QuerySchema(String schemaName, String dataSchemaName, String selectorName, String filterTypeName, DataFilter filter, int dataElementSize) - { + public QuerySchema(String schemaName, String dataSchemaName, String selectorName, String filterTypeName, DataFilter filter, int dataElementSize) { this.schemaName = schemaName; this.dataSchemaName = dataSchemaName; this.selectorName = selectorName; @@ -90,11 +79,10 @@ public QuerySchema(String schemaName, String dataSchemaName, String selectorName /** * Returns the name of this schema. - * + * * @return The schema name. */ - public String getSchemaName() - { + public String getSchemaName() { return schemaName; } @@ -105,8 +93,7 @@ public String getSchemaName() * * @return The data schema name. */ - public String getDataSchemaName() - { + public String getDataSchemaName() { return dataSchemaName; } @@ -114,11 +101,10 @@ public String getDataSchemaName() * Returns the element names to include in the response. *
<p>
* The element names are defined by the data schema associated with this query. - * + * * @return The ordered list of query element names. */ - public List getElementNames() - { + public List getElementNames() { return elementNames; } @@ -126,38 +112,34 @@ public List getElementNames() * Returns the element name used as the selector. *
<p>
* The element names are defined by the data schema associated with this query. - * + * * @return The element names being selected. */ - public String getSelectorName() - { + public String getSelectorName() { return selectorName; } - public int getDataElementSize() - { + public int getDataElementSize() { return dataElementSize; } /** * Returns the name of the filter class for this query. - * + *
<p>
* The filter class name is the fully qualified name of a Java class that implements the {@link DataFilter} interface. - * + * * @return The type name of the query filter, or null if there is no filter defined. */ - public String getFilterTypeName() - { + public String getFilterTypeName() { return filterTypeName; } /** * Returns the set of element names on which to apply the filter. - * + * * @return The possibly empty set of data schema element names. */ - public Set getFilteredElementNames() - { + public Set getFilteredElementNames() { return filteredElementNames; } @@ -165,11 +147,10 @@ public Set getFilteredElementNames() * Returns the data element filter for this query. *
<p>
* The data filter is applied to the {@link QuerySchema#getFilteredElementNames()} data elements. - * + * * @return The data filter, or null if no filter has been specified for this query. */ - public DataFilter getFilter() - { + public DataFilter getFilter() { return filter; } @@ -177,22 +158,20 @@ public DataFilter getFilter() * Returns the map of additional field keys and values *
<p>
* Note that additional fields are optional, thus the map may be empty - * + * * @return The additionalFields HashMap */ - public HashMap getAdditionalFields() - { + public HashMap getAdditionalFields() { return additionalFields; } /** * Returns the value from the additionalFields mapping corresponding to the given key - * + * * @param key * @return value from the additionalFields mapping corresponding to the given key */ - public String getAdditionalFieldValue(String key) - { + public String getAdditionalFieldValue(String key) { return additionalFields.get(key); } } diff --git a/src/main/java/org/apache/pirk/serialization/JsonSerializer.java b/src/main/java/org/apache/pirk/serialization/JsonSerializer.java index 718c63fe..3ec1768d 100644 --- a/src/main/java/org/apache/pirk/serialization/JsonSerializer.java +++ b/src/main/java/org/apache/pirk/serialization/JsonSerializer.java @@ -24,40 +24,31 @@ import com.fasterxml.jackson.databind.ObjectMapper; -public class JsonSerializer extends SerializationService -{ +public class JsonSerializer extends SerializationService { // We really only need the one objectMapper, I think. public static final ObjectMapper objectMapper = new ObjectMapper(); /** * Stores the given object on the output stream as JSON. * - * @param outputStream - * The stream on which to store the object. - * @param obj - * The object to be stored. - * @throws IOException - * If a problem occurs storing the object on the given stream. + * @param outputStream The stream on which to store the object. + * @param obj The object to be stored. + * @throws IOException If a problem occurs storing the object on the given stream. */ @Override - public void write(OutputStream outputStream, Storable obj) throws IOException - { + public void write(OutputStream outputStream, Storable obj) throws IOException { objectMapper.writerWithDefaultPrettyPrinter().writeValue(outputStream, obj); } /** * Read a JSON string from the given input stream and returns the Object representation. * - * @param inputStream - * The stream from which to read the object. - * @param classType - * The type of object being retrieved. - * @throws IOException - * If a problem occurs reading the object from the stream. + * @param inputStream The stream from which to read the object. + * @param classType The type of object being retrieved. + * @throws IOException If a problem occurs reading the object from the stream. */ @Override - public T read(InputStream inputStream, Class classType) throws IOException - { + public T read(InputStream inputStream, Class classType) throws IOException { return objectMapper.readValue(inputStream, classType); } diff --git a/src/main/java/org/apache/pirk/serialization/SerializationService.java b/src/main/java/org/apache/pirk/serialization/SerializationService.java index 2764fc88..87e3dd45 100644 --- a/src/main/java/org/apache/pirk/serialization/SerializationService.java +++ b/src/main/java/org/apache/pirk/serialization/SerializationService.java @@ -23,24 +23,20 @@ import java.io.InputStream; import java.io.OutputStream; -/* +/** * Ability to read and write objects to/from a stream. 
*/ -public abstract class SerializationService -{ +public abstract class SerializationService { public abstract T read(InputStream stream, Class type) throws IOException; public abstract void write(OutputStream w, Storable obj) throws IOException; - public byte[] toBytes(Storable obj) - { + public byte[] toBytes(Storable obj) { ByteArrayOutputStream bos = new ByteArrayOutputStream(); - try - { + try { write(bos, obj); - } catch (IOException e) - { + } catch (IOException e) { throw new RuntimeException(e); } diff --git a/src/main/java/org/apache/pirk/serialization/StorageService.java b/src/main/java/org/apache/pirk/serialization/StorageService.java index 94c5921d..74f7f03d 100644 --- a/src/main/java/org/apache/pirk/serialization/StorageService.java +++ b/src/main/java/org/apache/pirk/serialization/StorageService.java @@ -18,7 +18,7 @@ *******************************************************************************/ package org.apache.pirk.serialization; -/* +/** * Common supertype for types that can store objects using serialization. */ abstract class StorageService From fbebc4adafefa640e5b0339d2423abdabed9b0b3 Mon Sep 17 00:00:00 2001 From: Walter Ray-Dulany Date: Wed, 5 Oct 2016 11:59:55 -0400 Subject: [PATCH 07/11] Needed to add a new constructor for QueryInfo so as to not depend on the existence of a populated QuerySchemaRegistry. --- .../query/wideskies/QueryDeserializer.java | 5 ++-- .../pirk/query/wideskies/QueryInfo.java | 30 ++++++++++++++++++- 2 files changed, 32 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java index cffd1448..59326f22 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java +++ b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java @@ -104,9 +104,10 @@ public static QueryInfo deserializeInfo(JsonNode infoNode) throws IOException { infoNode.get("queryType").asText(), infoNode.get("useExpLookupTable").asBoolean(), infoNode.get("embedSelector").asBoolean(), - infoNode.get("useHDFSExpLookupTable").asBoolean() + infoNode.get("useHDFSExpLookupTable").asBoolean(), + infoNode.get("numBitsPerDataElement").asInt(), + querySchema ); - info.addQuerySchema(querySchema); return info; } diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java index 12ebf271..743465e8 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java +++ b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java @@ -97,7 +97,6 @@ public QueryInfo(UUID identifierInput, int numSelectorsInput, int hashBitSizeInp useExpLookupTable = useExpLookupTableInput; useHDFSExpLookupTable = useHDFSExpLookupTableInput; embedSelector = embedSelectorInput; - numBitsPerDataElement = QuerySchemaRegistry.get(queryType).getDataElementSize(); dataPartitionBitSize = dataPartitionBitSizeInput; numPartitionsPerDataElement = numBitsPerDataElement / dataPartitionBitSizeInput; @@ -109,6 +108,35 @@ public QueryInfo(UUID identifierInput, int numSelectorsInput, int hashBitSizeInp printQueryInfo(); } + public QueryInfo(UUID identifierInput, int numSelectorsInput, int hashBitSizeInput, String hashKeyInput, int dataPartitionBitSizeInput, String queryTypeInput, + boolean useExpLookupTableInput, boolean embedSelectorInput, boolean useHDFSExpLookupTableInput, int numBitsPerDataElementInput, QuerySchema querySchemaInput) + { + identifier = identifierInput; + queryType = 
queryTypeInput; + + numSelectors = numSelectorsInput; + + hashBitSize = hashBitSizeInput; + hashKey = hashKeyInput; + + useExpLookupTable = useExpLookupTableInput; + useHDFSExpLookupTable = useHDFSExpLookupTableInput; + embedSelector = embedSelectorInput; + + numBitsPerDataElement = numBitsPerDataElementInput; + dataPartitionBitSize = dataPartitionBitSizeInput; + numPartitionsPerDataElement = numBitsPerDataElement / dataPartitionBitSizeInput; + + if (embedSelectorInput) + { + numPartitionsPerDataElement += 4; // using a 8-bit partition size and a 32-bit embedded selector + } + + addQuerySchema(querySchemaInput); + + printQueryInfo(); + } + public QueryInfo(Map queryInfoMap) { // The Storm Config serializes the map as a json and reads back in with numeric values as longs. // So numerics need to be cast as a long and call .intValue. However, in PirkHashScheme the map contains ints. From b9caf260f14405705864ba200d993057a7040416 Mon Sep 17 00:00:00 2001 From: Walter Ray-Dulany Date: Thu, 6 Oct 2016 08:16:34 -0400 Subject: [PATCH 08/11] Move to gson breaks hdfs? --- pom.xml | 89 +++++++++++- .../pirk/querier/wideskies/Querier.java | 1 - .../wideskies/QuerierDeserializer.java | 54 ++++++-- .../apache/pirk/query/wideskies/Query.java | 7 +- .../query/wideskies/QueryDeserializer.java | 127 ++++++++++++++++-- .../pirk/response/wideskies/Response.java | 1 - .../wideskies/ResponseDeserializer.java | 32 ++++- .../pirk/serialization/JsonSerializer.java | 26 +++- 8 files changed, 298 insertions(+), 39 deletions(-) diff --git a/pom.xml b/pom.xml index 3fe281bb..86d6149d 100644 --- a/pom.xml +++ b/pom.xml @@ -141,6 +141,22 @@ org.apache.curator curator-client + + org.codehaus.jackson + jackson-mapper-asl + + + org.codehaus.jackson + jackson-core-asl + + + org.codehaus.jackson + jackson-xc + + + org.codehaus.jackson + jackson-jaxrs + @@ -157,6 +173,22 @@ log4j log4j + + org.codehaus.jackson + jackson-mapper-asl + + + org.codehaus.jackson + jackson-core-asl + + + org.codehaus.jackson + jackson-xc + + + org.codehaus.jackson + jackson-jaxrs + @@ -177,6 +209,14 @@ xerces xercesImpl + + org.codehaus.jackson + jackson-mapper-asl + + + org.codehaus.jackson + jackson-core-asl + @@ -205,6 +245,22 @@ org.scala-lang scala-reflect + + org.json4s + json4s-jackson_2.11 + + + com.fasterxml.jackson.module + jackson-module-scala_2.11 + + + org.codehaus.jackson + jackson-mapper-asl + + + org.codehaus.jackson + jackson-core-asl + @@ -217,6 +273,14 @@ org.slf4j slf4j-log4j12 + + org.json4s + json4s-jackson_2.10 + + + com.fasterxml.jackson.module + jackson-module-scala_2.10 + @@ -257,6 +321,22 @@ cascading cascading-hadoop + + com.fasterxml.jackson.core + jackson-core + + + org.apache.parquet + parquet-jackson + + + org.codehaus.jackson + jackson-mapper-asl + + + org.codehaus.jackson + jackson-core-asl + @@ -418,7 +498,8 @@ eclipse*.xml docs/* logs/* - **/m2.conf + **/m2.conf + src/main/resources/META-INF/** @@ -557,9 +638,11 @@ true exe - + - + diff --git a/src/main/java/org/apache/pirk/querier/wideskies/Querier.java b/src/main/java/org/apache/pirk/querier/wideskies/Querier.java index d075601c..cf6f156f 100644 --- a/src/main/java/org/apache/pirk/querier/wideskies/Querier.java +++ b/src/main/java/org/apache/pirk/querier/wideskies/Querier.java @@ -30,7 +30,6 @@ /** * Class to hold the information necessary for the PIR querier to perform decryption */ -@JsonDeserialize(using = QuerierDeserializer.class) public class Querier implements Serializable, Storable { public static final long querierSerialVersionUID = 1L; diff --git 
a/src/main/java/org/apache/pirk/querier/wideskies/QuerierDeserializer.java b/src/main/java/org/apache/pirk/querier/wideskies/QuerierDeserializer.java index 86e62858..495cb295 100644 --- a/src/main/java/org/apache/pirk/querier/wideskies/QuerierDeserializer.java +++ b/src/main/java/org/apache/pirk/querier/wideskies/QuerierDeserializer.java @@ -25,10 +25,13 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.deser.std.StdDeserializer; +import com.google.gson.*; +import com.google.gson.reflect.TypeToken; import org.apache.pirk.encryption.Paillier; import org.apache.pirk.query.wideskies.Query; import java.io.IOException; +import java.lang.reflect.Type; import java.math.BigInteger; import java.util.List; import java.util.Map; @@ -36,19 +39,11 @@ /** * Custom deserializer for Querier class for Jackson. */ -public class QuerierDeserializer extends StdDeserializer { - - public QuerierDeserializer() { - this(null); - } - - public QuerierDeserializer(Class vc) { - super(vc); - } - - private static ObjectMapper objectMapper = new ObjectMapper(); +public class QuerierDeserializer implements JsonDeserializer { + private static final Gson gson = new Gson(); + /* @Override public Querier deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { JsonNode node = jsonParser.getCodec().readTree(jsonParser); @@ -71,7 +66,7 @@ public Querier deserialize(JsonParser jsonParser, DeserializationContext deseria return new Querier(selectors, paillier, query, embedSelectorMap); } - + */ /** * Deserializes a Paillier JsonNode. * @@ -84,4 +79,39 @@ private Paillier deserializePaillier(JsonNode paillier) { int bitLength = paillier.get("bitLength").asInt(); return new Paillier(p, q, bitLength); } + + @Override + public Querier deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException { + JsonObject jsonObject = jsonElement.getAsJsonObject(); + // Check the version number. + long querierVersion = jsonObject.get("querierVersion").getAsLong(); + if (querierVersion != Querier.querierSerialVersionUID) { + throw new JsonParseException("Attempt to deserialize unsupported query version. Supported: " + + Querier.querierSerialVersionUID + "; Received: " + querierVersion); + } + // Then deserialize the Query Info + Query query = gson.fromJson(jsonObject.get("query").toString(), Query.class); + + // Now Paillier + Paillier paillier = deserializePaillier(jsonObject.get("paillier").getAsJsonObject()); + + List selectors = gson.fromJson(jsonObject.get("selectors").toString(), new TypeToken>() {}.getType()); + Map embedSelectorMap = gson.fromJson(jsonObject.get("embedSelectorMap").toString(), new TypeToken>() {}.getType()); + + return new Querier(selectors, paillier, query, embedSelectorMap); + } + + /** + * Deserializes a Paillier JsonObject. + * + * @param paillier A JsonObject at the root of a serialied Paillier object. + * @return A Paillier object of the deserialized Json. 
+ */ + private Paillier deserializePaillier(JsonObject paillier) { + BigInteger p = new BigInteger(paillier.get("p").getAsString()); + BigInteger q = new BigInteger(paillier.get("q").getAsString()); + int bitLength = paillier.get("bitLength").getAsInt(); + return new Paillier(p, q, bitLength); + } + } diff --git a/src/main/java/org/apache/pirk/query/wideskies/Query.java b/src/main/java/org/apache/pirk/query/wideskies/Query.java index 3e6446f7..c324c624 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/Query.java +++ b/src/main/java/org/apache/pirk/query/wideskies/Query.java @@ -28,6 +28,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.google.gson.annotations.Expose; import org.apache.pirk.encryption.ModPowAbstraction; import org.apache.pirk.serialization.Storable; import org.slf4j.Logger; @@ -36,7 +37,7 @@ /** * Class to hold the PIR query vectors */ -@JsonDeserialize(using = QueryDeserializer.class) + public class Query implements Serializable, Storable { public static final long querySerialVersionUID = 1L; @@ -53,12 +54,12 @@ public class Query implements Serializable, Storable { // lookup table for exponentiation of query vectors - based on dataPartitionBitSize // element -> - @JsonIgnore + @Expose(serialize = false) private Map> expTable = new ConcurrentHashMap<>(); // File based lookup table for modular exponentiation // element hash -> filename containing it's modular exponentiations - @JsonIgnore + private Map expFileBasedLookup = new HashMap<>(); private final BigInteger N; // N=pq, RSA modulus for the Paillier encryption associated with the queryElements diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java index 59326f22..85c85ce1 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java +++ b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java @@ -25,6 +25,8 @@ import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.deser.std.StdDeserializer; +import com.google.gson.*; +import com.google.gson.reflect.TypeToken; import org.apache.pirk.schema.query.QuerySchema; import org.apache.pirk.schema.query.filter.DataFilter; import org.apache.pirk.schema.query.filter.FilterFactory; @@ -33,26 +35,43 @@ import org.slf4j.LoggerFactory; import java.io.IOException; +import java.lang.reflect.Type; import java.math.BigInteger; import java.util.*; /** * Custom deserializer for Query class for Jackson. */ -public class QueryDeserializer extends StdDeserializer { +public class QueryDeserializer implements JsonDeserializer { private static final Logger logger = LoggerFactory.getLogger(QueryDeserializer.class); - public QueryDeserializer() { - this(null); - } + private static final Gson gson = new Gson(); + - public QueryDeserializer(Class vc) { - super(vc); - } + @Override + public Query deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException { + JsonObject jsonObject = jsonElement.getAsJsonObject(); + logger.info("Got query json:" + jsonObject.toString()); + // Check the version number. + long queryVersion = jsonObject.get("queryVersion").getAsLong(); + if (queryVersion != Query.querySerialVersionUID) { + throw new JsonParseException("Attempt to deserialize unsupported query version. 
Supported: " + + Query.querySerialVersionUID + "; Received: " + queryVersion); + } + // Then deserialize the Query Info + QueryInfo queryInfo = deserializeInfo(jsonObject.get("queryInfo").getAsJsonObject()); + SortedMap queryElements = gson.fromJson(jsonObject.get("queryElements"), new TypeToken>() {}.getType()); + BigInteger N = new BigInteger(jsonObject.get("n").getAsString()); + BigInteger NSquared = new BigInteger(jsonObject.get("nsquared").getAsString()); + Map expFileBasedLookup = gson.fromJson(jsonObject.get("expFileBasedLookup"), new TypeToken>() {}.getType()); - private static ObjectMapper objectMapper = new ObjectMapper(); + Query query = new Query(queryInfo, N, NSquared, queryElements); + query.setExpFileBasedLookup(expFileBasedLookup); + return query; + } + /* @Override public Query deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { JsonNode node = jsonParser.getCodec().readTree(jsonParser); @@ -64,17 +83,16 @@ public Query deserialize(JsonParser jsonParser, DeserializationContext deseriali } // Then deserialize the Query Info QueryInfo queryInfo = deserializeInfo(node.get("queryInfo")); - SortedMap queryElements = objectMapper.readValue(node.get("queryElements").toString(), new TypeReference>() { - }); + SortedMap queryElements = objectMapper.readValue(node.get("queryElements").toString(), new TypeReference>() {}); BigInteger N = new BigInteger(node.get("n").asText()); BigInteger NSquared = new BigInteger(node.get("nsquared").asText()); - + Map expFileBasedLookup = objectMapper.readValue(node.get("expFileBasedLookup").toString(), new TypeReference>() {}); Query query = new Query(queryInfo, N, NSquared, queryElements); - + query.setExpFileBasedLookup(expFileBasedLookup); return query; } - + */ /** * Deserializes a QueryInfo JsonNode * @@ -82,6 +100,7 @@ public Query deserialize(JsonParser jsonParser, DeserializationContext deseriali * @return A QueryInfo object of the deserialized Json. * @throws IOException */ + /* public static QueryInfo deserializeInfo(JsonNode infoNode) throws IOException { // Deserialize The Query Schema First. long infoVersion = infoNode.get("queryInfoVersion").asLong(); @@ -110,6 +129,7 @@ public static QueryInfo deserializeInfo(JsonNode infoNode) throws IOException { ); return info; } + */ /** * Deserializes a QuerySchema JsonNode @@ -118,6 +138,7 @@ public static QueryInfo deserializeInfo(JsonNode infoNode) throws IOException { * @return A QuerySchema object of the deserialized Json. * @throws IOException */ + /* public static QuerySchema deserializeSchema(JsonNode schemaNode) throws IOException { // Deserialize The Query Schema First. long schemaVersion = schemaNode.get("querySchemaVersion").asLong(); @@ -159,6 +180,86 @@ public static QuerySchema deserializeSchema(JsonNode schemaNode) throws IOExcept querySchema.getAdditionalFields().putAll(additionalFields); return querySchema; } + */ + /** + * Deserializes a QueryInfo JsonObject + * @param queryInfoJson A JsonObject at the root of a serialized QueryInfo object. + * @return A QueryInfo object of the deserialized Json. + * @throws JsonParseException + */ + public static QueryInfo deserializeInfo(JsonObject queryInfoJson) throws JsonParseException { + // First check the version. + long infoVersion = queryInfoJson.get("queryInfoVersion").getAsLong(); + if (infoVersion != QueryInfo.queryInfoSerialVersionUID) { + throw new JsonParseException("Attempt to deserialize unsupported query info version. 
Supported: " + + QueryInfo.queryInfoSerialVersionUID + "; Received: " + infoVersion); + } + // Deserialize the QuerySchema next, accounting for the possibility that it is null. + QuerySchema querySchema; + if (queryInfoJson.get("querySchema").isJsonNull()) { + querySchema = null; + } else { + querySchema = deserializeSchema(queryInfoJson.get("querySchema").getAsJsonObject()); + } + // Now start making the QueryInfo object. + QueryInfo info = new QueryInfo( + UUID.fromString(queryInfoJson.get("identifier").getAsString()), + queryInfoJson.get("numSelectors").getAsInt(), + queryInfoJson.get("hashBitSize").getAsInt(), + queryInfoJson.get("hashKey").getAsString(), + queryInfoJson.get("dataPartitionBitSize").getAsInt(), + queryInfoJson.get("queryType").getAsString(), + queryInfoJson.get("useExpLookupTable").getAsBoolean(), + queryInfoJson.get("embedSelector").getAsBoolean(), + queryInfoJson.get("useHDFSExpLookupTable").getAsBoolean(), + queryInfoJson.get("numBitsPerDataElement").getAsInt(), + querySchema + ); + return info; + } + /** + * Deserializes a QuerySchema JsonObject + * @param querySchemaJson A JsonObject at the root of a serialized QuerySchema object. + * @return A QuerySchema object of the deserialized Json. + * @throws JsonParseException + */ + private static QuerySchema deserializeSchema(JsonObject querySchemaJson) throws JsonParseException{ + // Deserialize The Query Schema First. + long schemaVersion = querySchemaJson.get("querySchemaVersion").getAsLong(); + if (schemaVersion != QuerySchema.querySchemaSerialVersionUID) { + throw new JsonParseException("Attempt to deserialize unsupported query info version. Supported: " + + QueryInfo.queryInfoSerialVersionUID + "; Received: " + schemaVersion); + } + String dataFilterName = querySchemaJson.get("filterTypeName").getAsString(); + Set filteredElementNames; + try { + filteredElementNames = gson.fromJson(querySchemaJson.get("filteredElementNames"), new TypeToken>() {}.getType()); + } catch (Exception e) { + logger.warn("No filtered element names for Query Schema deserialization."); + filteredElementNames = null; + } + // Set up the data filter + DataFilter dataFilter; + try { + dataFilter = FilterFactory.getFilter(dataFilterName, filteredElementNames); + } catch (IOException|PIRException e) { + logger.error("Error trying to create data filter from JSON.", e); + throw new JsonParseException(e); + } + QuerySchema querySchema = new QuerySchema( + querySchemaJson.get("schemaName").getAsString(), + querySchemaJson.get("dataSchemaName").getAsString(), + querySchemaJson.get("selectorName").getAsString(), + dataFilterName, + dataFilter, + querySchemaJson.get("dataElementSize").getAsInt() + ); + List elementNames = gson.fromJson(querySchemaJson.get("elementNames"), new TypeToken>() {}.getType()); + querySchema.getElementNames().addAll(elementNames); + HashMap additionalFields = gson.fromJson(querySchemaJson.get("additionalFields"), new TypeToken>() {}.getType()); + querySchema.getAdditionalFields().putAll(additionalFields); + return querySchema; + } } diff --git a/src/main/java/org/apache/pirk/response/wideskies/Response.java b/src/main/java/org/apache/pirk/response/wideskies/Response.java index e6cd5d02..434e4756 100644 --- a/src/main/java/org/apache/pirk/response/wideskies/Response.java +++ b/src/main/java/org/apache/pirk/response/wideskies/Response.java @@ -31,7 +31,6 @@ *
<p>
* Serialized and returned to the querier for decryption */ -@JsonDeserialize(using = ResponseDeserializer.class) public class Response implements Serializable, Storable { public static final long responseSerialVersionUID = 1L; diff --git a/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java b/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java index 5b974473..dc9d1508 100644 --- a/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java +++ b/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java @@ -18,24 +18,52 @@ */ package org.apache.pirk.response.wideskies; +import com.fasterxml.jackson.core.*; import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.deser.std.StdDeserializer; +import com.google.gson.*; +import com.google.gson.JsonParseException; +import com.google.gson.reflect.TypeToken; import org.apache.pirk.query.wideskies.QueryDeserializer; import org.apache.pirk.query.wideskies.QueryInfo; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; +import java.lang.reflect.Type; import java.math.BigInteger; import java.util.TreeMap; /** * Custom deserializer for Response class for Jackson. */ +public class ResponseDeserializer implements JsonDeserializer { + + private static final Gson gson = new Gson(); + + + @Override + public Response deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException { + final JsonObject jsonObject = jsonElement.getAsJsonObject(); + long responseVersion = jsonObject.get("responseVersion").getAsLong(); + if (responseVersion != Response.responseSerialVersionUID) { + throw new JsonParseException("\"Attempt to deserialize unsupported query version. Supported: \"\n" + + " + Response.responseSerialVersionUID + \"; Received: \" + responseVersion"); + } + QueryInfo queryInfo = QueryDeserializer.deserializeInfo(jsonObject.get("queryInfo").getAsJsonObject()); + Response response = new Response(queryInfo); + TreeMap responseElements = gson.fromJson(jsonObject.get("responseElements"), new TypeToken>(){}.getType()); + response.setResponseElements(responseElements); + return response; + } +} +/* public class ResponseDeserializer extends StdDeserializer { + private static final Logger logger = LoggerFactory.getLogger(ResponseDeserializer.class); public ResponseDeserializer() { this(null); @@ -50,6 +78,7 @@ public ResponseDeserializer(Class vc) { @Override public Response deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { + logger.info("Got json parser: " + jsonParser.readValueAsTree().toString()); JsonNode node = jsonParser.getCodec().readTree(jsonParser); // Check the version number. 
long responseVersion = node.get("responseVersion").asLong(); @@ -69,3 +98,4 @@ public Response deserialize(JsonParser jsonParser, DeserializationContext deseri return response; } } +*/ diff --git a/src/main/java/org/apache/pirk/serialization/JsonSerializer.java b/src/main/java/org/apache/pirk/serialization/JsonSerializer.java index 3ec1768d..bf0c301b 100644 --- a/src/main/java/org/apache/pirk/serialization/JsonSerializer.java +++ b/src/main/java/org/apache/pirk/serialization/JsonSerializer.java @@ -18,15 +18,27 @@ */ package org.apache.pirk.serialization; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; +import java.io.*; import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import org.apache.pirk.querier.wideskies.Querier; +import org.apache.pirk.querier.wideskies.QuerierDeserializer; +import org.apache.pirk.query.wideskies.QueryDeserializer; +import org.apache.pirk.response.wideskies.Response; +import org.apache.pirk.response.wideskies.ResponseDeserializer; + +import javax.management.Query; public class JsonSerializer extends SerializationService { // We really only need the one objectMapper, I think. public static final ObjectMapper objectMapper = new ObjectMapper(); + public static final Gson gson = new GsonBuilder() + .registerTypeAdapter(Response.class, new ResponseDeserializer()) + .registerTypeAdapter(Query.class, new QueryDeserializer()) + .registerTypeAdapter(Querier.class, new QuerierDeserializer()) + .create(); /** * Stores the given object on the output stream as JSON. @@ -37,7 +49,9 @@ public class JsonSerializer extends SerializationService { */ @Override public void write(OutputStream outputStream, Storable obj) throws IOException { - objectMapper.writerWithDefaultPrettyPrinter().writeValue(outputStream, obj); + Writer writer = new OutputStreamWriter(outputStream); + gson.toJson(obj); + //objectMapper.writerWithDefaultPrettyPrinter().writeValue(outputStream, obj); } /** @@ -49,7 +63,9 @@ public void write(OutputStream outputStream, Storable obj) throws IOException { */ @Override public T read(InputStream inputStream, Class classType) throws IOException { - return objectMapper.readValue(inputStream, classType); + Reader reader = new InputStreamReader(inputStream); + return gson.fromJson(reader, classType); + } } From dc9fcfd848c89bf59310c0c12284b5bbb72caec4 Mon Sep 17 00:00:00 2001 From: Walter Ray-Dulany Date: Thu, 6 Oct 2016 08:49:45 -0400 Subject: [PATCH 09/11] Working with Gson on two tests; time for distributed tests. 
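
For reference, the kind of round trip these serializer changes are meant to support looks
roughly like the sketch below. The class name and the QueryInfo arguments are illustrative
only and are not taken from the actual unit tests; the sketch just writes a Response through
the Gson-backed JsonSerializer and reads it back via the registered ResponseDeserializer.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.math.BigInteger;
    import java.util.UUID;

    import org.apache.pirk.query.wideskies.QueryInfo;
    import org.apache.pirk.response.wideskies.Response;
    import org.apache.pirk.serialization.JsonSerializer;

    public class JsonSerializerRoundTripSketch {
      public static void main(String[] args) throws Exception {
        // Hypothetical query parameters, chosen only so a QueryInfo can be built
        // without consulting the QuerySchemaRegistry (qSchema is left null).
        QueryInfo queryInfo = new QueryInfo(UUID.randomUUID(), 1, 12, "someHashKey", 8,
            "someQueryType", false, true, false, 96, null);

        Response original = new Response(queryInfo);
        original.addElement(0, BigInteger.TEN);

        JsonSerializer serializer = new JsonSerializer();

        // Write the Response out as JSON...
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        serializer.write(out, original);

        // ...and read it back through the Gson type adapter registered for Response.
        Response copy = serializer.read(new ByteArrayInputStream(out.toByteArray()), Response.class);
        System.out.println(copy.getResponseElements());
      }
    }

The @Expose annotations combined with excludeFieldsWithoutExposeAnnotation() keep transient
state such as the expTable out of the JSON without relying on Jackson's @JsonIgnore.
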
--- pom.xml | 81 +------------------ .../org/apache/pirk/encryption/Paillier.java | 11 ++- .../pirk/querier/wideskies/Querier.java | 6 ++ .../apache/pirk/query/wideskies/Query.java | 10 ++- .../query/wideskies/QueryDeserializer.java | 4 +- .../pirk/query/wideskies/QueryInfo.java | 21 +++-- .../pirk/response/wideskies/Response.java | 4 + .../apache/pirk/schema/query/QuerySchema.java | 14 +++- .../pirk/serialization/JsonSerializer.java | 7 +- 9 files changed, 57 insertions(+), 101 deletions(-) diff --git a/pom.xml b/pom.xml index 86d6149d..6052b9b3 100644 --- a/pom.xml +++ b/pom.xml @@ -141,22 +141,6 @@ org.apache.curator curator-client - - org.codehaus.jackson - jackson-mapper-asl - - - org.codehaus.jackson - jackson-core-asl - - - org.codehaus.jackson - jackson-xc - - - org.codehaus.jackson - jackson-jaxrs - @@ -173,22 +157,7 @@ log4j log4j - - org.codehaus.jackson - jackson-mapper-asl - - - org.codehaus.jackson - jackson-core-asl - - - org.codehaus.jackson - jackson-xc - - - org.codehaus.jackson - jackson-jaxrs - + @@ -209,14 +178,6 @@ xerces xercesImpl - - org.codehaus.jackson - jackson-mapper-asl - - - org.codehaus.jackson - jackson-core-asl - @@ -245,22 +206,6 @@ org.scala-lang scala-reflect - - org.json4s - json4s-jackson_2.11 - - - com.fasterxml.jackson.module - jackson-module-scala_2.11 - - - org.codehaus.jackson - jackson-mapper-asl - - - org.codehaus.jackson - jackson-core-asl - @@ -273,14 +218,6 @@ org.slf4j slf4j-log4j12 - - org.json4s - json4s-jackson_2.10 - - - com.fasterxml.jackson.module - jackson-module-scala_2.10 - @@ -321,22 +258,6 @@ cascading cascading-hadoop - - com.fasterxml.jackson.core - jackson-core - - - org.apache.parquet - parquet-jackson - - - org.codehaus.jackson - jackson-mapper-asl - - - org.codehaus.jackson - jackson-core-asl - diff --git a/src/main/java/org/apache/pirk/encryption/Paillier.java b/src/main/java/org/apache/pirk/encryption/Paillier.java index 0ccf4516..2e126327 100644 --- a/src/main/java/org/apache/pirk/encryption/Paillier.java +++ b/src/main/java/org/apache/pirk/encryption/Paillier.java @@ -24,6 +24,7 @@ import java.security.SecureRandom; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.google.gson.annotations.Expose; import org.apache.pirk.utils.PIRException; import org.apache.pirk.utils.SystemConfiguration; import org.slf4j.Logger; @@ -89,17 +90,19 @@ public final class Paillier implements Serializable { } } + @Expose private BigInteger p; // large prime + @Expose private BigInteger q; // large prime private BigInteger N; // N=pq, RSA modulus - @JsonIgnore + private BigInteger NSquared; // NSquared = N^2 - @JsonIgnore + private BigInteger lambdaN; // lambda(N) = lcm(p-1,q-1), Carmichael function of N - @JsonIgnore - private BigInteger w; // lambda(N)^-1 mod N + private BigInteger w; // lambda(N)^-1 mod N + @Expose private final int bitLength; // bit length of the modulus N /** diff --git a/src/main/java/org/apache/pirk/querier/wideskies/Querier.java b/src/main/java/org/apache/pirk/querier/wideskies/Querier.java index cf6f156f..03609e91 100644 --- a/src/main/java/org/apache/pirk/querier/wideskies/Querier.java +++ b/src/main/java/org/apache/pirk/querier/wideskies/Querier.java @@ -23,6 +23,7 @@ import java.util.Map; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.google.gson.annotations.Expose; import org.apache.pirk.encryption.Paillier; import org.apache.pirk.query.wideskies.Query; import org.apache.pirk.serialization.Storable; @@ -33,17 +34,22 @@ public class Querier implements Serializable, 
Storable { public static final long querierSerialVersionUID = 1L; + @Expose public final long querierVersion = querierSerialVersionUID; + @Expose private Query query = null; // contains the query vectors and functionality + @Expose private Paillier paillier = null; // Paillier encryption functionality + @Expose private List selectors = null; // selectors // map to check the embedded selectors in the results for false positives; // if the selector is a fixed size < 32 bits, it is included as is // if the selector is of variable lengths + @Expose private Map embedSelectorMap = null; public Querier(List selectorsInput, Paillier paillierInput, Query queryInput, Map embedSelectorMapInput) { diff --git a/src/main/java/org/apache/pirk/query/wideskies/Query.java b/src/main/java/org/apache/pirk/query/wideskies/Query.java index c324c624..dc3098be 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/Query.java +++ b/src/main/java/org/apache/pirk/query/wideskies/Query.java @@ -42,28 +42,30 @@ public class Query implements Serializable, Storable { public static final long querySerialVersionUID = 1L; // So that we can serialize the version number in jackson. + @Expose public final long queryVersion = querySerialVersionUID; private static final Logger logger = LoggerFactory.getLogger(Query.class); - + @Expose private final QueryInfo queryInfo; // holds all query info - + @Expose private final SortedMap queryElements; // query elements - ordered on insertion // lookup table for exponentiation of query vectors - based on dataPartitionBitSize // element -> - @Expose(serialize = false) private Map> expTable = new ConcurrentHashMap<>(); // File based lookup table for modular exponentiation // element hash -> filename containing it's modular exponentiations - + @Expose private Map expFileBasedLookup = new HashMap<>(); + @Expose private final BigInteger N; // N=pq, RSA modulus for the Paillier encryption associated with the queryElements + @Expose private final BigInteger NSquared; public Query(QueryInfo queryInfo, BigInteger N, SortedMap queryElements) { diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java index 85c85ce1..c9a6157f 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java +++ b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java @@ -196,10 +196,10 @@ public static QueryInfo deserializeInfo(JsonObject queryInfoJson) throws JsonPar } // Deserialize the QuerySchema next, accounting for the possibility that it is null. QuerySchema querySchema; - if (queryInfoJson.get("querySchema").isJsonNull()) { + if (queryInfoJson.get("qSchema").isJsonNull()) { querySchema = null; } else { - querySchema = deserializeSchema(queryInfoJson.get("querySchema").getAsJsonObject()); + querySchema = deserializeSchema(queryInfoJson.get("qSchema").getAsJsonObject()); } // Now start making the QueryInfo object. 
QueryInfo info = new QueryInfo( diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java index 743465e8..836a2a55 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java +++ b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java @@ -25,6 +25,7 @@ import java.util.UUID; import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.google.gson.annotations.Expose; import org.apache.pirk.schema.query.QuerySchema; import org.apache.pirk.schema.query.QuerySchemaRegistry; import org.slf4j.Logger; @@ -40,42 +41,48 @@ public class QueryInfo implements Serializable, Cloneable { public static final long queryInfoSerialVersionUID = 1L; // So that we can serialize the version number in jackson. + @Expose public final long queryInfoVersion = queryInfoSerialVersionUID; private static final Logger logger = LoggerFactory.getLogger(QueryInfo.class); + @Expose private UUID identifier; // the identifier of the query + @Expose private int numSelectors = 0; // the number of selectors in the query, given by \floor{paillerBitSize/dataPartitionBitSize} - + @Expose private String queryType = null; // QueryType string const - + @Expose private int hashBitSize = 0; // Bit size of the keyed hash function + @Expose private String hashKey; // Key for the keyed hash function - + @Expose private int numBitsPerDataElement = 0; // total num bits per returned data value - defined relative to query type + @Expose private int dataPartitionBitSize = 0; // num of bits for each partition of an incoming data element, must be < 32 right now + @Expose private int numPartitionsPerDataElement = 0; // num partitions of size dataPartitionBitSize per data element - @JsonSerialize + @Expose private boolean useExpLookupTable = false; // whether or not to generate and use the expLookupTable for encryption, it is very expensive to compute - @JsonSerialize + @Expose private boolean useHDFSExpLookupTable = false; // whether or not to use the expLookupTable stored in HDFS // if it doesn't yet exist, it will be created within the cluster and stored in HDFS - + @Expose private boolean embedSelector = true; // whether or not to embed the selector in the results - results in a very low // false positive rate for variable length selectors and a zero false positive rate // for selectors of fixed size < 32 bits - + @Expose private QuerySchema qSchema = null; public QueryInfo(int numSelectorsInput, int hashBitSizeInput, String hashKeyInput, int dataPartitionBitSizeInput, String queryTypeInput, diff --git a/src/main/java/org/apache/pirk/response/wideskies/Response.java b/src/main/java/org/apache/pirk/response/wideskies/Response.java index 434e4756..948765e0 100644 --- a/src/main/java/org/apache/pirk/response/wideskies/Response.java +++ b/src/main/java/org/apache/pirk/response/wideskies/Response.java @@ -23,6 +23,7 @@ import java.util.TreeMap; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.google.gson.annotations.Expose; import org.apache.pirk.query.wideskies.QueryInfo; import org.apache.pirk.serialization.Storable; @@ -34,10 +35,13 @@ public class Response implements Serializable, Storable { public static final long responseSerialVersionUID = 1L; + @Expose public final long responseVersion = responseSerialVersionUID; + @Expose private QueryInfo queryInfo = null; // holds all query info + @Expose private TreeMap responseElements = null; // encrypted response columns, colNum -> column public 
Response(QueryInfo queryInfoInput) { diff --git a/src/main/java/org/apache/pirk/schema/query/QuerySchema.java b/src/main/java/org/apache/pirk/schema/query/QuerySchema.java index cc7313f7..294b6e5e 100644 --- a/src/main/java/org/apache/pirk/schema/query/QuerySchema.java +++ b/src/main/java/org/apache/pirk/schema/query/QuerySchema.java @@ -27,6 +27,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import com.google.gson.annotations.Expose; import org.apache.pirk.schema.query.filter.DataFilter; /** @@ -36,36 +37,43 @@ public class QuerySchema implements Serializable { public static final long querySchemaSerialVersionUID = 1L; // So that we can serialize the version number in jackson. - @JsonSerialize + @Expose public final long querySchemaVersion = querySchemaSerialVersionUID; // This schema's name. + @Expose private final String schemaName; // Name of the data schema associated with this query schema. + @Expose private final String dataSchemaName; // Name of element in the dataSchema to be used as the selector. + @Expose private final String selectorName; // Element names from the data schema to include in the response. // Order matters for packing/unpacking. + @Expose private final List elementNames = new ArrayList<>(); // Name of class to use in data filtering. + @Expose private final String filterTypeName; // Instance of the filterTypeName. - @JsonIgnore private final DataFilter filter; // Set of data schema element names on which to apply filtering. + @Expose private final Set filteredElementNames = new HashSet<>(); // Total number of bits to be returned for each data element hit. + @Expose private final int dataElementSize; - // Additional fields by key,value + // Addiional fields by key,value + @Expose private final HashMap additionalFields = new HashMap<>(); public QuerySchema(String schemaName, String dataSchemaName, String selectorName, String filterTypeName, DataFilter filter, int dataElementSize) { diff --git a/src/main/java/org/apache/pirk/serialization/JsonSerializer.java b/src/main/java/org/apache/pirk/serialization/JsonSerializer.java index bf0c301b..eb13a120 100644 --- a/src/main/java/org/apache/pirk/serialization/JsonSerializer.java +++ b/src/main/java/org/apache/pirk/serialization/JsonSerializer.java @@ -19,6 +19,7 @@ package org.apache.pirk.serialization; import java.io.*; +import java.lang.reflect.Modifier; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.gson.Gson; @@ -38,6 +39,9 @@ public class JsonSerializer extends SerializationService { .registerTypeAdapter(Response.class, new ResponseDeserializer()) .registerTypeAdapter(Query.class, new QueryDeserializer()) .registerTypeAdapter(Querier.class, new QuerierDeserializer()) + .setPrettyPrinting() + .excludeFieldsWithoutExposeAnnotation() + .serializeNulls() .create(); /** @@ -50,7 +54,8 @@ public class JsonSerializer extends SerializationService { @Override public void write(OutputStream outputStream, Storable obj) throws IOException { Writer writer = new OutputStreamWriter(outputStream); - gson.toJson(obj); + gson.toJson(obj, obj.getClass(), writer); + writer.close(); //objectMapper.writerWithDefaultPrettyPrinter().writeValue(outputStream, obj); } From 36fc5bfadeea3fec8ff228e144ebd9d36961d1ea Mon Sep 17 00:00:00 2001 From: Walter Ray-Dulany Date: Fri, 7 Oct 2016 17:07:11 -0400 Subject: [PATCH 10/11] Removing import-* --- .../EncryptionPropertiesBuilder.java | 17 ++- .../pirk/querier/wideskies/QuerierCLI.java | 7 +- 
.../wideskies/QuerierDeserializer.java | 53 ++----- .../query/wideskies/QueryDeserializer.java | 133 ++---------------- .../wideskies/ResponseDeserializer.java | 53 +------ .../pirk/serialization/JsonSerializer.java | 13 +- 6 files changed, 56 insertions(+), 220 deletions(-) diff --git a/src/main/java/org/apache/pirk/querier/wideskies/EncryptionPropertiesBuilder.java b/src/main/java/org/apache/pirk/querier/wideskies/EncryptionPropertiesBuilder.java index ab7637d9..1f7d2cd1 100644 --- a/src/main/java/org/apache/pirk/querier/wideskies/EncryptionPropertiesBuilder.java +++ b/src/main/java/org/apache/pirk/querier/wideskies/EncryptionPropertiesBuilder.java @@ -22,7 +22,22 @@ import java.util.Properties; -import static org.apache.pirk.querier.wideskies.QuerierProps.*; +import static org.apache.pirk.querier.wideskies.QuerierProps.BITSET; +import static org.apache.pirk.querier.wideskies.QuerierProps.CERTAINTY; +import static org.apache.pirk.querier.wideskies.QuerierProps.DATAPARTITIONSIZE; +import static org.apache.pirk.querier.wideskies.QuerierProps.EMBEDSELECTOR; +import static org.apache.pirk.querier.wideskies.QuerierProps.HASHBITSIZE; +import static org.apache.pirk.querier.wideskies.QuerierProps.HASHKEY; +import static org.apache.pirk.querier.wideskies.QuerierProps.NUMTHREADS; +import static org.apache.pirk.querier.wideskies.QuerierProps.PAILLIERBITSIZE; +import static org.apache.pirk.querier.wideskies.QuerierProps.QUERYTYPE; +import static org.apache.pirk.querier.wideskies.QuerierProps.USEHDFSLOOKUPTABLE; +import static org.apache.pirk.querier.wideskies.QuerierProps.USEMEMLOOKUPTABLE; +import static org.apache.pirk.querier.wideskies.QuerierProps.setEncryptionDefaults; +import static org.apache.pirk.querier.wideskies.QuerierProps.setGeneralDefaults; +import static org.apache.pirk.querier.wideskies.QuerierProps.validateQuerierEncryptionProperties; + +//import static org.apache.pirk.querier.wideskies.QuerierProps.; /** * Holds the various parameters related to creating a {@link Querier}. 
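Editor's note: the preceding patch wires the serializer to a Gson instance built with excludeFieldsWithoutExposeAnnotation(), so only fields marked @Expose are written, while unannotated runtime state such as Query's in-memory expTable is silently skipped. The following is a minimal, self-contained sketch of that behavior; the Holder class and its fields are hypothetical stand-ins, not Pirk code.

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.annotations.Expose;

import java.math.BigInteger;

public class ExposeSketch
{
  // Hypothetical value class; only the @Expose field survives serialization.
  static class Holder
  {
    @Expose private final BigInteger n;
    private final String scratchState; // no @Expose -> skipped by this Gson configuration

    Holder(BigInteger n, String scratchState)
    {
      this.n = n;
      this.scratchState = scratchState;
    }
  }

  public static void main(String[] args)
  {
    Gson gson = new GsonBuilder()
        .excludeFieldsWithoutExposeAnnotation() // same switch the patches add to JsonSerializer
        .setPrettyPrinting()
        .serializeNulls()
        .create();

    // Prints a JSON object containing only "n"; scratchState never appears.
    System.out.println(gson.toJson(new Holder(BigInteger.valueOf(12345), "not persisted")));
  }
}

Note that serializeNulls() keeps explicitly null fields (for example qSchema) in the output, which is what lets the deserializers test for JsonNull as in the qSchema check above.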
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/QuerierCLI.java b/src/main/java/org/apache/pirk/querier/wideskies/QuerierCLI.java index 826c577c..7cf25b9f 100644 --- a/src/main/java/org/apache/pirk/querier/wideskies/QuerierCLI.java +++ b/src/main/java/org/apache/pirk/querier/wideskies/QuerierCLI.java @@ -18,7 +18,12 @@ */ package org.apache.pirk.querier.wideskies; -import org.apache.commons.cli.*; +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.CommandLineParser; +import org.apache.commons.cli.GnuParser; +import org.apache.commons.cli.HelpFormatter; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; import org.apache.pirk.schema.data.DataSchemaLoader; import org.apache.pirk.schema.query.QuerySchemaLoader; import org.apache.pirk.utils.SystemConfiguration; diff --git a/src/main/java/org/apache/pirk/querier/wideskies/QuerierDeserializer.java b/src/main/java/org/apache/pirk/querier/wideskies/QuerierDeserializer.java index 495cb295..e0f11d6a 100644 --- a/src/main/java/org/apache/pirk/querier/wideskies/QuerierDeserializer.java +++ b/src/main/java/org/apache/pirk/querier/wideskies/QuerierDeserializer.java @@ -18,19 +18,19 @@ */ package org.apache.pirk.querier.wideskies; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.deser.std.StdDeserializer; -import com.google.gson.*; + +import com.google.gson.Gson; +import com.google.gson.JsonDeserializationContext; +import com.google.gson.JsonDeserializer; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParseException; import com.google.gson.reflect.TypeToken; import org.apache.pirk.encryption.Paillier; import org.apache.pirk.query.wideskies.Query; -import java.io.IOException; + import java.lang.reflect.Type; import java.math.BigInteger; import java.util.List; @@ -43,43 +43,6 @@ public class QuerierDeserializer implements JsonDeserializer { private static final Gson gson = new Gson(); - /* - @Override - public Querier deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { - JsonNode node = jsonParser.getCodec().readTree(jsonParser); - // Check the version number. - long querierVersion = node.get("querierVersion").asLong(); - if (querierVersion != Querier.querierSerialVersionUID) { - throw new IOException("Attempt to deserialize unsupported query version. Supported: " - + Querier.querierSerialVersionUID + "; Received: " + querierVersion); - } - // Then deserialize the Query Info - Query query = objectMapper.readValue(node.get("query").toString(), Query.class); - - // Now Paillier - Paillier paillier = deserializePaillier(node.get("paillier")); - - List selectors = objectMapper.readValue(node.get("selectors").toString(), new TypeReference>() { - }); - Map embedSelectorMap = objectMapper.readValue(node.get("embedSelectorMap").toString(), new TypeReference>() { - }); - - return new Querier(selectors, paillier, query, embedSelectorMap); - } - */ - /** - * Deserializes a Paillier JsonNode. - * - * @param paillier A JsonNode at the root of a serialied Paillier object. - * @return A Paillier object of the deserialized Json. 
- */ - private Paillier deserializePaillier(JsonNode paillier) { - BigInteger p = new BigInteger(paillier.get("p").asText()); - BigInteger q = new BigInteger(paillier.get("q").asText()); - int bitLength = paillier.get("bitLength").asInt(); - return new Paillier(p, q, bitLength); - } - @Override public Querier deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException { JsonObject jsonObject = jsonElement.getAsJsonObject(); diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java index c9a6157f..e19ff431 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java +++ b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java @@ -18,14 +18,13 @@ */ package org.apache.pirk.query.wideskies; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.deser.std.StdDeserializer; -import com.google.gson.*; + +import com.google.gson.Gson; +import com.google.gson.JsonDeserializationContext; +import com.google.gson.JsonDeserializer; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParseException; import com.google.gson.reflect.TypeToken; import org.apache.pirk.schema.query.QuerySchema; import org.apache.pirk.schema.query.filter.DataFilter; @@ -37,7 +36,13 @@ import java.io.IOException; import java.lang.reflect.Type; import java.math.BigInteger; -import java.util.*; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.SortedMap; +import java.util.UUID; + /** * Custom deserializer for Query class for Jackson. @@ -71,116 +76,6 @@ public Query deserialize(JsonElement jsonElement, Type type, JsonDeserialization return query; } - /* - @Override - public Query deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { - JsonNode node = jsonParser.getCodec().readTree(jsonParser); - // Check the version number. - long queryVersion = node.get("queryVersion").asLong(); - if (queryVersion != Query.querySerialVersionUID) { - throw new IOException("Attempt to deserialize unsupported query version. Supported: " - + Query.querySerialVersionUID + "; Received: " + queryVersion); - } - // Then deserialize the Query Info - QueryInfo queryInfo = deserializeInfo(node.get("queryInfo")); - SortedMap queryElements = objectMapper.readValue(node.get("queryElements").toString(), new TypeReference>() {}); - BigInteger N = new BigInteger(node.get("n").asText()); - BigInteger NSquared = new BigInteger(node.get("nsquared").asText()); - Map expFileBasedLookup = objectMapper.readValue(node.get("expFileBasedLookup").toString(), new TypeReference>() {}); - - Query query = new Query(queryInfo, N, NSquared, queryElements); - query.setExpFileBasedLookup(expFileBasedLookup); - return query; - } - */ - /** - * Deserializes a QueryInfo JsonNode - * - * @param infoNode A JsonNode at the root of a serialied QueryInfo object. - * @return A QueryInfo object of the deserialized Json. 
- * @throws IOException - */ - /* - public static QueryInfo deserializeInfo(JsonNode infoNode) throws IOException { - // Deserialize The Query Schema First. - long infoVersion = infoNode.get("queryInfoVersion").asLong(); - if (infoVersion != QueryInfo.queryInfoSerialVersionUID) { - throw new IOException("Attempt to deserialize unsupported query info version. Supported: " - + QueryInfo.queryInfoSerialVersionUID + "; Received: " + infoVersion); - } - QuerySchema querySchema; - if (infoNode.get("querySchema").isNull()) { - querySchema = null; - } else { - querySchema = deserializeSchema(infoNode.get("querySchema")); - } - QueryInfo info = new QueryInfo( - UUID.fromString(infoNode.get("identifier").asText()), - infoNode.get("numSelectors").asInt(), - infoNode.get("hashBitSize").asInt(), - infoNode.get("hashKey").asText(), - infoNode.get("dataPartitionBitSize").asInt(), - infoNode.get("queryType").asText(), - infoNode.get("useExpLookupTable").asBoolean(), - infoNode.get("embedSelector").asBoolean(), - infoNode.get("useHDFSExpLookupTable").asBoolean(), - infoNode.get("numBitsPerDataElement").asInt(), - querySchema - ); - return info; - } - */ - - /** - * Deserializes a QuerySchema JsonNode - * - * @param schemaNode A JsonNode at the root of a serialized QuerySchema object. - * @return A QuerySchema object of the deserialized Json. - * @throws IOException - */ - /* - public static QuerySchema deserializeSchema(JsonNode schemaNode) throws IOException { - // Deserialize The Query Schema First. - long schemaVersion = schemaNode.get("querySchemaVersion").asLong(); - if (schemaVersion != QuerySchema.querySchemaSerialVersionUID) { - throw new IOException("Attempt to deserialize unsupported query info version. Supported: " - + QueryInfo.queryInfoSerialVersionUID + "; Received: " + schemaVersion); - } - String dataFilterName = schemaNode.get("filterTypeName").asText(); - Set filteredElementNames; - try { - filteredElementNames = objectMapper.readValue(schemaNode.get("filteredElementNames").toString(), new TypeReference>() { - }); - } catch (Exception e) { - logger.warn("No filtered element names for Query Schema deserialization."); - filteredElementNames = null; - } - // Set up the data filter - DataFilter dataFilter; - try { - dataFilter = FilterFactory.getFilter(dataFilterName, filteredElementNames); - } catch (PIRException e) { - logger.error("Error trying to create data filter from JSON.", e); - throw new IOException(e); - } - - QuerySchema querySchema = new QuerySchema( - schemaNode.get("schemaName").asText(), - schemaNode.get("dataSchemaName").asText(), - schemaNode.get("selectorName").asText(), - dataFilterName, - dataFilter, - schemaNode.get("dataElementSize").asInt() - ); - List elementNames = objectMapper.readValue(schemaNode.get("elementNames").toString(), new TypeReference>() { - }); - querySchema.getElementNames().addAll(elementNames); - HashMap additionalFields = objectMapper.readValue(schemaNode.get("additionalFields").toString(), new TypeReference>() { - }); - querySchema.getAdditionalFields().putAll(additionalFields); - return querySchema; - } - */ /** * Deserializes a QueryInfo JsonObject * @param queryInfoJson A JsonObject at the root of a serialized QueryInfo object. 
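Editor's note: the Gson deserializers in this patch series all follow the same shape: implement JsonDeserializer<T>, check the serialized version field, rebuild the object through its public constructor, and use TypeToken for generic collections. A stripped-down sketch of that pattern follows; Payload, payloadVersion, and values are illustrative names, not Pirk classes.

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParseException;
import com.google.gson.reflect.TypeToken;

import java.lang.reflect.Type;
import java.math.BigInteger;
import java.util.SortedMap;

public class PayloadDeserializer implements JsonDeserializer<PayloadDeserializer.Payload>
{
  private static final Gson gson = new Gson();

  // Hypothetical versioned value object, standing in for Query/Querier/Response.
  public static class Payload
  {
    public static final long serialVersionUID = 1L;
    private final SortedMap<Integer,BigInteger> values;

    public Payload(SortedMap<Integer,BigInteger> values)
    {
      this.values = values;
    }
  }

  @Override public Payload deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext context) throws JsonParseException
  {
    JsonObject jsonObject = jsonElement.getAsJsonObject();

    // Reject payloads written by an incompatible version, mirroring the version checks above.
    long version = jsonObject.get("payloadVersion").getAsLong();
    if (version != Payload.serialVersionUID)
    {
      throw new JsonParseException("Unsupported payload version: " + version);
    }

    // TypeToken captures the generic map type that a plain .class literal cannot express.
    SortedMap<Integer,BigInteger> values = gson.fromJson(jsonObject.get("values"), new TypeToken<SortedMap<Integer,BigInteger>>()
    {
    }.getType());
    return new Payload(values);
  }

  // Registration mirrors the GsonBuilder wiring in JsonSerializer.
  public static Gson build()
  {
    return new GsonBuilder().registerTypeAdapter(Payload.class, new PayloadDeserializer()).create();
  }
}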
diff --git a/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java b/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java index dc9d1508..c842a487 100644 --- a/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java +++ b/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java @@ -18,22 +18,16 @@ */ package org.apache.pirk.response.wideskies; -import com.fasterxml.jackson.core.*; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.deser.std.StdDeserializer; -import com.google.gson.*; +import com.google.gson.Gson; +import com.google.gson.JsonDeserializationContext; +import com.google.gson.JsonDeserializer; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; import com.google.gson.JsonParseException; import com.google.gson.reflect.TypeToken; import org.apache.pirk.query.wideskies.QueryDeserializer; import org.apache.pirk.query.wideskies.QueryInfo; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import java.io.IOException; import java.lang.reflect.Type; import java.math.BigInteger; import java.util.TreeMap; @@ -61,41 +55,4 @@ public Response deserialize(JsonElement jsonElement, Type type, JsonDeserializat return response; } } -/* -public class ResponseDeserializer extends StdDeserializer { - private static final Logger logger = LoggerFactory.getLogger(ResponseDeserializer.class); - - public ResponseDeserializer() { - this(null); - } - - public ResponseDeserializer(Class vc) { - super(vc); - } - - private static ObjectMapper objectMapper = new ObjectMapper(); - - - @Override - public Response deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, JsonProcessingException { - logger.info("Got json parser: " + jsonParser.readValueAsTree().toString()); - JsonNode node = jsonParser.getCodec().readTree(jsonParser); - // Check the version number. - long responseVersion = node.get("responseVersion").asLong(); - if (responseVersion != Response.responseSerialVersionUID) { - throw new IOException("Attempt to deserialize unsupported query version. 
Supported: " - + Response.responseSerialVersionUID + "; Received: " + responseVersion); - } - // Then deserialize the Query Info - QueryInfo queryInfo = QueryDeserializer.deserializeInfo(node.get("queryInfo")); - // Form the initial response object - Response response = new Response(queryInfo); - // Get the response elements - TreeMap responseElements = objectMapper.readValue(node.get("responseElements").toString(), new TypeReference>() { - }); - response.setResponseElements(responseElements); - return response; - } -} -*/ diff --git a/src/main/java/org/apache/pirk/serialization/JsonSerializer.java b/src/main/java/org/apache/pirk/serialization/JsonSerializer.java index eb13a120..cab1e234 100644 --- a/src/main/java/org/apache/pirk/serialization/JsonSerializer.java +++ b/src/main/java/org/apache/pirk/serialization/JsonSerializer.java @@ -18,9 +18,6 @@ */ package org.apache.pirk.serialization; -import java.io.*; -import java.lang.reflect.Modifier; - import com.fasterxml.jackson.databind.ObjectMapper; import com.google.gson.Gson; import com.google.gson.GsonBuilder; @@ -31,10 +28,15 @@ import org.apache.pirk.response.wideskies.ResponseDeserializer; import javax.management.Query; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.io.Reader; +import java.io.Writer; public class JsonSerializer extends SerializationService { - // We really only need the one objectMapper, I think. - public static final ObjectMapper objectMapper = new ObjectMapper(); public static final Gson gson = new GsonBuilder() .registerTypeAdapter(Response.class, new ResponseDeserializer()) .registerTypeAdapter(Query.class, new QueryDeserializer()) @@ -56,7 +58,6 @@ public void write(OutputStream outputStream, Storable obj) throws IOException { Writer writer = new OutputStreamWriter(outputStream); gson.toJson(obj, obj.getClass(), writer); writer.close(); - //objectMapper.writerWithDefaultPrettyPrinter().writeValue(outputStream, obj); } /** From 4a4b4488bb81dbe05a13eb88affb0d3932164926 Mon Sep 17 00:00:00 2001 From: Walter Ray-Dulany Date: Sun, 9 Oct 2016 17:06:19 -0400 Subject: [PATCH 11/11] Used style xml file to properly format code I touched; removed Jackson imports and updated jackson comments to reflect gson. 
--- pom.xml | 9 -- .../org/apache/pirk/encryption/Paillier.java | 99 ++++++++----- .../pirk/querier/wideskies/Querier.java | 34 ++--- .../wideskies/QuerierDeserializer.java | 32 +++-- .../apache/pirk/query/wideskies/Query.java | 77 +++++----- .../query/wideskies/QueryDeserializer.java | 109 +++++++------- .../pirk/query/wideskies/QueryInfo.java | 133 ++++++++++-------- .../pirk/response/wideskies/Response.java | 28 ++-- .../wideskies/ResponseDeserializer.java | 21 +-- .../apache/pirk/schema/query/QuerySchema.java | 73 +++++----- .../pirk/serialization/JsonSerializer.java | 23 ++- .../serialization/SerializationService.java | 12 +- 12 files changed, 350 insertions(+), 300 deletions(-) diff --git a/pom.xml b/pom.xml index 6052b9b3..80e9a2b2 100644 --- a/pom.xml +++ b/pom.xml @@ -93,8 +93,6 @@ 2.0.0 1C true - 2.7.0 - @@ -116,12 +114,6 @@ 1.1.1 - - com.fasterxml.jackson.core - jackson-databind - ${jackson.version} - - commons-net commons-net @@ -157,7 +149,6 @@ log4j log4j - diff --git a/src/main/java/org/apache/pirk/encryption/Paillier.java b/src/main/java/org/apache/pirk/encryption/Paillier.java index 2e126327..da14bbae 100644 --- a/src/main/java/org/apache/pirk/encryption/Paillier.java +++ b/src/main/java/org/apache/pirk/encryption/Paillier.java @@ -23,7 +23,6 @@ import java.security.GeneralSecurityException; import java.security.SecureRandom; -import com.fasterxml.jackson.annotation.JsonIgnore; import com.google.gson.annotations.Expose; import org.apache.pirk.utils.PIRException; import org.apache.pirk.utils.SystemConfiguration; @@ -67,43 +66,46 @@ *

* Ref: Paillier, Pascal. "Public-Key Cryptosystems Based on Composite Degree Residuosity Classes." EUROCRYPT'99. */ -public final class Paillier implements Serializable { +public final class Paillier implements Serializable +{ private static final long serialVersionUID = 1L; private static final Logger logger = LoggerFactory.getLogger(Paillier.class); private static final SecureRandom secureRandom; - static { - try { + static + { + try + { String alg = SystemConfiguration.getProperty("pallier.secureRandom.algorithm"); - if (alg == null) { + if (alg == null) + { secureRandom = new SecureRandom(); - } else { + } + else + { String provider = SystemConfiguration.getProperty("pallier.secureRandom.provider"); secureRandom = (provider == null) ? SecureRandom.getInstance(alg) : SecureRandom.getInstance(alg, provider); } logger.info("Using secure random from " + secureRandom.getProvider().getName() + ":" + secureRandom.getAlgorithm()); - } catch (GeneralSecurityException e) { + } catch (GeneralSecurityException e) + { logger.error("Unable to instantiate a SecureRandom object with the requested algorithm.", e); throw new RuntimeException("Unable to instantiate a SecureRandom object with the requested algorithm.", e); } } - @Expose - private BigInteger p; // large prime - @Expose - private BigInteger q; // large prime + @Expose private BigInteger p; // large prime + @Expose private BigInteger q; // large prime private BigInteger N; // N=pq, RSA modulus - private BigInteger NSquared; // NSquared = N^2 private BigInteger lambdaN; // lambda(N) = lcm(p-1,q-1), Carmichael function of N private BigInteger w; // lambda(N)^-1 mod N - @Expose - private final int bitLength; // bit length of the modulus N + @Expose private final int bitLength; // bit length of the modulus N /** * Creates a Paillier algorithm with all parameters specified. @@ -113,13 +115,15 @@ public final class Paillier implements Serializable { * @param bitLength Bit length of the modulus {@code N}. * @throws IllegalArgumentException If {@code p} or {@code q} do not satisfy primality constraints. */ - public Paillier(BigInteger p, BigInteger q, int bitLength) { + public Paillier(BigInteger p, BigInteger q, int bitLength) + { this.bitLength = bitLength; // Verify the prime conditions are satisfied int primeCertainty = SystemConfiguration.getIntProperty("pir.primeCertainty", 128); BigInteger three = BigInteger.valueOf(3); - if ((p.compareTo(three) < 0) || (q.compareTo(three) < 0) || p.equals(q) || !p.isProbablePrime(primeCertainty) || !q.isProbablePrime(primeCertainty)) { + if ((p.compareTo(three) < 0) || (q.compareTo(three) < 0) || p.equals(q) || !p.isProbablePrime(primeCertainty) || !q.isProbablePrime(primeCertainty)) + { throw new IllegalArgumentException("p = " + p + " q = " + q + " do not satisfy primality constraints"); } @@ -145,7 +149,8 @@ public Paillier(BigInteger p, BigInteger q, int bitLength) { * @param certainty The probability that the new {@code p} and {@code q} represent prime numbers. * @throws IllegalArgumentException If the {@code certainty} is less than the system allowed lower bound. */ - public Paillier(int bitLength, int certainty) { + public Paillier(int bitLength, int certainty) + { this(bitLength, certainty, -1); } @@ -164,12 +169,15 @@ public Paillier(int bitLength, int certainty) { * @param ensureBitSet index of bit in {@code N} to ensure is set. 
* @throws IllegalArgumentException If the {@code certainty} is less than the system allowed lower bound, or the index of {@code ensureBitSet} is greater than the {@code bitLength}. */ - public Paillier(int bitLength, int certainty, int ensureBitSet) { + public Paillier(int bitLength, int certainty, int ensureBitSet) + { int systemPrimeCertainty = SystemConfiguration.getIntProperty("pir.primeCertainty", 128); - if (certainty < systemPrimeCertainty) { + if (certainty < systemPrimeCertainty) + { throw new IllegalArgumentException("Input certainty = " + certainty + " is less than allowed system lower bound = " + systemPrimeCertainty); } - if (ensureBitSet >= bitLength) { + if (ensureBitSet >= bitLength) + { throw new IllegalArgumentException("ensureBitSet = " + ensureBitSet + " must be less than bitLengthInput = " + bitLength); } this.bitLength = bitLength; @@ -184,7 +192,8 @@ public Paillier(int bitLength, int certainty, int ensureBitSet) { * * @return p. */ - public BigInteger getP() { + public BigInteger getP() + { return p; } @@ -193,7 +202,8 @@ public BigInteger getP() { * * @return q. */ - public BigInteger getQ() { + public BigInteger getQ() + { return q; } @@ -202,7 +212,8 @@ public BigInteger getQ() { * * @return N, the product of {@code p} and {@code q}. */ - public BigInteger getN() { + public BigInteger getN() + { return N; } @@ -211,7 +222,8 @@ public BigInteger getN() { * * @return N squared. */ - public BigInteger getNSquared() { + public BigInteger getNSquared() + { return NSquared; } @@ -222,7 +234,8 @@ public BigInteger getNSquared() { * * @return Carmichael's function at {@code N}. */ - public BigInteger getLambdaN() { + public BigInteger getLambdaN() + { return lambdaN; } @@ -231,15 +244,19 @@ public BigInteger getLambdaN() { * * @return the bit length, as an integer. */ - public int getBitLength() { + public int getBitLength() + { return bitLength; } - private void generateKeys(int bitLength, int certainty, final int ensureBitSet) { + private void generateKeys(int bitLength, int certainty, final int ensureBitSet) + { getKeys(bitLength, certainty); - if (ensureBitSet > -1) { - while (!N.testBit(ensureBitSet)) { + if (ensureBitSet > -1) + { + while (!N.testBit(ensureBitSet)) + { logger.info("testBit false\n N = " + N.toString(2)); getKeys(bitLength, certainty); } @@ -247,7 +264,8 @@ private void generateKeys(int bitLength, int certainty, final int ensureBitSet) } } - private void getKeys(int bitLength, int certainty) { + private void getKeys(int bitLength, int certainty) + { // Generate the primes BigInteger[] pq = PrimeGenerator.getPrimePair(bitLength, certainty, secureRandom); p = pq[0]; @@ -256,7 +274,8 @@ private void getKeys(int bitLength, int certainty) { N = p.multiply(q); } - private void setDerivativeElements() { + private void setDerivativeElements() + { NSquared = N.multiply(N); // lambda(N) = lcm(p-1,q-1) @@ -272,10 +291,12 @@ private void setDerivativeElements() { * @return the encrypted value * @throws PIRException If {@code m} is not less than @{code N}. 
*/ - public BigInteger encrypt(BigInteger m) throws PIRException { + public BigInteger encrypt(BigInteger m) throws PIRException + { // Generate a random value r in (Z/NZ)* BigInteger r = (new BigInteger(bitLength, secureRandom)).mod(N); - while (r.equals(BigInteger.ZERO) || r.equals(BigInteger.ONE) || r.mod(p).equals(BigInteger.ZERO) || r.mod(q).equals(BigInteger.ZERO)) { + while (r.equals(BigInteger.ZERO) || r.equals(BigInteger.ONE) || r.mod(p).equals(BigInteger.ZERO) || r.mod(q).equals(BigInteger.ZERO)) + { r = (new BigInteger(bitLength, secureRandom)).mod(N); } @@ -290,8 +311,10 @@ public BigInteger encrypt(BigInteger m) throws PIRException { * @return the encrypted value. * @throws PIRException If {@code m} is not less than @{code N}. */ - public BigInteger encrypt(BigInteger m, BigInteger r) throws PIRException { - if (m.compareTo(N) >= 0) { + public BigInteger encrypt(BigInteger m, BigInteger r) throws PIRException + { + if (m.compareTo(N) >= 0) + { throw new PIRException("m = " + m.toString(2) + " is greater than or equal to N = " + N.toString(2)); } @@ -308,7 +331,8 @@ public BigInteger encrypt(BigInteger m, BigInteger r) throws PIRException { * @param c an encrypted value. * @return the corresponding plaintext value. */ - public BigInteger decrypt(BigInteger c) { + public BigInteger decrypt(BigInteger c) + { // w = lambda(N)^-1 mod N; x = c^(lambda(N)) mod N^2; y = (x-1)/N; d = yw mod N BigInteger x = ModPowAbstraction.modPow(c, lambdaN, NSquared); BigInteger y = (x.subtract(BigInteger.ONE)).divide(N); @@ -316,7 +340,8 @@ public BigInteger decrypt(BigInteger c) { return (y.multiply(w)).mod(N); } - private String parametersToString() { + private String parametersToString() + { return "p = " + p.intValue() + " q = " + q.intValue() + " N = " + N.intValue() + " NSquared = " + NSquared.intValue() + " lambdaN = " + lambdaN.intValue() + " bitLength = " + bitLength; } diff --git a/src/main/java/org/apache/pirk/querier/wideskies/Querier.java b/src/main/java/org/apache/pirk/querier/wideskies/Querier.java index 03609e91..efb5a116 100644 --- a/src/main/java/org/apache/pirk/querier/wideskies/Querier.java +++ b/src/main/java/org/apache/pirk/querier/wideskies/Querier.java @@ -22,7 +22,6 @@ import java.util.List; import java.util.Map; -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.google.gson.annotations.Expose; import org.apache.pirk.encryption.Paillier; import org.apache.pirk.query.wideskies.Query; @@ -31,28 +30,25 @@ /** * Class to hold the information necessary for the PIR querier to perform decryption */ -public class Querier implements Serializable, Storable { +public class Querier implements Serializable, Storable +{ public static final long querierSerialVersionUID = 1L; - @Expose - public final long querierVersion = querierSerialVersionUID; + @Expose public final long querierVersion = querierSerialVersionUID; - @Expose - private Query query = null; // contains the query vectors and functionality + @Expose private Query query = null; // contains the query vectors and functionality - @Expose - private Paillier paillier = null; // Paillier encryption functionality + @Expose private Paillier paillier = null; // Paillier encryption functionality - @Expose - private List selectors = null; // selectors + @Expose private List selectors = null; // selectors // map to check the embedded selectors in the results for false positives; // if the selector is a fixed size < 32 bits, it is included as is // if the selector is of variable lengths - @Expose - private Map 
embedSelectorMap = null; + @Expose private Map embedSelectorMap = null; - public Querier(List selectorsInput, Paillier paillierInput, Query queryInput, Map embedSelectorMapInput) { + public Querier(List selectorsInput, Paillier paillierInput, Query queryInput, Map embedSelectorMapInput) + { selectors = selectorsInput; paillier = paillierInput; @@ -62,19 +58,23 @@ public Querier(List selectorsInput, Paillier paillierInput, Query queryI embedSelectorMap = embedSelectorMapInput; } - public Query getQuery() { + public Query getQuery() + { return query; } - public Paillier getPaillier() { + public Paillier getPaillier() + { return paillier; } - public List getSelectors() { + public List getSelectors() + { return selectors; } - public Map getEmbedSelectorMap() { + public Map getEmbedSelectorMap() + { return embedSelectorMap; } } diff --git a/src/main/java/org/apache/pirk/querier/wideskies/QuerierDeserializer.java b/src/main/java/org/apache/pirk/querier/wideskies/QuerierDeserializer.java index e0f11d6a..6971a263 100644 --- a/src/main/java/org/apache/pirk/querier/wideskies/QuerierDeserializer.java +++ b/src/main/java/org/apache/pirk/querier/wideskies/QuerierDeserializer.java @@ -18,8 +18,6 @@ */ package org.apache.pirk.querier.wideskies; -import com.fasterxml.jackson.databind.JsonNode; - import com.google.gson.Gson; import com.google.gson.JsonDeserializationContext; import com.google.gson.JsonDeserializer; @@ -30,27 +28,28 @@ import org.apache.pirk.encryption.Paillier; import org.apache.pirk.query.wideskies.Query; - import java.lang.reflect.Type; import java.math.BigInteger; import java.util.List; import java.util.Map; /** - * Custom deserializer for Querier class for Jackson. + * Custom deserializer for Querier class for Gson. */ -public class QuerierDeserializer implements JsonDeserializer { +public class QuerierDeserializer implements JsonDeserializer +{ private static final Gson gson = new Gson(); - @Override - public Querier deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException { + @Override public Querier deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException + { JsonObject jsonObject = jsonElement.getAsJsonObject(); // Check the version number. long querierVersion = jsonObject.get("querierVersion").getAsLong(); - if (querierVersion != Querier.querierSerialVersionUID) { - throw new JsonParseException("Attempt to deserialize unsupported query version. Supported: " - + Querier.querierSerialVersionUID + "; Received: " + querierVersion); + if (querierVersion != Querier.querierSerialVersionUID) + { + throw new JsonParseException( + "Attempt to deserialize unsupported query version. 
Supported: " + Querier.querierSerialVersionUID + "; Received: " + querierVersion); } // Then deserialize the Query Info Query query = gson.fromJson(jsonObject.get("query").toString(), Query.class); @@ -58,8 +57,12 @@ public Querier deserialize(JsonElement jsonElement, Type type, JsonDeserializati // Now Paillier Paillier paillier = deserializePaillier(jsonObject.get("paillier").getAsJsonObject()); - List selectors = gson.fromJson(jsonObject.get("selectors").toString(), new TypeToken>() {}.getType()); - Map embedSelectorMap = gson.fromJson(jsonObject.get("embedSelectorMap").toString(), new TypeToken>() {}.getType()); + List selectors = gson.fromJson(jsonObject.get("selectors").toString(), new TypeToken>() + { + }.getType()); + Map embedSelectorMap = gson.fromJson(jsonObject.get("embedSelectorMap").toString(), new TypeToken>() + { + }.getType()); return new Querier(selectors, paillier, query, embedSelectorMap); } @@ -70,11 +73,12 @@ public Querier deserialize(JsonElement jsonElement, Type type, JsonDeserializati * @param paillier A JsonObject at the root of a serialied Paillier object. * @return A Paillier object of the deserialized Json. */ - private Paillier deserializePaillier(JsonObject paillier) { + private Paillier deserializePaillier(JsonObject paillier) + { BigInteger p = new BigInteger(paillier.get("p").getAsString()); BigInteger q = new BigInteger(paillier.get("q").getAsString()); int bitLength = paillier.get("bitLength").getAsInt(); return new Paillier(p, q, bitLength); } - + } diff --git a/src/main/java/org/apache/pirk/query/wideskies/Query.java b/src/main/java/org/apache/pirk/query/wideskies/Query.java index dc3098be..4922d9d6 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/Query.java +++ b/src/main/java/org/apache/pirk/query/wideskies/Query.java @@ -26,8 +26,6 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.function.Consumer; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.google.gson.annotations.Expose; import org.apache.pirk.encryption.ModPowAbstraction; import org.apache.pirk.serialization.Storable; @@ -38,76 +36,81 @@ * Class to hold the PIR query vectors */ -public class Query implements Serializable, Storable { +public class Query implements Serializable, Storable +{ public static final long querySerialVersionUID = 1L; - // So that we can serialize the version number in jackson. - @Expose - public final long queryVersion = querySerialVersionUID; + // So that we can serialize the version number in gson. 
+ @Expose public final long queryVersion = querySerialVersionUID; private static final Logger logger = LoggerFactory.getLogger(Query.class); - @Expose - private final QueryInfo queryInfo; // holds all query info + @Expose private final QueryInfo queryInfo; // holds all query info - @Expose - private final SortedMap queryElements; // query elements - ordered on insertion + @Expose private final SortedMap queryElements; // query elements - ordered on insertion // lookup table for exponentiation of query vectors - based on dataPartitionBitSize // element -> - private Map> expTable = new ConcurrentHashMap<>(); + private Map> expTable = new ConcurrentHashMap<>(); // File based lookup table for modular exponentiation // element hash -> filename containing it's modular exponentiations - @Expose - private Map expFileBasedLookup = new HashMap<>(); + @Expose private Map expFileBasedLookup = new HashMap<>(); - @Expose - private final BigInteger N; // N=pq, RSA modulus for the Paillier encryption associated with the queryElements + @Expose private final BigInteger N; // N=pq, RSA modulus for the Paillier encryption associated with the queryElements - @Expose - private final BigInteger NSquared; + @Expose private final BigInteger NSquared; - public Query(QueryInfo queryInfo, BigInteger N, SortedMap queryElements) { + public Query(QueryInfo queryInfo, BigInteger N, SortedMap queryElements) + { this(queryInfo, N, N.pow(2), queryElements); } - public Query(QueryInfo queryInfo, BigInteger N, BigInteger NSquared, SortedMap queryElements) { + public Query(QueryInfo queryInfo, BigInteger N, BigInteger NSquared, SortedMap queryElements) + { this.queryInfo = queryInfo; this.N = N; this.NSquared = NSquared; this.queryElements = queryElements; } - public QueryInfo getQueryInfo() { + public QueryInfo getQueryInfo() + { return queryInfo; } - public SortedMap getQueryElements() { + public SortedMap getQueryElements() + { return queryElements; } - public BigInteger getQueryElement(int index) { + public BigInteger getQueryElement(int index) + { return queryElements.get(index); } - public BigInteger getN() { + public BigInteger getN() + { return N; } - public BigInteger getNSquared() { + public BigInteger getNSquared() + { return NSquared; } - public Map getExpFileBasedLookup() { + public Map getExpFileBasedLookup() + { return expFileBasedLookup; } - public String getExpFile(int i) { + public String getExpFile(int i) + { return expFileBasedLookup.get(i); } - public void setExpFileBasedLookup(Map expInput) { + public void setExpFileBasedLookup(Map expInput) + { expFileBasedLookup = expInput; } @@ -115,14 +118,17 @@ public void setExpFileBasedLookup(Map expInput) { * This should be called after all query elements have been added in order to generate the expTable. For int exponentiation with BigIntegers, assumes that * dataPartitionBitSize < 32. 
*/ - public void generateExpTable() { + public void generateExpTable() + { int maxValue = (1 << queryInfo.getDataPartitionBitSize()) - 1; // 2^partitionBitSize - 1 - queryElements.values().parallelStream().forEach(new Consumer() { - @Override - public void accept(BigInteger element) { - Map powMap = new HashMap<>(maxValue); // - for (int i = 0; i <= maxValue; ++i) { + queryElements.values().parallelStream().forEach(new Consumer() + { + @Override public void accept(BigInteger element) + { + Map powMap = new HashMap<>(maxValue); // + for (int i = 0; i <= maxValue; ++i) + { BigInteger value = ModPowAbstraction.modPow(element, BigInteger.valueOf(i), NSquared); powMap.put(i, value); } @@ -132,8 +138,9 @@ public void accept(BigInteger element) { logger.debug("expTable.size() = " + expTable.keySet().size() + " NSquared = " + NSquared.intValue() + " = " + NSquared.toString()); } - public BigInteger getExp(BigInteger value, int power) { - Map powerMap = expTable.get(value); + public BigInteger getExp(BigInteger value, int power) + { + Map powerMap = expTable.get(value); return (powerMap == null) ? null : powerMap.get(power); } } diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java index e19ff431..92fdfa0f 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java +++ b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java @@ -18,7 +18,6 @@ */ package org.apache.pirk.query.wideskies; - import com.google.gson.Gson; import com.google.gson.JsonDeserializationContext; import com.google.gson.JsonDeserializer; @@ -43,33 +42,37 @@ import java.util.SortedMap; import java.util.UUID; - /** - * Custom deserializer for Query class for Jackson. + * Custom deserializer for Query class for Gson. */ -public class QueryDeserializer implements JsonDeserializer { +public class QueryDeserializer implements JsonDeserializer +{ private static final Logger logger = LoggerFactory.getLogger(QueryDeserializer.class); private static final Gson gson = new Gson(); - - @Override - public Query deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException { + @Override public Query deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException + { JsonObject jsonObject = jsonElement.getAsJsonObject(); logger.info("Got query json:" + jsonObject.toString()); // Check the version number. long queryVersion = jsonObject.get("queryVersion").getAsLong(); - if (queryVersion != Query.querySerialVersionUID) { - throw new JsonParseException("Attempt to deserialize unsupported query version. Supported: " - + Query.querySerialVersionUID + "; Received: " + queryVersion); + if (queryVersion != Query.querySerialVersionUID) + { + throw new JsonParseException( + "Attempt to deserialize unsupported query version. 
Supported: " + Query.querySerialVersionUID + "; Received: " + queryVersion); } // Then deserialize the Query Info QueryInfo queryInfo = deserializeInfo(jsonObject.get("queryInfo").getAsJsonObject()); - SortedMap queryElements = gson.fromJson(jsonObject.get("queryElements"), new TypeToken>() {}.getType()); + SortedMap queryElements = gson.fromJson(jsonObject.get("queryElements"), new TypeToken>() + { + }.getType()); BigInteger N = new BigInteger(jsonObject.get("n").getAsString()); BigInteger NSquared = new BigInteger(jsonObject.get("nsquared").getAsString()); - Map expFileBasedLookup = gson.fromJson(jsonObject.get("expFileBasedLookup"), new TypeToken>() {}.getType()); + Map expFileBasedLookup = gson.fromJson(jsonObject.get("expFileBasedLookup"), new TypeToken>() + { + }.getType()); Query query = new Query(queryInfo, N, NSquared, queryElements); query.setExpFileBasedLookup(expFileBasedLookup); @@ -77,83 +80,87 @@ public Query deserialize(JsonElement jsonElement, Type type, JsonDeserialization } /** - * Deserializes a QueryInfo JsonObject + * Deserializes a QueryInfo JsonObject + * * @param queryInfoJson A JsonObject at the root of a serialized QueryInfo object. * @return A QueryInfo object of the deserialized Json. * @throws JsonParseException */ - public static QueryInfo deserializeInfo(JsonObject queryInfoJson) throws JsonParseException { + public static QueryInfo deserializeInfo(JsonObject queryInfoJson) throws JsonParseException + { // First check the version. long infoVersion = queryInfoJson.get("queryInfoVersion").getAsLong(); - if (infoVersion != QueryInfo.queryInfoSerialVersionUID) { - throw new JsonParseException("Attempt to deserialize unsupported query info version. Supported: " - + QueryInfo.queryInfoSerialVersionUID + "; Received: " + infoVersion); + if (infoVersion != QueryInfo.queryInfoSerialVersionUID) + { + throw new JsonParseException( + "Attempt to deserialize unsupported query info version. Supported: " + QueryInfo.queryInfoSerialVersionUID + "; Received: " + infoVersion); } // Deserialize the QuerySchema next, accounting for the possibility that it is null. QuerySchema querySchema; - if (queryInfoJson.get("qSchema").isJsonNull()) { + if (queryInfoJson.get("qSchema").isJsonNull()) + { querySchema = null; - } else { + } + else + { querySchema = deserializeSchema(queryInfoJson.get("qSchema").getAsJsonObject()); } // Now start making the QueryInfo object. 
- QueryInfo info = new QueryInfo( - UUID.fromString(queryInfoJson.get("identifier").getAsString()), - queryInfoJson.get("numSelectors").getAsInt(), - queryInfoJson.get("hashBitSize").getAsInt(), - queryInfoJson.get("hashKey").getAsString(), - queryInfoJson.get("dataPartitionBitSize").getAsInt(), - queryInfoJson.get("queryType").getAsString(), - queryInfoJson.get("useExpLookupTable").getAsBoolean(), - queryInfoJson.get("embedSelector").getAsBoolean(), - queryInfoJson.get("useHDFSExpLookupTable").getAsBoolean(), - queryInfoJson.get("numBitsPerDataElement").getAsInt(), - querySchema - ); + QueryInfo info = new QueryInfo(UUID.fromString(queryInfoJson.get("identifier").getAsString()), queryInfoJson.get("numSelectors").getAsInt(), + queryInfoJson.get("hashBitSize").getAsInt(), queryInfoJson.get("hashKey").getAsString(), queryInfoJson.get("dataPartitionBitSize").getAsInt(), + queryInfoJson.get("queryType").getAsString(), queryInfoJson.get("useExpLookupTable").getAsBoolean(), queryInfoJson.get("embedSelector").getAsBoolean(), + queryInfoJson.get("useHDFSExpLookupTable").getAsBoolean(), queryInfoJson.get("numBitsPerDataElement").getAsInt(), querySchema); return info; } /** * Deserializes a QuerySchema JsonObject + * * @param querySchemaJson A JsonObject at the root of a serialized QuerySchema object. * @return A QuerySchema object of the deserialized Json. * @throws JsonParseException */ - private static QuerySchema deserializeSchema(JsonObject querySchemaJson) throws JsonParseException{ + private static QuerySchema deserializeSchema(JsonObject querySchemaJson) throws JsonParseException + { // Deserialize The Query Schema First. long schemaVersion = querySchemaJson.get("querySchemaVersion").getAsLong(); - if (schemaVersion != QuerySchema.querySchemaSerialVersionUID) { - throw new JsonParseException("Attempt to deserialize unsupported query info version. Supported: " - + QueryInfo.queryInfoSerialVersionUID + "; Received: " + schemaVersion); + if (schemaVersion != QuerySchema.querySchemaSerialVersionUID) + { + throw new JsonParseException( + "Attempt to deserialize unsupported query info version. 
Supported: " + QueryInfo.queryInfoSerialVersionUID + "; Received: " + schemaVersion); } String dataFilterName = querySchemaJson.get("filterTypeName").getAsString(); Set filteredElementNames; - try { - filteredElementNames = gson.fromJson(querySchemaJson.get("filteredElementNames"), new TypeToken>() {}.getType()); - } catch (Exception e) { + try + { + filteredElementNames = gson.fromJson(querySchemaJson.get("filteredElementNames"), new TypeToken>() + { + }.getType()); + } catch (Exception e) + { logger.warn("No filtered element names for Query Schema deserialization."); filteredElementNames = null; } // Set up the data filter DataFilter dataFilter; - try { + try + { dataFilter = FilterFactory.getFilter(dataFilterName, filteredElementNames); - } catch (IOException|PIRException e) { + } catch (IOException | PIRException e) + { logger.error("Error trying to create data filter from JSON.", e); throw new JsonParseException(e); } - QuerySchema querySchema = new QuerySchema( - querySchemaJson.get("schemaName").getAsString(), - querySchemaJson.get("dataSchemaName").getAsString(), - querySchemaJson.get("selectorName").getAsString(), - dataFilterName, - dataFilter, - querySchemaJson.get("dataElementSize").getAsInt() - ); - List elementNames = gson.fromJson(querySchemaJson.get("elementNames"), new TypeToken>() {}.getType()); + QuerySchema querySchema = new QuerySchema(querySchemaJson.get("schemaName").getAsString(), querySchemaJson.get("dataSchemaName").getAsString(), + querySchemaJson.get("selectorName").getAsString(), dataFilterName, dataFilter, querySchemaJson.get("dataElementSize").getAsInt()); + List elementNames = gson.fromJson(querySchemaJson.get("elementNames"), new TypeToken>() + { + }.getType()); querySchema.getElementNames().addAll(elementNames); - HashMap additionalFields = gson.fromJson(querySchemaJson.get("additionalFields"), new TypeToken>() {}.getType()); + HashMap additionalFields = gson.fromJson(querySchemaJson.get("additionalFields"), new TypeToken>() + { + }.getType()); querySchema.getAdditionalFields().putAll(additionalFields); return querySchema; } diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java index 836a2a55..20fbb36e 100644 --- a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java +++ b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java @@ -19,80 +19,69 @@ package org.apache.pirk.query.wideskies; -import java.io.Serializable; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; - -import com.fasterxml.jackson.databind.annotation.JsonSerialize; import com.google.gson.annotations.Expose; import org.apache.pirk.schema.query.QuerySchema; import org.apache.pirk.schema.query.QuerySchemaRegistry; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.Serializable; +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; + /** * Class to hold all of the basic information regarding a query *

* Note that the hash key is specific to the query. If we have hash collisions over our selector set, we will append integers to the key starting with 0 until * we no longer have collisions */ -public class QueryInfo implements Serializable, Cloneable { +public class QueryInfo implements Serializable, Cloneable +{ public static final long queryInfoSerialVersionUID = 1L; - // So that we can serialize the version number in jackson. - @Expose - public final long queryInfoVersion = queryInfoSerialVersionUID; + // So that we can serialize the version number in gson. + @Expose public final long queryInfoVersion = queryInfoSerialVersionUID; private static final Logger logger = LoggerFactory.getLogger(QueryInfo.class); - @Expose - private UUID identifier; // the identifier of the query + @Expose private UUID identifier; // the identifier of the query - @Expose - private int numSelectors = 0; // the number of selectors in the query, given by \floor{paillerBitSize/dataPartitionBitSize} + @Expose private int numSelectors = 0; // the number of selectors in the query, given by \floor{paillerBitSize/dataPartitionBitSize} - @Expose - private String queryType = null; // QueryType string const + @Expose private String queryType = null; // QueryType string const - @Expose - private int hashBitSize = 0; // Bit size of the keyed hash function + @Expose private int hashBitSize = 0; // Bit size of the keyed hash function - @Expose - private String hashKey; // Key for the keyed hash function + @Expose private String hashKey; // Key for the keyed hash function - @Expose - private int numBitsPerDataElement = 0; // total num bits per returned data value - defined relative to query type + @Expose private int numBitsPerDataElement = 0; // total num bits per returned data value - defined relative to query type - @Expose - private int dataPartitionBitSize = 0; // num of bits for each partition of an incoming data element, must be < 32 right now + @Expose private int dataPartitionBitSize = 0; // num of bits for each partition of an incoming data element, must be < 32 right now - @Expose - private int numPartitionsPerDataElement = 0; // num partitions of size dataPartitionBitSize per data element + @Expose private int numPartitionsPerDataElement = 0; // num partitions of size dataPartitionBitSize per data element - @Expose - private boolean useExpLookupTable = false; // whether or not to generate and use the expLookupTable for encryption, it is very expensive to compute + @Expose private boolean useExpLookupTable = false; // whether or not to generate and use the expLookupTable for encryption, it is very expensive to compute - @Expose - private boolean useHDFSExpLookupTable = false; // whether or not to use the expLookupTable stored in HDFS + @Expose private boolean useHDFSExpLookupTable = false; // whether or not to use the expLookupTable stored in HDFS // if it doesn't yet exist, it will be created within the cluster and stored in HDFS - @Expose - private boolean embedSelector = true; // whether or not to embed the selector in the results - results in a very low + @Expose private boolean embedSelector = true; // whether or not to embed the selector in the results - results in a very low // false positive rate for variable length selectors and a zero false positive rate // for selectors of fixed size < 32 bits - @Expose - private QuerySchema qSchema = null; + @Expose private QuerySchema qSchema = null; public QueryInfo(int numSelectorsInput, int hashBitSizeInput, String hashKeyInput, int dataPartitionBitSizeInput, String 
queryTypeInput, - boolean useExpLookupTableInput, boolean embedSelectorInput, boolean useHDFSExpLookupTableInput) { + boolean useExpLookupTableInput, boolean embedSelectorInput, boolean useHDFSExpLookupTableInput) + { this(UUID.randomUUID(), numSelectorsInput, hashBitSizeInput, hashKeyInput, dataPartitionBitSizeInput, queryTypeInput, useExpLookupTableInput, embedSelectorInput, useHDFSExpLookupTableInput); } public QueryInfo(UUID identifierInput, int numSelectorsInput, int hashBitSizeInput, String hashKeyInput, int dataPartitionBitSizeInput, String queryTypeInput, - boolean useExpLookupTableInput, boolean embedSelectorInput, boolean useHDFSExpLookupTableInput) { + boolean useExpLookupTableInput, boolean embedSelectorInput, boolean useHDFSExpLookupTableInput) + { identifier = identifierInput; queryType = queryTypeInput; @@ -108,7 +97,8 @@ public QueryInfo(UUID identifierInput, int numSelectorsInput, int hashBitSizeInp dataPartitionBitSize = dataPartitionBitSizeInput; numPartitionsPerDataElement = numBitsPerDataElement / dataPartitionBitSizeInput; - if (embedSelectorInput) { + if (embedSelectorInput) + { numPartitionsPerDataElement += 4; // using a 8-bit partition size and a 32-bit embedded selector } @@ -116,7 +106,8 @@ public QueryInfo(UUID identifierInput, int numSelectorsInput, int hashBitSizeInp } public QueryInfo(UUID identifierInput, int numSelectorsInput, int hashBitSizeInput, String hashKeyInput, int dataPartitionBitSizeInput, String queryTypeInput, - boolean useExpLookupTableInput, boolean embedSelectorInput, boolean useHDFSExpLookupTableInput, int numBitsPerDataElementInput, QuerySchema querySchemaInput) + boolean useExpLookupTableInput, boolean embedSelectorInput, boolean useHDFSExpLookupTableInput, int numBitsPerDataElementInput, + QuerySchema querySchemaInput) { identifier = identifierInput; queryType = queryTypeInput; @@ -144,7 +135,8 @@ public QueryInfo(UUID identifierInput, int numSelectorsInput, int hashBitSizeInp printQueryInfo(); } - public QueryInfo(Map queryInfoMap) { + public QueryInfo(Map queryInfoMap) + { // The Storm Config serializes the map as a json and reads back in with numeric values as longs. // So numerics need to be cast as a long and call .intValue. However, in PirkHashScheme the map contains ints. 
identifier = UUID.fromString((String) queryInfoMap.get("uuid")); @@ -153,13 +145,15 @@ public QueryInfo(Map queryInfoMap) { useExpLookupTable = (boolean) queryInfoMap.get("useExpLookupTable"); useHDFSExpLookupTable = (boolean) queryInfoMap.get("useHDFSExpLookupTable"); embedSelector = (boolean) queryInfoMap.get("embedSelector"); - try { + try + { numSelectors = ((Long) queryInfoMap.get("numSelectors")).intValue(); hashBitSize = ((Long) queryInfoMap.get("hashBitSize")).intValue(); numBitsPerDataElement = ((Long) queryInfoMap.get("numBitsPerDataElement")).intValue(); numPartitionsPerDataElement = ((Long) queryInfoMap.get("numPartitionsPerDataElement")).intValue(); dataPartitionBitSize = ((Long) queryInfoMap.get("dataPartitionsBitSize")).intValue(); - } catch (ClassCastException e) { + } catch (ClassCastException e) + { numSelectors = (int) queryInfoMap.get("numSelectors"); hashBitSize = (int) queryInfoMap.get("hashBitSize"); numBitsPerDataElement = (int) queryInfoMap.get("numBitsPerDataElement"); @@ -168,52 +162,64 @@ public QueryInfo(Map queryInfoMap) { } } - public UUID getIdentifier() { + public UUID getIdentifier() + { return identifier; } - public String getQueryType() { + public String getQueryType() + { return queryType; } - public int getNumSelectors() { + public int getNumSelectors() + { return numSelectors; } - public int getHashBitSize() { + public int getHashBitSize() + { return hashBitSize; } - public String getHashKey() { + public String getHashKey() + { return hashKey; } - public int getNumBitsPerDataElement() { + public int getNumBitsPerDataElement() + { return numBitsPerDataElement; } - public int getNumPartitionsPerDataElement() { + public int getNumPartitionsPerDataElement() + { return numPartitionsPerDataElement; } - public int getDataPartitionBitSize() { + public int getDataPartitionBitSize() + { return dataPartitionBitSize; } - public boolean useExpLookupTable() { + public boolean useExpLookupTable() + { return useExpLookupTable; } - public boolean useHDFSExpLookupTable() { + public boolean useHDFSExpLookupTable() + { return useHDFSExpLookupTable; } - public boolean getEmbedSelector() { + public boolean getEmbedSelector() + { return embedSelector; } - public Map toMap() { - Map queryInfo = new HashMap(); + public Map toMap() + { + Map queryInfo = new HashMap(); queryInfo.put("uuid", identifier.toString()); queryInfo.put("queryType", queryType); queryInfo.put("numSelectors", numSelectors); @@ -229,26 +235,31 @@ public Map toMap() { return queryInfo; } - public void addQuerySchema(QuerySchema qSchemaIn) { + public void addQuerySchema(QuerySchema qSchemaIn) + { qSchema = qSchemaIn; } - public QuerySchema getQuerySchema() { + public QuerySchema getQuerySchema() + { return qSchema; } - public void printQueryInfo() { + public void printQueryInfo() + { logger.info("identifier = " + identifier + " numSelectors = " + numSelectors + " hashBitSize = " + hashBitSize + " hashKey = " + hashKey + " dataPartitionBitSize = " + dataPartitionBitSize + " numBitsPerDataElement = " + numBitsPerDataElement + " numPartitionsPerDataElement = " + numPartitionsPerDataElement + " queryType = " + queryType + " useExpLookupTable = " + useExpLookupTable + " useHDFSExpLookupTable = " + useHDFSExpLookupTable + " embedSelector = " + embedSelector); } - @Override - public QueryInfo clone() { - try { + @Override public QueryInfo clone() + { + try + { return (QueryInfo) super.clone(); - } catch (CloneNotSupportedException e) { + } catch (CloneNotSupportedException e) + { throw new RuntimeException(e); } } 
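For reference, a minimal usage sketch of the new Gson-based serialization path introduced by this series (hypothetical code, not part of the patch): it builds on only the APIs visible here (QueryInfo, Response, JsonSerializer, SerializationService.toBytes). The class name and constructor arguments are illustrative placeholders, and it assumes a query schema for the chosen query type has already been registered so the QueryInfo constructor can resolve its bit sizes.

// Illustrative only -- not included in this patch series.
import java.io.ByteArrayInputStream;
import java.math.BigInteger;

import org.apache.pirk.query.wideskies.QueryInfo;
import org.apache.pirk.response.wideskies.Response;
import org.apache.pirk.serialization.JsonSerializer;

public class JsonSerializationSketch
{
  public static void main(String[] args) throws Exception
  {
    // Placeholder values; a query schema for "test-query-type" is assumed to be registered.
    QueryInfo queryInfo = new QueryInfo(1, 12, "hashKey", 8, "test-query-type", false, true, false);

    Response response = new Response(queryInfo);
    response.addElement(0, BigInteger.valueOf(42));

    // Round trip through the Gson-backed serializer: only @Expose'd fields are written.
    JsonSerializer serializer = new JsonSerializer();
    byte[] json = serializer.toBytes(response);
    Response copy = serializer.read(new ByteArrayInputStream(json), Response.class);

    System.out.println(copy.getResponseElements()); // {0=42}
  }
}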
diff --git a/src/main/java/org/apache/pirk/response/wideskies/Response.java b/src/main/java/org/apache/pirk/response/wideskies/Response.java index 948765e0..b94b977a 100644 --- a/src/main/java/org/apache/pirk/response/wideskies/Response.java +++ b/src/main/java/org/apache/pirk/response/wideskies/Response.java @@ -22,7 +22,6 @@ import java.math.BigInteger; import java.util.TreeMap; -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.google.gson.annotations.Expose; import org.apache.pirk.query.wideskies.QueryInfo; import org.apache.pirk.serialization.Storable; @@ -32,36 +31,39 @@ *

* Serialized and returned to the querier for decryption */ -public class Response implements Serializable, Storable { +public class Response implements Serializable, Storable +{ public static final long responseSerialVersionUID = 1L; - @Expose - public final long responseVersion = responseSerialVersionUID; + @Expose public final long responseVersion = responseSerialVersionUID; - @Expose - private QueryInfo queryInfo = null; // holds all query info + @Expose private QueryInfo queryInfo = null; // holds all query info - @Expose - private TreeMap<Integer,BigInteger> responseElements = null; // encrypted response columns, colNum -> column + @Expose private TreeMap<Integer,BigInteger> responseElements = null; // encrypted response columns, colNum -> column - public Response(QueryInfo queryInfoInput) { + public Response(QueryInfo queryInfoInput) + { queryInfo = queryInfoInput; responseElements = new TreeMap<>(); } - public TreeMap<Integer,BigInteger> getResponseElements() { + public TreeMap<Integer,BigInteger> getResponseElements() + { return responseElements; } - public void setResponseElements(TreeMap<Integer,BigInteger> elements) { + public void setResponseElements(TreeMap<Integer,BigInteger> elements) + { responseElements = elements; } - public QueryInfo getQueryInfo() { + public QueryInfo getQueryInfo() + { return queryInfo; } - public void addElement(int position, BigInteger element) { + public void addElement(int position, BigInteger element) + { responseElements.put(position, element); } } diff --git a/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java b/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java index c842a487..f1588c6f 100644 --- a/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java +++ b/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java @@ -33,24 +33,27 @@ import java.util.TreeMap; /** - * Custom deserializer for Response class for Jackson. + * Custom deserializer for Response class for Gson. */ -public class ResponseDeserializer implements JsonDeserializer<Response> { +public class ResponseDeserializer implements JsonDeserializer<Response> +{ private static final Gson gson = new Gson(); - - @Override - public Response deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException { + @Override public Response deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException + { final JsonObject jsonObject = jsonElement.getAsJsonObject(); long responseVersion = jsonObject.get("responseVersion").getAsLong(); - if (responseVersion != Response.responseSerialVersionUID) { - throw new JsonParseException("\"Attempt to deserialize unsupported query version. Supported: \"\n" + - " + Response.responseSerialVersionUID + \"; Received: \" + responseVersion"); + if (responseVersion != Response.responseSerialVersionUID) + { + throw new JsonParseException("Attempt to deserialize unsupported query version. Supported: " + Response.responseSerialVersionUID + "; Received: " + responseVersion); }
QueryInfo queryInfo = QueryDeserializer.deserializeInfo(jsonObject.get("queryInfo").getAsJsonObject()); Response response = new Response(queryInfo); - TreeMap<Integer,BigInteger> responseElements = gson.fromJson(jsonObject.get("responseElements"), new TypeToken<TreeMap<Integer,BigInteger>>(){}.getType()); + TreeMap<Integer,BigInteger> responseElements = gson.fromJson(jsonObject.get("responseElements"), new TypeToken<TreeMap<Integer,BigInteger>>() + { + }.getType()); response.setResponseElements(responseElements); return response; } diff --git a/src/main/java/org/apache/pirk/schema/query/QuerySchema.java b/src/main/java/org/apache/pirk/schema/query/QuerySchema.java index 294b6e5e..c22c3846 100644 --- a/src/main/java/org/apache/pirk/schema/query/QuerySchema.java +++ b/src/main/java/org/apache/pirk/schema/query/QuerySchema.java @@ -18,6 +18,9 @@ */ package org.apache.pirk.schema.query; +import com.google.gson.annotations.Expose; +import org.apache.pirk.schema.query.filter.DataFilter; + import java.io.Serializable; import java.util.ArrayList; import java.util.HashMap; @@ -25,58 +28,46 @@ import java.util.List; import java.util.Set; -import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; -import com.google.gson.annotations.Expose; -import org.apache.pirk.schema.query.filter.DataFilter; - /** * Class to hold a query schema */ -public class QuerySchema implements Serializable { +public class QuerySchema implements Serializable +{ public static final long querySchemaSerialVersionUID = 1L; - // So that we can serialize the version number in jackson. - @Expose - public final long querySchemaVersion = querySchemaSerialVersionUID; + // So that we can serialize the version number in gson. + @Expose public final long querySchemaVersion = querySchemaSerialVersionUID; // This schema's name. - @Expose - private final String schemaName; + @Expose private final String schemaName; // Name of the data schema associated with this query schema. - @Expose - private final String dataSchemaName; + @Expose private final String dataSchemaName; // Name of element in the dataSchema to be used as the selector. - @Expose - private final String selectorName; + @Expose private final String selectorName; // Element names from the data schema to include in the response. // Order matters for packing/unpacking. - @Expose - private final List<String> elementNames = new ArrayList<>(); + @Expose private final List<String> elementNames = new ArrayList<>(); // Name of class to use in data filtering. - @Expose - private final String filterTypeName; + @Expose private final String filterTypeName; // Instance of the filterTypeName. private final DataFilter filter; // Set of data schema element names on which to apply filtering. - @Expose - private final Set<String> filteredElementNames = new HashSet<>(); + @Expose private final Set<String> filteredElementNames = new HashSet<>(); // Total number of bits to be returned for each data element hit.
- @Expose - private final int dataElementSize; + @Expose private final int dataElementSize; // Additional fields by key,value - @Expose - private final HashMap<String,String> additionalFields = new HashMap<>(); + @Expose private final HashMap<String,String> additionalFields = new HashMap<>(); - public QuerySchema(String schemaName, String dataSchemaName, String selectorName, String filterTypeName, DataFilter filter, int dataElementSize) { + public QuerySchema(String schemaName, String dataSchemaName, String selectorName, String filterTypeName, DataFilter filter, int dataElementSize) + { this.schemaName = schemaName; this.dataSchemaName = dataSchemaName; this.selectorName = selectorName; @@ -90,7 +81,8 @@ public QuerySchema(String schemaName, String dataSchemaName, String selectorName * * @return The schema name. */ - public String getSchemaName() { + public String getSchemaName() + { return schemaName; } @@ -101,7 +93,8 @@ public String getSchemaName() { * * @return The data schema name. */ - public String getDataSchemaName() { + public String getDataSchemaName() + { return dataSchemaName; } @@ -112,7 +105,8 @@ public String getDataSchemaName() { * * @return The ordered list of query element names. */ - public List<String> getElementNames() { + public List<String> getElementNames() + { return elementNames; } @@ -123,11 +117,13 @@ public List<String> getElementNames() { * * @return The element names being selected. */ - public String getSelectorName() { + public String getSelectorName() + { return selectorName; } - public int getDataElementSize() { + public int getDataElementSize() + { return dataElementSize; } @@ -138,7 +134,8 @@ public int getDataElementSize() { * * @return The type name of the query filter, or null if there is no filter defined. */ - public String getFilterTypeName() { + public String getFilterTypeName() + { return filterTypeName; } @@ -147,7 +144,8 @@ public String getFilterTypeName() { * * @return The possibly empty set of data schema element names. */ - public Set<String> getFilteredElementNames() { + public Set<String> getFilteredElementNames() + { return filteredElementNames; } @@ -158,7 +156,8 @@ public Set<String> getFilteredElementNames() { * * @return The data filter, or null if no filter has been specified for this query.
*/ - public DataFilter getFilter() { + public DataFilter getFilter() + { return filter; } @@ -169,7 +168,8 @@ public DataFilter getFilter() { * * @return The additionalFields HashMap */ - public HashMap<String,String> getAdditionalFields() { + public HashMap<String,String> getAdditionalFields() + { return additionalFields; } @@ -179,7 +179,8 @@ public HashMap<String,String> getAdditionalFields() { * @param key * @return value from the additionalFields mapping corresponding to the given key */ - public String getAdditionalFieldValue(String key) { + public String getAdditionalFieldValue(String key) + { return additionalFields.get(key); } } diff --git a/src/main/java/org/apache/pirk/serialization/JsonSerializer.java b/src/main/java/org/apache/pirk/serialization/JsonSerializer.java index cab1e234..ea4cabd5 100644 --- a/src/main/java/org/apache/pirk/serialization/JsonSerializer.java +++ b/src/main/java/org/apache/pirk/serialization/JsonSerializer.java @@ -18,7 +18,6 @@ */ package org.apache.pirk.serialization; -import com.fasterxml.jackson.databind.ObjectMapper; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import org.apache.pirk.querier.wideskies.Querier; @@ -36,15 +35,11 @@ import java.io.Reader; import java.io.Writer; -public class JsonSerializer extends SerializationService { - public static final Gson gson = new GsonBuilder() - .registerTypeAdapter(Response.class, new ResponseDeserializer()) - .registerTypeAdapter(Query.class, new QueryDeserializer()) - .registerTypeAdapter(Querier.class, new QuerierDeserializer()) - .setPrettyPrinting() - .excludeFieldsWithoutExposeAnnotation() - .serializeNulls() - .create(); +public class JsonSerializer extends SerializationService +{ + public static final Gson gson = new GsonBuilder().registerTypeAdapter(Response.class, new ResponseDeserializer()) + .registerTypeAdapter(Query.class, new QueryDeserializer()).registerTypeAdapter(Querier.class, new QuerierDeserializer()).setPrettyPrinting() + .excludeFieldsWithoutExposeAnnotation().serializeNulls().create(); /** * Stores the given object on the output stream as JSON. @@ -53,8 +48,8 @@ public class JsonSerializer extends SerializationService { * @param obj The object to be stored. * @throws IOException If a problem occurs storing the object on the given stream. */ - @Override - public void write(OutputStream outputStream, Storable obj) throws IOException { + @Override public void write(OutputStream outputStream, Storable obj) throws IOException + { Writer writer = new OutputStreamWriter(outputStream); gson.toJson(obj, obj.getClass(), writer); writer.close(); @@ -67,8 +62,8 @@ public void write(OutputStream outputStream, Storable obj) throws IOException { * @param classType The type of object being retrieved. * @throws IOException If a problem occurs reading the object from the stream. */ - @Override - public <T> T read(InputStream inputStream, Class<T> classType) throws IOException { + @Override public <T> T read(InputStream inputStream, Class<T> classType) throws IOException + { Reader reader = new InputStreamReader(inputStream); return gson.fromJson(reader, classType); diff --git a/src/main/java/org/apache/pirk/serialization/SerializationService.java b/src/main/java/org/apache/pirk/serialization/SerializationService.java index 87e3dd45..01dbdcd4 100644 --- a/src/main/java/org/apache/pirk/serialization/SerializationService.java +++ b/src/main/java/org/apache/pirk/serialization/SerializationService.java @@ -26,17 +26,21 @@ /** * Ability to read and write objects to/from a stream.
*/ -public abstract class SerializationService { +public abstract class SerializationService +{ public abstract <T> T read(InputStream stream, Class<T> type) throws IOException; public abstract void write(OutputStream w, Storable obj) throws IOException; - public byte[] toBytes(Storable obj) { + public byte[] toBytes(Storable obj) + { ByteArrayOutputStream bos = new ByteArrayOutputStream(); - try { + try + { write(bos, obj); - } catch (IOException e) { + } catch (IOException e) + { throw new RuntimeException(e); }