Fix deserialization (#220)
* Fix deserialization

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Remove compiler options

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Removing spark-13 from dependabot

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

---------

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>
harshavamsi committed May 18, 2023
1 parent 1af04a4 commit 54b73cf
Showing 12 changed files with 47 additions and 24 deletions.
8 changes: 0 additions & 8 deletions .github/dependabot.yml
@@ -55,14 +55,6 @@ updates:
     labels:
       - "dependabot"
       - "dependencies"
-  - directory: /spark/sql-13/
-    open-pull-requests-limit: 1
-    package-ecosystem: gradle
-    schedule:
-      interval: weekly
-    labels:
-      - "dependabot"
-      - "dependencies"
   - directory: /spark/sql-20/
     open-pull-requests-limit: 1
     package-ecosystem: gradle
@@ -1,4 +1,4 @@
-name: Build
+name: Build and test hive
 
 on: [push, pull_request]
 
@@ -1,4 +1,4 @@
-name: Build
+name: Build and test MR
 
 on: [push, pull_request]
 
@@ -1,4 +1,4 @@
-name: Build
+name: Build and test spark
 
 on: [push, pull_request]
 
@@ -1,4 +1,4 @@
-name: Build
+name: Build and test spark 20
 
 on: [push, pull_request]
 
@@ -1,4 +1,4 @@
-name: Build
+name: Build and test spark 30
 
 on: [push, pull_request]
 
2 changes: 1 addition & 1 deletion .github/workflows/precommit.yml
@@ -1,5 +1,5 @@
 name: Gradle Precommit
-on: [pull_request]
+on: [push,pull_request]
 
 jobs:
   precommit:
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -12,6 +12,8 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
 ### Removed
 ### Fixed
 - Restored skipped push down tests ([125](https://github.com/opensearch-project/opensearch-hadoop/pull/125))
+- Fixed Spark failures caused by broken deserialization logic ([219](https://github.com/opensearch-project/opensearch-hadoop/pull/219))
+
 ### Security
 ### Dependencies
 - Bumps `com.google.guava:guava` from 16.0.1 to 23.0
@@ -31,6 +31,7 @@
 import java.io.Serializable;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.Objects;
 
 import org.opensearch.hadoop.serialization.FieldType;
 
@@ -61,20 +62,35 @@ public Field(String name, FieldType type, Collection<Field> properties) {
         this.properties = properties;
     }
 
+    @JsonProperty("properties")
     public Field[] properties() {
         return properties;
     }
 
+    @JsonProperty("type")
     public FieldType type() {
         return type;
     }
 
+    @JsonProperty("name")
     public String name() {
         return name;
     }
 
     @Override
     public String toString() {
-        return String.format("%s=%s", name, ((type == FieldType.OBJECT || type == FieldType.NESTED) ? Arrays.toString(properties) : type));
+        return String.format("%s=%s", name,
+                ((type == FieldType.OBJECT || type == FieldType.NESTED) ? Arrays.toString(properties) : type));
     }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o instanceof Field == false) {
+            return false;
+        }
+        Field other = (Field) o;
+        return Objects.equals(this.name, other.name) &&
+                Objects.equals(this.type, other.type) &&
+                Objects.deepEquals(this.properties, other.properties);
+    }
 }
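The @JsonProperty annotations and the value-based equals above are what let these mapping DTOs round-trip through Jackson and still compare equal afterwards. The standalone sketch below illustrates the same pattern with the plain com.fasterxml.jackson API rather than the shaded com.amazonaws.thirdparty.jackson package used in this repository; SimpleField and RoundTripSketch are illustrative names, not classes from this codebase.

import java.util.Objects;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;

public class RoundTripSketch {

    // Minimal stand-in for a mapping field: a name plus a type, both exposed to Jackson.
    public static class SimpleField {
        private final String name;
        private final String type;

        @JsonCreator
        public SimpleField(@JsonProperty("name") String name, @JsonProperty("type") String type) {
            this.name = name;
            this.type = type;
        }

        @JsonProperty("name")
        public String name() { return name; }

        @JsonProperty("type")
        public String type() { return type; }

        @Override
        public boolean equals(Object o) {
            if (o instanceof SimpleField == false) {
                return false;
            }
            SimpleField other = (SimpleField) o;
            return Objects.equals(name, other.name) && Objects.equals(type, other.type);
        }

        @Override
        public int hashCode() {
            return Objects.hash(name, type);
        }
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        SimpleField original = new SimpleField("title", "keyword");
        String json = mapper.writeValueAsString(original);   // e.g. {"name":"title","type":"keyword"}
        SimpleField restored = mapper.readValue(json, SimpleField.class);
        // Without a value-based equals, this check would fall back to reference identity
        // and report false even though both objects carry identical data.
        System.out.println(original.equals(restored));        // true
    }
}

The sketch also defines hashCode to keep it consistent with equals; the diff above only adds equals, which is enough for direct comparisons but worth keeping in mind if these objects ever land in hash-based collections.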
@@ -44,6 +44,8 @@
 import com.amazonaws.thirdparty.jackson.annotation.JsonCreator;
 import com.amazonaws.thirdparty.jackson.annotation.JsonProperty;
 
+import java.util.Objects;
+
 /**
  * A mapping has a name and a collection of fields.
  */
@@ -170,4 +172,15 @@ public String toString() {
             return String.format("%s=%s", index, Arrays.toString(fields));
         }
     }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o instanceof Mapping == false) {
+            return false;
+        }
+        Mapping other = (Mapping) o;
+        return Objects.equals(this.index, other.index) &&
+                Objects.equals(this.type, other.type) &&
+                Objects.deepEquals(this.fields, other.fields);
+    }
 }
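Both new equals implementations use Objects.deepEquals for their array-valued members (properties and fields). A tiny standalone illustration of why deepEquals is used there instead of Objects.equals; the class name is made up for the example.

import java.util.Objects;

public class DeepEqualsSketch {
    public static void main(String[] args) {
        String[] a = { "x", "y" };
        String[] b = { "x", "y" };
        // Arrays inherit Object's identity-based equals, so Objects.equals compares references.
        System.out.println(Objects.equals(a, b));     // false
        // Objects.deepEquals delegates to Arrays.deepEquals for arrays and compares contents.
        System.out.println(Objects.deepEquals(a, b)); // true
    }
}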
14 changes: 8 additions & 6 deletions mr/src/main/java/org/opensearch/hadoop/util/IOUtils.java
@@ -33,6 +33,7 @@
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.ObjectOutputStream;
 import java.io.Serializable;
 import java.io.StringReader;
 import java.io.StringWriter;
@@ -43,25 +44,26 @@
 import java.net.URL;
 import java.util.Properties;
 
-import javax.xml.bind.DatatypeConverter;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.opensearch.hadoop.OpenSearchHadoopIllegalArgumentException;
 import org.opensearch.hadoop.serialization.OpenSearchHadoopSerializationException;
 
 import com.amazonaws.thirdparty.jackson.core.JsonProcessingException;
 import com.amazonaws.thirdparty.jackson.databind.DeserializationFeature;
 import com.amazonaws.thirdparty.jackson.databind.ObjectMapper;
 import com.amazonaws.thirdparty.jackson.databind.SerializationFeature;
+
+
+
 /**
  * Utility class used internally for the Pig support.
  */
 public abstract class IOUtils {
 
     private final static Field BYTE_ARRAY_BUFFER;
-    static final ObjectMapper mapper = new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES,
-            false).configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
+    static final ObjectMapper mapper = new ObjectMapper().configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
 
     private static final Log log = LogFactory.getLog(IOUtils.class);
     private final boolean trace = log.isTraceEnabled();
@@ -89,14 +91,14 @@ public static <T> T deserializeFromBase64(String data, Class<T> clazz){
         if (!StringUtils.hasLength(data)) {
             return null;
         }
-        Object object = null;
+        final T object;
         try {
             object = mapper.readValue(data, clazz);
         } catch (JsonProcessingException e) {
-            throw new OpenSearchHadoopSerializationException("Cannot deserialize object " + object, e);
+            throw new OpenSearchHadoopSerializationException("Cannot deserialize string " + data, e);
 
         }
-        return (T) object;
+        return object;
     }
 
     public static String propsToString(Properties props) {
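Putting the IOUtils changes together: the mapper keeps FAIL_ON_EMPTY_BEANS disabled, and deserializeFromBase64 now reports the input string when Jackson cannot parse it instead of interpolating the still-null local object. The sketch below approximates that flow with the unshaded Jackson API; SerdeSketch, deserialize, serialize and the IllegalStateException are stand-ins, not the repository's actual IOUtils API.

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;

import java.util.Map;

public class SerdeSketch {

    // Mirrors the mapper configuration visible in the hunk above: empty beans are tolerated.
    static final ObjectMapper MAPPER =
            new ObjectMapper().configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);

    // Approximation of the fixed deserialization path: empty input yields null,
    // and parse failures are rethrown with the offending string in the message.
    static <T> T deserialize(String data, Class<T> clazz) {
        if (data == null || data.isEmpty()) {
            return null;
        }
        try {
            return MAPPER.readValue(data, clazz);
        } catch (JsonProcessingException e) {
            throw new IllegalStateException("Cannot deserialize string " + data, e);
        }
    }

    // Hypothetical serialize counterpart; the matching method is not part of this diff.
    static String serialize(Object value) throws JsonProcessingException {
        return MAPPER.writeValueAsString(value);
    }

    public static void main(String[] args) throws Exception {
        String json = serialize(Map.of("name", "title", "type", "keyword"));
        Map<?, ?> restored = deserialize(json, Map.class);
        System.out.println(restored.get("type")); // keyword
    }
}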
4 changes: 1 addition & 3 deletions spark/sql-30/build.gradle
@@ -189,6 +189,4 @@ sparkVariants {
             }
         }
     }
-}
-sourceCompatibility = JavaVersion.VERSION_17
-targetCompatibility = JavaVersion.VERSION_17
+}
