Skip to content

Commit

Permalink
[Refactor] MR classnames and logging (#12)
Browse files Browse the repository at this point in the history
Refactors MR class names from legacy to opensearch namespace. Logging included.

Signed-off-by: Nicholas Walter Knize <nknize@apache.org>
  • Loading branch information
nknize committed Oct 25, 2022
1 parent 7615b68 commit 3ace319
Show file tree
Hide file tree
Showing 181 changed files with 812 additions and 812 deletions.
24 changes: 12 additions & 12 deletions hive/src/itest/resources/log4j2.properties
Original file line number Diff line number Diff line change
Expand Up @@ -27,16 +27,16 @@ logger.Hive-ExecMapper.level=warn
# ES-Hadoop logging
logger.Hive-HiveMetaStore.name=org.apache.hadoop.hive.metastore.HiveMetaStore
logger.Hive-HiveMetaStore.level=warn
#logger.ESH-Hadoop-Package.name=org.elasticsearch.hadoop
#logger.ESH-Hadoop-Package.level=trace
#logger.ESH-MR-Package.name=org.elasticsearch.hadoop.mr
#logger.ESH-MR-Package.level=trace
#logger.ESH-Rest-Package.name=org.elasticsearch.hadoop.rest
#logger.ESH-Rest-Package.level=trace
#logger.ESH-CommonsHTTP-Package.name=org.elasticsearch.hadoop.rest.commonshttp
#logger.ESH-CommonsHTTP-Package.level=trace
#logger.ESH-Serialization-Package.name=org.elasticsearch.hadoop.serialization
#logger.ESH-Serialization-Package.level=trace
#logger.OpenSearch-Hadoop-Package.name=org.opensearch.hadoop
#logger.OpenSearch-Hadoop-Package.level=trace
#logger.OpenSearch-MR-Package.name=org.opensearch.hadoop.mr
#logger.OpenSearch-MR-Package.level=trace
#logger.OpenSearch-Rest-Package.name=org.opensearch.hadoop.rest
#logger.OpenSearch-Rest-Package.level=trace
#logger.OpenSearch-CommonsHTTP-Package.name=org.opensearch.hadoop.rest.commonshttp
#logger.OpenSearch-CommonsHTTP-Package.level=trace
#logger.OpenSearch-Serialization-Package.name=org.opensearch.hadoop.serialization
#logger.OpenSearch-Serialization-Package.level=trace

# Connection Tracking
#logger.PooledConnections.name=org.opensearch.hadoop.pooling.rest.TransportPool
Expand All @@ -47,5 +47,5 @@ logger.Hive-HiveMetaStore.level=warn
#logger.HTTPConnections.level=trace

# Integration testing
#logger.ES-Hive-IntegrationTests.name=org.elasticsearch.hadoop.integration.hive
#logger.ES-Hive-IntegrationTests.level=info
#logger.OpenSearch-Hive-IntegrationTests.name=org.opensearch.hadoop.integration.hive
#logger.OpenSearch-Hive-IntegrationTests.level=info
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.Progressable;
import org.opensearch.hadoop.EsHadoopIllegalArgumentException;
import org.opensearch.hadoop.OpenSearchHadoopIllegalArgumentException;
import org.opensearch.hadoop.cfg.HadoopSettingsManager;
import org.opensearch.hadoop.cfg.Settings;
import org.opensearch.hadoop.mr.EsOutputFormat;
Expand Down Expand Up @@ -65,7 +65,7 @@ public void write(Writable w) throws IOException {
}
else {
// we could allow custom BAs
throw new EsHadoopIllegalArgumentException(String.format("Unexpected type; expected [%s], received [%s]", HiveBytesArrayWritable.class, w));
throw new OpenSearchHadoopIllegalArgumentException(String.format("Unexpected type; expected [%s], received [%s]", HiveBytesArrayWritable.class, w));
}
}

Expand Down
4 changes: 2 additions & 2 deletions hive/src/main/java/org/elasticsearch/hadoop/hive/EsSerDe.java
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.opensearch.hadoop.EsHadoopIllegalStateException;
import org.opensearch.hadoop.OpenSearchHadoopIllegalStateException;
import org.opensearch.hadoop.cfg.CompositeSettings;
import org.opensearch.hadoop.cfg.HadoopSettingsManager;
import org.opensearch.hadoop.cfg.Settings;
Expand Down Expand Up @@ -138,7 +138,7 @@ private Writable wrapJsonData(Writable blob) {
mapContainer.put(jsonFieldName, blob);
return (Writable) mapContainer;
default:
throw new EsHadoopIllegalStateException("Could not correctly wrap JSON data for structural type " + structTypeInfo.getCategory());
throw new OpenSearchHadoopIllegalStateException("Could not correctly wrap JSON data for structural type " + structTypeInfo.getCategory());
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,8 +32,8 @@
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputFormat;
import org.opensearch.hadoop.EsHadoopException;
import org.opensearch.hadoop.EsHadoopIllegalArgumentException;
import org.opensearch.hadoop.OpenSearchHadoopException;
import org.opensearch.hadoop.OpenSearchHadoopIllegalArgumentException;
import org.opensearch.hadoop.cfg.CompositeSettings;
import org.opensearch.hadoop.cfg.ConfigurationOptions;
import org.opensearch.hadoop.cfg.HadoopSettingsManager;
Expand Down Expand Up @@ -164,8 +164,8 @@ public void configureJobConf(TableDesc tableDesc, JobConf jobConf) {
}
// Add the token to the job
TokenUtil.addTokenForJobConf(bootstrap, clusterInfo.getClusterName(), user, jobConf);
} catch (EsHadoopException ex) {
throw new EsHadoopIllegalArgumentException(String.format("Cannot detect ES version - "
} catch (OpenSearchHadoopException ex) {
throw new OpenSearchHadoopIllegalArgumentException(String.format("Cannot detect ES version - "
+ "typically this happens if the network/Elasticsearch cluster is not accessible or when targeting "
+ "a WAN/Cloud instance without the proper setting '%s'", ConfigurationOptions.OPENSEARCH_NODES_WAN_ONLY), ex);
} finally {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.NullWritable;
import org.opensearch.hadoop.EsHadoopIllegalArgumentException;
import org.opensearch.hadoop.OpenSearchHadoopIllegalArgumentException;
import org.opensearch.hadoop.cfg.Settings;
import org.opensearch.hadoop.serialization.field.ConstantFieldExtractor;
import org.opensearch.hadoop.serialization.field.FieldExtractor;
Expand Down Expand Up @@ -82,7 +82,7 @@ public void processField(Settings settings, List<String> fl) {
}

if (!settings.getInputAsJson() && fl.isEmpty()) {
throw new EsHadoopIllegalArgumentException(
throw new OpenSearchHadoopIllegalArgumentException(
String.format(
"Cannot find field [%s] in mapping %s ; maybe a value was specified without '<','>' or there is a typo?",
fl, columnNames.keySet()));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.opensearch.hadoop.EsHadoopIllegalArgumentException;
import org.opensearch.hadoop.OpenSearchHadoopIllegalArgumentException;
import org.opensearch.hadoop.cfg.Settings;
import org.elasticsearch.hadoop.hive.HiveFieldExtractor;
import org.elasticsearch.hadoop.serialization.HiveTypeToJsonTest.MyHiveType;
Expand Down Expand Up @@ -63,7 +63,7 @@ public void testHiveFieldExtractorNestedNotFound() throws Exception {
assertEquals(FieldExtractor.NOT_FOUND, extract("key", m));
}

@Test(expected = EsHadoopIllegalArgumentException.class)
@Test(expected = OpenSearchHadoopIllegalArgumentException.class)
public void testHiveFieldExtractorNested() throws Exception {
List<String> nested = Arrays.asList(new String[] { "bar", "bor" });
List<TypeInfo> types = Arrays.asList(new TypeInfo[] { stringTypeInfo, intTypeInfo });
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.opensearch.hadoop.EsHadoopIllegalArgumentException;
import org.opensearch.hadoop.OpenSearchHadoopIllegalArgumentException;
import org.elasticsearch.hadoop.HdpBootstrap;
import org.elasticsearch.hadoop.Stream;
import org.opensearch.hadoop.cfg.ConfigurationOptions;
Expand Down Expand Up @@ -210,7 +210,7 @@ public void testSaveWithIngest() throws Exception {
runJob(conf);
}

@Test(expected = EsHadoopIllegalArgumentException.class)
@Test(expected = OpenSearchHadoopIllegalArgumentException.class)
public void testUpdateWithoutId() throws Exception {
Configuration conf = createConf();
conf.set(ConfigurationOptions.ES_WRITE_OPERATION, "update");
Expand Down Expand Up @@ -355,7 +355,7 @@ public void testUpsertParamJsonScript() throws Exception {
runJob(conf);
}

@Test(expected = EsHadoopIllegalArgumentException.class)
@Test(expected = OpenSearchHadoopIllegalArgumentException.class)
public void testIndexAutoCreateDisabled() throws Exception {
Configuration conf = createConf();
conf.set(ConfigurationOptions.ES_RESOURCE, resource("mrnewapi-non-existing", "data", clusterInfo.getMajorVersion()));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.opensearch.hadoop.EsHadoopIllegalArgumentException;
import org.opensearch.hadoop.OpenSearchHadoopIllegalArgumentException;
import org.elasticsearch.hadoop.HdpBootstrap;
import org.elasticsearch.hadoop.Stream;
import org.opensearch.hadoop.cfg.ConfigurationOptions;
Expand Down Expand Up @@ -287,7 +287,7 @@ public void testSaveWithIngest() throws Exception {
}


@Test(expected = EsHadoopIllegalArgumentException.class)
@Test(expected = OpenSearchHadoopIllegalArgumentException.class)
public void testUpdateWithoutId() throws Exception {
JobConf conf = createJobConf();
conf.set(ConfigurationOptions.ES_WRITE_OPERATION, "upsert");
Expand Down Expand Up @@ -509,7 +509,7 @@ public void testUpsertOnlyParamScriptWithArrayOnArrayField() throws Exception {
}


@Test(expected = EsHadoopIllegalArgumentException.class)
@Test(expected = OpenSearchHadoopIllegalArgumentException.class)
public void testIndexAutoCreateDisabled() throws Exception {
JobConf conf = createJobConf();
conf.set(ConfigurationOptions.ES_RESOURCE, resource("mroldapi-non-existing", "data", clusterInfo.getMajorVersion()));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.opensearch.hadoop.EsHadoopIllegalArgumentException;
import org.opensearch.hadoop.OpenSearchHadoopIllegalArgumentException;
import org.opensearch.hadoop.cfg.ConfigurationOptions;
import org.opensearch.hadoop.cfg.Settings;
import org.elasticsearch.hadoop.rest.RestUtils;
Expand Down Expand Up @@ -184,7 +184,7 @@ public void testCreatePartitionWriterWithSingleIndex() throws Exception {
writer.close();
}

@Test(expected = EsHadoopIllegalArgumentException.class)
@Test(expected = OpenSearchHadoopIllegalArgumentException.class)
public void testCreatePartitionWriterWithMultipleIndices() throws Exception {
Settings settings = new TestSettings();
settings.setProperty(ConfigurationOptions.ES_RESOURCE, "alias_index1,alias_index2/doc");
Expand Down Expand Up @@ -217,7 +217,7 @@ public void testCreatePartitionWriterWithAliasUsingSingleIndex() throws Exceptio
writer.close();
}

@Test(expected = EsHadoopIllegalArgumentException.class)
@Test(expected = OpenSearchHadoopIllegalArgumentException.class)
public void testCreatePartitionWriterWithAliasUsingMultipleIndices() throws Exception {
RestUtils.postData("_aliases", ("{" +
"\"actions\": [" +
Expand Down Expand Up @@ -274,7 +274,7 @@ public void testCreatePartitionWriterWithWritableAliasUsingMultipleIndices() thr
writer.close();
}

@Test(expected = EsHadoopIllegalArgumentException.class)
@Test(expected = OpenSearchHadoopIllegalArgumentException.class)
public void testCreatePartitionWriterWithMultipleAliases() throws Exception {
RestUtils.postData("_aliases", ("{" +
"\"actions\": [" +
Expand Down Expand Up @@ -315,7 +315,7 @@ public void testCreatePartitionWriterWithMultipleAliases() throws Exception {
fail("Multiple alias names are not supported. Only singular aliases.");
}

@Test(expected = EsHadoopIllegalArgumentException.class)
@Test(expected = OpenSearchHadoopIllegalArgumentException.class)
public void testCreatePartitionWriterWithMultipleWritableAliases() throws Exception {
RestUtils.postData("_aliases", ("{" +
"\"actions\": [" +
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,8 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.opensearch.hadoop.EsHadoopIllegalStateException;
import org.opensearch.hadoop.rest.EsHadoopTransportException;
import org.opensearch.hadoop.OpenSearchHadoopIllegalStateException;
import org.opensearch.hadoop.rest.OpenSearchHadoopTransportException;
import org.elasticsearch.hadoop.security.UgiUtil;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSCredential;
Expand Down Expand Up @@ -58,7 +58,7 @@ public void resetUGI() {
UgiUtil.resetUGI();
}

@Test(expected = EsHadoopIllegalStateException.class)
@Test(expected = OpenSearchHadoopIllegalStateException.class)
public void testPreemptNegotiatorWithChallengeFails() throws IOException, InterruptedException {
// Configure logins
Configuration configuration = new Configuration();
Expand Down Expand Up @@ -113,7 +113,7 @@ public String run() throws Exception {
fail("Defective token given to Negotiator and accepted.");
}

@Test(expected = EsHadoopTransportException.class)
@Test(expected = OpenSearchHadoopTransportException.class)
public void testMissingNegotiationTokenFails() throws IOException, GSSException, InterruptedException {
// Configure logins
Configuration configuration = new Configuration();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,21 +21,21 @@
/**
 * Base class for all OpenSearch Hadoop exceptions.
 *
 * <p>Unchecked by design: connector failures (configuration, transport, serialization)
 * are generally not recoverable by the caller. Mirrors the four standard
 * {@link RuntimeException} constructors so subclasses can chain causes.
 */
public class OpenSearchHadoopException extends RuntimeException {

    /** Creates an exception with no message and no cause. */
    public OpenSearchHadoopException() {
        super();
    }

    /**
     * Creates an exception with a detail message and an underlying cause.
     *
     * @param message human-readable description of the failure
     * @param cause the underlying throwable that triggered this exception
     */
    public OpenSearchHadoopException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * Creates an exception with a detail message only.
     *
     * @param message human-readable description of the failure
     */
    public OpenSearchHadoopException(String message) {
        super(message);
    }

    /**
     * Creates an exception wrapping an underlying cause.
     *
     * @param cause the underlying throwable that triggered this exception
     */
    public OpenSearchHadoopException(Throwable cause) {
        super(cause);
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,21 +18,22 @@
*/
package org.opensearch.hadoop;

public class EsHadoopIllegalStateException extends EsHadoopException {

public EsHadoopIllegalStateException() {
public class OpenSearchHadoopIllegalArgumentException extends OpenSearchHadoopException {

public OpenSearchHadoopIllegalArgumentException() {
super();
}

public EsHadoopIllegalStateException(String message, Throwable cause) {
public OpenSearchHadoopIllegalArgumentException(String message, Throwable cause) {
super(message, cause);
}

public EsHadoopIllegalStateException(String message) {
public OpenSearchHadoopIllegalArgumentException(String message) {
super(message);
}

public EsHadoopIllegalStateException(Throwable cause) {
public OpenSearchHadoopIllegalArgumentException(Throwable cause) {
super(cause);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,22 +18,21 @@
*/
package org.opensearch.hadoop;

public class OpenSearchHadoopIllegalStateException extends OpenSearchHadoopException {

public class EsHadoopIllegalArgumentException extends EsHadoopException {

public EsHadoopIllegalArgumentException() {
public OpenSearchHadoopIllegalStateException() {
super();
}

public EsHadoopIllegalArgumentException(String message, Throwable cause) {
public OpenSearchHadoopIllegalStateException(String message, Throwable cause) {
super(message, cause);
}

public EsHadoopIllegalArgumentException(String message) {
public OpenSearchHadoopIllegalStateException(String message) {
super(message);
}

public EsHadoopIllegalArgumentException(Throwable cause) {
public OpenSearchHadoopIllegalStateException(Throwable cause) {
super(cause);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,21 +23,21 @@
* Denotes an operation that is not allowed to be performed, often due to the feature support of
* the version of Elasticsearch being used.
*/
public class EsHadoopUnsupportedOperationException extends EsHadoopException {
public class OpenSearchHadoopUnsupportedOperationException extends OpenSearchHadoopException {

public EsHadoopUnsupportedOperationException() {
public OpenSearchHadoopUnsupportedOperationException() {
super();
}

public EsHadoopUnsupportedOperationException(String message, Throwable cause) {
public OpenSearchHadoopUnsupportedOperationException(String message, Throwable cause) {
super(message, cause);
}

public EsHadoopUnsupportedOperationException(String message) {
public OpenSearchHadoopUnsupportedOperationException(String message) {
super(message);
}

public EsHadoopUnsupportedOperationException(Throwable cause) {
public OpenSearchHadoopUnsupportedOperationException(Throwable cause) {
super(cause);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.opensearch.hadoop.EsHadoopIllegalArgumentException;
import org.opensearch.hadoop.OpenSearchHadoopIllegalArgumentException;

/**
* Factory for loading settings based on various configuration objects, such as Properties or Hadoop configuration.
Expand Down Expand Up @@ -55,7 +55,7 @@ public static Settings loadFrom(Object configuration) {
if (HADOOP_CONFIGURATION != null && HADOOP_CONFIGURATION.isInstance(configuration)) {
return FromHadoopConfiguration.create(configuration);
}
throw new EsHadoopIllegalArgumentException("Don't know how to create Settings from configuration " + configuration);
throw new OpenSearchHadoopIllegalArgumentException("Don't know how to create Settings from configuration " + configuration);
}

public Settings load(Object configuration) {
Expand Down

0 comments on commit 3ace319

Please sign in to comment.