[ES] Upgrade to ES 5.0.0-alpha1
relates #738
costin committed Apr 13, 2016
1 parent bbe64b6 commit bd200ba
Showing 11 changed files with 64 additions and 44 deletions.
4 changes: 3 additions & 1 deletion build.gradle
@@ -22,6 +22,8 @@ allprojects {
// Hive depends on JDO ec2 missing from Maven Central
maven { url "http://www.datanucleus.org/downloads/maven2" }
maven { url "http://oss.sonatype.org/content/groups/public/" }
// Lucene snapshot
maven { url "http://download.elastic.co/lucenesnapshots/f0aa4fc" }
}

apply plugin: "java"
@@ -201,7 +203,7 @@ allprojects { project ->
testRuntime "org.slf4j:slf4j-log4j12:1.7.6"
testRuntime "log4j:log4j:$log4jVersion"
testRuntime "net.java.dev.jna:jna:4.2.2"
testCompile "org.elasticsearch.module:lang-groovy:$esVersion"
//testCompile "org.elasticsearch.module:lang-groovy:$esVersion"
//testRuntime "org.codehaus.groovy:groovy-all:$groovyVersion"
testRuntime "com.spatial4j:spatial4j:0.5"
testRuntime "com.vividsolutions:jts:1.13"
7 changes: 3 additions & 4 deletions gradle.properties
@@ -35,11 +35,10 @@ hamcrestVersion = 1.3
# Hive 0.11 finally updated antlr to 3.4 so there are no more conflicts with Pig
antlrVersion = 3.4
thriftVersion = 0.5.0
#esVersion = 1.7.1
esVersion = 2.3.1
esVersionStable = 2.3.1
esVersion = 5.0.0-alpha1
esVersionStable = 5.0.0-alpha1

luceneVersion = 5.3.1
luceneVersion = 6.0.0-snapshot-f0aa4fc
groovyVersion = 2.4.4

# --------------------
15 changes: 9 additions & 6 deletions mr/src/itest/java/org/elasticsearch/hadoop/EsEmbeddedServer.java
@@ -18,19 +18,18 @@
*/
package org.elasticsearch.hadoop;

import java.util.Arrays;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Properties;

import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.hadoop.util.StringUtils;
import org.elasticsearch.hadoop.util.StringUtils.IpAndPort;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeBuilder;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.groovy.GroovyPlugin;

public class EsEmbeddedServer {
private static class PluginConfigurableNode extends Node {
@@ -61,8 +60,12 @@ public EsEmbeddedServer(String clusterName, String homePath, String dataPath, St
props.setProperty("script.inline", "true");
props.setProperty("script.indexed", "true");

Settings settings = NodeBuilder.nodeBuilder().local(false).client(false).settings(Settings.settingsBuilder().put(props).build()).clusterName(clusterName).getSettings().build();
Collection plugins = Arrays.asList(GroovyPlugin.class);
props.setProperty("node.local", "false");
props.setProperty("node.client", "false");
props.setProperty("cluster.name", clusterName);

Settings settings = Settings.settingsBuilder().put(props).build();
Collection<Class<? extends Plugin>> plugins = Collections.emptyList(); // Arrays.asList(GroovyPlugin.class);
node = new PluginConfigurableNode(settings, plugins);
}

@@ -75,7 +78,7 @@ public void start() {
ipAndPort = StringUtils.parseIpAddress(value);
}

public void stop() {
public void stop() throws IOException {
node.close();
}

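For orientation, the node lifecycle after this change looks roughly as follows. This is a minimal sketch, not code from the commit: it mirrors only the APIs visible in the hunks above (Settings.settingsBuilder(), the Node(Settings) constructor replacing the removed NodeBuilder, and close() declaring IOException); the property values are illustrative.

import java.io.IOException;
import java.util.Properties;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;

public class EmbeddedNodeSketch {
    public static void main(String[] args) throws IOException {
        Properties props = new Properties();
        props.setProperty("path.home", "build/es.home");       // illustrative path
        props.setProperty("cluster.name", "es-hadoop-sketch"); // illustrative name
        props.setProperty("node.local", "false");

        // NodeBuilder is gone in 5.0.0-alpha1; settings are built directly
        // and handed to the Node constructor, as in the diff above.
        Settings settings = Settings.settingsBuilder().put(props).build();

        Node node = new Node(settings);
        node.start();
        // ... exercise the node ...
        node.close(); // close() now declares IOException, which is why stop() throws it
    }
}
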
10 changes: 7 additions & 3 deletions mr/src/itest/java/org/elasticsearch/hadoop/LocalEs.java
@@ -41,7 +41,7 @@ public class LocalEs extends ExternalResource {
public static final String DATA_PORTS_SLAVE = "9700-9799";
public static final String TRANSPORT_PORTS_SLAVE = "9800-9899";

private boolean USE_SLAVE = false;
private final boolean USE_SLAVE = false;
private boolean disabled = false;

@Override
@@ -65,7 +65,7 @@ protected void before() throws Throwable {

// delete data path to start fresh
TestUtils.delete(new File(ES_DATA_PATH));

if (master == null) {
System.out.println("Starting Elasticsearch Master...");
master = new EsEmbeddedServer(CLUSTER_NAME, ES_HOME_PATH, ES_DATA_PATH, DATA_PORTS, TRANSPORT_PORTS, USE_SLAVE);
@@ -89,7 +89,11 @@ protected void after() {
if (master != null) {
if (USE_SLAVE && slave != null) {
System.out.println("Stopping Elasticsearch Slave...");
slave.stop();
try {
slave.stop();
} catch (Exception ex) {
// ignore
}
slave = null;
}

@@ -21,6 +21,8 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
@@ -35,10 +37,6 @@
import org.elasticsearch.hadoop.serialization.dto.Shard;
import org.junit.Test;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

@@ -52,8 +50,21 @@ public void testLargeIndexShardList() throws Exception {
Map<String, Node> nodes = readNodes();
List<List<Map<String, Object>>> targetShards = readShards();
Map<Shard, Node> result = ShardSorter.find(targetShards, nodes, null);
Set<Integer> index1Shards = Sets.newHashSet(0, 1, 2, 3, 4);
Set<Integer> index2Shards = Sets.newHashSet(0, 1, 2, 3, 4);

Set<Integer> index1Shards = new HashSet<Integer>();
index1Shards.add(0);
index1Shards.add(1);
index1Shards.add(2);
index1Shards.add(3);
index1Shards.add(4);

Set<Integer> index2Shards = new HashSet<Integer>();
index2Shards.add(0);
index2Shards.add(1);
index2Shards.add(2);
index2Shards.add(3);
index2Shards.add(4);

for(Shard shard : result.keySet()) {
String index = shard.getIndex();
if(index.equals("index1")) {
@@ -71,7 +82,7 @@ private Map<String, Node> readNodes() throws java.io.IOException {
new TypeReference<Map<String, Object>>() {
});
Map<String, Object> rawNodes = (Map<String, Object>)values.get("nodes");
Map<String, Node> nodes = Maps.newLinkedHashMap();
Map<String, Node> nodes = new LinkedHashMap<String, Node>();
for (String nodeId : rawNodes.keySet()) {
Node node = new Node(nodeId, (Map<String, Object>)rawNodes.get(nodeId));
nodes.put(nodeId, node);
@@ -91,8 +102,11 @@ public void testPowerSet() {
for (int i = 0; i < 10; i++) {
set.add(Integer.valueOf(i));
}
List<Set<Integer>> powerList = Lists.newArrayList(ShardSorter.powerList(set));
assertEquals(1023, powerList.size());
int count = 0;
for (Iterator<Set<Integer>> it = ShardSorter.powerList(set); it.hasNext(); ++count) {
it.next();
}
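// a 10-element set produces 2^10 - 1 = 1023 subsets here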
assertEquals(1023, count);
}

private <K, V> Map<K, V> map(K k1, V v1) {
@@ -24,6 +24,7 @@
import java.util.HashMap;
import java.util.Map;

import org.apache.storm.guava.collect.ImmutableMap;
import org.elasticsearch.hadoop.util.TestSettings;
import org.elasticsearch.storm.cfg.StormConfigurationOptions;
import org.junit.After;
@@ -34,8 +35,6 @@
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

import com.google.common.collect.ImmutableMap;

import static org.elasticsearch.integration.storm.AbstractStormSuite.COMPONENT_HAS_COMPLETED;

@FixMethodOrder(MethodSorters.NAME_ASCENDING)
@@ -22,6 +22,7 @@
import java.util.List;
import java.util.Map;

import org.apache.storm.guava.collect.ImmutableList;
import org.elasticsearch.hadoop.cfg.ConfigurationOptions;
import org.elasticsearch.hadoop.mr.RestUtils;
import org.elasticsearch.hadoop.util.unit.TimeValue;
@@ -31,12 +32,11 @@
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.tuple.Fields;

import com.google.common.collect.ImmutableList;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;

import static org.junit.Assert.*;

import static org.elasticsearch.integration.storm.AbstractStormSuite.*;
import static org.hamcrest.CoreMatchers.*;
import static org.elasticsearch.integration.storm.AbstractStormSuite.COMPONENT_HAS_COMPLETED;
import static org.hamcrest.CoreMatchers.containsString;

public class AbstractStormIdMappingBoltTests extends AbstractStormBoltTests {

@@ -21,6 +21,7 @@
import java.util.List;
import java.util.Map;

import org.apache.storm.guava.collect.ImmutableList;
import org.elasticsearch.hadoop.mr.RestUtils;
import org.elasticsearch.hadoop.util.unit.TimeValue;
import org.elasticsearch.storm.EsBolt;
@@ -29,12 +30,11 @@
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.tuple.Fields;

import com.google.common.collect.ImmutableList;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;

import static org.junit.Assert.*;

import static org.elasticsearch.integration.storm.AbstractStormSuite.*;
import static org.hamcrest.CoreMatchers.*;
import static org.elasticsearch.integration.storm.AbstractStormSuite.COMPONENT_HAS_COMPLETED;
import static org.hamcrest.CoreMatchers.containsString;

public class AbstractStormIndexPatternBoltTests extends AbstractStormBoltTests {

@@ -22,6 +22,7 @@
import java.util.List;
import java.util.Map;

import org.apache.storm.guava.collect.ImmutableList;
import org.elasticsearch.hadoop.mr.RestUtils;
import org.elasticsearch.hadoop.util.unit.TimeValue;
import org.elasticsearch.storm.EsBolt;
@@ -30,8 +31,6 @@
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.tuple.Fields;

import com.google.common.collect.ImmutableList;

import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;

@@ -22,6 +22,8 @@
import java.util.List;
import java.util.Map;

import org.apache.storm.guava.collect.ImmutableList;
import org.apache.storm.guava.collect.ImmutableMap;
import org.elasticsearch.hadoop.mr.RestUtils;
import org.elasticsearch.hadoop.util.TestSettings;
import org.elasticsearch.hadoop.util.unit.TimeValue;
@@ -31,9 +33,6 @@
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.tuple.Fields;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;

import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;

@@ -21,9 +21,11 @@
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.storm.guava.collect.ImmutableMap;
import org.elasticsearch.hadoop.util.TestSettings;
import org.elasticsearch.storm.cfg.StormConfigurationOptions;
import org.junit.After;
@@ -32,8 +34,6 @@
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

import com.google.common.collect.ImmutableMap;

import static org.elasticsearch.integration.storm.AbstractStormSuite.COMPONENT_HAS_COMPLETED;

@RunWith(Parameterized.class)
@@ -64,10 +64,11 @@ public void destroy() {
@Parameters
public static Collection<Object[]> configs() throws IOException {
// no ack
Map noAck = new LinkedHashMap(ImmutableMap.of(StormConfigurationOptions.ES_STORM_SPOUT_RELIABLE, Boolean.FALSE.toString()));
Map noAck = new LinkedHashMap(Collections.singletonMap(StormConfigurationOptions.ES_STORM_SPOUT_RELIABLE, Boolean.FALSE.toString()));

// read ack
Map ack = new LinkedHashMap(ImmutableMap.of(StormConfigurationOptions.ES_STORM_SPOUT_RELIABLE, Boolean.TRUE.toString()));
Map ack = new LinkedHashMap(Collections.singletonMap(StormConfigurationOptions.ES_STORM_SPOUT_RELIABLE,
Boolean.TRUE.toString()));

// read ack bounded queue
Map ackWithSize = new LinkedHashMap(ImmutableMap.of(StormConfigurationOptions.ES_STORM_SPOUT_RELIABLE, Boolean.TRUE.toString(), StormConfigurationOptions.ES_STORM_SPOUT_RELIABLE_QUEUE_SIZE, "1"));
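
Stepping back, the test changes across this commit drop com.google.common.* in favor of plain JDK collections (or Storm's shaded Guava, org.apache.storm.guava, where the Storm tests already have it on the classpath). Below is a minimal sketch of the JDK-only equivalents used above; the keys and values are illustrative, not taken from the commit.

import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

public class GuavaFreeCollectionsSketch {
    public static void main(String[] args) {
        // ImmutableMap.of(k, v) -> Collections.singletonMap(k, v)
        Map<String, String> noAck = Collections.singletonMap("es.storm.spout.reliable", "false");

        // Sets.newHashSet(0, 1, 2, 3, 4) -> new HashSet<>(Arrays.asList(...))
        Set<Integer> shards = new HashSet<Integer>(Arrays.asList(0, 1, 2, 3, 4));

        // Maps.newLinkedHashMap() -> new LinkedHashMap<>()
        Map<String, Object> ordered = new LinkedHashMap<String, Object>(noAck);

        System.out.println(noAck + " " + shards + " " + ordered);
    }
}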
