From fd5f00fd9eba4f003525dc062cb6645e1b0ac839 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=CC=88rg=20Prante?= Date: Wed, 25 Jan 2017 20:27:08 +0100 Subject: [PATCH 01/19] fix gradle setup, add checkstyle, set to version 2.4.4.1 --- build.gradle | 40 ++-- config/checkstyle/checkstyle.xml | 323 +++++++++++++++++++++++++++++++ gradle.properties | 3 + gradle/git.gradle | 9 - gradle/publish.gradle | 59 ++---- gradle/sonarqube.gradle | 41 ++++ 6 files changed, 394 insertions(+), 81 deletions(-) create mode 100644 config/checkstyle/checkstyle.xml create mode 100644 gradle.properties delete mode 100644 gradle/git.gradle create mode 100644 gradle/sonarqube.gradle diff --git a/build.gradle b/build.gradle index 77831d9..19a4cc3 100644 --- a/build.gradle +++ b/build.gradle @@ -1,51 +1,37 @@ -group = 'org.xbib.elasticsearch.plugin' -version = '2.3.3.0' +plugins { + id "org.sonarqube" version "2.2" + id "org.xbib.gradle.plugin.asciidoctor" version "1.5.4.1.0" +} ext { pluginName = 'langdetect' pluginClassname = 'org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin' pluginDescription = 'Language detection for Elasticsearch' user = 'jprante' - name = 'elasticsearch-langdetect' + name = 'elasticsearch-mapper-langdetect' scmUrl = 'https://github.com/' + user + '/' + name scmConnection = 'scm:git:git://github.com/' + user + '/' + name + '.git' scmDeveloperConnection = 'scm:git:git://github.com/' + user + '/' + name + '.git' versions = [ - 'elasticsearch' : '2.3.3', + 'elasticsearch' : '2.4.4', 'jackson': '2.6.2', 'log4j': '2.5', 'junit' : '4.12' ] } -buildscript { - repositories { - mavenLocal() - mavenCentral() - jcenter() - maven { - url "http://xbib.org/repository" - } - } - dependencies { - classpath 'org.ajoberstar:gradle-git:1.4.2' - classpath 'co.riiid:gradle-github-plugin:0.4.2' - classpath 'io.codearte.gradle.nexus:gradle-nexus-staging-plugin:0.5.3' - } -} - apply plugin: 'java' apply plugin: 'maven' apply plugin: 'signing' -apply plugin: 'co.riiid.gradle' +apply plugin: 'findbugs' +apply plugin: 'pmd' +apply plugin: 'checkstyle' +apply plugin: "jacoco" +apply plugin: 'org.xbib.gradle.plugin.asciidoctor' repositories { mavenLocal() mavenCentral() - jcenter() - maven { - url "http://xbib.org/repository" - } } sourceSets { @@ -172,7 +158,5 @@ if (project.hasProperty('signing.keyId')) { } } -ext.grgit = org.ajoberstar.grgit.Grgit.open() - -apply from: 'gradle/git.gradle' +apply from: 'gradle/sonarqube.gradle' apply from: 'gradle/publish.gradle' diff --git a/config/checkstyle/checkstyle.xml b/config/checkstyle/checkstyle.xml new file mode 100644 index 0000000..52fe33c --- /dev/null +++ b/config/checkstyle/checkstyle.xml @@ -0,0 +1,323 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/gradle.properties b/gradle.properties new file mode 100644 index 0000000..4441187 --- /dev/null +++ b/gradle.properties @@ -0,0 +1,3 @@ +group = org.xbib.elasticsearch.plugin +name = elasticsearch-langdetect +version = 2.4.4.1 diff --git a/gradle/git.gradle b/gradle/git.gradle deleted file mode 100644 index 6043d07..0000000 --- a/gradle/git.gradle +++ /dev/null @@ -1,9 +0,0 @@ - -task 
gitRelease(dependsOn: build) << { - grgit.add(patterns: ['.'], update: true) - grgit.commit(message: "release of ${project.version}") - grgit.tag.remove(names: [project.version]) - grgit.tag.add(name: project.version) - grgit.push() - grgit.push(tags: true) -} \ No newline at end of file diff --git a/gradle/publish.gradle b/gradle/publish.gradle index 34cf8d2..af1816c 100644 --- a/gradle/publish.gradle +++ b/gradle/publish.gradle @@ -1,49 +1,37 @@ -apply plugin: 'io.codearte.nexus-staging' - - -/* -nexus { - attachJavadoc = true - attachSources = true - attachTests = true - sign = true - repositoryUrl = 'https://oss.sonatype.org/service/local/staging/deploy/maven2' - snapshotRepositoryUrl = 'https://oss.sonatype.org/content/repositories/snapshots' -} -*/ - -nexusStaging { - packageGroup = "org.xbib" -} task xbibUpload(type: Upload) { configuration = configurations.archives uploadDescriptor = true repositories { - if (project.hasProperty("xbibUsername")) { + if (project.hasProperty('xbibUsername')) { mavenDeployer { configuration = configurations.wagon - repository(id: 'xbib.org', - url: uri('scpexe://xbib.org/repository'), - authentication: [userName: xbibUsername, privateKey: xbibPrivateKey]) + repository(url: uri('scpexe://xbib.org/repository')) { + authentication(userName: xbibUsername, privateKey: xbibPrivateKey) + } } } } } -task mavenCentralUpload(type: Upload) { +task sonatypeUpload(type: Upload) { configuration = configurations.archives uploadDescriptor = true repositories { if (project.hasProperty('ossrhUsername')) { mavenDeployer { beforeDeployment { MavenDeployment deployment -> signing.signPom(deployment) } - repository(id: 'ossrh', url: uri(ossrhReleaseUrl), - authentication: [userName: ossrhUsername, password: ossrhPassword]) - snapshotRepository(id: 'ossrh', url: uri(ossrhSnapshotUrl), - authentication: [userName: ossrhUsername, password: ossrhPassword]) + repository(url: uri(ossrhReleaseUrl)) { + authentication(userName: ossrhUsername, password: ossrhPassword) + } + snapshotRepository(url: uri(ossrhSnapshotUrl)) { + authentication(userName: ossrhUsername, password: ossrhPassword) + } pom.project { - name pluginName + groupId project.group + artifactId project.name + version project.version + name project.name description pluginDescription packaging 'jar' inceptionYear '2012' @@ -76,20 +64,3 @@ task mavenCentralUpload(type: Upload) { } } } - -if (project.hasProperty('githubToken')) { - github { - owner = user - token = githubToken - repo = project.name - name = project.version - tagName = project.version - targetCommitish = 'master' - assets = [ - "build/distributions/${project.name}-${project.version}-plugin.zip" - ] - } - githubRelease { - dependsOn gitRelease, buildPluginZip - } -} \ No newline at end of file diff --git a/gradle/sonarqube.gradle b/gradle/sonarqube.gradle new file mode 100644 index 0000000..6d4c3fa --- /dev/null +++ b/gradle/sonarqube.gradle @@ -0,0 +1,41 @@ +tasks.withType(FindBugs) { + ignoreFailures = true + reports { + xml.enabled = true + html.enabled = false + } +} +tasks.withType(Pmd) { + ignoreFailures = true + reports { + xml.enabled = true + html.enabled = true + } +} +tasks.withType(Checkstyle) { + ignoreFailures = true + reports { + xml.enabled = true + html.enabled = true + } +} + +jacocoTestReport { + reports { + xml.enabled true + csv.enabled false + xml.destination "${buildDir}/reports/jacoco-xml" + html.destination "${buildDir}/reports/jacoco-html" + } +} + +sonarqube { + properties { + property "sonar.projectName", "${project.group} 
${project.name}" + property "sonar.sourceEncoding", "UTF-8" + property "sonar.tests", "src/test/java" + property "sonar.scm.provider", "git" + property "sonar.java.coveragePlugin", "jacoco" + property "sonar.junit.reportsPath", "build/test-results/test/" + } +} From a9536d5bbf638212c7bf50d254ed5b42e08e03dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=CC=88rg=20Prante?= Date: Wed, 25 Jan 2017 20:28:29 +0100 Subject: [PATCH 02/19] add 'language_to' parameter --- .../mapper/langdetect/LangdetectMapper.java | 180 +++++++++++++----- .../langdetect/LangdetectMappingTest.java | 26 ++- .../mapper/langdetect/mapping-to-fields.json | 22 +++ .../index/mapper/langdetect/mapping.json | 22 +-- 4 files changed, 192 insertions(+), 58 deletions(-) create mode 100644 src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/mapping-to-fields.json diff --git a/src/main/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.java b/src/main/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.java index 0b149dd..efba15d 100644 --- a/src/main/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.java +++ b/src/main/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.java @@ -19,8 +19,10 @@ import org.xbib.elasticsearch.common.langdetect.LanguageDetectionException; import java.io.IOException; -import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.util.Collections; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -48,6 +50,8 @@ public static class Builder extends FieldMapper.Builder map = XContentMapValues.nodeMapValue(fieldNode, null); + LanguageTo.Builder languageToBuilder = LanguageTo.builder(); + languageToBuilder.add(map); + builder.languageTo(languageToBuilder.build()); + iterator.remove(); + break; + default: + break; } } return builder; @@ -288,17 +290,21 @@ public static class TypeParser implements Mapper.TypeParser { private final int positionIncrementGap; + private final LanguageTo languageTo; + public LangdetectMapper(String simpleName, - MappedFieldType fieldType, - MappedFieldType defaultFieldType, - int positionIncrementGap, - int ignoreAbove, - Settings indexSettings, - MultiFields multiFields, - CopyTo copyTo, - LangdetectService langdetectService) { + MappedFieldType fieldType, + MappedFieldType defaultFieldType, + int positionIncrementGap, + int ignoreAbove, + Settings indexSettings, + MultiFields multiFields, + CopyTo copyTo, + LanguageTo languageTo, + LangdetectService langdetectService) { super(simpleName, fieldType, defaultFieldType, positionIncrementGap, ignoreAbove, indexSettings, multiFields, copyTo); + this.languageTo = languageTo; this.langdetectService = langdetectService; this.positionIncrementGap = positionIncrementGap; } @@ -337,7 +343,7 @@ protected void parseCreateField(ParseContext context, List fields) throws try { byte[] b = parser.binaryValue(); if (b != null && b.length > 0) { - value = new String(b, Charset.forName("UTF-8")); + value = new String(b, StandardCharsets.UTF_8); } } catch (Exception e) { // ignore @@ -348,6 +354,9 @@ protected void parseCreateField(ParseContext context, List fields) throws for (Language lang : langs) { Field field = new Field(fieldType().names().indexName(), lang.getLanguage(), fieldType()); fields.add(field); + if (languageTo.languageToFields().containsKey(lang.getLanguage())) { + parseLanguageToFields(context, languageTo.languageToFields().get(lang.getLanguage())); + } } } catch 
(LanguageDetectionException e) { context.createExternalValueContext("unknown"); @@ -377,5 +386,84 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, for (String key : map.keySet()) { builder.field(key, map.get(key)); } + languageTo.toXContent(builder, params); } -} \ No newline at end of file + + @SuppressWarnings("unchecked") + private static void parseLanguageToFields(ParseContext originalContext, Object languageToFields) throws IOException { + List fieldList = languageToFields instanceof List ? + (List)languageToFields : Collections.singletonList(languageToFields); + ParseContext context = originalContext.createCopyToContext(); + for (Object field : fieldList) { + ParseContext.Document targetDoc = null; + for (ParseContext.Document doc = context.doc(); doc != null; doc = doc.getParent()) { + if (field.toString().startsWith(doc.getPrefix())) { + targetDoc = doc; + break; + } + } + if (targetDoc == null) { + throw new IllegalArgumentException("target doc is null"); + } + final ParseContext copyToContext; + if (targetDoc == context.doc()) { + copyToContext = context; + } else { + copyToContext = context.switchDoc(targetDoc); + } + FieldMapper fieldMapper = copyToContext.docMapper().mappers().getMapper(field.toString()); + if (fieldMapper != null) { + fieldMapper.parse(copyToContext); + } else { + throw new MapperParsingException("attempt to copy value to non-existing field [" + field + "]"); + } + } + } + + public static class LanguageTo { + + private final Map languageToFields; + + private LanguageTo(Map languageToFields) { + this.languageToFields = languageToFields; + } + + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (!languageToFields.isEmpty()) { + builder.startObject("language_to"); + for (Map.Entry field : languageToFields.entrySet()) { + builder.field(field.getKey(), field.getValue()); + } + builder.endObject(); + } + return builder; + } + + public static Builder builder() { + return new Builder(); + } + + public static class Builder { + private final Map languageToBuilders = new LinkedHashMap<>(); + + public LanguageTo.Builder add(String language, String field) { + languageToBuilders.put(language, field); + return this; + } + + public LanguageTo.Builder add(Map map) { + languageToBuilders.putAll(map); + return this; + } + + public LanguageTo build() { + return new LanguageTo(Collections.unmodifiableMap(languageToBuilders)); + } + } + + public Map languageToFields() { + return languageToFields; + } + } + +} diff --git a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMappingTest.java b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMappingTest.java index 1b7c63a..7fea4a8 100644 --- a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMappingTest.java +++ b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMappingTest.java @@ -12,6 +12,7 @@ import java.io.IOException; import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; import static org.elasticsearch.common.io.Streams.copyToString; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -115,7 +116,30 @@ public void testShortTextProfile() throws Exception { assertEquals("en", doc.getFields("someField")[0].stringValue()); } + @Test + public void testToFields() throws Exception { + String mapping = copyToStringFromClasspath("mapping-to-fields.json"); + DocumentMapper docMapper = 
MapperTestUtils.newDocumentMapperParser().parse("someType", new CompressedXContent(mapping)); + String sampleText = copyToStringFromClasspath("english.txt"); + BytesReference json = jsonBuilder().startObject().field("someField", sampleText).endObject().bytes(); + ParseContext.Document doc = docMapper.parse("someIndex", "someType", "1", json).rootDoc(); + assertEquals(1, doc.getFields("someField").length); + assertEquals("en", doc.getFields("someField")[0].stringValue()); + // re-parse it + String builtMapping = docMapper.mappingSource().string(); + docMapper = MapperTestUtils.newDocumentMapperParser().parse("someType", new CompressedXContent(builtMapping)); + json = jsonBuilder().startObject().field("someField", sampleText).endObject().bytes(); + doc = docMapper.parse("someIndex", "someType", "1", json).rootDoc(); + //for (IndexableField field : doc.getFields()) { + // System.err.println(field.name() + " = " + field.stringValue()); + //} + assertEquals(1, doc.getFields("someField").length); + assertEquals("en", doc.getFields("someField")[0].stringValue()); + assertEquals(1, doc.getFields("english_field").length); + assertEquals("This is a very small example of a text", doc.getFields("english_field")[0].stringValue()); + } + public String copyToStringFromClasspath(String path) throws IOException { - return copyToString(new InputStreamReader(getClass().getResource(path).openStream(), "UTF-8")); + return copyToString(new InputStreamReader(getClass().getResource(path).openStream(), StandardCharsets.UTF_8)); } } diff --git a/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/mapping-to-fields.json b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/mapping-to-fields.json new file mode 100644 index 0000000..70bd95d --- /dev/null +++ b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/mapping-to-fields.json @@ -0,0 +1,22 @@ +{ + "someType" : { + "properties" : { + "someField":{ + "type" : "langdetect", + "languages" : [ "de", "en", "fr", "nl", "it" ], + "language_to" : { + "de": "german_field", + "en": "english_field" + } + }, + "german_field" : { + "analyzer" : "german", + "type": "string" + }, + "english_field" : { + "analyzer" : "english", + "type" : "string" + } + } + } +} \ No newline at end of file diff --git a/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/mapping.json b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/mapping.json index 6d9a73d..cbe4697 100644 --- a/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/mapping.json +++ b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/mapping.json @@ -1,13 +1,13 @@ { - "someType" : { - "properties" : { - "someField":{ - "type" : "langdetect", - "languages" : [ "de", "en", "fr", "nl", "it" ], - "map" : { - "de" : "Deutsch" - } - } - } + "someType" : { + "properties" : { + "someField":{ + "type" : "langdetect", + "languages" : [ "de", "en", "fr", "nl", "it" ], + "map" : { + "de" : "Deutsch" } -} \ No newline at end of file + } + } + } +} From 5e2f9ea049943ddedca346bfb0874d76dad6a7a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=CC=88rg=20Prante?= Date: Wed, 25 Jan 2017 20:44:12 +0100 Subject: [PATCH 03/19] update README --- README.md | 74 ++++++++++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 65 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 1091bd2..72b10cb 100644 --- a/README.md +++ b/README.md @@ -81,6 +81,7 @@ zh-tw | Elasticsearch | Plugin | Release date | | -------------- | 
-------------- | ------------ | +| 2.4.4 | 2.4.4.1 | Jan 25, 2017 | | 2.3.3 | 2.3.3.0 | Jun 11, 2016 | | 2.3.2 | 2.3.2.0 | Jun 11, 2016 | | 2.3.1 | 2.3.1.0 | Apr 11, 2016 | @@ -104,7 +105,7 @@ zh-tw ## Installation Elasticsearch 2.x - ./bin/plugin install https://github.com/jprante/elasticsearch-langdetect/releases/download/2.3.3.0/elasticsearch-langdetect-2.3.3.0-plugin.zip + ./bin/plugin install https://github.com/jprante/elasticsearch-langdetect/releases/download/2.4.4.1/elasticsearch-langdetect-2.4.4.1-plugin.zip ## Installation Elasticsearch 1.x @@ -134,7 +135,10 @@ In this example, we create a simple detector field, and write text to it for det { "article" : { "properties" : { - "content" : { "type" : "langdetect" } + "langcode" : { + "type" : "langdetect", + "languages" : [ "de", "en", "fr", "nl", "it" ] + } } } } @@ -143,21 +147,21 @@ In this example, we create a simple detector field, and write text to it for det curl -XPUT 'localhost:9200/test/article/1' -d ' { "title" : "Some title", - "content" : "Oh, say can you see by the dawn`s early light, What so proudly we hailed at the twilight`s last gleaming?" + "langcode" : "Oh, say can you see by the dawn`s early light, What so proudly we hailed at the twilight`s last gleaming?" } ' curl -XPUT 'localhost:9200/test/article/2' -d ' { "title" : "Ein Titel", - "content" : "Einigkeit und Recht und Freiheit für das deutsche Vaterland!" + "langcode" : "Einigkeit und Recht und Freiheit für das deutsche Vaterland!" } ' curl -XPUT 'localhost:9200/test/article/3' -d ' { "title" : "Un titre", - "content" : "Allons enfants de la Patrie, Le jour de gloire est arrivé!" + "langcode" : "Allons enfants de la Patrie, Le jour de gloire est arrivé!" } ' @@ -169,7 +173,7 @@ A search for the detected language codes is a simple term query, like this: { "query" : { "term" : { - "content" : "en" + "langcode" : "en" } } } @@ -178,7 +182,7 @@ A search for the detected language codes is a simple term query, like this: { "query" : { "term" : { - "content" : "de" + "langcode" : "de" } } } @@ -188,13 +192,65 @@ A search for the detected language codes is a simple term query, like this: { "query" : { "term" : { - "content" : "fr" + "langcode" : "fr" } } } ' -## Show stored language codes +## Indexing language-detected text alongside with code + +Just indexing the language code is not eough in most cases. The language-detected text +should be passed to a specific analyzer to papply language-specific analysis. This plugin +allows that by the `language_to` parameter. + + curl -XDELETE 'localhost:9200/test' + + curl -XPUT 'localhost:9200/test' + + curl -XPOST 'localhost:9200/test/article/_mapping' -d ' + { + "article" : { + "properties" : { + "langcode":{ + "type" : "langdetect", + "languages" : [ "de", "en", "fr", "nl", "it" ], + "language_to" : { + "de": "german_field", + "en": "english_field" + } + }, + "german_field" : { + "analyzer" : "german", + "type": "string" + }, + "english_field" : { + "analyzer" : "english", + "type" : "string" + } + } + } + } + ' + + curl -XPUT 'localhost:9200/test/article/1' -d ' + { + "langcode" : "This is a small example for english text" + } + ' + + curl -XPOST 'localhost:9200/test/_search?pretty' -d ' + { + "query" : { + "match" : { + "english_field" : "This is a small example for english text" + } + } + } + ' + + +## Language code and `multi_field` Using multifields, it is possible to store the text alongside with the detected language(s). 
Here, we use another (short nonsense) example text for demonstration, From 3b161a1a822b5f0ab01106b068ff12d92b958a51 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=CC=88rg=20Prante?= Date: Wed, 25 Jan 2017 22:10:44 +0100 Subject: [PATCH 04/19] update gradle to 3.2.1, fix serialization warning --- build.gradle | 24 ++- gradle/wrapper/gradle-wrapper.jar | Bin 0 -> 54227 bytes gradle/wrapper/gradle-wrapper.properties | 6 + gradlew | 172 ++++++++++++++++++ gradlew.bat | 84 +++++++++ .../LanguageDetectionException.java | 2 + 6 files changed, 284 insertions(+), 4 deletions(-) create mode 100644 gradle/wrapper/gradle-wrapper.jar create mode 100644 gradle/wrapper/gradle-wrapper.properties create mode 100755 gradlew create mode 100644 gradlew.bat diff --git a/build.gradle b/build.gradle index 19a4cc3..c503a01 100644 --- a/build.gradle +++ b/build.gradle @@ -1,14 +1,28 @@ + plugins { id "org.sonarqube" version "2.2" id "org.xbib.gradle.plugin.asciidoctor" version "1.5.4.1.0" } +printf "Host: %s\nOS: %s %s %s\nJVM: %s %s %s %s\nGroovy: %s\nGradle: %s\n" + + "Build: group: ${project.group} name: ${project.name} version: ${project.version}\n", + InetAddress.getLocalHost(), + System.getProperty("os.name"), + System.getProperty("os.arch"), + System.getProperty("os.version"), + System.getProperty("java.version"), + System.getProperty("java.vm.version"), + System.getProperty("java.vm.vendor"), + System.getProperty("java.vm.name"), + GroovySystem.getVersion(), + gradle.gradleVersion + ext { pluginName = 'langdetect' pluginClassname = 'org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin' pluginDescription = 'Language detection for Elasticsearch' user = 'jprante' - name = 'elasticsearch-mapper-langdetect' + name = 'elasticsearch-langdetect' scmUrl = 'https://github.com/' + user + '/' + name scmConnection = 'scm:git:git://github.com/' + user + '/' + name + '.git' scmDeveloperConnection = 'scm:git:git://github.com/' + user + '/' + name + '.git' @@ -70,13 +84,15 @@ dependencies { wagon 'org.apache.maven.wagon:wagon-ssh-external:2.10' } -sourceCompatibility = 1.7 -targetCompatibility = 1.7 +sourceCompatibility = JavaVersion.VERSION_1_7 +targetCompatibility = JavaVersion.VERSION_1_7 +[compileJava, compileTestJava]*.options*.encoding = 'UTF-8' tasks.withType(JavaCompile) { - options.compilerArgs << "-Xlint:unchecked,deprecation" + options.compilerArgs << "-Xlint:all" } + test { systemProperties['path.home'] = System.getProperty("user.dir") testLogging { diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000000000000000000000000000000000000..51288f9c2f05faf8d42e1a751a387ca7923882c3 GIT binary patch literal 54227 zcmaI7W0WS{vNc-lF59+k+qP}nt}ffQZQE5{wp~xzc30g#d_WEY_lIM<*>kEH?T**n)L|d4_rKbms*8e?2n)Ki&=O?<4J<%>V7R{?iBg zUq0qeh9)+q|G%!t|L*GUWa!{v>h%8_vhfuLJO%*-l>FD9;-818|MyT)4^v}T7kei< zV;e(f=M?n_Z{$_P@0tV}PfeKL81;-1K%hd2jPC7;DA>wd?+UcoR*R#W($?M))u>gd zdn}ehUd3m+8_k40@?K-j2J`KZyalp1AAH~OemS09)oMlsj}Ip=JKY&CS%B#&?VkHQ zbp0RoxZiO54G4JffN7@lTM-EP35bJi7)bfM8EqjCYYcnn{VS&2QEn5KJxt6fN4jA5 z35`f+Sduq7yq)PG@7FzVPY-Jv0EQW7A|u)@XZ43(l=9JiPNOEX-?M91MKSb8LB7*ix_1 zP;pX`ce%>4<(cfvymJ}t(%d{Q0z=8R1bQv29j?-2sm$ONs(@PIGdy@9R%Ta=*YdEA z&d3ZtFAJZsYhJ)vf*)knsFyO}Y=592S4#xG?1vm(x|kp{tuD2chGmvsN`Pzy^ zUV@_Yn2NIb#=1!#ZV!rb;}EdSGKZGG!tBKgC=%PM#u0uc?HGxKOZ9pZ7_T|A*1#Qy zf$Rm!>2sH}rRl&DRAmM3N>s80GU*RYMpwv5BW)XS&*^g<%=05?3M^7)<&I1!aW`0s z&|9LT!iow_V9V%qPOf<7C7X$)ABMd05mgn;wfKiF_Pq*?msg;KGz$4;ER)t4nxfpQ z>NJFiAO81*#gMz>5y%b5ZK(1yt)P-Kre5KQC3YH?naRFiq=b 
zPa+MszY+aX{~!J8pVb7PlRcMP`$?AQ`G1rBel$Oii~Wfy6!U*$lKfMY?DOhB53u=3 zWtjXos^@!`U!L9n(e}@SK7P`0r2dWO7uX+7^1nks{&qjl{MdgIjb{9f=s)QH%d7o4 z@^jzdpUBgIeb4jJ_dS!>ugJe|=;wY(KQT*-|AzVZ$MO$eO3!Qm-2dYzQ+e6HVEPAd zkmpR#PlS+Hz47-s$#aX@Pm;Cvzmfbo!T-{$|BpH3xfSFm iyjj \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS="" + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn ( ) { + echo "$*" +} + +die ( ) { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; + NONSTOP* ) + nonstop=true + ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? 
-ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin, switch paths to Windows format before running java +if $cygwin ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=$((i+1)) + done + case $i in + (0) set -- ;; + (1) set -- "$args0" ;; + (2) set -- "$args0" "$args1" ;; + (3) set -- "$args0" "$args1" "$args2" ;; + (4) set -- "$args0" "$args1" "$args2" "$args3" ;; + (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Escape application args +save ( ) { + for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done + echo " " +} +APP_ARGS=$(save "$@") + +# Collect all arguments for the java command, following the shell quoting and substitution rules +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" + +# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong +if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then + cd "$(dirname "$0")" +fi + +exec "$JAVACMD" "$@" diff --git a/gradlew.bat b/gradlew.bat new file mode 100644 index 0000000..e95643d --- /dev/null +++ b/gradlew.bat @@ -0,0 +1,84 @@ +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS= + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. 
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:init +@rem Get command-line arguments, handling Windows variants + +if not "%OS%" == "Windows_NT" goto win9xME_args + +:win9xME_args +@rem Slurp the command line arguments. +set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/src/main/java/org/xbib/elasticsearch/common/langdetect/LanguageDetectionException.java b/src/main/java/org/xbib/elasticsearch/common/langdetect/LanguageDetectionException.java index 84bf7be..e4c0e93 100644 --- a/src/main/java/org/xbib/elasticsearch/common/langdetect/LanguageDetectionException.java +++ b/src/main/java/org/xbib/elasticsearch/common/langdetect/LanguageDetectionException.java @@ -4,6 +4,8 @@ public class LanguageDetectionException extends IOException { + private static final long serialVersionUID = 752257035371915875L; + public LanguageDetectionException(String message) { super(message); } From ebc90a74d1c46cbf4074bf188d393b8d00c92f87 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=CC=88rg=20Prante?= Date: Thu, 26 Jan 2017 16:12:00 +0100 Subject: [PATCH 05/19] update to Elasticsearch 5.1.2 --- README.adoc | 516 ++++++++++++ README.md | 499 ------------ build.gradle | 83 +- docs/javadoc/allclasses-frame.html | 35 + docs/javadoc/allclasses-noframe.html | 35 + docs/javadoc/coderay-asciidoctor.css | 89 +++ docs/javadoc/constant-values.html | 192 +++++ docs/javadoc/deprecated-list.html | 121 +++ docs/javadoc/help-doc.html | 222 ++++++ docs/javadoc/index-all.html | 452 +++++++++++ docs/javadoc/index.html | 75 ++ .../action/langdetect/LangdetectAction.html | 325 ++++++++ .../action/langdetect/LangdetectRequest.html | 423 ++++++++++ .../langdetect/LangdetectRequestBuilder.html | 308 ++++++++ .../action/langdetect/LangdetectResponse.html | 417 ++++++++++ .../langdetect/TransportLangdetectAction.html | 329 ++++++++ .../action/langdetect/package-frame.html | 23 + .../action/langdetect/package-summary.html | 155 ++++ .../action/langdetect/package-tree.html | 178 +++++ .../common/langdetect/LangProfile.html | 351 +++++++++ .../common/langdetect/LangdetectService.html | 369 +++++++++ .../common/langdetect/Language.html | 345 +++++++++ .../LanguageDetectionException.html | 264 +++++++ .../common/langdetect/NGram.html | 334 ++++++++ .../common/langdetect/package-frame.html | 26 + 
.../common/langdetect/package-summary.html | 166 ++++ .../common/langdetect/package-tree.html | 150 ++++ .../langdetect/LangdetectMapper.Builder.html | 601 ++++++++++++++ .../langdetect/LangdetectMapper.Defaults.html | 274 +++++++ .../LangdetectMapper.LanguageTo.Builder.html | 300 +++++++ .../LangdetectMapper.LanguageTo.html | 290 +++++++ .../LangdetectMapper.TypeParser.html | 300 +++++++ .../mapper/langdetect/LangdetectMapper.html | 501 ++++++++++++ .../mapper/langdetect/package-frame.html | 24 + .../mapper/langdetect/package-summary.html | 159 ++++ .../index/mapper/langdetect/package-tree.html | 159 ++++ .../plugin/langdetect/LangdetectPlugin.html | 352 +++++++++ .../plugin/langdetect/package-frame.html | 19 + .../plugin/langdetect/package-summary.html | 139 ++++ .../plugin/langdetect/package-tree.html | 138 ++++ .../langdetect/RestLangdetectAction.html | 352 +++++++++ .../rest/action/langdetect/package-frame.html | 19 + .../action/langdetect/package-summary.html | 139 ++++ .../rest/action/langdetect/package-tree.html | 142 ++++ docs/javadoc/overview-frame.html | 24 + docs/javadoc/overview-summary.html | 162 ++++ docs/javadoc/overview-tree.html | 235 ++++++ docs/javadoc/package-list | 5 + docs/javadoc/script.js | 30 + docs/javadoc/serialized-form.html | 138 ++++ docs/javadoc/stylesheet.css | 732 ++++++++++++++++++ ....mapper.langdetect.DetectLanguageTest.html | 124 +++ ....index.mapper.langdetect.DetectorTest.html | 116 +++ ...apper.langdetect.LangDetectActionTest.html | 480 ++++++++++++ ...apper.langdetect.LangDetectBinaryTest.html | 372 +++++++++ ...pper.langdetect.LangDetectChineseTest.html | 371 +++++++++ ...dex.mapper.langdetect.LangProfileTest.html | 121 +++ ...pper.langdetect.LangdetectMappingTest.html | 348 +++++++++ ....index.mapper.langdetect.LanguageTest.html | 96 +++ ...rch.index.mapper.langdetect.NGramTest.html | 111 +++ ....mapper.langdetect.SimpleDetectorTest.html | 106 +++ ...ndex.mapper.langdetect.SimpleHttpTest.html | 101 +++ docs/test/css/base-style.css | 179 +++++ docs/test/css/style.css | 84 ++ docs/test/index.html | 238 ++++++ docs/test/js/report.js | 194 +++++ ...elasticsearch.index.mapper.langdetect.html | 219 ++++++ gradle.properties | 2 +- settings.gradle | 2 +- src/docs/asciidoc/css/foundation.css | 684 ++++++++++++++++ src/docs/asciidoclet/overview.adoc | 4 + .../resources => docs/img}/towerofbabel.jpg | Bin .../org/xbib/elasticsearch/NodeTestUtils.java | 73 -- .../langdetect/LangDetectActionTest.java | 105 --- .../langdetect/LangDetectBinaryTest.java | 54 -- .../langdetect/LangDetectChineseTest.java | 56 -- .../plugin/langdetect/SimpleHttpTest.java | 72 -- src/integration-test/resources/log4j2.xml | 13 - .../action/langdetect/LangdetectAction.java | 4 +- .../action/langdetect/LangdetectRequest.java | 21 +- .../langdetect/LangdetectRequestBuilder.java | 3 + .../action/langdetect/LangdetectResponse.java | 14 +- .../langdetect/TransportLangdetectAction.java | 8 +- .../common/langdetect/LangProfile.java | 33 +- .../common/langdetect/LangdetectService.java | 128 +-- .../common/langdetect/Language.java | 6 +- .../LanguageDetectionException.java | 4 +- .../common/langdetect/NGram.java | 185 ++--- .../mapper/langdetect/LangdetectMapper.java | 337 ++++---- .../plugin/langdetect/LangdetectPlugin.java | 60 +- .../langdetect/RestLangdetectAction.java | 26 +- .../templates/plugin-descriptor.properties | 3 - src/site/site.xml | 30 - .../java/org/elasticsearch/node/MockNode.java | 16 +- .../xbib/elasticsearch/MapperTestUtils.java | 350 ++++++--- 
.../org/xbib/elasticsearch/NodeTestUtils.java | 205 +++++ .../mapper/langdetect/DetectLanguageTest.java | 2 +- .../index/mapper/langdetect/DetectorTest.java | 3 +- .../langdetect/LangDetectActionTest.java | 126 +++ .../langdetect/LangDetectBinaryTest.java | 62 ++ .../langdetect/LangDetectChineseTest.java | 63 ++ .../langdetect/LangdetectMappingTest.java | 43 +- .../index/mapper/langdetect/LanguageTest.java | 1 - .../index/mapper/langdetect/NGramTest.java | 1 - .../mapper/langdetect/SimpleDetectorTest.java | 4 +- .../mapper/langdetect/SimpleHttpTest.java | 70 ++ src/test/resources/log4j2.xml | 11 +- .../mapper/langdetect/base64-2-mapping.json | 5 +- .../mapper/langdetect/mapping-to-fields.json | 4 +- 109 files changed, 16359 insertions(+), 1510 deletions(-) create mode 100644 README.adoc delete mode 100644 README.md create mode 100644 docs/javadoc/allclasses-frame.html create mode 100644 docs/javadoc/allclasses-noframe.html create mode 100644 docs/javadoc/coderay-asciidoctor.css create mode 100644 docs/javadoc/constant-values.html create mode 100644 docs/javadoc/deprecated-list.html create mode 100644 docs/javadoc/help-doc.html create mode 100644 docs/javadoc/index-all.html create mode 100644 docs/javadoc/index.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectAction.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectRequest.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectRequestBuilder.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/action/langdetect/TransportLangdetectAction.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-frame.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-summary.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-tree.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/common/langdetect/LangProfile.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/common/langdetect/LangdetectService.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/common/langdetect/Language.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/common/langdetect/LanguageDetectionException.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/common/langdetect/NGram.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-frame.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-summary.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-tree.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.Builder.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.Defaults.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.LanguageTo.Builder.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.LanguageTo.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.TypeParser.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-frame.html create mode 100644 
docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-summary.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-tree.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/LangdetectPlugin.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-frame.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-summary.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-tree.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-frame.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-summary.html create mode 100644 docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-tree.html create mode 100644 docs/javadoc/overview-frame.html create mode 100644 docs/javadoc/overview-summary.html create mode 100644 docs/javadoc/overview-tree.html create mode 100644 docs/javadoc/package-list create mode 100644 docs/javadoc/script.js create mode 100644 docs/javadoc/serialized-form.html create mode 100644 docs/javadoc/stylesheet.css create mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectLanguageTest.html create mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectorTest.html create mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectActionTest.html create mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectBinaryTest.html create mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectChineseTest.html create mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangProfileTest.html create mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMappingTest.html create mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LanguageTest.html create mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.NGramTest.html create mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.SimpleDetectorTest.html create mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.SimpleHttpTest.html create mode 100644 docs/test/css/base-style.css create mode 100644 docs/test/css/style.css create mode 100644 docs/test/index.html create mode 100644 docs/test/js/report.js create mode 100644 docs/test/packages/org.xbib.elasticsearch.index.mapper.langdetect.html create mode 100644 src/docs/asciidoc/css/foundation.css create mode 100644 src/docs/asciidoclet/overview.adoc rename src/{site/resources => docs/img}/towerofbabel.jpg (100%) delete mode 100644 src/integration-test/java/org/xbib/elasticsearch/NodeTestUtils.java delete mode 100644 src/integration-test/java/org/xbib/elasticsearch/plugin/langdetect/LangDetectActionTest.java delete mode 100644 src/integration-test/java/org/xbib/elasticsearch/plugin/langdetect/LangDetectBinaryTest.java delete mode 100644 src/integration-test/java/org/xbib/elasticsearch/plugin/langdetect/LangDetectChineseTest.java delete mode 100644 src/integration-test/java/org/xbib/elasticsearch/plugin/langdetect/SimpleHttpTest.java delete mode 100644 src/integration-test/resources/log4j2.xml delete mode 100644 src/site/site.xml rename src/{integration-test 
=> test}/java/org/elasticsearch/node/MockNode.java (78%)
 create mode 100644 src/test/java/org/xbib/elasticsearch/NodeTestUtils.java
 create mode 100644 src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangDetectActionTest.java
 create mode 100644 src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangDetectBinaryTest.java
 create mode 100644 src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangDetectChineseTest.java
 create mode 100644 src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleHttpTest.java

diff --git a/README.adoc b/README.adoc
new file mode 100644
index 0000000..fedec60
--- /dev/null
+++ b/README.adoc
@@ -0,0 +1,516 @@
+# A langdetect plugin for Elasticsearch
+
+image:https://api.travis-ci.org/jprante/elasticsearch-langdetect.svg[title="Build status", link="https://travis-ci.org/jprante/elasticsearch-langdetect/"]
+image:https://img.shields.io/sonar/http/nemo.sonarqube.com/org.xbib.elasticsearch.plugin%3Aelasticsearch-langdetect/coverage.svg?style=flat-square[title="Coverage", link="https://sonarqube.com/dashboard/index?id=org.xbib.elasticsearch.plugin%3Aelasticsearch-langdetect"]
+image:https://maven-badges.herokuapp.com/maven-central/org.xbib.elasticsearch.plugin/elasticsearch-langdetect/badge.svg[title="Maven Central", link="http://search.maven.org/#search%7Cga%7C1%7Cxbib%20elasticsearch-langdetect"]
+image:https://img.shields.io/badge/License-Apache%202.0-blue.svg[title="Apache License 2.0", link="https://opensource.org/licenses/Apache-2.0"]
+image:https://img.shields.io/twitter/url/https/twitter.com/xbib.svg?style=social&label=Follow%20%40xbib[title="Twitter", link="https://twitter.com/xbib"]
+
+image:https://upload.wikimedia.org/wikipedia/commons/thumb/2/29/Pieter_Bruegel_the_Elder_-_The_Tower_of_Babel_%28Rotterdam%29_-_Google_Art_Project.jpg/299px-Pieter_Bruegel_the_Elder_-_The_Tower_of_Babel_%28Rotterdam%29_-_Google_Art_Project.jpg["Tower of Babel"]
+
+This is an implementation of a plugin for link:http://github.com/elasticsearch/elasticsearch[Elasticsearch] using the
+implementation of Nakatani Shuyo's link:http://code.google.com/p/language-detection/[language detector].
+
+It uses character 3-grams and a Bayesian filter with various normalizations and feature sampling.
+The precision is over 99% for 53 languages.
+
+The plugin offers a mapping type to specify fields where you want to enable language detection.
+Detected languages are indexed into a subfield of the field named 'lang', as you can see in the example.
+The field can be queried for language codes.
+
+You can use the multi_field mapping type to combine this plugin with the attachment mapper plugin, to
+enable language detection in base64-encoded binary data. Currently, only UTF-8 texts are supported.
+
+The plugin also offers a REST endpoint, to which a short UTF-8 text can be posted; the plugin responds
+with a list of recognized languages.
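+
+To make the character 3-gram idea mentioned above concrete, here is a minimal, hypothetical sketch of
+how such 3-grams can be extracted from a short text. The class and method names are made up for this
+illustration; it is not the plugin's actual `NGram` implementation, which additionally applies
+character normalization and feature sampling.
+
+[source,java]
+----
+import java.util.ArrayList;
+import java.util.List;
+
+// Hypothetical illustration of character 3-gram extraction; not the plugin's NGram class.
+public class ThreeGramExample {
+
+    // Pad with single spaces so word boundaries become part of the features,
+    // then slide a window of width 3 over the text.
+    static List<String> threeGrams(String text) {
+        String padded = " " + text.trim().toLowerCase() + " ";
+        List<String> grams = new ArrayList<>();
+        for (int i = 0; i + 3 <= padded.length(); i++) {
+            grams.add(padded.substring(i, i + 3));
+        }
+        return grams;
+    }
+
+    public static void main(String[] args) {
+        // Prints: [ te, tes, est, st ]
+        System.out.println(threeGrams("Test"));
+    }
+}
+----
+
+The detector builds per-language profiles of such n-gram frequencies and combines them in a naive
+Bayes model to estimate language probabilities.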
+
+Here is a list of the language codes recognized:
+
+.Languages
+[frame="all"]
+|===
+| Code | Description
+| af | Afrikaans
+| ar | Arabic
+| bg | Bulgarian
+| bn | Bengali
+| cs | Czech
+| da | Danish
+| de | German
+| el | Greek
+| en | English
+| es | Spanish
+| et | Estonian
+| fa | Farsi
+| fi | Finnish
+| fr | French
+| gu | Gujarati
+| he | Hebrew
+| hi | Hindi
+| hr | Croatian
+| hu | Hungarian
+| id | Indonesian
+| it | Italian
+| ja | Japanese
+| kn | Kannada
+| ko | Korean
+| lt | Lithuanian
+| lv | Latvian
+| mk | Macedonian
+| ml | Malayalam
+| mr | Marathi
+| ne | Nepali
+| nl | Dutch
+| no | Norwegian
+| pa | Eastern Punjabi
+| pl | Polish
+| pt | Portuguese
+| ro | Romanian
+| ru | Russian
+| sk | Slovak
+| sl | Slovene
+| so | Somali
+| sq | Albanian
+| sv | Swedish
+| sw | Swahili
+| ta | Tamil
+| te | Telugu
+| th | Thai
+| tl | Tagalog
+| tr | Turkish
+| uk | Ukrainian
+| ur | Urdu
+| vi | Vietnamese
+| zh-cn | Chinese
+| zh-tw | Traditional Chinese characters (Taiwan, Hong Kong, Macau)
+|===
+
+.Compatibility matrix
+[frame="all"]
+|===
+| Plugin version | Elasticsearch version | Release date
+| 5.1.2.0 | 5.1.2 | Jan 26, 2017
+| 2.4.4.1 | 2.4.4 | Jan 25, 2017
+| 2.3.3.0 | 2.3.3 | Jun 11, 2016
+| 2.3.2.0 | 2.3.2 | Jun 11, 2016
+| 2.3.1.0 | 2.3.1 | Apr 11, 2016
+| 2.2.1.0 | 2.2.1 | Apr 11, 2016
+| 2.2.0.2 | 2.2.0 | Mar 25, 2016
+| 2.2.0.1 | 2.2.0 | Mar 6, 2016
+| 2.1.1.0 | 2.1.1 | Dec 20, 2015
+| 2.1.0.0 | 2.1.0 | Dec 15, 2015
+| 2.0.1.0 | 2.0.1 | Dec 15, 2015
+| 2.0.0.0 | 2.0.0 | Nov 12, 2015
+| 1.6.0.0 | 1.6.0 | Jul 1, 2015
+| 1.4.4.2 | 1.4.4 | Apr 3, 2015
+| 1.4.4.1 | 1.4.4 | Mar 4, 2015
+| 1.4.0.2 | 1.4.0 | Nov 26, 2014
+| 1.4.0.1 | 1.4.0 | Nov 20, 2014
+| 1.4.0.0 | 1.4.0 | Nov 14, 2014
+| 1.3.1.0 | 1.3.0 | Jul 30, 2014
+| 1.2.1.1 | 1.2.1 | Jun 18, 2014
+|===
+
+## Installation
+
+### Elasticsearch 5.x
+
+[source]
+----
+./bin/elasticsearch-plugin install http://xbib.org/repository/org/xbib/elasticsearch/plugin/elasticsearch-langdetect/5.1.2.0/elasticsearch-langdetect-5.1.2.0-plugin.zip
+----
+
+### Elasticsearch 2.x
+
+[source]
+----
+./bin/plugin install http://xbib.org/repository/org/xbib/elasticsearch/plugin/elasticsearch-langdetect/2.4.4.1/elasticsearch-langdetect-2.4.4.1-plugin.zip
+----
+
+### Elasticsearch 1.x
+
+[source]
+----
+./bin/plugin -install langdetect -url http://xbib.org/repository/org/xbib/elasticsearch/plugin/elasticsearch-langdetect/1.6.0.0/elasticsearch-langdetect-1.6.0.0-plugin.zip
+----
+
+Do not forget to restart the node after installing.
+
+# Examples
+
+NOTE: The examples are written for Elasticsearch 5.x and need to be adapted to earlier versions of Elasticsearch.
+
+## A simple language detection example
+
+In this example, we create a simple detector field and write text to it for detection.
+
+[source]
+----
+DELETE /test
+PUT /test
+{
+  "mappings": {
+    "docs": {
+      "properties": {
+        "text": {
+          "type": "langdetect",
+          "languages" : [ "en", "de", "fr" ]
+        }
+      }
+    }
+  }
+}
+
+PUT /test/docs/1
+{
+  "text" : "Oh, say can you see by the dawn`s early light, What so proudly we hailed at the twilight`s last gleaming?"
+}
+
+PUT /test/docs/2
+{
+  "text" : "Einigkeit und Recht und Freiheit für das deutsche Vaterland!"
+}
+
+PUT /test/docs/3
+{
+  "text" : "Allons enfants de la Patrie, Le jour de gloire est arrivé!"
+} + +POST /test/_search +{ + "query" : { + "term" : { + "text" : "en" + } + } +} + +POST /test/_search +{ + "query" : { + "term" : { + "text" : "de" + } + } +} + +POST /test/_search +{ + "query" : { + "term" : { + "text" : "fr" + } + } +} +---- + +## Indexing language-detected text alongside with code + +Just indexing the language code is not enough in most cases. The language-detected text +should be passed to a specific analyzer to apply language-specific analysis. This plugin +allows that by the `language_to` parameter. + +[source] +---- +DELETE /test +PUT /test +{ + "mappings": { + "docs": { + "properties": { + "text": { + "type": "langdetect", + "languages": [ + "de", + "en", + "fr", + "nl", + "it" + ], + "language_to": { + "de": "german_field", + "en": "english_field" + } + }, + "german_field": { + "analyzer": "german", + "type": "string" + }, + "english_field": { + "analyzer": "english", + "type": "string" + } + } + } + } +} + +PUT /test/docs/1 +{ + "text" : "Oh, say can you see by the dawn`s early light, What so proudly we hailed at the twilight`s last gleaming?" +} + +POST /test/_search +{ + "query" : { + "match" : { + "english_field" : "light" + } + } +} +---- + +## Language code and `multi_field` + +Using multifields, it is possible to store the text alongside with the detected language(s). +Here, we use another (short nonsense) example text for demonstration, +which has more than one detected language code. + +[source] +---- +DELETE /test +PUT /test +{ + "mappings": { + "docs": { + "properties": { + "text": { + "type": "text", + "fields": { + "language": { + "type": "langdetect", + "languages": [ + "de", + "en", + "fr", + "nl", + "it" + ], + "store": true + } + } + } + } + } + } +} + +PUT /test/docs/1 +{ + "text" : "Oh, say can you see by the dawn`s early light, What so proudly we hailed at the twilight`s last gleaming?" 
+} + +POST /test/_search +{ + "query" : { + "match" : { + "text" : "light" + } + } +} + +POST /test/_search +{ + "query" : { + "match" : { + "text.language" : "en" + } + } +} +---- + +## Language detection ina binary field with `attachment` mapper plugin + +[source] +---- +DELETE /test +PUT /test +{ + "mappings": { + "docs": { + "properties": { + "text": { + "type" : "attachment", + "fields" : { + "content" : { + "type" : "text", + "fields" : { + "language" : { + "type" : "langdetect", + "binary" : true + } + } + } + } + } + } + } + } +} +---- + +On a shell, enter commands + +[source,bash] +---- +rm index.tmp +echo -n '{"content":"' >> index.tmp +echo "This is a very simple text in plain english" | base64 >> index.tmp +echo -n '"}' >> index.tmp +curl -XPOST --data-binary "@index.tmp" 'localhost:9200/test/docs/1' +rm index.tmp +---- + +[source] +---- +POST /test/_refresh + +POST /test/_search +{ + "query" : { + "match" : { + "content" : "very simple" + } + } +} + +POST /test/_search +{ + "query" : { + "match" : { + "content.language" : "en" + } + } +} +---- + +## Language detection REST API Example + + +[source] +---- +curl -XPOST 'localhost:9200/_langdetect?pretty' -d 'This is a test' +{ + "languages" : [ + { + "language" : "en", + "probability" : 0.9999972283490304 + } + ] +} +---- + +[source] +---- +curl -XPOST 'localhost:9200/_langdetect?pretty' -d 'Das ist ein Test' +{ + "languages" : [ + { + "language" : "de", + "probability" : 0.9999985460514316 + } + ] +} +---- + +[source] +---- +curl -XPOST 'localhost:9200/_langdetect?pretty' -d 'Datt isse ne test' +{ + "languages" : [ + { + "language" : "no", + "probability" : 0.5714275763833249 + }, + { + "language" : "nl", + "probability" : 0.28571402563882925 + }, + { + "language" : "de", + "probability" : 0.14285660343967294 + } + ] +} +---- + +## Use _langdetect endpoint from Sense + +[source] +---- +GET _langdetect +{ + "text": "das ist ein test" +} +---- + +## Change profile of language detection + +There is a "short text" profile which is better to detect languages in a few words. + +[source] +---- +curl -XPOST 'localhost:9200/_langdetect?pretty&profile=short-text' -d 'Das ist ein Test' +{ + "profile" : "/langdetect/short-text/", + "languages" : [ { + "language" : "de", + "probability" : 0.9999993070517024 + } ] +} +---- + +# Settings + +These settings can be used in `elasticsearch.yml` to modify language detection. + +Use with caution. You don't need to modify settings. This list is just for the sake of completeness. +For successful modification of the model parameters, you should study the source code +and be familiar with probabilistic matching using naive bayes with character n-gram. +See also Ted Dunning, +link:http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.48.1958[Statistical Identification of Language], 1994. + +|=== +|Name |Description +|`languages` | a comma-separated list of language codes such as (de,en,fr...) used to restrict (and speed up) the detection process +|`map.` | a substitution code for a language code +|`number_of_trials` | number of trials, affects CPU usage (default: 7) +|`alpha` | additional smoothing parameter, default: 0.5 +|`alpha_width` | the width of smoothing, default: 0.05 +|`iteration_limit` | safeguard to break loop, default: 10000 +|`prob_threshold` | default: 0.1 +|`conv_threshold` | detection is terminated when normalized probability exceeds +this threshold, default: 0.99999 +|`base_freq` | default 10000 +|=== + +## Issues + +All feedback is welcome! 
If you find issues, please post them at +link:https://github.com/jprante/elasticsearch-langdetect/issues[Github] + +# Credits + +Thanks to Alexander Reelsen for his OpenNLP plugin, from where I have copied and adapted the mapping type code. + +# License + +elasticsearch-langdetect - a language detection plugin for Elasticsearch + +Derived work of language-detection by Nakatani Shuyo http://code.google.com/p/language-detection/ + +Copyright (C) 2012 Jörg Prante + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +you may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +image:https://www.paypalobjects.com/en_US/i/btn/btn_donateCC_LG.gif[title="PayPal", link="https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=GVHFQYZ9WZ8HG"] diff --git a/README.md b/README.md deleted file mode 100644 index 72b10cb..0000000 --- a/README.md +++ /dev/null @@ -1,499 +0,0 @@ -![Image 'Tower of Babel'](https://upload.wikimedia.org/wikipedia/commons/thumb/2/29/Pieter_Bruegel_the_Elder_-_The_Tower_of_Babel_%28Rotterdam%29_-_Google_Art_Project.jpg/299px-Pieter_Bruegel_the_Elder_-_The_Tower_of_Babel_%28Rotterdam%29_-_Google_Art_Project.jpg) - -# Elasticsearch Langdetect Plugin - -This is an implementation of a plugin for [Elasticsearch](http://github.com/elasticsearch/elasticsearch) using the -implementation of Nakatani Shuyo's [language detector](http://code.google.com/p/language-detection/). - -It uses 3-gram character and a Bayesian filter with various normalizations and feature sampling. -The precision is over 99% for 53 languages. - -The plugin offers a mapping type to specify fields where you want to enable language detection. -Detected languages are indexed into a subfield of the field named 'lang', as you can see in the example. -The field can be queried for language codes. - -You can use the multi_field mapping type to combine this plugin with the attachment mapper plugin, to -enable language detection in base64-encoded binary data. Currently, UTF-8 texts are supported only. - -The plugin offers also a REST endpoint, where a short text can be posted to in UTF-8, and the plugin responds -with a list of recognized languages. 
- -Here is a list of languages code recognized: - -af -ar -bg -bn -cs -da -de -el -en -es -et -fa -fi -fr -gu -he -hi -hr -hu -id -it -ja -kn -ko -lt -lv -mk -ml -mr -ne -nl -no -pa -pl -pt -ro -ru -sk -sl -so -sq -sv -sw -ta -te -th -tl -tr -uk -ur -vi -zh-cn -zh-tw - - -## Versions - -![Travis](https://travis-ci.org/jprante/elasticsearch-langdetect.png) - -| Elasticsearch | Plugin | Release date | -| -------------- | -------------- | ------------ | -| 2.4.4 | 2.4.4.1 | Jan 25, 2017 | -| 2.3.3 | 2.3.3.0 | Jun 11, 2016 | -| 2.3.2 | 2.3.2.0 | Jun 11, 2016 | -| 2.3.1 | 2.3.1.0 | Apr 11, 2016 | -| 2.2.1 | 2.2.1.0 | Apr 11, 2016 | -| 2.2.0 | 2.2.0.2 | Mar 25, 2016 | -| 2.2.0 | 2.2.0.1 | Mar 6, 2016 | -| 2.1.1 | 2.1.1.0 | Dec 20, 2015 | -| 2.1.0 | 2.1.0.0 | Dec 15, 2015 | -| 2.0.1 | 2.0.1.0 | Dec 15, 2015 | -| 2.0.0 | 2.0.0.0 | Nov 12, 2015 | -| 2.0.0-beta2 | 2.0.0-beta2.0 | Sep 19, 2015 | -| 1.6.0 | 1.6.0.0 | Jul 1, 2015 | -| 1.4.0 | 1.4.4.2 | Apr 3, 2015 | -| 1.4.0 | 1.4.4.1 | Mar 4, 2015 | -| 1.4.0 | 1.4.0.2 | Nov 26, 2014 | -| 1.4.0 | 1.4.0.1 | Nov 20, 2014 | -| 1.4.0 | 1.4.0.0 | Nov 14, 2014 | -| 1.3.1 | 1.3.0.0 | Jul 30, 2014 | -| 1.2.1 | 1.2.1.1 | Jun 18, 2014 | - - -## Installation Elasticsearch 2.x - - ./bin/plugin install https://github.com/jprante/elasticsearch-langdetect/releases/download/2.4.4.1/elasticsearch-langdetect-2.4.4.1-plugin.zip - -## Installation Elasticsearch 1.x - - ./bin/plugin -install langdetect -url http://xbib.org/repository/org/xbib/elasticsearch/plugin/elasticsearch-langdetect/1.6.0.0/elasticsearch-langdetect-1.6.0.0-plugin.zip - -Do not forget to restart the node after installing. - -## Project docs - -The Maven project site is available at [Github](http://jprante.github.io/elasticsearch-langdetect) - -## Issues - -All feedback is welcome! If you find issues, please post them at [Github](https://github.com/jprante/elasticsearch-langdetect/issues) - -# Examples - -## A simple language detection example - -In this example, we create a simple detector field, and write text to it for detection. - - curl -XDELETE 'localhost:9200/test' - - curl -XPUT 'localhost:9200/test' - - curl -XPOST 'localhost:9200/test/article/_mapping' -d ' - { - "article" : { - "properties" : { - "langcode" : { - "type" : "langdetect", - "languages" : [ "de", "en", "fr", "nl", "it" ] - } - } - } - } - ' - - curl -XPUT 'localhost:9200/test/article/1' -d ' - { - "title" : "Some title", - "langcode" : "Oh, say can you see by the dawn`s early light, What so proudly we hailed at the twilight`s last gleaming?" - } - ' - - curl -XPUT 'localhost:9200/test/article/2' -d ' - { - "title" : "Ein Titel", - "langcode" : "Einigkeit und Recht und Freiheit für das deutsche Vaterland!" - } - ' - - curl -XPUT 'localhost:9200/test/article/3' -d ' - { - "title" : "Un titre", - "langcode" : "Allons enfants de la Patrie, Le jour de gloire est arrivé!" - } - ' - -A search for the detected language codes is a simple term query, like this: - - curl -XGET 'localhost:9200/test/_refresh' - - curl -XPOST 'localhost:9200/test/_search' -d ' - { - "query" : { - "term" : { - "langcode" : "en" - } - } - } - ' - curl -XPOST 'localhost:9200/test/_search' -d ' - { - "query" : { - "term" : { - "langcode" : "de" - } - } - } - ' - - curl -XPOST 'localhost:9200/test/_search' -d ' - { - "query" : { - "term" : { - "langcode" : "fr" - } - } - } - ' - -## Indexing language-detected text alongside with code - -Just indexing the language code is not eough in most cases. 
The language-detected text -should be passed to a specific analyzer to papply language-specific analysis. This plugin -allows that by the `language_to` parameter. - - curl -XDELETE 'localhost:9200/test' - - curl -XPUT 'localhost:9200/test' - - curl -XPOST 'localhost:9200/test/article/_mapping' -d ' - { - "article" : { - "properties" : { - "langcode":{ - "type" : "langdetect", - "languages" : [ "de", "en", "fr", "nl", "it" ], - "language_to" : { - "de": "german_field", - "en": "english_field" - } - }, - "german_field" : { - "analyzer" : "german", - "type": "string" - }, - "english_field" : { - "analyzer" : "english", - "type" : "string" - } - } - } - } - ' - - curl -XPUT 'localhost:9200/test/article/1' -d ' - { - "langcode" : "This is a small example for english text" - } - ' - - curl -XPOST 'localhost:9200/test/_search?pretty' -d ' - { - "query" : { - "match" : { - "english_field" : "This is a small example for english text" - } - } - } - ' - - -## Language code and `multi_field` - -Using multifields, it is possible to store the text alongside with the detected language(s). -Here, we use another (short nonsense) example text for demonstration, -which has more than one detected language code. - - curl -XDELETE 'localhost:9200/test' - - curl -XPUT 'localhost:9200/test' - - curl -XPOST 'localhost:9200/test/article/_mapping' -d ' - { - "article" : { - "properties" : { - "content" : { - "type" : "multi_field", - "fields" : { - "content" : { - "type" : "string" - }, - "language" : { - "type": "langdetect", - "store" : true - } - } - } - } - } - } - ' - - curl -XPUT 'localhost:9200/test/article/1' -d ' - { - "content" : "watt datt" - } - ' - - curl -XGET 'localhost:9200/test/_refresh' - - curl -XPOST 'localhost:9200/test/_search?pretty' -d ' - { - "fields" : "content.language", - "query" : { - "match" : { - "content" : "watt datt" - } - } - } - ' - -The result is - - { - "took" : 2, - "timed_out" : false, - "_shards" : { - "total" : 5, - "successful" : 5, - "failed" : 0 - }, - "hits" : { - "total" : 1, - "max_score" : 0.51623213, - "hits" : [ { - "_index" : "test", - "_type" : "article", - "_id" : "1", - "_score" : 0.51623213, - "fields" : { - "content.language" : [ "sv", "it", "nl" ] - } - } ] - } - } - - -## Language detection with attachment mapper plugin - - curl -XDELETE 'localhost:9200/test' - - curl -XPUT 'localhost:9200/test' -d ' - { - "mappings" : { - "_default_" : { - "properties" : { - "content" : { - "type" : "attachment", - "fields" : { - "content" : { - "type" : "multi_field", - "fields" : { - "content" : { - "type" : "string" - }, - "language" : { - "type" : "langdetect", - "binary" : true - } - } - } - } - } - } - } - } - } - ' - - rm index.tmp - echo -n '{"content":"' >> index.tmp - echo "This is a very simple text in plain english" | base64 >> index.tmp - echo -n '"}' >> index.tmp - curl -XPOST --data-binary "@index.tmp" 'localhost:9200/test/docs/1' - rm index.tmp - - curl -XPOST 'localhost:9200/test/_refresh' - - curl -XGET 'localhost:9200/test/docs/_mapping?pretty' - - curl -XPOST 'localhost:9200/test/docs/_search?pretty' -d ' - { - "query" : { - "match" : { - "content" : "very simple" - } - } - } - ' - - curl -XPOST 'localhost:9200/test/docs/_search?pretty' -d ' - { - "query" : { - "term" : { - "content.language" : "en" - } - } - } - ' - -## Language detection REST API Example - - curl -XPOST 'localhost:9200/_langdetect?pretty' -d 'This is a test' - { - "profile" : "/langdetect/", - "languages" : [ { - "language" : "en", - "probability" : 0.9999971603535163 - } ] - } - - 
curl -XPOST 'localhost:9200/_langdetect?pretty' -d 'Das ist ein Test' - { - "profile" : "/langdetect/", - "languages" : [ { - "language" : "de", - "probability" : 0.9999993070517024 - } ] - } - - curl -XPOST 'localhost:9200/_langdetect?pretty' -d 'Datt isse ne test' - { - "profile" : "/langdetect/", - "languages" : [ { - "language" : "no", - "probability" : 0.5714251911820175 - }, { - "language" : "de", - "probability" : 0.14285762298521493 - }, { - "language" : "it", - "probability" : 0.14285706984044144 - } ] - } - -## Use _langdetect endpoint from Sense - - GET _langdetect - { - "text": "das ist ein test" - } - - -## Change profile of language detection to "short text" profile - - curl -XPOST 'localhost:9200/_langdetect?pretty&profile=short-text' -d 'Das ist ein Test' - { - "profile" : "/langdetect/short-text/", - "languages" : [ { - "language" : "de", - "probability" : 0.9999993070517024 - } ] - } - - -# Settings - -These settings can be used in `elasticsearch.yml` to modify language detection. - -Use with caution. You don't need to modify settings. This list is just for the sake of completeness. -For successful modification of the model parameters, you should study the source code -and be familiar with probabilistic matching using naive bayes with character n-gram. -See also Ted Dunning, -[Statistical Identification of Language](http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.48.1958), 1994. - -`languages` - a comma-separated list of language codes such as (de,en,fr...) used to restrict (and speed up) the detection process - -`map.` - a substitution code for a language code - -`number_of_trials` - number of trials, affects CPU usage (default: 7) - -`alpha` - additional smoothing parameter, default: 0.5 - -`alpha_width` - the width of smoothing, default: 0.05 - -`iteration_limit` - safeguard to break loop, default: 10000 - -`prob_threshold` - default: 0.1 - -`conv_threshold` - detection is terminated when normalized probability exceeds -this threshold, default: 0.99999 - -`base_freq` - default 10000 - -# Credits - -Thanks to Alexander Reelsen for his OpenNLP plugin, from where I have copied and -adapted the mapping type analysis code. - -# License - -Elasticsearch Langdetect Plugin - -Derived work of language-detection by Nakatani Shuyo http://code.google.com/p/language-detection/ - -Copyright (C) 2012 Jörg Prante - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -you may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
\ No newline at end of file diff --git a/build.gradle b/build.gradle index c503a01..3037448 100644 --- a/build.gradle +++ b/build.gradle @@ -1,4 +1,3 @@ - plugins { id "org.sonarqube" version "2.2" id "org.xbib.gradle.plugin.asciidoctor" version "1.5.4.1.0" @@ -27,9 +26,8 @@ ext { scmConnection = 'scm:git:git://github.com/' + user + '/' + name + '.git' scmDeveloperConnection = 'scm:git:git://github.com/' + user + '/' + name + '.git' versions = [ - 'elasticsearch' : '2.4.4', - 'jackson': '2.6.2', - 'log4j': '2.5', + 'elasticsearch' : '5.1.2', + 'log4j': '2.7', 'junit' : '4.12' ] } @@ -48,48 +46,38 @@ repositories { mavenCentral() } -sourceSets { - integrationTest { - java { - compileClasspath += main.output + test.output - runtimeClasspath += main.output + test.output - srcDir file('src/integration-test/java') - } - resources.srcDir file('src/integration-test/resources') - } -} configurations { + asciidoclet wagon - integrationTestCompile.extendsFrom testCompile - integrationTestRuntime.extendsFrom testRuntime - releaseJars { + distJars { extendsFrom runtime exclude group: 'org.elasticsearch' + exclude module: 'lucene-core' + exclude module: 'lucene-analyzers-common' exclude module: 'jna' exclude module: 'jackson-core' exclude module: 'jackson-dataformat-smile' exclude module: 'jackson-dataformat-yaml' + exclude module: 'jflex' } } dependencies { compile "org.elasticsearch:elasticsearch:${versions.elasticsearch}" - compile "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}" + compileOnly "org.apache.logging.log4j:log4j-core:${versions.log4j}" testCompile "junit:junit:${versions.junit}" - testCompile "org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}" testCompile "org.apache.logging.log4j:log4j-core:${versions.log4j}" - integrationTestCompile "junit:junit:${versions.junit}" - integrationTestCompile "org.elasticsearch:elasticsearch:${versions.elasticsearch}" - releaseJars "${project.group}:${project.name}:${project.version}" + asciidoclet 'org.asciidoctor:asciidoclet:1.5.4' wagon 'org.apache.maven.wagon:wagon-ssh-external:2.10' + distJars "${project.group}:${project.name}:${project.version}" } -sourceCompatibility = JavaVersion.VERSION_1_7 -targetCompatibility = JavaVersion.VERSION_1_7 +sourceCompatibility = JavaVersion.VERSION_1_8 +targetCompatibility = JavaVersion.VERSION_1_8 [compileJava, compileTestJava]*.options*.encoding = 'UTF-8' tasks.withType(JavaCompile) { - options.compilerArgs << "-Xlint:all" + options.compilerArgs << "-Xlint:all" << "-profile" << "compact2" } @@ -99,6 +87,7 @@ test { showStandardStreams = false exceptionFormat = 'full' } + reports.html.destination = "docs/test" } task makePluginDescriptor(type: Copy) { @@ -120,38 +109,48 @@ task makePluginDescriptor(type: Copy) { } task buildPluginZip(type: Zip, dependsOn: [':jar', ':makePluginDescriptor']) { - from configurations.releaseJars + from configurations.distJars from 'build/tmp/plugin' + into 'elasticsearch' classifier 'plugin' } task unpackPlugin(type: Copy, dependsOn: [':buildPluginZip']) { delete "plugins" - from configurations.releaseJars + from configurations.distJars from 'build/tmp/plugin' into "plugins/${pluginName}" } -task integrationTest(type: Test, dependsOn: ['unpackPlugin']) { - testClassesDir = sourceSets.integrationTest.output.classesDir - classpath = configurations.integrationTestCompile - classpath += fileTree("plugins/${pluginName}").include('*.jar') - classpath += sourceSets.integrationTest.output - // without this trick to remove identical jars from classpath, an 
Elasticsearch bug whines about a "jar hell" - classpath -= configurations.releaseJars - outputs.upToDateWhen { false } - systemProperty 'path.home', projectDir.absolutePath - testLogging.showStandardStreams = false -} - -integrationTest.mustRunAfter test -check.dependsOn integrationTest - clean { delete "plugins" delete "logs" } +asciidoctor { + backends 'html5' + outputDir = file('docs') + separateOutputDirs = false + attributes 'source-highlighter': 'coderay', + toc : '', + idprefix : '', + idseparator : '-', + stylesheet: "${projectDir}/src/docs/asciidoc/css/foundation.css" +} + +javadoc { + options.docletpath = configurations.asciidoclet.files.asType(List) + options.doclet = 'org.asciidoctor.Asciidoclet' + options.overview = "src/docs/asciidoclet/overview.adoc" + options.addStringOption "-base-dir", "${projectDir}" + options.addStringOption "-attribute", + "name=${project.name},version=${project.version},title-link=https://github.com/${user}/${project.name}" + options.destinationDirectory(file("${projectDir}/docs/javadoc")) + configure(options) { + noTimestamp = true + } +} + task javadocJar(type: Jar, dependsOn: classes) { from javadoc into "build/tmp" diff --git a/docs/javadoc/allclasses-frame.html b/docs/javadoc/allclasses-frame.html new file mode 100644 index 0000000..9259a8f --- /dev/null +++ b/docs/javadoc/allclasses-frame.html @@ -0,0 +1,35 @@ + + + + + +All Classes (elasticsearch-langdetect 5.1.2.0 API) + + + + +

All Classes

+ + + diff --git a/docs/javadoc/allclasses-noframe.html b/docs/javadoc/allclasses-noframe.html new file mode 100644 index 0000000..41b7ee7 --- /dev/null +++ b/docs/javadoc/allclasses-noframe.html @@ -0,0 +1,35 @@ + + + + + +All Classes (elasticsearch-langdetect 5.1.2.0 API) + + + + +

All Classes

+ + + diff --git a/docs/javadoc/coderay-asciidoctor.css b/docs/javadoc/coderay-asciidoctor.css new file mode 100644 index 0000000..ce7c72e --- /dev/null +++ b/docs/javadoc/coderay-asciidoctor.css @@ -0,0 +1,89 @@ +/* Stylesheet for CodeRay to match GitHub theme | MIT License | http://foundation.zurb.com */ +/*pre.CodeRay {background-color:#f7f7f8;}*/ +.CodeRay .line-numbers{border-right:1px solid #d8d8d8;padding:0 0.5em 0 .25em} +.CodeRay span.line-numbers{display:inline-block;margin-right:.5em;color:rgba(0,0,0,.3)} +.CodeRay .line-numbers strong{font-weight: normal} +table.CodeRay{border-collapse:separate;border-spacing:0;margin-bottom:0;border:0;background:none} +table.CodeRay td{vertical-align: top} +table.CodeRay td.line-numbers{text-align:right} +table.CodeRay td.line-numbers>pre{padding:0;color:rgba(0,0,0,.3)} +table.CodeRay td.code{padding:0 0 0 .5em} +table.CodeRay td.code>pre{padding:0} +.CodeRay .debug{color:#fff !important;background:#000080 !important} +.CodeRay .annotation{color:#007} +.CodeRay .attribute-name{color:#000080} +.CodeRay .attribute-value{color:#700} +.CodeRay .binary{color:#509} +.CodeRay .comment{color:#998;font-style:italic} +.CodeRay .char{color:#04d} +.CodeRay .char .content{color:#04d} +.CodeRay .char .delimiter{color:#039} +.CodeRay .class{color:#458;font-weight:bold} +.CodeRay .complex{color:#a08} +.CodeRay .constant,.CodeRay .predefined-constant{color:#008080} +.CodeRay .color{color:#099} +.CodeRay .class-variable{color:#369} +.CodeRay .decorator{color:#b0b} +.CodeRay .definition{color:#099} +.CodeRay .delimiter{color:#000} +.CodeRay .doc{color:#970} +.CodeRay .doctype{color:#34b} +.CodeRay .doc-string{color:#d42} +.CodeRay .escape{color:#666} +.CodeRay .entity{color:#800} +.CodeRay .error{color:#808} +.CodeRay .exception{color:inherit} +.CodeRay .filename{color:#099} +.CodeRay .function{color:#900;font-weight:bold} +.CodeRay .global-variable{color:#008080} +.CodeRay .hex{color:#058} +.CodeRay .integer,.CodeRay .float{color:#099} +.CodeRay .include{color:#555} +.CodeRay .inline{color:#00} +.CodeRay .inline .inline{background:#ccc} +.CodeRay .inline .inline .inline{background:#bbb} +.CodeRay .inline .inline-delimiter{color:#d14} +.CodeRay .inline-delimiter{color:#d14} +.CodeRay .important{color:#555;font-weight:bold} +.CodeRay .interpreted{color:#b2b} +.CodeRay .instance-variable{color:#008080} +.CodeRay .label{color:#970} +.CodeRay .local-variable{color:#963} +.CodeRay .octal{color:#40e} +.CodeRay .predefined{color:#369} +.CodeRay .preprocessor{color:#579} +.CodeRay .pseudo-class{color:#555} +.CodeRay .directive{font-weight:bold} +.CodeRay .type{font-weight:bold} +.CodeRay .predefined-type{color:inherit} +.CodeRay .reserved,.CodeRay .keyword {color:#000;font-weight:bold} +.CodeRay .key{color:#808} +.CodeRay .key .delimiter{color:#606} +.CodeRay .key .char{color:#80f} +.CodeRay .value{color:#088} +.CodeRay .regexp .delimiter{color:#808} +.CodeRay .regexp .content{color:#808} +.CodeRay .regexp .modifier{color:#808} +.CodeRay .regexp .char{color:#d14} +.CodeRay .regexp .function{color:#404;font-weight:bold} +.CodeRay .string{color:#d20} +.CodeRay .string .string .string{background:#ffd0d0} +.CodeRay .string .content{color:#d14} +.CodeRay .string .char{color:#d14} +.CodeRay .string .delimiter{color:#d14} +.CodeRay .shell{color:#d14} +.CodeRay .shell .delimiter{color:#d14} +.CodeRay .symbol{color:#990073} +.CodeRay .symbol .content{color:#a60} +.CodeRay .symbol .delimiter{color:#630} +.CodeRay .tag{color:#008080} +.CodeRay .tag-special{color:#d70} +.CodeRay 
.variable{color:#036} +.CodeRay .insert{background:#afa} +.CodeRay .delete{background:#faa} +.CodeRay .change{color:#aaf;background:#007} +.CodeRay .head{color:#f8f;background:#505} +.CodeRay .insert .insert{color:#080} +.CodeRay .delete .delete{color:#800} +.CodeRay .change .change{color:#66f} +.CodeRay .head .head{color:#f4f} diff --git a/docs/javadoc/constant-values.html b/docs/javadoc/constant-values.html new file mode 100644 index 0000000..582f885 --- /dev/null +++ b/docs/javadoc/constant-values.html @@ -0,0 +1,192 @@ + + + + + +Constant Field Values (elasticsearch-langdetect 5.1.2.0 API) + + + + + + + + + + +
+

Constant Field Values

+

Contents

+ +
+
+ + +

org.xbib.*

+
    +
  • + + + + + + + + + + + + + + +
    org.xbib.elasticsearch.action.langdetect.LangdetectAction 
    Modifier and Type                      Constant Field   Value
    public static final java.lang.String   NAME             "langdetect"
    +
  • +
+
    +
  • + + + + + + + + + + + + + + +
    org.xbib.elasticsearch.common.langdetect.NGram 
    Modifier and Type         Constant Field   Value
    public static final int   N_GRAM           3
    +
  • +
+
    +
  • + + + + + + + + + + + + + + +
    org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper 
    Modifier and Type                      Constant Field   Value
    public static final java.lang.String   MAPPER_TYPE      "langdetect"
    +
  • +
+
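These constants are the values a user actually encounters when wiring up the plugin: the action name, the n-gram size, and the mapper type. A minimal sketch, assuming the plugin jar is on the classpath, of referencing them from Java:

    import org.xbib.elasticsearch.action.langdetect.LangdetectAction;
    import org.xbib.elasticsearch.common.langdetect.NGram;
    import org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper;

    public final class ConstantsDemo {
        public static void main(String[] args) {
            // prints the public constants listed in the tables above
            System.out.println(LangdetectAction.NAME);        // "langdetect"
            System.out.println(NGram.N_GRAM);                 // 3
            System.out.println(LangdetectMapper.MAPPER_TYPE); // "langdetect"
        }
    }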
+ + + + + + diff --git a/docs/javadoc/deprecated-list.html b/docs/javadoc/deprecated-list.html new file mode 100644 index 0000000..2f92db8 --- /dev/null +++ b/docs/javadoc/deprecated-list.html @@ -0,0 +1,121 @@ + + + + + +Deprecated List (elasticsearch-langdetect 5.1.2.0 API) + + + + + + + +
+ + + + + + + +
+ + +
+

Deprecated API

+

Contents

+
+ +
+ + + + + + + +
+ + + + diff --git a/docs/javadoc/help-doc.html b/docs/javadoc/help-doc.html new file mode 100644 index 0000000..0c30ac8 --- /dev/null +++ b/docs/javadoc/help-doc.html @@ -0,0 +1,222 @@ + + + + + +API Help (elasticsearch-langdetect 5.1.2.0 API) + + + + + + + +
+ + + + + + + +
+ + +
+

How This API Document Is Organized

+
This API (Application Programming Interface) document has pages corresponding to the items in the navigation bar, described as follows.
+
+
+
    +
  • +

    Overview

    +

    The Overview page is the front page of this API document and provides a list of all packages with a summary for each. This page can also contain an overall description of the set of packages.

    +
  • +
  • +

    Package

    +

    Each package has a page that contains a list of its classes and interfaces, with a summary for each. This page can contain six categories:

    +
      +
    • Interfaces (italic)
    • +
    • Classes
    • +
    • Enums
    • +
    • Exceptions
    • +
    • Errors
    • +
    • Annotation Types
    • +
    +
  • +
  • +

    Class/Interface

    +

    Each class, interface, nested class and nested interface has its own separate page. Each of these pages has three sections consisting of a class/interface description, summary tables, and detailed member descriptions:

    +
      +
    • Class inheritance diagram
    • +
    • Direct Subclasses
    • +
    • All Known Subinterfaces
    • +
    • All Known Implementing Classes
    • +
    • Class/interface declaration
    • +
    • Class/interface description
    • +
    +
      +
    • Nested Class Summary
    • +
    • Field Summary
    • +
    • Constructor Summary
    • +
    • Method Summary
    • +
    +
      +
    • Field Detail
    • +
    • Constructor Detail
    • +
    • Method Detail
    • +
    +

    Each summary entry contains the first sentence from the detailed description for that item. The summary entries are alphabetical, while the detailed descriptions are in the order they appear in the source code. This preserves the logical groupings established by the programmer.

    +
  • +
  • +

    Annotation Type

    +

    Each annotation type has its own separate page with the following sections:

    +
      +
    • Annotation Type declaration
    • +
    • Annotation Type description
    • +
    • Required Element Summary
    • +
    • Optional Element Summary
    • +
    • Element Detail
    • +
    +
  • +
  • +

    Enum

    +

    Each enum has its own separate page with the following sections:

    +
      +
    • Enum declaration
    • +
    • Enum description
    • +
    • Enum Constant Summary
    • +
    • Enum Constant Detail
    • +
    +
  • +
  • +

    Tree (Class Hierarchy)

    +

    There is a Class Hierarchy page for all packages, plus a hierarchy for each package. Each hierarchy page contains a list of classes and a list of interfaces. The classes are organized by inheritance structure starting with java.lang.Object. The interfaces do not inherit from java.lang.Object.

    +
      +
    • When viewing the Overview page, clicking on "Tree" displays the hierarchy for all packages.
    • +
    • When viewing a particular package, class or interface page, clicking "Tree" displays the hierarchy for only that package.
    • +
    +
  • +
  • +

    Deprecated API

    +

    The Deprecated API page lists all of the API that have been deprecated. A deprecated API is not recommended for use, generally due to improvements, and a replacement API is usually given. Deprecated APIs may be removed in future implementations.

    +
  • +
  • +

    Index

    +

    The Index contains an alphabetic list of all classes, interfaces, constructors, methods, and fields.

    +
  • +
  • +

    Prev/Next

    +

    These links take you to the next or previous class, interface, package, or related page.

    +
  • +
  • +

    Frames/No Frames

    +

    These links show and hide the HTML frames. All pages are available with or without frames.

    +
  • +
  • +

    All Classes

    +

    The All Classes link shows all classes and interfaces except non-static nested types.

    +
  • +
  • +

    Serialized Form

    +

    Each serializable or externalizable class has a description of its serialization fields and methods. This information is of interest to re-implementors, not to developers using the API. While there is no link in the navigation bar, you can get to this information by going to any serialized class and clicking "Serialized Form" in the "See also" section of the class description.

    +
  • +
  • +

    Constant Field Values

    +

    The Constant Field Values page lists the static final fields and their values.

    +
  • +
+This help file applies to API documentation generated using the standard doclet.
+ +
+ + + + + + + +
+ + + + diff --git a/docs/javadoc/index-all.html b/docs/javadoc/index-all.html new file mode 100644 index 0000000..db2dcec --- /dev/null +++ b/docs/javadoc/index-all.html @@ -0,0 +1,452 @@ + + + + + +Index (elasticsearch-langdetect 5.1.2.0 API) + + + + + + + +
+ + + + + + + +
+ + +
A B C D G I L M N O P R S T V W  + + +

A

+
+
add(String) - Method in class org.xbib.elasticsearch.common.langdetect.LangProfile
+
 
+
add(String, String) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo.Builder
+
 
+
add(Map<String, Object>) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo.Builder
+
 
+
addChar(char) - Method in class org.xbib.elasticsearch.common.langdetect.NGram
+
 
+
addProfile(LangProfile, int, int) - Method in class org.xbib.elasticsearch.common.langdetect.LangdetectService
+
 
+
alpha(double) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
alphaWidth(double) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
+ + + +

B

+
+
baseFreq(int) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
binary(boolean) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
build(Mapper.BuilderContext) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
build() - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo.Builder
+
 
+
Builder(String) - Constructor for class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
builder() - Static method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo
+
 
+
Builder() - Constructor for class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo.Builder
+
 
+
+ + + +

C

+
+
contentType() - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper
+
 
+
convThreshold(double) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
+ + + +

D

+
+
Defaults() - Constructor for class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Defaults
+
 
+
detectAll(String) - Method in class org.xbib.elasticsearch.common.langdetect.LangdetectService
+
 
+
doExecute(LangdetectRequest, ActionListener<LangdetectResponse>) - Method in class org.xbib.elasticsearch.action.langdetect.TransportLangdetectAction
+
 
+
doXContentBody(XContentBuilder, boolean, ToXContent.Params) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper
+
 
+
+ + + +

G

+
+
get(int) - Method in class org.xbib.elasticsearch.common.langdetect.NGram
+
 
+
getActions() - Method in class org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin
+
 
+
getFreq() - Method in class org.xbib.elasticsearch.common.langdetect.LangProfile
+
 
+
getLanguage() - Method in class org.xbib.elasticsearch.common.langdetect.Language
+
 
+
getLanguages() - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectResponse
+
 
+
getMappers() - Method in class org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin
+
 
+
getName() - Method in class org.xbib.elasticsearch.common.langdetect.LangProfile
+
 
+
getNWords() - Method in class org.xbib.elasticsearch.common.langdetect.LangProfile
+
 
+
getProbability() - Method in class org.xbib.elasticsearch.common.langdetect.Language
+
 
+
getProfile() - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectRequest
+
 
+
getProfile() - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectResponse
+
 
+
getProfile() - Method in class org.xbib.elasticsearch.common.langdetect.LangdetectService
+
 
+
getRestHandlers() - Method in class org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin
+
 
+
getSettings() - Method in class org.xbib.elasticsearch.common.langdetect.LangdetectService
+
 
+
getText() - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectRequest
+
 
+
+ + + +

I

+
+
INSTANCE - Static variable in class org.xbib.elasticsearch.action.langdetect.LangdetectAction
+
 
+
iterationLimit(int) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
+ + + +

L

+
+
LANG_FIELD_TYPE - Static variable in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Defaults
+
 
+
LangdetectAction - Class in org.xbib.elasticsearch.action.langdetect
+
 
+
LangdetectMapper - Class in org.xbib.elasticsearch.index.mapper.langdetect
+
 
+
LangdetectMapper(String, TextFieldMapper.TextFieldType, MappedFieldType, int, Settings, FieldMapper.MultiFields, FieldMapper.CopyTo, LangdetectMapper.LanguageTo, LangdetectService) - Constructor for class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper
+
 
+
LangdetectMapper.Builder - Class in org.xbib.elasticsearch.index.mapper.langdetect
+
 
+
LangdetectMapper.Defaults - Class in org.xbib.elasticsearch.index.mapper.langdetect
+
 
+
LangdetectMapper.LanguageTo - Class in org.xbib.elasticsearch.index.mapper.langdetect
+
 
+
LangdetectMapper.LanguageTo.Builder - Class in org.xbib.elasticsearch.index.mapper.langdetect
+
 
+
LangdetectMapper.TypeParser - Class in org.xbib.elasticsearch.index.mapper.langdetect
+
 
+
LangdetectPlugin - Class in org.xbib.elasticsearch.plugin.langdetect
+
 
+
LangdetectPlugin() - Constructor for class org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin
+
 
+
LangdetectRequest - Class in org.xbib.elasticsearch.action.langdetect
+
 
+
LangdetectRequest() - Constructor for class org.xbib.elasticsearch.action.langdetect.LangdetectRequest
+
 
+
LangdetectRequestBuilder - Class in org.xbib.elasticsearch.action.langdetect
+
 
+
LangdetectRequestBuilder(ElasticsearchClient) - Constructor for class org.xbib.elasticsearch.action.langdetect.LangdetectRequestBuilder
+
 
+
LangdetectResponse - Class in org.xbib.elasticsearch.action.langdetect
+
 
+
LangdetectResponse() - Constructor for class org.xbib.elasticsearch.action.langdetect.LangdetectResponse
+
 
+
LangdetectService - Class in org.xbib.elasticsearch.common.langdetect
+
 
+
LangdetectService() - Constructor for class org.xbib.elasticsearch.common.langdetect.LangdetectService
+
 
+
LangdetectService(Settings) - Constructor for class org.xbib.elasticsearch.common.langdetect.LangdetectService
+
 
+
LangdetectService(Settings, String) - Constructor for class org.xbib.elasticsearch.common.langdetect.LangdetectService
+
 
+
LangProfile - Class in org.xbib.elasticsearch.common.langdetect
+
 
+
LangProfile() - Constructor for class org.xbib.elasticsearch.common.langdetect.LangProfile
+
 
+
Language - Class in org.xbib.elasticsearch.common.langdetect
+
 
+
Language(String, double) - Constructor for class org.xbib.elasticsearch.common.langdetect.Language
+
 
+
LanguageDetectionException - Exception in org.xbib.elasticsearch.common.langdetect
+
 
+
LanguageDetectionException(String) - Constructor for exception org.xbib.elasticsearch.common.langdetect.LanguageDetectionException
+
 
+
languages(String[]) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
languageTo - Variable in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
languageTo(LangdetectMapper.LanguageTo) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
languageToFields() - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo
+
 
+
loadProfileFromResource(String, int, int) - Method in class org.xbib.elasticsearch.common.langdetect.LangdetectService
+
 
+
+ + + +

M

+
+
map(Map<String, Object>) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
MAPPER_TYPE - Static variable in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper
+
 
+
max(int) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
+ + + +

N

+
+
N_GRAM - Static variable in class org.xbib.elasticsearch.common.langdetect.NGram
+
 
+
NAME - Static variable in class org.xbib.elasticsearch.action.langdetect.LangdetectAction
+
 
+
newRequestBuilder(ElasticsearchClient) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectAction
+
 
+
newResponse() - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectAction
+
 
+
NGram - Class in org.xbib.elasticsearch.common.langdetect
+
 
+
NGram() - Constructor for class org.xbib.elasticsearch.common.langdetect.NGram
+
 
+
normalize(char) - Static method in class org.xbib.elasticsearch.common.langdetect.NGram
+
 
+
ntrials(int) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
+ + + +

O

+
+
org.xbib.elasticsearch.action.langdetect - package org.xbib.elasticsearch.action.langdetect
+
 
+
org.xbib.elasticsearch.common.langdetect - package org.xbib.elasticsearch.common.langdetect
+
 
+
org.xbib.elasticsearch.index.mapper.langdetect - package org.xbib.elasticsearch.index.mapper.langdetect
+
 
+
org.xbib.elasticsearch.plugin.langdetect - package org.xbib.elasticsearch.plugin.langdetect
+
 
+
org.xbib.elasticsearch.rest.action.langdetect - package org.xbib.elasticsearch.rest.action.langdetect
+
 
+
+ + + +

P

+
+
parse(String, Map<String, Object>, Mapper.TypeParser.ParserContext) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.TypeParser
+
 
+
parseCreateField(ParseContext, List<Field>) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper
+
 
+
pattern(String) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
positionIncrementGap - Variable in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
positionIncrementGap(int) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
prepareRequest(RestRequest, NodeClient) - Method in class org.xbib.elasticsearch.rest.action.langdetect.RestLangdetectAction
+
 
+
probThreshold(double) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
profile(String) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
+ + + +

R

+
+
read(InputStream) - Method in class org.xbib.elasticsearch.common.langdetect.LangProfile
+
 
+
readFrom(StreamInput) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectRequest
+
 
+
readFrom(StreamInput) - Method in class org.xbib.elasticsearch.common.langdetect.Language
+
 
+
RestLangdetectAction - Class in org.xbib.elasticsearch.rest.action.langdetect
+
 
+
RestLangdetectAction(Settings, RestController) - Constructor for class org.xbib.elasticsearch.rest.action.langdetect.RestLangdetectAction
+
 
+
+ + + +

S

+
+
searchAnalyzer(NamedAnalyzer) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
searchQuotedAnalyzer(NamedAnalyzer) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
setFreq(Map<String, Integer>) - Method in class org.xbib.elasticsearch.common.langdetect.LangProfile
+
 
+
setLanguages(List<Language>) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectResponse
+
 
+
setName(String) - Method in class org.xbib.elasticsearch.common.langdetect.LangProfile
+
 
+
setProfile(String) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectRequest
+
 
+
setProfile(String) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectRequestBuilder
+
 
+
setProfile(String) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectResponse
+
 
+
setText(String) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectRequest
+
 
+
setText(String) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectRequestBuilder
+
 
+
settingsBuilder - Variable in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
+
 
+
status() - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectResponse
+
 
+
+ + + +

T

+
+
toString() - Method in class org.xbib.elasticsearch.common.langdetect.Language
+
 
+
toXContent(XContentBuilder, ToXContent.Params) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectResponse
+
 
+
toXContent(XContentBuilder, ToXContent.Params) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo
+
 
+
TransportLangdetectAction - Class in org.xbib.elasticsearch.action.langdetect
+
 
+
TransportLangdetectAction(Settings, ThreadPool, ActionFilters, IndexNameExpressionResolver, TransportService) - Constructor for class org.xbib.elasticsearch.action.langdetect.TransportLangdetectAction
+
 
+
TypeParser() - Constructor for class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.TypeParser
+
 
+
+ + + +

V

+
+
validate() - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectRequest
+
 
+
+ + + +

W

+
+
writeTo(StreamOutput) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectRequest
+
 
+
writeTo(StreamOutput) - Method in class org.xbib.elasticsearch.common.langdetect.Language
+
 
+
+A B C D G I L M N O P R S T V W 
+ +
+ + + + + + + +
+ + + + diff --git a/docs/javadoc/index.html b/docs/javadoc/index.html new file mode 100644 index 0000000..6656182 --- /dev/null +++ b/docs/javadoc/index.html @@ -0,0 +1,75 @@ + + + + + +elasticsearch-langdetect 5.1.2.0 API + + + + + + + + + +<noscript> +<div>JavaScript is disabled on your browser.</div> +</noscript> +<h2>Frame Alert</h2> +<p>This document is designed to be viewed using the frames feature. If you see this message, you are using a non-frame-capable web client. Link to <a href="overview-summary.html">Non-frame version</a>.</p> + + + diff --git a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectAction.html b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectAction.html new file mode 100644 index 0000000..2c906dc --- /dev/null +++ b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectAction.html @@ -0,0 +1,325 @@ + + + + + +LangdetectAction (elasticsearch-langdetect 5.1.2.0 API) + + + + + + + + + + + +
+
org.xbib.elasticsearch.action.langdetect
+

Class LangdetectAction

+
+
+ +
+ +
+
+ +
+
+ +
+
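The members indexed for this class (the INSTANCE singleton, NAME, newRequestBuilder(ElasticsearchClient) and newResponse()) follow the usual Elasticsearch action pattern. A minimal sketch of invoking the action that way; the client instance and the response generic binding are assumptions:

    import org.elasticsearch.client.ElasticsearchClient;
    import org.xbib.elasticsearch.action.langdetect.LangdetectAction;
    import org.xbib.elasticsearch.action.langdetect.LangdetectResponse;

    public final class DetectViaAction {
        // "client" is assumed to be any available ElasticsearchClient (node or transport client)
        public static LangdetectResponse detect(ElasticsearchClient client, String text) {
            return LangdetectAction.INSTANCE
                    .newRequestBuilder(client)
                    .setText(text)
                    .get();                 // blocking variant inherited from ActionRequestBuilder
        }
    }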
+ + + + + + + diff --git a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectRequest.html b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectRequest.html new file mode 100644 index 0000000..53f0d30 --- /dev/null +++ b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectRequest.html @@ -0,0 +1,423 @@ + + + + + +LangdetectRequest (elasticsearch-langdetect 5.1.2.0 API) + + + + + + + + + + + +
+
org.xbib.elasticsearch.action.langdetect
+

Class LangdetectRequest

+
+
+
    +
  • java.lang.Object
  • +
  • +
      +
    • org.elasticsearch.transport.TransportMessage
    • +
    • +
        +
      • org.elasticsearch.transport.TransportRequest
      • +
      • +
          +
        • org.elasticsearch.action.ActionRequest
        • +
        • +
            +
          • org.xbib.elasticsearch.action.langdetect.LangdetectRequest
          • +
          +
        • +
        +
      • +
      +
    • +
    +
  • +
+
+
    +
  • +
    +
    All Implemented Interfaces:
    +
    org.elasticsearch.common.io.stream.Streamable
    +
    +
    +
    +
    public class LangdetectRequest
    +extends org.elasticsearch.action.ActionRequest
    +
  • +
+
+
+
    +
  • + +
      +
    • + + +

      Nested Class Summary

      +
        +
      • + + +

        Nested classes/interfaces inherited from class org.elasticsearch.transport.TransportRequest

        +org.elasticsearch.transport.TransportRequest.Empty
      • +
      +
    • +
    + +
      +
    • + + +

      Constructor Summary

      + + + + + + + + +
      Constructors 
      Constructor and Description
      LangdetectRequest() 
      +
    • +
    + +
      +
    • + + +

      Method Summary

      + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
      All Methods Instance Methods Concrete Methods 
      Modifier and Type                                           Method and Description
      java.lang.String                                            getProfile()
      java.lang.String                                            getText()
      void                                                        readFrom(org.elasticsearch.common.io.stream.StreamInput in)
      LangdetectRequest                                           setProfile(java.lang.String profile)
      LangdetectRequest                                           setText(java.lang.String text)
      org.elasticsearch.action.ActionRequestValidationException   validate()
      void                                                        writeTo(org.elasticsearch.common.io.stream.StreamOutput out)
      +
        +
      • + + +

        Methods inherited from class org.elasticsearch.action.ActionRequest

        +getShouldStoreResult
      • +
      +
        +
      • + + +

        Methods inherited from class org.elasticsearch.transport.TransportRequest

        +createTask, getDescription, getParentTask, setParentTask, setParentTask
      • +
      +
        +
      • + + +

        Methods inherited from class org.elasticsearch.transport.TransportMessage

        +remoteAddress, remoteAddress
      • +
      +
        +
      • + + +

        Methods inherited from class java.lang.Object

        +clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
      • +
      +
    • +
    +
  • +
+
+
+
    +
  • + +
      +
    • + + +

      Constructor Detail

      + + + +
        +
      • +

        LangdetectRequest

        +
        public LangdetectRequest()
        +
      • +
      +
    • +
    + +
      +
    • + + +

      Method Detail

      + + + +
        +
      • +

        validate

        +
        public org.elasticsearch.action.ActionRequestValidationException validate()
        +
        +
        Specified by:
        +
        validate in class org.elasticsearch.action.ActionRequest
        +
        +
      • +
      + + + +
        +
      • +

        getProfile

        +
        public java.lang.String getProfile()
        +
      • +
      + + + + + + + +
        +
      • +

        getText

        +
        public java.lang.String getText()
        +
      • +
      + + + + + + + +
        +
      • +

        readFrom

        +
        public void readFrom(org.elasticsearch.common.io.stream.StreamInput in)
        +              throws java.io.IOException
        +
        +
        Specified by:
        +
        readFrom in interface org.elasticsearch.common.io.stream.Streamable
        +
        Overrides:
        +
        readFrom in class org.elasticsearch.action.ActionRequest
        +
        Throws:
        +
        java.io.IOException
        +
        +
      • +
      + + + +
        +
      • +

        writeTo

        +
        public void writeTo(org.elasticsearch.common.io.stream.StreamOutput out)
        +             throws java.io.IOException
        +
        +
        Specified by:
        +
        writeTo in interface org.elasticsearch.common.io.stream.Streamable
        +
        Overrides:
        +
        writeTo in class org.elasticsearch.action.ActionRequest
        +
        Throws:
        +
        java.io.IOException
        +
        +
      • +
      +
    • +
    +
  • +
+
+
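The setters above return the request itself, so a request can be built fluently and checked with validate(). A small sketch; the sample text is arbitrary, and validate() returning null on a well-formed request follows the usual ActionRequest convention:

    import org.xbib.elasticsearch.action.langdetect.LangdetectRequest;

    public final class RequestDemo {
        public static void main(String[] args) {
            LangdetectRequest request = new LangdetectRequest()
                    .setText("Guten Tag, wie geht es Ihnen?");
            // null means "no validation errors"
            System.out.println(request.validate() == null ? "valid" : "invalid");
            System.out.println(request.getText());
        }
    }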
+ + + + + + + diff --git a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectRequestBuilder.html b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectRequestBuilder.html new file mode 100644 index 0000000..3596489 --- /dev/null +++ b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectRequestBuilder.html @@ -0,0 +1,308 @@ + + + + + +LangdetectRequestBuilder (elasticsearch-langdetect 5.1.2.0 API) + + + + + + + + + + + +
+
org.xbib.elasticsearch.action.langdetect
+

Class LangdetectRequestBuilder

+
+
+ +
+ +
+
+
    +
  • + +
      +
    • + + +

      Field Summary

      +
        +
      • + + +

        Fields inherited from class org.elasticsearch.action.ActionRequestBuilder

        +action, client, request
      • +
      +
    • +
    + +
      +
    • + + +

      Constructor Summary

      + + + + + + + + +
      Constructors 
      Constructor and Description
      LangdetectRequestBuilder(org.elasticsearch.client.ElasticsearchClient client) 
      +
    • +
    + +
      +
    • + + +

      Method Summary

      + + + + + + + + + + + + + + +
      All Methods Instance Methods Concrete Methods 
      Modifier and Type           Method and Description
      LangdetectRequestBuilder   setProfile(java.lang.String string)
      LangdetectRequestBuilder   setText(java.lang.String string)
      +
        +
      • + + +

        Methods inherited from class org.elasticsearch.action.ActionRequestBuilder

        +beforeExecute, execute, execute, get, get, get, request
      • +
      +
        +
      • + + +

        Methods inherited from class java.lang.Object

        +clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
      • +
      +
    • +
    +
  • +
+
+
+
    +
  • + +
      +
    • + + +

      Constructor Detail

      + + + +
        +
      • +

        LangdetectRequestBuilder

        +
        public LangdetectRequestBuilder(org.elasticsearch.client.ElasticsearchClient client)
        +
      • +
      +
    • +
    + + +
  • +
+
+
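Because the builder inherits execute(...) and get() from ActionRequestBuilder, it can also be used asynchronously. A sketch, assuming Elasticsearch 5.1.x client classes to match the 5.1.2.0 API shown on these pages:

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.client.ElasticsearchClient;
    import org.xbib.elasticsearch.action.langdetect.LangdetectRequestBuilder;
    import org.xbib.elasticsearch.action.langdetect.LangdetectResponse;

    public final class AsyncDetect {
        public static void detect(ElasticsearchClient client, String text) {
            new LangdetectRequestBuilder(client)
                    .setText(text)
                    .execute(new ActionListener<LangdetectResponse>() {
                        @Override
                        public void onResponse(LangdetectResponse response) {
                            System.out.println(response.getLanguages());
                        }

                        @Override
                        public void onFailure(Exception e) {
                            e.printStackTrace();
                        }
                    });
        }
    }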
+ + + + + + + diff --git a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.html b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.html new file mode 100644 index 0000000..3659424 --- /dev/null +++ b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.html @@ -0,0 +1,417 @@ + + + + + +LangdetectResponse (elasticsearch-langdetect 5.1.2.0 API) + + + + + + + + + + + +
+
org.xbib.elasticsearch.action.langdetect
+

Class LangdetectResponse

+
+
+
    +
  • java.lang.Object
  • +
  • +
      +
    • org.elasticsearch.transport.TransportMessage
    • +
    • +
        +
      • org.elasticsearch.transport.TransportResponse
      • +
      • +
          +
        • org.elasticsearch.action.ActionResponse
        • +
        • +
            +
          • org.xbib.elasticsearch.action.langdetect.LangdetectResponse
          • +
          +
        • +
        +
      • +
      +
    • +
    +
  • +
+
+
    +
  • +
    +
    All Implemented Interfaces:
    +
    org.elasticsearch.common.io.stream.Streamable, org.elasticsearch.common.xcontent.StatusToXContent, org.elasticsearch.common.xcontent.ToXContent
    +
    +
    +
    +
    public class LangdetectResponse
    +extends org.elasticsearch.action.ActionResponse
    +implements org.elasticsearch.common.xcontent.StatusToXContent
    +
  • +
+
+
+
    +
  • + +
      +
    • + + +

      Nested Class Summary

      +
        +
      • + + +

        Nested classes/interfaces inherited from class org.elasticsearch.transport.TransportResponse

        +org.elasticsearch.transport.TransportResponse.Empty
      • +
      +
        +
      • + + +

        Nested classes/interfaces inherited from interface org.elasticsearch.common.xcontent.ToXContent

        +org.elasticsearch.common.xcontent.ToXContent.DelegatingMapParams, org.elasticsearch.common.xcontent.ToXContent.MapParams, org.elasticsearch.common.xcontent.ToXContent.Params
      • +
      +
    • +
    + +
      +
    • + + +

      Field Summary

      +
        +
      • + + +

        Fields inherited from interface org.elasticsearch.common.xcontent.ToXContent

        +EMPTY_PARAMS
      • +
      +
    • +
    + +
      +
    • + + +

      Constructor Summary

      + + + + + + + + +
      Constructors 
      Constructor and Description
      LangdetectResponse() 
      +
    • +
    + +
      +
    • + + +

      Method Summary

      + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
      All Methods Instance Methods Concrete Methods 
      Modifier and Type                                    Method and Description
      java.util.List<Language>                             getLanguages()
      java.lang.String                                     getProfile()
      LangdetectResponse                                   setLanguages(java.util.List<Language> languages)
      LangdetectResponse                                   setProfile(java.lang.String profile)
      org.elasticsearch.rest.RestStatus                    status()
      org.elasticsearch.common.xcontent.XContentBuilder    toXContent(org.elasticsearch.common.xcontent.XContentBuilder builder, org.elasticsearch.common.xcontent.ToXContent.Params params)
      +
        +
      • + + +

        Methods inherited from class org.elasticsearch.action.ActionResponse

        +readFrom, writeTo
      • +
      +
        +
      • + + +

        Methods inherited from class org.elasticsearch.transport.TransportMessage

        +remoteAddress, remoteAddress
      • +
      +
        +
      • + + +

        Methods inherited from class java.lang.Object

        +clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
      • +
      +
    • +
    +
  • +
+
+
+
    +
  • + +
      +
    • + + +

      Constructor Detail

      + + + +
        +
      • +

        LangdetectResponse

        +
        public LangdetectResponse()
        +
      • +
      +
    • +
    + +
      +
    • + + +

      Method Detail

      + + + +
        +
      • +

        getProfile

        +
        public java.lang.String getProfile()
        +
      • +
      + + + + + + + +
        +
      • +

        getLanguages

        +
        public java.util.List<Language> getLanguages()
        +
      • +
      + + + + + + + +
        +
      • +

        toXContent

        +
        public org.elasticsearch.common.xcontent.XContentBuilder toXContent(org.elasticsearch.common.xcontent.XContentBuilder builder,
        +                                                                    org.elasticsearch.common.xcontent.ToXContent.Params params)
        +                                                             throws java.io.IOException
        +
        +
        Specified by:
        +
        toXContent in interface org.elasticsearch.common.xcontent.ToXContent
        +
        Throws:
        +
        java.io.IOException
        +
        +
      • +
      + + + +
        +
      • +

        status

        +
        public org.elasticsearch.rest.RestStatus status()
        +
        +
        Specified by:
        +
        status in interface org.elasticsearch.common.xcontent.StatusToXContent
        +
        +
      • +
      +
    • +
    +
  • +
+
+
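A response carries the profile name and a list of Language candidates. A short sketch of consuming one; the response object is assumed to come from an executed langdetect request:

    import org.xbib.elasticsearch.action.langdetect.LangdetectResponse;
    import org.xbib.elasticsearch.common.langdetect.Language;

    public final class ResponseDemo {
        public static void print(LangdetectResponse response) {
            System.out.println("profile: " + response.getProfile());
            for (Language language : response.getLanguages()) {
                // each candidate pairs a language code with a probability
                System.out.println(language.getLanguage() + " -> " + language.getProbability());
            }
        }
    }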
+ + + + + + + diff --git a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/TransportLangdetectAction.html b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/TransportLangdetectAction.html new file mode 100644 index 0000000..1591687 --- /dev/null +++ b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/TransportLangdetectAction.html @@ -0,0 +1,329 @@ + + + + + +TransportLangdetectAction (elasticsearch-langdetect 5.1.2.0 API) + + + + + + + + + + + +
+
org.xbib.elasticsearch.action.langdetect
+

Class TransportLangdetectAction

+
+
+
    +
  • java.lang.Object
  • +
  • +
      +
    • org.elasticsearch.common.component.AbstractComponent
    • +
    • + +
    • +
    +
  • +
+
+ +
+
+
    +
  • + +
      +
    • + + +

      Field Summary

      +
        +
      • + + +

        Fields inherited from class org.elasticsearch.action.support.TransportAction

        +actionName, indexNameExpressionResolver, parseFieldMatcher, taskManager, threadPool
      • +
      +
        +
      • + + +

        Fields inherited from class org.elasticsearch.common.component.AbstractComponent

        +deprecationLogger, logger, settings
      • +
      +
    • +
    + +
      +
    • + + +

      Constructor Summary

      + + + + + + + + +
      Constructors 
      Constructor and Description
      TransportLangdetectAction(org.elasticsearch.common.settings.Settings settings, + org.elasticsearch.threadpool.ThreadPool threadPool, + org.elasticsearch.action.support.ActionFilters actionFilters, + org.elasticsearch.cluster.metadata.IndexNameExpressionResolver indexNameExpressionResolver, + org.elasticsearch.transport.TransportService transportService) 
      +
    • +
    + +
      +
    • + + +

      Method Summary

      + + + + + + + + + + +
      All Methods Instance Methods Concrete Methods 
      Modifier and Type   Method and Description
      protected void      doExecute(LangdetectRequest request, org.elasticsearch.action.ActionListener<LangdetectResponse> listener)
      +
        +
      • + + +

        Methods inherited from class org.elasticsearch.action.support.TransportAction

        +doExecute, execute, execute, execute, execute
      • +
      +
        +
      • + + +

        Methods inherited from class org.elasticsearch.common.component.AbstractComponent

        +logDeprecatedSetting, logRemovedSetting, nodeName
      • +
      +
        +
      • + + +

        Methods inherited from class java.lang.Object

        +clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
      • +
      +
    • +
    +
  • +
+
+
+
    +
  • + +
      +
    • + + +

      Constructor Detail

      + + + +
        +
      • +

        TransportLangdetectAction

        +
        @Inject
        +public TransportLangdetectAction(org.elasticsearch.common.settings.Settings settings,
        +                                         org.elasticsearch.threadpool.ThreadPool threadPool,
        +                                         org.elasticsearch.action.support.ActionFilters actionFilters,
        +                                         org.elasticsearch.cluster.metadata.IndexNameExpressionResolver indexNameExpressionResolver,
        +                                         org.elasticsearch.transport.TransportService transportService)
        +
      • +
      +
    • +
    + + +
  • +
+
+
+ + + + + + + diff --git a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-frame.html b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-frame.html new file mode 100644 index 0000000..ec05823 --- /dev/null +++ b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-frame.html @@ -0,0 +1,23 @@ + + + + + +org.xbib.elasticsearch.action.langdetect (elasticsearch-langdetect 5.1.2.0 API) + + + + +

org.xbib.elasticsearch.action.langdetect

+ + + diff --git a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-summary.html b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-summary.html new file mode 100644 index 0000000..3f9bca1 --- /dev/null +++ b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-summary.html @@ -0,0 +1,155 @@ + + + + + +org.xbib.elasticsearch.action.langdetect (elasticsearch-langdetect 5.1.2.0 API) + + + + + + + + + + +
+

Package org.xbib.elasticsearch.action.langdetect

+
+
+ +
+ + + + + + diff --git a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-tree.html b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-tree.html new file mode 100644 index 0000000..95a252e --- /dev/null +++ b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-tree.html @@ -0,0 +1,178 @@ + + + + + +org.xbib.elasticsearch.action.langdetect Class Hierarchy (elasticsearch-langdetect 5.1.2.0 API) + + + + + + + + + + +
+

Hierarchy For Package org.xbib.elasticsearch.action.langdetect

+Package Hierarchies: + +
+
+

Class Hierarchy

+
    +
  • java.lang.Object +
      +
    • org.elasticsearch.common.component.AbstractComponent +
        +
      • org.elasticsearch.action.support.TransportAction<Request,Response> + +
      • +
      +
    • +
    • org.elasticsearch.action.ActionRequestBuilder<Request,Response,RequestBuilder> + +
    • +
    • org.elasticsearch.action.GenericAction<Request,Response> +
        +
      • org.elasticsearch.action.Action<Request,Response,RequestBuilder> + +
      • +
      +
    • +
    • org.elasticsearch.transport.TransportMessage (implements org.elasticsearch.common.io.stream.Streamable) +
        +
      • org.elasticsearch.transport.TransportRequest +
          +
        • org.elasticsearch.action.ActionRequest + +
        • +
        +
      • +
      • org.elasticsearch.transport.TransportResponse +
          +
        • org.elasticsearch.action.ActionResponse +
            +
          • org.xbib.elasticsearch.action.langdetect.LangdetectResponse (implements org.elasticsearch.common.xcontent.StatusToXContent)
          • +
          +
        • +
        +
      • +
      +
    • +
    +
  • +
+
+ + + + + + diff --git a/docs/javadoc/org/xbib/elasticsearch/common/langdetect/LangProfile.html b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/LangProfile.html new file mode 100644 index 0000000..9c931f4 --- /dev/null +++ b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/LangProfile.html @@ -0,0 +1,351 @@ + + + + + +LangProfile (elasticsearch-langdetect 5.1.2.0 API) + + + + + + + + + + + +
+
org.xbib.elasticsearch.common.langdetect
+

Class LangProfile

+
+
+
    +
  • java.lang.Object
  • +
  • +
      +
    • org.xbib.elasticsearch.common.langdetect.LangProfile
    • +
    +
  • +
+
+
    +
  • +
    +
    +
    public class LangProfile
    +extends java.lang.Object
    +
  • +
+
+
+
    +
  • + +
      +
    • + + +

      Constructor Summary

      + + + + + + + + +
      Constructors 
      Constructor and Description
      LangProfile() 
      +
    • +
    + +
      +
    • + + +

      Method Summary

      + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
      All Methods Instance Methods Concrete Methods 
      Modifier and Type                                    Method and Description
      void                                                 add(java.lang.String gram)
      java.util.Map<java.lang.String,java.lang.Integer>    getFreq()
      java.lang.String                                     getName()
      java.util.List<java.lang.Integer>                    getNWords()
      void                                                 read(java.io.InputStream input)
      void                                                 setFreq(java.util.Map<java.lang.String,java.lang.Integer> freq)
      void                                                 setName(java.lang.String name)
      +
        +
      • + + +

        Methods inherited from class java.lang.Object

        +clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
      • +
      +
    • +
    +
  • +
+
+
+
    +
  • + +
      +
    • + + +

      Constructor Detail

      + + + +
        +
      • +

        LangProfile

        +
        public LangProfile()
        +
      • +
      +
    • +
    + +
      +
    • + + +

      Method Detail

      + + + +
        +
      • +

        add

        +
        public void add(java.lang.String gram)
        +
      • +
      + + + +
        +
      • +

        getName

        +
        public java.lang.String getName()
        +
      • +
      + + + +
        +
      • +

        setName

        +
        public void setName(java.lang.String name)
        +
      • +
      + + + +
        +
      • +

        getNWords

        +
        public java.util.List<java.lang.Integer> getNWords()
        +
      • +
      + + + +
        +
      • +

        getFreq

        +
        public java.util.Map<java.lang.String,java.lang.Integer> getFreq()
        +
      • +
      + + + +
        +
      • +

        setFreq

        +
        public void setFreq(java.util.Map<java.lang.String,java.lang.Integer> freq)
        +
      • +
      + + + +
        +
      • +

        read

        +
        public void read(java.io.InputStream input)
        +          throws java.io.IOException
        +
        +
        Throws:
        +
        java.io.IOException
        +
        +
      • +
      +
    • +
    +
  • +
+
+
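A profile is populated either through the setters or by read(InputStream), after which the getters expose its name, n-word counts and n-gram frequencies. A sketch; the input stream is assumed to already point at a serialized language profile:

    import java.io.IOException;
    import java.io.InputStream;
    import org.xbib.elasticsearch.common.langdetect.LangProfile;

    public final class ProfileDemo {
        // "input" is assumed to be an open stream over a single serialized profile
        public static LangProfile load(InputStream input) throws IOException {
            LangProfile profile = new LangProfile();
            profile.read(input);
            System.out.println(profile.getName() + ": " + profile.getFreq().size() + " n-gram entries");
            return profile;
        }
    }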
+ + + + + + + diff --git a/docs/javadoc/org/xbib/elasticsearch/common/langdetect/LangdetectService.html b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/LangdetectService.html new file mode 100644 index 0000000..e6e58f1 --- /dev/null +++ b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/LangdetectService.html @@ -0,0 +1,369 @@ + + + + + +LangdetectService (elasticsearch-langdetect 5.1.2.0 API) + + + + + + + + + + + +
+
org.xbib.elasticsearch.common.langdetect
+

Class LangdetectService

+
+
+
    +
  • java.lang.Object
  • +
  • +
      +
    • org.xbib.elasticsearch.common.langdetect.LangdetectService
    • +
    +
  • +
+
+
    +
  • +
    +
    +
    public class LangdetectService
    +extends java.lang.Object
    +
  • +
+
+
+
    +
  • + +
      +
    • + + +

      Constructor Summary

      + + + + + + + + + + + + + + +
      Constructors 
      Constructor and Description
      LangdetectService() 
      LangdetectService(org.elasticsearch.common.settings.Settings settings) 
      LangdetectService(org.elasticsearch.common.settings.Settings settings, + java.lang.String profile) 
      +
    • +
    + +
      +
    • + + +

      Method Summary

      + + + + + + + + + + + + + + + + + + + + + + + + + + +
      All Methods Instance Methods Concrete Methods 
      Modifier and Type                            Method and Description
      void                                         addProfile(LangProfile profile, int index, int langsize)
      java.util.List<Language>                     detectAll(java.lang.String text)
      java.lang.String                             getProfile()
      org.elasticsearch.common.settings.Settings   getSettings()
      void                                         loadProfileFromResource(java.lang.String resource, int index, int langsize)
      +
        +
      • + + +

        Methods inherited from class java.lang.Object

        +clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
      • +
      +
    • +
    +
  • +
+
+
+
    +
  • + +
      +
    • + + +

      Constructor Detail

      + + + +
        +
      • +

        LangdetectService

        +
        public LangdetectService()
        +
      • +
      + + + +
        +
      • +

        LangdetectService

        +
        public LangdetectService(org.elasticsearch.common.settings.Settings settings)
        +
      • +
      + + + +
        +
      • +

        LangdetectService

        +
        public LangdetectService(org.elasticsearch.common.settings.Settings settings,
        +                         java.lang.String profile)
        +
      • +
      +
    • +
    + +
      +
    • + + +

      Method Detail

      + + + +
        +
      • +

        getSettings

        +
        public org.elasticsearch.common.settings.Settings getSettings()
        +
      • +
      + + + +
        +
      • +

        loadProfileFromResource

        +
        public void loadProfileFromResource(java.lang.String resource,
        +                                    int index,
        +                                    int langsize)
        +                             throws java.io.IOException
        +
        +
        Throws:
        +
        java.io.IOException
        +
        +
      • +
      + + + +
        +
      • +

        addProfile

        +
        public void addProfile(LangProfile profile,
        +                       int index,
        +                       int langsize)
        +                throws java.io.IOException
        +
        +
        Throws:
        +
        java.io.IOException
        +
        +
      • +
      + + + +
        +
      • +

        getProfile

        +
        public java.lang.String getProfile()
        +
      • +
      + + + + +
    • +
    +
  • +
+
+
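This service is the embeddable entry point: construct it, optionally with Settings and a profile name, then call detectAll(String). A minimal sketch, assuming the bundled language profiles are available on the classpath; the sample sentence is arbitrary:

    import java.util.List;
    import org.xbib.elasticsearch.common.langdetect.LangdetectService;
    import org.xbib.elasticsearch.common.langdetect.Language;

    public final class DetectDemo {
        public static void main(String[] args) throws Exception {
            LangdetectService service = new LangdetectService();   // default settings, default profile
            List<Language> candidates = service.detectAll("Dies ist ein kurzer deutscher Satz.");
            for (Language candidate : candidates) {
                System.out.println(candidate.getLanguage() + " " + candidate.getProbability());
            }
        }
    }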
+ + + + + + + diff --git a/docs/javadoc/org/xbib/elasticsearch/common/langdetect/Language.html b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/Language.html new file mode 100644 index 0000000..acc2fbd --- /dev/null +++ b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/Language.html @@ -0,0 +1,345 @@ + + + + + +Language (elasticsearch-langdetect 5.1.2.0 API) + + + + + + + + + + + +
+
org.xbib.elasticsearch.common.langdetect
+

Class Language

+
+
+
    +
  • java.lang.Object
  • +
  • +
      +
    • org.xbib.elasticsearch.common.langdetect.Language
    • +
    +
  • +
+
+
    +
  • +
    +
    All Implemented Interfaces:
    +
    org.elasticsearch.common.io.stream.Streamable
    +
    +
    +
    +
    public class Language
    +extends java.lang.Object
    +implements org.elasticsearch.common.io.stream.Streamable
    +
  • +
+
+
+
    +
  • + +
      +
    • + + +

      Constructor Summary

      + + + + + + + + +
      Constructors 
      Constructor and Description
      Language(java.lang.String lang, + double prob) 
      +
    • +
    + +
      +
    • + + +

      Method Summary

      + + + + + + + + + + + + + + + + + + + + + + + + + + +
      All Methods Instance Methods Concrete Methods 
      Modifier and Type   Method and Description
      java.lang.String    getLanguage()
      double              getProbability()
      void                readFrom(org.elasticsearch.common.io.stream.StreamInput in)
      java.lang.String    toString()
      void                writeTo(org.elasticsearch.common.io.stream.StreamOutput out)
      +
        +
      • + + +

        Methods inherited from class java.lang.Object

        +clone, equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait
      • +
      +
    • +
    +
  • +
+
+
+
    +
  • + +
      +
    • + + +

      Constructor Detail

      + + + +
        +
      • +

        Language

        +
        public Language(java.lang.String lang,
        +                double prob)
        +
      • +
      +
    • +
    + +
      +
    • + + +

      Method Detail

      + + + +
        +
      • +

        getLanguage

        +
        public java.lang.String getLanguage()
        +
      • +
      + + + +
        +
      • +

        getProbability

        +
        public double getProbability()
        +
      • +
      + + + +
        +
      • +

        readFrom

        +
        public void readFrom(org.elasticsearch.common.io.stream.StreamInput in)
        +              throws java.io.IOException
        +
        +
        Specified by:
        +
        readFrom in interface org.elasticsearch.common.io.stream.Streamable
        +
        Throws:
        +
        java.io.IOException
        +
        +
      • +
      + + + +
        +
      • +

        writeTo

        +
        public void writeTo(org.elasticsearch.common.io.stream.StreamOutput out)
        +             throws java.io.IOException
        +
        +
        Specified by:
        +
        writeTo in interface org.elasticsearch.common.io.stream.Streamable
        +
        Throws:
        +
        java.io.IOException
        +
        +
      • +
      + + + +
        +
      • +

        toString

        +
        public java.lang.String toString()
        +
        +
        Overrides:
        +
        toString in class java.lang.Object
        +
        +
      • +
      +
    • +
    +
  • +
+
+
+ + + + + + + diff --git a/docs/javadoc/org/xbib/elasticsearch/common/langdetect/LanguageDetectionException.html b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/LanguageDetectionException.html new file mode 100644 index 0000000..941bd68 --- /dev/null +++ b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/LanguageDetectionException.html @@ -0,0 +1,264 @@ + + + + + +LanguageDetectionException (elasticsearch-langdetect 5.1.2.0 API) + + + + + + + + + + + +
+
org.xbib.elasticsearch.common.langdetect
+

Class LanguageDetectionException

+
+
+
    +
  • java.lang.Object
  • +
  • +
      +
    • java.lang.Throwable
    • +
    • +
        +
      • java.lang.Exception
      • +
      • +
          +
        • java.io.IOException
        • +
        • +
            +
          • org.xbib.elasticsearch.common.langdetect.LanguageDetectionException
          • +
          +
        • +
        +
      • +
      +
    • +
    +
  • +
+
+
    +
  • +
    +
    All Implemented Interfaces:
    +
    java.io.Serializable
    +
    +
    +
    +
    public class LanguageDetectionException
    +extends java.io.IOException
    +
    +
    See Also:
    +
    Serialized Form
    +
    +
  • +
+
+
+
    +
  • + + + +
      +
    • + + +

      Method Summary

      +
        +
      • + + +

        Methods inherited from class java.lang.Throwable

        +addSuppressed, fillInStackTrace, getCause, getLocalizedMessage, getMessage, getStackTrace, getSuppressed, initCause, printStackTrace, printStackTrace, printStackTrace, setStackTrace, toString
      • +
      +
        +
      • + + +

        Methods inherited from class java.lang.Object

        +clone, equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait
      • +
      +
    • +
    +
  • +
+
+
+
    +
  • + +
      +
    • + + +

      Constructor Detail

      + + + +
        +
      • +

        LanguageDetectionException

        +
        public LanguageDetectionException(java.lang.String message)
        +
      • +
      +
    • +
    +
  • +
+
+
+ + + + + + + diff --git a/docs/javadoc/org/xbib/elasticsearch/common/langdetect/NGram.html b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/NGram.html new file mode 100644 index 0000000..ea6e5a1 --- /dev/null +++ b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/NGram.html @@ -0,0 +1,334 @@ + + + + + +NGram (elasticsearch-langdetect 5.1.2.0 API) + + + + + + + + + + + +
+
org.xbib.elasticsearch.common.langdetect
+

Class NGram

+
+
+
    +
  • java.lang.Object
  • +
  • +
      +
    • org.xbib.elasticsearch.common.langdetect.NGram
    • +
    +
  • +
+
+
    +
  • +
    +
    +
    public class NGram
    +extends java.lang.Object
    +
  • +
+
+
+
    +
  • + +
      +
    • + + +

      Field Summary

      + + + + + + + + + + +
      Fields 
      Modifier and Type   Field and Description
      static int          N_GRAM
      +
    • +
    + +
      +
    • + + +

      Constructor Summary

      + + + + + + + + +
      Constructors 
      Constructor and Description
      NGram() 
      +
    • +
    + +
      +
    • + + +

      Method Summary

      + + + + + + + + + + + + + + + + + + +
      All Methods Static Methods Instance Methods Concrete Methods 
      Modifier and Type   Method and Description
      void                addChar(char c)
      java.lang.String    get(int n)
      static char         normalize(char c)
      +
        +
      • + + +

        Methods inherited from class java.lang.Object

        +clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
      • +
      +
    • +
    +
  • +
+
+
+
    +
  • + + + +
      +
    • + + +

      Constructor Detail

      + + + +
        +
      • +

        NGram

        +
        public NGram()
        +
      • +
      +
    • +
    + +
      +
    • + + +

      Method Detail

      + + + +
        +
      • +

        normalize

        +
        public static char normalize(char c)
        +
      • +
      + + + +
        +
      • +

        addChar

        +
        public void addChar(char c)
        +
      • +
      + + + +
        +
      • +

        get

        +
        public java.lang.String get(int n)
        +
      • +
      +
    • +
    +
  • +
+
+
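As the member names suggest, characters are fed in one at a time with addChar(char), normalize(char) holds the character normalization rules, and get(n) reads back the current gram of length n up to N_GRAM. A sketch of driving it that way:

    import org.xbib.elasticsearch.common.langdetect.NGram;

    public final class NGramDemo {
        public static void main(String[] args) {
            NGram ngram = new NGram();
            for (char c : "hello".toCharArray()) {
                ngram.addChar(c);
                for (int n = 1; n <= NGram.N_GRAM; n++) {
                    String gram = ngram.get(n);
                    if (gram != null) {          // may be null until enough characters were added
                        System.out.println(n + "-gram: " + gram);
                    }
                }
            }
        }
    }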
+ + + + + + + diff --git a/docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-frame.html b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-frame.html new file mode 100644 index 0000000..80edd73 --- /dev/null +++ b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-frame.html @@ -0,0 +1,26 @@ + + + + + +org.xbib.elasticsearch.common.langdetect (elasticsearch-langdetect 5.1.2.0 API) + + + + +

org.xbib.elasticsearch.common.langdetect

+ + + diff --git a/docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-summary.html b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-summary.html new file mode 100644 index 0000000..128e93b --- /dev/null +++ b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-summary.html @@ -0,0 +1,166 @@ + + + + + +org.xbib.elasticsearch.common.langdetect (elasticsearch-langdetect 5.1.2.0 API) + + + + + + + + + + +
+

Package org.xbib.elasticsearch.common.langdetect

+
+
+ +
+ + + + + + diff --git a/docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-tree.html b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-tree.html new file mode 100644 index 0000000..7253968 --- /dev/null +++ b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-tree.html @@ -0,0 +1,150 @@ + + + + + +org.xbib.elasticsearch.common.langdetect Class Hierarchy (elasticsearch-langdetect 5.1.2.0 API) + + + + + + + + + + +
+

Hierarchy For Package org.xbib.elasticsearch.common.langdetect

+Package Hierarchies: + +
+
+

Class Hierarchy

+
    +
  • java.lang.Object +
      +
    • org.xbib.elasticsearch.common.langdetect.LangdetectService
    • +
    • org.xbib.elasticsearch.common.langdetect.LangProfile
    • +
    • org.xbib.elasticsearch.common.langdetect.Language (implements org.elasticsearch.common.io.stream.Streamable)
    • +
    • org.xbib.elasticsearch.common.langdetect.NGram
    • +
    • java.lang.Throwable (implements java.io.Serializable) + +
    • +
    +
  • +
+
diff --git a/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.Builder.html b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.Builder.html
new file mode 100644
index 0000000..82f5bb2
--- /dev/null
+++ b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.Builder.html
@@ -0,0 +1,601 @@
org.xbib.elasticsearch.index.mapper.langdetect
Class LangdetectMapper.Builder

  • java.lang.Object
      • org.elasticsearch.index.mapper.Mapper.Builder<T,Y>
          • org.elasticsearch.index.mapper.FieldMapper.Builder<LangdetectMapper.Builder,org.elasticsearch.index.mapper.TextFieldMapper>
              • org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder

Enclosing class: LangdetectMapper

public static class LangdetectMapper.Builder
extends org.elasticsearch.index.mapper.FieldMapper.Builder<LangdetectMapper.Builder,org.elasticsearch.index.mapper.TextFieldMapper>

[member summary and detail sections of the generated page]
diff --git a/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.Defaults.html b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.Defaults.html
new file mode 100644
index 0000000..e83e24e
--- /dev/null
+++ b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.Defaults.html
@@ -0,0 +1,274 @@
org.xbib.elasticsearch.index.mapper.langdetect
Class LangdetectMapper.Defaults

  • java.lang.Object
      • org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Defaults

Enclosing class: LangdetectMapper

public static class LangdetectMapper.Defaults
extends java.lang.Object

Field Summary
    static org.elasticsearch.index.mapper.MappedFieldType LANG_FIELD_TYPE

Constructor Summary
    Defaults()

Field Detail

    LANG_FIELD_TYPE

        public static final org.elasticsearch.index.mapper.MappedFieldType LANG_FIELD_TYPE

Constructor Detail

    Defaults

        public Defaults()
diff --git a/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.LanguageTo.Builder.html b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.LanguageTo.Builder.html
new file mode 100644
index 0000000..81dbb6a
--- /dev/null
+++ b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.LanguageTo.Builder.html
@@ -0,0 +1,300 @@
org.xbib.elasticsearch.index.mapper.langdetect
Class LangdetectMapper.LanguageTo.Builder

  • java.lang.Object
      • org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo.Builder

Enclosing class: LangdetectMapper.LanguageTo

public static class LangdetectMapper.LanguageTo.Builder
extends java.lang.Object

[member summary and detail sections of the generated page]
diff --git a/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.LanguageTo.html b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.LanguageTo.html
new file mode 100644
index 0000000..673c918
--- /dev/null
+++ b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.LanguageTo.html
@@ -0,0 +1,290 @@
org.xbib.elasticsearch.index.mapper.langdetect
Class LangdetectMapper.LanguageTo

  • java.lang.Object
      • org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo

Enclosing class: LangdetectMapper

public static class LangdetectMapper.LanguageTo
extends java.lang.Object

Method Detail

    toXContent

        public org.elasticsearch.common.xcontent.XContentBuilder toXContent(org.elasticsearch.common.xcontent.XContentBuilder builder,
                                                                             org.elasticsearch.common.xcontent.ToXContent.Params params)
                                                                      throws java.io.IOException
        Throws:
            java.io.IOException

    languageToFields

        public java.util.Map<java.lang.String,java.lang.Object> languageToFields()
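As a usage illustration, the two methods above can be combined to inspect a configured language-to-field routing. Only the method signatures come from this page; the pre-built languageTo instance, the surrounding object markers, and the exact JSON shape are assumptions, and exception handling is omitted.

    // Sketch: serialize an existing LanguageTo instance (assumed to have been
    // built elsewhere via LangdetectMapper.LanguageTo.Builder) and read back
    // its language -> field routing.
    org.elasticsearch.common.xcontent.XContentBuilder builder =
            org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder().startObject();
    languageTo.toXContent(builder, org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS);
    builder.endObject();
    String json = builder.string();                                   // JSON rendering of the configuration
    java.util.Map<String, Object> routing = languageTo.languageToFields();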
diff --git a/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.TypeParser.html b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.TypeParser.html
new file mode 100644
index 0000000..9d9201d
--- /dev/null
+++ b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.TypeParser.html
@@ -0,0 +1,300 @@
org.xbib.elasticsearch.index.mapper.langdetect
Class LangdetectMapper.TypeParser

  • java.lang.Object
      • org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.TypeParser

All Implemented Interfaces: org.elasticsearch.index.mapper.Mapper.TypeParser
Enclosing class: LangdetectMapper

public static class LangdetectMapper.TypeParser
extends java.lang.Object
implements org.elasticsearch.index.mapper.Mapper.TypeParser

Constructor Summary
    TypeParser()

Method Summary
    org.elasticsearch.index.mapper.Mapper.Builder<?,?> parse(java.lang.String name,
                                                             java.util.Map<java.lang.String,java.lang.Object> mapping,
                                                             org.elasticsearch.index.mapper.Mapper.TypeParser.ParserContext parserContext)

Constructor Detail

    TypeParser

        public TypeParser()

Method Detail

    parse

        public org.elasticsearch.index.mapper.Mapper.Builder<?,?> parse(java.lang.String name,
                                                                        java.util.Map<java.lang.String,java.lang.Object> mapping,
                                                                        org.elasticsearch.index.mapper.Mapper.TypeParser.ParserContext parserContext)
        Specified by:
            parse in interface org.elasticsearch.index.mapper.Mapper.TypeParser
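The parse method above is the hook Elasticsearch invokes for every field whose mapping declares this mapper's type. A rough sketch of the usual shape of such an implementation follows; the option name "languages", the Builder constructor, and the removal of consumed options are illustrative assumptions, not details taken from this plugin.

    // Illustrative shape of a Mapper.TypeParser implementation; names flagged
    // as assumptions in the text above are placeholders only.
    public org.elasticsearch.index.mapper.Mapper.Builder<?, ?> parse(String name,
            java.util.Map<String, Object> mapping,
            org.elasticsearch.index.mapper.Mapper.TypeParser.ParserContext parserContext) {
        LangdetectMapper.Builder builder = new LangdetectMapper.Builder(name);        // assumed constructor
        java.util.Iterator<java.util.Map.Entry<String, Object>> it = mapping.entrySet().iterator();
        while (it.hasNext()) {
            java.util.Map.Entry<String, Object> entry = it.next();
            if ("languages".equals(entry.getKey())) {                                 // hypothetical option
                // configure the builder from entry.getValue()
                it.remove();                                                          // consume recognized options
            }
        }
        return builder;
    }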
diff --git a/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.html b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.html
new file mode 100644
index 0000000..2299518
--- /dev/null
+++ b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.html
@@ -0,0 +1,501 @@
org.xbib.elasticsearch.index.mapper.langdetect
Class LangdetectMapper

  • java.lang.Object
      • org.elasticsearch.index.mapper.Mapper
          • org.elasticsearch.index.mapper.FieldMapper
              • org.elasticsearch.index.mapper.TextFieldMapper
                  • org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper

All Implemented Interfaces: java.lang.Cloneable, java.lang.Iterable<org.elasticsearch.index.mapper.Mapper>, org.elasticsearch.common.xcontent.ToXContent

public class LangdetectMapper
extends org.elasticsearch.index.mapper.TextFieldMapper

Nested Class Summary
    static class LangdetectMapper.Builder
    static class LangdetectMapper.Defaults
    static class LangdetectMapper.LanguageTo
    static class LangdetectMapper.TypeParser

Field Summary
    static java.lang.String MAPPER_TYPE

Constructor Summary
    LangdetectMapper(java.lang.String simpleName,
                     org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType fieldType,
                     org.elasticsearch.index.mapper.MappedFieldType defaultFieldType,
                     int positionIncrementGap,
                     org.elasticsearch.common.settings.Settings indexSettings,
                     org.elasticsearch.index.mapper.FieldMapper.MultiFields multiFields,
                     org.elasticsearch.index.mapper.FieldMapper.CopyTo copyTo,
                     LangdetectMapper.LanguageTo languageTo,
                     LangdetectService langdetectService)

Method Summary
    protected java.lang.String contentType()
    protected void doXContentBody(org.elasticsearch.common.xcontent.XContentBuilder builder,
                                  boolean includeDefaults,
                                  org.elasticsearch.common.xcontent.ToXContent.Params params)
    protected void parseCreateField(org.elasticsearch.index.mapper.ParseContext context,
                                    java.util.List<org.apache.lucene.document.Field> fields)

Method Detail

    contentType
        Overrides: contentType in class org.elasticsearch.index.mapper.TextFieldMapper

    parseCreateField
        Overrides: parseCreateField in class org.elasticsearch.index.mapper.TextFieldMapper
        Throws: java.io.IOException

    doXContentBody
        Overrides: doXContentBody in class org.elasticsearch.index.mapper.TextFieldMapper
        Throws: java.io.IOException
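Seen from the user side, a field is declared with this mapper's type in an index mapping. A sketch using the transport client API follows; the type name "langdetect" (the value of MAPPER_TYPE is not reproduced above), the index and field names, and the client variable are assumptions made for illustration.

    // Sketch: create an index whose "content" field uses the language-detecting
    // mapper. "client" is assumed to be an org.elasticsearch.client.Client;
    // exception handling is omitted.
    org.elasticsearch.common.xcontent.XContentBuilder mapping =
            org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder()
                .startObject()
                    .startObject("properties")
                        .startObject("content")
                            .field("type", "langdetect")   // assumed mapper type name
                        .endObject()
                    .endObject()
                .endObject();
    client.admin().indices().prepareCreate("docs")
            .addMapping("doc", mapping)
            .get();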
diff --git a/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-frame.html b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-frame.html
new file mode 100644
index 0000000..6e00802
--- /dev/null
+++ b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-frame.html
@@ -0,0 +1,24 @@
[generated Javadoc frame page for package org.xbib.elasticsearch.index.mapper.langdetect]
diff --git a/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-summary.html b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-summary.html
new file mode 100644
index 0000000..80c0aa1
--- /dev/null
+++ b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-summary.html
@@ -0,0 +1,159 @@
Package org.xbib.elasticsearch.index.mapper.langdetect

[generated Javadoc package summary page]
diff --git a/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-tree.html b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-tree.html
new file mode 100644
index 0000000..858097c
--- /dev/null
+++ b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-tree.html
@@ -0,0 +1,159 @@
Hierarchy For Package org.xbib.elasticsearch.index.mapper.langdetect

Class Hierarchy
  • java.lang.Object
      • org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Defaults
      • org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo
      • org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo.Builder
      • org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.TypeParser (implements org.elasticsearch.index.mapper.Mapper.TypeParser)
      • org.elasticsearch.index.mapper.Mapper (implements java.lang.Iterable<T>, org.elasticsearch.common.xcontent.ToXContent)
          • org.elasticsearch.index.mapper.FieldMapper (implements java.lang.Cloneable)
              • org.elasticsearch.index.mapper.TextFieldMapper
                  • org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper
      • org.elasticsearch.index.mapper.Mapper.Builder<T,Y>
          • org.elasticsearch.index.mapper.FieldMapper.Builder<T,Y>
              • org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
diff --git a/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/LangdetectPlugin.html b/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/LangdetectPlugin.html
new file mode 100644
index 0000000..07e3335
--- /dev/null
+++ b/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/LangdetectPlugin.html
@@ -0,0 +1,352 @@
org.xbib.elasticsearch.plugin.langdetect
Class LangdetectPlugin

  • java.lang.Object
      • org.elasticsearch.plugins.Plugin
          • org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin

All Implemented Interfaces: java.io.Closeable, java.lang.AutoCloseable, org.elasticsearch.plugins.ActionPlugin, org.elasticsearch.plugins.MapperPlugin

public class LangdetectPlugin
extends org.elasticsearch.plugins.Plugin
implements org.elasticsearch.plugins.MapperPlugin, org.elasticsearch.plugins.ActionPlugin

Constructor Summary
    LangdetectPlugin()

Method Summary
    java.util.List<org.elasticsearch.plugins.ActionPlugin.ActionHandler<? extends org.elasticsearch.action.ActionRequest,? extends org.elasticsearch.action.ActionResponse>> getActions()
    java.util.Map<java.lang.String,org.elasticsearch.index.mapper.Mapper.TypeParser> getMappers()
    java.util.List<java.lang.Class<? extends org.elasticsearch.rest.RestHandler>> getRestHandlers()

Method Detail

    getMappers
        Specified by: getMappers in interface org.elasticsearch.plugins.MapperPlugin

    getActions
        Specified by: getActions in interface org.elasticsearch.plugins.ActionPlugin

    getRestHandlers
        Specified by: getRestHandlers in interface org.elasticsearch.plugins.ActionPlugin
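For reference, the usual shape of a getMappers registration in a MapperPlugin is sketched below. The TypeParser constructor is the one documented earlier; the registered type name "langdetect" is an assumption, since the value of LangdetectMapper.MAPPER_TYPE is not reproduced in this page.

    // Sketch of a MapperPlugin registration; the literal "langdetect" is assumed.
    public java.util.Map<String, org.elasticsearch.index.mapper.Mapper.TypeParser> getMappers() {
        return java.util.Collections.singletonMap("langdetect",
                new LangdetectMapper.TypeParser());
    }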
diff --git a/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-frame.html b/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-frame.html
new file mode 100644
index 0000000..e7855c6
--- /dev/null
+++ b/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-frame.html
@@ -0,0 +1,19 @@
[generated Javadoc frame page for package org.xbib.elasticsearch.plugin.langdetect; lists class LangdetectPlugin]
diff --git a/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-summary.html b/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-summary.html
new file mode 100644
index 0000000..be48689
--- /dev/null
+++ b/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-summary.html
@@ -0,0 +1,139 @@
Package org.xbib.elasticsearch.plugin.langdetect

Class Summary
    LangdetectPlugin
diff --git a/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-tree.html b/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-tree.html
new file mode 100644
index 0000000..26b293f
--- /dev/null
+++ b/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-tree.html
@@ -0,0 +1,138 @@
Hierarchy For Package org.xbib.elasticsearch.plugin.langdetect

Class Hierarchy
  • java.lang.Object
      • org.elasticsearch.plugins.Plugin (implements java.io.Closeable)
          • org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin (implements org.elasticsearch.plugins.ActionPlugin, org.elasticsearch.plugins.MapperPlugin)
diff --git a/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.html b/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.html
new file mode 100644
index 0000000..0d365c8
--- /dev/null
+++ b/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.html
@@ -0,0 +1,352 @@
org.xbib.elasticsearch.rest.action.langdetect
Class RestLangdetectAction

  • java.lang.Object
      • org.elasticsearch.common.component.AbstractComponent
          • org.elasticsearch.rest.BaseRestHandler
              • org.xbib.elasticsearch.rest.action.langdetect.RestLangdetectAction

All Implemented Interfaces: org.elasticsearch.rest.RestHandler

public class RestLangdetectAction
extends org.elasticsearch.rest.BaseRestHandler

Constructor Summary
    RestLangdetectAction(org.elasticsearch.common.settings.Settings settings,
                         org.elasticsearch.rest.RestController controller)

Method Summary
    protected org.elasticsearch.rest.BaseRestHandler.RestChannelConsumer prepareRequest(org.elasticsearch.rest.RestRequest request,
                                                                                        org.elasticsearch.client.node.NodeClient client)

Constructor Detail

    RestLangdetectAction

        @Inject
        public RestLangdetectAction(org.elasticsearch.common.settings.Settings settings,
                                    org.elasticsearch.rest.RestController controller)

Method Detail

    prepareRequest

        protected org.elasticsearch.rest.BaseRestHandler.RestChannelConsumer prepareRequest(org.elasticsearch.rest.RestRequest request,
                                                                                            org.elasticsearch.client.node.NodeClient client)
                                                                                     throws java.io.IOException
        Specified by:
            prepareRequest in class org.elasticsearch.rest.BaseRestHandler
        Throws:
            java.io.IOException
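The prepareRequest signature above follows the Elasticsearch 5.x pattern of returning a consumer that runs once a REST channel is available. A sketch of that pattern follows; the "text" parameter and the plain-text response are placeholders for illustration, not this handler's actual behaviour.

    // Sketch of the BaseRestHandler contract: read from the request up front,
    // defer the response to the returned channel consumer.
    protected RestChannelConsumer prepareRequest(org.elasticsearch.rest.RestRequest request,
            org.elasticsearch.client.node.NodeClient client) throws java.io.IOException {
        final String text = request.param("text");   // hypothetical parameter
        return channel -> channel.sendResponse(
                new org.elasticsearch.rest.BytesRestResponse(
                        org.elasticsearch.rest.RestStatus.OK,
                        "len=" + (text == null ? 0 : text.length())));
    }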
diff --git a/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-frame.html b/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-frame.html
new file mode 100644
index 0000000..f1dfd3a
--- /dev/null
+++ b/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-frame.html
@@ -0,0 +1,19 @@
[generated Javadoc frame page for package org.xbib.elasticsearch.rest.action.langdetect; lists class RestLangdetectAction]
diff --git a/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-summary.html b/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-summary.html
new file mode 100644
index 0000000..4401256
--- /dev/null
+++ b/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-summary.html
@@ -0,0 +1,139 @@
Package org.xbib.elasticsearch.rest.action.langdetect

[generated Javadoc package summary page]
diff --git a/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-tree.html b/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-tree.html
new file mode 100644
index 0000000..1664e45
--- /dev/null
+++ b/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-tree.html
@@ -0,0 +1,142 @@
Hierarchy For Package org.xbib.elasticsearch.rest.action.langdetect

Class Hierarchy
  • java.lang.Object
      • org.elasticsearch.common.component.AbstractComponent
          • org.elasticsearch.rest.BaseRestHandler (implements org.elasticsearch.rest.RestHandler)
              • org.xbib.elasticsearch.rest.action.langdetect.RestLangdetectAction
diff --git a/docs/javadoc/overview-frame.html b/docs/javadoc/overview-frame.html
new file mode 100644
index 0000000..96a94b2
--- /dev/null
+++ b/docs/javadoc/overview-frame.html
@@ -0,0 +1,24 @@
[generated Javadoc overview frame listing all packages (elasticsearch-langdetect 5.1.2.0 API)]
diff --git a/docs/javadoc/overview-summary.html b/docs/javadoc/overview-summary.html
new file mode 100644
index 0000000..a3bf32d
--- /dev/null
+++ b/docs/javadoc/overview-summary.html
@@ -0,0 +1,162 @@
elasticsearch-langdetect 5.1.2.0 API

Bibliographic entity processing for Java

[generated Javadoc overview page with the package summary table]
diff --git a/docs/javadoc/overview-tree.html b/docs/javadoc/overview-tree.html
new file mode 100644
index 0000000..7a6aeb9
--- /dev/null
+++ b/docs/javadoc/overview-tree.html
@@ -0,0 +1,235 @@
Class Hierarchy (elasticsearch-langdetect 5.1.2.0 API)

  • java.lang.Object
      • org.elasticsearch.common.component.AbstractComponent
          • org.elasticsearch.rest.BaseRestHandler (implements org.elasticsearch.rest.RestHandler)
              • org.xbib.elasticsearch.rest.action.langdetect.RestLangdetectAction
          • org.elasticsearch.action.support.TransportAction<Request,Response>
      • org.elasticsearch.action.ActionRequestBuilder<Request,Response,RequestBuilder>
      • org.elasticsearch.action.GenericAction<Request,Response>
          • org.elasticsearch.action.Action<Request,Response,RequestBuilder>
      • org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Defaults
      • org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo
      • org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo.Builder
      • org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.TypeParser (implements org.elasticsearch.index.mapper.Mapper.TypeParser)
      • org.xbib.elasticsearch.common.langdetect.LangdetectService
      • org.xbib.elasticsearch.common.langdetect.LangProfile
      • org.xbib.elasticsearch.common.langdetect.Language (implements org.elasticsearch.common.io.stream.Streamable)
      • org.elasticsearch.index.mapper.Mapper (implements java.lang.Iterable<T>, org.elasticsearch.common.xcontent.ToXContent)
          • org.elasticsearch.index.mapper.FieldMapper (implements java.lang.Cloneable)
              • org.elasticsearch.index.mapper.TextFieldMapper
                  • org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper
      • org.elasticsearch.index.mapper.Mapper.Builder<T,Y>
          • org.elasticsearch.index.mapper.FieldMapper.Builder<T,Y>
              • org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
      • org.xbib.elasticsearch.common.langdetect.NGram
      • org.elasticsearch.plugins.Plugin (implements java.io.Closeable)
          • org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin (implements org.elasticsearch.plugins.ActionPlugin, org.elasticsearch.plugins.MapperPlugin)
      • java.lang.Throwable (implements java.io.Serializable)
      • org.elasticsearch.transport.TransportMessage (implements org.elasticsearch.common.io.stream.Streamable)
          • org.elasticsearch.transport.TransportRequest
              • org.elasticsearch.action.ActionRequest
          • org.elasticsearch.transport.TransportResponse
              • org.elasticsearch.action.ActionResponse
                  • org.xbib.elasticsearch.action.langdetect.LangdetectResponse (implements org.elasticsearch.common.xcontent.StatusToXContent)
+ + + + diff --git a/docs/javadoc/package-list b/docs/javadoc/package-list new file mode 100644 index 0000000..1a4b64f --- /dev/null +++ b/docs/javadoc/package-list @@ -0,0 +1,5 @@ +org.xbib.elasticsearch.action.langdetect +org.xbib.elasticsearch.common.langdetect +org.xbib.elasticsearch.index.mapper.langdetect +org.xbib.elasticsearch.plugin.langdetect +org.xbib.elasticsearch.rest.action.langdetect diff --git a/docs/javadoc/script.js b/docs/javadoc/script.js new file mode 100644 index 0000000..b346356 --- /dev/null +++ b/docs/javadoc/script.js @@ -0,0 +1,30 @@ +function show(type) +{ + count = 0; + for (var key in methods) { + var row = document.getElementById(key); + if ((methods[key] & type) != 0) { + row.style.display = ''; + row.className = (count++ % 2) ? rowColor : altColor; + } + else + row.style.display = 'none'; + } + updateTabs(type); +} + +function updateTabs(type) +{ + for (var value in tabs) { + var sNode = document.getElementById(tabs[value][0]); + var spanNode = sNode.firstChild; + if (value == type) { + sNode.className = activeTableTab; + spanNode.innerHTML = tabs[value][1]; + } + else { + sNode.className = tableTab; + spanNode.innerHTML = "" + tabs[value][1] + ""; + } + } +} diff --git a/docs/javadoc/serialized-form.html b/docs/javadoc/serialized-form.html new file mode 100644 index 0000000..8588382 --- /dev/null +++ b/docs/javadoc/serialized-form.html @@ -0,0 +1,138 @@ + + + + + +Serialized Form (elasticsearch-langdetect 5.1.2.0 API) + + + + + + + + + + +
+

Serialized Form

+
+
+ +
+ + + + + + diff --git a/docs/javadoc/stylesheet.css b/docs/javadoc/stylesheet.css new file mode 100644 index 0000000..3242ab9 --- /dev/null +++ b/docs/javadoc/stylesheet.css @@ -0,0 +1,732 @@ +/* Asciidoclet Java 7/8 javadoc stylesheet + * + * Javadoc stylesheet based on http://docs.oracle.com/javase/8/docs/api/stylesheet.css + * with additional styles from Asciidoctor. + */ + +@import url('coderay-asciidoctor.css'); + +/* Javadoc style sheet */ +/* +Overall document style +*/ + +/* Asciidoclet +@import url('resources/fonts/dejavu.css'); +*/ + +body { + background-color:#ffffff; + color:#353833; + font-family:'DejaVu Sans', Arial, Helvetica, sans-serif; + font-size:14px; + margin:0; +} +a:link, a:visited { + text-decoration:none; + color:#4A6782; +} +a:hover, a:focus { + text-decoration:none; + color:#bb7a2a; +} +a:active { + text-decoration:none; + color:#4A6782; +} +a[name] { + color:#353833; +} +a[name]:hover { + text-decoration:none; + color:#353833; +} +pre { + font-family:'DejaVu Sans Mono', monospace; + font-size:14px; +} +h1 { + font-size:20px; +} +h2 { + font-size:18px; +} +h3 { + font-size:16px; + font-style:italic; +} +h4 { + font-size:13px; +} +h5 { + font-size:12px; +} +h6 { + font-size:11px; +} +ul { + list-style-type:disc; +} +code, tt, dt code, table tr td dt code { + color:rgba(0,0,0,.9); + font-family:'DejaVu Sans Mono', monospace; + font-size:13px; + line-height:1.4; +} +pre > code { + font-size: 14px !important; +} +table tr td dt code { + vertical-align:top; +} +sup { + font-size:8px; +} +/* +Document title and Copyright styles +*/ +.clear { + clear:both; + height:0px; + overflow:hidden; +} +.aboutLanguage { + float:right; + padding:0px 21px; + font-size:11px; + z-index:200; + margin-top:-9px; +} +.legalCopy { + margin:.5em; + float:left; +} +.bar a, .bar a:link, .bar a:visited, .bar a:active { + color:#FFFFFF; + text-decoration:none; +} +.bar a:hover, .bar a:focus { + color:#bb7a2a; +} +.tab { + background-color:#0066FF; + color:#ffffff; + padding:8px; + width:5em; + font-weight:bold; +} +/* +Navigation bar styles +*/ +.bar { + background-color:#4D7A97; + color:#FFFFFF; + padding:.8em .5em .4em .8em; + height:auto;/*height:1.8em;*/ + font-size:11px; + margin:0; +} +.topNav { + background-color:#4D7A97; + color:#FFFFFF; + float:left; + padding:0; + width:100%; + clear:right; + height:2.8em; + padding-top:10px; + overflow:hidden; + font-size:12px; +} +.bottomNav { + margin-top:10px; + background-color:#4D7A97; + color:#FFFFFF; + float:left; + padding:0; + width:100%; + clear:right; + height:2.8em; + padding-top:10px; + overflow:hidden; + font-size:12px; +} +.subNav { + background-color:#dee3e9; + float:left; + width:100%; + overflow:hidden; + font-size:12px; +} +.subNav div { + clear:left; + float:left; + padding:0 0 5px 6px; + text-transform:uppercase; +} +ul.navList, ul.subNavList { + float:left; + margin:0 25px 0 0; + padding:0; +} +ul.navList li{ + list-style:none; + float:left; + padding: 5px 6px; + text-transform:uppercase; +} +ul.subNavList li{ + list-style:none; + float:left; +} +.topNav a:link, .topNav a:active, .topNav a:visited, .bottomNav a:link, .bottomNav a:active, .bottomNav a:visited { + color:#FFFFFF; + text-decoration:none; + text-transform:uppercase; +} +.topNav a:hover, .bottomNav a:hover { + text-decoration:none; + color:#bb7a2a; + text-transform:uppercase; +} +.navBarCell1Rev { + background-color:#F8981D; + color:#253441; + margin: auto 5px; +} +.skipNav { + position:absolute; + top:auto; + left:-9999px; + overflow:hidden; +} +/* +Page header and 
footer styles +*/ +.header, .footer { + clear:both; + margin:0 20px; + padding:5px 0 0 0; +} +.indexHeader { + margin:10px; + position:relative; +} +.indexHeader span{ + margin-right:15px; +} +.indexHeader h1 { + font-size:13px; +} +.title { + color:#2c4557; + margin:10px 0; +} +.subTitle { + margin:5px 0 0 0; +} +.header ul { + margin:0 0 15px 0; + padding:0; +} +.footer ul { + margin:20px 0 5px 0; +} +.header ul li, .footer ul li { + list-style:none; + font-size:13px; +} +/* +Heading styles +*/ +div.details ul.blockList ul.blockList ul.blockList li.blockList h4, div.details ul.blockList ul.blockList ul.blockListLast li.blockList h4 { + background-color:#dee3e9; + border:1px solid #d0d9e0; + margin:0 0 6px -8px; + padding:7px 5px; +} +ul.blockList ul.blockList ul.blockList li.blockList h3 { + background-color:#dee3e9; + border:1px solid #d0d9e0; + margin:0 0 6px -8px; + padding:7px 5px; +} +ul.blockList ul.blockList li.blockList h3 { + padding:0; + margin:15px 0; +} +ul.blockList li.blockList h2 { + padding:0px 0 20px 0; +} +/* +Page layout container styles +*/ +.contentContainer, .sourceContainer, .classUseContainer, .serializedFormContainer, .constantValuesContainer { + clear:both; + padding:10px 20px; + position:relative; +} +.indexContainer { + margin:10px; + position:relative; + font-size:12px; +} +.indexContainer h2 { + font-size:13px; + padding:0 0 3px 0; +} +.indexContainer ul { + margin:0; + padding:0; +} +.indexContainer ul li { + list-style:none; + padding-top:2px; +} +/*.contentContainer dl dt, .contentContainer .description dl dt, .contentContainer .details dl dt, .serializedFormContainer dl dt { */ +.contentContainer dl dt, .serializedFormContainer dl dt, .dlist dl dt { + font-size:13px; + font-weight:bold; + margin:10px 0 0 0; +} + +/*.contentContainer .description dl dd, .contentContainer .details dl dd, .serializedFormContainer dl dd { */ +.contentContainer dl dd, .serializedFormContainer dl dd { + margin:5px 0 10px 0px; + font-size:14px; + font-family:'DejaVu Sans Mono',monospace; +} + +.serializedFormContainer dl.nameValue dt { + margin-left:1px; + font-size:1.1em; + display:inline; + font-weight:bold; +} +.serializedFormContainer dl.nameValue dd { + margin:0 0 0 1px; + font-size:1.1em; + display:inline; +} +/* +List styles +*/ +ul.horizontal li { + display:inline; + font-size:0.9em; +} +ul.inheritance { + margin:0; + padding:0; +} +ul.inheritance li { + display:inline; + list-style:none; +} +ul.inheritance li ul.inheritance { + margin-left:15px; + padding-left:15px; + padding-top:1px; +} +ul.blockList, ul.blockListLast { + margin:10px 0 10px 0; + padding:0; +} +ul.blockList li.blockList, ul.blockListLast li.blockList { + list-style:none; + margin-bottom:15px; + line-height:1.4; +} +ul.blockList ul.blockList li.blockList, ul.blockList ul.blockListLast li.blockList { + padding:0px 20px 5px 10px; + border:1px solid #ededed; + background-color:#f8f8f8; +} +ul.blockList ul.blockList ul.blockList li.blockList, ul.blockList ul.blockList ul.blockListLast li.blockList { + padding:0 0 5px 8px; + background-color:#ffffff; + border:none; +} +ul.blockList ul.blockList ul.blockList ul.blockList li.blockList { + margin-left:0; + padding-left:0; + padding-bottom:15px; + border:none; +} +ul.blockList ul.blockList ul.blockList ul.blockList li.blockListLast { + list-style:none; + border-bottom:none; + padding-bottom:0; +} +table tr td dl, table tr td dl dt, table tr td dl dd { + margin-top:0; + margin-bottom:1px; +} +/* +Table styles +- Asciidoclet - added .packageSummary to table styles 
below, used by Java 7 output. +*/ +.overviewSummary, .memberSummary, .typeSummary, .useSummary, .constantsSummary, .deprecatedSummary, .packageSummary { + width:100%; + border-left:1px solid #EEE; + border-right:1px solid #EEE; + border-bottom:1px solid #EEE; +} +.overviewSummary, .memberSummary { + padding:0px; +} +.overviewSummary caption, .memberSummary caption, .typeSummary caption, .packageSummary caption, +.useSummary caption, .constantsSummary caption, .deprecatedSummary caption { + position:relative; + text-align:left; + background-repeat:no-repeat; + color:#253441; + font-weight:bold; + clear:none; + overflow:hidden; + padding:0px; + padding-top:10px; + padding-left:1px; + margin:0px; + white-space:pre; +} +.overviewSummary caption a:link, .memberSummary caption a:link, .typeSummary caption a:link, .packageSummary caption a:link, +.useSummary caption a:link, .constantsSummary caption a:link, .deprecatedSummary caption a:link, +.overviewSummary caption a:hover, .memberSummary caption a:hover, .typeSummary caption a:hover, .packageSummary caption a:hover, +.useSummary caption a:hover, .constantsSummary caption a:hover, .deprecatedSummary caption a:hover, +.overviewSummary caption a:active, .memberSummary caption a:active, .typeSummary caption a:active, .packageSummary caption a:active, +.useSummary caption a:active, .constantsSummary caption a:active, .deprecatedSummary caption a:active, +.overviewSummary caption a:visited, .memberSummary caption a:visited, .typeSummary caption a:visited, .packageSummary caption a:visited, +.useSummary caption a:visited, .constantsSummary caption a:visited, .deprecatedSummary caption a:visited { + color:#FFFFFF; +} +.overviewSummary caption span, .memberSummary caption span, .typeSummary caption span, .packageSummary caption span, +.useSummary caption span, .constantsSummary caption span, .deprecatedSummary caption span { + white-space:nowrap; + padding-top:5px; + padding-left:12px; + padding-right:12px; + padding-bottom:7px; + display:inline-block; + float:left; + background-color:#F8981D; + border: none; + height:16px; +} +.memberSummary caption span.activeTableTab span { + white-space:nowrap; + padding-top:5px; + padding-left:12px; + padding-right:12px; + margin-right:3px; + display:inline-block; + float:left; + background-color:#F8981D; + height:16px; +} +.memberSummary caption span.tableTab span { + white-space:nowrap; + padding-top:5px; + padding-left:12px; + padding-right:12px; + margin-right:3px; + display:inline-block; + float:left; + background-color:#4D7A97; + height:16px; +} +.memberSummary caption span.tableTab, .memberSummary caption span.activeTableTab { + padding-top:0px; + padding-left:0px; + padding-right:0px; + background-image:none; + float:none; + display:inline; +} +.overviewSummary .tabEnd, .memberSummary .tabEnd, .typeSummary .tabEnd, .packageSummary .tabEnd, +.useSummary .tabEnd, .constantsSummary .tabEnd, .deprecatedSummary .tabEnd { + display:none; + width:5px; + position:relative; + float:left; + background-color:#F8981D; +} +.memberSummary .activeTableTab .tabEnd { + display:none; + width:5px; + margin-right:3px; + position:relative; + float:left; + background-color:#F8981D; +} +.memberSummary .tableTab .tabEnd { + display:none; + width:5px; + margin-right:3px; + position:relative; + background-color:#4D7A97; + float:left; + +} +.overviewSummary td, .memberSummary td, .typeSummary td, .packageSummary td, +.useSummary td, .constantsSummary td, .deprecatedSummary td { + text-align:left; + padding:0px 0px 12px 10px; + 
width:100%; +} +th.colOne, th.colFirst, th.colLast, .useSummary th, .constantsSummary th, +td.colOne, td.colFirst, td.colLast, .useSummary td, .constantsSummary td{ + vertical-align:top; + padding-right:0px; + padding-top:8px; + padding-bottom:3px; +} +th.colFirst, th.colLast, th.colOne, .constantsSummary th { + background:#dee3e9; + text-align:left; + padding:8px 3px 3px 7px; +} +td.colFirst, th.colFirst { + white-space:nowrap; + font-size:13px; +} +td.colLast, th.colLast { + font-size:13px; +} +td.colOne, th.colOne { + font-size:13px; +} +.overviewSummary td.colFirst, .overviewSummary th.colFirst, +.overviewSummary td.colOne, .overviewSummary th.colOne, +.memberSummary td.colFirst, .memberSummary th.colFirst, +.memberSummary td.colOne, .memberSummary th.colOne, +.typeSummary td.colFirst, .packageSummary td.colFirst{ + width:25%; + vertical-align:top; +} +td.colOne a:link, td.colOne a:active, td.colOne a:visited, td.colOne a:hover, td.colFirst a:link, td.colFirst a:active, td.colFirst a:visited, td.colFirst a:hover, td.colLast a:link, td.colLast a:active, td.colLast a:visited, td.colLast a:hover, .constantValuesContainer td a:link, .constantValuesContainer td a:active, .constantValuesContainer td a:visited, .constantValuesContainer td a:hover { + font-weight:bold; +} +.tableSubHeadingColor { + background-color:#EEEEFF; +} +.altColor { + background-color:#FFFFFF; +} +.rowColor { + background-color:#EEEEEF; +} +/* +Content styles +*/ +.description pre { + margin-top:0; +} +.deprecatedContent { + margin:0; + padding:10px 0; +} +.docSummary { + padding:0; +} + +ul.blockList ul.blockList ul.blockList li.blockList h3 { + font-style:normal; +} + +div.block { + font-size:14px; + font-family:'DejaVu Serif', Georgia, "Times New Roman", Times, serif; +} + +td.colLast div { + padding-top:0px; +} + + +td.colLast a { + padding-bottom:3px; +} +/* +Formatting effect styles +*/ +.sourceLineNo { + color:green; + padding:0 30px 0 0; +} +h1.hidden { + visibility:hidden; + overflow:hidden; + font-size:10px; +} +div.block { + display:block; + margin:3px 10px 2px 0px; + color:rgba(0,0,0,.8); +} + +div.block h1, div.block h2, div.block h3, div.block h4, div.block h5, div.block h6 { + font-family:'DejaVu Sans', Arial, Helvetica, sans-serif; + font-weight:300; + font-style: normal; + color:#7a4a0e; +} +div.block *:not(pre) > code { + font-weight: normal; + padding: 2px 4px; + background-color: #f7f7f7; + -webkit-border-radius: 4px; + border-radius: 4px; +} +div.block a { + text-decoration: underline; +} +.deprecatedLabel, .descfrmTypeLabel, .memberNameLabel, .memberNameLink, +.overrideSpecifyLabel, .packageHierarchyLabel, .paramLabel, .returnLabel, +.seeLabel, .simpleTagLabel, .throwsLabel, .typeNameLabel, .typeNameLink { + font-weight:bold; +} +.deprecationComment, .emphasizedPhrase, .interfaceName { + font-style:italic; +} + +div.block div.block span.deprecationComment, div.block div.block span.emphasizedPhrase, +div.block div.block span.interfaceName { + font-style:normal; +} + +div.contentContainer ul.blockList li.blockList h2{ + padding-bottom:0px; +} + +/* Asciidoclet styles - adapted from + * https://github.com/asciidoctor/asciidoctor/blob/master/data/stylesheets/asciidoctor-default.css + */ + +/* Asciidoclet - reset to normal paragraph font in description text, javadoc wants monospace for some reason */ +.contentContainer dl dd { + font-family: 'DejaVu Serif', Georgia, "Times New Roman", Times, serif; +} + +span.strong { font-weight: bold; } + +/* select on .ulist, .olist */ +.ulist ul, .olist ol { 
margin-left: 1.5em; padding: inherit; } +ul.no-bullet, ol.no-bullet { margin-left: 1.5em; } +.ulist ul li ul, .ulist ul li ol { margin-left: 1.25em; margin-bottom: 0; font-size: 1em; } +ul.square li ul, ul.circle li ul, ul.disc li ul { list-style: inherit; } +ul.square { list-style-type: square; } +ul.circle { list-style-type: circle; } +ul.disc { list-style-type: disc; } +ul.no-bullet { list-style: none; } +.olist ol li ul, .olist ol li ol { margin-left: 1.25em; margin-bottom: 0; } + +blockquote { margin: 0 0 1.25em; padding: 0.5625em 1.25em 0 1.1875em; border-left: 3px solid #487c58; } +blockquote cite { display: block; font-size: inherit; color: #454545; } +blockquote cite:before { content: "\2014 \0020"; } +blockquote cite a, blockquote cite a:visited { color: #454545; } +blockquote, blockquote p { line-height: 1.6; color: #6e6e6e; } + +/* Added div.block */ +div.block table { background: white; margin-bottom: 1.25em; border: solid 1px #dddddd; } +div.block table thead, div.block table tfoot { background: whitesmoke; font-weight: bold; } +div.block table thead tr th, div.block table thead tr td, div.block table tfoot tr th, div.block table tfoot tr td { padding: 0.5em 0.625em 0.625em; font-size: inherit; color: #333333; text-align: left; } +div.block table tr th, div.block table tr td { padding: 0.5625em 0.625em; font-size: inherit; color: #333333; } +div.block table tr.even, div.block table tr.alt, div.block table tr:nth-of-type(even) { background: #f9f9f9; } + +.subheader, #content #toctitle, .admonitionblock td.content > .title, .exampleblock > .title, .imageblock > .title, .listingblock > .title, .literalblock > .title, .mathblock > .title, .openblock > .title, .paragraph > .title, .quoteblock > .title, .tableblock > .title, .verseblock > .title, .videoblock > .title, .dlist > .title, .olist > .title, .ulist > .title, .qlist > .title, .hdlist > .title, .tableblock > caption { line-height: 1.4; color: #7a4a0e; font-weight: 300; margin-top: 0.5em; margin-bottom: 0.25em; } + +.imageblock, .literalblock, .listingblock, .mathblock, .verseblock, .videoblock { margin-bottom: 1.25em; } +.admonitionblock td.content > .title, .exampleblock > .title, .imageblock > .title, .listingblock > .title, .literalblock > .title, .mathblock > .title, .openblock > .title, .paragraph > .title, .quoteblock > .title, .tableblock > .title, .verseblock > .title, .videoblock > .title, .dlist > .title, .olist > .title, .ulist > .title, .qlist > .title, .hdlist > .title { text-align: left; font-family: "DejaVu Sans", Arial, Helvetica; font-weight: 300; font-style: italic; } +.tableblock > caption { text-align: left; font-family: "DejaVu Sans", Arial, Helvetica; font-weight: 300; font-style: italic; white-space: nowrap; overflow: visible; max-width: 0; } +table.tableblock #preamble > .sectionbody > .paragraph:first-of-type p { font-size: inherit; } +div.block .admonitionblock > table { border: 0; background: none; width: 100%; } +.admonitionblock > table td.icon { text-align: center; width: 80px; } +.admonitionblock > table td.icon img { max-width: none; } +.admonitionblock > table td.icon .title { font-weight: 300; text-transform: uppercase; } +.admonitionblock > table td.content { padding-left: 1.125em; padding-right: 1.25em; border-left: 1px solid #d8d8d8; color: #6e6e6e; } +.admonitionblock > table td.content > :last-child > :last-child { margin-bottom: 0; } +.exampleblock > .content { border-style: solid; border-width: 1px; border-color: #e6e6e6; margin-bottom: 1.25em; padding: 1.25em; background: white; 
-webkit-border-radius: 4px; border-radius: 4px; } +.exampleblock > .content > :first-child { margin-top: 0; } +.exampleblock > .content > :last-child { margin-bottom: 0; } +.exampleblock > .content h1, .exampleblock > .content h2, .exampleblock > .content h3, .exampleblock > .content #toctitle, .sidebarblock.exampleblock > .content > .title, .exampleblock > .content h4, .exampleblock > .content h5, .exampleblock > .content h6, .exampleblock > .content p { color: #333333; } +.exampleblock > .content h1, .exampleblock > .content h2, .exampleblock > .content h3, .exampleblock > .content #toctitle, .sidebarblock.exampleblock > .content > .title, .exampleblock > .content h4, .exampleblock > .content h5, .exampleblock > .content h6 { line-height: 1; margin-bottom: 0.625em; } +.exampleblock > .content h1.subheader, .exampleblock > .content h2.subheader, .exampleblock > .content h3.subheader, .exampleblock > .content .subheader#toctitle, .sidebarblock.exampleblock > .content > .subheader.title, .exampleblock > .content h4.subheader, .exampleblock > .content h5.subheader, .exampleblock > .content h6.subheader { line-height: 1.4; } +.exampleblock.result > .content { -webkit-box-shadow: 0 1px 8px #e3e3dd; box-shadow: 0 1px 8px #e3e3dd; } +.sidebarblock { border-style: solid; border-width: 1px; border-color: #e3e3dd; margin-bottom: 1.25em; padding: 1.25em; background: #fafaf9; -webkit-border-radius: 4px; border-radius: 4px; } +.sidebarblock > :first-child { margin-top: 0; } +.sidebarblock > :last-child { margin-bottom: 0; } +.sidebarblock h1, .sidebarblock h2, .sidebarblock h3, .sidebarblock #toctitle, .sidebarblock > .content > .title, .sidebarblock h4, .sidebarblock h5, .sidebarblock h6, .sidebarblock p { color: #333333; } +.sidebarblock h1, .sidebarblock h2, .sidebarblock h3, .sidebarblock #toctitle, .sidebarblock > .content > .title, .sidebarblock h4, .sidebarblock h5, .sidebarblock h6 { line-height: 1; margin-bottom: 0.625em; } +.sidebarblock h1.subheader, .sidebarblock h2.subheader, .sidebarblock h3.subheader, .sidebarblock .subheader#toctitle, .sidebarblock > .content > .subheader.title, .sidebarblock h4.subheader, .sidebarblock h5.subheader, .sidebarblock h6.subheader { line-height: 1.4; } +.sidebarblock > .content > .title { color: #7a4a0e; margin-top: 0; line-height: 1.6; } +.exampleblock > .content > :last-child > :last-child, .exampleblock > .content .olist > ol > li:last-child > :last-child, .exampleblock > .content .ulist > ul > li:last-child > :last-child, .exampleblock > .content .qlist > ol > li:last-child > :last-child, .sidebarblock > .content > :last-child > :last-child, .sidebarblock > .content .olist > ol > li:last-child > :last-child, .sidebarblock > .content .ulist > ul > li:last-child > :last-child, .sidebarblock > .content .qlist > ol > li:last-child > :last-child { margin-bottom: 0; } +.literalblock pre,.listingblock pre:not(.highlight),.listingblock pre[class="highlight"],.listingblock pre[class^="highlight "],.listingblock pre.CodeRay{ background: #f7f7f7 } +.literalblock pre,.literalblock pre[class],.listingblock pre,.listingblock pre[class]{-webkit-border-radius:4px;border-radius:4px;word-wrap:break-word;padding:1em;font-size:.8125em} +.literalblock pre.nowrap,.literalblock pre[class].nowrap,.listingblock pre.nowrap,.listingblock pre[class].nowrap{overflow-x:auto;white-space:pre;word-wrap:normal} +.listingblock>.content{position:relative} +.listingblock 
code[data-lang]:before{display:none;content:attr(data-lang);position:absolute;font-size:.75em;top:.425rem;right:.5rem;line-height:1;text-transform:uppercase;color:#999} +.listingblock:hover code[data-lang]:before{display:block} +.listingblock.terminal pre .command:before{content:attr(data-prompt);padding-right:.5em;color:#999} +.listingblock.terminal pre .command:not([data-prompt]):before{content:"$"} + +.quoteblock { margin: 0 0 1.25em 0; padding: 0.5625em 1.25em 0 1.1875em; border-left: 3px solid #487c58; } +.quoteblock blockquote { margin: 0 0 1.25em 0; padding: 0 0 0.625em 0; border: 0; } +.quoteblock blockquote > .paragraph:last-child p { margin-bottom: 0; } +.quoteblock .attribution { margin-top: -0.625em; padding-bottom: 0.625em; font-size: inherit; color: #454545; line-height: 1.6; } +.quoteblock .attribution br { display: none; } +.quoteblock .attribution cite { display: block; } + +table.tableblock{max-width:100%;border-collapse:separate;border-spacing:0} +table.tableblock td>.paragraph:last-child p>p:last-child,table.tableblock th>p:last-child,table.tableblock td>p:last-child{margin-bottom:0} +table.spread{width:100%} +table.tableblock,th.tableblock,td.tableblock{border:0 solid #dedede} +table.grid-all th.tableblock,table.grid-all td.tableblock{border-width:0 1px 1px 0} +table.grid-all tfoot>tr>th.tableblock,table.grid-all tfoot>tr>td.tableblock{border-width:1px 1px 0 0} +table.grid-cols th.tableblock,table.grid-cols td.tableblock{border-width:0 1px 0 0} +table.grid-all *>tr>.tableblock:last-child,table.grid-cols *>tr>.tableblock:last-child{border-right-width:0} +table.grid-rows th.tableblock,table.grid-rows td.tableblock{border-width:0 0 1px 0} +table.grid-all tbody>tr:last-child>th.tableblock,table.grid-all tbody>tr:last-child>td.tableblock,table.grid-all thead:last-child>tr>th.tableblock,table.grid-rows tbody>tr:last-child>th.tableblock,table.grid-rows tbody>tr:last-child>td.tableblock,table.grid-rows thead:last-child>tr>th.tableblock{border-bottom-width:0} +table.grid-rows tfoot>tr>th.tableblock,table.grid-rows tfoot>tr>td.tableblock{border-width:1px 0 0 0} +table.frame-all{border-width:1px} +table.frame-sides{border-width:0 1px} +table.frame-topbot{border-width:1px 0} +th.halign-left,td.halign-left{text-align:left} +th.halign-right,td.halign-right{text-align:right} +th.halign-center,td.halign-center{text-align:center} +th.valign-top,td.valign-top{vertical-align:top} +th.valign-bottom,td.valign-bottom{vertical-align:bottom} +th.valign-middle,td.valign-middle{vertical-align:middle} + +.dlist dl dd, .contentContainer .description .dlist dl dd, .contentContainer .details .dlist dl dd { margin-left: 1.125em; } + +.contentContainer hr { + border: 0 solid #ddddd8; + border-top-width: 1px; + height: 0; + margin: 1em 0 1.25em 0; +} + +.contentContainer hr + br { + display: none; +} + +p.tableblock { + margin-top: .5em; + margin-bottom: 0; +} + +/* Javadoc puts its output inside a
<ul> element which confuses nested ul, ol styles in user text.
+ * Select on asciidoctor's div.ulist & div.olist to get correct nested bullet styles. */
+.ulist > ul {
+ list-style-type: disc;
+}
+
+.ulist > ul .ulist > ul, .olist > ol .ulist > ul {
+ list-style-type: circle;
+}
+
+.olist > ol .olist > ol .ulist > ul, .olist > ol .ulist > ul .ulist > ul, .ulist > ul .olist > ol .ulist > ul, .ulist > ul .ulist > ul .ulist > ul {
+ list-style-type: square;
+}
+
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectLanguageTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectLanguageTest.html
new file mode 100644
index 0000000..fe8229e
--- /dev/null
+++ b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectLanguageTest.html
@@ -0,0 +1,124 @@
+Test results - Class org.xbib.elasticsearch.index.mapper.langdetect.DetectLanguageTest
+4 tests, 0 failures, 0 ignored, 1.523s duration, 100% successful
+
+Tests:
+    testChinese     0.197s    passed
+    testEnglish     0.950s    passed
+    testJapanese    0.191s    passed
+    testKorean      0.185s    passed
+
+Standard output:
    [11:30:08,831][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:30:09,040][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:30:09,240][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:30:09,424][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +
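
The four cases in this report exercise the language detection service whose initialization appears in the standard output above. A minimal sketch of such a per-language test is shown below; it assumes the service exposes a detectAll(String) method returning matches ordered by probability and a getLanguage() accessor on each match, which are illustrative assumptions rather than code taken from this patch.

    import org.elasticsearch.common.settings.Settings;
    import org.junit.Test;
    import org.xbib.elasticsearch.common.langdetect.LangdetectService;

    import static org.junit.Assert.assertEquals;

    public class DetectLanguageSketch {

        @Test
        public void testEnglish() throws Exception {
            // service class name taken from the log output above; detectAll() and
            // getLanguage() are assumed accessors, used here for illustration only
            LangdetectService service = new LangdetectService(Settings.EMPTY);
            String text = "This is a plain English sentence used for language detection.";
            assertEquals("en", service.detectAll(text).get(0).getLanguage());
        }
    }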
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectorTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectorTest.html
new file mode 100644
index 0000000..92618ec
--- /dev/null
+++ b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectorTest.html
@@ -0,0 +1,116 @@
+Test results - Class org.xbib.elasticsearch.index.mapper.langdetect.DetectorTest
+5 tests, 0 failures, 0 ignored, 0s duration, 100% successful
+
+Tests:
+    testDetector1      0s    passed
+    testDetector2      0s    passed
+    testDetector3      0s    passed
+    testDetector4      0s    passed
+    testPunctuation    0s    passed
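
The next report covers LangDetectActionTest, which runs against a local node and creates a "test" index whose content field uses the langdetect mapper; the applied mapping is visible in the cluster-state log lines further below. A minimal sketch of that index setup, assuming a connected Client and the standard create-index builder API, is:

    import org.elasticsearch.client.Client;

    public final class LangdetectMappingSketch {

        // mapping source as reported in the standard output of the report below
        static final String ARTICLE_MAPPING =
                "{\"article\":{\"properties\":{\"content\":{\"type\":\"langdetect\","
                + "\"analyzer\":\"_keyword\",\"include_in_all\":false,"
                + "\"languages\":[\"de\",\"en\",\"fr\"]}}}}";

        static void createTestIndex(Client client) {
            client.admin().indices().prepareCreate("test")
                    .addMapping("article", ARTICLE_MAPPING)
                    .get();
        }
    }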
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectActionTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectActionTest.html
new file mode 100644
index 0000000..383f698
--- /dev/null
+++ b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectActionTest.html
@@ -0,0 +1,480 @@
+Test results - Class org.xbib.elasticsearch.index.mapper.langdetect.LangDetectActionTest
+2 tests, 0 failures, 0 ignored, 47.717s duration, 100% successful
+
+Tests:
+    testLangDetectProfile     5.799s    passed
+    testSort                 41.918s    passed
+
+Standard output:
    [11:30:09,478][INFO ][test                     ][Test worker] settings cluster name
    +[11:30:09,478][INFO ][test                     ][Test worker] starting nodes
    +[11:30:09,513][INFO ][test                     ][Test worker] settings={cluster.name=test-helper-cluster--joerg-1, http.enabled=false, path.home=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect, transport.type=local}
    +[11:30:09,680][INFO ][org.elasticsearch.node.Node][Test worker] initializing ...
    +[11:30:09,785][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] using node location [[NodePath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, spins=null}]], local_lock_id [0]
    +[11:30:09,794][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] node data locations details:
    + -> /Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, free_space [138.5gb], usable_space [138.3gb], total_space [931gb], spins? [unknown], mount [/ (/dev/disk0s2)], type [hfs]
    +[11:30:09,801][INFO ][org.elasticsearch.env.NodeEnvironment][Test worker] heap size [3.5gb], compressed ordinary object pointers [true]
    +[11:30:09,804][INFO ][org.elasticsearch.node.Node][Test worker] node name [xjGsg-9] derived from node ID [xjGsg-9xSHuExRZsmYSNxw]; set [node.name] to override
    +[11:30:09,808][INFO ][org.elasticsearch.node.Node][Test worker] version[5.1.2], pid[7248], build[c8c4c16/2017-01-11T20:18:39.146Z], OS[Mac OS X/10.9.5/x86_64], JVM[Azul Systems, Inc./OpenJDK 64-Bit Server VM/1.8.0_112/25.112-b16]
    +[11:30:09,809][DEBUG][org.elasticsearch.node.Node][Test worker] using config [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/config], data [[/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data]], logs [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/logs], plugins [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins]
    +[11:30:09,819][DEBUG][org.elasticsearch.plugins.PluginsService][Test worker] [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins] directory does not exist.
    +[11:30:09,821][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] no modules loaded
    +[11:30:09,823][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] loaded plugin [org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin]
    +[11:30:09,876][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [force_merge], size [1], queue size [unbounded]
    +[11:30:09,878][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_started], core [1], max [16], keep alive [5m]
    +[11:30:09,879][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [listener], size [4], queue size [unbounded]
    +[11:30:09,885][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [index], size [8], queue size [200]
    +[11:30:09,886][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [refresh], core [1], max [4], keep alive [5m]
    +[11:30:09,887][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [generic], core [4], max [128], keep alive [30s]
    +[11:30:09,887][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [warmer], core [1], max [4], keep alive [5m]
    +[11:30:09,888][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [search], size [13], queue size [1k]
    +[11:30:09,889][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [flush], core [1], max [4], keep alive [5m]
    +[11:30:09,889][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_store], core [1], max [16], keep alive [5m]
    +[11:30:09,890][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [management], core [1], max [5], keep alive [5m]
    +[11:30:09,890][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [get], size [8], queue size [1k]
    +[11:30:09,891][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [bulk], size [8], queue size [50]
    +[11:30:09,891][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [snapshot], core [1], max [4], keep alive [5m]
    +[11:30:10,056][DEBUG][org.elasticsearch.script.ScriptService][Test worker] using script cache with max_size [100], expire [0s]
    +[11:30:10,788][DEBUG][org.elasticsearch.common.network.IfConfig][Test worker] configuration:
    +
    +lo0
    +        inet 127.0.0.1 netmask:255.0.0.0 scope:host
    +        inet6 fe80::1 prefixlen:64 scope:link
    +        inet6 ::1 prefixlen:128 scope:host
    +        UP MULTICAST LOOPBACK mtu:16384 index:1
    +
    +en4
    +        inet 10.1.1.42 netmask:255.255.0.0 broadcast:10.1.255.255 scope:site
    +        inet6 fe80::6a5b:35ff:febc:4672 prefixlen:64 scope:link
    +        hardware 68:5B:35:BC:46:72
    +        UP MULTICAST mtu:1500 index:9
    +
    +[11:30:10,834][DEBUG][org.elasticsearch.monitor.jvm.JvmGcMonitorService][Test worker] enabled [true], interval [1s], gc_threshold [{default=GcThreshold{name='default', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}, young=GcThreshold{name='young', warnThreshold=1000, infoThreshold=700, debugThreshold=400}, old=GcThreshold{name='old', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}}], overhead [50, 25, 10]
    +[11:30:10,843][DEBUG][org.elasticsearch.monitor.os.OsService][Test worker] using refresh_interval [1s]
    +[11:30:10,891][DEBUG][org.elasticsearch.monitor.process.ProcessService][Test worker] using refresh_interval [1s]
    +[11:30:10,901][DEBUG][org.elasticsearch.monitor.jvm.JvmService][Test worker] using refresh_interval [1s]
    +[11:30:10,901][DEBUG][org.elasticsearch.monitor.fs.FsService][Test worker] using refresh_interval [1s]
    +[11:30:10,904][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider][Test worker] using [cluster.routing.allocation.allow_rebalance] with [indices_all_active]
    +[11:30:10,905][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider][Test worker] using [cluster_concurrent_rebalance] with [2]
    +[11:30:11,021][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider][Test worker] using node_concurrent_outgoing_recoveries [2], node_concurrent_incoming_recoveries [2], node_initial_primaries_recoveries [4]
    +[11:30:11,778][DEBUG][org.elasticsearch.index.store.IndexStoreConfig][Test worker] using indices.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [0b]
    +[11:30:11,782][DEBUG][org.elasticsearch.indices.IndicesQueryCache][Test worker] using [node] query cache with size [364mb] max filter count [10000]
    +[11:30:11,789][DEBUG][org.elasticsearch.indices.IndexingMemoryController][Test worker] using indexing buffer size [364mb] with indices.memory.shard_inactive_time [5m], indices.memory.interval [5s]
    +[11:30:11,798][DEBUG][org.elasticsearch.transport.local.LocalTransport][Test worker] creating [8] workers, queue_size [-1]
    +[11:30:11,811][DEBUG][org.elasticsearch.discovery.zen.UnicastZenPing][Test worker] using initial hosts [0.0.0.0], with concurrent_connects [10], resolve_timeout [5s]
    +[11:30:11,814][DEBUG][org.elasticsearch.discovery.zen.ElectMasterService][Test worker] using minimum_master_nodes [-1]
    +[11:30:11,815][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][Test worker] using ping_timeout [3s], join.timeout [1m], master_election.ignore_non_master [false]
    +[11:30:11,820][DEBUG][org.elasticsearch.discovery.zen.MasterFaultDetection][Test worker] [master] uses ping_interval [1s], ping_timeout [30s], ping_retries [3]
    +[11:30:11,824][DEBUG][org.elasticsearch.discovery.zen.NodesFaultDetection][Test worker] [node  ] uses ping_interval [1s], ping_timeout [30s], ping_retries [3]
    +[11:30:12,092][DEBUG][org.elasticsearch.indices.recovery.RecoverySettings][Test worker] using max_bytes_per_sec[40mb]
    +[11:30:12,275][DEBUG][org.elasticsearch.gateway.GatewayAllocator$InternalPrimaryShardAllocator][Test worker] using initial_shards [quorum]
    +[11:30:12,492][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:30:12,545][DEBUG][org.elasticsearch.gateway.GatewayMetaState][Test worker] took 0s to load state
    +[11:30:12,551][INFO ][org.elasticsearch.node.Node][Test worker] initialized
    +[11:30:12,551][INFO ][org.elasticsearch.node.Node][Test worker] starting ...
    +[11:30:12,556][INFO ][org.elasticsearch.transport.TransportService][Test worker] publish_address {local[1]}, bound_addresses {local[1]}
    +[11:30:12,576][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [initial_join]: execute
    +[11:30:12,577][DEBUG][org.elasticsearch.node.Node][Test worker] waiting to join the cluster. timeout [30s]
    +[11:30:12,579][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [initial_join]: took [2ms] no change in cluster_state
    +[11:30:15,599][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[xjGsg-9][generic][T#1]] filtered ping responses: (ignore_non_masters [false])
    +	--> ping_response{node [{xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]}], id[7], master [null],cluster_state_version [-1], cluster_name[test-helper-cluster--joerg-1]}
    +[11:30:15,602][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[xjGsg-9][generic][T#1]] elected as master, waiting for incoming joins ([0] needed)
    +[11:30:15,605][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: execute
    +[11:30:15,619][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] cluster state updated, version [1], source [zen-disco-elected-as-master ([0] nodes joined)]
    +[11:30:15,620][INFO ][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] new_master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]}, reason: zen-disco-elected-as-master ([0] nodes joined)
    +[11:30:15,620][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] publishing cluster state version [1]
    +[11:30:15,623][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] set local cluster state to version 1
    +[11:30:15,627][INFO ][org.elasticsearch.node.Node][Test worker] started
    +[11:30:15,627][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: took [22ms] done applying updated cluster_state (version: 1, uuid: vPR-e6RiRzeKRhFhtworoQ)
    +[11:30:15,642][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: execute
    +[11:30:15,644][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] cluster state updated, version [2], source [local-gateway-elected-state]
    +[11:30:15,644][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] publishing cluster state version [2]
    +[11:30:15,645][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] set local cluster state to version 2
    +[11:30:15,650][INFO ][org.elasticsearch.gateway.GatewayService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] recovered [0] indices into cluster_state
    +[11:30:15,650][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: took [7ms] done applying updated cluster_state (version: 2, uuid: tABH6uFAQli18Ckpg_Gdog)
    +[11:30:15,651][INFO ][test                     ][Test worker] nodes are started
    +[11:30:15,656][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: execute
    +[11:30:15,684][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] creating Index [[test/Xm20JUvBQCGan_kpOOa9Dw]], shards [5]/[1] - reason [create index]
    +[11:30:15,694][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null]
    +[11:30:15,755][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] using dynamic[true]
    +[11:30:16,080][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:30:16,372][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:30:16,399][INFO ][org.elasticsearch.cluster.metadata.MetaDataCreateIndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test] creating index, cause [api], templates [], shards [5]/[1], mappings [article]
    +[11:30:16,427][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test] closing ... (reason [cleaning up after validating index on master])
    +[11:30:16,427][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test/Xm20JUvBQCGan_kpOOa9Dw] closing index service (reason [cleaning up after validating index on master])
    +[11:30:16,427][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] clearing all bitsets because [close]
    +[11:30:16,428][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] full cache clear, reason [close]
    +[11:30:16,428][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] clearing all bitsets because [close]
    +[11:30:16,430][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test/Xm20JUvBQCGan_kpOOa9Dw] closed... (reason [cleaning up after validating index on master])
    +[11:30:16,433][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] cluster state updated, version [3], source [create-index [test], cause [api]]
    +[11:30:16,433][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] publishing cluster state version [3]
    +[11:30:16,433][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] set local cluster state to version 3
    +[11:30:16,435][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [[test/Xm20JUvBQCGan_kpOOa9Dw]] creating index
    +[11:30:16,436][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] creating Index [[test/Xm20JUvBQCGan_kpOOa9Dw]], shards [5]/[1] - reason [create index]
    +[11:30:16,437][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null]
    +[11:30:16,438][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] using dynamic[true]
    +[11:30:16,439][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [[test/Xm20JUvBQCGan_kpOOa9Dw]] adding mapping [article], source [{"article":{"properties":{"content":{"type":"langdetect","analyzer":"_keyword","include_in_all":false,"languages":["de","en","fr"]}}}}]
    +[11:30:16,606][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:30:16,785][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:30:16,787][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][1] creating shard
    +[11:30:16,794][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][1] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/Xm20JUvBQCGan_kpOOa9Dw/1, shard=[test][1]}]
    +[11:30:16,794][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] creating shard_id [test][1]
    +[11:30:16,809][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
    +[11:30:16,872][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED]
    +[11:30:16,880][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
    +[11:30:16,881][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][3] creating shard
    +[11:30:16,882][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][3] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/Xm20JUvBQCGan_kpOOa9Dw/3, shard=[test][3]}]
    +[11:30:16,882][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] creating shard_id [test][3]
    +[11:30:16,885][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
    +[11:30:16,886][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED]
    +[11:30:16,888][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
    +[11:30:16,889][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][2] creating shard
    +[11:30:16,890][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#1]] starting recovery from store ...
    +[11:30:16,890][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#3]] starting recovery from store ...
    +[11:30:16,890][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][2] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/Xm20JUvBQCGan_kpOOa9Dw/2, shard=[test][2]}]
    +[11:30:16,890][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] creating shard_id [test][2]
    +[11:30:16,891][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
    +[11:30:16,892][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED]
    +[11:30:16,894][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
    +[11:30:16,894][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][0] creating shard
    +[11:30:16,894][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#2]] starting recovery from store ...
    +[11:30:16,895][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][0] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/Xm20JUvBQCGan_kpOOa9Dw/0, shard=[test][0]}]
    +[11:30:16,895][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] creating shard_id [test][0]
    +[11:30:16,897][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
    +[11:30:16,899][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED]
    +[11:30:16,904][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
    +[11:30:16,906][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#4]] starting recovery from store ...
    +[11:30:16,914][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: took [1.2s] done applying updated cluster_state (version: 3, uuid: 8FEr5_B6TAGyOsOUZNFWRg)
    +[11:30:16,988][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[xjGsg-9][generic][T#4]] wipe translog location - creating new translog
    +[11:30:16,988][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[xjGsg-9][generic][T#3]] wipe translog location - creating new translog
    +[11:30:16,988][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[xjGsg-9][generic][T#2]] wipe translog location - creating new translog
    +[11:30:16,988][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[xjGsg-9][generic][T#1]] wipe translog location - creating new translog
    +[11:30:17,000][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[xjGsg-9][generic][T#4]] no translog ID present in the current generation - creating one
    +[11:30:17,000][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[xjGsg-9][generic][T#1]] no translog ID present in the current generation - creating one
    +[11:30:17,001][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[xjGsg-9][generic][T#2]] no translog ID present in the current generation - creating one
    +[11:30:17,002][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[xjGsg-9][generic][T#3]] no translog ID present in the current generation - creating one
    +[11:30:17,040][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
    +[11:30:17,040][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#4]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
    +[11:30:17,040][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#1]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
    +[11:30:17,040][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#2]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
    +[11:30:17,041][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#4]] recovery completed from [shard_store], took [145ms]
    +[11:30:17,041][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#1]] recovery completed from [shard_store], took [158ms]
    +[11:30:17,041][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#3]] recovery completed from [shard_store], took [249ms]
    +[11:30:17,042][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#2]] recovery completed from [shard_store], took [150ms]
    +[11:30:17,042][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#3]] [test][1] sending [internal:cluster/shard/started] to [xjGsg-9xSHuExRZsmYSNxw] for shard entry [shard id [[test][1]], allocation id [r31ohnA5TS-24_8ha4AXyQ], primary term [0], message [after new shard recovery]]
    +[11:30:17,042][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#4]] [test][0] sending [internal:cluster/shard/started] to [xjGsg-9xSHuExRZsmYSNxw] for shard entry [shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [after new shard recovery]]
    +[11:30:17,042][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#2]] [test][2] sending [internal:cluster/shard/started] to [xjGsg-9xSHuExRZsmYSNxw] for shard entry [shard id [[test][2]], allocation id [N9wqcAeVRUuHyf-U33djlA], primary term [0], message [after new shard recovery]]
    +[11:30:17,042][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#1]] [test][3] sending [internal:cluster/shard/started] to [xjGsg-9xSHuExRZsmYSNxw] for shard entry [shard id [[test][3]], allocation id [b2Rs7kcaTROKAERrSklvZw], primary term [0], message [after new shard recovery]]
    +[11:30:17,044][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#3]] [test][1] received shard started for [shard id [[test][1]], allocation id [r31ohnA5TS-24_8ha4AXyQ], primary term [0], message [after new shard recovery]]
    +[11:30:17,044][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#1]] [test][3] received shard started for [shard id [[test][3]], allocation id [b2Rs7kcaTROKAERrSklvZw], primary term [0], message [after new shard recovery]]
    +[11:30:17,044][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#2]] [test][2] received shard started for [shard id [[test][2]], allocation id [N9wqcAeVRUuHyf-U33djlA], primary term [0], message [after new shard recovery]]
    +[11:30:17,044][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#4]] [test][0] received shard started for [shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [after new shard recovery]]
    +[11:30:17,046][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][1]], allocation id [r31ohnA5TS-24_8ha4AXyQ], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [b2Rs7kcaTROKAERrSklvZw], primary term [0], message [after new shard recovery], shard id [[test][2]], allocation id [N9wqcAeVRUuHyf-U33djlA], primary term [0], message [after new shard recovery]]]: execute
    +[11:30:17,070][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][1] starting shard [test][1], node[xjGsg-9xSHuExRZsmYSNxw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=r31ohnA5TS-24_8ha4AXyQ], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:30:16.404Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][1]], allocation id [r31ohnA5TS-24_8ha4AXyQ], primary term [0], message [after new shard recovery]])
    +[11:30:17,070][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][3] starting shard [test][3], node[xjGsg-9xSHuExRZsmYSNxw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=b2Rs7kcaTROKAERrSklvZw], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:30:16.404Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][3]], allocation id [b2Rs7kcaTROKAERrSklvZw], primary term [0], message [after new shard recovery]])
    +[11:30:17,070][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][2] starting shard [test][2], node[xjGsg-9xSHuExRZsmYSNxw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=N9wqcAeVRUuHyf-U33djlA], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:30:16.404Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][2]], allocation id [N9wqcAeVRUuHyf-U33djlA], primary term [0], message [after new shard recovery]])
    +[11:30:17,076][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] cluster state updated, version [4], source [shard-started[shard id [[test][1]], allocation id [r31ohnA5TS-24_8ha4AXyQ], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [b2Rs7kcaTROKAERrSklvZw], primary term [0], message [after new shard recovery], shard id [[test][2]], allocation id [N9wqcAeVRUuHyf-U33djlA], primary term [0], message [after new shard recovery]]]
    +[11:30:17,076][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] publishing cluster state version [4]
    +[11:30:17,076][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] set local cluster state to version 4
    +[11:30:17,077][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
    +[11:30:17,078][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
    +[11:30:17,078][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
    +[11:30:17,079][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][4] creating shard
    +[11:30:17,080][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][4] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/Xm20JUvBQCGan_kpOOa9Dw/4, shard=[test][4]}]
    +[11:30:17,080][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] creating shard_id [test][4]
    +[11:30:17,081][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
    +[11:30:17,082][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED]
    +[11:30:17,083][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
    +[11:30:17,083][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#3]] starting recovery from store ...
    +[11:30:17,083][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][0] sending [internal:cluster/shard/started] to [xjGsg-9xSHuExRZsmYSNxw] for shard entry [shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:30:17,084][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][0] received shard started for [shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:30:17,086][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[xjGsg-9][generic][T#3]] wipe translog location - creating new translog
    +[11:30:17,088][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][1]], allocation id [r31ohnA5TS-24_8ha4AXyQ], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [b2Rs7kcaTROKAERrSklvZw], primary term [0], message [after new shard recovery], shard id [[test][2]], allocation id [N9wqcAeVRUuHyf-U33djlA], primary term [0], message [after new shard recovery]]]: took [42ms] done applying updated cluster_state (version: 4, uuid: 9m6tB2ahT8CkbXeYtl-KkQ)
    +[11:30:17,089][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: execute
    +[11:30:17,089][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[xjGsg-9][generic][T#3]] no translog ID present in the current generation - creating one
    +[11:30:17,089][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][0] starting shard [test][0], node[xjGsg-9xSHuExRZsmYSNxw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=z78LVcKSTOWuUYQNekMARw], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:30:16.404Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [after new shard recovery]])
    +[11:30:17,093][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] cluster state updated, version [5], source [shard-started[shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]
    +[11:30:17,093][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] publishing cluster state version [5]
    +[11:30:17,093][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] set local cluster state to version 5
    +[11:30:17,105][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
    +[11:30:17,105][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#3]] recovery completed from [shard_store], took [25ms]
    +[11:30:17,106][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][4] sending [internal:cluster/shard/started] to [xjGsg-9xSHuExRZsmYSNxw] for shard entry [shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:30:17,106][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][4] received shard started for [shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:30:17,115][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
    +[11:30:17,115][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#3]] [test][4] sending [internal:cluster/shard/started] to [xjGsg-9xSHuExRZsmYSNxw] for shard entry [shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [after new shard recovery]]
    +[11:30:17,115][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#3]] [test][4] received shard started for [shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [after new shard recovery]]
    +[11:30:17,120][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: took [30ms] done applying updated cluster_state (version: 5, uuid: QyfZAKvASqSwPu5-A5fW0g)
    +[11:30:17,121][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [after new shard recovery]]]: execute
    +[11:30:17,122][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][4] starting shard [test][4], node[xjGsg-9xSHuExRZsmYSNxw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=J-2CbydsTECn5IDw9UhBXg], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:30:16.404Z], delayed=false, allocation_status[deciders_throttled]] (shard started task: [shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]])
    +[11:30:17,126][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] cluster state updated, version [6], source [shard-started[shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [after new shard recovery]]]
    +[11:30:17,127][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] publishing cluster state version [6]
    +[11:30:17,128][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] set local cluster state to version 6
    +[11:30:17,130][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
    +[11:30:17,135][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [after new shard recovery]]]: took [13ms] done applying updated cluster_state (version: 6, uuid: LFnl7Ex3SFa4MRmFAhPVtg)
    +[11:30:45,651][INFO ][org.elasticsearch.cluster.routing.allocation.DiskThresholdMonitor][elasticsearch[xjGsg-9][management][T#2]] low disk watermark [85%] exceeded on [xjGsg-9xSHuExRZsmYSNxw][xjGsg-9][/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0] free: 138.3gb[14.8%], replicas will not be assigned to this node
    +[11:30:47,177][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [put-mapping[article]]: execute
    +[11:30:47,181][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] using dynamic[true]
    +[11:30:47,351][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:30:47,702][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:30:48,009][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:30:48,010][DEBUG][org.elasticsearch.cluster.metadata.MetaDataMappingService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test/Xm20JUvBQCGan_kpOOa9Dw] update_mapping [article] with source [{"article":{"properties":{"content":{"type":"langdetect","analyzer":"_keyword","include_in_all":false,"languages":["de","en","fr"]},"title":{"type":"text","fields":{"keyword":{"type":"keyword","ignore_above":256}}}}}}]
    +[11:30:48,011][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] cluster state updated, version [7], source [put-mapping[article]]
    +[11:30:48,011][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] publishing cluster state version [7]
    +[11:30:48,012][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] set local cluster state to version 7
    +[11:30:48,012][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [[test/Xm20JUvBQCGan_kpOOa9Dw]] updating mapping [article], source [{"article":{"properties":{"content":{"type":"langdetect","analyzer":"_keyword","include_in_all":false,"languages":["de","en","fr"]},"title":{"type":"text","fields":{"keyword":{"type":"keyword","ignore_above":256}}}}}}]
    +[11:30:48,515][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:30:48,967][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:30:48,980][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [put-mapping[article]]: took [1.8s] done applying updated cluster_state (version: 7, uuid: bK5LpBuLTDiHQwYR36L__w)
    +[11:30:49,295][INFO ][test                     ][Test worker] stopping nodes
    +[11:30:49,296][INFO ][org.elasticsearch.node.Node][Test worker] stopping ...
    +[11:30:49,299][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test] closing ... (reason [shutdown])
    +[11:30:49,299][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/Xm20JUvBQCGan_kpOOa9Dw] closing index service (reason [shutdown])
    +[11:30:49,300][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closing... (reason: [shutdown])
    +[11:30:49,300][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
    +[11:30:49,300][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
    +[11:30:49,301][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
    +[11:30:49,302][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
    +[11:30:49,302][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
    +[11:30:49,305][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
    +[11:30:49,306][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
    +[11:30:49,306][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closed (reason: [shutdown])
    +[11:30:49,306][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closing... (reason: [shutdown])
    +[11:30:49,306][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
    +[11:30:49,306][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
    +[11:30:49,306][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
    +[11:30:49,307][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
    +[11:30:49,307][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
    +[11:30:49,310][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
    +[11:30:49,311][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
    +[11:30:49,311][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closed (reason: [shutdown])
    +[11:30:49,311][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closing... (reason: [shutdown])
    +[11:30:49,311][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
    +[11:30:49,311][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
    +[11:30:49,345][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
    +[11:30:49,345][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
    +[11:30:49,346][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
    +[11:30:49,348][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
    +[11:30:49,348][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
    +[11:30:49,348][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closed (reason: [shutdown])
    +[11:30:49,348][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closing... (reason: [shutdown])
    +[11:30:49,349][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
    +[11:30:49,349][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
    +[11:30:49,359][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
    +[11:30:49,360][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
    +[11:30:49,360][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
    +[11:30:49,362][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
    +[11:30:49,362][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
    +[11:30:49,362][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closed (reason: [shutdown])
    +[11:30:49,362][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closing... (reason: [shutdown])
    +[11:30:49,362][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
    +[11:30:49,362][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
    +[11:30:49,369][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
    +[11:30:49,370][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
    +[11:30:49,370][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
    +[11:30:49,372][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
    +[11:30:49,372][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
    +[11:30:49,373][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closed (reason: [shutdown])
    +[11:30:49,373][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close]
    +[11:30:49,374][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][indices_shutdown[T#1]] full cache clear, reason [close]
    +[11:30:49,375][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close]
    +[11:30:49,378][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/Xm20JUvBQCGan_kpOOa9Dw] closed... (reason [shutdown])
    +[11:30:49,378][INFO ][org.elasticsearch.node.Node][Test worker] stopped
    +[11:30:49,379][INFO ][org.elasticsearch.node.Node][Test worker] closing ...
    +[11:30:49,386][INFO ][org.elasticsearch.node.Node][Test worker] closed
    +[11:30:49,392][INFO ][test                     ][Test worker] data files wiped
    +[11:30:51,394][INFO ][test                     ][Test worker] settings cluster name
    +[11:30:51,394][INFO ][test                     ][Test worker] starting nodes
    +[11:30:51,395][INFO ][test                     ][Test worker] settings={cluster.name=test-helper-cluster--joerg-1, http.enabled=false, path.home=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect, transport.type=local}
    +[11:30:51,397][INFO ][org.elasticsearch.node.Node][Test worker] initializing ...
    +[11:30:51,400][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] using node location [[NodePath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, spins=null}]], local_lock_id [0]
    +[11:30:51,401][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] node data locations details:
    + -> /Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, free_space [138.5gb], usable_space [138.3gb], total_space [931gb], spins? [unknown], mount [/ (/dev/disk0s2)], type [hfs]
    +[11:30:51,401][INFO ][org.elasticsearch.env.NodeEnvironment][Test worker] heap size [3.5gb], compressed ordinary object pointers [true]
    +[11:30:51,402][INFO ][org.elasticsearch.node.Node][Test worker] node name [QHkWj5x] derived from node ID [QHkWj5xaR6WKhgt-6zzIZw]; set [node.name] to override
    +[11:30:51,402][INFO ][org.elasticsearch.node.Node][Test worker] version[5.1.2], pid[7248], build[c8c4c16/2017-01-11T20:18:39.146Z], OS[Mac OS X/10.9.5/x86_64], JVM[Azul Systems, Inc./OpenJDK 64-Bit Server VM/1.8.0_112/25.112-b16]
    +[11:30:51,402][DEBUG][org.elasticsearch.node.Node][Test worker] using config [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/config], data [[/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data]], logs [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/logs], plugins [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins]
    +[11:30:51,402][DEBUG][org.elasticsearch.plugins.PluginsService][Test worker] [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins] directory does not exist.
    +[11:30:51,403][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] no modules loaded
    +[11:30:51,403][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] loaded plugin [org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin]
    +[11:30:51,408][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [force_merge], size [1], queue size [unbounded]
    +[11:30:51,408][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_started], core [1], max [16], keep alive [5m]
    +[11:30:51,409][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [listener], size [4], queue size [unbounded]
    +[11:30:51,409][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [index], size [8], queue size [200]
    +[11:30:51,409][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [refresh], core [1], max [4], keep alive [5m]
    +[11:30:51,409][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [generic], core [4], max [128], keep alive [30s]
    +[11:30:51,409][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [warmer], core [1], max [4], keep alive [5m]
    +[11:30:51,410][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [search], size [13], queue size [1k]
    +[11:30:51,410][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [flush], core [1], max [4], keep alive [5m]
    +[11:30:51,410][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_store], core [1], max [16], keep alive [5m]
    +[11:30:51,410][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [management], core [1], max [5], keep alive [5m]
    +[11:30:51,411][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [get], size [8], queue size [1k]
    +[11:30:51,411][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [bulk], size [8], queue size [50]
    +[11:30:51,411][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [snapshot], core [1], max [4], keep alive [5m]
    +[11:30:51,412][DEBUG][org.elasticsearch.script.ScriptService][Test worker] using script cache with max_size [100], expire [0s]
    +[11:30:51,417][DEBUG][org.elasticsearch.common.network.IfConfig][Test worker] configuration:
    +
    +lo0
    +        inet 127.0.0.1 netmask:255.0.0.0 scope:host
    +        inet6 fe80::1 prefixlen:64 scope:link
    +        inet6 ::1 prefixlen:128 scope:host
    +        UP MULTICAST LOOPBACK mtu:16384 index:1
    +
    +en4
    +        inet 10.1.1.42 netmask:255.255.0.0 broadcast:10.1.255.255 scope:site
    +        inet6 fe80::6a5b:35ff:febc:4672 prefixlen:64 scope:link
    +        hardware 68:5B:35:BC:46:72
    +        UP MULTICAST mtu:1500 index:9
    +
    +[11:30:51,419][DEBUG][org.elasticsearch.monitor.jvm.JvmGcMonitorService][Test worker] enabled [true], interval [1s], gc_threshold [{default=GcThreshold{name='default', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}, young=GcThreshold{name='young', warnThreshold=1000, infoThreshold=700, debugThreshold=400}, old=GcThreshold{name='old', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}}], overhead [50, 25, 10]
    +[11:30:51,420][DEBUG][org.elasticsearch.monitor.os.OsService][Test worker] using refresh_interval [1s]
    +[11:30:51,421][DEBUG][org.elasticsearch.monitor.process.ProcessService][Test worker] using refresh_interval [1s]
    +[11:30:51,422][DEBUG][org.elasticsearch.monitor.jvm.JvmService][Test worker] using refresh_interval [1s]
    +[11:30:51,423][DEBUG][org.elasticsearch.monitor.fs.FsService][Test worker] using refresh_interval [1s]
    +[11:30:51,424][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider][Test worker] using [cluster.routing.allocation.allow_rebalance] with [indices_all_active]
    +[11:30:51,425][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider][Test worker] using [cluster_concurrent_rebalance] with [2]
    +[11:30:51,426][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider][Test worker] using node_concurrent_outgoing_recoveries [2], node_concurrent_incoming_recoveries [2], node_initial_primaries_recoveries [4]
    +[11:30:51,431][DEBUG][org.elasticsearch.index.store.IndexStoreConfig][Test worker] using indices.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [0b]
    +[11:30:51,431][DEBUG][org.elasticsearch.indices.IndicesQueryCache][Test worker] using [node] query cache with size [364mb] max filter count [10000]
    +[11:30:51,432][DEBUG][org.elasticsearch.indices.IndexingMemoryController][Test worker] using indexing buffer size [364mb] with indices.memory.shard_inactive_time [5m], indices.memory.interval [5s]
    +[11:30:51,432][DEBUG][org.elasticsearch.transport.local.LocalTransport][Test worker] creating [8] workers, queue_size [-1]
    +[11:30:51,433][DEBUG][org.elasticsearch.discovery.zen.UnicastZenPing][Test worker] using initial hosts [0.0.0.0], with concurrent_connects [10], resolve_timeout [5s]
    +[11:30:51,434][DEBUG][org.elasticsearch.discovery.zen.ElectMasterService][Test worker] using minimum_master_nodes [-1]
    +[11:30:51,434][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][Test worker] using ping_timeout [3s], join.timeout [1m], master_election.ignore_non_master [false]
    +[11:30:51,434][DEBUG][org.elasticsearch.discovery.zen.MasterFaultDetection][Test worker] [master] uses ping_interval [1s], ping_timeout [30s], ping_retries [3]
    +[11:30:51,434][DEBUG][org.elasticsearch.discovery.zen.NodesFaultDetection][Test worker] [node  ] uses ping_interval [1s], ping_timeout [30s], ping_retries [3]
    +[11:30:51,470][DEBUG][org.elasticsearch.indices.recovery.RecoverySettings][Test worker] using max_bytes_per_sec[40mb]
    +[11:30:51,485][DEBUG][org.elasticsearch.gateway.GatewayAllocator$InternalPrimaryShardAllocator][Test worker] using initial_shards [quorum]
    +[11:30:51,659][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:30:51,665][DEBUG][org.elasticsearch.gateway.GatewayMetaState][Test worker] took 0s to load state
    +[11:30:51,667][INFO ][org.elasticsearch.node.Node][Test worker] initialized
    +[11:30:51,667][INFO ][org.elasticsearch.node.Node][Test worker] starting ...
    +[11:30:51,668][INFO ][org.elasticsearch.transport.TransportService][Test worker] publish_address {local[2]}, bound_addresses {local[2]}
    +[11:30:51,668][DEBUG][org.elasticsearch.node.Node][Test worker] waiting to join the cluster. timeout [30s]
    +[11:30:51,669][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] processing [initial_join]: execute
    +[11:30:51,670][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] processing [initial_join]: took [0s] no change in cluster_state
    +[11:30:54,675][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[QHkWj5x][generic][T#1]] filtered ping responses: (ignore_non_masters [false])
    +	--> ping_response{node [{QHkWj5x}{QHkWj5xaR6WKhgt-6zzIZw}{j8xbWB8JRiKlk56pTses4g}{local}{local[2]}], id[14], master [null],cluster_state_version [-1], cluster_name[test-helper-cluster--joerg-1]}
    +[11:30:54,676][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[QHkWj5x][generic][T#1]] elected as master, waiting for incoming joins ([0] needed)
    +[11:30:54,677][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: execute
    +[11:30:54,678][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] cluster state updated, version [1], source [zen-disco-elected-as-master ([0] nodes joined)]
    +[11:30:54,678][INFO ][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] new_master {QHkWj5x}{QHkWj5xaR6WKhgt-6zzIZw}{j8xbWB8JRiKlk56pTses4g}{local}{local[2]}, reason: zen-disco-elected-as-master ([0] nodes joined)
    +[11:30:54,678][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] publishing cluster state version [1]
    +[11:30:54,678][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] set local cluster state to version 1
    +[11:30:54,679][INFO ][org.elasticsearch.node.Node][Test worker] started
    +[11:30:54,679][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: took [2ms] done applying updated cluster_state (version: 1, uuid: 0k0iTRoKTU2uk7EQjjO4Xg)
    +[11:30:54,681][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: execute
    +[11:30:54,681][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] cluster state updated, version [2], source [local-gateway-elected-state]
    +[11:30:54,682][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] publishing cluster state version [2]
    +[11:30:54,682][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] set local cluster state to version 2
    +[11:30:54,684][INFO ][org.elasticsearch.gateway.GatewayService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] recovered [0] indices into cluster_state
    +[11:30:54,685][INFO ][test                     ][Test worker] nodes are started
    +[11:30:54,685][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: took [3ms] done applying updated cluster_state (version: 2, uuid: r5OBMZm5SYKlSfoDj4izKA)
    +[11:30:55,185][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:30:55,186][INFO ][test                     ][Test worker] stopping nodes
    +[11:30:55,186][INFO ][org.elasticsearch.node.Node][Test worker] stopping ...
    +[11:30:55,188][INFO ][org.elasticsearch.node.Node][Test worker] stopped
    +[11:30:55,188][INFO ][org.elasticsearch.node.Node][Test worker] closing ...
    +[11:30:55,191][INFO ][org.elasticsearch.node.Node][Test worker] closed
    +[11:30:55,192][INFO ][test                     ][Test worker] data files wiped
    +
    +
    +
    +
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectBinaryTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectBinaryTest.html
new file mode 100644
index 0000000..f422329
--- /dev/null
+++ b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectBinaryTest.html
@@ -0,0 +1,372 @@
+Test results - Class org.xbib.elasticsearch.index.mapper.langdetect.LangDetectBinaryTest
+1 tests, 0 failures, 0 ignored, 6.790s duration, 100% successful
+Tests: testLangDetectBinary, 6.790s, passed
+Standard output:
+[11:30:57,198][INFO ][test                     ][Test worker] settings cluster name
    +[11:30:57,198][INFO ][test                     ][Test worker] starting nodes
    +[11:30:57,198][INFO ][test                     ][Test worker] settings={cluster.name=test-helper-cluster--joerg-1, http.enabled=false, path.home=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect, transport.type=local}
    +[11:30:57,200][INFO ][org.elasticsearch.node.Node][Test worker] initializing ...
    +[11:30:57,205][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] using node location [[NodePath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, spins=null}]], local_lock_id [0]
    +[11:30:57,205][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] node data locations details:
    + -> /Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, free_space [138.5gb], usable_space [138.3gb], total_space [931gb], spins? [unknown], mount [/ (/dev/disk0s2)], type [hfs]
    +[11:30:57,205][INFO ][org.elasticsearch.env.NodeEnvironment][Test worker] heap size [3.5gb], compressed ordinary object pointers [true]
    +[11:30:57,207][INFO ][org.elasticsearch.node.Node][Test worker] node name [UDgdZ4w] derived from node ID [UDgdZ4wIRhaOBtqKJUSqGw]; set [node.name] to override
    +[11:30:57,208][INFO ][org.elasticsearch.node.Node][Test worker] version[5.1.2], pid[7248], build[c8c4c16/2017-01-11T20:18:39.146Z], OS[Mac OS X/10.9.5/x86_64], JVM[Azul Systems, Inc./OpenJDK 64-Bit Server VM/1.8.0_112/25.112-b16]
    +[11:30:57,208][DEBUG][org.elasticsearch.node.Node][Test worker] using config [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/config], data [[/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data]], logs [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/logs], plugins [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins]
    +[11:30:57,209][DEBUG][org.elasticsearch.plugins.PluginsService][Test worker] [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins] directory does not exist.
    +[11:30:57,210][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] no modules loaded
    +[11:30:57,210][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] loaded plugin [org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin]
    +[11:30:57,212][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [force_merge], size [1], queue size [unbounded]
    +[11:30:57,212][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_started], core [1], max [16], keep alive [5m]
    +[11:30:57,213][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [listener], size [4], queue size [unbounded]
    +[11:30:57,213][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [index], size [8], queue size [200]
    +[11:30:57,213][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [refresh], core [1], max [4], keep alive [5m]
    +[11:30:57,214][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [generic], core [4], max [128], keep alive [30s]
    +[11:30:57,214][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [warmer], core [1], max [4], keep alive [5m]
    +[11:30:57,215][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [search], size [13], queue size [1k]
    +[11:30:57,215][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [flush], core [1], max [4], keep alive [5m]
    +[11:30:57,216][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_store], core [1], max [16], keep alive [5m]
    +[11:30:57,216][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [management], core [1], max [5], keep alive [5m]
    +[11:30:57,217][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [get], size [8], queue size [1k]
    +[11:30:57,217][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [bulk], size [8], queue size [50]
    +[11:30:57,217][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [snapshot], core [1], max [4], keep alive [5m]
    +[11:30:57,218][DEBUG][org.elasticsearch.script.ScriptService][Test worker] using script cache with max_size [100], expire [0s]
    +[11:30:57,223][DEBUG][org.elasticsearch.common.network.IfConfig][Test worker] configuration:
    +
    +lo0
    +        inet 127.0.0.1 netmask:255.0.0.0 scope:host
    +        inet6 fe80::1 prefixlen:64 scope:link
    +        inet6 ::1 prefixlen:128 scope:host
    +        UP MULTICAST LOOPBACK mtu:16384 index:1
    +
    +en4
    +        inet 10.1.1.42 netmask:255.255.0.0 broadcast:10.1.255.255 scope:site
    +        inet6 fe80::6a5b:35ff:febc:4672 prefixlen:64 scope:link
    +        hardware 68:5B:35:BC:46:72
    +        UP MULTICAST mtu:1500 index:9
    +
    +[11:30:57,225][DEBUG][org.elasticsearch.monitor.jvm.JvmGcMonitorService][Test worker] enabled [true], interval [1s], gc_threshold [{default=GcThreshold{name='default', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}, young=GcThreshold{name='young', warnThreshold=1000, infoThreshold=700, debugThreshold=400}, old=GcThreshold{name='old', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}}], overhead [50, 25, 10]
    +[11:30:57,225][DEBUG][org.elasticsearch.monitor.os.OsService][Test worker] using refresh_interval [1s]
    +[11:30:57,225][DEBUG][org.elasticsearch.monitor.process.ProcessService][Test worker] using refresh_interval [1s]
    +[11:30:57,225][DEBUG][org.elasticsearch.monitor.jvm.JvmService][Test worker] using refresh_interval [1s]
    +[11:30:57,226][DEBUG][org.elasticsearch.monitor.fs.FsService][Test worker] using refresh_interval [1s]
    +[11:30:57,226][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider][Test worker] using [cluster.routing.allocation.allow_rebalance] with [indices_all_active]
    +[11:30:57,226][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider][Test worker] using [cluster_concurrent_rebalance] with [2]
    +[11:30:57,227][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider][Test worker] using node_concurrent_outgoing_recoveries [2], node_concurrent_incoming_recoveries [2], node_initial_primaries_recoveries [4]
    +[11:30:57,230][DEBUG][org.elasticsearch.index.store.IndexStoreConfig][Test worker] using indices.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [0b]
    +[11:30:57,230][DEBUG][org.elasticsearch.indices.IndicesQueryCache][Test worker] using [node] query cache with size [364mb] max filter count [10000]
    +[11:30:57,231][DEBUG][org.elasticsearch.indices.IndexingMemoryController][Test worker] using indexing buffer size [364mb] with indices.memory.shard_inactive_time [5m], indices.memory.interval [5s]
    +[11:30:57,231][DEBUG][org.elasticsearch.transport.local.LocalTransport][Test worker] creating [8] workers, queue_size [-1]
    +[11:30:57,232][DEBUG][org.elasticsearch.discovery.zen.UnicastZenPing][Test worker] using initial hosts [0.0.0.0], with concurrent_connects [10], resolve_timeout [5s]
    +[11:30:57,232][DEBUG][org.elasticsearch.discovery.zen.ElectMasterService][Test worker] using minimum_master_nodes [-1]
    +[11:30:57,233][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][Test worker] using ping_timeout [3s], join.timeout [1m], master_election.ignore_non_master [false]
    +[11:30:57,233][DEBUG][org.elasticsearch.discovery.zen.MasterFaultDetection][Test worker] [master] uses ping_interval [1s], ping_timeout [30s], ping_retries [3]
    +[11:30:57,233][DEBUG][org.elasticsearch.discovery.zen.NodesFaultDetection][Test worker] [node  ] uses ping_interval [1s], ping_timeout [30s], ping_retries [3]
    +[11:30:57,273][DEBUG][org.elasticsearch.indices.recovery.RecoverySettings][Test worker] using max_bytes_per_sec[40mb]
    +[11:30:57,283][DEBUG][org.elasticsearch.gateway.GatewayAllocator$InternalPrimaryShardAllocator][Test worker] using initial_shards [quorum]
    +[11:30:57,519][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:30:57,532][DEBUG][org.elasticsearch.gateway.GatewayMetaState][Test worker] took 0s to load state
    +[11:30:57,534][INFO ][org.elasticsearch.node.Node][Test worker] initialized
    +[11:30:57,534][INFO ][org.elasticsearch.node.Node][Test worker] starting ...
    +[11:30:57,535][INFO ][org.elasticsearch.transport.TransportService][Test worker] publish_address {local[3]}, bound_addresses {local[3]}
    +[11:30:57,536][DEBUG][org.elasticsearch.node.Node][Test worker] waiting to join the cluster. timeout [30s]
    +[11:30:57,536][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [initial_join]: execute
    +[11:30:57,537][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [initial_join]: took [0s] no change in cluster_state
    +[11:31:00,545][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[UDgdZ4w][generic][T#1]] filtered ping responses: (ignore_non_masters [false])
    +	--> ping_response{node [{UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]}], id[21], master [null],cluster_state_version [-1], cluster_name[test-helper-cluster--joerg-1]}
    +[11:31:00,546][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[UDgdZ4w][generic][T#1]] elected as master, waiting for incoming joins ([0] needed)
    +[11:31:00,546][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: execute
    +[11:31:00,547][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] cluster state updated, version [1], source [zen-disco-elected-as-master ([0] nodes joined)]
    +[11:31:00,547][INFO ][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] new_master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]}, reason: zen-disco-elected-as-master ([0] nodes joined)
    +[11:31:00,547][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] publishing cluster state version [1]
    +[11:31:00,547][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] set local cluster state to version 1
    +[11:31:00,548][INFO ][org.elasticsearch.node.Node][Test worker] started
    +[11:31:00,548][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: took [1ms] done applying updated cluster_state (version: 1, uuid: dVRYFnVnTqmbiRZnrxbK7g)
    +[11:31:00,550][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: execute
    +[11:31:00,550][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] cluster state updated, version [2], source [local-gateway-elected-state]
    +[11:31:00,550][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] publishing cluster state version [2]
    +[11:31:00,550][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] set local cluster state to version 2
    +[11:31:00,553][INFO ][org.elasticsearch.gateway.GatewayService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] recovered [0] indices into cluster_state
    +[11:31:00,553][INFO ][test                     ][Test worker] nodes are started
    +[11:31:00,553][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: took [3ms] done applying updated cluster_state (version: 2, uuid: 2blnlc2FQwaO5YYITuhDSw)
    +[11:31:00,554][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: execute
    +[11:31:00,557][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] creating Index [[test/NQAmgttOR0y0HHLtTzzWmQ]], shards [5]/[1] - reason [create index]
    +[11:31:00,558][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null]
    +[11:31:00,559][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] using dynamic[true]
    +[11:31:01,082][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:31:01,302][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:31:01,307][INFO ][org.elasticsearch.cluster.metadata.MetaDataCreateIndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test] creating index, cause [api], templates [], shards [5]/[1], mappings [someType]
    +[11:31:01,312][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test] closing ... (reason [cleaning up after validating index on master])
    +[11:31:01,312][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test/NQAmgttOR0y0HHLtTzzWmQ] closing index service (reason [cleaning up after validating index on master])
    +[11:31:01,313][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] clearing all bitsets because [close]
    +[11:31:01,313][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] full cache clear, reason [close]
    +[11:31:01,313][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] clearing all bitsets because [close]
    +[11:31:01,314][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test/NQAmgttOR0y0HHLtTzzWmQ] closed... (reason [cleaning up after validating index on master])
    +[11:31:01,315][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] cluster state updated, version [3], source [create-index [test], cause [api]]
    +[11:31:01,315][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] publishing cluster state version [3]
    +[11:31:01,316][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] set local cluster state to version 3
    +[11:31:01,316][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [[test/NQAmgttOR0y0HHLtTzzWmQ]] creating index
    +[11:31:01,319][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] creating Index [[test/NQAmgttOR0y0HHLtTzzWmQ]], shards [5]/[1] - reason [create index]
    +[11:31:01,320][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null]
    +[11:31:01,322][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] using dynamic[true]
    +[11:31:01,324][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [[test/NQAmgttOR0y0HHLtTzzWmQ]] adding mapping [someType], source [{"someType":{"properties":{"content":{"type":"text","fields":{"language":{"type":"langdetect","analyzer":"_keyword","include_in_all":false,"binary":"true"}}}}}}]
    +[11:31:01,542][INFO ][org.elasticsearch.monitor.jvm.JvmGcMonitorService][elasticsearch[UDgdZ4w][scheduler][T#1]] [gc][4] overhead, spent [315ms] collecting in the last [1s]
    +[11:31:01,627][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:31:01,819][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:31:01,820][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][1] creating shard
    +[11:31:01,821][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][1] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/NQAmgttOR0y0HHLtTzzWmQ/1, shard=[test][1]}]
    +[11:31:01,821][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] creating shard_id [test][1]
    +[11:31:01,823][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
    +[11:31:01,823][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED]
    +[11:31:01,825][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
    +[11:31:01,826][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][3] creating shard
    +[11:31:01,826][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#3]] starting recovery from store ...
    +[11:31:01,826][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][3] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/NQAmgttOR0y0HHLtTzzWmQ/3, shard=[test][3]}]
    +[11:31:01,826][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] creating shard_id [test][3]
    +[11:31:01,827][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
    +[11:31:01,827][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED]
    +[11:31:01,828][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[UDgdZ4w][generic][T#3]] wipe translog location - creating new translog
    +[11:31:01,828][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
    +[11:31:01,828][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][2] creating shard
    +[11:31:01,828][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#1]] starting recovery from store ...
    +[11:31:01,829][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][2] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/NQAmgttOR0y0HHLtTzzWmQ/2, shard=[test][2]}]
    +[11:31:01,829][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] creating shard_id [test][2]
    +[11:31:01,829][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[UDgdZ4w][generic][T#3]] no translog ID present in the current generation - creating one
    +[11:31:01,830][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
    +[11:31:01,830][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED]
    +[11:31:01,831][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[UDgdZ4w][generic][T#1]] wipe translog location - creating new translog
    +[11:31:01,831][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
    +[11:31:01,831][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][0] creating shard
    +[11:31:01,831][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#2]] starting recovery from store ...
    +[11:31:01,832][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][0] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/NQAmgttOR0y0HHLtTzzWmQ/0, shard=[test][0]}]
    +[11:31:01,832][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] creating shard_id [test][0]
    +[11:31:01,832][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[UDgdZ4w][generic][T#1]] no translog ID present in the current generation - creating one
    +[11:31:01,833][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
    +[11:31:01,833][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED]
    +[11:31:01,834][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[UDgdZ4w][generic][T#2]] wipe translog location - creating new translog
    +[11:31:01,835][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
    +[11:31:01,835][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[UDgdZ4w][generic][T#2]] no translog ID present in the current generation - creating one
    +[11:31:01,835][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#3]] recovery completed from [shard_store], took [14ms]
    +[11:31:01,835][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#3]] [test][1] sending [internal:cluster/shard/started] to [UDgdZ4wIRhaOBtqKJUSqGw] for shard entry [shard id [[test][1]], allocation id [PvEmAya9S7GLCSadvFn3Kw], primary term [0], message [after new shard recovery]]
    +[11:31:01,836][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#3]] [test][1] received shard started for [shard id [[test][1]], allocation id [PvEmAya9S7GLCSadvFn3Kw], primary term [0], message [after new shard recovery]]
    +[11:31:01,836][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
    +[11:31:01,837][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#4]] starting recovery from store ...
    +[11:31:01,838][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: took [1.2s] done applying updated cluster_state (version: 3, uuid: 0rsqtdJTQFaAocueOUZOMw)
    +[11:31:01,838][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][1]], allocation id [PvEmAya9S7GLCSadvFn3Kw], primary term [0], message [after new shard recovery]]]: execute
    +[11:31:01,838][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][1] starting shard [test][1], node[UDgdZ4wIRhaOBtqKJUSqGw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=PvEmAya9S7GLCSadvFn3Kw], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:31:01.307Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][1]], allocation id [PvEmAya9S7GLCSadvFn3Kw], primary term [0], message [after new shard recovery]])
    +[11:31:01,839][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[UDgdZ4w][generic][T#4]] wipe translog location - creating new translog
    +[11:31:01,840][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[UDgdZ4w][generic][T#4]] no translog ID present in the current generation - creating one
    +[11:31:01,841][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] cluster state updated, version [4], source [shard-started[shard id [[test][1]], allocation id [PvEmAya9S7GLCSadvFn3Kw], primary term [0], message [after new shard recovery]]]
    +[11:31:01,841][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] publishing cluster state version [4]
    +[11:31:01,841][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] set local cluster state to version 4
    +[11:31:01,842][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#1]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
    +[11:31:01,843][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#1]] recovery completed from [shard_store], took [16ms]
    +[11:31:01,843][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#2]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
    +[11:31:01,843][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#2]] recovery completed from [shard_store], took [14ms]
    +[11:31:01,843][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#2]] [test][2] sending [internal:cluster/shard/started] to [UDgdZ4wIRhaOBtqKJUSqGw] for shard entry [shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [after new shard recovery]]
    +[11:31:01,843][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#2]] [test][2] received shard started for [shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [after new shard recovery]]
    +[11:31:01,844][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#1]] [test][3] sending [internal:cluster/shard/started] to [UDgdZ4wIRhaOBtqKJUSqGw] for shard entry [shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [after new shard recovery]]
    +[11:31:01,844][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#1]] [test][3] received shard started for [shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [after new shard recovery]]
    +[11:31:01,844][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#4]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
    +[11:31:01,845][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
    +[11:31:01,845][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#4]] recovery completed from [shard_store], took [13ms]
    +[11:31:01,845][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#4]] [test][0] sending [internal:cluster/shard/started] to [UDgdZ4wIRhaOBtqKJUSqGw] for shard entry [shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [after new shard recovery]]
    +[11:31:01,845][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][3] sending [internal:cluster/shard/started] to [UDgdZ4wIRhaOBtqKJUSqGw] for shard entry [shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:31:01,846][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#4]] [test][0] received shard started for [shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [after new shard recovery]]
    +[11:31:01,846][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][3] received shard started for [shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:31:01,846][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][2] sending [internal:cluster/shard/started] to [UDgdZ4wIRhaOBtqKJUSqGw] for shard entry [shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:31:01,846][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][2] received shard started for [shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:31:01,846][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][4] creating shard
    +[11:31:01,847][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][4] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/NQAmgttOR0y0HHLtTzzWmQ/4, shard=[test][4]}]
    +[11:31:01,847][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] creating shard_id [test][4]
    +[11:31:01,848][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
    +[11:31:01,848][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED]
    +[11:31:01,849][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
    +[11:31:01,849][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][0] sending [internal:cluster/shard/started] to [UDgdZ4wIRhaOBtqKJUSqGw] for shard entry [shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:31:01,849][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][0] received shard started for [shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:31:01,849][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#3]] starting recovery from store ...
    +[11:31:01,851][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[UDgdZ4w][generic][T#3]] wipe translog location - creating new translog
    +[11:31:01,851][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][1]], allocation id [PvEmAya9S7GLCSadvFn3Kw], primary term [0], message [after new shard recovery]]]: took [12ms] done applying updated cluster_state (version: 4, uuid: J52kfurGRQG6HnSDT9gwfg)
    +[11:31:01,852][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[UDgdZ4w][generic][T#3]] no translog ID present in the current generation - creating one
    +[11:31:01,853][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: execute
    +[11:31:01,853][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][2] starting shard [test][2], node[UDgdZ4wIRhaOBtqKJUSqGw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=RW0O8x5KTa6DQA72OILqEA], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:31:01.307Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [after new shard recovery]])
    +[11:31:01,853][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][3] starting shard [test][3], node[UDgdZ4wIRhaOBtqKJUSqGw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=wQKNtekeTOeThRmBtA53FA], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:31:01.307Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [after new shard recovery]])
    +[11:31:01,854][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][0] starting shard [test][0], node[UDgdZ4wIRhaOBtqKJUSqGw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=v4bvgxqFQcSIhooSCHaCYg], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:31:01.307Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [after new shard recovery]])
    +[11:31:01,855][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] cluster state updated, version [5], source [shard-started[shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]
    +[11:31:01,855][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] publishing cluster state version [5]
    +[11:31:01,856][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] set local cluster state to version 5
    +[11:31:01,857][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
    +[11:31:01,861][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#3]] recovery completed from [shard_store], took [10ms]
    +[11:31:01,857][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
    +[11:31:01,862][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#3]] [test][4] sending [internal:cluster/shard/started] to [UDgdZ4wIRhaOBtqKJUSqGw] for shard entry [shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [after new shard recovery]]
    +[11:31:01,862][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#3]] [test][4] received shard started for [shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [after new shard recovery]]
    +[11:31:01,862][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
    +[11:31:01,862][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][4] sending [internal:cluster/shard/started] to [UDgdZ4wIRhaOBtqKJUSqGw] for shard entry [shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:31:01,862][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][4] received shard started for [shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:31:01,863][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
    +[11:31:01,865][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: took [11ms] done applying updated cluster_state (version: 5, uuid: 4cGR_70QQYOUucWv-kZHLQ)
    +[11:31:01,865][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: execute
    +[11:31:01,865][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][4] starting shard [test][4], node[UDgdZ4wIRhaOBtqKJUSqGw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=2nCQ414fRp2el29G3WKNgw], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:31:01.307Z], delayed=false, allocation_status[deciders_throttled]] (shard started task: [shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [after new shard recovery]])
    +[11:31:01,867][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] cluster state updated, version [6], source [shard-started[shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]
    +[11:31:01,867][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] publishing cluster state version [6]
    +[11:31:01,867][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] set local cluster state to version 6
    +[11:31:01,868][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
    +[11:31:01,870][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: took [4ms] done applying updated cluster_state (version: 6, uuid: i3ag2a4aRieMexeNmrDaTA)
    +[11:31:01,943][INFO ][test                     ][Test worker] stopping nodes
    +[11:31:01,943][INFO ][org.elasticsearch.node.Node][Test worker] stopping ...
    +[11:31:01,946][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test] closing ... (reason [shutdown])
    +[11:31:01,947][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/NQAmgttOR0y0HHLtTzzWmQ] closing index service (reason [shutdown])
    +[11:31:01,947][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closing... (reason: [shutdown])
    +[11:31:01,947][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
    +[11:31:01,947][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
    +[11:31:01,947][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
    +[11:31:01,947][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
    +[11:31:01,948][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
    +[11:31:01,949][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
    +[11:31:01,949][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
    +[11:31:01,949][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closed (reason: [shutdown])
    +[11:31:01,949][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closing... (reason: [shutdown])
    +[11:31:01,949][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
    +[11:31:01,949][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
    +[11:31:01,949][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
    +[11:31:01,949][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
    +[11:31:01,950][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
    +[11:31:01,951][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
    +[11:31:01,951][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
    +[11:31:01,951][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closed (reason: [shutdown])
    +[11:31:01,951][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closing... (reason: [shutdown])
    +[11:31:01,951][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
    +[11:31:01,952][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
    +[11:31:01,952][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
    +[11:31:01,952][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
    +[11:31:01,953][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
    +[11:31:01,957][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
    +[11:31:01,958][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
    +[11:31:01,958][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closed (reason: [shutdown])
    +[11:31:01,958][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closing... (reason: [shutdown])
    +[11:31:01,958][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
    +[11:31:01,958][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
    +[11:31:01,970][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
    +[11:31:01,971][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
    +[11:31:01,971][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
    +[11:31:01,972][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
    +[11:31:01,972][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
    +[11:31:01,973][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closed (reason: [shutdown])
    +[11:31:01,973][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closing... (reason: [shutdown])
    +[11:31:01,973][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
    +[11:31:01,973][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
    +[11:31:01,974][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
    +[11:31:01,974][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
    +[11:31:01,974][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
    +[11:31:01,976][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
    +[11:31:01,976][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
    +[11:31:01,976][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closed (reason: [shutdown])
    +[11:31:01,976][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close]
    +[11:31:01,976][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][indices_shutdown[T#1]] full cache clear, reason [close]
    +[11:31:01,976][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close]
    +[11:31:01,977][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/NQAmgttOR0y0HHLtTzzWmQ] closed... (reason [shutdown])
    +[11:31:01,977][INFO ][org.elasticsearch.node.Node][Test worker] stopped
    +[11:31:01,977][INFO ][org.elasticsearch.node.Node][Test worker] closing ...
    +[11:31:01,979][INFO ][org.elasticsearch.node.Node][Test worker] closed
    +[11:31:01,985][INFO ][test                     ][Test worker] data files wiped
    +
    +
    +
    +
    +
    +
    +
    +
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectChineseTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectChineseTest.html
new file mode 100644
index 0000000..bbd7c5d
--- /dev/null
+++ b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectChineseTest.html
@@ -0,0 +1,371 @@
    +Test results - Class org.xbib.elasticsearch.index.mapper.langdetect.LangDetectChineseTest
    +
    +Class org.xbib.elasticsearch.index.mapper.langdetect.LangDetectChineseTest
    +
    +1 tests    0 failures    0 ignored    6.620s duration    100% successful
    +
    +Tests
    +
    +Test                      Duration   Result
    +testChineseLanguageCode   6.620s     passed
    +
    +Standard output
    +
    [11:31:03,991][INFO ][test                     ][Test worker] settings cluster name
    +[11:31:03,991][INFO ][test                     ][Test worker] starting nodes
    +[11:31:03,992][INFO ][test                     ][Test worker] settings={cluster.name=test-helper-cluster--joerg-1, http.enabled=false, path.home=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect, transport.type=local}
    +[11:31:03,997][INFO ][org.elasticsearch.node.Node][Test worker] initializing ...
    +[11:31:04,003][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] using node location [[NodePath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, spins=null}]], local_lock_id [0]
    +[11:31:04,003][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] node data locations details:
    + -> /Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, free_space [138.5gb], usable_space [138.3gb], total_space [931gb], spins? [unknown], mount [/ (/dev/disk0s2)], type [hfs]
    +[11:31:04,003][INFO ][org.elasticsearch.env.NodeEnvironment][Test worker] heap size [3.5gb], compressed ordinary object pointers [true]
    +[11:31:04,004][INFO ][org.elasticsearch.node.Node][Test worker] node name [Z_hILMV] derived from node ID [Z_hILMVpQCKzHo_-OnlBjg]; set [node.name] to override
    +[11:31:04,004][INFO ][org.elasticsearch.node.Node][Test worker] version[5.1.2], pid[7248], build[c8c4c16/2017-01-11T20:18:39.146Z], OS[Mac OS X/10.9.5/x86_64], JVM[Azul Systems, Inc./OpenJDK 64-Bit Server VM/1.8.0_112/25.112-b16]
    +[11:31:04,004][DEBUG][org.elasticsearch.node.Node][Test worker] using config [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/config], data [[/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data]], logs [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/logs], plugins [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins]
    +[11:31:04,005][DEBUG][org.elasticsearch.plugins.PluginsService][Test worker] [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins] directory does not exist.
    +[11:31:04,005][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] no modules loaded
    +[11:31:04,006][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] loaded plugin [org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin]
    +[11:31:04,009][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [force_merge], size [1], queue size [unbounded]
    +[11:31:04,010][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_started], core [1], max [16], keep alive [5m]
    +[11:31:04,010][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [listener], size [4], queue size [unbounded]
    +[11:31:04,010][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [index], size [8], queue size [200]
    +[11:31:04,010][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [refresh], core [1], max [4], keep alive [5m]
    +[11:31:04,010][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [generic], core [4], max [128], keep alive [30s]
    +[11:31:04,011][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [warmer], core [1], max [4], keep alive [5m]
    +[11:31:04,011][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [search], size [13], queue size [1k]
    +[11:31:04,011][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [flush], core [1], max [4], keep alive [5m]
    +[11:31:04,011][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_store], core [1], max [16], keep alive [5m]
    +[11:31:04,011][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [management], core [1], max [5], keep alive [5m]
    +[11:31:04,012][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [get], size [8], queue size [1k]
    +[11:31:04,012][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [bulk], size [8], queue size [50]
    +[11:31:04,012][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [snapshot], core [1], max [4], keep alive [5m]
    +[11:31:04,014][DEBUG][org.elasticsearch.script.ScriptService][Test worker] using script cache with max_size [100], expire [0s]
    +[11:31:04,019][DEBUG][org.elasticsearch.common.network.IfConfig][Test worker] configuration:
    +
    +lo0
    +        inet 127.0.0.1 netmask:255.0.0.0 scope:host
    +        inet6 fe80::1 prefixlen:64 scope:link
    +        inet6 ::1 prefixlen:128 scope:host
    +        UP MULTICAST LOOPBACK mtu:16384 index:1
    +
    +en4
    +        inet 10.1.1.42 netmask:255.255.0.0 broadcast:10.1.255.255 scope:site
    +        inet6 fe80::6a5b:35ff:febc:4672 prefixlen:64 scope:link
    +        hardware 68:5B:35:BC:46:72
    +        UP MULTICAST mtu:1500 index:9
    +
    +[11:31:04,020][DEBUG][org.elasticsearch.monitor.jvm.JvmGcMonitorService][Test worker] enabled [true], interval [1s], gc_threshold [{default=GcThreshold{name='default', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}, young=GcThreshold{name='young', warnThreshold=1000, infoThreshold=700, debugThreshold=400}, old=GcThreshold{name='old', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}}], overhead [50, 25, 10]
    +[11:31:04,020][DEBUG][org.elasticsearch.monitor.os.OsService][Test worker] using refresh_interval [1s]
    +[11:31:04,020][DEBUG][org.elasticsearch.monitor.process.ProcessService][Test worker] using refresh_interval [1s]
    +[11:31:04,020][DEBUG][org.elasticsearch.monitor.jvm.JvmService][Test worker] using refresh_interval [1s]
    +[11:31:04,021][DEBUG][org.elasticsearch.monitor.fs.FsService][Test worker] using refresh_interval [1s]
    +[11:31:04,023][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider][Test worker] using [cluster.routing.allocation.allow_rebalance] with [indices_all_active]
    +[11:31:04,023][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider][Test worker] using [cluster_concurrent_rebalance] with [2]
    +[11:31:04,024][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider][Test worker] using node_concurrent_outgoing_recoveries [2], node_concurrent_incoming_recoveries [2], node_initial_primaries_recoveries [4]
    +[11:31:04,028][DEBUG][org.elasticsearch.index.store.IndexStoreConfig][Test worker] using indices.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [0b]
    +[11:31:04,028][DEBUG][org.elasticsearch.indices.IndicesQueryCache][Test worker] using [node] query cache with size [364mb] max filter count [10000]
    +[11:31:04,028][DEBUG][org.elasticsearch.indices.IndexingMemoryController][Test worker] using indexing buffer size [364mb] with indices.memory.shard_inactive_time [5m], indices.memory.interval [5s]
    +[11:31:04,033][DEBUG][org.elasticsearch.transport.local.LocalTransport][Test worker] creating [8] workers, queue_size [-1]
    +[11:31:04,033][DEBUG][org.elasticsearch.discovery.zen.UnicastZenPing][Test worker] using initial hosts [0.0.0.0], with concurrent_connects [10], resolve_timeout [5s]
    +[11:31:04,033][DEBUG][org.elasticsearch.discovery.zen.ElectMasterService][Test worker] using minimum_master_nodes [-1]
    +[11:31:04,034][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][Test worker] using ping_timeout [3s], join.timeout [1m], master_election.ignore_non_master [false]
    +[11:31:04,034][DEBUG][org.elasticsearch.discovery.zen.MasterFaultDetection][Test worker] [master] uses ping_interval [1s], ping_timeout [30s], ping_retries [3]
    +[11:31:04,034][DEBUG][org.elasticsearch.discovery.zen.NodesFaultDetection][Test worker] [node  ] uses ping_interval [1s], ping_timeout [30s], ping_retries [3]
    +[11:31:04,074][DEBUG][org.elasticsearch.indices.recovery.RecoverySettings][Test worker] using max_bytes_per_sec[40mb]
    +[11:31:04,086][DEBUG][org.elasticsearch.gateway.GatewayAllocator$InternalPrimaryShardAllocator][Test worker] using initial_shards [quorum]
    +[11:31:04,278][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:31:04,283][DEBUG][org.elasticsearch.gateway.GatewayMetaState][Test worker] took 0s to load state
    +[11:31:04,284][INFO ][org.elasticsearch.node.Node][Test worker] initialized
    +[11:31:04,284][INFO ][org.elasticsearch.node.Node][Test worker] starting ...
    +[11:31:04,284][INFO ][org.elasticsearch.transport.TransportService][Test worker] publish_address {local[4]}, bound_addresses {local[4]}
    +[11:31:04,286][DEBUG][org.elasticsearch.node.Node][Test worker] waiting to join the cluster. timeout [30s]
    +[11:31:04,286][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] processing [initial_join]: execute
    +[11:31:04,287][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] processing [initial_join]: took [0s] no change in cluster_state
    +[11:31:07,294][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[Z_hILMV][generic][T#1]] filtered ping responses: (ignore_non_masters [false])
    +	--> ping_response{node [{Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]}], id[28], master [null],cluster_state_version [-1], cluster_name[test-helper-cluster--joerg-1]}
    +[11:31:07,295][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[Z_hILMV][generic][T#1]] elected as master, waiting for incoming joins ([0] needed)
    +[11:31:07,295][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: execute
    +[11:31:07,296][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] cluster state updated, version [1], source [zen-disco-elected-as-master ([0] nodes joined)]
    +[11:31:07,296][INFO ][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] new_master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]}, reason: zen-disco-elected-as-master ([0] nodes joined)
    +[11:31:07,296][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] publishing cluster state version [1]
    +[11:31:07,297][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] set local cluster state to version 1
    +[11:31:07,297][INFO ][org.elasticsearch.node.Node][Test worker] started
    +[11:31:07,298][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: took [2ms] done applying updated cluster_state (version: 1, uuid: agEfSSipRq-52XyKfFa9rQ)
    +[11:31:07,299][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: execute
    +[11:31:07,300][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] cluster state updated, version [2], source [local-gateway-elected-state]
    +[11:31:07,300][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] publishing cluster state version [2]
    +[11:31:07,300][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] set local cluster state to version 2
    +[11:31:07,302][INFO ][org.elasticsearch.gateway.GatewayService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] recovered [0] indices into cluster_state
    +[11:31:07,302][INFO ][test                     ][Test worker] nodes are started
    +[11:31:07,302][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: took [3ms] done applying updated cluster_state (version: 2, uuid: DwcYxfcETUe7BQ1lNLPp1w)
    +[11:31:07,303][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: execute
    +[11:31:07,305][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] creating Index [[test/4OlIqpr0Q4GADffMclLtIw]], shards [5]/[1] - reason [create index]
    +[11:31:07,305][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null]
    +[11:31:07,307][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] using dynamic[true]
    +[11:31:07,587][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:31:07,916][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:31:07,917][INFO ][org.elasticsearch.cluster.metadata.MetaDataCreateIndexService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test] creating index, cause [api], templates [], shards [5]/[1], mappings [someType]
    +[11:31:07,919][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test] closing ... (reason [cleaning up after validating index on master])
    +[11:31:07,919][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test/4OlIqpr0Q4GADffMclLtIw] closing index service (reason [cleaning up after validating index on master])
    +[11:31:07,919][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] clearing all bitsets because [close]
    +[11:31:07,919][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] full cache clear, reason [close]
    +[11:31:07,919][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] clearing all bitsets because [close]
    +[11:31:07,920][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test/4OlIqpr0Q4GADffMclLtIw] closed... (reason [cleaning up after validating index on master])
    +[11:31:07,920][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] cluster state updated, version [3], source [create-index [test], cause [api]]
    +[11:31:07,920][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] publishing cluster state version [3]
    +[11:31:07,920][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] set local cluster state to version 3
    +[11:31:07,920][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [[test/4OlIqpr0Q4GADffMclLtIw]] creating index
    +[11:31:07,920][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] creating Index [[test/4OlIqpr0Q4GADffMclLtIw]], shards [5]/[1] - reason [create index]
    +[11:31:07,921][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null]
    +[11:31:07,921][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] using dynamic[true]
    +[11:31:07,922][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [[test/4OlIqpr0Q4GADffMclLtIw]] adding mapping [someType], source [{"someType":{"properties":{"content":{"type":"text","fields":{"language":{"type":"langdetect","analyzer":"_keyword","include_in_all":false,"languages":["zh-cn"]}}}}}}]
    +[11:31:08,199][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:31:08,398][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:31:08,399][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][1] creating shard
    +[11:31:08,400][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][1] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/4OlIqpr0Q4GADffMclLtIw/1, shard=[test][1]}]
    +[11:31:08,400][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] creating shard_id [test][1]
    +[11:31:08,401][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
    +[11:31:08,401][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] state: [CREATED]
    +[11:31:08,402][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
    +[11:31:08,403][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][3] creating shard
    +[11:31:08,403][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][generic][T#3]] starting recovery from store ...
    +[11:31:08,404][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][3] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/4OlIqpr0Q4GADffMclLtIw/3, shard=[test][3]}]
    +[11:31:08,404][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] creating shard_id [test][3]
    +[11:31:08,406][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
    +[11:31:08,406][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] state: [CREATED]
    +[11:31:08,407][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[Z_hILMV][generic][T#3]] wipe translog location - creating new translog
    +[11:31:08,408][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
    +[11:31:08,408][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][2] creating shard
    +[11:31:08,408][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][generic][T#1]] starting recovery from store ...
    +[11:31:08,409][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[Z_hILMV][generic][T#3]] no translog ID present in the current generation - creating one
    +[11:31:08,409][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][2] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/4OlIqpr0Q4GADffMclLtIw/2, shard=[test][2]}]
    +[11:31:08,409][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] creating shard_id [test][2]
    +[11:31:08,410][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
    +[11:31:08,410][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] state: [CREATED]
    +[11:31:08,410][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[Z_hILMV][generic][T#1]] wipe translog location - creating new translog
    +[11:31:08,412][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[Z_hILMV][generic][T#1]] no translog ID present in the current generation - creating one
    +[11:31:08,413][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
    +[11:31:08,413][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][0] creating shard
    +[11:31:08,414][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][0] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/4OlIqpr0Q4GADffMclLtIw/0, shard=[test][0]}]
    +[11:31:08,414][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] creating shard_id [test][0]
    +[11:31:08,414][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][generic][T#2]] starting recovery from store ...
    +[11:31:08,416][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
    +[11:31:08,417][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] state: [CREATED]
    +[11:31:08,418][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[Z_hILMV][generic][T#2]] wipe translog location - creating new translog
    +[11:31:08,419][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
    +[11:31:08,420][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[Z_hILMV][generic][T#2]] no translog ID present in the current generation - creating one
    +[11:31:08,421][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][generic][T#4]] starting recovery from store ...
    +[11:31:08,422][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: took [1.1s] done applying updated cluster_state (version: 3, uuid: -s3Mr-S4SaS_Gotb-b7h5A)
    +[11:31:08,426][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[Z_hILMV][generic][T#4]] wipe translog location - creating new translog
    +[11:31:08,426][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
    +[11:31:08,427][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][generic][T#3]] recovery completed from [shard_store], took [26ms]
    +[11:31:08,427][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][generic][T#3]] [test][1] sending [internal:cluster/shard/started] to [Z_hILMVpQCKzHo_-OnlBjg] for shard entry [shard id [[test][1]], allocation id [l-2oGpDiTIe79i7rUsj_3Q], primary term [0], message [after new shard recovery]]
    +[11:31:08,428][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][generic][T#3]] [test][1] received shard started for [shard id [[test][1]], allocation id [l-2oGpDiTIe79i7rUsj_3Q], primary term [0], message [after new shard recovery]]
    +[11:31:08,428][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][1]], allocation id [l-2oGpDiTIe79i7rUsj_3Q], primary term [0], message [after new shard recovery]]]: execute
    +[11:31:08,429][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[Z_hILMV][generic][T#4]] no translog ID present in the current generation - creating one
    +[11:31:08,429][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][1] starting shard [test][1], node[Z_hILMVpQCKzHo_-OnlBjg], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=l-2oGpDiTIe79i7rUsj_3Q], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:31:07.917Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][1]], allocation id [l-2oGpDiTIe79i7rUsj_3Q], primary term [0], message [after new shard recovery]])
    +[11:31:08,432][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][generic][T#1]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
    +[11:31:08,432][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][generic][T#1]] recovery completed from [shard_store], took [29ms]
    +[11:31:08,432][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][generic][T#1]] [test][3] sending [internal:cluster/shard/started] to [Z_hILMVpQCKzHo_-OnlBjg] for shard entry [shard id [[test][3]], allocation id [YOrZlSmdS3SAZXCDqWFoew], primary term [0], message [after new shard recovery]]
    +[11:31:08,433][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][generic][T#1]] [test][3] received shard started for [shard id [[test][3]], allocation id [YOrZlSmdS3SAZXCDqWFoew], primary term [0], message [after new shard recovery]]
    +[11:31:08,433][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][generic][T#2]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
    +[11:31:08,434][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][generic][T#2]] recovery completed from [shard_store], took [24ms]
    +[11:31:08,434][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] cluster state updated, version [4], source [shard-started[shard id [[test][1]], allocation id [l-2oGpDiTIe79i7rUsj_3Q], primary term [0], message [after new shard recovery]]]
    +[11:31:08,434][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][generic][T#2]] [test][2] sending [internal:cluster/shard/started] to [Z_hILMVpQCKzHo_-OnlBjg] for shard entry [shard id [[test][2]], allocation id [LucUDEzZRqmwPmHmdJiplQ], primary term [0], message [after new shard recovery]]
    +[11:31:08,434][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] publishing cluster state version [4]
    +[11:31:08,434][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][generic][T#2]] [test][2] received shard started for [shard id [[test][2]], allocation id [LucUDEzZRqmwPmHmdJiplQ], primary term [0], message [after new shard recovery]]
    +[11:31:08,435][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] set local cluster state to version 4
    +[11:31:08,436][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][generic][T#4]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
    +[11:31:08,436][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
    +[11:31:08,436][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][generic][T#4]] recovery completed from [shard_store], took [23ms]
    +[11:31:08,437][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][3] sending [internal:cluster/shard/started] to [Z_hILMVpQCKzHo_-OnlBjg] for shard entry [shard id [[test][3]], allocation id [YOrZlSmdS3SAZXCDqWFoew], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:31:08,437][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][generic][T#4]] [test][0] sending [internal:cluster/shard/started] to [Z_hILMVpQCKzHo_-OnlBjg] for shard entry [shard id [[test][0]], allocation id [6k-K6n2mRVqqOeLjCFMAxA], primary term [0], message [after new shard recovery]]
    +[11:31:08,437][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][3] received shard started for [shard id [[test][3]], allocation id [YOrZlSmdS3SAZXCDqWFoew], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:31:08,437][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][generic][T#4]] [test][0] received shard started for [shard id [[test][0]], allocation id [6k-K6n2mRVqqOeLjCFMAxA], primary term [0], message [after new shard recovery]]
    +[11:31:08,437][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][2] sending [internal:cluster/shard/started] to [Z_hILMVpQCKzHo_-OnlBjg] for shard entry [shard id [[test][2]], allocation id [LucUDEzZRqmwPmHmdJiplQ], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:31:08,437][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][2] received shard started for [shard id [[test][2]], allocation id [LucUDEzZRqmwPmHmdJiplQ], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:31:08,437][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][4] creating shard
    +[11:31:08,438][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][4] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/4OlIqpr0Q4GADffMclLtIw/4, shard=[test][4]}]
    +[11:31:08,438][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] creating shard_id [test][4]
    +[11:31:08,440][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
    +[11:31:08,440][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] state: [CREATED]
    +[11:31:08,442][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
    +[11:31:08,442][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][generic][T#3]] starting recovery from store ...
    +[11:31:08,442][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][0] sending [internal:cluster/shard/started] to [Z_hILMVpQCKzHo_-OnlBjg] for shard entry [shard id [[test][0]], allocation id [6k-K6n2mRVqqOeLjCFMAxA], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:31:08,442][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][0] received shard started for [shard id [[test][0]], allocation id [6k-K6n2mRVqqOeLjCFMAxA], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:31:08,444][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[Z_hILMV][generic][T#3]] wipe translog location - creating new translog
    +[11:31:08,444][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][1]], allocation id [l-2oGpDiTIe79i7rUsj_3Q], primary term [0], message [after new shard recovery]]]: took [15ms] done applying updated cluster_state (version: 4, uuid: A0lurl0eR_-anXJ8RE4C0A)
    +[11:31:08,444][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][3]], allocation id [YOrZlSmdS3SAZXCDqWFoew], primary term [0], message [after new shard recovery], shard id [[test][2]], allocation id [LucUDEzZRqmwPmHmdJiplQ], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [YOrZlSmdS3SAZXCDqWFoew], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [6k-K6n2mRVqqOeLjCFMAxA], primary term [0], message [after new shard recovery], shard id [[test][2]], allocation id [LucUDEzZRqmwPmHmdJiplQ], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [6k-K6n2mRVqqOeLjCFMAxA], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: execute
    +[11:31:08,445][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][3] starting shard [test][3], node[Z_hILMVpQCKzHo_-OnlBjg], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=YOrZlSmdS3SAZXCDqWFoew], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:31:07.917Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][3]], allocation id [YOrZlSmdS3SAZXCDqWFoew], primary term [0], message [after new shard recovery]])
    +[11:31:08,445][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][2] starting shard [test][2], node[Z_hILMVpQCKzHo_-OnlBjg], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=LucUDEzZRqmwPmHmdJiplQ], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:31:07.917Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][2]], allocation id [LucUDEzZRqmwPmHmdJiplQ], primary term [0], message [after new shard recovery]])
    +[11:31:08,446][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][0] starting shard [test][0], node[Z_hILMVpQCKzHo_-OnlBjg], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=6k-K6n2mRVqqOeLjCFMAxA], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:31:07.917Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][0]], allocation id [6k-K6n2mRVqqOeLjCFMAxA], primary term [0], message [after new shard recovery]])
    +[11:31:08,446][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[Z_hILMV][generic][T#3]] no translog ID present in the current generation - creating one
    +[11:31:08,450][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] cluster state updated, version [5], source [shard-started[shard id [[test][3]], allocation id [YOrZlSmdS3SAZXCDqWFoew], primary term [0], message [after new shard recovery], shard id [[test][2]], allocation id [LucUDEzZRqmwPmHmdJiplQ], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [YOrZlSmdS3SAZXCDqWFoew], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [6k-K6n2mRVqqOeLjCFMAxA], primary term [0], message [after new shard recovery], shard id [[test][2]], allocation id [LucUDEzZRqmwPmHmdJiplQ], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [6k-K6n2mRVqqOeLjCFMAxA], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]
    +[11:31:08,450][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] publishing cluster state version [5]
    +[11:31:08,451][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] set local cluster state to version 5
    +[11:31:08,452][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
    +[11:31:08,453][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
    +[11:31:08,453][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][generic][T#3]] recovery completed from [shard_store], took [15ms]
    +[11:31:08,453][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][generic][T#3]] [test][4] sending [internal:cluster/shard/started] to [Z_hILMVpQCKzHo_-OnlBjg] for shard entry [shard id [[test][4]], allocation id [2mkmKoaOQqmvAtUvmpCCCQ], primary term [0], message [after new shard recovery]]
    +[11:31:08,454][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
    +[11:31:08,454][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][generic][T#3]] [test][4] received shard started for [shard id [[test][4]], allocation id [2mkmKoaOQqmvAtUvmpCCCQ], primary term [0], message [after new shard recovery]]
    +[11:31:08,455][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][4] sending [internal:cluster/shard/started] to [Z_hILMVpQCKzHo_-OnlBjg] for shard entry [shard id [[test][4]], allocation id [2mkmKoaOQqmvAtUvmpCCCQ], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:31:08,455][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][4] received shard started for [shard id [[test][4]], allocation id [2mkmKoaOQqmvAtUvmpCCCQ], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
    +[11:31:08,456][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
    +[11:31:08,459][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][3]], allocation id [YOrZlSmdS3SAZXCDqWFoew], primary term [0], message [after new shard recovery], shard id [[test][2]], allocation id [LucUDEzZRqmwPmHmdJiplQ], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [YOrZlSmdS3SAZXCDqWFoew], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [6k-K6n2mRVqqOeLjCFMAxA], primary term [0], message [after new shard recovery], shard id [[test][2]], allocation id [LucUDEzZRqmwPmHmdJiplQ], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [6k-K6n2mRVqqOeLjCFMAxA], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: took [14ms] done applying updated cluster_state (version: 5, uuid: Wole9O6GQoSW0VrpuzcShA)
    +[11:31:08,460][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [2mkmKoaOQqmvAtUvmpCCCQ], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [2mkmKoaOQqmvAtUvmpCCCQ], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: execute
    +[11:31:08,460][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] [test][4] starting shard [test][4], node[Z_hILMVpQCKzHo_-OnlBjg], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=2mkmKoaOQqmvAtUvmpCCCQ], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:31:07.917Z], delayed=false, allocation_status[deciders_throttled]] (shard started task: [shard id [[test][4]], allocation id [2mkmKoaOQqmvAtUvmpCCCQ], primary term [0], message [after new shard recovery]])
    +[11:31:08,462][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] cluster state updated, version [6], source [shard-started[shard id [[test][4]], allocation id [2mkmKoaOQqmvAtUvmpCCCQ], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [2mkmKoaOQqmvAtUvmpCCCQ], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]
    +[11:31:08,462][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] publishing cluster state version [6]
    +[11:31:08,463][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] set local cluster state to version 6
    +[11:31:08,464][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
    +[11:31:08,470][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z_hILMV][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [2mkmKoaOQqmvAtUvmpCCCQ], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [2mkmKoaOQqmvAtUvmpCCCQ], primary term [0], message [master {Z_hILMV}{Z_hILMVpQCKzHo_-OnlBjg}{c0JA8Oo0T_C9ak08kXFnQA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: took [9ms] done applying updated cluster_state (version: 6, uuid: oQxFF5PTSRW3GihKDjMy2w)
    +[11:31:08,567][INFO ][test                     ][Test worker] stopping nodes
    +[11:31:08,567][INFO ][org.elasticsearch.node.Node][Test worker] stopping ...
    +[11:31:08,568][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test] closing ... (reason [shutdown])
    +[11:31:08,568][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/4OlIqpr0Q4GADffMclLtIw] closing index service (reason [shutdown])
    +[11:31:08,568][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closing... (reason: [shutdown])
    +[11:31:08,568][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
    +[11:31:08,568][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
    +[11:31:08,568][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
    +[11:31:08,568][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
    +[11:31:08,569][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
    +[11:31:08,570][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
    +[11:31:08,570][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
    +[11:31:08,570][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closed (reason: [shutdown])
    +[11:31:08,570][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closing... (reason: [shutdown])
    +[11:31:08,570][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
    +[11:31:08,570][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
    +[11:31:08,570][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
    +[11:31:08,570][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
    +[11:31:08,570][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
    +[11:31:08,571][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
    +[11:31:08,571][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
    +[11:31:08,571][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closed (reason: [shutdown])
    +[11:31:08,572][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closing... (reason: [shutdown])
    +[11:31:08,572][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
    +[11:31:08,572][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
    +[11:31:08,572][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
    +[11:31:08,572][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
    +[11:31:08,573][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
    +[11:31:08,578][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
    +[11:31:08,578][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
    +[11:31:08,578][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closed (reason: [shutdown])
    +[11:31:08,578][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closing... (reason: [shutdown])
    +[11:31:08,579][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
    +[11:31:08,579][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
    +[11:31:08,588][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
    +[11:31:08,589][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
    +[11:31:08,589][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
    +[11:31:08,591][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
    +[11:31:08,592][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
    +[11:31:08,592][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closed (reason: [shutdown])
    +[11:31:08,592][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closing... (reason: [shutdown])
    +[11:31:08,592][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
    +[11:31:08,592][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
    +[11:31:08,592][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
    +[11:31:08,592][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
    +[11:31:08,593][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
    +[11:31:08,594][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
    +[11:31:08,594][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
    +[11:31:08,594][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closed (reason: [shutdown])
    +[11:31:08,594][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close]
    +[11:31:08,594][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][indices_shutdown[T#1]] full cache clear, reason [close]
    +[11:31:08,594][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close]
    +[11:31:08,595][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/4OlIqpr0Q4GADffMclLtIw] closed... (reason [shutdown])
    +[11:31:08,595][INFO ][org.elasticsearch.node.Node][Test worker] stopped
    +[11:31:08,595][INFO ][org.elasticsearch.node.Node][Test worker] closing ...
    +[11:31:08,597][INFO ][org.elasticsearch.node.Node][Test worker] closed
    +[11:31:08,608][INFO ][test                     ][Test worker] data files wiped
+
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangProfileTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangProfileTest.html
new file mode 100644
index 0000000..324e477
--- /dev/null
+++ b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangProfileTest.html
@@ -0,0 +1,121 @@
+Test results - Class org.xbib.elasticsearch.index.mapper.langdetect.LangProfileTest
+
+6 tests, 0 failures, 0 ignored, 0.001s duration, 100% successful
+
+Tests
+Test                      Duration  Result
+testAdd                   0s        passed
+testAddIllegally1         0.001s    passed
+testAddIllegally2         0s        passed
+testLangProfile           0s        passed
+testLangProfileStringInt  0s        passed
+testOmitLessFreq          0s        passed
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMappingTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMappingTest.html
new file mode 100644
index 0000000..272e6f1
--- /dev/null
+++ b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMappingTest.html
@@ -0,0 +1,348 @@
+Test results - Class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMappingTest
+
+Class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMappingTest
+
+6 tests, 0 failures, 0 ignored, 3.185s duration, 100% successful
+
+Tests
+Test                  Duration  Result
+testBinary            0.477s    passed
+testBinary2           0.492s    passed
+testCustomMappings    0.229s    passed
+testShortTextProfile  1.129s    passed
+testSimpleMappings    0.424s    passed
+testToFields          0.434s    passed
+
+Standard output
    [11:31:10,617][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
    +[11:31:10,834][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:31:10,838][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
    +[11:31:11,047][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:31:11,051][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
    +[11:31:11,294][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:31:11,298][ERROR][org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper][Test worker] Illegal character '"' (code 0x22) in base64 content
    + at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper@bbff788; line: 1, column: 73]
    +com.fasterxml.jackson.core.JsonParseException: Illegal character '"' (code 0x22) in base64 content
    + at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper@bbff788; line: 1, column: 73]
    +	at com.fasterxml.jackson.core.JsonParser._constructError(JsonParser.java:1702) ~[jackson-core-2.8.1.jar:2.8.1]
    +	at com.fasterxml.jackson.core.base.ParserMinimalBase._reportError(ParserMinimalBase.java:558) ~[jackson-core-2.8.1.jar:2.8.1]
    +	at com.fasterxml.jackson.core.base.ParserMinimalBase._decodeBase64(ParserMinimalBase.java:422) ~[jackson-core-2.8.1.jar:2.8.1]
    +	at com.fasterxml.jackson.core.json.UTF8StreamJsonParser.getBinaryValue(UTF8StreamJsonParser.java:553) ~[jackson-core-2.8.1.jar:2.8.1]
    +	at com.fasterxml.jackson.core.JsonParser.getBinaryValue(JsonParser.java:1346) ~[jackson-core-2.8.1.jar:2.8.1]
    +	at org.elasticsearch.common.xcontent.json.JsonXContentParser.binaryValue(JsonXContentParser.java:182) ~[elasticsearch-5.1.2.jar:5.1.2]
    +	at org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.parseCreateField(LangdetectMapper.java:96) [main/:?]
    +	at org.elasticsearch.index.mapper.FieldMapper.parse(FieldMapper.java:286) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.FieldMapper$MultiFields.parse(FieldMapper.java:560) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.FieldMapper.parse(FieldMapper.java:299) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentParser.parseObjectOrField(DocumentParser.java:438) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentParser.parseValue(DocumentParser.java:564) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentParser.innerParseObject(DocumentParser.java:384) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentParser.parseObjectOrNested(DocumentParser.java:361) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentParser.internalParseDocument(DocumentParser.java:93) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentParser.parseDocument(DocumentParser.java:66) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentMapper.parse(DocumentMapper.java:275) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentMapper.parse(DocumentMapper.java:271) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMappingTest.testBinary2(LangdetectMappingTest.java:78) [test/:?]
    +	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
    +	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
    +	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
    +	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
    +	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50) [junit-4.12.jar:4.12]
    +	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.12.jar:4.12]
    +	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47) [junit-4.12.jar:4.12]
    +	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325) [junit-4.12.jar:4.12]
    +	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78) [junit-4.12.jar:4.12]
    +	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner.run(ParentRunner.java:363) [junit-4.12.jar:4.12]
    +	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecuter.runTestClass(JUnitTestClassExecuter.java:114) [gradle-testing-jvm-3.2.1.jar:3.2.1]
    +	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecuter.execute(JUnitTestClassExecuter.java:57) [gradle-testing-jvm-3.2.1.jar:3.2.1]
    +	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassProcessor.processTestClass(JUnitTestClassProcessor.java:66) [gradle-testing-jvm-3.2.1.jar:3.2.1]
    +	at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.processTestClass(SuiteTestClassProcessor.java:51) [gradle-testing-base-3.2.1.jar:3.2.1]
    +	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
    +	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
    +	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
    +	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
    +	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:35) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:32) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:93) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at com.sun.proxy.$Proxy2.processTestClass(Unknown Source) [?:?]
    +	at org.gradle.api.internal.tasks.testing.worker.TestWorker.processTestClass(TestWorker.java:109) [gradle-testing-base-3.2.1.jar:3.2.1]
    +	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
    +	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
    +	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
    +	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
    +	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:35) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.remote.internal.hub.MessageHub$Handler.run(MessageHub.java:377) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:54) [gradle-base-services-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.concurrent.StoppableExecutorImpl$1.run(StoppableExecutorImpl.java:40) [gradle-base-services-3.2.1.jar:3.2.1]
    +	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_112]
    +	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_112]
    +	at java.lang.Thread.run(Thread.java:745) [?:1.8.0_112]
    +[11:31:11,305][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
    +[11:31:11,534][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:31:11,537][ERROR][org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper][Test worker] Illegal character '"' (code 0x22) in base64 content
    + at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper@7bfa36da; line: 1, column: 73]
    +com.fasterxml.jackson.core.JsonParseException: Illegal character '"' (code 0x22) in base64 content
    + at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper@7bfa36da; line: 1, column: 73]
    +	at com.fasterxml.jackson.core.JsonParser._constructError(JsonParser.java:1702) ~[jackson-core-2.8.1.jar:2.8.1]
    +	at com.fasterxml.jackson.core.base.ParserMinimalBase._reportError(ParserMinimalBase.java:558) ~[jackson-core-2.8.1.jar:2.8.1]
    +	at com.fasterxml.jackson.core.base.ParserMinimalBase._decodeBase64(ParserMinimalBase.java:422) ~[jackson-core-2.8.1.jar:2.8.1]
    +	at com.fasterxml.jackson.core.json.UTF8StreamJsonParser.getBinaryValue(UTF8StreamJsonParser.java:553) ~[jackson-core-2.8.1.jar:2.8.1]
    +	at com.fasterxml.jackson.core.JsonParser.getBinaryValue(JsonParser.java:1346) ~[jackson-core-2.8.1.jar:2.8.1]
    +	at org.elasticsearch.common.xcontent.json.JsonXContentParser.binaryValue(JsonXContentParser.java:182) ~[elasticsearch-5.1.2.jar:5.1.2]
    +	at org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.parseCreateField(LangdetectMapper.java:96) [main/:?]
    +	at org.elasticsearch.index.mapper.FieldMapper.parse(FieldMapper.java:286) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.FieldMapper$MultiFields.parse(FieldMapper.java:560) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.FieldMapper.parse(FieldMapper.java:299) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentParser.parseObjectOrField(DocumentParser.java:438) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentParser.parseValue(DocumentParser.java:564) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentParser.innerParseObject(DocumentParser.java:384) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentParser.parseObjectOrNested(DocumentParser.java:361) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentParser.internalParseDocument(DocumentParser.java:93) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentParser.parseDocument(DocumentParser.java:66) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentMapper.parse(DocumentMapper.java:275) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentMapper.parse(DocumentMapper.java:271) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMappingTest.testBinary2(LangdetectMappingTest.java:88) [test/:?]
    +	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
    +	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
    +	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
    +	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
    +	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50) [junit-4.12.jar:4.12]
    +	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.12.jar:4.12]
    +	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47) [junit-4.12.jar:4.12]
    +	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325) [junit-4.12.jar:4.12]
    +	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78) [junit-4.12.jar:4.12]
    +	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner.run(ParentRunner.java:363) [junit-4.12.jar:4.12]
    +	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecuter.runTestClass(JUnitTestClassExecuter.java:114) [gradle-testing-jvm-3.2.1.jar:3.2.1]
    +	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecuter.execute(JUnitTestClassExecuter.java:57) [gradle-testing-jvm-3.2.1.jar:3.2.1]
    +	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassProcessor.processTestClass(JUnitTestClassProcessor.java:66) [gradle-testing-jvm-3.2.1.jar:3.2.1]
    +	at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.processTestClass(SuiteTestClassProcessor.java:51) [gradle-testing-base-3.2.1.jar:3.2.1]
    +	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
    +	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
    +	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
    +	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
    +	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:35) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:32) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:93) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at com.sun.proxy.$Proxy2.processTestClass(Unknown Source) [?:?]
    +	at org.gradle.api.internal.tasks.testing.worker.TestWorker.processTestClass(TestWorker.java:109) [gradle-testing-base-3.2.1.jar:3.2.1]
    +	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
    +	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
    +	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
    +	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
    +	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:35) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.remote.internal.hub.MessageHub$Handler.run(MessageHub.java:377) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:54) [gradle-base-services-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.concurrent.StoppableExecutorImpl$1.run(StoppableExecutorImpl.java:40) [gradle-base-services-3.2.1.jar:3.2.1]
    +	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_112]
    +	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_112]
    +	at java.lang.Thread.run(Thread.java:745) [?:1.8.0_112]
    +[11:31:11,547][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
    +[11:31:11,744][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:31:11,748][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
    +[11:31:11,963][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:31:11,969][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
    +[11:31:12,191][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:31:12,199][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
    +[11:31:12,386][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:31:12,390][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
    +[11:31:12,668][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:31:12,669][ERROR][org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper][Test worker] Illegal white space character (code 0x20) as character #3 of 4-char base64 unit: can only used between units
    + at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper@4bc95c8f; line: 1, column: 61]
    +com.fasterxml.jackson.core.JsonParseException: Illegal white space character (code 0x20) as character #3 of 4-char base64 unit: can only used between units
    + at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper@4bc95c8f; line: 1, column: 61]
    +	at com.fasterxml.jackson.core.JsonParser._constructError(JsonParser.java:1702) ~[jackson-core-2.8.1.jar:2.8.1]
    +	at com.fasterxml.jackson.core.base.ParserMinimalBase._reportError(ParserMinimalBase.java:558) ~[jackson-core-2.8.1.jar:2.8.1]
    +	at com.fasterxml.jackson.core.base.ParserMinimalBase._decodeBase64(ParserMinimalBase.java:422) ~[jackson-core-2.8.1.jar:2.8.1]
    +	at com.fasterxml.jackson.core.json.UTF8StreamJsonParser.getBinaryValue(UTF8StreamJsonParser.java:553) ~[jackson-core-2.8.1.jar:2.8.1]
    +	at com.fasterxml.jackson.core.JsonParser.getBinaryValue(JsonParser.java:1346) ~[jackson-core-2.8.1.jar:2.8.1]
    +	at org.elasticsearch.common.xcontent.json.JsonXContentParser.binaryValue(JsonXContentParser.java:182) ~[elasticsearch-5.1.2.jar:5.1.2]
    +	at org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.parseCreateField(LangdetectMapper.java:96) [main/:?]
    +	at org.elasticsearch.index.mapper.FieldMapper.parse(FieldMapper.java:286) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentParser.parseObjectOrField(DocumentParser.java:438) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentParser.parseValue(DocumentParser.java:564) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentParser.innerParseObject(DocumentParser.java:384) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentParser.parseObjectOrNested(DocumentParser.java:361) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentParser.internalParseDocument(DocumentParser.java:93) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentParser.parseDocument(DocumentParser.java:66) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentMapper.parse(DocumentMapper.java:275) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.elasticsearch.index.mapper.DocumentMapper.parse(DocumentMapper.java:271) [elasticsearch-5.1.2.jar:5.1.2]
    +	at org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMappingTest.testBinary(LangdetectMappingTest.java:52) [test/:?]
    +	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
    +	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
    +	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
    +	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
    +	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50) [junit-4.12.jar:4.12]
    +	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.12.jar:4.12]
    +	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47) [junit-4.12.jar:4.12]
    +	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325) [junit-4.12.jar:4.12]
    +	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78) [junit-4.12.jar:4.12]
    +	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268) [junit-4.12.jar:4.12]
    +	at org.junit.runners.ParentRunner.run(ParentRunner.java:363) [junit-4.12.jar:4.12]
    +	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecuter.runTestClass(JUnitTestClassExecuter.java:114) [gradle-testing-jvm-3.2.1.jar:3.2.1]
    +	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecuter.execute(JUnitTestClassExecuter.java:57) [gradle-testing-jvm-3.2.1.jar:3.2.1]
    +	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassProcessor.processTestClass(JUnitTestClassProcessor.java:66) [gradle-testing-jvm-3.2.1.jar:3.2.1]
    +	at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.processTestClass(SuiteTestClassProcessor.java:51) [gradle-testing-base-3.2.1.jar:3.2.1]
    +	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
    +	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
    +	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
    +	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
    +	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:35) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:32) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:93) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at com.sun.proxy.$Proxy2.processTestClass(Unknown Source) [?:?]
    +	at org.gradle.api.internal.tasks.testing.worker.TestWorker.processTestClass(TestWorker.java:109) [gradle-testing-base-3.2.1.jar:3.2.1]
    +	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
    +	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
    +	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
    +	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
    +	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:35) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.remote.internal.hub.MessageHub$Handler.run(MessageHub.java:377) [gradle-messaging-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:54) [gradle-base-services-3.2.1.jar:3.2.1]
    +	at org.gradle.internal.concurrent.StoppableExecutorImpl$1.run(StoppableExecutorImpl.java:40) [gradle-base-services-3.2.1.jar:3.2.1]
    +	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_112]
    +	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_112]
    +	at java.lang.Thread.run(Thread.java:745) [?:1.8.0_112]
    +[11:31:12,675][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
    +[11:31:13,276][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[11:31:13,281][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
    +[11:31:13,798][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
+
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LanguageTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LanguageTest.html
new file mode 100644
index 0000000..fcbdbb9
--- /dev/null
+++ b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LanguageTest.html
@@ -0,0 +1,96 @@
+Test results - Class org.xbib.elasticsearch.index.mapper.langdetect.LanguageTest
+
+1 test, 0 failures, 0 ignored, 0s duration, 100% successful
+
+Tests
+Test          Duration  Result
+testLanguage  0s        passed
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.NGramTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.NGramTest.html
new file mode 100644
index 0000000..33dd1e7
--- /dev/null
+++ b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.NGramTest.html
@@ -0,0 +1,111 @@
+Test results - Class org.xbib.elasticsearch.index.mapper.langdetect.NGramTest
+
+Class org.xbib.elasticsearch.index.mapper.langdetect.NGramTest
+
+4 tests, 0 failures, 0 ignored, 0s duration, 100% successful
+
+Tests
+Test                       Duration  Result
+testConstants              0s        passed
+testNGram                  0s        passed
+testNormalizeWithCJKKanji  0s        passed
+testNormalizeWithLatin     0s        passed
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.SimpleDetectorTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.SimpleDetectorTest.html
new file mode 100644
index 0000000..fadb992
--- /dev/null
+++ b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.SimpleDetectorTest.html
@@ -0,0 +1,106 @@
+Test results - Class org.xbib.elasticsearch.index.mapper.langdetect.SimpleDetectorTest
+
+Class org.xbib.elasticsearch.index.mapper.langdetect.SimpleDetectorTest
+
+1 test, 0 failures, 0 ignored, 0.245s duration, 100% successful
+
+Tests
+Test          Duration  Result
+testDetector  0.245s    passed
+
+Standard output
    [11:31:14,058][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
+
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.SimpleHttpTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.SimpleHttpTest.html
new file mode 100644
index 0000000..c418bb8
--- /dev/null
+++ b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.SimpleHttpTest.html
@@ -0,0 +1,101 @@
+Test results - Class org.xbib.elasticsearch.index.mapper.langdetect.SimpleHttpTest
+
+Class org.xbib.elasticsearch.index.mapper.langdetect.SimpleHttpTest
+
+2 tests, 0 failures, 2 ignored, 0s duration, success rate -
+
+Tests
+Test                  Duration  Result
+httpPost              -         ignored
+httpPostShortProfile  -         ignored
diff --git a/docs/test/css/base-style.css b/docs/test/css/base-style.css
new file mode 100644
index 0000000..4afa73e
--- /dev/null
+++ b/docs/test/css/base-style.css
@@ -0,0 +1,179 @@
+/* base stylesheet of the generated Gradle HTML test report: page layout, tab links, result tables, and code blocks */
diff --git a/docs/test/css/style.css b/docs/test/css/style.css
new file mode 100644
index 0000000..3dc4913
--- /dev/null
+++ b/docs/test/css/style.css
@@ -0,0 +1,84 @@
+/* summary boxes and success/failure/skipped colours of the generated Gradle HTML test report */
diff --git a/docs/test/index.html b/docs/test/index.html
new file mode 100644
index 0000000..b7dadda
--- /dev/null
+++ b/docs/test/index.html
@@ -0,0 +1,238 @@
+Test results - Test Summary
+
+Test Summary
+
+33 tests, 0 failures, 2 ignored, 1m6.08s duration, 100% successful
+
+Ignored tests
+
+Packages
+Package                                         Tests  Failures  Ignored  Duration  Success rate
+org.xbib.elasticsearch.index.mapper.langdetect  33     0         2        1m6.08s   100%
diff --git a/docs/test/js/report.js b/docs/test/js/report.js
new file mode 100644
index 0000000..83bab4a
--- /dev/null
+++ b/docs/test/js/report.js
@@ -0,0 +1,194 @@
+/* tab switching and line-wrapping toggle script of the generated Gradle HTML test report */
diff --git a/docs/test/packages/org.xbib.elasticsearch.index.mapper.langdetect.html b/docs/test/packages/org.xbib.elasticsearch.index.mapper.langdetect.html
new file mode 100644
index 0000000..df40b13
--- /dev/null
+++ b/docs/test/packages/org.xbib.elasticsearch.index.mapper.langdetect.html
@@ -0,0 +1,219 @@
+Test results - Package org.xbib.elasticsearch.index.mapper.langdetect
[Gradle HTML test report for package org.xbib.elasticsearch.index.mapper.langdetect; markup stripped, recoverable summary follows.]
[Summary: 33 tests, 0 failures, 2 ignored, duration 1m6.08s, 100% successful.]
[Classes: DetectLanguageTest 4 tests (1.523s), DetectorTest 5 (0s), LangDetectActionTest 2 (47.717s), LangDetectBinaryTest 1 (6.790s), LangDetectChineseTest 1 (6.620s), LangProfileTest 6 (0.001s), LangdetectMappingTest 6 (3.185s), LanguageTest 1 (0s), NGramTest 4 (0s), SimpleDetectorTest 1 (0.245s), SimpleHttpTest 2 (both ignored, 0s); all passing classes at 100% success rate.]
    + + diff --git a/gradle.properties b/gradle.properties index 4441187..c3bd1f2 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,3 +1,3 @@ group = org.xbib.elasticsearch.plugin name = elasticsearch-langdetect -version = 2.4.4.1 +version = 5.1.2.0 diff --git a/settings.gradle b/settings.gradle index 3c46c89..24c2c63 100644 --- a/settings.gradle +++ b/settings.gradle @@ -1 +1 @@ -rootProject.name = 'elasticsearch-langdetect' \ No newline at end of file +rootProject.name = name \ No newline at end of file diff --git a/src/docs/asciidoc/css/foundation.css b/src/docs/asciidoc/css/foundation.css new file mode 100644 index 0000000..27be611 --- /dev/null +++ b/src/docs/asciidoc/css/foundation.css @@ -0,0 +1,684 @@ +/*! normalize.css v2.1.2 | MIT License | git.io/normalize */ +/* ========================================================================== HTML5 display definitions ========================================================================== */ +/** Correct `block` display not defined in IE 8/9. */ +article, aside, details, figcaption, figure, footer, header, hgroup, main, nav, section, summary { display: block; } + +/** Correct `inline-block` display not defined in IE 8/9. */ +audio, canvas, video { display: inline-block; } + +/** Prevent modern browsers from displaying `audio` without controls. Remove excess height in iOS 5 devices. */ +audio:not([controls]) { display: none; height: 0; } + +/** Address `[hidden]` styling not present in IE 8/9. Hide the `template` element in IE, Safari, and Firefox < 22. */ +[hidden], template { display: none; } + +script { display: none !important; } + +/* ========================================================================== Base ========================================================================== */ +/** 1. Set default font family to sans-serif. 2. Prevent iOS text size adjust after orientation change, without disabling user zoom. */ +html { font-family: sans-serif; /* 1 */ -ms-text-size-adjust: 100%; /* 2 */ -webkit-text-size-adjust: 100%; /* 2 */ } + +/** Remove default margin. */ +body { margin: 0; } + +/* ========================================================================== Links ========================================================================== */ +/** Remove the gray background color from active links in IE 10. */ +a { background: transparent; } + +/** Address `outline` inconsistency between Chrome and other browsers. */ +a:focus { outline: thin dotted; } + +/** Improve readability when focused and also mouse hovered in all browsers. */ +a:active, a:hover { outline: 0; } + +/* ========================================================================== Typography ========================================================================== */ +/** Address variable `h1` font-size and margin within `section` and `article` contexts in Firefox 4+, Safari 5, and Chrome. */ +h1 { font-size: 2em; margin: 0.67em 0; } + +/** Address styling not present in IE 8/9, Safari 5, and Chrome. */ +abbr[title] { border-bottom: 1px dotted; } + +/** Address style set to `bolder` in Firefox 4+, Safari 5, and Chrome. */ +b, strong { font-weight: bold; } + +/** Address styling not present in Safari 5 and Chrome. */ +dfn { font-style: italic; } + +/** Address differences between Firefox and other browsers. */ +hr { -moz-box-sizing: content-box; box-sizing: content-box; height: 0; } + +/** Address styling not present in IE 8/9. */ +mark { background: #ff0; color: #000; } + +/** Correct font family set oddly in Safari 5 and Chrome. 
*/ +code, kbd, pre, samp { font-family: monospace, serif; font-size: 1em; } + +/** Improve readability of pre-formatted text in all browsers. */ +pre { white-space: pre-wrap; } + +/** Set consistent quote types. */ +q { quotes: "\201C" "\201D" "\2018" "\2019"; } + +/** Address inconsistent and variable font size in all browsers. */ +small { font-size: 80%; } + +/** Prevent `sub` and `sup` affecting `line-height` in all browsers. */ +sub, sup { font-size: 75%; line-height: 0; position: relative; vertical-align: baseline; } + +sup { top: -0.5em; } + +sub { bottom: -0.25em; } + +/* ========================================================================== Embedded content ========================================================================== */ +/** Remove border when inside `a` element in IE 8/9. */ +img { border: 0; } + +/** Correct overflow displayed oddly in IE 9. */ +svg:not(:root) { overflow: hidden; } + +/* ========================================================================== Figures ========================================================================== */ +/** Address margin not present in IE 8/9 and Safari 5. */ +figure { margin: 0; } + +/* ========================================================================== Forms ========================================================================== */ +/** Define consistent border, margin, and padding. */ +fieldset { border: 1px solid #c0c0c0; margin: 0 2px; padding: 0.35em 0.625em 0.75em; } + +/** 1. Correct `color` not being inherited in IE 8/9. 2. Remove padding so people aren't caught out if they zero out fieldsets. */ +legend { border: 0; /* 1 */ padding: 0; /* 2 */ } + +/** 1. Correct font family not being inherited in all browsers. 2. Correct font size not being inherited in all browsers. 3. Address margins set differently in Firefox 4+, Safari 5, and Chrome. */ +button, input, select, textarea { font-family: inherit; /* 1 */ font-size: 100%; /* 2 */ margin: 0; /* 3 */ } + +/** Address Firefox 4+ setting `line-height` on `input` using `!important` in the UA stylesheet. */ +button, input { line-height: normal; } + +/** Address inconsistent `text-transform` inheritance for `button` and `select`. All other form control elements do not inherit `text-transform` values. Correct `button` style inheritance in Chrome, Safari 5+, and IE 8+. Correct `select` style inheritance in Firefox 4+ and Opera. */ +button, select { text-transform: none; } + +/** 1. Avoid the WebKit bug in Android 4.0.* where (2) destroys native `audio` and `video` controls. 2. Correct inability to style clickable `input` types in iOS. 3. Improve usability and consistency of cursor style between image-type `input` and others. */ +button, html input[type="button"], input[type="reset"], input[type="submit"] { -webkit-appearance: button; /* 2 */ cursor: pointer; /* 3 */ } + +/** Re-set default cursor for disabled elements. */ +button[disabled], html input[disabled] { cursor: default; } + +/** 1. Address box sizing set to `content-box` in IE 8/9. 2. Remove excess padding in IE 8/9. */ +input[type="checkbox"], input[type="radio"] { box-sizing: border-box; /* 1 */ padding: 0; /* 2 */ } + +/** 1. Address `appearance` set to `searchfield` in Safari 5 and Chrome. 2. Address `box-sizing` set to `border-box` in Safari 5 and Chrome (include `-moz` to future-proof). 
*/ +input[type="search"] { -webkit-appearance: textfield; /* 1 */ -moz-box-sizing: content-box; -webkit-box-sizing: content-box; /* 2 */ box-sizing: content-box; } + +/** Remove inner padding and search cancel button in Safari 5 and Chrome on OS X. */ +input[type="search"]::-webkit-search-cancel-button, input[type="search"]::-webkit-search-decoration { -webkit-appearance: none; } + +/** Remove inner padding and border in Firefox 4+. */ +button::-moz-focus-inner, input::-moz-focus-inner { border: 0; padding: 0; } + +/** 1. Remove default vertical scrollbar in IE 8/9. 2. Improve readability and alignment in all browsers. */ +textarea { overflow: auto; /* 1 */ vertical-align: top; /* 2 */ } + +/* ========================================================================== Tables ========================================================================== */ +/** Remove most spacing between table cells. */ +table { border-collapse: collapse; border-spacing: 0; } + +meta.foundation-mq-small { font-family: "only screen and (min-width: 768px)"; width: 768px; } + +meta.foundation-mq-medium { font-family: "only screen and (min-width:1280px)"; width: 1280px; } + +meta.foundation-mq-large { font-family: "only screen and (min-width:1440px)"; width: 1440px; } + +*, *:before, *:after { -moz-box-sizing: border-box; -webkit-box-sizing: border-box; box-sizing: border-box; } + +html, body { font-size: 100%; } + +body { background: white; color: #222222; padding: 0; margin: 0; font-family: "Helvetica Neue", "Helvetica", Helvetica, Arial, sans-serif; font-weight: normal; font-style: normal; line-height: 1; position: relative; cursor: auto; } + +a:hover { cursor: pointer; } + +img, object, embed { max-width: 100%; height: auto; } + +object, embed { height: 100%; } + +img { -ms-interpolation-mode: bicubic; } + +#map_canvas img, #map_canvas embed, #map_canvas object, .map_canvas img, .map_canvas embed, .map_canvas object { max-width: none !important; } + +.left { float: left !important; } + +.right { float: right !important; } + +.text-left { text-align: left !important; } + +.text-right { text-align: right !important; } + +.text-center { text-align: center !important; } + +.text-justify { text-align: justify !important; } + +.hide { display: none; } + +.antialiased { -webkit-font-smoothing: antialiased; } + +img { display: inline-block; vertical-align: middle; } + +textarea { height: auto; min-height: 50px; } + +select { width: 100%; } + +object, svg { display: inline-block; vertical-align: middle; } + +.center { margin-left: auto; margin-right: auto; } + +.spread { width: 100%; } + +p.lead, .paragraph.lead > p, #preamble > .sectionbody > .paragraph:first-of-type p { font-size: 1.21875em; line-height: 1.6; } + +.subheader, .admonitionblock td.content > .title, .audioblock > .title, .exampleblock > .title, .imageblock > .title, .listingblock > .title, .literalblock > .title, .stemblock > .title, .openblock > .title, .paragraph > .title, .quoteblock > .title, table.tableblock > .title, .verseblock > .title, .videoblock > .title, .dlist > .title, .olist > .title, .ulist > .title, .qlist > .title, .hdlist > .title { line-height: 1.4; color: #6f6f6f; font-weight: 300; margin-top: 0.2em; margin-bottom: 0.5em; } + +/* Typography resets */ +div, dl, dt, dd, ul, ol, li, h1, h2, h3, #toctitle, .sidebarblock > .content > .title, h4, h5, h6, pre, form, p, blockquote, th, td { margin: 0; padding: 0; direction: ltr; } + +/* Default Link Styles */ +a { color: #2ba6cb; text-decoration: none; line-height: inherit; } +a:hover, 
a:focus { color: #2795b6; } +a img { border: none; } + +/* Default paragraph styles */ +p { font-family: inherit; font-weight: normal; font-size: 1em; line-height: 1.6; margin-bottom: 1.25em; text-rendering: optimizeLegibility; } +p aside { font-size: 0.875em; line-height: 1.35; font-style: italic; } + +/* Default header styles */ +h1, h2, h3, #toctitle, .sidebarblock > .content > .title, h4, h5, h6 { font-family: "Helvetica Neue", "Helvetica", Helvetica, Arial, sans-serif; font-weight: bold; font-style: normal; color: #222222; text-rendering: optimizeLegibility; margin-top: 1em; margin-bottom: 0.5em; line-height: 1.2125em; } +h1 small, h2 small, h3 small, #toctitle small, .sidebarblock > .content > .title small, h4 small, h5 small, h6 small { font-size: 60%; color: #6f6f6f; line-height: 0; } + +h1 { font-size: 2.125em; } + +h2 { font-size: 1.6875em; } + +h3, #toctitle, .sidebarblock > .content > .title { font-size: 1.375em; } + +h4 { font-size: 1.125em; } + +h5 { font-size: 1.125em; } + +h6 { font-size: 1em; } + +hr { border: solid #dddddd; border-width: 1px 0 0; clear: both; margin: 1.25em 0 1.1875em; height: 0; } + +/* Helpful Typography Defaults */ +em, i { font-style: italic; line-height: inherit; } + +strong, b { font-weight: bold; line-height: inherit; } + +small { font-size: 60%; line-height: inherit; } + +code { font-family: Consolas, "Liberation Mono", Courier, monospace; font-weight: bold; color: #7f0a0c; } + +/* Lists */ +ul, ol, dl { font-size: 1em; line-height: 1.6; margin-bottom: 1.25em; list-style-position: outside; font-family: inherit; } + +ul, ol { margin-left: 1.5em; } +ul.no-bullet, ol.no-bullet { margin-left: 1.5em; } + +/* Unordered Lists */ +ul li ul, ul li ol { margin-left: 1.25em; margin-bottom: 0; font-size: 1em; /* Override nested font-size change */ } +ul.square li ul, ul.circle li ul, ul.disc li ul { list-style: inherit; } +ul.square { list-style-type: square; } +ul.circle { list-style-type: circle; } +ul.disc { list-style-type: disc; } +ul.no-bullet { list-style: none; } + +/* Ordered Lists */ +ol li ul, ol li ol { margin-left: 1.25em; margin-bottom: 0; } + +/* Definition Lists */ +dl dt { margin-bottom: 0.3125em; font-weight: bold; } +dl dd { margin-bottom: 1.25em; } + +/* Abbreviations */ +abbr, acronym { text-transform: uppercase; font-size: 90%; color: #222222; border-bottom: 1px dotted #dddddd; cursor: help; } + +abbr { text-transform: none; } + +/* Blockquotes */ +blockquote { margin: 0 0 1.25em; padding: 0.5625em 1.25em 0 1.1875em; border-left: 1px solid #dddddd; } +blockquote cite { display: block; font-size: 0.8125em; color: #555555; } +blockquote cite:before { content: "\2014 \0020"; } +blockquote cite a, blockquote cite a:visited { color: #555555; } + +blockquote, blockquote p { line-height: 1.6; color: #6f6f6f; } + +/* Microformats */ +.vcard { display: inline-block; margin: 0 0 1.25em 0; border: 1px solid #dddddd; padding: 0.625em 0.75em; } +.vcard li { margin: 0; display: block; } +.vcard .fn { font-weight: bold; font-size: 0.9375em; } + +.vevent .summary { font-weight: bold; } +.vevent abbr { cursor: auto; text-decoration: none; font-weight: bold; border: none; padding: 0 0.0625em; } + +@media only screen and (min-width: 768px) { h1, h2, h3, #toctitle, .sidebarblock > .content > .title, h4, h5, h6 { line-height: 1.4; } + h1 { font-size: 2.75em; } + h2 { font-size: 2.3125em; } + h3, #toctitle, .sidebarblock > .content > .title { font-size: 1.6875em; } + h4 { font-size: 1.4375em; } } +/* Tables */ +table { background: white; margin-bottom: 
1.25em; border: solid 1px #dddddd; } +table thead, table tfoot { background: whitesmoke; font-weight: bold; } +table thead tr th, table thead tr td, table tfoot tr th, table tfoot tr td { padding: 0.5em 0.625em 0.625em; font-size: inherit; color: #222222; text-align: left; } +table tr th, table tr td { padding: 0.5625em 0.625em; font-size: inherit; color: #222222; } +table tr.even, table tr.alt, table tr:nth-of-type(even) { background: #f9f9f9; } +table thead tr th, table tfoot tr th, table tbody tr td, table tr td, table tfoot tr td { display: table-cell; line-height: 1.4; } + +body { -moz-osx-font-smoothing: grayscale; -webkit-font-smoothing: antialiased; tab-size: 4; } + +h1, h2, h3, #toctitle, .sidebarblock > .content > .title, h4, h5, h6 { line-height: 1.4; } + +.clearfix:before, .clearfix:after, .float-group:before, .float-group:after { content: " "; display: table; } +.clearfix:after, .float-group:after { clear: both; } + +*:not(pre) > code { font-size: inherit; font-style: normal !important; letter-spacing: 0; padding: 0; line-height: inherit; word-wrap: break-word; } +*:not(pre) > code.nobreak { word-wrap: normal; } +*:not(pre) > code.nowrap { white-space: nowrap; } + +pre, pre > code { line-height: 1.4; color: black; font-family: monospace, serif; font-weight: normal; } + +em em { font-style: normal; } + +strong strong { font-weight: normal; } + +.keyseq { color: #555555; } + +kbd { font-family: Consolas, "Liberation Mono", Courier, monospace; display: inline-block; color: #222222; font-size: 0.65em; line-height: 1.45; background-color: #f7f7f7; border: 1px solid #ccc; -webkit-border-radius: 3px; border-radius: 3px; -webkit-box-shadow: 0 1px 0 rgba(0, 0, 0, 0.2), 0 0 0 0.1em white inset; box-shadow: 0 1px 0 rgba(0, 0, 0, 0.2), 0 0 0 0.1em white inset; margin: 0 0.15em; padding: 0.2em 0.5em; vertical-align: middle; position: relative; top: -0.1em; white-space: nowrap; } + +.keyseq kbd:first-child { margin-left: 0; } + +.keyseq kbd:last-child { margin-right: 0; } + +.menuseq, .menu { color: #090909; } + +b.button:before, b.button:after { position: relative; top: -1px; font-weight: normal; } + +b.button:before { content: "["; padding: 0 3px 0 2px; } + +b.button:after { content: "]"; padding: 0 2px 0 3px; } + +#header, #content, #footnotes, #footer { width: 100%; margin-left: auto; margin-right: auto; margin-top: 0; margin-bottom: 0; max-width: 62.5em; *zoom: 1; position: relative; padding-left: 0.9375em; padding-right: 0.9375em; } +#header:before, #header:after, #content:before, #content:after, #footnotes:before, #footnotes:after, #footer:before, #footer:after { content: " "; display: table; } +#header:after, #content:after, #footnotes:after, #footer:after { clear: both; } + +#content { margin-top: 1.25em; } + +#content:before { content: none; } + +#header > h1:first-child { color: black; margin-top: 2.25rem; margin-bottom: 0; } +#header > h1:first-child + #toc { margin-top: 8px; border-top: 1px solid #dddddd; } +#header > h1:only-child, body.toc2 #header > h1:nth-last-child(2) { border-bottom: 1px solid #dddddd; padding-bottom: 8px; } +#header .details { border-bottom: 1px solid #dddddd; line-height: 1.45; padding-top: 0.25em; padding-bottom: 0.25em; padding-left: 0.25em; color: #555555; display: -ms-flexbox; display: -webkit-flex; display: flex; -ms-flex-flow: row wrap; -webkit-flex-flow: row wrap; flex-flow: row wrap; } +#header .details span:first-child { margin-left: -0.125em; } +#header .details span.email a { color: #6f6f6f; } +#header .details br { display: none; } +#header 
.details br + span:before { content: "\00a0\2013\00a0"; } +#header .details br + span.author:before { content: "\00a0\22c5\00a0"; color: #6f6f6f; } +#header .details br + span#revremark:before { content: "\00a0|\00a0"; } +#header #revnumber { text-transform: capitalize; } +#header #revnumber:after { content: "\00a0"; } + +#content > h1:first-child:not([class]) { color: black; border-bottom: 1px solid #dddddd; padding-bottom: 8px; margin-top: 0; padding-top: 1rem; margin-bottom: 1.25rem; } + +#toc { border-bottom: 1px solid #dddddd; padding-bottom: 0.5em; } +#toc > ul { margin-left: 0.125em; } +#toc ul.sectlevel0 > li > a { font-style: italic; } +#toc ul.sectlevel0 ul.sectlevel1 { margin: 0.5em 0; } +#toc ul { font-family: "Helvetica Neue", "Helvetica", Helvetica, Arial, sans-serif; list-style-type: none; } +#toc li { line-height: 1.3334; margin-top: 0.3334em; } +#toc a { text-decoration: none; } +#toc a:active { text-decoration: underline; } + +#toctitle { color: #6f6f6f; font-size: 1.2em; } + +@media only screen and (min-width: 768px) { #toctitle { font-size: 1.375em; } + body.toc2 { padding-left: 15em; padding-right: 0; } + #toc.toc2 { margin-top: 0 !important; background-color: #f2f2f2; position: fixed; width: 15em; left: 0; top: 0; border-right: 1px solid #dddddd; border-top-width: 0 !important; border-bottom-width: 0 !important; z-index: 1000; padding: 1.25em 1em; height: 100%; overflow: auto; } + #toc.toc2 #toctitle { margin-top: 0; margin-bottom: 0.8rem; font-size: 1.2em; } + #toc.toc2 > ul { font-size: 0.9em; margin-bottom: 0; } + #toc.toc2 ul ul { margin-left: 0; padding-left: 1em; } + #toc.toc2 ul.sectlevel0 ul.sectlevel1 { padding-left: 0; margin-top: 0.5em; margin-bottom: 0.5em; } + body.toc2.toc-right { padding-left: 0; padding-right: 15em; } + body.toc2.toc-right #toc.toc2 { border-right-width: 0; border-left: 1px solid #dddddd; left: auto; right: 0; } } +@media only screen and (min-width: 1280px) { body.toc2 { padding-left: 20em; padding-right: 0; } + #toc.toc2 { width: 20em; } + #toc.toc2 #toctitle { font-size: 1.375em; } + #toc.toc2 > ul { font-size: 0.95em; } + #toc.toc2 ul ul { padding-left: 1.25em; } + body.toc2.toc-right { padding-left: 0; padding-right: 20em; } } +#content #toc { border-style: solid; border-width: 1px; border-color: #d9d9d9; margin-bottom: 1.25em; padding: 1.25em; background: #f2f2f2; -webkit-border-radius: 0; border-radius: 0; } +#content #toc > :first-child { margin-top: 0; } +#content #toc > :last-child { margin-bottom: 0; } + +#footer { max-width: 100%; background-color: #222222; padding: 1.25em; } + +#footer-text { color: #dddddd; line-height: 1.44; } + +.sect1 { padding-bottom: 0.625em; } + +@media only screen and (min-width: 768px) { .sect1 { padding-bottom: 1.25em; } } +.sect1 + .sect1 { border-top: 1px solid #dddddd; } + +#content h1 > a.anchor, h2 > a.anchor, h3 > a.anchor, #toctitle > a.anchor, .sidebarblock > .content > .title > a.anchor, h4 > a.anchor, h5 > a.anchor, h6 > a.anchor { position: absolute; z-index: 1001; width: 1.5ex; margin-left: -1.5ex; display: block; text-decoration: none !important; visibility: hidden; text-align: center; font-weight: normal; } +#content h1 > a.anchor:before, h2 > a.anchor:before, h3 > a.anchor:before, #toctitle > a.anchor:before, .sidebarblock > .content > .title > a.anchor:before, h4 > a.anchor:before, h5 > a.anchor:before, h6 > a.anchor:before { content: "\00A7"; font-size: 0.85em; display: block; padding-top: 0.1em; } +#content h1:hover > a.anchor, #content h1 > a.anchor:hover, h2:hover > a.anchor, 
h2 > a.anchor:hover, h3:hover > a.anchor, #toctitle:hover > a.anchor, .sidebarblock > .content > .title:hover > a.anchor, h3 > a.anchor:hover, #toctitle > a.anchor:hover, .sidebarblock > .content > .title > a.anchor:hover, h4:hover > a.anchor, h4 > a.anchor:hover, h5:hover > a.anchor, h5 > a.anchor:hover, h6:hover > a.anchor, h6 > a.anchor:hover { visibility: visible; } +#content h1 > a.link, h2 > a.link, h3 > a.link, #toctitle > a.link, .sidebarblock > .content > .title > a.link, h4 > a.link, h5 > a.link, h6 > a.link { color: #222222; text-decoration: none; } +#content h1 > a.link:hover, h2 > a.link:hover, h3 > a.link:hover, #toctitle > a.link:hover, .sidebarblock > .content > .title > a.link:hover, h4 > a.link:hover, h5 > a.link:hover, h6 > a.link:hover { color: #151515; } + +.audioblock, .imageblock, .literalblock, .listingblock, .stemblock, .videoblock { margin-bottom: 1.25em; } + +.admonitionblock td.content > .title, .audioblock > .title, .exampleblock > .title, .imageblock > .title, .listingblock > .title, .literalblock > .title, .stemblock > .title, .openblock > .title, .paragraph > .title, .quoteblock > .title, table.tableblock > .title, .verseblock > .title, .videoblock > .title, .dlist > .title, .olist > .title, .ulist > .title, .qlist > .title, .hdlist > .title { text-rendering: optimizeLegibility; text-align: left; } + +table.tableblock > caption.title { white-space: nowrap; overflow: visible; max-width: 0; } + +.paragraph.lead > p, #preamble > .sectionbody > .paragraph:first-of-type p { color: black; } + +table.tableblock #preamble > .sectionbody > .paragraph:first-of-type p { font-size: inherit; } + +.admonitionblock > table { border-collapse: separate; border: 0; background: none; width: 100%; } +.admonitionblock > table td.icon { text-align: center; width: 80px; } +.admonitionblock > table td.icon img { max-width: initial; } +.admonitionblock > table td.icon .title { font-weight: bold; font-family: "Helvetica Neue", "Helvetica", Helvetica, Arial, sans-serif; text-transform: uppercase; } +.admonitionblock > table td.content { padding-left: 1.125em; padding-right: 1.25em; border-left: 1px solid #dddddd; color: #555555; } +.admonitionblock > table td.content > :last-child > :last-child { margin-bottom: 0; } + +.exampleblock > .content { border-style: solid; border-width: 1px; border-color: #e6e6e6; margin-bottom: 1.25em; padding: 1.25em; background: white; -webkit-border-radius: 0; border-radius: 0; } +.exampleblock > .content > :first-child { margin-top: 0; } +.exampleblock > .content > :last-child { margin-bottom: 0; } + +.sidebarblock { border-style: solid; border-width: 1px; border-color: #d9d9d9; margin-bottom: 1.25em; padding: 1.25em; background: #f2f2f2; -webkit-border-radius: 0; border-radius: 0; } +.sidebarblock > :first-child { margin-top: 0; } +.sidebarblock > :last-child { margin-bottom: 0; } +.sidebarblock > .content > .title { color: #6f6f6f; margin-top: 0; } + +.exampleblock > .content > :last-child > :last-child, .exampleblock > .content .olist > ol > li:last-child > :last-child, .exampleblock > .content .ulist > ul > li:last-child > :last-child, .exampleblock > .content .qlist > ol > li:last-child > :last-child, .sidebarblock > .content > :last-child > :last-child, .sidebarblock > .content .olist > ol > li:last-child > :last-child, .sidebarblock > .content .ulist > ul > li:last-child > :last-child, .sidebarblock > .content .qlist > ol > li:last-child > :last-child { margin-bottom: 0; } + +.literalblock pre, .listingblock pre:not(.highlight), .listingblock 
pre[class="highlight"], .listingblock pre[class^="highlight "], .listingblock pre.CodeRay, .listingblock pre.prettyprint { background: #eeeeee; } +.sidebarblock .literalblock pre, .sidebarblock .listingblock pre:not(.highlight), .sidebarblock .listingblock pre[class="highlight"], .sidebarblock .listingblock pre[class^="highlight "], .sidebarblock .listingblock pre.CodeRay, .sidebarblock .listingblock pre.prettyprint { background: #f2f1f1; } + +.literalblock pre, .literalblock pre[class], .listingblock pre, .listingblock pre[class] { border: 1px solid #cccccc; -webkit-border-radius: 0; border-radius: 0; word-wrap: break-word; padding: 0.8em 0.8em 0.65em 0.8em; font-size: 0.8125em; } +.literalblock pre.nowrap, .literalblock pre[class].nowrap, .listingblock pre.nowrap, .listingblock pre[class].nowrap { overflow-x: auto; white-space: pre; word-wrap: normal; } +@media only screen and (min-width: 768px) { .literalblock pre, .literalblock pre[class], .listingblock pre, .listingblock pre[class] { font-size: 0.90625em; } } +@media only screen and (min-width: 1280px) { .literalblock pre, .literalblock pre[class], .listingblock pre, .listingblock pre[class] { font-size: 1em; } } + +.literalblock.output pre { color: #eeeeee; background-color: black; } + +.listingblock pre.highlightjs { padding: 0; } +.listingblock pre.highlightjs > code { padding: 0.8em 0.8em 0.65em 0.8em; -webkit-border-radius: 0; border-radius: 0; } + +.listingblock > .content { position: relative; } + +.listingblock code[data-lang]:before { display: none; content: attr(data-lang); position: absolute; font-size: 0.75em; top: 0.425rem; right: 0.5rem; line-height: 1; text-transform: uppercase; color: #999; } + +.listingblock:hover code[data-lang]:before { display: block; } + +.listingblock.terminal pre .command:before { content: attr(data-prompt); padding-right: 0.5em; color: #999; } + +.listingblock.terminal pre .command:not([data-prompt]):before { content: "$"; } + +table.pyhltable { border-collapse: separate; border: 0; margin-bottom: 0; background: none; } + +table.pyhltable td { vertical-align: top; padding-top: 0; padding-bottom: 0; line-height: 1.4; } + +table.pyhltable td.code { padding-left: .75em; padding-right: 0; } + +pre.pygments .lineno, table.pyhltable td:not(.code) { color: #999; padding-left: 0; padding-right: .5em; border-right: 1px solid #dddddd; } + +pre.pygments .lineno { display: inline-block; margin-right: .25em; } + +table.pyhltable .linenodiv { background: none !important; padding-right: 0 !important; } + +.quoteblock { margin: 0 1em 1.25em 1.5em; display: table; } +.quoteblock > .title { margin-left: -1.5em; margin-bottom: 0.75em; } +.quoteblock blockquote, .quoteblock blockquote p { color: #6f6f6f; font-size: 1.15rem; line-height: 1.75; word-spacing: 0.1em; letter-spacing: 0; font-style: italic; text-align: justify; } +.quoteblock blockquote { margin: 0; padding: 0; border: 0; } +.quoteblock blockquote:before { content: "\201c"; float: left; font-size: 2.75em; font-weight: bold; line-height: 0.6em; margin-left: -0.6em; color: #6f6f6f; text-shadow: 0 1px 2px rgba(0, 0, 0, 0.1); } +.quoteblock blockquote > .paragraph:last-child p { margin-bottom: 0; } +.quoteblock .attribution { margin-top: 0.5em; margin-right: 0.5ex; text-align: right; } +.quoteblock .quoteblock { margin-left: 0; margin-right: 0; padding: 0.5em 0; border-left: 3px solid #555555; } +.quoteblock .quoteblock blockquote { padding: 0 0 0 0.75em; } +.quoteblock .quoteblock blockquote:before { display: none; } + +.verseblock { margin: 0 1em 1.25em 
1em; } +.verseblock pre { font-family: "Open Sans", "DejaVu Sans", sans; font-size: 1.15rem; color: #6f6f6f; font-weight: 300; text-rendering: optimizeLegibility; } +.verseblock pre strong { font-weight: 400; } +.verseblock .attribution { margin-top: 1.25rem; margin-left: 0.5ex; } + +.quoteblock .attribution, .verseblock .attribution { font-size: 0.8125em; line-height: 1.45; font-style: italic; } +.quoteblock .attribution br, .verseblock .attribution br { display: none; } +.quoteblock .attribution cite, .verseblock .attribution cite { display: block; letter-spacing: -0.025em; color: #555555; } + +.quoteblock.abstract { margin: 0 0 1.25em 0; display: block; } +.quoteblock.abstract blockquote, .quoteblock.abstract blockquote p { text-align: left; word-spacing: 0; } +.quoteblock.abstract blockquote:before, .quoteblock.abstract blockquote p:first-of-type:before { display: none; } + +table.tableblock { max-width: 100%; border-collapse: separate; } +table.tableblock td > .paragraph:last-child p > p:last-child, table.tableblock th > p:last-child, table.tableblock td > p:last-child { margin-bottom: 0; } + +table.tableblock, th.tableblock, td.tableblock { border: 0 solid #dddddd; } + +table.grid-all th.tableblock, table.grid-all td.tableblock { border-width: 0 1px 1px 0; } + +table.grid-all tfoot > tr > th.tableblock, table.grid-all tfoot > tr > td.tableblock { border-width: 1px 1px 0 0; } + +table.grid-cols th.tableblock, table.grid-cols td.tableblock { border-width: 0 1px 0 0; } + +table.grid-all * > tr > .tableblock:last-child, table.grid-cols * > tr > .tableblock:last-child { border-right-width: 0; } + +table.grid-rows th.tableblock, table.grid-rows td.tableblock { border-width: 0 0 1px 0; } + +table.grid-all tbody > tr:last-child > th.tableblock, table.grid-all tbody > tr:last-child > td.tableblock, table.grid-all thead:last-child > tr > th.tableblock, table.grid-rows tbody > tr:last-child > th.tableblock, table.grid-rows tbody > tr:last-child > td.tableblock, table.grid-rows thead:last-child > tr > th.tableblock { border-bottom-width: 0; } + +table.grid-rows tfoot > tr > th.tableblock, table.grid-rows tfoot > tr > td.tableblock { border-width: 1px 0 0 0; } + +table.frame-all { border-width: 1px; } + +table.frame-sides { border-width: 0 1px; } + +table.frame-topbot { border-width: 1px 0; } + +th.halign-left, td.halign-left { text-align: left; } + +th.halign-right, td.halign-right { text-align: right; } + +th.halign-center, td.halign-center { text-align: center; } + +th.valign-top, td.valign-top { vertical-align: top; } + +th.valign-bottom, td.valign-bottom { vertical-align: bottom; } + +th.valign-middle, td.valign-middle { vertical-align: middle; } + +table thead th, table tfoot th { font-weight: bold; } + +tbody tr th { display: table-cell; line-height: 1.4; background: whitesmoke; } + +tbody tr th, tbody tr th p, tfoot tr th, tfoot tr th p { color: #222222; font-weight: bold; } + +p.tableblock > code:only-child { background: none; padding: 0; } + +p.tableblock { font-size: 1em; } + +td > div.verse { white-space: pre; } + +ol { margin-left: 1.75em; } + +ul li ol { margin-left: 1.5em; } + +dl dd { margin-left: 1.125em; } + +dl dd:last-child, dl dd:last-child > :last-child { margin-bottom: 0; } + +ol > li p, ul > li p, ul dd, ol dd, .olist .olist, .ulist .ulist, .ulist .olist, .olist .ulist { margin-bottom: 0.625em; } + +ul.unstyled, ol.unnumbered, ul.checklist, ul.none { list-style-type: none; } + +ul.unstyled, ol.unnumbered, ul.checklist { margin-left: 0.625em; } + +ul.checklist li > 
p:first-child > .fa-square-o:first-child, ul.checklist li > p:first-child > .fa-check-square-o:first-child { width: 1em; font-size: 0.85em; } + +ul.checklist li > p:first-child > input[type="checkbox"]:first-child { width: 1em; position: relative; top: 1px; } + +ul.inline { margin: 0 auto 0.625em auto; margin-left: -1.375em; margin-right: 0; padding: 0; list-style: none; overflow: hidden; } +ul.inline > li { list-style: none; float: left; margin-left: 1.375em; display: block; } +ul.inline > li > * { display: block; } + +.unstyled dl dt { font-weight: normal; font-style: normal; } + +ol.arabic { list-style-type: decimal; } + +ol.decimal { list-style-type: decimal-leading-zero; } + +ol.loweralpha { list-style-type: lower-alpha; } + +ol.upperalpha { list-style-type: upper-alpha; } + +ol.lowerroman { list-style-type: lower-roman; } + +ol.upperroman { list-style-type: upper-roman; } + +ol.lowergreek { list-style-type: lower-greek; } + +.hdlist > table, .colist > table { border: 0; background: none; } +.hdlist > table > tbody > tr, .colist > table > tbody > tr { background: none; } + +td.hdlist1, td.hdlist2 { vertical-align: top; padding: 0 0.625em; } + +td.hdlist1 { font-weight: bold; padding-bottom: 1.25em; } + +.literalblock + .colist, .listingblock + .colist { margin-top: -0.5em; } + +.colist > table tr > td:first-of-type { padding: 0 0.75em; line-height: 1; } +.colist > table tr > td:first-of-type img { max-width: initial; } +.colist > table tr > td:last-of-type { padding: 0.25em 0; } + +.thumb, .th { line-height: 0; display: inline-block; border: solid 4px white; -webkit-box-shadow: 0 0 0 1px #dddddd; box-shadow: 0 0 0 1px #dddddd; } + +.imageblock.left, .imageblock[style*="float: left"] { margin: 0.25em 0.625em 1.25em 0; } +.imageblock.right, .imageblock[style*="float: right"] { margin: 0.25em 0 1.25em 0.625em; } +.imageblock > .title { margin-bottom: 0; } +.imageblock.thumb, .imageblock.th { border-width: 6px; } +.imageblock.thumb > .title, .imageblock.th > .title { padding: 0 0.125em; } + +.image.left, .image.right { margin-top: 0.25em; margin-bottom: 0.25em; display: inline-block; line-height: 0; } +.image.left { margin-right: 0.625em; } +.image.right { margin-left: 0.625em; } + +a.image { text-decoration: none; display: inline-block; } +a.image object { pointer-events: none; } + +sup.footnote, sup.footnoteref { font-size: 0.875em; position: static; vertical-align: super; } +sup.footnote a, sup.footnoteref a { text-decoration: none; } +sup.footnote a:active, sup.footnoteref a:active { text-decoration: underline; } + +#footnotes { padding-top: 0.75em; padding-bottom: 0.75em; margin-bottom: 0.625em; } +#footnotes hr { width: 20%; min-width: 6.25em; margin: -0.25em 0 0.75em 0; border-width: 1px 0 0 0; } +#footnotes .footnote { padding: 0 0.375em 0 0.225em; line-height: 1.3334; font-size: 0.875em; margin-left: 1.2em; text-indent: -1.05em; margin-bottom: 0.2em; } +#footnotes .footnote a:first-of-type { font-weight: bold; text-decoration: none; } +#footnotes .footnote:last-of-type { margin-bottom: 0; } +#content #footnotes { margin-top: -0.625em; margin-bottom: 0; padding: 0.75em 0; } + +.gist .file-data > table { border: 0; background: #fff; width: 100%; margin-bottom: 0; } +.gist .file-data > table td.line-data { width: 99%; } + +div.unbreakable { page-break-inside: avoid; } + +.big { font-size: larger; } + +.small { font-size: smaller; } + +.underline { text-decoration: underline; } + +.overline { text-decoration: overline; } + +.line-through { text-decoration: line-through; } + +.aqua { 
color: #00bfbf; } + +.aqua-background { background-color: #00fafa; } + +.black { color: black; } + +.black-background { background-color: black; } + +.blue { color: #0000bf; } + +.blue-background { background-color: #0000fa; } + +.fuchsia { color: #bf00bf; } + +.fuchsia-background { background-color: #fa00fa; } + +.gray { color: #606060; } + +.gray-background { background-color: #7d7d7d; } + +.green { color: #006000; } + +.green-background { background-color: #007d00; } + +.lime { color: #00bf00; } + +.lime-background { background-color: #00fa00; } + +.maroon { color: #600000; } + +.maroon-background { background-color: #7d0000; } + +.navy { color: #000060; } + +.navy-background { background-color: #00007d; } + +.olive { color: #606000; } + +.olive-background { background-color: #7d7d00; } + +.purple { color: #600060; } + +.purple-background { background-color: #7d007d; } + +.red { color: #bf0000; } + +.red-background { background-color: #fa0000; } + +.silver { color: #909090; } + +.silver-background { background-color: #bcbcbc; } + +.teal { color: #006060; } + +.teal-background { background-color: #007d7d; } + +.white { color: #bfbfbf; } + +.white-background { background-color: #fafafa; } + +.yellow { color: #bfbf00; } + +.yellow-background { background-color: #fafa00; } + +span.icon > .fa { cursor: default; } + +.admonitionblock td.icon [class^="fa icon-"] { font-size: 2.5em; text-shadow: 1px 1px 2px rgba(0, 0, 0, 0.5); cursor: default; } +.admonitionblock td.icon .icon-note:before { content: "\f05a"; color: #207c98; } +.admonitionblock td.icon .icon-tip:before { content: "\f0eb"; text-shadow: 1px 1px 2px rgba(155, 155, 0, 0.8); color: #111; } +.admonitionblock td.icon .icon-warning:before { content: "\f071"; color: #bf6900; } +.admonitionblock td.icon .icon-caution:before { content: "\f06d"; color: #bf3400; } +.admonitionblock td.icon .icon-important:before { content: "\f06a"; color: #bf0000; } + +.conum[data-value] { display: inline-block; color: #fff !important; background-color: #222222; -webkit-border-radius: 100px; border-radius: 100px; text-align: center; font-size: 0.75em; width: 1.67em; height: 1.67em; line-height: 1.67em; font-family: "Open Sans", "DejaVu Sans", sans-serif; font-style: normal; font-weight: bold; } +.conum[data-value] * { color: #fff !important; } +.conum[data-value] + b { display: none; } +.conum[data-value]:after { content: attr(data-value); } +pre .conum[data-value] { position: relative; top: -0.125em; } + +b.conum * { color: inherit !important; } + +.conum:not([data-value]):empty { display: none; } + +.literalblock pre, .listingblock pre { background: #eeeeee; } diff --git a/src/docs/asciidoclet/overview.adoc b/src/docs/asciidoclet/overview.adoc new file mode 100644 index 0000000..8834ba1 --- /dev/null +++ b/src/docs/asciidoclet/overview.adoc @@ -0,0 +1,4 @@ += Bibliographic entity processing for Java +Jörg Prante +Version 1.0 + diff --git a/src/site/resources/towerofbabel.jpg b/src/docs/img/towerofbabel.jpg similarity index 100% rename from src/site/resources/towerofbabel.jpg rename to src/docs/img/towerofbabel.jpg diff --git a/src/integration-test/java/org/xbib/elasticsearch/NodeTestUtils.java b/src/integration-test/java/org/xbib/elasticsearch/NodeTestUtils.java deleted file mode 100644 index e7ccf28..0000000 --- a/src/integration-test/java/org/xbib/elasticsearch/NodeTestUtils.java +++ /dev/null @@ -1,73 +0,0 @@ -package org.xbib.elasticsearch; - -import org.elasticsearch.client.Client; -import org.elasticsearch.common.settings.Settings; -import 
org.elasticsearch.node.MockNode; -import org.elasticsearch.node.Node; -import org.junit.After; -import org.junit.Before; -import org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin; - -import java.io.IOException; -import java.nio.file.FileVisitResult; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.SimpleFileVisitor; -import java.nio.file.attribute.BasicFileAttributes; - -public class NodeTestUtils { - - private Node node; - private Client client; - - public static Node createNode() { - Settings nodeSettings = Settings.settingsBuilder() - .put("path.home", System.getProperty("path.home")) - .put("index.number_of_shards", 1) - .put("index.number_of_replica", 0) - .build(); - Node node = new MockNode(nodeSettings, LangdetectPlugin.class); - node.start(); - return node; - } - - public static void releaseNode(Node node) throws IOException { - if (node != null) { - node.close(); - deleteFiles(); - } - } - - @Before - public void setupNode() throws IOException { - node = createNode(); - client = node.client(); - } - - protected Client client() { - return client; - } - - @After - public void cleanupNode() throws IOException { - releaseNode(node); - } - - private static void deleteFiles() throws IOException { - Path directory = Paths.get(System.getProperty("path.home") + "/data"); - Files.walkFileTree(directory, new SimpleFileVisitor() { - @Override - public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { - Files.delete(file); - return FileVisitResult.CONTINUE; - } - - @Override - public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { - Files.delete(dir); - return FileVisitResult.CONTINUE; - } - }); - } -} diff --git a/src/integration-test/java/org/xbib/elasticsearch/plugin/langdetect/LangDetectActionTest.java b/src/integration-test/java/org/xbib/elasticsearch/plugin/langdetect/LangDetectActionTest.java deleted file mode 100644 index d11f49e..0000000 --- a/src/integration-test/java/org/xbib/elasticsearch/plugin/langdetect/LangDetectActionTest.java +++ /dev/null @@ -1,105 +0,0 @@ -package org.xbib.elasticsearch.plugin.langdetect; - -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.query.QueryBuilders; -import org.junit.Test; -import org.xbib.elasticsearch.NodeTestUtils; -import org.xbib.elasticsearch.action.langdetect.LangdetectRequestBuilder; -import org.xbib.elasticsearch.action.langdetect.LangdetectResponse; - -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; - -public class LangDetectActionTest extends NodeTestUtils { - - @Test - public void testLangDetectProfile() throws Exception { - - // normal profile - LangdetectRequestBuilder langdetectRequestBuilder = - new LangdetectRequestBuilder(client()) - .setText("hello this is a test"); - LangdetectResponse response = langdetectRequestBuilder.execute().actionGet(); - assertFalse(response.getLanguages().isEmpty()); - assertEquals("en", response.getLanguages().get(0).getLanguage()); - assertNull(response.getProfile()); - - // short-text profile - LangdetectRequestBuilder langdetectProfileRequestBuilder = - new LangdetectRequestBuilder(client()) - .setText("hello this is a test") - .setProfile("short-text"); - response = 
langdetectProfileRequestBuilder.execute().actionGet(); - assertNotNull(response); - assertFalse(response.getLanguages().isEmpty()); - assertEquals("en", response.getLanguages().get(0).getLanguage()); - assertEquals("short-text", response.getProfile()); - - // again normal profile - langdetectRequestBuilder = new LangdetectRequestBuilder(client()) - .setText("hello this is a test"); - response = langdetectRequestBuilder.execute().actionGet(); - assertNotNull(response); - assertFalse(response.getLanguages().isEmpty()); - assertEquals("en", response.getLanguages().get(0).getLanguage()); - assertNull(response.getProfile()); - } - - @Test - public void testSort() throws Exception { - Settings settings = Settings.settingsBuilder() - .build(); - - client().admin().indices().prepareCreate("test") - .setSettings(settings) - .addMapping("article", - "{ article : { properties : { content : { type : \"langdetect\", languages : [\"de\", \"en\", \"fr\"] } } } }") - .execute().actionGet(); - - client().admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet(); - - client().prepareIndex("test", "article", "1") - .setSource(jsonBuilder().startObject() - .field("title", "Some title") - .field("content", "Oh, say can you see by the dawn`s early light, What so proudly we hailed at the twilight`s last gleaming?") - .endObject()).execute().actionGet(); - client().prepareIndex("test", "article", "2") - .setSource(jsonBuilder().startObject() - .field("title", "Ein Titel") - .field("content", "Einigkeit und Recht und Freiheit für das deutsche Vaterland!") - .endObject()).execute().actionGet(); - client().prepareIndex("test", "article", "3") - .setSource(jsonBuilder().startObject() - .field("title", "Un titre") - .field("content", "Allons enfants de la Patrie, Le jour de gloire est arrivé!") - .endObject()).execute().actionGet(); - - client().admin().indices().prepareRefresh().execute().actionGet(); - - SearchResponse searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.termQuery("content", "en")) - .execute().actionGet(); - assertEquals(1L, searchResponse.getHits().totalHits()); - assertEquals("Oh, say can you see by the dawn`s early light, What so proudly we hailed at the twilight`s last gleaming?", - searchResponse.getHits().getAt(0).getSource().get("content").toString()); - - searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.termQuery("content", "de")) - .execute().actionGet(); - assertEquals(1L, searchResponse.getHits().totalHits()); - assertEquals("Einigkeit und Recht und Freiheit für das deutsche Vaterland!", - searchResponse.getHits().getAt(0).getSource().get("content").toString()); - - searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.termQuery("content", "fr")) - .execute().actionGet(); - assertEquals(1L, searchResponse.getHits().totalHits()); - assertEquals("Allons enfants de la Patrie, Le jour de gloire est arrivé!", - searchResponse.getHits().getAt(0).getSource().get("content").toString()); - } - -} diff --git a/src/integration-test/java/org/xbib/elasticsearch/plugin/langdetect/LangDetectBinaryTest.java b/src/integration-test/java/org/xbib/elasticsearch/plugin/langdetect/LangDetectBinaryTest.java deleted file mode 100644 index 54c8c22..0000000 --- a/src/integration-test/java/org/xbib/elasticsearch/plugin/langdetect/LangDetectBinaryTest.java +++ /dev/null @@ -1,54 +0,0 @@ -package org.xbib.elasticsearch.plugin.langdetect; - -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; -import 
org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; -import org.elasticsearch.action.index.IndexAction; -import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchAction; -import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.index.query.QueryBuilders; -import org.junit.Test; -import org.xbib.elasticsearch.NodeTestUtils; - -import static org.junit.Assert.assertEquals; - -public class LangDetectBinaryTest extends NodeTestUtils { - - @Test - public void testLangDetectBinary() throws Exception { - CreateIndexRequestBuilder createIndexRequestBuilder = - new CreateIndexRequestBuilder(client(), CreateIndexAction.INSTANCE).setIndex("test"); - createIndexRequestBuilder.addMapping("someType", "{\n" + - " \"properties\": {\n" + - " \"content\": {\n" + - " \"type\": \"multi_field\",\n" + - " \"fields\": {\n" + - " \"content\": {\n" + - " \"type\": \"string\"\n" + - " },\n" + - " \"language\": {\n" + - " \"type\": \"langdetect\",\n" + - " \"binary\": true\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}"); - createIndexRequestBuilder.execute().actionGet(); - IndexRequestBuilder indexRequestBuilder = - new IndexRequestBuilder(client(), IndexAction.INSTANCE) - .setIndex("test").setType("someType").setId("1") - //\"God Save the Queen\" (alternatively \"God Save the King\" - .setSource("content", "IkdvZCBTYXZlIHRoZSBRdWVlbiIgKGFsdGVybmF0aXZlbHkgIkdvZCBTYXZlIHRoZSBLaW5nIg=="); - indexRequestBuilder.setRefresh(true).execute().actionGet(); - SearchRequestBuilder searchRequestBuilder = - new SearchRequestBuilder(client(), SearchAction.INSTANCE) - .setIndices("test") - .setQuery(QueryBuilders.matchAllQuery()) - .addFields("content", "content.language"); - SearchResponse searchResponse = searchRequestBuilder.execute().actionGet(); - assertEquals(1L, searchResponse.getHits().getTotalHits()); - assertEquals("en", searchResponse.getHits().getAt(0).field("content.language").getValue()); - } -} diff --git a/src/integration-test/java/org/xbib/elasticsearch/plugin/langdetect/LangDetectChineseTest.java b/src/integration-test/java/org/xbib/elasticsearch/plugin/langdetect/LangDetectChineseTest.java deleted file mode 100644 index f0dd7bb..0000000 --- a/src/integration-test/java/org/xbib/elasticsearch/plugin/langdetect/LangDetectChineseTest.java +++ /dev/null @@ -1,56 +0,0 @@ -package org.xbib.elasticsearch.plugin.langdetect; - -import org.elasticsearch.action.admin.indices.create.CreateIndexAction; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; -import org.elasticsearch.action.index.IndexAction; -import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchAction; -import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.index.query.QueryBuilders; -import org.junit.Test; -import org.xbib.elasticsearch.NodeTestUtils; - -import static org.junit.Assert.assertEquals; - -public class LangDetectChineseTest extends NodeTestUtils { - - @Test - public void testChineseLanguageCode() throws Exception { - CreateIndexRequestBuilder createIndexRequestBuilder = - new CreateIndexRequestBuilder(client(), CreateIndexAction.INSTANCE) - .setIndex("test"); - createIndexRequestBuilder.addMapping("someType", "{\n" + - " \"properties\": {\n" + - " \"content\": {\n" + - " \"type\": \"multi_field\",\n" + - " \"fields\": {\n" + 
- " \"content\": {\n" + - " \"type\": \"string\"\n" + - " },\n" + - " \"language\": {\n" + - " \"type\": \"langdetect\",\n" + - " \"languages\": [\"zh-cn\"]\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}"); - createIndexRequestBuilder.execute().actionGet(); - IndexRequestBuilder indexRequestBuilder = - new IndexRequestBuilder(client(), IndexAction.INSTANCE) - .setIndex("test").setType("someType").setId("1") - .setSource("content", "位于美国首都华盛顿都会圈的希望中文学校5日晚举办活动庆祝建立20周年。从中国大陆留学生为子女学中文而自发建立的学习班,到学生规模在全美名列前茅的中文学校,这个平台的发展也折射出美国的中文教育热度逐步提升。\n" + - "希望中文学校是大华盛顿地区最大中文学校,现有7个校区逾4000名学生,规模在美国东部数一数二。不过,见证了希望中文学校20年发展的人们起初根本无法想象这个小小的中文教育平台能发展到今日之规模。"); - indexRequestBuilder.setRefresh(true).execute().actionGet(); - SearchRequestBuilder searchRequestBuilder = - new SearchRequestBuilder(client(), SearchAction.INSTANCE) - .setIndices("test") - .setTypes("someType") - .setQuery(QueryBuilders.termQuery("content.language", "zh-cn")) - .addField("content.language"); - SearchResponse searchResponse = searchRequestBuilder.execute().actionGet(); - assertEquals(1L, searchResponse.getHits().getTotalHits()); - assertEquals("zh-cn", searchResponse.getHits().getAt(0).field("content.language").getValue()); - } -} diff --git a/src/integration-test/java/org/xbib/elasticsearch/plugin/langdetect/SimpleHttpTest.java b/src/integration-test/java/org/xbib/elasticsearch/plugin/langdetect/SimpleHttpTest.java deleted file mode 100644 index 17e4e21..0000000 --- a/src/integration-test/java/org/xbib/elasticsearch/plugin/langdetect/SimpleHttpTest.java +++ /dev/null @@ -1,72 +0,0 @@ - -package org.xbib.elasticsearch.plugin.langdetect; - -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequestBuilder; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.client.Client; -import org.elasticsearch.common.io.Streams; -import org.elasticsearch.common.transport.InetSocketTransportAddress; -import org.junit.Test; -import org.xbib.elasticsearch.NodeTestUtils; - -import java.io.IOException; -import java.io.InputStreamReader; -import java.io.OutputStreamWriter; -import java.io.StringReader; -import java.io.StringWriter; -import java.net.HttpURLConnection; -import java.net.URL; -import java.nio.charset.StandardCharsets; - -import static org.junit.Assert.assertEquals; - -public class SimpleHttpTest extends NodeTestUtils { - - @Test - public void httpPost() throws IOException { - InetSocketTransportAddress httpAddress = findHttpAddress(client()); - if (httpAddress == null) { - throw new IllegalArgumentException("no HTTP address found"); - } - URL base = new URL("http://" + httpAddress.getHost() + ":" + httpAddress.getPort()); - URL url = new URL(base, "_langdetect"); - HttpURLConnection connection = (HttpURLConnection) url.openConnection(); - connection.setRequestMethod("POST"); - connection.setDoOutput(true); - connection.setDoInput(true); - Streams.copy(new StringReader("Das ist ein Text"), new OutputStreamWriter(connection.getOutputStream(), StandardCharsets.UTF_8)); - StringWriter response = new StringWriter(); - Streams.copy(new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8), response); - assertEquals("{\"languages\":[{\"language\":\"de\",\"probability\":0.9999967609942226}]}", response.toString()); - } - - @Test - public void httpPostShortProfile() throws IOException { - InetSocketTransportAddress httpAddress = findHttpAddress(client()); - if (httpAddress == null) { - throw 
new IllegalArgumentException("no HTTP address found"); - } - URL base = new URL("http://" + httpAddress.getHost() + ":" + httpAddress.getPort()); - URL url = new URL(base, "_langdetect?profile=short-text"); - HttpURLConnection connection = (HttpURLConnection) url.openConnection(); - connection.setRequestMethod("POST"); - connection.setDoOutput(true); - connection.setDoInput(true); - Streams.copy(new StringReader("Das ist ein Text"), new OutputStreamWriter(connection.getOutputStream(), StandardCharsets.UTF_8)); - StringWriter response = new StringWriter(); - Streams.copy(new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8), response); - assertEquals("{\"profile\":\"short-text\",\"languages\":[{\"language\":\"de\",\"probability\":0.9999968539079941}]}", response.toString()); - } - - public static InetSocketTransportAddress findHttpAddress(Client client) { - NodesInfoRequestBuilder nodesInfoRequestBuilder = new NodesInfoRequestBuilder(client, NodesInfoAction.INSTANCE); - nodesInfoRequestBuilder.setHttp(true).setTransport(false); - NodesInfoResponse response = nodesInfoRequestBuilder.execute().actionGet(); - Object obj = response.iterator().next().getHttp().getAddress().publishAddress(); - if (obj instanceof InetSocketTransportAddress) { - return (InetSocketTransportAddress) obj; - } - return null; - } -} \ No newline at end of file diff --git a/src/integration-test/resources/log4j2.xml b/src/integration-test/resources/log4j2.xml deleted file mode 100644 index f71aced..0000000 --- a/src/integration-test/resources/log4j2.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - - - - - - - - - - - \ No newline at end of file diff --git a/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectAction.java b/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectAction.java index ec2eb1a..08a299a 100644 --- a/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectAction.java +++ b/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectAction.java @@ -3,6 +3,9 @@ import org.elasticsearch.action.Action; import org.elasticsearch.client.ElasticsearchClient; +/** + * + */ public class LangdetectAction extends Action { public static final String NAME = "langdetect"; @@ -22,5 +25,4 @@ public LangdetectRequestBuilder newRequestBuilder(ElasticsearchClient client) { public LangdetectResponse newResponse() { return new LangdetectResponse(); } - } diff --git a/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectRequest.java b/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectRequest.java index 115b32a..7b9c215 100644 --- a/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectRequest.java +++ b/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectRequest.java @@ -9,15 +9,15 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class LangdetectRequest extends ActionRequest { +/** + * + */ +public class LangdetectRequest extends ActionRequest { private String profile; private String text; - public LangdetectRequest() { - } - @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; @@ -27,25 +27,24 @@ public ActionRequestValidationException validate() { return validationException; } + public String getProfile() { + return profile; + } + public LangdetectRequest setProfile(String profile) { this.profile = profile; return this; } - public String getProfile() { - return profile; + public String getText() { + return text; } 
- public LangdetectRequest setText(String text) { this.text = text; return this; } - public String getText() { - return text; - } - @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); diff --git a/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectRequestBuilder.java b/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectRequestBuilder.java index 93c5bee..7db1fb6 100644 --- a/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectRequestBuilder.java +++ b/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectRequestBuilder.java @@ -3,6 +3,9 @@ import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; +/** + * + */ public class LangdetectRequestBuilder extends ActionRequestBuilder { public LangdetectRequestBuilder(ElasticsearchClient client) { diff --git a/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.java b/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.java index 0c75343..c36a8ab 100644 --- a/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.java +++ b/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.java @@ -13,13 +13,17 @@ import static org.elasticsearch.rest.RestStatus.OK; +/** + * + */ public class LangdetectResponse extends ActionResponse implements StatusToXContent { private String profile; private List languages = new ArrayList<>(); - public LangdetectResponse() { + public String getProfile() { + return profile; } public LangdetectResponse setProfile(String profile) { @@ -27,8 +31,8 @@ public LangdetectResponse setProfile(String profile) { return this; } - public String getProfile() { - return profile; + public List getLanguages() { + return languages; } public LangdetectResponse setLanguages(List languages) { @@ -36,10 +40,6 @@ public LangdetectResponse setLanguages(List languages) { return this; } - public List getLanguages() { - return languages; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { if (!Strings.isNullOrEmpty(profile)) { diff --git a/src/main/java/org/xbib/elasticsearch/action/langdetect/TransportLangdetectAction.java b/src/main/java/org/xbib/elasticsearch/action/langdetect/TransportLangdetectAction.java index 7fa5256..3a77494 100644 --- a/src/main/java/org/xbib/elasticsearch/action/langdetect/TransportLangdetectAction.java +++ b/src/main/java/org/xbib/elasticsearch/action/langdetect/TransportLangdetectAction.java @@ -8,17 +8,20 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.xbib.elasticsearch.common.langdetect.LangdetectService; import org.xbib.elasticsearch.common.langdetect.Language; import org.xbib.elasticsearch.common.langdetect.LanguageDetectionException; -import org.xbib.elasticsearch.common.langdetect.LangdetectService; import java.util.HashMap; import java.util.List; import java.util.Map; +/** + * + */ public class TransportLangdetectAction extends TransportAction { - private final static Map services = new HashMap<>(); + private static final Map services = new HashMap<>(); @Inject public TransportLangdetectAction(Settings settings, ThreadPool threadPool, @@ -38,6 +41,7 @@ protected void doExecute(LangdetectRequest request, ActionListener langs = services.get(profile).detectAll(request.getText()); diff --git 
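Note: the action, request, response and transport classes above can be exercised from the Java API roughly as follows. This is a minimal sketch, not part of the patch; it assumes a connected Client instance and the usual request/response generics on LangdetectAction.

// Sketch only: invoke the langdetect action through the Java API.
// "client" is assumed to be an already-initialized Elasticsearch Client.
import org.elasticsearch.client.Client;
import org.xbib.elasticsearch.action.langdetect.LangdetectAction;
import org.xbib.elasticsearch.action.langdetect.LangdetectRequest;
import org.xbib.elasticsearch.action.langdetect.LangdetectResponse;
import org.xbib.elasticsearch.common.langdetect.Language;

public class LangdetectActionSketch {
    public static void printLanguages(Client client, String text) {
        LangdetectRequest request = new LangdetectRequest()
                .setProfile("short-text")
                .setText(text);
        LangdetectResponse response = client.execute(LangdetectAction.INSTANCE, request).actionGet();
        for (Language language : response.getLanguages()) {
            System.out.println(language); // Language.toString() prints "lang (prob=...)"
        }
    }
}
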
a/src/main/java/org/xbib/elasticsearch/common/langdetect/LangProfile.java b/src/main/java/org/xbib/elasticsearch/common/langdetect/LangProfile.java index ba5069d..5573dd0 100644 --- a/src/main/java/org/xbib/elasticsearch/common/langdetect/LangProfile.java +++ b/src/main/java/org/xbib/elasticsearch/common/langdetect/LangProfile.java @@ -11,19 +11,22 @@ import java.util.List; import java.util.Map; +/** + * + */ public class LangProfile { private String name; private Map freq; - private List n_words; + private List nWords; public LangProfile() { this.freq = new HashMap<>(); - this.n_words = new ArrayList<>(NGram.N_GRAM); + this.nWords = new ArrayList<>(NGram.N_GRAM); for (int i = 0; i < NGram.N_GRAM; i++) { - n_words.add(0); + nWords.add(0); } } @@ -35,7 +38,7 @@ public void add(String gram) { if (len < 1 || len > NGram.N_GRAM) { return; } - n_words.set(len - 1, n_words.get(len -1) + 1); + nWords.set(len - 1, nWords.get(len - 1) + 1); if (freq.containsKey(gram)) { freq.put(gram, freq.get(gram) + 1); } else { @@ -43,33 +46,33 @@ public void add(String gram) { } } - public void setName(String name) { - this.name = name; - } - public String getName() { return name; } - public List getNWords() { - return n_words; + public void setName(String name) { + this.name = name; } - public void setFreq(Map freq) { - this.freq = freq; + public List getNWords() { + return nWords; } public Map getFreq() { return freq; } + public void setFreq(Map freq) { + this.freq = freq; + } + @SuppressWarnings("unchecked") public void read(InputStream input) throws IOException { XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(input); - Map map = parser.map(); + Map map = parser.map(); freq = (Map) map.get("freq"); - name = (String)map.get("name"); - n_words = (List)map.get("n_words"); + name = (String) map.get("name"); + nWords = (List) map.get("n_words"); } } diff --git a/src/main/java/org/xbib/elasticsearch/common/langdetect/LangdetectService.java b/src/main/java/org/xbib/elasticsearch/common/langdetect/LangdetectService.java index 9b2cddb..91f7f62 100644 --- a/src/main/java/org/xbib/elasticsearch/common/langdetect/LangdetectService.java +++ b/src/main/java/org/xbib/elasticsearch/common/langdetect/LangdetectService.java @@ -1,25 +1,27 @@ package org.xbib.elasticsearch.common.langdetect; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import java.io.IOException; import java.io.InputStream; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Random; import java.util.regex.Pattern; +/** + * + */ public class LangdetectService { - private final static ESLogger logger = ESLoggerFactory.getLogger(LangdetectService.class.getName()); - - private final Settings settings; - - private final static Pattern word = Pattern.compile("[\\P{IsWord}]", Pattern.UNICODE_CHARACTER_CLASS); - - public final static String[] DEFAULT_LANGUAGES = new String[] { - // "af", + private static final String[] DEFAULT_LANGUAGES = new String[]{ + // "af", "ar", "bg", "bn", @@ -41,14 +43,14 @@ public class LangdetectService { "id", "it", "ja", - // "kn", + // "kn", "ko", "lt", "lv", "mk", "ml", - // "mr", - // "ne", + // "mr", + // "ne", "nl", "no", "pa", @@ -56,12 
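For orientation, the profile format consumed by LangProfile.read() above uses the three keys parsed there ("name", "freq", "n_words"). A minimal sketch with invented numbers:

// Sketch: what LangProfile.read() expects, and how add() maintains the counters.
// The JSON keys mirror the fields parsed above; the values here are made up.
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.xbib.elasticsearch.common.langdetect.LangProfile;

public class LangProfileSketch {
    public static void main(String[] args) throws IOException {
        String json = "{\"name\":\"de\",\"freq\":{\"ein\":42,\"der\":100},\"n_words\":[1000,800,600]}";
        LangProfile profile = new LangProfile();
        profile.read(new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)));
        System.out.println(profile.getName());            // "de"
        System.out.println(profile.getNWords());          // [1000, 800, 600]
        profile.add("die");                                // bumps freq("die") and the 3-gram counter
        System.out.println(profile.getFreq().get("die")); // 1
    }
}
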
+58,12 @@ public class LangdetectService { "pt", "ro", "ru", - // "sk", + // "sk", //"sl", - // "so", + // "so", "sq", "sv", - // "sw", + // "sw", "ta", "te", "th", @@ -73,34 +75,35 @@ public class LangdetectService { "zh-cn", "zh-tw" }; - - private final static Settings DEFAULT_SETTINGS = Settings.builder() + private static final Logger logger = LogManager.getLogger(LangdetectService.class.getName()); + private static final Pattern word = Pattern.compile("[\\P{IsWord}]", Pattern.UNICODE_CHARACTER_CLASS); + private static final Settings DEFAULT_SETTINGS = Settings.builder() .putArray("languages", DEFAULT_LANGUAGES) .build(); - + private final Settings settings; private Map wordLangProbMap = new HashMap<>(); private List langlist = new LinkedList<>(); - private Map langmap = new HashMap<>(); + private Map langmap = new HashMap<>(); private String profile; private double alpha; - private double alpha_width; + private double alphaWidth; - private int n_trial; + private int nTrial; private double[] priorMap; - private int iteration_limit; + private int iterationLimit; - private double prob_threshold; + private double probThreshold; - private double conv_threshold; + private double convThreshold; - private int base_freq; + private int baseFreq; private Pattern filterPattern; @@ -116,7 +119,7 @@ public LangdetectService(Settings settings) { public LangdetectService(Settings settings, String profile) { this.settings = settings; - this.profile = settings.get("profile", profile) ; + this.profile = settings.get("profile", profile); load(settings); init(); } @@ -150,7 +153,7 @@ private void load(Settings settings) { // map by settings Settings map = Settings.EMPTY; if (settings.getByPrefix("map.") != null) { - map = Settings.settingsBuilder().put(settings.getByPrefix("map.")).build(); + map = Settings.builder().put(settings.getByPrefix("map.")).build(); } if (map.getAsMap().isEmpty()) { // is in "map" a resource name? @@ -158,7 +161,7 @@ private void load(Settings settings) { settings.get("map") : this.profile + "language.json"; InputStream in = getClass().getResourceAsStream(s); if (in != null) { - map = Settings.settingsBuilder().loadFromStream(s, in).build(); + map = Settings.builder().loadFromStream(s, in).build(); } } this.langmap = map.getAsMap(); @@ -170,21 +173,21 @@ private void load(Settings settings) { private void init() { this.priorMap = null; - this.n_trial = settings.getAsInt("number_of_trials", 7); + this.nTrial = settings.getAsInt("number_of_trials", 7); this.alpha = settings.getAsDouble("alpha", 0.5); - this.alpha_width = settings.getAsDouble("alpha_width", 0.05); - this.iteration_limit = settings.getAsInt("iteration_limit", 10000); - this.prob_threshold = settings.getAsDouble("prob_threshold", 0.1); - this.conv_threshold = settings.getAsDouble("conv_threshold", 0.99999); - this.base_freq = settings.getAsInt("base_freq", 10000); + this.alphaWidth = settings.getAsDouble("alpha_width", 0.05); + this.iterationLimit = settings.getAsInt("iteration_limit", 10000); + this.probThreshold = settings.getAsDouble("prob_threshold", 0.1); + this.convThreshold = settings.getAsDouble("conv_threshold", 0.99999); + this.baseFreq = settings.getAsInt("base_freq", 10000); this.filterPattern = settings.get("pattern") != null ? 
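The settings keys read in load()/init() above can also be supplied programmatically when the service is constructed directly. A sketch with example values only; "short-text" is the profile name used elsewhere in the test suite:

// Sketch: constructing the detection service with tuned settings.
// All keys shown are the ones read in init()/load()/detectAll(); values are examples.
import org.elasticsearch.common.settings.Settings;
import org.xbib.elasticsearch.common.langdetect.LangdetectService;
import org.xbib.elasticsearch.common.langdetect.Language;
import org.xbib.elasticsearch.common.langdetect.LanguageDetectionException;

import java.util.List;

public class LangdetectServiceSketch {
    public static void main(String[] args) throws LanguageDetectionException {
        Settings settings = Settings.builder()
                .putArray("languages", "de", "en", "fr")   // restrict the loaded profiles
                .put("number_of_trials", 7)
                .put("alpha", 0.5)
                .put("max", 3)                             // cap the number of returned languages
                .build();
        LangdetectService service = new LangdetectService(settings, "short-text");
        List<Language> languages = service.detectAll("Das ist ein Text");
        languages.forEach(System.out::println);
    }
}
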
- Pattern.compile(settings.get("pattern"),Pattern.UNICODE_CHARACTER_CLASS) : null; + Pattern.compile(settings.get("pattern"), Pattern.UNICODE_CHARACTER_CLASS) : null; isStarted = true; } - public void loadProfileFromResource(String resource, int index, int langsize) throws IOException { - String profile = "/langdetect/" + (this.profile != null ? this.profile + "/" : ""); - InputStream in = getClass().getResourceAsStream(profile + resource); + public void loadProfileFromResource(String resource, int index, int langsize) throws IOException { + String thisProfile = "/langdetect/" + (this.profile != null ? this.profile + "/" : ""); + InputStream in = getClass().getResourceAsStream(thisProfile + resource); if (in == null) { throw new IOException("profile '" + resource + "' not found"); } @@ -199,14 +202,14 @@ public void addProfile(LangProfile profile, int index, int langsize) throws IOEx throw new IOException("duplicate of the same language profile: " + lang); } langlist.add(lang); - for (String word : profile.getFreq().keySet()) { - if (!wordLangProbMap.containsKey(word)) { - wordLangProbMap.put(word, new double[langsize]); + for (String s : profile.getFreq().keySet()) { + if (!wordLangProbMap.containsKey(s)) { + wordLangProbMap.put(s, new double[langsize]); } - int length = word.length(); + int length = s.length(); if (length >= 1 && length <= 3) { - double prob = profile.getFreq().get(word).doubleValue() / profile.getNWords().get(length - 1); - wordLangProbMap.get(word)[index] = prob; + double prob = profile.getFreq().get(s).doubleValue() / profile.getNWords().get(length - 1); + wordLangProbMap.get(s)[index] = prob; } } } @@ -229,32 +232,29 @@ public List detectAll(String text) throws LanguageDetectionException { return languages.subList(0, Math.min(languages.size(), settings.getAsInt("max", languages.size()))); } - private double[] detectBlock(List list, String text) throws LanguageDetectionException { + private double[] detectBlock(List list, String string) throws LanguageDetectionException { // clean all non-work characters from text - text = text.replaceAll(word.pattern(), " "); + String text = string.replaceAll(word.pattern(), " "); extractNGrams(list, text); double[] langprob = new double[langlist.size()]; if (list.isEmpty()) { - //throw new LanguageDetectionException("no features in text"); return langprob; } Random rand = new Random(); Long seed = 0L; rand.setSeed(seed); - for (int t = 0; t < n_trial; ++t) { + for (int t = 0; t < nTrial; ++t) { double[] prob = initProbability(); - double a = this.alpha + rand.nextGaussian() * alpha_width; + double a = this.alpha + rand.nextGaussian() * alphaWidth; for (int i = 0; ; ++i) { int r = rand.nextInt(list.size()); updateLangProb(prob, list.get(r), a); - if (i % 5 == 0) { - if (normalizeProb(prob) > conv_threshold || i >= iteration_limit) { - break; - } + if (i % 5 == 0 && normalizeProb(prob) > convThreshold || i >= iterationLimit) { + break; } } for (int j = 0; j < langprob.length; ++j) { - langprob[j] += prob[j] / n_trial; + langprob[j] += prob[j] / nTrial; } } return langprob; @@ -290,7 +290,7 @@ private boolean updateLangProb(double[] prob, String word, double alpha) { return false; } double[] langProbMap = wordLangProbMap.get(word); - double weight = alpha / base_freq; + double weight = alpha / baseFreq; for (int i = 0; i < prob.length; ++i) { prob[i] *= weight + langProbMap[i]; } @@ -298,11 +298,15 @@ private boolean updateLangProb(double[] prob, String word, double alpha) { } private double normalizeProb(double[] prob) { - double maxp 
= 0, sump = 0; - for (double aProb : prob) { - sump += aProb; + if (prob.length == 0) { + return 0d; } - for (int i = 0; i < prob.length; ++i) { + double sump = prob[0]; + for (int i = 1; i < prob.length; i++) { + sump += prob[i]; + } + double maxp = 0d; + for (int i = 0; i < prob.length; i++) { double p = prob[i] / sump; if (maxp < p) { maxp = p; @@ -315,7 +319,7 @@ private double normalizeProb(double[] prob) { private List sortProbability(List list, double[] prob) { for (int j = 0; j < prob.length; ++j) { double p = prob[j]; - if (p > prob_threshold) { + if (p > probThreshold) { for (int i = 0; i <= list.size(); ++i) { if (i == list.size() || list.get(i).getProbability() < p) { String code = langlist.get(j); diff --git a/src/main/java/org/xbib/elasticsearch/common/langdetect/Language.java b/src/main/java/org/xbib/elasticsearch/common/langdetect/Language.java index 3613a0c..7ce13b2 100644 --- a/src/main/java/org/xbib/elasticsearch/common/langdetect/Language.java +++ b/src/main/java/org/xbib/elasticsearch/common/langdetect/Language.java @@ -6,9 +6,13 @@ import java.io.IOException; +/** + * + */ public class Language implements Streamable { private String lang; + private double prob; public Language(String lang, double prob) { @@ -36,8 +40,8 @@ public void writeTo(StreamOutput out) throws IOException { out.writeDouble(prob); } + @Override public String toString() { return lang + " (prob=" + prob + ")"; } - } diff --git a/src/main/java/org/xbib/elasticsearch/common/langdetect/LanguageDetectionException.java b/src/main/java/org/xbib/elasticsearch/common/langdetect/LanguageDetectionException.java index e4c0e93..d827f51 100644 --- a/src/main/java/org/xbib/elasticsearch/common/langdetect/LanguageDetectionException.java +++ b/src/main/java/org/xbib/elasticsearch/common/langdetect/LanguageDetectionException.java @@ -2,6 +2,9 @@ import java.io.IOException; +/** + * + */ public class LanguageDetectionException extends IOException { private static final long serialVersionUID = 752257035371915875L; @@ -9,5 +12,4 @@ public class LanguageDetectionException extends IOException { public LanguageDetectionException(String message) { super(message); } - } diff --git a/src/main/java/org/xbib/elasticsearch/common/langdetect/NGram.java b/src/main/java/org/xbib/elasticsearch/common/langdetect/NGram.java index c6289a2..bfcb7d6 100644 --- a/src/main/java/org/xbib/elasticsearch/common/langdetect/NGram.java +++ b/src/main/java/org/xbib/elasticsearch/common/langdetect/NGram.java @@ -4,101 +4,16 @@ import java.util.HashMap; import java.util.Map; +/** + * + */ public class NGram { - public final static Map cjk_map = new HashMap<>(); + private static final Map cjk_map = new HashMap<>(); - public final static int N_GRAM = 3; + public static final int N_GRAM = 3; - private final static String LATIN1_EXCLUDED = "\u00A0\u00AB\u00B0\u00BB"; - - private StringBuilder grams; - - private boolean capitalword; - - public NGram() { - grams = new StringBuilder(" "); - capitalword = false; - } - - public void addChar(char ch) { - ch = normalize(ch); - char lastchar = grams.charAt(grams.length() - 1); - if (lastchar == ' ') { - grams = new StringBuilder(" "); - capitalword = false; - if (ch == ' ') { - return; - } - } else if (grams.length() >= N_GRAM) { - grams.deleteCharAt(0); - } - grams.append(ch); - if (Character.isUpperCase(ch)) { - if (Character.isUpperCase(lastchar)) { - capitalword = true; - } - } else { - capitalword = false; - } - } - - public String get(int n) { - if (capitalword) { - return null; - } - int len = 
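To make the refactored loop above easier to follow: each trial starts from a uniform prior, repeatedly picks a random n-gram and multiplies every language's probability by (alpha / baseFreq + P(ngram | language)), renormalizing every fifth step until the leading language passes conv_threshold or the iteration limit is hit. A simplified, self-contained sketch of one trial (illustration only, not the plugin code):

// Simplified sketch of the sampling loop in detectBlock()/updateLangProb() above.
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Random;

public class DetectionLoopSketch {
    static double[] detect(List<String> ngrams, Map<String, double[]> wordLangProb,
                           int numLanguages, double alpha, int baseFreq,
                           double convThreshold, int iterationLimit) {
        double[] prob = new double[numLanguages];
        Arrays.fill(prob, 1.0 / numLanguages);
        if (ngrams.isEmpty()) {
            return prob;
        }
        Random rand = new Random(0L);
        double weight = alpha / baseFreq;
        for (int i = 0; i < iterationLimit; i++) {
            double[] ngramProb = wordLangProb.get(ngrams.get(rand.nextInt(ngrams.size())));
            if (ngramProb == null) {
                continue;                      // unknown n-gram, skipped like updateLangProb() does
            }
            for (int j = 0; j < prob.length; j++) {
                prob[j] *= weight + ngramProb[j];
            }
            if (i % 5 == 0 && normalize(prob) > convThreshold) {
                break;                         // converged on one language
            }
        }
        normalize(prob);
        return prob;
    }

    // Divide by the sum so the entries add up to 1, and return the largest entry.
    static double normalize(double[] prob) {
        double sum = 0;
        for (double p : prob) {
            sum += p;
        }
        double max = 0;
        for (int j = 0; j < prob.length; j++) {
            prob[j] /= sum;
            max = Math.max(max, prob[j]);
        }
        return max;
    }
}
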
grams.length(); - if (n < 1 || n > 3 || len < n) { - return null; - } - if (n == 1) { - char ch = grams.charAt(len - 1); - if (ch == ' ') { - return null; - } - return Character.toString(ch); - } else { - return grams.substring(len - n, len); - } - } - - public static char normalize(char ch) { - Character.UnicodeBlock block = Character.UnicodeBlock.of(ch); - if (block == UnicodeBlock.BASIC_LATIN) { - if (ch < 'A' || (ch < 'a' && ch > 'Z') || ch > 'z') { - ch = ' '; - } - } else if (block == UnicodeBlock.LATIN_1_SUPPLEMENT) { - if (LATIN1_EXCLUDED.indexOf(ch) >= 0) { - ch = ' '; - } - } else if (block == UnicodeBlock.GENERAL_PUNCTUATION) { - ch = ' '; - } else if (block == UnicodeBlock.ARABIC) { - if (ch == '\u06cc') { - ch = '\u064a'; - } - } else if (block == UnicodeBlock.LATIN_EXTENDED_ADDITIONAL) { - if (ch >= '\u1ea0') { - ch = '\u1ec3'; - } - } else if (block == UnicodeBlock.HIRAGANA) { - ch = '\u3042'; - } else if (block == UnicodeBlock.KATAKANA) { - ch = '\u30a2'; - } else if (block == UnicodeBlock.BOPOMOFO || block == UnicodeBlock.BOPOMOFO_EXTENDED) { - ch = '\u3105'; - } else if (block == UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS) { - if (cjk_map.containsKey(ch)) { - ch = cjk_map.get(ch); - } - } else if (block == UnicodeBlock.HANGUL_SYLLABLES) { - ch = '\uac00'; - } - return ch; - } - - static final String[] CJK_CLASS = { + private static final String[] CJK_CLASS = { "\u4F7C\u6934", "\u88CF\u95B2", "\u7027\u7DCB", @@ -226,7 +141,7 @@ public static char normalize(char ch) { "\u55C5\u57A2\u58D5\u59E5\u637A\u74E2\u7CE0\u895F", "\u4E19\u4E32\u4E4F\u4E91\u4EC7\u4ED4\u4F0D\u5141\u51E1\u51F6\u51F8\u52AB\u535C\u53C9\u53DB\u540A\u5410\u54C0\u559D\u5750\u5751\u576A\u57E0\u5824\u582A\u5830\u5835\u5851\u5858\u586B\u5954\u59FB\u5A46\u5B5F\u5BB4\u5BD3\u5C16\u5C60\u5CFB\u5D16\u5E16\u5E3D\u5E7D\u5E87\u5ECA\u5FD9\u60DC\u60F9\u6155\u6167\u6234\u626E\u6276\u6284\u633A\u6377\u6492\u649E\u64B0\u6562\u6591\u65A5\u65E6\u65FA\u6602\u670B\u676D\u68AF\u695A\u6B23\u6BC5\u6C70\u6C83\u6CE1\u6D8C\u6DD8\u6E20\u71D5\u72D0\u72D7\u73B2\u73CA\u7433\u7483\u74DC\u74F6\u7554\u764C\u7761\u77DB\u78A7\u7A46\u7A7F\u7A84\u7C97\u7D2F\u7FC1\u7FE0\u8000\u8017\u808C\u80AF\u8404\u8461\u8463\u8475\u8513\u85AA\u8679\u86CB\u871C\u87BA\u88F8\u8C8C\u8DF3\u8FC4\u901D\u9022\u906E\u9075\u9192\u91C7\u966A\u971E\u9910\u9B41\u9F0E\u9F20" }; - + private static final String LATIN1_EXCLUDED = "\u00A0\u00AB\u00B0\u00BB"; static { for (String cjk_list : CJK_CLASS) { @@ -236,4 +151,90 @@ public static char normalize(char ch) { } } } + + private StringBuilder grams; + private boolean capitalword; + + public NGram() { + grams = new StringBuilder(" "); + capitalword = false; + } + + public static char normalize(char c) { + char ch = c; + UnicodeBlock block = UnicodeBlock.of(ch); + if (block == UnicodeBlock.BASIC_LATIN) { + if (ch < 'A' || (ch < 'a' && ch > 'Z') || ch > 'z') { + ch = ' '; + } + } else if (block == UnicodeBlock.LATIN_1_SUPPLEMENT) { + if (LATIN1_EXCLUDED.indexOf(ch) >= 0) { + ch = ' '; + } + } else if (block == UnicodeBlock.GENERAL_PUNCTUATION) { + ch = ' '; + } else if (block == UnicodeBlock.ARABIC) { + if (ch == '\u06cc') { + ch = '\u064a'; + } + } else if (block == UnicodeBlock.LATIN_EXTENDED_ADDITIONAL) { + if (ch >= '\u1ea0') { + ch = '\u1ec3'; + } + } else if (block == UnicodeBlock.HIRAGANA) { + ch = '\u3042'; + } else if (block == UnicodeBlock.KATAKANA) { + ch = '\u30a2'; + } else if (block == UnicodeBlock.BOPOMOFO || block == UnicodeBlock.BOPOMOFO_EXTENDED) { + ch = '\u3105'; + } else if (block == 
UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS) { + if (cjk_map.containsKey(ch)) { + ch = cjk_map.get(ch); + } + } else if (block == UnicodeBlock.HANGUL_SYLLABLES) { + ch = '\uac00'; + } + return ch; + } + + public void addChar(char c) { + char ch = normalize(c); + char lastchar = grams.charAt(grams.length() - 1); + if (lastchar == ' ') { + grams = new StringBuilder(" "); + capitalword = false; + if (ch == ' ') { + return; + } + } else if (grams.length() >= N_GRAM) { + grams.deleteCharAt(0); + } + grams.append(ch); + if (Character.isUpperCase(ch)) { + if (Character.isUpperCase(lastchar)) { + capitalword = true; + } + } else { + capitalword = false; + } + } + + public String get(int n) { + if (capitalword) { + return null; + } + int len = grams.length(); + if (n < 1 || n > 3 || len < n) { + return null; + } + if (n == 1) { + char ch = grams.charAt(len - 1); + if (ch == ' ') { + return null; + } + return Character.toString(ch); + } else { + return grams.substring(len - n, len); + } + } } diff --git a/src/main/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.java b/src/main/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.java index efba15d..f834eb8 100644 --- a/src/main/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.java +++ b/src/main/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.java @@ -1,5 +1,7 @@ package org.xbib.elasticsearch.index.mapper.langdetect; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.elasticsearch.common.lucene.Lucene; @@ -13,7 +15,7 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.core.StringFieldMapper; +import org.elasticsearch.index.mapper.TextFieldMapper; import org.xbib.elasticsearch.common.langdetect.LangdetectService; import org.xbib.elasticsearch.common.langdetect.Language; import org.xbib.elasticsearch.common.langdetect.LanguageDetectionException; @@ -26,38 +28,176 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.index.mapper.core.TypeParsers.parseStore; +import static org.elasticsearch.index.mapper.TypeParsers.parseStore; -public class LangdetectMapper extends StringFieldMapper { +/** + * + */ +public class LangdetectMapper extends TextFieldMapper { - public static final String CONTENT_TYPE = "langdetect"; + private static final Logger logger = LogManager.getLogger(LangdetectMapper.class.getName()); + + public static final String MAPPER_TYPE = "langdetect"; + + private final LangdetectService langdetectService; + + private final LanguageTo languageTo; + + private final int positionIncrementGap; + + public LangdetectMapper(String simpleName, + TextFieldType fieldType, + MappedFieldType defaultFieldType, + int positionIncrementGap, + Settings indexSettings, + MultiFields multiFields, + CopyTo copyTo, + LanguageTo languageTo, + LangdetectService langdetectService) { + super(simpleName, fieldType, defaultFieldType, + positionIncrementGap, false, indexSettings, multiFields, copyTo); + this.langdetectService = langdetectService; + this.languageTo = languageTo; + this.positionIncrementGap = positionIncrementGap; + } + + @Override + protected String contentType() { + return MAPPER_TYPE; + } + + @Override + protected void parseCreateField(ParseContext context, List fields) throws 
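The reordered NGram class above is driven character by character; the sketch below is an assumed reading of how 1- to 3-grams are collected from a string (the actual extraction lives in extractNGrams(), which is not part of this hunk):

// Sketch: feed characters into NGram and collect the grams it yields.
import java.util.ArrayList;
import java.util.List;

import org.xbib.elasticsearch.common.langdetect.NGram;

public class NGramSketch {
    public static void main(String[] args) {
        NGram ngram = new NGram();
        List<String> grams = new ArrayList<>();
        for (char c : "Das ist ein Text".toCharArray()) {
            ngram.addChar(c);                        // normalizes the char and appends it to the window
            for (int n = 1; n <= NGram.N_GRAM; n++) {
                String gram = ngram.get(n);          // null while the window is too short or for all-caps words
                if (gram != null) {
                    grams.add(gram);
                }
            }
        }
        System.out.println(grams);
    }
}
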
IOException { + if (context.externalValueSet()) { + return; + } + XContentParser parser = context.parser(); + if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { + return; + } + String value = fieldType().nullValueAsString(); + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + XContentParser.Token token; + String currentFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else { + if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { + value = parser.textOrNull(); + } + } + } + } else { + value = parser.textOrNull(); + } + if (langdetectService.getSettings().getAsBoolean("binary", false)) { + try { + byte[] b = parser.binaryValue(); + if (b != null && b.length > 0) { + value = new String(b, StandardCharsets.UTF_8); + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + } + try { + List langs = langdetectService.detectAll(value); + for (Language lang : langs) { + Field field = new Field(fieldType().name(), lang.getLanguage(), fieldType()); + fields.add(field); + if (languageTo.languageToFields().containsKey(lang.getLanguage())) { + parseLanguageToFields(context, languageTo.languageToFields().get(lang.getLanguage())); + } + } + } catch (LanguageDetectionException e) { + logger.trace(e.getMessage(), e); + context.createExternalValueContext("unknown"); + } + } + + @Override + protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { + super.doXContentBody(builder, includeDefaults, params); + if (includeDefaults || fieldType().nullValue() != null) { + builder.field("null_value", fieldType().nullValue()); + } + if (includeDefaults || positionIncrementGap != -1) { + builder.field("position_increment_gap", positionIncrementGap); + } + NamedAnalyzer searchQuoteAnalyzer = fieldType().searchQuoteAnalyzer(); + if (searchQuoteAnalyzer != null && !searchQuoteAnalyzer.name().equals(fieldType().searchAnalyzer().name())) { + builder.field("search_quote_analyzer", searchQuoteAnalyzer.name()); + } else if (includeDefaults) { + if (searchQuoteAnalyzer == null) { + builder.field("search_quote_analyzer", "default"); + } else { + builder.field("search_quote_analyzer", searchQuoteAnalyzer.name()); + } + } + Map map = langdetectService.getSettings().getAsStructuredMap(); + for (Map.Entry entry : map.entrySet()) { + builder.field(entry.getKey(), entry.getValue()); + } + languageTo.toXContent(builder, params); + } + + @SuppressWarnings("unchecked") + private static void parseLanguageToFields(ParseContext originalContext, Object languageToFields) throws IOException { + List fieldList = languageToFields instanceof List ? 
+ (List)languageToFields : Collections.singletonList(languageToFields); + ParseContext context = originalContext.createCopyToContext(); + for (Object field : fieldList) { + ParseContext.Document targetDoc = null; + for (ParseContext.Document doc = context.doc(); doc != null; doc = doc.getParent()) { + if (field.toString().startsWith(doc.getPrefix())) { + targetDoc = doc; + break; + } + } + if (targetDoc == null) { + throw new IllegalArgumentException("target doc is null"); + } + final ParseContext copyToContext; + if (targetDoc == context.doc()) { + copyToContext = context; + } else { + copyToContext = context.switchDoc(targetDoc); + } + FieldMapper fieldMapper = copyToContext.docMapper().mappers().getMapper(field.toString()); + if (fieldMapper != null) { + fieldMapper.parse(copyToContext); + } else { + throw new MapperParsingException("attempt to copy value to non-existing field [" + field + "]"); + } + } + } public static class Defaults { - public static final MappedFieldType LANG_FIELD_TYPE = new StringFieldType(); + public static final MappedFieldType LANG_FIELD_TYPE = new TextFieldType(); static { LANG_FIELD_TYPE.setStored(true); LANG_FIELD_TYPE.setOmitNorms(true); LANG_FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); LANG_FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); - LANG_FIELD_TYPE.setNames(new MappedFieldType.Names(CONTENT_TYPE)); + LANG_FIELD_TYPE.setName(MAPPER_TYPE); LANG_FIELD_TYPE.freeze(); } } - public static class Builder extends FieldMapper.Builder { + public static class Builder extends FieldMapper.Builder { protected int positionIncrementGap = -1; protected LanguageTo languageTo = LanguageTo.builder().build(); - protected Settings.Builder settingsBuilder = Settings.settingsBuilder(); + protected Settings.Builder settingsBuilder = Settings.builder(); public Builder(String name) { super(name, Defaults.LANG_FIELD_TYPE, Defaults.LANG_FIELD_TYPE); this.builder = this; - settingsBuilder.putArray("languages", LangdetectService.DEFAULT_LANGUAGES); } @Override @@ -126,9 +266,9 @@ public Builder binary(boolean binary) { return this; } - public Builder map(Map map) { - for (String key : map.keySet()) { - settingsBuilder.put("map." + key, map.get(key)); + public Builder map(Map map) { + for (Map.Entry entry : map.entrySet()) { + settingsBuilder.put("map." 
+ entry.getKey(), entry.getValue()); } return this; } @@ -158,7 +298,7 @@ public LangdetectMapper build(BuilderContext context) { if (fieldType.indexOptions() != IndexOptions.NONE && !fieldType.tokenized()) { defaultFieldType.setOmitNorms(true); defaultFieldType.setIndexOptions(IndexOptions.DOCS); - if (!omitNormsSet && fieldType.boost() == 1.0f) { + if (!omitNormsSet && Float.compare(fieldType.boost(), 1.0f) == 0) { fieldType.setOmitNorms(true); } if (!indexOptionsSet) { @@ -167,17 +307,22 @@ public LangdetectMapper build(BuilderContext context) { } setupFieldType(context); LangdetectService service = new LangdetectService(settingsBuilder.build()); - return new LangdetectMapper(name, fieldType, defaultFieldType, 100, -1, - context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo, - languageTo, service); + return new LangdetectMapper(name, + (TextFieldType) fieldType(), + defaultFieldType, + positionIncrementGap, + context.indexSettings(), + multiFieldsBuilder.build(this, context), + copyTo, + languageTo, + service); } } public static class TypeParser implements Mapper.TypeParser { @Override - public Mapper.Builder parse(String name, Map mapping, ParserContext parserContext) - throws MapperParsingException { + public Mapper.Builder parse(String name, Map mapping, ParserContext parserContext) { Builder builder = new Builder(name); Iterator> iterator = mapping.entrySet().iterator(); while (iterator.hasNext()) { @@ -185,12 +330,12 @@ public static class TypeParser implements Mapper.TypeParser { String fieldName = entry.getKey(); Object fieldNode = entry.getValue(); switch (fieldName) { - case "analyzer" : - // "_keyword" - we do ignore this, it's our internal analyzer + case "analyzer": + case "include_in_all": iterator.remove(); break; case "search_quote_analyzer": - NamedAnalyzer analyzer = parserContext.analysisService().analyzer(fieldNode.toString()); + NamedAnalyzer analyzer = parserContext.getIndexAnalyzers().get(fieldNode.toString()); if (analyzer == null) { throw new MapperParsingException("Analyzer [" + fieldNode.toString() + "] not found for field [" + name + "]"); } @@ -204,18 +349,18 @@ public static class TypeParser implements Mapper.TypeParser { } builder.positionIncrementGap(newPositionIncrementGap); if (builder.fieldType().indexAnalyzer() == null) { - builder.fieldType().setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer()); + builder.fieldType().setIndexAnalyzer(parserContext.getIndexAnalyzers().getDefaultIndexAnalyzer()); } if (builder.fieldType().searchAnalyzer() == null) { - builder.fieldType().setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer()); + builder.fieldType().setSearchAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchAnalyzer()); } if (builder.fieldType().searchQuoteAnalyzer() == null) { - builder.fieldType().setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer()); + builder.fieldType().setSearchQuoteAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchQuoteAnalyzer()); } iterator.remove(); break; - case "store" : - builder.store(parseStore(fieldName, fieldNode.toString())); + case "store": + builder.store(parseStore(fieldName, fieldNode.toString(), parserContext)); iterator.remove(); break; case "number_of_trials": @@ -259,15 +404,15 @@ public static class TypeParser implements Mapper.TypeParser { builder.binary(b); iterator.remove(); break; - case "map" : + case "map": builder.map(XContentMapValues.nodeMapValue(fieldNode, "map")); iterator.remove(); break; - 
case "languages" : + case "languages": builder.languages(XContentMapValues.nodeStringArrayValue(fieldNode)); iterator.remove(); break; - case "profile" : + case "profile": builder.profile(XContentMapValues.nodeStringValue(fieldNode, null)); iterator.remove(); break; @@ -286,140 +431,6 @@ public static class TypeParser implements Mapper.TypeParser { } } - private final LangdetectService langdetectService; - - private final int positionIncrementGap; - - private final LanguageTo languageTo; - - public LangdetectMapper(String simpleName, - MappedFieldType fieldType, - MappedFieldType defaultFieldType, - int positionIncrementGap, - int ignoreAbove, - Settings indexSettings, - MultiFields multiFields, - CopyTo copyTo, - LanguageTo languageTo, - LangdetectService langdetectService) { - super(simpleName, fieldType, defaultFieldType, - positionIncrementGap, ignoreAbove, indexSettings, multiFields, copyTo); - this.languageTo = languageTo; - this.langdetectService = langdetectService; - this.positionIncrementGap = positionIncrementGap; - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { - if (context.externalValueSet()) { - return; - } - XContentParser parser = context.parser(); - if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { - return; - } - String value = fieldType().nullValueAsString(); - if (parser.currentToken() == XContentParser.Token.START_OBJECT) { - XContentParser.Token token; - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { - value = parser.textOrNull(); - } - } - } - } else { - value = parser.textOrNull(); - } - if (langdetectService.getSettings().getAsBoolean("binary", false)) { - try { - byte[] b = parser.binaryValue(); - if (b != null && b.length > 0) { - value = new String(b, StandardCharsets.UTF_8); - } - } catch (Exception e) { - // ignore - } - } - try { - List langs = langdetectService.detectAll(value); - for (Language lang : langs) { - Field field = new Field(fieldType().names().indexName(), lang.getLanguage(), fieldType()); - fields.add(field); - if (languageTo.languageToFields().containsKey(lang.getLanguage())) { - parseLanguageToFields(context, languageTo.languageToFields().get(lang.getLanguage())); - } - } - } catch (LanguageDetectionException e) { - context.createExternalValueContext("unknown"); - } - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || fieldType().nullValue() != null) { - builder.field("null_value", fieldType().nullValue()); - } - if (includeDefaults || positionIncrementGap != -1) { - builder.field("position_increment_gap", positionIncrementGap); - } - NamedAnalyzer searchQuoteAnalyzer = fieldType().searchQuoteAnalyzer(); - if (searchQuoteAnalyzer != null && !searchQuoteAnalyzer.name().equals(fieldType().searchAnalyzer().name())) { - builder.field("search_quote_analyzer", searchQuoteAnalyzer.name()); - } else if (includeDefaults) { - if (searchQuoteAnalyzer == null) { - builder.field("search_quote_analyzer", "default"); - } else { - builder.field("search_quote_analyzer", searchQuoteAnalyzer.name()); - } - } - 
Map map = langdetectService.getSettings().getAsStructuredMap(); - for (String key : map.keySet()) { - builder.field(key, map.get(key)); - } - languageTo.toXContent(builder, params); - } - - @SuppressWarnings("unchecked") - private static void parseLanguageToFields(ParseContext originalContext, Object languageToFields) throws IOException { - List fieldList = languageToFields instanceof List ? - (List)languageToFields : Collections.singletonList(languageToFields); - ParseContext context = originalContext.createCopyToContext(); - for (Object field : fieldList) { - ParseContext.Document targetDoc = null; - for (ParseContext.Document doc = context.doc(); doc != null; doc = doc.getParent()) { - if (field.toString().startsWith(doc.getPrefix())) { - targetDoc = doc; - break; - } - } - if (targetDoc == null) { - throw new IllegalArgumentException("target doc is null"); - } - final ParseContext copyToContext; - if (targetDoc == context.doc()) { - copyToContext = context; - } else { - copyToContext = context.switchDoc(targetDoc); - } - FieldMapper fieldMapper = copyToContext.docMapper().mappers().getMapper(field.toString()); - if (fieldMapper != null) { - fieldMapper.parse(copyToContext); - } else { - throw new MapperParsingException("attempt to copy value to non-existing field [" + field + "]"); - } - } - } - public static class LanguageTo { private final Map languageToFields; diff --git a/src/main/java/org/xbib/elasticsearch/plugin/langdetect/LangdetectPlugin.java b/src/main/java/org/xbib/elasticsearch/plugin/langdetect/LangdetectPlugin.java index b72046d..3de1722 100644 --- a/src/main/java/org/xbib/elasticsearch/plugin/langdetect/LangdetectPlugin.java +++ b/src/main/java/org/xbib/elasticsearch/plugin/langdetect/LangdetectPlugin.java @@ -1,51 +1,45 @@ package org.xbib.elasticsearch.plugin.langdetect; -import org.elasticsearch.action.ActionModule; -import org.elasticsearch.common.component.LifecycleComponent; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.rest.RestModule; +import org.elasticsearch.rest.RestHandler; import org.xbib.elasticsearch.action.langdetect.LangdetectAction; import org.xbib.elasticsearch.action.langdetect.TransportLangdetectAction; import org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper; import org.xbib.elasticsearch.rest.action.langdetect.RestLangdetectAction; -public class LangdetectPlugin extends Plugin { +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; - private final Settings settings; - - @Inject - public LangdetectPlugin(Settings settings) { - this.settings = settings; - } +/** + * + */ +public class LangdetectPlugin extends Plugin implements MapperPlugin, ActionPlugin { @Override - public String name() { - return "langdetect"; + public Map getMappers() { + Map extra = new LinkedHashMap<>(); + extra.put(LangdetectMapper.MAPPER_TYPE, new LangdetectMapper.TypeParser()); + return extra; } @Override - public String description() { - return "Language detector for Elasticsearch"; - } - - public void onModule(ActionModule module) { - if (settings.getAsBoolean("plugins.langdetect.enabled", true)) { - 
module.registerAction(LangdetectAction.INSTANCE, TransportLangdetectAction.class); - } + public List> getActions() { + List> extra = new ArrayList<>(); + extra.add(new ActionHandler<>(LangdetectAction.INSTANCE, TransportLangdetectAction.class)); + return extra; } - public void onModule(RestModule module) { - if (settings.getAsBoolean("plugins.langdetect.enabled", true)) { - module.addRestAction(RestLangdetectAction.class); - } - } - - public void onModule(IndicesModule indicesModule) { - if (settings.getAsBoolean("plugins.langdetect.enabled", true)) { - indicesModule.registerMapper(LangdetectMapper.CONTENT_TYPE, new LangdetectMapper.TypeParser()); - } + @Override + public List> getRestHandlers() { + List> extra = new ArrayList<>(); + extra.add(RestLangdetectAction.class); + return extra; } } diff --git a/src/main/java/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.java b/src/main/java/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.java index 4013a75..7188eae 100644 --- a/src/main/java/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.java +++ b/src/main/java/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.java @@ -1,33 +1,35 @@ package org.xbib.elasticsearch.rest.action.langdetect; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.action.support.RestStatusToXContentListener; +import org.elasticsearch.rest.action.RestStatusToXContentListener; import org.xbib.elasticsearch.action.langdetect.LangdetectAction; import org.xbib.elasticsearch.action.langdetect.LangdetectRequest; -import org.xbib.elasticsearch.action.langdetect.LangdetectResponse; + +import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.POST; +/** + * + */ public class RestLangdetectAction extends BaseRestHandler { @Inject - public RestLangdetectAction(Settings settings, Client client, RestController controller) { - super(settings, controller, client); + public RestLangdetectAction(Settings settings, RestController controller) { + super(settings); controller.registerHandler(POST, "/_langdetect", this); } @Override - public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) { - LangdetectRequest langdetectRequest = new LangdetectRequest() - .setProfile(request.param("profile", "")) - .setText(request.content().toUtf8()); - client.execute(LangdetectAction.INSTANCE, langdetectRequest, - new RestStatusToXContentListener(channel)); + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + return channel -> client.execute(LangdetectAction.INSTANCE, new LangdetectRequest() + .setProfile(request.param("profile", "")) + .setText(request.content().utf8ToString()), + new RestStatusToXContentListener<>(channel)); } } \ No newline at end of file diff --git a/src/main/templates/plugin-descriptor.properties b/src/main/templates/plugin-descriptor.properties index d599e0c..0b9867c 100644 --- a/src/main/templates/plugin-descriptor.properties +++ b/src/main/templates/plugin-descriptor.properties @@ -1,9 +1,6 @@ classname=${descriptor.classname} name=${descriptor.name} description=${descriptor.description} 
-jvm=${descriptor.jvm} -site=${descriptor.site} -isolated=${descriptor.isolated} version=${descriptor.version} java.version=${descriptor.javaVersion} elasticsearch.version=${descriptor.elasticsearchVersion} diff --git a/src/site/site.xml b/src/site/site.xml deleted file mode 100644 index 9810b38..0000000 --- a/src/site/site.xml +++ /dev/null @@ -1,30 +0,0 @@ - - - - org.apache.maven.skins - maven-fluido-skin - 1.2.1 - - - - true - true - - jprante/elasticsearch-langdetect - right - black - - - xbib - true - true - - - - - - - - - - \ No newline at end of file diff --git a/src/integration-test/java/org/elasticsearch/node/MockNode.java b/src/test/java/org/elasticsearch/node/MockNode.java similarity index 78% rename from src/integration-test/java/org/elasticsearch/node/MockNode.java rename to src/test/java/org/elasticsearch/node/MockNode.java index 7b9895c..686fdec 100644 --- a/src/integration-test/java/org/elasticsearch/node/MockNode.java +++ b/src/test/java/org/elasticsearch/node/MockNode.java @@ -1,7 +1,5 @@ - package org.elasticsearch.node; -import org.elasticsearch.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.plugins.Plugin; @@ -9,20 +7,30 @@ import java.util.ArrayList; import java.util.Collection; +/** + * + */ public class MockNode extends Node { public MockNode(Settings settings, Collection> classpathPlugins) { - super(InternalSettingsPreparer.prepareEnvironment(settings, null), Version.CURRENT, classpathPlugins); + super(InternalSettingsPreparer.prepareEnvironment(settings, null), classpathPlugins); } public MockNode(Settings settings, Class classpathPlugin) { this(settings, list(classpathPlugin)); } + public MockNode(Settings settings) { + this(settings, list()); + } + + private static Collection> list() { + return new ArrayList<>(); + } + private static Collection> list(Class classpathPlugin) { Collection> list = new ArrayList<>(); list.add(classpathPlugin); return list; } - } diff --git a/src/test/java/org/xbib/elasticsearch/MapperTestUtils.java b/src/test/java/org/xbib/elasticsearch/MapperTestUtils.java index 7d96836..4f1b604 100644 --- a/src/test/java/org/xbib/elasticsearch/MapperTestUtils.java +++ b/src/test/java/org/xbib/elasticsearch/MapperTestUtils.java @@ -1,144 +1,264 @@ package org.xbib.elasticsearch; +import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.inject.Injector; -import org.elasticsearch.common.inject.ModulesBuilder; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.env.Environment; -import org.elasticsearch.env.EnvironmentModule; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexNameModule; -import org.elasticsearch.index.analysis.AnalysisModule; -import org.elasticsearch.index.analysis.AnalysisService; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.index.analysis.AnalyzerProvider; +import org.elasticsearch.index.analysis.CharFilterFactory; +import org.elasticsearch.index.analysis.IndexAnalyzers; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.Mapper; import 
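A sketch of how the relocated MockNode is expected to be used with the plugin in tests. This is an assumption, not part of the patch; depending on the Elasticsearch version, further node settings (transport and http implementations, for instance) may be required:

// Sketch: start an embedded test node with the langdetect plugin on the classpath.
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.MockNode;
import org.elasticsearch.node.Node;
import org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin;

public class MockNodeSketch {
    public static void main(String[] args) throws Exception {
        Settings settings = Settings.builder()
                .put("path.home", System.getProperty("path.home", "/tmp"))
                .put("cluster.name", "langdetect-test")
                .build();
        Node node = new MockNode(settings, LangdetectPlugin.class);
        try {
            node.start();
        } finally {
            node.close();
        }
    }
}
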
org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MetadataFieldMapper; -import org.elasticsearch.index.mapper.core.BinaryFieldMapper; -import org.elasticsearch.index.mapper.core.BooleanFieldMapper; -import org.elasticsearch.index.mapper.core.ByteFieldMapper; -import org.elasticsearch.index.mapper.core.CompletionFieldMapper; -import org.elasticsearch.index.mapper.core.DateFieldMapper; -import org.elasticsearch.index.mapper.core.DoubleFieldMapper; -import org.elasticsearch.index.mapper.core.FloatFieldMapper; -import org.elasticsearch.index.mapper.core.IntegerFieldMapper; -import org.elasticsearch.index.mapper.core.LongFieldMapper; -import org.elasticsearch.index.mapper.core.ShortFieldMapper; -import org.elasticsearch.index.mapper.core.StringFieldMapper; -import org.elasticsearch.index.mapper.core.TokenCountFieldMapper; -import org.elasticsearch.index.mapper.core.TypeParsers; -import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper; -import org.elasticsearch.index.mapper.internal.AllFieldMapper; -import org.elasticsearch.index.mapper.internal.IdFieldMapper; -import org.elasticsearch.index.mapper.internal.IndexFieldMapper; -import org.elasticsearch.index.mapper.internal.ParentFieldMapper; -import org.elasticsearch.index.mapper.internal.RoutingFieldMapper; -import org.elasticsearch.index.mapper.internal.SourceFieldMapper; -import org.elasticsearch.index.mapper.internal.TTLFieldMapper; -import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; -import org.elasticsearch.index.mapper.internal.TypeFieldMapper; -import org.elasticsearch.index.mapper.internal.UidFieldMapper; -import org.elasticsearch.index.mapper.internal.VersionFieldMapper; -import org.elasticsearch.index.mapper.ip.IpFieldMapper; -import org.elasticsearch.index.mapper.object.ObjectMapper; -import org.elasticsearch.index.settings.IndexSettingsModule; -import org.elasticsearch.index.similarity.SimilarityLookupService; -import org.elasticsearch.indices.analysis.IndicesAnalysisService; +import org.elasticsearch.index.similarity.SimilarityService; +import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.indices.mapper.MapperRegistry; -import org.xbib.elasticsearch.common.langdetect.LangdetectService; -import org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper; +import org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin; -import java.util.LinkedHashMap; +import java.io.IOException; +import java.util.Collections; import java.util.Map; +import static org.junit.Assert.assertNotNull; + +/** + * + */ public class MapperTestUtils { - public static AnalysisService newAnalysisService(Settings indexSettings) { - Injector parentInjector = new ModulesBuilder().add(new SettingsModule(indexSettings), - new EnvironmentModule(new Environment(indexSettings))).createInjector(); - Index index = new Index("test"); - Injector injector = new ModulesBuilder().add( - new IndexSettingsModule(index, indexSettings), - new IndexNameModule(index), - new AnalysisModule(indexSettings, parentInjector.getInstance(IndicesAnalysisService.class))).createChildInjector(parentInjector); + public static AnalysisRegistry analysisService(Settings customSettings) throws IOException { + Settings settings = Settings.builder() + .put("path.home", System.getProperty("path.home", "/tmp")) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(customSettings) + .build(); + Environment environment = new Environment(settings); + 
AnalysisModule analysisModule = new AnalysisModule(environment, Collections.emptyList()); + return analysisModule.getAnalysisRegistry(); + } + + public static DocumentMapperParser newDocumentMapperParser(String index) throws IOException { + return newDocumentMapperParser(Settings.EMPTY, index); + } + + public static DocumentMapperParser newDocumentMapperParser(Settings customSettings, String index) throws IOException { + Settings settings = Settings.builder() + .put("path.home", System.getProperty("path.home", "/tmp")) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(customSettings) + .build(); + Environment environment = new Environment(settings); + LangdetectPlugin langdetectPlugin = new LangdetectPlugin(); + AnalysisModule analysisModule = new AnalysisModule(environment, Collections.emptyList()); + IndicesModule indicesModule = new IndicesModule(Collections.singletonList(langdetectPlugin)); + MapperRegistry mapperRegistry = indicesModule.getMapperRegistry(); + AnalysisRegistry analysisRegistry = analysisModule.getAnalysisRegistry(); + IndexMetaData indexMetaData = IndexMetaData.builder(index) + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + IndexSettings indexSettings = new IndexSettings(indexMetaData, settings); + SimilarityService similarityService = new SimilarityService(indexSettings, SimilarityService.BUILT_IN); + Map charFilterFactoryMap = analysisRegistry.buildCharFilterFactories(indexSettings); + Map tokenFilterFactoryMap = analysisRegistry.buildTokenFilterFactories(indexSettings); + Map tokenizerFactoryMap = analysisRegistry.buildTokenizerFactories(indexSettings); + Map> analyzerProviderMap = analysisRegistry.buildAnalyzerFactories(indexSettings); + IndexAnalyzers indexAnalyzers = analysisRegistry.build(indexSettings, analyzerProviderMap, + tokenizerFactoryMap, charFilterFactoryMap, tokenFilterFactoryMap); + MapperService mapperService = new MapperService(indexSettings, indexAnalyzers, + similarityService, mapperRegistry, null); + return new DocumentMapperParser(indexSettings, + mapperService, indexAnalyzers, similarityService, mapperRegistry, null); + } + + public static Analyzer analyzer(String name) throws IOException { + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put("path.home", System.getProperty("path.home")) + .build(); + AnalysisRegistry analysisRegistry = analysisService(settings); + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + IndexSettings indexSettings = new IndexSettings(indexMetaData, settings); + Map charFilterFactoryMap = analysisRegistry.buildCharFilterFactories(indexSettings); + Map tokenFilterFactoryMap = analysisRegistry.buildTokenFilterFactories(indexSettings); + Map tokenizerFactoryMap = analysisRegistry.buildTokenizerFactories(indexSettings); + Map> analyzerProviderMap = analysisRegistry.buildAnalyzerFactories(indexSettings); + IndexAnalyzers indexAnalyzers = analysisRegistry.build(indexSettings, analyzerProviderMap, + tokenizerFactoryMap, charFilterFactoryMap, tokenFilterFactoryMap); + Analyzer analyzer = indexAnalyzers.get(name) != null ? 
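A sketch of how a test could drive the new newDocumentMapperParser() helper. The two-argument parse(type, source) call is an assumption about the targeted Elasticsearch API, not something introduced by this patch:

// Sketch: parse a minimal "langdetect" mapping through the test helper.
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.xbib.elasticsearch.MapperTestUtils;

public class MapperParserSketch {
    public static void main(String[] args) throws Exception {
        String mapping = "{\"docs\":{\"properties\":{\"content\":{\"type\":\"langdetect\"}}}}";
        DocumentMapperParser parser = MapperTestUtils.newDocumentMapperParser("test-index");
        DocumentMapper mapper = parser.parse("docs", new CompressedXContent(mapping));
        System.out.println(mapper.mappingSource());
    }
}
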
indexAnalyzers.get(name) : analysisRegistry.getAnalyzer(name); + assertNotNull(analyzer); + return analyzer; + } + + public static Analyzer analyzer(Settings customSettings, String name) throws IOException { + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put("path.home", System.getProperty("path.home", "/tmp")) + .put(customSettings) + .build(); + AnalysisRegistry analysisRegistry = analysisService(settings); + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + IndexSettings indexSettings = new IndexSettings(indexMetaData, settings); + Map charFilterFactoryMap = analysisRegistry.buildCharFilterFactories(indexSettings); + Map tokenFilterFactoryMap = analysisRegistry.buildTokenFilterFactories(indexSettings); + Map tokenizerFactoryMap = analysisRegistry.buildTokenizerFactories(indexSettings); + Map> analyzerProviderMap = analysisRegistry.buildAnalyzerFactories(indexSettings); + IndexAnalyzers indexAnalyzers = analysisRegistry.build(indexSettings, analyzerProviderMap, + tokenizerFactoryMap, charFilterFactoryMap, tokenFilterFactoryMap); + Analyzer analyzer = indexAnalyzers.get(name) != null ? indexAnalyzers.get(name) : analysisRegistry.getAnalyzer(name); + assertNotNull(analyzer); + return analyzer; + } - return injector.getInstance(AnalysisService.class); + public static Analyzer analyzer(String resource, String name) throws IOException { + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put("path.home", System.getProperty("path.home", "/tmp")) + .loadFromStream(resource, MapperTestUtils.class.getClassLoader().getResource(resource).openStream()) + .build(); + AnalysisRegistry analysisRegistry = analysisService(settings); + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + IndexSettings indexSettings = new IndexSettings(indexMetaData, settings); + Map charFilterFactoryMap = analysisRegistry.buildCharFilterFactories(indexSettings); + Map tokenFilterFactoryMap = analysisRegistry.buildTokenFilterFactories(indexSettings); + Map tokenizerFactoryMap = analysisRegistry.buildTokenizerFactories(indexSettings); + Map> analyzerProviderMap = analysisRegistry.buildAnalyzerFactories(indexSettings); + IndexAnalyzers indexAnalyzers = analysisRegistry.build(indexSettings, analyzerProviderMap, + tokenizerFactoryMap, charFilterFactoryMap, tokenFilterFactoryMap); + Analyzer analyzer = indexAnalyzers.get(name) != null ? 
indexAnalyzers.get(name) : analysisRegistry.getAnalyzer(name); + assertNotNull(analyzer); + return analyzer; } - public static SimilarityLookupService newSimilarityLookupService(Settings indexSettings) { - return new SimilarityLookupService(new Index("test"), indexSettings); + public static TokenizerFactory tokenizerFactory(String name) throws IOException { + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put("path.home", System.getProperty("path.home", "/tmp")) + .build(); + AnalysisRegistry analysisRegistry = analysisService(settings); + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + IndexSettings indexSettings = new IndexSettings(indexMetaData, settings); + Map map = analysisRegistry.buildTokenizerFactories(indexSettings); + TokenizerFactory tokenizerFactory = map.containsKey(name) ? map.get(name) : + analysisRegistry.getTokenizerProvider(name).get(new Environment(settings), name); + assertNotNull(tokenizerFactory); + return tokenizerFactory; } - public static DocumentMapperParser newDocumentMapperParser() { - return newDocumentMapperParser(Settings.builder() + public static TokenizerFactory tokenizerFactory(String resource, String name) throws IOException { + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put("path.home", System.getProperty("path.home")) - .build()); + .loadFromStream(resource, MapperTestUtils.class.getClassLoader().getResource(resource).openStream()) + .build(); + Environment environment = new Environment(settings); + AnalysisRegistry analysisRegistry = analysisService(settings); + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + IndexSettings indexSettings = new IndexSettings(indexMetaData, settings); + Map map = analysisRegistry.buildTokenizerFactories(indexSettings); + TokenizerFactory tokenizerFactory = map.containsKey(name) ? map.get(name) : + analysisRegistry.getTokenizerProvider(name).get(environment, name); + assertNotNull(tokenizerFactory); + return tokenizerFactory; + } + + public static TokenFilterFactory tokenFilterFactory(String name) throws IOException { + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put("path.home", System.getProperty("path.home", "/tmp")) + .build(); + Environment environment = new Environment(settings); + AnalysisRegistry analysisRegistry = analysisService(settings); + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + IndexSettings indexSettings = new IndexSettings(indexMetaData, settings); + Map map = analysisRegistry.buildTokenFilterFactories(indexSettings); + return map.containsKey(name) ? 
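The analyzer() helpers above resolve a Lucene Analyzer by name; a test can then walk its tokens with the standard TokenStream API. A sketch ("standard" is only an example analyzer name; note that this overload expects the path.home system property to be set):

// Sketch: list the tokens an analyzer produces for a sample string.
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.xbib.elasticsearch.MapperTestUtils;

public class AnalyzerSketch {
    public static void main(String[] args) throws Exception {
        Analyzer analyzer = MapperTestUtils.analyzer("standard");
        try (TokenStream stream = analyzer.tokenStream("content", "Das ist ein Text")) {
            CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
            stream.reset();
            while (stream.incrementToken()) {
                System.out.println(term.toString());
            }
            stream.end();
        }
    }
}
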
map.get(name) : + analysisRegistry.getTokenFilterProvider(name).get(environment, name); } - public static DocumentMapperParser newDocumentMapperParser(Settings settings) { - Settings forcedSettings = Settings.builder() + public static TokenFilterFactory tokenFilterFactory(String resource, String name) throws IOException { + Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(settings) - .build(); - SimilarityLookupService similarityLookupService = newSimilarityLookupService(forcedSettings); - Map mappers = registerBuiltInMappers(); - mappers.put(LangdetectMapper.CONTENT_TYPE, new LangdetectMapper.TypeParser()); - Map metadataMappers = registerBuiltInMetadataMappers(); - MapperRegistry mapperRegistry = new MapperRegistry(mappers, metadataMappers); - MapperService mapperService = new MapperService(new Index("test"), - forcedSettings, - newAnalysisService(forcedSettings), - similarityLookupService, - null, - mapperRegistry); - return new DocumentMapperParser( - forcedSettings, - mapperService, - MapperTestUtils.newAnalysisService(forcedSettings), - similarityLookupService, - null, - mapperRegistry); + .put("path.home", System.getProperty("path.home", "/tmp")) + .loadFromStream(resource, MapperTestUtils.class.getClassLoader().getResource(resource).openStream()) + .build(); + Environment environment = new Environment(settings); + AnalysisRegistry analysisRegistry = analysisService(settings); + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + IndexSettings indexSettings = new IndexSettings(indexMetaData, settings); + Map map = analysisRegistry.buildTokenFilterFactories(indexSettings); + return map.containsKey(name) ? 
map.get(name) : + analysisRegistry.getTokenFilterProvider(name).get(environment, name); } - // copy from org.elasticsearch.indices.IndicesModule - private static Map registerBuiltInMappers() { - Map mapperParsers = new LinkedHashMap<>(); - mapperParsers.put(ByteFieldMapper.CONTENT_TYPE, new ByteFieldMapper.TypeParser()); - mapperParsers.put(ShortFieldMapper.CONTENT_TYPE, new ShortFieldMapper.TypeParser()); - mapperParsers.put(IntegerFieldMapper.CONTENT_TYPE, new IntegerFieldMapper.TypeParser()); - mapperParsers.put(LongFieldMapper.CONTENT_TYPE, new LongFieldMapper.TypeParser()); - mapperParsers.put(FloatFieldMapper.CONTENT_TYPE, new FloatFieldMapper.TypeParser()); - mapperParsers.put(DoubleFieldMapper.CONTENT_TYPE, new DoubleFieldMapper.TypeParser()); - mapperParsers.put(BooleanFieldMapper.CONTENT_TYPE, new BooleanFieldMapper.TypeParser()); - mapperParsers.put(BinaryFieldMapper.CONTENT_TYPE, new BinaryFieldMapper.TypeParser()); - mapperParsers.put(DateFieldMapper.CONTENT_TYPE, new DateFieldMapper.TypeParser()); - mapperParsers.put(IpFieldMapper.CONTENT_TYPE, new IpFieldMapper.TypeParser()); - mapperParsers.put(StringFieldMapper.CONTENT_TYPE, new StringFieldMapper.TypeParser()); - mapperParsers.put(TokenCountFieldMapper.CONTENT_TYPE, new TokenCountFieldMapper.TypeParser()); - mapperParsers.put(ObjectMapper.CONTENT_TYPE, new ObjectMapper.TypeParser()); - mapperParsers.put(ObjectMapper.NESTED_CONTENT_TYPE, new ObjectMapper.TypeParser()); - mapperParsers.put(TypeParsers.MULTI_FIELD_CONTENT_TYPE, TypeParsers.multiFieldConverterTypeParser); - mapperParsers.put(CompletionFieldMapper.CONTENT_TYPE, new CompletionFieldMapper.TypeParser()); - mapperParsers.put(GeoPointFieldMapper.CONTENT_TYPE, new GeoPointFieldMapper.TypeParser()); - return mapperParsers; + public static CharFilterFactory charFilterFactory(String name) throws IOException { + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put("path.home", System.getProperty("path.home", "/tmp")) + .build(); + Environment environment = new Environment(settings); + AnalysisRegistry analysisRegistry = analysisService(settings); + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + IndexSettings indexSettings = new IndexSettings(indexMetaData, settings); + Map map = analysisRegistry.buildCharFilterFactories(indexSettings); + return map.containsKey(name) ? 
map.get(name) : + analysisRegistry.getCharFilterProvider(name).get(environment, name); } - // copy from org.elasticsearch.indices.IndicesModule - private static Map registerBuiltInMetadataMappers() { - Map metadataMapperParsers = new LinkedHashMap<>(); - metadataMapperParsers.put(UidFieldMapper.NAME, new UidFieldMapper.TypeParser()); - metadataMapperParsers.put(IdFieldMapper.NAME, new IdFieldMapper.TypeParser()); - metadataMapperParsers.put(RoutingFieldMapper.NAME, new RoutingFieldMapper.TypeParser()); - metadataMapperParsers.put(IndexFieldMapper.NAME, new IndexFieldMapper.TypeParser()); - metadataMapperParsers.put(SourceFieldMapper.NAME, new SourceFieldMapper.TypeParser()); - metadataMapperParsers.put(TypeFieldMapper.NAME, new TypeFieldMapper.TypeParser()); - metadataMapperParsers.put(AllFieldMapper.NAME, new AllFieldMapper.TypeParser()); - metadataMapperParsers.put(TimestampFieldMapper.NAME, new TimestampFieldMapper.TypeParser()); - metadataMapperParsers.put(TTLFieldMapper.NAME, new TTLFieldMapper.TypeParser()); - metadataMapperParsers.put(VersionFieldMapper.NAME, new VersionFieldMapper.TypeParser()); - metadataMapperParsers.put(ParentFieldMapper.NAME, new ParentFieldMapper.TypeParser()); - return metadataMapperParsers; + public static CharFilterFactory charFilterFactory(String resource, String name) throws IOException { + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put("path.home", System.getProperty("path.home", "/tmp")) + .loadFromStream(resource, MapperTestUtils.class.getClassLoader().getResource(resource).openStream()) + .build(); + Environment environment = new Environment(settings); + AnalysisRegistry analysisRegistry = analysisService(settings); + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + IndexSettings indexSettings = new IndexSettings(indexMetaData, settings); + Map map = analysisRegistry.buildCharFilterFactories(indexSettings); + return map.containsKey(name) ? 
map.get(name) : + analysisRegistry.getCharFilterProvider(name).get(environment, name); } } diff --git a/src/test/java/org/xbib/elasticsearch/NodeTestUtils.java b/src/test/java/org/xbib/elasticsearch/NodeTestUtils.java new file mode 100644 index 0000000..ec733e2 --- /dev/null +++ b/src/test/java/org/xbib/elasticsearch/NodeTestUtils.java @@ -0,0 +1,205 @@ +package org.xbib.elasticsearch; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequestBuilder; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.node.MockNode; +import org.elasticsearch.node.Node; +import org.elasticsearch.node.NodeValidationException; +import org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin; + +import java.io.IOException; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; +import java.util.Collections; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * + */ +public class NodeTestUtils { + + private static final Logger logger = LogManager.getLogger("test"); + + private Node node; + + private Client client; + + private AtomicInteger counter = new AtomicInteger(); + + private String clustername; + + private String host; + + public void startCluster() { + try { + logger.info("settings cluster name"); + setClusterName(); + logger.info("starting nodes"); + this.node = startNode(); + this.client = node.client(); + findNodeAddress(); + ClusterHealthResponse healthResponse = client.execute(ClusterHealthAction.INSTANCE, + new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.YELLOW) + .timeout(TimeValue.timeValueSeconds(30))).actionGet(); + if (healthResponse != null && healthResponse.isTimedOut()) { + throw new IOException("cluster state is " + healthResponse.getStatus().name() + + ", from here on, everything will fail!"); + } + logger.info("nodes are started"); + } catch (Throwable t) { + logger.error("start of nodes failed", t); + } + } + + public void stopCluster() { + try { + logger.info("stopping nodes"); + closeNodes(); + } catch (Throwable e) { + logger.error("can not close nodes", e); + } finally { + try { + deleteFiles(); + logger.info("data files wiped"); + Thread.sleep(2000L); // let OS commit changes + } catch (IOException e) { + logger.error(e.getMessage(), e); + } catch (InterruptedException e) { + // ignore + } + } + } + + protected void setClusterName() { + this.clustername = "test-helper-cluster-" + + "-" + System.getProperty("user.name") + + "-" + counter.incrementAndGet(); + } + + protected String getClusterName() { + return clustername; + } + + protected Settings getNodeSettings() { + //String hostname = 
NetworkUtils.getLocalAddress().getHostName(); + return Settings.builder() + .put("cluster.name", clustername) + .put("transport.type", "local") + .put("http.enabled", false) + .put("path.home", getHome()) + //.put("node.max_local_storage_nodes", 1) + .build(); + } + + protected String getHome() { + return System.getProperty("path.home"); + } + + public Node startNode() throws IOException { + try { + return buildNode().start(); + } catch (NodeValidationException e) { + throw new IOException(e); + } + } + + public Client client() { + return client; + } + + private void closeNodes() throws IOException { + if (client != null) { + client.close(); + } + if (node != null) { + node.close(); + } + } + + protected void findNodeAddress() { + NodesInfoRequestBuilder nodesInfoRequestBuilder = new NodesInfoRequestBuilder(client, NodesInfoAction.INSTANCE); + nodesInfoRequestBuilder.setTransport(true); + NodesInfoResponse response = nodesInfoRequestBuilder.execute().actionGet(); + Object obj = response.getNodes().iterator().next().getTransport().getAddress().publishAddress(); + if (obj instanceof InetSocketTransportAddress) { + InetSocketTransportAddress address = (InetSocketTransportAddress) obj; + host = address.address().getHostName(); + } else if (obj instanceof LocalTransportAddress) { + LocalTransportAddress address = (LocalTransportAddress) obj; + host = address.getHost(); + } else { + logger.info("class=" + obj.getClass()); + } + if (host == null) { + throw new IllegalArgumentException("host not found"); + } + } + + public static String findHttpAddress(Client client) { + NodesInfoRequestBuilder nodesInfoRequestBuilder = new NodesInfoRequestBuilder(client, NodesInfoAction.INSTANCE); + nodesInfoRequestBuilder.setHttp(true).setTransport(false); + NodesInfoResponse response = nodesInfoRequestBuilder.execute().actionGet(); + Object obj = response.getNodes().iterator().next().getHttp().getAddress().publishAddress(); + if (obj instanceof InetSocketTransportAddress) { + InetSocketTransportAddress httpAddress = (InetSocketTransportAddress) obj; + return "http://" + httpAddress.getHost() + ":" + httpAddress.getPort(); + } else if (obj instanceof LocalTransportAddress) { + LocalTransportAddress httpAddress = (LocalTransportAddress) obj; + return "http://" + httpAddress.getHost() + ":" + httpAddress.getPort(); + } else { + logger.info("class=" + obj.getClass()); + } + return null; + } + + public Node buildNodeWithoutPlugins() throws IOException { + Settings nodeSettings = Settings.builder() + .put(getNodeSettings()) + .build(); + logger.info("settings={}", nodeSettings.getAsMap()); + return new MockNode(nodeSettings, Collections.emptyList()); + } + + public Node buildNode() throws IOException { + Settings nodeSettings = Settings.builder() + .put(getNodeSettings()) + .build(); + logger.info("settings={}", nodeSettings.getAsMap()); + return new MockNode(nodeSettings, Collections.singletonList(LangdetectPlugin.class)); + } + + + private static void deleteFiles() throws IOException { + Path directory = Paths.get(System.getProperty("path.home") + "/data"); + Files.walkFileTree(directory, new SimpleFileVisitor() { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + Files.delete(file); + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { + Files.delete(dir); + return FileVisitResult.CONTINUE; + } + + }); + } +} diff --git 
a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/DetectLanguageTest.java b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/DetectLanguageTest.java index 7bc0f6e..ee12831 100644 --- a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/DetectLanguageTest.java +++ b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/DetectLanguageTest.java @@ -41,7 +41,7 @@ private void testLanguage(String path, String lang) throws IOException { reader.close(); writer.close(); LangdetectService detect = new LangdetectService(); - assertEquals(detect.detectAll(writer.toString()).get(0).getLanguage(), lang); + assertEquals(lang, detect.detectAll(writer.toString()).get(0).getLanguage()); } } diff --git a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/DetectorTest.java b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/DetectorTest.java index b5b9d0b..428a8cb 100644 --- a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/DetectorTest.java +++ b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/DetectorTest.java @@ -4,10 +4,9 @@ import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; - import org.xbib.elasticsearch.common.langdetect.LangProfile; -import org.xbib.elasticsearch.common.langdetect.LanguageDetectionException; import org.xbib.elasticsearch.common.langdetect.LangdetectService; +import org.xbib.elasticsearch.common.langdetect.LanguageDetectionException; public class DetectorTest extends Assert { diff --git a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangDetectActionTest.java b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangDetectActionTest.java new file mode 100644 index 0000000..efad583 --- /dev/null +++ b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangDetectActionTest.java @@ -0,0 +1,126 @@ +package org.xbib.elasticsearch.index.mapper.langdetect; + +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.QueryBuilders; +import org.junit.Test; +import org.xbib.elasticsearch.NodeTestUtils; +import org.xbib.elasticsearch.action.langdetect.LangdetectRequestBuilder; +import org.xbib.elasticsearch.action.langdetect.LangdetectResponse; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +/** + * + */ +public class LangDetectActionTest extends NodeTestUtils { + + @Test + public void testLangDetectProfile() throws Exception { + startCluster(); + try { + // normal profile + LangdetectRequestBuilder langdetectRequestBuilder = + new LangdetectRequestBuilder(client()) + .setText("hello this is a test"); + LangdetectResponse response = langdetectRequestBuilder.execute().actionGet(); + assertFalse(response.getLanguages().isEmpty()); + assertEquals("en", response.getLanguages().get(0).getLanguage()); + assertNull(response.getProfile()); + + // short-text profile + LangdetectRequestBuilder langdetectProfileRequestBuilder = + new LangdetectRequestBuilder(client()) + .setText("hello this is a test") + .setProfile("short-text"); + response = langdetectProfileRequestBuilder.execute().actionGet(); + assertNotNull(response); + assertFalse(response.getLanguages().isEmpty()); + assertEquals("en", response.getLanguages().get(0).getLanguage()); + 
assertEquals("short-text", response.getProfile()); + + // again normal profile + langdetectRequestBuilder = new LangdetectRequestBuilder(client()) + .setText("hello this is a test"); + response = langdetectRequestBuilder.execute().actionGet(); + assertNotNull(response); + assertFalse(response.getLanguages().isEmpty()); + assertEquals("en", response.getLanguages().get(0).getLanguage()); + assertNull(response.getProfile()); + } finally { + stopCluster(); + } + } + + @Test + public void testSort() throws Exception { + startCluster(); + try { + + Settings settings = Settings.builder() + .build(); + + client().admin().indices().prepareCreate("test") + .setSettings(settings) + .addMapping("article", + jsonBuilder().startObject() + .startObject("article") + .startObject("properties") + .startObject("content") + .field("type", "langdetect") + .array("languages", "de", "en", "fr") + .endObject() + .endObject() + .endObject() + .endObject()) + .execute().actionGet(); + + client().admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet(); + + client().prepareIndex("test", "article", "1") + .setSource(jsonBuilder().startObject() + .field("title", "Some title") + .field("content", "Oh, say can you see by the dawn`s early light, What so proudly we hailed at the twilight`s last gleaming?") + .endObject()).execute().actionGet(); + client().prepareIndex("test", "article", "2") + .setSource(jsonBuilder().startObject() + .field("title", "Ein Titel") + .field("content", "Einigkeit und Recht und Freiheit für das deutsche Vaterland!") + .endObject()).execute().actionGet(); + client().prepareIndex("test", "article", "3") + .setSource(jsonBuilder().startObject() + .field("title", "Un titre") + .field("content", "Allons enfants de la Patrie, Le jour de gloire est arrivé!") + .endObject()).execute().actionGet(); + + client().admin().indices().prepareRefresh().execute().actionGet(); + + SearchResponse searchResponse = client().prepareSearch() + .setQuery(QueryBuilders.termQuery("content", "en")) + .execute().actionGet(); + assertEquals(1L, searchResponse.getHits().totalHits()); + assertEquals("Oh, say can you see by the dawn`s early light, What so proudly we hailed at the twilight`s last gleaming?", + searchResponse.getHits().getAt(0).getSource().get("content").toString()); + + searchResponse = client().prepareSearch() + .setQuery(QueryBuilders.termQuery("content", "de")) + .execute().actionGet(); + assertEquals(1L, searchResponse.getHits().totalHits()); + assertEquals("Einigkeit und Recht und Freiheit für das deutsche Vaterland!", + searchResponse.getHits().getAt(0).getSource().get("content").toString()); + + searchResponse = client().prepareSearch() + .setQuery(QueryBuilders.termQuery("content", "fr")) + .execute().actionGet(); + assertEquals(1L, searchResponse.getHits().totalHits()); + assertEquals("Allons enfants de la Patrie, Le jour de gloire est arrivé!", + searchResponse.getHits().getAt(0).getSource().get("content").toString()); + } finally { + stopCluster(); + } + } +} diff --git a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangDetectBinaryTest.java b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangDetectBinaryTest.java new file mode 100644 index 0000000..7ef21ca --- /dev/null +++ b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangDetectBinaryTest.java @@ -0,0 +1,62 @@ +package org.xbib.elasticsearch.index.mapper.langdetect; + +import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import 
org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; +import org.elasticsearch.action.index.IndexAction; +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.index.query.QueryBuilders; +import org.junit.Test; +import org.xbib.elasticsearch.NodeTestUtils; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.junit.Assert.assertEquals; + +/** + * + */ +public class LangDetectBinaryTest extends NodeTestUtils { + + @Test + public void testLangDetectBinary() throws Exception { + startCluster(); + try { + CreateIndexRequestBuilder createIndexRequestBuilder = + new CreateIndexRequestBuilder(client(), CreateIndexAction.INSTANCE).setIndex("test"); + createIndexRequestBuilder.addMapping("someType", jsonBuilder() + .startObject() + .startObject("properties") + .startObject("content") + .field("type", "text") + .startObject("fields") + .startObject("language") + .field("type", "langdetect") + .field("binary", true) + .endObject() + .endObject() + .endObject() + .endObject() + .endObject()); + createIndexRequestBuilder.execute().actionGet(); + IndexRequestBuilder indexRequestBuilder = + new IndexRequestBuilder(client(), IndexAction.INSTANCE) + .setIndex("test").setType("someType").setId("1") + //\"God Save the Queen\" (alternatively \"God Save the King\" + .setSource("content", "IkdvZCBTYXZlIHRoZSBRdWVlbiIgKGFsdGVybmF0aXZlbHkgIkdvZCBTYXZlIHRoZSBLaW5nIg=="); + indexRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).execute().actionGet(); + SearchRequestBuilder searchRequestBuilder = + new SearchRequestBuilder(client(), SearchAction.INSTANCE) + .setIndices("test") + .setQuery(QueryBuilders.matchAllQuery()) + .addStoredField("content.language"); + SearchResponse searchResponse = searchRequestBuilder.execute().actionGet(); + assertEquals(1L, searchResponse.getHits().getTotalHits()); + assertEquals("en", searchResponse.getHits().getAt(0).field("content.language").getValue()); + } finally { + stopCluster(); + } + } +} diff --git a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangDetectChineseTest.java b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangDetectChineseTest.java new file mode 100644 index 0000000..615cd20 --- /dev/null +++ b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangDetectChineseTest.java @@ -0,0 +1,63 @@ +package org.xbib.elasticsearch.index.mapper.langdetect; + +import org.elasticsearch.action.admin.indices.create.CreateIndexAction; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; +import org.elasticsearch.action.index.IndexAction; +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.index.query.QueryBuilders; +import org.junit.Test; +import org.xbib.elasticsearch.NodeTestUtils; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.junit.Assert.assertEquals; + +/** + * + */ +public class LangDetectChineseTest extends NodeTestUtils { + + @Test + public void 
testChineseLanguageCode() throws Exception { + startCluster(); + try { + CreateIndexRequestBuilder createIndexRequestBuilder = + new CreateIndexRequestBuilder(client(), CreateIndexAction.INSTANCE) + .setIndex("test"); + createIndexRequestBuilder.addMapping("someType", jsonBuilder() + .startObject() + .startObject("properties") + .startObject("content") + .field("type", "text") + .startObject("fields") + .startObject("language") + .field("type", "langdetect") + .array("languages", "zh-cn") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject()); + createIndexRequestBuilder.execute().actionGet(); + IndexRequestBuilder indexRequestBuilder = new IndexRequestBuilder(client(), IndexAction.INSTANCE) + .setIndex("test").setType("someType").setId("1") + .setSource("content", "位于美国首都华盛顿都会圈的希望中文学校5日晚举办活动庆祝建立20周年。从中国大陆留学生为子女学中文而自发建立的学习班,到学生规模在全美名列前茅的中文学校,这个平台的发展也折射出美国的中文教育热度逐步提升。\n" + + "希望中文学校是大华盛顿地区最大中文学校,现有7个校区逾4000名学生,规模在美国东部数一数二。不过,见证了希望中文学校20年发展的人们起初根本无法想象这个小小的中文教育平台能发展到今日之规模。"); + indexRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).execute().actionGet(); + SearchRequestBuilder searchRequestBuilder = + new SearchRequestBuilder(client(), SearchAction.INSTANCE) + .setIndices("test") + .setTypes("someType") + .setQuery(QueryBuilders.termQuery("content.language", "zh-cn")) + .addStoredField("content.language"); + SearchResponse searchResponse = searchRequestBuilder.execute().actionGet(); + assertEquals(1L, searchResponse.getHits().getTotalHits()); + assertEquals("zh-cn", searchResponse.getHits().getAt(0).field("content.language").getValue()); + } finally { + stopCluster(); + } + } +} diff --git a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMappingTest.java b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMappingTest.java index 7fea4a8..c42ccce 100644 --- a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMappingTest.java +++ b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMappingTest.java @@ -5,14 +5,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParseContext; - import org.junit.Assert; import org.junit.Test; import org.xbib.elasticsearch.MapperTestUtils; import java.io.IOException; import java.io.InputStreamReader; -import java.nio.charset.StandardCharsets; import static org.elasticsearch.common.io.Streams.copyToString; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -22,18 +20,15 @@ public class LangdetectMappingTest extends Assert { @Test public void testSimpleMappings() throws Exception { String mapping = copyToStringFromClasspath("simple-mapping.json"); - DocumentMapper docMapper = MapperTestUtils.newDocumentMapperParser().parse("someType", new CompressedXContent(mapping)); + DocumentMapper docMapper = MapperTestUtils.newDocumentMapperParser("someIndex").parse("someType", new CompressedXContent(mapping)); String sampleText = copyToStringFromClasspath("english.txt"); BytesReference json = jsonBuilder().startObject().field("someField", sampleText).endObject().bytes(); ParseContext.Document doc = docMapper.parse("someIndex", "someType", "1", json).rootDoc(); - //for (IndexableField field : doc.getFields()) { - // logger.info("{} = {}", field.name(), field.stringValue()); - //} assertEquals(1, doc.getFields("someField").length); assertEquals("en", doc.getFields("someField")[0].stringValue()); // re-parse it String builtMapping = 
docMapper.mappingSource().string(); - docMapper = MapperTestUtils.newDocumentMapperParser().parse("someType", new CompressedXContent(builtMapping)); + docMapper = MapperTestUtils.newDocumentMapperParser("someIndex").parse("someType", new CompressedXContent(builtMapping)); json = jsonBuilder().startObject().field("someField", sampleText).endObject().bytes(); doc = docMapper.parse("someIndex", "someType", "1", json).rootDoc(); assertEquals(1, doc.getFields("someField").length); @@ -43,32 +38,29 @@ public void testSimpleMappings() throws Exception { @Test public void testBinary() throws Exception { String mapping = copyToStringFromClasspath("base64-mapping.json"); - DocumentMapper docMapper = MapperTestUtils.newDocumentMapperParser().parse("someType", new CompressedXContent(mapping)); + DocumentMapper docMapper = MapperTestUtils.newDocumentMapperParser("someIndex").parse("someType", new CompressedXContent(mapping)); String sampleBinary = copyToStringFromClasspath("base64.txt"); String sampleText = copyToStringFromClasspath("base64-decoded.txt"); BytesReference json = jsonBuilder().startObject().field("someField", sampleBinary).endObject().bytes(); ParseContext.Document doc = docMapper.parse("someIndex", "someType", "1", json).rootDoc(); - //for (IndexableField field : doc.getFields()) { - // logger.info("binary {} = {}", field.name(), field.stringValue()); - //} assertEquals(1, doc.getFields("someField").length); assertEquals("en", doc.getFields("someField")[0].stringValue()); // re-parse it String builtMapping = docMapper.mappingSource().string(); - docMapper = MapperTestUtils.newDocumentMapperParser().parse("someType", new CompressedXContent(builtMapping)); + docMapper = MapperTestUtils.newDocumentMapperParser("someIndex").parse("someType", new CompressedXContent(builtMapping)); json = jsonBuilder().startObject().field("someField", sampleText).endObject().bytes(); doc = docMapper.parse("someIndex", "someType", "1", json).rootDoc(); - assertEquals(1, doc.getFields("someField").length); - assertEquals("en", doc.getFields("someField")[0].stringValue()); + assertEquals(1, doc.getFields("someField").length, 1); + assertEquals("en", doc.getFields("someField")[0].stringValue(), "en"); } @Test public void testCustomMappings() throws Exception { - Settings settings = Settings.settingsBuilder() + Settings settings = Settings.builder() .put("path.home", System.getProperty("path.home")) .loadFromStream("settings.json", getClass().getResourceAsStream("settings.json")).build(); String mapping = copyToStringFromClasspath("mapping.json"); - DocumentMapper docMapper = MapperTestUtils.newDocumentMapperParser(settings).parse("someType", new CompressedXContent(mapping)); + DocumentMapper docMapper = MapperTestUtils.newDocumentMapperParser(settings, "someIndex").parse("someType", new CompressedXContent(mapping)); String sampleText = copyToStringFromClasspath("german.txt"); BytesReference json = jsonBuilder().startObject().field("someField", sampleText).endObject().bytes(); ParseContext.Document doc = docMapper.parse("someIndex", "someType", "1", json).rootDoc(); @@ -79,7 +71,7 @@ public void testCustomMappings() throws Exception { @Test public void testBinary2() throws Exception { String mapping = copyToStringFromClasspath("base64-2-mapping.json"); - DocumentMapper docMapper = MapperTestUtils.newDocumentMapperParser().parse("someType", new CompressedXContent(mapping)); + DocumentMapper docMapper = MapperTestUtils.newDocumentMapperParser("someIndex").parse("someType", new CompressedXContent(mapping)); //String 
sampleBinary = copyToStringFromClasspath("base64-2.txt"); String sampleText = copyToStringFromClasspath("base64-2-decoded.txt"); BytesReference json = jsonBuilder().startObject().field("content", sampleText).endObject().bytes(); @@ -91,7 +83,7 @@ public void testBinary2() throws Exception { assertEquals("en", doc.getFields("content.language")[0].stringValue()); // re-parse it String builtMapping = docMapper.mappingSource().string(); - docMapper = MapperTestUtils.newDocumentMapperParser().parse("someType", new CompressedXContent(builtMapping)); + docMapper = MapperTestUtils.newDocumentMapperParser("someIndex").parse("someType", new CompressedXContent(builtMapping)); json = jsonBuilder().startObject().field("content", sampleText).endObject().bytes(); doc = docMapper.parse("someIndex", "someType", "1", json).rootDoc(); assertEquals(1, doc.getFields("content.language").length); @@ -101,7 +93,7 @@ public void testBinary2() throws Exception { @Test public void testShortTextProfile() throws Exception { String mapping = copyToStringFromClasspath("short-text-mapping.json"); - DocumentMapper docMapper = MapperTestUtils.newDocumentMapperParser().parse("someType", new CompressedXContent(mapping)); + DocumentMapper docMapper = MapperTestUtils.newDocumentMapperParser("someIndex").parse("someType", new CompressedXContent(mapping)); String sampleText = copyToStringFromClasspath("english.txt"); BytesReference json = jsonBuilder().startObject().field("someField", sampleText).endObject().bytes(); ParseContext.Document doc = docMapper.parse("someIndex", "someType", "1", json).rootDoc(); @@ -109,7 +101,7 @@ public void testShortTextProfile() throws Exception { assertEquals("en", doc.getFields("someField")[0].stringValue()); // re-parse it String builtMapping = docMapper.mappingSource().string(); - docMapper = MapperTestUtils.newDocumentMapperParser().parse("someType", new CompressedXContent(builtMapping)); + docMapper = MapperTestUtils.newDocumentMapperParser("someIndex").parse("someType", new CompressedXContent(builtMapping)); json = jsonBuilder().startObject().field("someField", sampleText).endObject().bytes(); doc = docMapper.parse("someIndex", "someType", "1", json).rootDoc(); assertEquals(1, doc.getFields("someField").length); @@ -119,7 +111,7 @@ public void testShortTextProfile() throws Exception { @Test public void testToFields() throws Exception { String mapping = copyToStringFromClasspath("mapping-to-fields.json"); - DocumentMapper docMapper = MapperTestUtils.newDocumentMapperParser().parse("someType", new CompressedXContent(mapping)); + DocumentMapper docMapper = MapperTestUtils.newDocumentMapperParser("someIndex").parse("someType", new CompressedXContent(mapping)); String sampleText = copyToStringFromClasspath("english.txt"); BytesReference json = jsonBuilder().startObject().field("someField", sampleText).endObject().bytes(); ParseContext.Document doc = docMapper.parse("someIndex", "someType", "1", json).rootDoc(); @@ -127,19 +119,16 @@ public void testToFields() throws Exception { assertEquals("en", doc.getFields("someField")[0].stringValue()); // re-parse it String builtMapping = docMapper.mappingSource().string(); - docMapper = MapperTestUtils.newDocumentMapperParser().parse("someType", new CompressedXContent(builtMapping)); + docMapper = MapperTestUtils.newDocumentMapperParser("someIndex").parse("someType", new CompressedXContent(builtMapping)); json = jsonBuilder().startObject().field("someField", sampleText).endObject().bytes(); doc = docMapper.parse("someIndex", "someType", "1", 
json).rootDoc(); - //for (IndexableField field : doc.getFields()) { - // System.err.println(field.name() + " = " + field.stringValue()); - //} assertEquals(1, doc.getFields("someField").length); assertEquals("en", doc.getFields("someField")[0].stringValue()); assertEquals(1, doc.getFields("english_field").length); assertEquals("This is a very small example of a text", doc.getFields("english_field")[0].stringValue()); } - public String copyToStringFromClasspath(String path) throws IOException { - return copyToString(new InputStreamReader(getClass().getResource(path).openStream(), StandardCharsets.UTF_8)); + private String copyToStringFromClasspath(String path) throws IOException { + return copyToString(new InputStreamReader(getClass().getResource(path).openStream(), "UTF-8")); } } diff --git a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LanguageTest.java b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LanguageTest.java index eb61dcc..bf0ee30 100644 --- a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LanguageTest.java +++ b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LanguageTest.java @@ -2,7 +2,6 @@ import org.junit.Assert; import org.junit.Test; - import org.xbib.elasticsearch.common.langdetect.Language; public class LanguageTest extends Assert { diff --git a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/NGramTest.java b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/NGramTest.java index 80821d2..fa5e1ac 100644 --- a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/NGramTest.java +++ b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/NGramTest.java @@ -2,7 +2,6 @@ import org.junit.Assert; import org.junit.Test; - import org.xbib.elasticsearch.common.langdetect.NGram; public class NGramTest extends Assert { diff --git a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleDetectorTest.java b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleDetectorTest.java index 14be4d5..dd30355 100644 --- a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleDetectorTest.java +++ b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleDetectorTest.java @@ -2,7 +2,6 @@ import org.junit.Assert; import org.junit.Test; - import org.xbib.elasticsearch.common.langdetect.LangdetectService; public class SimpleDetectorTest extends Assert { @@ -13,5 +12,4 @@ public final void testDetector() throws Exception { assertEquals("de", detect.detectAll("Das kann deutsch sein").get(0).getLanguage()); assertEquals("en", detect.detectAll("This is a very small test").get(0).getLanguage()); } - -} \ No newline at end of file +} diff --git a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleHttpTest.java b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleHttpTest.java new file mode 100644 index 0000000..5329663 --- /dev/null +++ b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleHttpTest.java @@ -0,0 +1,70 @@ +package org.xbib.elasticsearch.index.mapper.langdetect; + +import org.elasticsearch.common.io.Streams; +import org.junit.Ignore; +import org.junit.Test; +import org.xbib.elasticsearch.NodeTestUtils; + +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.OutputStreamWriter; +import java.io.StringReader; +import java.io.StringWriter; +import java.net.HttpURLConnection; +import java.net.URL; +import java.nio.charset.StandardCharsets; + +import static 
org.junit.Assert.assertEquals; + +/** + * + */ +@Ignore +public class SimpleHttpTest extends NodeTestUtils { + + @Test + public void httpPost() throws IOException { + startCluster(); + try { + String httpAddress = findHttpAddress(client()); + if (httpAddress == null) { + throw new IllegalArgumentException("no HTTP address found"); + } + URL base = new URL(httpAddress); + URL url = new URL(base, "_langdetect"); + HttpURLConnection connection = (HttpURLConnection) url.openConnection(); + connection.setRequestMethod("POST"); + connection.setDoOutput(true); + connection.setDoInput(true); + Streams.copy(new StringReader("Das ist ein Text"), new OutputStreamWriter(connection.getOutputStream(), StandardCharsets.UTF_8)); + StringWriter response = new StringWriter(); + Streams.copy(new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8), response); + assertEquals("{\"languages\":[{\"language\":\"de\",\"probability\":0.9999967609942226}]}", response.toString()); + } finally { + stopCluster(); + } + } + + @Test + public void httpPostShortProfile() throws IOException { + startCluster(); + try { + String httpAddress = findHttpAddress(client()); + if (httpAddress == null) { + throw new IllegalArgumentException("no HTTP address found"); + } + URL base = new URL(httpAddress); + URL url = new URL(base, "_langdetect?profile=short-text"); + HttpURLConnection connection = (HttpURLConnection) url.openConnection(); + connection.setRequestMethod("POST"); + connection.setDoOutput(true); + connection.setDoInput(true); + Streams.copy(new StringReader("Das ist ein Text"), new OutputStreamWriter(connection.getOutputStream(), StandardCharsets.UTF_8)); + StringWriter response = new StringWriter(); + Streams.copy(new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8), response); + assertEquals("{\"profile\":\"short-text\",\"languages\":[{\"language\":\"de\",\"probability\":0.9999968539079941}]}", response.toString()); + } finally { + stopCluster(); + } + } +} diff --git a/src/test/resources/log4j2.xml b/src/test/resources/log4j2.xml index f71aced..284a1c9 100644 --- a/src/test/resources/log4j2.xml +++ b/src/test/resources/log4j2.xml @@ -4,10 +4,19 @@ + + + + + + + - + + \ No newline at end of file diff --git a/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/base64-2-mapping.json b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/base64-2-mapping.json index 08ca359..3652b09 100644 --- a/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/base64-2-mapping.json +++ b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/base64-2-mapping.json @@ -2,11 +2,8 @@ "someType" : { "properties": { "content": { - "type": "multi_field", + "type": "text", "fields": { - "content": { - "type": "string" - }, "language": { "type": "langdetect", "binary" : true diff --git a/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/mapping-to-fields.json b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/mapping-to-fields.json index 70bd95d..9378754 100644 --- a/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/mapping-to-fields.json +++ b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/mapping-to-fields.json @@ -11,11 +11,11 @@ }, "german_field" : { "analyzer" : "german", - "type": "string" + "type": "text" }, "english_field" : { "analyzer" : "english", - "type" : "string" + "type" : "text" } } } From be009f8946be8c219fa6de29b330cafa3af93f55 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Jo=CC=88rg=20Prante?= Date: Thu, 26 Jan 2017 16:14:08 +0100 Subject: [PATCH 06/19] README --- README.adoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.adoc b/README.adoc index fedec60..84344fc 100644 --- a/README.adoc +++ b/README.adoc @@ -8,8 +8,8 @@ image:https://img.shields.io/twitter/url/https/twitter.com/xbib.svg?style=social image:https://upload.wikimedia.org/wikipedia/commons/thumb/2/29/Pieter_Bruegel_the_Elder_-_The_Tower_of_Babel_%28Rotterdam%29_-_Google_Art_Project.jpg/299px-Pieter_Bruegel_the_Elder_-_The_Tower_of_Babel_%28Rotterdam%29_-_Google_Art_Project.jpg["Tower of Babel"] -This is an implementation of a plugin for [Elasticsearch](http://github.com/elasticsearch/elasticsearch) using the -implementation of Nakatani Shuyo's [language detector](http://code.google.com/p/language-detection/). +This is an implementation of a plugin for http://github.com/elasticsearch/elasticsearch[Elasticsearch] using the +implementation of Nakatani Shuyo's http://code.google.com/p/language-detection/[language detector]. It uses 3-gram character and a Bayesian filter with various normalizations and feature sampling. The precision is over 99% for 53 languages. From 3396b527cea0374747b83af61ae9073d500bef21 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=CC=88rg=20Prante?= Date: Thu, 26 Jan 2017 17:08:09 +0100 Subject: [PATCH 07/19] README --- README.adoc | 2 +- ....mapper.langdetect.DetectLanguageTest.html | 18 +- ....index.mapper.langdetect.DetectorTest.html | 2 +- ...apper.langdetect.LangDetectActionTest.html | 687 +++++++++--------- ...apper.langdetect.LangDetectBinaryTest.html | 511 +++++++------ ...pper.langdetect.LangDetectChineseTest.html | 512 ++++++------- ...dex.mapper.langdetect.LangProfileTest.html | 6 +- ...pper.langdetect.LangdetectMappingTest.html | 78 +- ....index.mapper.langdetect.LanguageTest.html | 2 +- ...rch.index.mapper.langdetect.NGramTest.html | 2 +- ....mapper.langdetect.SimpleDetectorTest.html | 8 +- ...ndex.mapper.langdetect.SimpleHttpTest.html | 4 +- docs/test/index.html | 22 +- ...elasticsearch.index.mapper.langdetect.html | 20 +- 14 files changed, 931 insertions(+), 943 deletions(-) diff --git a/README.adoc b/README.adoc index 84344fc..31d0522 100644 --- a/README.adoc +++ b/README.adoc @@ -6,7 +6,7 @@ image:https://maven-badges.herokuapp.com/maven-central/org.xbib.elasticsearch.pl image:https://img.shields.io/badge/License-Apache%202.0-blue.svg[title="Apache License 2.0", link="https://opensource.org/licenses/Apache-2.0"] image:https://img.shields.io/twitter/url/https/twitter.com/xbib.svg?style=social&label=Follow%20%40xbib[title="Twitter", link="https://twitter.com/xbib"] -image:https://upload.wikimedia.org/wikipedia/commons/thumb/2/29/Pieter_Bruegel_the_Elder_-_The_Tower_of_Babel_%28Rotterdam%29_-_Google_Art_Project.jpg/299px-Pieter_Bruegel_the_Elder_-_The_Tower_of_Babel_%28Rotterdam%29_-_Google_Art_Project.jpg["Tower of Babel"] +image:https://github.com/jprante/elasticsearch-langdetect/blob/master/src/docs/img/towerofbabel.jpg?raw=true["Tower of Babel"] This is an implementation of a plugin for http://github.com/elasticsearch/elasticsearch[Elasticsearch] using the implementation of Nakatani Shuyo's http://code.google.com/p/language-detection/[language detector]. 
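The detector described in the README above can also be exercised directly from Java. A minimal sketch, based on SimpleDetectorTest and DetectLanguageTest in this patch series (the class name DetectExample and the sample sentence are illustrative assumptions, not part of the plugin):

    import org.xbib.elasticsearch.common.langdetect.LangdetectService;

    public class DetectExample {
        public static void main(String[] args) throws Exception {
            // loads the language profiles, as done in the tests above
            LangdetectService service = new LangdetectService();
            // detectAll() returns the detected languages; the first entry is the
            // best match, which is how the tests read the result
            String language = service.detectAll("Das kann deutsch sein").get(0).getLanguage();
            System.out.println(language); // "de" is the expected result, as asserted in SimpleDetectorTest
        }
    }

Index-time detection via the "langdetect" mapping type and the _langdetect REST endpoint are exercised in LangDetectBinaryTest and SimpleHttpTest above.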
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectLanguageTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectLanguageTest.html index fe8229e..5ff514e 100644 --- a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectLanguageTest.html +++ b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectLanguageTest.html @@ -41,7 +41,7 @@

    Class org.xbib.elasticsearch.index.mapper.langdetect.DetectLanguageTest

    -
    1.523s
    +
    1.807s

    duration

    @@ -79,17 +79,17 @@

    Tests

    testChinese -0.197s +0.172s passed testEnglish -0.950s +1.260s passed testJapanese -0.191s +0.190s passed @@ -102,10 +102,10 @@

    Tests

    Standard output

    -
    [11:30:08,831][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    -[11:30:09,040][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    -[11:30:09,240][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    -[11:30:09,424][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +
    [16:15:10,036][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[16:15:10,238][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[16:15:10,412][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
    +[16:15:10,596][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
     
    @@ -117,7 +117,7 @@

    Standard output

    Generated by -Gradle 3.2.1 at 26.01.2017 11:31:14

    +Gradle 3.2.1 at 26.01.2017 16:16:15

    diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectorTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectorTest.html index 92618ec..887a6a9 100644 --- a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectorTest.html +++ b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectorTest.html @@ -109,7 +109,7 @@

    Tests

    Generated by -Gradle 3.2.1 at 26.01.2017 11:31:14

    +Gradle 3.2.1 at 26.01.2017 16:16:15

    diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectActionTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectActionTest.html index 383f698..5f56fe5 100644 --- a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectActionTest.html +++ b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectActionTest.html @@ -41,7 +41,7 @@

    Class org.xbib.elasticsearch.index.mapper.langdetect.LangDetectActionTest
    -
    47.717s
    +
    48.559s

    duration

    @@ -79,12 +79,12 @@

    Tests

    testLangDetectProfile -5.799s +6.109s passed testSort -41.918s +42.450s passed @@ -92,36 +92,36 @@

    Tests

    Standard output

    -
    [11:30:09,478][INFO ][test                     ][Test worker] settings cluster name
    -[11:30:09,478][INFO ][test                     ][Test worker] starting nodes
    -[11:30:09,513][INFO ][test                     ][Test worker] settings={cluster.name=test-helper-cluster--joerg-1, http.enabled=false, path.home=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect, transport.type=local}
    -[11:30:09,680][INFO ][org.elasticsearch.node.Node][Test worker] initializing ...
    -[11:30:09,785][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] using node location [[NodePath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, spins=null}]], local_lock_id [0]
    -[11:30:09,794][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] node data locations details:
    - -> /Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, free_space [138.5gb], usable_space [138.3gb], total_space [931gb], spins? [unknown], mount [/ (/dev/disk0s2)], type [hfs]
    -[11:30:09,801][INFO ][org.elasticsearch.env.NodeEnvironment][Test worker] heap size [3.5gb], compressed ordinary object pointers [true]
    -[11:30:09,804][INFO ][org.elasticsearch.node.Node][Test worker] node name [xjGsg-9] derived from node ID [xjGsg-9xSHuExRZsmYSNxw]; set [node.name] to override
    -[11:30:09,808][INFO ][org.elasticsearch.node.Node][Test worker] version[5.1.2], pid[7248], build[c8c4c16/2017-01-11T20:18:39.146Z], OS[Mac OS X/10.9.5/x86_64], JVM[Azul Systems, Inc./OpenJDK 64-Bit Server VM/1.8.0_112/25.112-b16]
    -[11:30:09,809][DEBUG][org.elasticsearch.node.Node][Test worker] using config [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/config], data [[/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data]], logs [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/logs], plugins [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins]
    -[11:30:09,819][DEBUG][org.elasticsearch.plugins.PluginsService][Test worker] [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins] directory does not exist.
    -[11:30:09,821][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] no modules loaded
    -[11:30:09,823][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] loaded plugin [org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin]
    -[11:30:09,876][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [force_merge], size [1], queue size [unbounded]
    -[11:30:09,878][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_started], core [1], max [16], keep alive [5m]
    -[11:30:09,879][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [listener], size [4], queue size [unbounded]
    -[11:30:09,885][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [index], size [8], queue size [200]
    -[11:30:09,886][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [refresh], core [1], max [4], keep alive [5m]
    -[11:30:09,887][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [generic], core [4], max [128], keep alive [30s]
    -[11:30:09,887][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [warmer], core [1], max [4], keep alive [5m]
    -[11:30:09,888][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [search], size [13], queue size [1k]
    -[11:30:09,889][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [flush], core [1], max [4], keep alive [5m]
    -[11:30:09,889][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_store], core [1], max [16], keep alive [5m]
    -[11:30:09,890][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [management], core [1], max [5], keep alive [5m]
    -[11:30:09,890][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [get], size [8], queue size [1k]
    -[11:30:09,891][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [bulk], size [8], queue size [50]
    -[11:30:09,891][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [snapshot], core [1], max [4], keep alive [5m]
    -[11:30:10,056][DEBUG][org.elasticsearch.script.ScriptService][Test worker] using script cache with max_size [100], expire [0s]
    -[11:30:10,788][DEBUG][org.elasticsearch.common.network.IfConfig][Test worker] configuration:
    +
    [16:15:10,624][INFO ][test                     ][Test worker] settings cluster name
    +[16:15:10,624][INFO ][test                     ][Test worker] starting nodes
    +[16:15:10,631][INFO ][test                     ][Test worker] settings={cluster.name=test-helper-cluster--joerg-1, http.enabled=false, path.home=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect, transport.type=local}
    +[16:15:10,793][INFO ][org.elasticsearch.node.Node][Test worker] initializing ...
    +[16:15:10,900][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] using node location [[NodePath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, spins=null}]], local_lock_id [0]
    +[16:15:10,907][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] node data locations details:
    + -> /Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, free_space [138.1gb], usable_space [137.8gb], total_space [931gb], spins? [unknown], mount [/ (/dev/disk0s2)], type [hfs]
    +[16:15:10,911][INFO ][org.elasticsearch.env.NodeEnvironment][Test worker] heap size [3.5gb], compressed ordinary object pointers [true]
    +[16:15:10,913][INFO ][org.elasticsearch.node.Node][Test worker] node name [bewR05Z] derived from node ID [bewR05ZwQo2sQ7yw5lONSg]; set [node.name] to override
    +[16:15:10,914][INFO ][org.elasticsearch.node.Node][Test worker] version[5.1.2], pid[9098], build[c8c4c16/2017-01-11T20:18:39.146Z], OS[Mac OS X/10.9.5/x86_64], JVM[Azul Systems, Inc./OpenJDK 64-Bit Server VM/1.8.0_112/25.112-b16]
    +[16:15:10,915][DEBUG][org.elasticsearch.node.Node][Test worker] using config [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/config], data [[/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data]], logs [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/logs], plugins [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins]
    +[16:15:10,927][DEBUG][org.elasticsearch.plugins.PluginsService][Test worker] [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins] directory does not exist.
    +[16:15:10,929][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] no modules loaded
    +[16:15:10,930][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] loaded plugin [org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin]
    +[16:15:10,964][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [force_merge], size [1], queue size [unbounded]
    +[16:15:10,966][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_started], core [1], max [16], keep alive [5m]
    +[16:15:10,967][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [listener], size [4], queue size [unbounded]
    +[16:15:10,971][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [index], size [8], queue size [200]
    +[16:15:10,971][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [refresh], core [1], max [4], keep alive [5m]
    +[16:15:10,971][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [generic], core [4], max [128], keep alive [30s]
    +[16:15:10,972][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [warmer], core [1], max [4], keep alive [5m]
    +[16:15:10,972][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [search], size [13], queue size [1k]
    +[16:15:10,973][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [flush], core [1], max [4], keep alive [5m]
    +[16:15:10,973][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_store], core [1], max [16], keep alive [5m]
    +[16:15:10,974][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [management], core [1], max [5], keep alive [5m]
    +[16:15:10,974][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [get], size [8], queue size [1k]
    +[16:15:10,974][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [bulk], size [8], queue size [50]
    +[16:15:10,975][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [snapshot], core [1], max [4], keep alive [5m]
    +[16:15:11,122][DEBUG][org.elasticsearch.script.ScriptService][Test worker] using script cache with max_size [100], expire [0s]
    +[16:15:11,953][DEBUG][org.elasticsearch.common.network.IfConfig][Test worker] configuration:
     
     lo0
             inet 127.0.0.1 netmask:255.0.0.0 scope:host
    @@ -135,269 +135,260 @@ 

    Standard output

    hardware 68:5B:35:BC:46:72 UP MULTICAST mtu:1500 index:9 -[11:30:10,834][DEBUG][org.elasticsearch.monitor.jvm.JvmGcMonitorService][Test worker] enabled [true], interval [1s], gc_threshold [{default=GcThreshold{name='default', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}, young=GcThreshold{name='young', warnThreshold=1000, infoThreshold=700, debugThreshold=400}, old=GcThreshold{name='old', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}}], overhead [50, 25, 10] -[11:30:10,843][DEBUG][org.elasticsearch.monitor.os.OsService][Test worker] using refresh_interval [1s] -[11:30:10,891][DEBUG][org.elasticsearch.monitor.process.ProcessService][Test worker] using refresh_interval [1s] -[11:30:10,901][DEBUG][org.elasticsearch.monitor.jvm.JvmService][Test worker] using refresh_interval [1s] -[11:30:10,901][DEBUG][org.elasticsearch.monitor.fs.FsService][Test worker] using refresh_interval [1s] -[11:30:10,904][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider][Test worker] using [cluster.routing.allocation.allow_rebalance] with [indices_all_active] -[11:30:10,905][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider][Test worker] using [cluster_concurrent_rebalance] with [2] -[11:30:11,021][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider][Test worker] using node_concurrent_outgoing_recoveries [2], node_concurrent_incoming_recoveries [2], node_initial_primaries_recoveries [4] -[11:30:11,778][DEBUG][org.elasticsearch.index.store.IndexStoreConfig][Test worker] using indices.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [0b] -[11:30:11,782][DEBUG][org.elasticsearch.indices.IndicesQueryCache][Test worker] using [node] query cache with size [364mb] max filter count [10000] -[11:30:11,789][DEBUG][org.elasticsearch.indices.IndexingMemoryController][Test worker] using indexing buffer size [364mb] with indices.memory.shard_inactive_time [5m], indices.memory.interval [5s] -[11:30:11,798][DEBUG][org.elasticsearch.transport.local.LocalTransport][Test worker] creating [8] workers, queue_size [-1] -[11:30:11,811][DEBUG][org.elasticsearch.discovery.zen.UnicastZenPing][Test worker] using initial hosts [0.0.0.0], with concurrent_connects [10], resolve_timeout [5s] -[11:30:11,814][DEBUG][org.elasticsearch.discovery.zen.ElectMasterService][Test worker] using minimum_master_nodes [-1] -[11:30:11,815][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][Test worker] using ping_timeout [3s], join.timeout [1m], master_election.ignore_non_master [false] -[11:30:11,820][DEBUG][org.elasticsearch.discovery.zen.MasterFaultDetection][Test worker] [master] uses ping_interval [1s], ping_timeout [30s], ping_retries [3] -[11:30:11,824][DEBUG][org.elasticsearch.discovery.zen.NodesFaultDetection][Test worker] [node ] uses ping_interval [1s], ping_timeout [30s], ping_retries [3] -[11:30:12,092][DEBUG][org.elasticsearch.indices.recovery.RecoverySettings][Test worker] using max_bytes_per_sec[40mb] -[11:30:12,275][DEBUG][org.elasticsearch.gateway.GatewayAllocator$InternalPrimaryShardAllocator][Test worker] using initial_shards [quorum] -[11:30:12,492][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] 
-[11:30:12,545][DEBUG][org.elasticsearch.gateway.GatewayMetaState][Test worker] took 0s to load state -[11:30:12,551][INFO ][org.elasticsearch.node.Node][Test worker] initialized -[11:30:12,551][INFO ][org.elasticsearch.node.Node][Test worker] starting ... -[11:30:12,556][INFO ][org.elasticsearch.transport.TransportService][Test worker] publish_address {local[1]}, bound_addresses {local[1]} -[11:30:12,576][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [initial_join]: execute -[11:30:12,577][DEBUG][org.elasticsearch.node.Node][Test worker] waiting to join the cluster. timeout [30s] -[11:30:12,579][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [initial_join]: took [2ms] no change in cluster_state -[11:30:15,599][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[xjGsg-9][generic][T#1]] filtered ping responses: (ignore_non_masters [false]) - --> ping_response{node [{xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]}], id[7], master [null],cluster_state_version [-1], cluster_name[test-helper-cluster--joerg-1]} -[11:30:15,602][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[xjGsg-9][generic][T#1]] elected as master, waiting for incoming joins ([0] needed) -[11:30:15,605][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: execute -[11:30:15,619][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] cluster state updated, version [1], source [zen-disco-elected-as-master ([0] nodes joined)] -[11:30:15,620][INFO ][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] new_master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]}, reason: zen-disco-elected-as-master ([0] nodes joined) -[11:30:15,620][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] publishing cluster state version [1] -[11:30:15,623][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] set local cluster state to version 1 -[11:30:15,627][INFO ][org.elasticsearch.node.Node][Test worker] started -[11:30:15,627][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: took [22ms] done applying updated cluster_state (version: 1, uuid: vPR-e6RiRzeKRhFhtworoQ) -[11:30:15,642][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: execute -[11:30:15,644][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] cluster state updated, version [2], source [local-gateway-elected-state] -[11:30:15,644][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] publishing cluster state version [2] -[11:30:15,645][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] set local cluster state to version 2 -[11:30:15,650][INFO 
][org.elasticsearch.gateway.GatewayService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] recovered [0] indices into cluster_state -[11:30:15,650][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: took [7ms] done applying updated cluster_state (version: 2, uuid: tABH6uFAQli18Ckpg_Gdog) -[11:30:15,651][INFO ][test ][Test worker] nodes are started -[11:30:15,656][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: execute -[11:30:15,684][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] creating Index [[test/Xm20JUvBQCGan_kpOOa9Dw]], shards [5]/[1] - reason [create index] -[11:30:15,694][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null] -[11:30:15,755][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] using dynamic[true] -[11:30:16,080][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] -[11:30:16,372][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] -[11:30:16,399][INFO ][org.elasticsearch.cluster.metadata.MetaDataCreateIndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test] creating index, cause [api], templates [], shards [5]/[1], mappings [article] -[11:30:16,427][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test] closing ... (reason [cleaning up after validating index on master]) -[11:30:16,427][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test/Xm20JUvBQCGan_kpOOa9Dw] closing index service (reason [cleaning up after validating index on master]) -[11:30:16,427][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] clearing all bitsets because [close] -[11:30:16,428][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] full cache clear, reason [close] -[11:30:16,428][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] clearing all bitsets because [close] -[11:30:16,430][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test/Xm20JUvBQCGan_kpOOa9Dw] closed... 
(reason [cleaning up after validating index on master]) -[11:30:16,433][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] cluster state updated, version [3], source [create-index [test], cause [api]] -[11:30:16,433][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] publishing cluster state version [3] -[11:30:16,433][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] set local cluster state to version 3 -[11:30:16,435][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [[test/Xm20JUvBQCGan_kpOOa9Dw]] creating index -[11:30:16,436][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] creating Index [[test/Xm20JUvBQCGan_kpOOa9Dw]], shards [5]/[1] - reason [create index] -[11:30:16,437][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null] -[11:30:16,438][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] using dynamic[true] -[11:30:16,439][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [[test/Xm20JUvBQCGan_kpOOa9Dw]] adding mapping [article], source [{"article":{"properties":{"content":{"type":"langdetect","analyzer":"_keyword","include_in_all":false,"languages":["de","en","fr"]}}}}] -[11:30:16,606][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] -[11:30:16,785][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] -[11:30:16,787][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][1] creating shard -[11:30:16,794][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][1] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/Xm20JUvBQCGan_kpOOa9Dw/1, shard=[test][1]}] -[11:30:16,794][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] creating shard_id [test][1] -[11:30:16,809][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] -[11:30:16,872][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED] -[11:30:16,880][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] 
-[11:30:16,881][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][3] creating shard -[11:30:16,882][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][3] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/Xm20JUvBQCGan_kpOOa9Dw/3, shard=[test][3]}] -[11:30:16,882][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] creating shard_id [test][3] -[11:30:16,885][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] -[11:30:16,886][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED] -[11:30:16,888][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] -[11:30:16,889][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][2] creating shard -[11:30:16,890][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#1]] starting recovery from store ... -[11:30:16,890][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#3]] starting recovery from store ... -[11:30:16,890][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][2] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/Xm20JUvBQCGan_kpOOa9Dw/2, shard=[test][2]}] -[11:30:16,890][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] creating shard_id [test][2] -[11:30:16,891][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] -[11:30:16,892][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED] -[11:30:16,894][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] -[11:30:16,894][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][0] creating shard -[11:30:16,894][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#2]] starting recovery from store ... 
-[11:30:16,895][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][0] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/Xm20JUvBQCGan_kpOOa9Dw/0, shard=[test][0]}] -[11:30:16,895][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] creating shard_id [test][0] -[11:30:16,897][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] -[11:30:16,899][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED] -[11:30:16,904][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] -[11:30:16,906][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#4]] starting recovery from store ... -[11:30:16,914][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: took [1.2s] done applying updated cluster_state (version: 3, uuid: 8FEr5_B6TAGyOsOUZNFWRg) -[11:30:16,988][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[xjGsg-9][generic][T#4]] wipe translog location - creating new translog -[11:30:16,988][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[xjGsg-9][generic][T#3]] wipe translog location - creating new translog -[11:30:16,988][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[xjGsg-9][generic][T#2]] wipe translog location - creating new translog -[11:30:16,988][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[xjGsg-9][generic][T#1]] wipe translog location - creating new translog -[11:30:17,000][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[xjGsg-9][generic][T#4]] no translog ID present in the current generation - creating one -[11:30:17,000][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[xjGsg-9][generic][T#1]] no translog ID present in the current generation - creating one -[11:30:17,001][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[xjGsg-9][generic][T#2]] no translog ID present in the current generation - creating one -[11:30:17,002][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[xjGsg-9][generic][T#3]] no translog ID present in the current generation - creating one -[11:30:17,040][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] -[11:30:17,040][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#4]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] -[11:30:17,040][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#1]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] -[11:30:17,040][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#2]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] -[11:30:17,041][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#4]] recovery completed from [shard_store], took [145ms] 
-[11:30:17,041][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#1]] recovery completed from [shard_store], took [158ms] -[11:30:17,041][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#3]] recovery completed from [shard_store], took [249ms] -[11:30:17,042][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#2]] recovery completed from [shard_store], took [150ms] -[11:30:17,042][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#3]] [test][1] sending [internal:cluster/shard/started] to [xjGsg-9xSHuExRZsmYSNxw] for shard entry [shard id [[test][1]], allocation id [r31ohnA5TS-24_8ha4AXyQ], primary term [0], message [after new shard recovery]] -[11:30:17,042][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#4]] [test][0] sending [internal:cluster/shard/started] to [xjGsg-9xSHuExRZsmYSNxw] for shard entry [shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [after new shard recovery]] -[11:30:17,042][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#2]] [test][2] sending [internal:cluster/shard/started] to [xjGsg-9xSHuExRZsmYSNxw] for shard entry [shard id [[test][2]], allocation id [N9wqcAeVRUuHyf-U33djlA], primary term [0], message [after new shard recovery]] -[11:30:17,042][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#1]] [test][3] sending [internal:cluster/shard/started] to [xjGsg-9xSHuExRZsmYSNxw] for shard entry [shard id [[test][3]], allocation id [b2Rs7kcaTROKAERrSklvZw], primary term [0], message [after new shard recovery]] -[11:30:17,044][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#3]] [test][1] received shard started for [shard id [[test][1]], allocation id [r31ohnA5TS-24_8ha4AXyQ], primary term [0], message [after new shard recovery]] -[11:30:17,044][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#1]] [test][3] received shard started for [shard id [[test][3]], allocation id [b2Rs7kcaTROKAERrSklvZw], primary term [0], message [after new shard recovery]] -[11:30:17,044][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#2]] [test][2] received shard started for [shard id [[test][2]], allocation id [N9wqcAeVRUuHyf-U33djlA], primary term [0], message [after new shard recovery]] -[11:30:17,044][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#4]] [test][0] received shard started for [shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [after new shard recovery]] -[11:30:17,046][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][1]], allocation id [r31ohnA5TS-24_8ha4AXyQ], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [b2Rs7kcaTROKAERrSklvZw], primary term [0], message [after new shard recovery], shard id [[test][2]], allocation id [N9wqcAeVRUuHyf-U33djlA], primary term [0], message [after new shard recovery]]]: execute -[11:30:17,070][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][1] starting shard 
[test][1], node[xjGsg-9xSHuExRZsmYSNxw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=r31ohnA5TS-24_8ha4AXyQ], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:30:16.404Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][1]], allocation id [r31ohnA5TS-24_8ha4AXyQ], primary term [0], message [after new shard recovery]]) -[11:30:17,070][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][3] starting shard [test][3], node[xjGsg-9xSHuExRZsmYSNxw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=b2Rs7kcaTROKAERrSklvZw], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:30:16.404Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][3]], allocation id [b2Rs7kcaTROKAERrSklvZw], primary term [0], message [after new shard recovery]]) -[11:30:17,070][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][2] starting shard [test][2], node[xjGsg-9xSHuExRZsmYSNxw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=N9wqcAeVRUuHyf-U33djlA], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:30:16.404Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][2]], allocation id [N9wqcAeVRUuHyf-U33djlA], primary term [0], message [after new shard recovery]]) -[11:30:17,076][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] cluster state updated, version [4], source [shard-started[shard id [[test][1]], allocation id [r31ohnA5TS-24_8ha4AXyQ], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [b2Rs7kcaTROKAERrSklvZw], primary term [0], message [after new shard recovery], shard id [[test][2]], allocation id [N9wqcAeVRUuHyf-U33djlA], primary term [0], message [after new shard recovery]]] -[11:30:17,076][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] publishing cluster state version [4] -[11:30:17,076][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] set local cluster state to version 4 -[11:30:17,077][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] -[11:30:17,078][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] -[11:30:17,078][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] -[11:30:17,079][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][4] creating shard -[11:30:17,080][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][4] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/Xm20JUvBQCGan_kpOOa9Dw/4, shard=[test][4]}] -[11:30:17,080][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] creating shard_id [test][4] 
-[11:30:17,081][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] -[11:30:17,082][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED] -[11:30:17,083][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] -[11:30:17,083][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#3]] starting recovery from store ... -[11:30:17,083][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][0] sending [internal:cluster/shard/started] to [xjGsg-9xSHuExRZsmYSNxw] for shard entry [shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] -[11:30:17,084][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][0] received shard started for [shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] -[11:30:17,086][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[xjGsg-9][generic][T#3]] wipe translog location - creating new translog -[11:30:17,088][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][1]], allocation id [r31ohnA5TS-24_8ha4AXyQ], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [b2Rs7kcaTROKAERrSklvZw], primary term [0], message [after new shard recovery], shard id [[test][2]], allocation id [N9wqcAeVRUuHyf-U33djlA], primary term [0], message [after new shard recovery]]]: took [42ms] done applying updated cluster_state (version: 4, uuid: 9m6tB2ahT8CkbXeYtl-KkQ) -[11:30:17,089][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: execute -[11:30:17,089][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[xjGsg-9][generic][T#3]] no translog ID present in the current generation - creating one -[11:30:17,089][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][0] starting shard [test][0], node[xjGsg-9xSHuExRZsmYSNxw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=z78LVcKSTOWuUYQNekMARw], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:30:16.404Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [after new shard recovery]]) 
-[11:30:17,093][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] cluster state updated, version [5], source [shard-started[shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]] -[11:30:17,093][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] publishing cluster state version [5] -[11:30:17,093][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] set local cluster state to version 5 -[11:30:17,105][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] -[11:30:17,105][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][generic][T#3]] recovery completed from [shard_store], took [25ms] -[11:30:17,106][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][4] sending [internal:cluster/shard/started] to [xjGsg-9xSHuExRZsmYSNxw] for shard entry [shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] -[11:30:17,106][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][4] received shard started for [shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] -[11:30:17,115][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] -[11:30:17,115][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#3]] [test][4] sending [internal:cluster/shard/started] to [xjGsg-9xSHuExRZsmYSNxw] for shard entry [shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [after new shard recovery]] -[11:30:17,115][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][generic][T#3]] [test][4] received shard started for [shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [after new shard recovery]] -[11:30:17,120][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [z78LVcKSTOWuUYQNekMARw], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: took [30ms] done applying updated cluster_state (version: 5, uuid: 
QyfZAKvASqSwPu5-A5fW0g) -[11:30:17,121][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [after new shard recovery]]]: execute -[11:30:17,122][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test][4] starting shard [test][4], node[xjGsg-9xSHuExRZsmYSNxw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=J-2CbydsTECn5IDw9UhBXg], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:30:16.404Z], delayed=false, allocation_status[deciders_throttled]] (shard started task: [shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]) -[11:30:17,126][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] cluster state updated, version [6], source [shard-started[shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [after new shard recovery]]] -[11:30:17,127][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] publishing cluster state version [6] -[11:30:17,128][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] set local cluster state to version 6 -[11:30:17,130][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] -[11:30:17,135][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [master {xjGsg-9}{xjGsg-9xSHuExRZsmYSNxw}{nFs4lD9rTWaCFS4k7yjX5A}{local}{local[1]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][4]], allocation id [J-2CbydsTECn5IDw9UhBXg], primary term [0], message [after new shard recovery]]]: took [13ms] done applying updated cluster_state (version: 6, uuid: LFnl7Ex3SFa4MRmFAhPVtg) -[11:30:45,651][INFO ][org.elasticsearch.cluster.routing.allocation.DiskThresholdMonitor][elasticsearch[xjGsg-9][management][T#2]] low disk watermark [85%] exceeded on [xjGsg-9xSHuExRZsmYSNxw][xjGsg-9][/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0] free: 138.3gb[14.8%], replicas will not be assigned to this node -[11:30:47,177][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [put-mapping[article]]: execute 
-[11:30:47,181][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] using dynamic[true] -[11:30:47,351][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] -[11:30:47,702][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] -[11:30:48,009][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] -[11:30:48,010][DEBUG][org.elasticsearch.cluster.metadata.MetaDataMappingService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [test/Xm20JUvBQCGan_kpOOa9Dw] update_mapping [article] with source [{"article":{"properties":{"content":{"type":"langdetect","analyzer":"_keyword","include_in_all":false,"languages":["de","en","fr"]},"title":{"type":"text","fields":{"keyword":{"type":"keyword","ignore_above":256}}}}}}] -[11:30:48,011][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] cluster state updated, version [7], source [put-mapping[article]] -[11:30:48,011][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] publishing cluster state version [7] -[11:30:48,012][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] set local cluster state to version 7 -[11:30:48,012][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] [[test/Xm20JUvBQCGan_kpOOa9Dw]] updating mapping [article], source [{"article":{"properties":{"content":{"type":"langdetect","analyzer":"_keyword","include_in_all":false,"languages":["de","en","fr"]},"title":{"type":"text","fields":{"keyword":{"type":"keyword","ignore_above":256}}}}}}] -[11:30:48,515][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] -[11:30:48,967][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] -[11:30:48,980][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[xjGsg-9][clusterService#updateTask][T#1]] processing [put-mapping[article]]: took [1.8s] done applying updated 
cluster_state (version: 7, uuid: bK5LpBuLTDiHQwYR36L__w) -[11:30:49,295][INFO ][test ][Test worker] stopping nodes -[11:30:49,296][INFO ][org.elasticsearch.node.Node][Test worker] stopping ... -[11:30:49,299][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test] closing ... (reason [shutdown]) -[11:30:49,299][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/Xm20JUvBQCGan_kpOOa9Dw] closing index service (reason [shutdown]) -[11:30:49,300][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closing... (reason: [shutdown]) -[11:30:49,300][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] -[11:30:49,300][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk -[11:30:49,301][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock -[11:30:49,302][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock -[11:30:49,302][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed -[11:30:49,305][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] -[11:30:49,306][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 -[11:30:49,306][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closed (reason: [shutdown]) -[11:30:49,306][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closing... (reason: [shutdown]) -[11:30:49,306][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] -[11:30:49,306][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk -[11:30:49,306][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock -[11:30:49,307][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock -[11:30:49,307][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed -[11:30:49,310][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] -[11:30:49,311][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 -[11:30:49,311][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closed (reason: [shutdown]) -[11:30:49,311][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closing... 
(reason: [shutdown]) -[11:30:49,311][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] -[11:30:49,311][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk -[11:30:49,345][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock -[11:30:49,345][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock -[11:30:49,346][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed -[11:30:49,348][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] -[11:30:49,348][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 -[11:30:49,348][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closed (reason: [shutdown]) -[11:30:49,348][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closing... (reason: [shutdown]) -[11:30:49,349][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] -[11:30:49,349][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk -[11:30:49,359][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock -[11:30:49,360][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock -[11:30:49,360][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed -[11:30:49,362][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] -[11:30:49,362][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 -[11:30:49,362][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closed (reason: [shutdown]) -[11:30:49,362][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closing... 
(reason: [shutdown]) -[11:30:49,362][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] -[11:30:49,362][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk -[11:30:49,369][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock -[11:30:49,370][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock -[11:30:49,370][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed -[11:30:49,372][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] -[11:30:49,372][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 -[11:30:49,373][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closed (reason: [shutdown]) -[11:30:49,373][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close] -[11:30:49,374][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][indices_shutdown[T#1]] full cache clear, reason [close] -[11:30:49,375][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close] -[11:30:49,378][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/Xm20JUvBQCGan_kpOOa9Dw] closed... (reason [shutdown]) -[11:30:49,378][INFO ][org.elasticsearch.node.Node][Test worker] stopped -[11:30:49,379][INFO ][org.elasticsearch.node.Node][Test worker] closing ... -[11:30:49,386][INFO ][org.elasticsearch.node.Node][Test worker] closed -[11:30:49,392][INFO ][test ][Test worker] data files wiped -[11:30:51,394][INFO ][test ][Test worker] settings cluster name -[11:30:51,394][INFO ][test ][Test worker] starting nodes -[11:30:51,395][INFO ][test ][Test worker] settings={cluster.name=test-helper-cluster--joerg-1, http.enabled=false, path.home=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect, transport.type=local} -[11:30:51,397][INFO ][org.elasticsearch.node.Node][Test worker] initializing ... -[11:30:51,400][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] using node location [[NodePath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, spins=null}]], local_lock_id [0] -[11:30:51,401][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] node data locations details: - -> /Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, free_space [138.5gb], usable_space [138.3gb], total_space [931gb], spins? 
[unknown], mount [/ (/dev/disk0s2)], type [hfs] -[11:30:51,401][INFO ][org.elasticsearch.env.NodeEnvironment][Test worker] heap size [3.5gb], compressed ordinary object pointers [true] -[11:30:51,402][INFO ][org.elasticsearch.node.Node][Test worker] node name [QHkWj5x] derived from node ID [QHkWj5xaR6WKhgt-6zzIZw]; set [node.name] to override -[11:30:51,402][INFO ][org.elasticsearch.node.Node][Test worker] version[5.1.2], pid[7248], build[c8c4c16/2017-01-11T20:18:39.146Z], OS[Mac OS X/10.9.5/x86_64], JVM[Azul Systems, Inc./OpenJDK 64-Bit Server VM/1.8.0_112/25.112-b16] -[11:30:51,402][DEBUG][org.elasticsearch.node.Node][Test worker] using config [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/config], data [[/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data]], logs [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/logs], plugins [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins] -[11:30:51,402][DEBUG][org.elasticsearch.plugins.PluginsService][Test worker] [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins] directory does not exist. -[11:30:51,403][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] no modules loaded -[11:30:51,403][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] loaded plugin [org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin] -[11:30:51,408][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [force_merge], size [1], queue size [unbounded] -[11:30:51,408][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_started], core [1], max [16], keep alive [5m] -[11:30:51,409][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [listener], size [4], queue size [unbounded] -[11:30:51,409][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [index], size [8], queue size [200] -[11:30:51,409][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [refresh], core [1], max [4], keep alive [5m] -[11:30:51,409][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [generic], core [4], max [128], keep alive [30s] -[11:30:51,409][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [warmer], core [1], max [4], keep alive [5m] -[11:30:51,410][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [search], size [13], queue size [1k] -[11:30:51,410][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [flush], core [1], max [4], keep alive [5m] -[11:30:51,410][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_store], core [1], max [16], keep alive [5m] -[11:30:51,410][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [management], core [1], max [5], keep alive [5m] -[11:30:51,411][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [get], size [8], queue size [1k] -[11:30:51,411][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [bulk], size [8], queue size [50] -[11:30:51,411][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [snapshot], core [1], max [4], keep alive [5m] 
-[11:30:51,412][DEBUG][org.elasticsearch.script.ScriptService][Test worker] using script cache with max_size [100], expire [0s] -[11:30:51,417][DEBUG][org.elasticsearch.common.network.IfConfig][Test worker] configuration: +[16:15:12,012][DEBUG][org.elasticsearch.monitor.jvm.JvmGcMonitorService][Test worker] enabled [true], interval [1s], gc_threshold [{default=GcThreshold{name='default', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}, young=GcThreshold{name='young', warnThreshold=1000, infoThreshold=700, debugThreshold=400}, old=GcThreshold{name='old', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}}], overhead [50, 25, 10] +[16:15:12,028][DEBUG][org.elasticsearch.monitor.os.OsService][Test worker] using refresh_interval [1s] +[16:15:12,108][DEBUG][org.elasticsearch.monitor.process.ProcessService][Test worker] using refresh_interval [1s] +[16:15:12,128][DEBUG][org.elasticsearch.monitor.jvm.JvmService][Test worker] using refresh_interval [1s] +[16:15:12,129][DEBUG][org.elasticsearch.monitor.fs.FsService][Test worker] using refresh_interval [1s] +[16:15:12,135][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider][Test worker] using [cluster.routing.allocation.allow_rebalance] with [indices_all_active] +[16:15:12,138][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider][Test worker] using [cluster_concurrent_rebalance] with [2] +[16:15:12,241][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider][Test worker] using node_concurrent_outgoing_recoveries [2], node_concurrent_incoming_recoveries [2], node_initial_primaries_recoveries [4] +[16:15:13,548][DEBUG][org.elasticsearch.index.store.IndexStoreConfig][Test worker] using indices.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [0b] +[16:15:13,553][DEBUG][org.elasticsearch.indices.IndicesQueryCache][Test worker] using [node] query cache with size [364mb] max filter count [10000] +[16:15:13,567][DEBUG][org.elasticsearch.indices.IndexingMemoryController][Test worker] using indexing buffer size [364mb] with indices.memory.shard_inactive_time [5m], indices.memory.interval [5s] +[16:15:13,575][DEBUG][org.elasticsearch.transport.local.LocalTransport][Test worker] creating [8] workers, queue_size [-1] +[16:15:13,587][DEBUG][org.elasticsearch.discovery.zen.UnicastZenPing][Test worker] using initial hosts [0.0.0.0], with concurrent_connects [10], resolve_timeout [5s] +[16:15:13,593][DEBUG][org.elasticsearch.discovery.zen.ElectMasterService][Test worker] using minimum_master_nodes [-1] +[16:15:13,594][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][Test worker] using ping_timeout [3s], join.timeout [1m], master_election.ignore_non_master [false] +[16:15:13,598][DEBUG][org.elasticsearch.discovery.zen.MasterFaultDetection][Test worker] [master] uses ping_interval [1s], ping_timeout [30s], ping_retries [3] +[16:15:13,606][DEBUG][org.elasticsearch.discovery.zen.NodesFaultDetection][Test worker] [node ] uses ping_interval [1s], ping_timeout [30s], ping_retries [3] +[16:15:13,884][DEBUG][org.elasticsearch.indices.recovery.RecoverySettings][Test worker] using max_bytes_per_sec[40mb] +[16:15:14,093][DEBUG][org.elasticsearch.gateway.GatewayAllocator$InternalPrimaryShardAllocator][Test worker] using initial_shards [quorum] +[16:15:14,391][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, 
en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] +[16:15:14,456][DEBUG][org.elasticsearch.gateway.GatewayMetaState][Test worker] took 0s to load state +[16:15:14,465][INFO ][org.elasticsearch.node.Node][Test worker] initialized +[16:15:14,465][INFO ][org.elasticsearch.node.Node][Test worker] starting ... +[16:15:14,472][INFO ][org.elasticsearch.transport.TransportService][Test worker] publish_address {local[1]}, bound_addresses {local[1]} +[16:15:14,493][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [initial_join]: execute +[16:15:14,494][DEBUG][org.elasticsearch.node.Node][Test worker] waiting to join the cluster. timeout [30s] +[16:15:14,500][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [initial_join]: took [6ms] no change in cluster_state +[16:15:17,525][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[bewR05Z][generic][T#1]] filtered ping responses: (ignore_non_masters [false]) + --> ping_response{node [{bewR05Z}{bewR05ZwQo2sQ7yw5lONSg}{esS4HfhQRa2brUxmrNnN1A}{local}{local[1]}], id[7], master [null],cluster_state_version [-1], cluster_name[test-helper-cluster--joerg-1]} +[16:15:17,528][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[bewR05Z][generic][T#1]] elected as master, waiting for incoming joins ([0] needed) +[16:15:17,532][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: execute +[16:15:17,549][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] cluster state updated, version [1], source [zen-disco-elected-as-master ([0] nodes joined)] +[16:15:17,552][INFO ][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] new_master {bewR05Z}{bewR05ZwQo2sQ7yw5lONSg}{esS4HfhQRa2brUxmrNnN1A}{local}{local[1]}, reason: zen-disco-elected-as-master ([0] nodes joined) +[16:15:17,552][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] publishing cluster state version [1] +[16:15:17,555][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] set local cluster state to version 1 +[16:15:17,559][INFO ][org.elasticsearch.node.Node][Test worker] started +[16:15:17,559][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: took [27ms] done applying updated cluster_state (version: 1, uuid: qYen7CDnT0uhEwnmnuvGyA) +[16:15:17,577][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: execute +[16:15:17,578][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] cluster state updated, version [2], source [local-gateway-elected-state] +[16:15:17,578][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] publishing cluster state version [2] 
+[16:15:17,578][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] set local cluster state to version 2 +[16:15:17,583][INFO ][org.elasticsearch.gateway.GatewayService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] recovered [0] indices into cluster_state +[16:15:17,584][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: took [6ms] done applying updated cluster_state (version: 2, uuid: xj0xXEP0SVOTZrdzdOm9og) +[16:15:17,585][INFO ][test ][Test worker] nodes are started +[16:15:17,591][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: execute +[16:15:17,629][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] creating Index [[test/wuR-344MSUyxJc4_TzCJ4g]], shards [5]/[1] - reason [create index] +[16:15:17,646][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null] +[16:15:17,756][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] using dynamic[true] +[16:15:18,165][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] +[16:15:18,597][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] +[16:15:18,642][INFO ][org.elasticsearch.cluster.metadata.MetaDataCreateIndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test] creating index, cause [api], templates [], shards [5]/[1], mappings [article] +[16:15:18,683][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test] closing ... (reason [cleaning up after validating index on master]) +[16:15:18,684][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test/wuR-344MSUyxJc4_TzCJ4g] closing index service (reason [cleaning up after validating index on master]) +[16:15:18,685][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] clearing all bitsets because [close] +[16:15:18,687][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] full cache clear, reason [close] +[16:15:18,688][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] clearing all bitsets because [close] +[16:15:18,690][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test/wuR-344MSUyxJc4_TzCJ4g] closed... 
(reason [cleaning up after validating index on master]) +[16:15:18,694][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] cluster state updated, version [3], source [create-index [test], cause [api]] +[16:15:18,695][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] publishing cluster state version [3] +[16:15:18,695][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] set local cluster state to version 3 +[16:15:18,697][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [[test/wuR-344MSUyxJc4_TzCJ4g]] creating index +[16:15:18,699][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] creating Index [[test/wuR-344MSUyxJc4_TzCJ4g]], shards [5]/[1] - reason [create index] +[16:15:18,701][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null] +[16:15:18,704][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] using dynamic[true] +[16:15:18,706][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [[test/wuR-344MSUyxJc4_TzCJ4g]] adding mapping [article], source [{"article":{"properties":{"content":{"type":"langdetect","analyzer":"_keyword","include_in_all":false,"languages":["de","en","fr"]}}}}] +[16:15:18,925][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] +[16:15:19,158][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] +[16:15:19,162][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][2] creating shard +[16:15:19,175][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][2] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/wuR-344MSUyxJc4_TzCJ4g/2, shard=[test][2]}] +[16:15:19,176][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] creating shard_id [test][2] +[16:15:19,185][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] +[16:15:19,281][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED] +[16:15:19,299][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] 
+[16:15:19,301][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][3] creating shard +[16:15:19,304][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][3] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/wuR-344MSUyxJc4_TzCJ4g/3, shard=[test][3]}] +[16:15:19,305][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] creating shard_id [test][3] +[16:15:19,308][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] +[16:15:19,309][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED] +[16:15:19,310][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#3]] starting recovery from store ... +[16:15:19,312][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] +[16:15:19,313][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#1]] starting recovery from store ... +[16:15:19,313][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][1] creating shard +[16:15:19,315][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][1] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/wuR-344MSUyxJc4_TzCJ4g/1, shard=[test][1]}] +[16:15:19,316][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] creating shard_id [test][1] +[16:15:19,320][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] +[16:15:19,321][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED] +[16:15:19,325][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] +[16:15:19,325][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][0] creating shard +[16:15:19,325][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#2]] starting recovery from store ... 
+[16:15:19,327][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][0] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/wuR-344MSUyxJc4_TzCJ4g/0, shard=[test][0]}] +[16:15:19,328][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] creating shard_id [test][0] +[16:15:19,331][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] +[16:15:19,333][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED] +[16:15:19,336][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] +[16:15:19,337][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#4]] starting recovery from store ... +[16:15:19,350][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: took [1.7s] done applying updated cluster_state (version: 3, uuid: M3VF5uEPQ5ymwKcnmMoVJA) +[16:15:19,513][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[bewR05Z][generic][T#4]] wipe translog location - creating new translog +[16:15:19,514][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[bewR05Z][generic][T#2]] wipe translog location - creating new translog +[16:15:19,515][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[bewR05Z][generic][T#1]] wipe translog location - creating new translog +[16:15:19,515][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[bewR05Z][generic][T#3]] wipe translog location - creating new translog +[16:15:19,543][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[bewR05Z][generic][T#4]] no translog ID present in the current generation - creating one +[16:15:19,544][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[bewR05Z][generic][T#3]] no translog ID present in the current generation - creating one +[16:15:19,544][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[bewR05Z][generic][T#1]] no translog ID present in the current generation - creating one +[16:15:19,545][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[bewR05Z][generic][T#2]] no translog ID present in the current generation - creating one +[16:15:19,613][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] +[16:15:19,613][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#1]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] +[16:15:19,613][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#2]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] +[16:15:19,614][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#3]] recovery completed from [shard_store], took [444ms] +[16:15:19,615][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#2]] recovery completed from [shard_store], took [298ms] 
+[16:15:19,615][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#3]] [test][2] sending [internal:cluster/shard/started] to [bewR05ZwQo2sQ7yw5lONSg] for shard entry [shard id [[test][2]], allocation id [hiFBgviCQHOMNmmVPZm12w], primary term [0], message [after new shard recovery]] +[16:15:19,616][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#2]] [test][1] sending [internal:cluster/shard/started] to [bewR05ZwQo2sQ7yw5lONSg] for shard entry [shard id [[test][1]], allocation id [bRvnNWcsQiOqqtjcoUnxkg], primary term [0], message [after new shard recovery]] +[16:15:19,616][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#1]] recovery completed from [shard_store], took [310ms] +[16:15:19,616][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#1]] [test][3] sending [internal:cluster/shard/started] to [bewR05ZwQo2sQ7yw5lONSg] for shard entry [shard id [[test][3]], allocation id [mvASi-1VQxi4bC6WlqN-4g], primary term [0], message [after new shard recovery]] +[16:15:19,613][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#4]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] +[16:15:19,617][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#4]] recovery completed from [shard_store], took [287ms] +[16:15:19,618][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#4]] [test][0] sending [internal:cluster/shard/started] to [bewR05ZwQo2sQ7yw5lONSg] for shard entry [shard id [[test][0]], allocation id [fSMgvX2LTJqdEQwaWj3zfA], primary term [0], message [after new shard recovery]] +[16:15:19,619][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#2]] [test][1] received shard started for [shard id [[test][1]], allocation id [bRvnNWcsQiOqqtjcoUnxkg], primary term [0], message [after new shard recovery]] +[16:15:19,619][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#3]] [test][2] received shard started for [shard id [[test][2]], allocation id [hiFBgviCQHOMNmmVPZm12w], primary term [0], message [after new shard recovery]] +[16:15:19,619][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#1]] [test][3] received shard started for [shard id [[test][3]], allocation id [mvASi-1VQxi4bC6WlqN-4g], primary term [0], message [after new shard recovery]] +[16:15:19,619][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#4]] [test][0] received shard started for [shard id [[test][0]], allocation id [fSMgvX2LTJqdEQwaWj3zfA], primary term [0], message [after new shard recovery]] +[16:15:19,621][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][1]], allocation id [bRvnNWcsQiOqqtjcoUnxkg], primary term [0], message [after new shard recovery], shard id [[test][2]], allocation id [hiFBgviCQHOMNmmVPZm12w], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [fSMgvX2LTJqdEQwaWj3zfA], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [mvASi-1VQxi4bC6WlqN-4g], primary term [0], message [after new shard recovery]]]: execute 
+[16:15:19,665][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][1] starting shard [test][1], node[bewR05ZwQo2sQ7yw5lONSg], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=bRvnNWcsQiOqqtjcoUnxkg], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:15:18.647Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][1]], allocation id [bRvnNWcsQiOqqtjcoUnxkg], primary term [0], message [after new shard recovery]]) +[16:15:19,667][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][2] starting shard [test][2], node[bewR05ZwQo2sQ7yw5lONSg], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=hiFBgviCQHOMNmmVPZm12w], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:15:18.647Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][2]], allocation id [hiFBgviCQHOMNmmVPZm12w], primary term [0], message [after new shard recovery]]) +[16:15:19,667][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][0] starting shard [test][0], node[bewR05ZwQo2sQ7yw5lONSg], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=fSMgvX2LTJqdEQwaWj3zfA], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:15:18.647Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][0]], allocation id [fSMgvX2LTJqdEQwaWj3zfA], primary term [0], message [after new shard recovery]]) +[16:15:19,668][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][3] starting shard [test][3], node[bewR05ZwQo2sQ7yw5lONSg], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=mvASi-1VQxi4bC6WlqN-4g], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:15:18.647Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][3]], allocation id [mvASi-1VQxi4bC6WlqN-4g], primary term [0], message [after new shard recovery]]) +[16:15:19,683][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] cluster state updated, version [4], source [shard-started[shard id [[test][1]], allocation id [bRvnNWcsQiOqqtjcoUnxkg], primary term [0], message [after new shard recovery], shard id [[test][2]], allocation id [hiFBgviCQHOMNmmVPZm12w], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [fSMgvX2LTJqdEQwaWj3zfA], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [mvASi-1VQxi4bC6WlqN-4g], primary term [0], message [after new shard recovery]]] +[16:15:19,684][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] publishing cluster state version [4] +[16:15:19,685][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] set local cluster state to version 4 +[16:15:19,688][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] +[16:15:19,690][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] 
+[16:15:19,692][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] +[16:15:19,692][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][4] creating shard +[16:15:19,695][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][4] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/wuR-344MSUyxJc4_TzCJ4g/4, shard=[test][4]}] +[16:15:19,696][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] creating shard_id [test][4] +[16:15:19,699][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] +[16:15:19,701][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED] +[16:15:19,705][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] +[16:15:19,706][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#2]] starting recovery from store ... +[16:15:19,707][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] +[16:15:19,712][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[bewR05Z][generic][T#2]] wipe translog location - creating new translog +[16:15:19,714][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][1]], allocation id [bRvnNWcsQiOqqtjcoUnxkg], primary term [0], message [after new shard recovery], shard id [[test][2]], allocation id [hiFBgviCQHOMNmmVPZm12w], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [fSMgvX2LTJqdEQwaWj3zfA], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [mvASi-1VQxi4bC6WlqN-4g], primary term [0], message [after new shard recovery]]]: took [91ms] done applying updated cluster_state (version: 4, uuid: -kx_a1VVT964oBybzZGUZQ) +[16:15:19,716][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[bewR05Z][generic][T#2]] no translog ID present in the current generation - creating one +[16:15:19,723][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#2]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] +[16:15:19,723][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#2]] recovery completed from [shard_store], took [30ms] +[16:15:19,724][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#2]] [test][4] sending [internal:cluster/shard/started] to [bewR05ZwQo2sQ7yw5lONSg] for shard entry [shard id [[test][4]], allocation id [O2Q9NrVtQKGkB0MwWXeqxw], primary term [0], message [after new shard recovery]] +[16:15:19,724][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#2]] [test][4] received shard started for [shard id [[test][4]], allocation id [O2Q9NrVtQKGkB0MwWXeqxw], primary term [0], message 
[after new shard recovery]] +[16:15:19,725][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [O2Q9NrVtQKGkB0MwWXeqxw], primary term [0], message [after new shard recovery]]]: execute +[16:15:19,726][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][4] starting shard [test][4], node[bewR05ZwQo2sQ7yw5lONSg], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=O2Q9NrVtQKGkB0MwWXeqxw], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:15:18.647Z], delayed=false, allocation_status[deciders_throttled]] (shard started task: [shard id [[test][4]], allocation id [O2Q9NrVtQKGkB0MwWXeqxw], primary term [0], message [after new shard recovery]]) +[16:15:19,729][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] cluster state updated, version [5], source [shard-started[shard id [[test][4]], allocation id [O2Q9NrVtQKGkB0MwWXeqxw], primary term [0], message [after new shard recovery]]] +[16:15:19,730][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] publishing cluster state version [5] +[16:15:19,730][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] set local cluster state to version 5 +[16:15:19,732][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] +[16:15:19,743][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [O2Q9NrVtQKGkB0MwWXeqxw], primary term [0], message [after new shard recovery]]]: took [17ms] done applying updated cluster_state (version: 5, uuid: tt-EHy-QQ4qrQN4Ok8uHcw) +[16:15:47,568][INFO ][org.elasticsearch.cluster.routing.allocation.DiskThresholdMonitor][elasticsearch[bewR05Z][management][T#2]] low disk watermark [85%] exceeded on [bewR05ZwQo2sQ7yw5lONSg][bewR05Z][/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0] free: 137.8gb[14.8%], replicas will not be assigned to this node +[16:15:49,789][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [put-mapping[article]]: execute +[16:15:49,792][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] using dynamic[true] +[16:15:49,960][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] +[16:15:50,131][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] 
+[16:15:50,355][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] +[16:15:50,357][DEBUG][org.elasticsearch.cluster.metadata.MetaDataMappingService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test/wuR-344MSUyxJc4_TzCJ4g] update_mapping [article] with source [{"article":{"properties":{"content":{"type":"langdetect","analyzer":"_keyword","include_in_all":false,"languages":["de","en","fr"]},"title":{"type":"text","fields":{"keyword":{"type":"keyword","ignore_above":256}}}}}}] +[16:15:50,358][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] cluster state updated, version [6], source [put-mapping[article]] +[16:15:50,358][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] publishing cluster state version [6] +[16:15:50,358][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] set local cluster state to version 6 +[16:15:50,358][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [[test/wuR-344MSUyxJc4_TzCJ4g]] updating mapping [article], source [{"article":{"properties":{"content":{"type":"langdetect","analyzer":"_keyword","include_in_all":false,"languages":["de","en","fr"]},"title":{"type":"text","fields":{"keyword":{"type":"keyword","ignore_above":256}}}}}}] +[16:15:50,508][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] +[16:15:50,700][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] +[16:15:50,707][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [put-mapping[article]]: took [916ms] done applying updated cluster_state (version: 6, uuid: hccsN_0QQe-qiUaEJPa9mw) +[16:15:51,011][INFO ][test ][Test worker] stopping nodes +[16:15:51,012][INFO ][org.elasticsearch.node.Node][Test worker] stopping ... +[16:15:51,015][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test] closing ... (reason [shutdown]) +[16:15:51,015][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/wuR-344MSUyxJc4_TzCJ4g] closing index service (reason [shutdown]) +[16:15:51,015][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closing... 
(reason: [shutdown]) +[16:15:51,016][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] +[16:15:51,016][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk +[16:15:51,017][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock +[16:15:51,017][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock +[16:15:51,017][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed +[16:15:51,018][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] +[16:15:51,019][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 +[16:15:51,019][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closed (reason: [shutdown]) +[16:15:51,019][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closing... (reason: [shutdown]) +[16:15:51,019][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] +[16:15:51,019][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk +[16:15:51,019][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock +[16:15:51,019][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock +[16:15:51,020][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed +[16:15:51,022][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] +[16:15:51,022][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 +[16:15:51,022][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closed (reason: [shutdown]) +[16:15:51,023][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closing... (reason: [shutdown]) +[16:15:51,023][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] +[16:15:51,023][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk +[16:15:51,038][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock +[16:15:51,038][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock +[16:15:51,038][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed +[16:15:51,040][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] +[16:15:51,040][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 +[16:15:51,040][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closed (reason: [shutdown]) +[16:15:51,040][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closing... 
(reason: [shutdown]) +[16:15:51,040][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] +[16:15:51,040][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk +[16:15:51,047][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock +[16:15:51,047][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock +[16:15:51,047][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed +[16:15:51,048][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] +[16:15:51,048][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 +[16:15:51,048][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closed (reason: [shutdown]) +[16:15:51,048][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closing... (reason: [shutdown]) +[16:15:51,048][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] +[16:15:51,049][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk +[16:15:51,054][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock +[16:15:51,054][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock +[16:15:51,055][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed +[16:15:51,055][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] +[16:15:51,056][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 +[16:15:51,056][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closed (reason: [shutdown]) +[16:15:51,056][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close] +[16:15:51,056][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][indices_shutdown[T#1]] full cache clear, reason [close] +[16:15:51,057][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close] +[16:15:51,058][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/wuR-344MSUyxJc4_TzCJ4g] closed... (reason [shutdown]) +[16:15:51,058][INFO ][org.elasticsearch.node.Node][Test worker] stopped +[16:15:51,058][INFO ][org.elasticsearch.node.Node][Test worker] closing ... +[16:15:51,065][INFO ][org.elasticsearch.node.Node][Test worker] closed +[16:15:51,071][INFO ][test ][Test worker] data files wiped +[16:15:53,074][INFO ][test ][Test worker] settings cluster name +[16:15:53,075][INFO ][test ][Test worker] starting nodes +[16:15:53,077][INFO ][test ][Test worker] settings={cluster.name=test-helper-cluster--joerg-1, http.enabled=false, path.home=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect, transport.type=local} +[16:15:53,079][INFO ][org.elasticsearch.node.Node][Test worker] initializing ... 
+[16:15:53,082][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] using node location [[NodePath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, spins=null}]], local_lock_id [0] +[16:15:53,082][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] node data locations details: + -> /Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, free_space [138.1gb], usable_space [137.8gb], total_space [931gb], spins? [unknown], mount [/ (/dev/disk0s2)], type [hfs] +[16:15:53,082][INFO ][org.elasticsearch.env.NodeEnvironment][Test worker] heap size [3.5gb], compressed ordinary object pointers [true] +[16:15:53,083][INFO ][org.elasticsearch.node.Node][Test worker] node name [Cu9MbJQ] derived from node ID [Cu9MbJQaQ6yK_lE9o5Kj9Q]; set [node.name] to override +[16:15:53,083][INFO ][org.elasticsearch.node.Node][Test worker] version[5.1.2], pid[9098], build[c8c4c16/2017-01-11T20:18:39.146Z], OS[Mac OS X/10.9.5/x86_64], JVM[Azul Systems, Inc./OpenJDK 64-Bit Server VM/1.8.0_112/25.112-b16] +[16:15:53,083][DEBUG][org.elasticsearch.node.Node][Test worker] using config [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/config], data [[/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data]], logs [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/logs], plugins [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins] +[16:15:53,083][DEBUG][org.elasticsearch.plugins.PluginsService][Test worker] [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins] directory does not exist. +[16:15:53,084][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] no modules loaded +[16:15:53,084][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] loaded plugin [org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin] +[16:15:53,084][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [force_merge], size [1], queue size [unbounded] +[16:15:53,085][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_started], core [1], max [16], keep alive [5m] +[16:15:53,085][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [listener], size [4], queue size [unbounded] +[16:15:53,085][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [index], size [8], queue size [200] +[16:15:53,085][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [refresh], core [1], max [4], keep alive [5m] +[16:15:53,085][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [generic], core [4], max [128], keep alive [30s] +[16:15:53,085][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [warmer], core [1], max [4], keep alive [5m] +[16:15:53,086][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [search], size [13], queue size [1k] +[16:15:53,086][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [flush], core [1], max [4], keep alive [5m] +[16:15:53,086][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_store], core [1], max [16], keep alive [5m] +[16:15:53,086][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [management], core [1], max 
[5], keep alive [5m] +[16:15:53,086][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [get], size [8], queue size [1k] +[16:15:53,086][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [bulk], size [8], queue size [50] +[16:15:53,086][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [snapshot], core [1], max [4], keep alive [5m] +[16:15:53,087][DEBUG][org.elasticsearch.script.ScriptService][Test worker] using script cache with max_size [100], expire [0s] +[16:15:53,090][DEBUG][org.elasticsearch.common.network.IfConfig][Test worker] configuration: lo0 inet 127.0.0.1 netmask:255.0.0.0 scope:host @@ -411,57 +402,57 @@

    Standard output

    hardware 68:5B:35:BC:46:72 UP MULTICAST mtu:1500 index:9 -[11:30:51,419][DEBUG][org.elasticsearch.monitor.jvm.JvmGcMonitorService][Test worker] enabled [true], interval [1s], gc_threshold [{default=GcThreshold{name='default', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}, young=GcThreshold{name='young', warnThreshold=1000, infoThreshold=700, debugThreshold=400}, old=GcThreshold{name='old', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}}], overhead [50, 25, 10] -[11:30:51,420][DEBUG][org.elasticsearch.monitor.os.OsService][Test worker] using refresh_interval [1s] -[11:30:51,421][DEBUG][org.elasticsearch.monitor.process.ProcessService][Test worker] using refresh_interval [1s] -[11:30:51,422][DEBUG][org.elasticsearch.monitor.jvm.JvmService][Test worker] using refresh_interval [1s] -[11:30:51,423][DEBUG][org.elasticsearch.monitor.fs.FsService][Test worker] using refresh_interval [1s] -[11:30:51,424][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider][Test worker] using [cluster.routing.allocation.allow_rebalance] with [indices_all_active] -[11:30:51,425][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider][Test worker] using [cluster_concurrent_rebalance] with [2] -[11:30:51,426][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider][Test worker] using node_concurrent_outgoing_recoveries [2], node_concurrent_incoming_recoveries [2], node_initial_primaries_recoveries [4] -[11:30:51,431][DEBUG][org.elasticsearch.index.store.IndexStoreConfig][Test worker] using indices.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [0b] -[11:30:51,431][DEBUG][org.elasticsearch.indices.IndicesQueryCache][Test worker] using [node] query cache with size [364mb] max filter count [10000] -[11:30:51,432][DEBUG][org.elasticsearch.indices.IndexingMemoryController][Test worker] using indexing buffer size [364mb] with indices.memory.shard_inactive_time [5m], indices.memory.interval [5s] -[11:30:51,432][DEBUG][org.elasticsearch.transport.local.LocalTransport][Test worker] creating [8] workers, queue_size [-1] -[11:30:51,433][DEBUG][org.elasticsearch.discovery.zen.UnicastZenPing][Test worker] using initial hosts [0.0.0.0], with concurrent_connects [10], resolve_timeout [5s] -[11:30:51,434][DEBUG][org.elasticsearch.discovery.zen.ElectMasterService][Test worker] using minimum_master_nodes [-1] -[11:30:51,434][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][Test worker] using ping_timeout [3s], join.timeout [1m], master_election.ignore_non_master [false] -[11:30:51,434][DEBUG][org.elasticsearch.discovery.zen.MasterFaultDetection][Test worker] [master] uses ping_interval [1s], ping_timeout [30s], ping_retries [3] -[11:30:51,434][DEBUG][org.elasticsearch.discovery.zen.NodesFaultDetection][Test worker] [node ] uses ping_interval [1s], ping_timeout [30s], ping_retries [3] -[11:30:51,470][DEBUG][org.elasticsearch.indices.recovery.RecoverySettings][Test worker] using max_bytes_per_sec[40mb] -[11:30:51,485][DEBUG][org.elasticsearch.gateway.GatewayAllocator$InternalPrimaryShardAllocator][Test worker] using initial_shards [quorum] -[11:30:51,659][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] 
-[11:30:51,665][DEBUG][org.elasticsearch.gateway.GatewayMetaState][Test worker] took 0s to load state -[11:30:51,667][INFO ][org.elasticsearch.node.Node][Test worker] initialized -[11:30:51,667][INFO ][org.elasticsearch.node.Node][Test worker] starting ... -[11:30:51,668][INFO ][org.elasticsearch.transport.TransportService][Test worker] publish_address {local[2]}, bound_addresses {local[2]} -[11:30:51,668][DEBUG][org.elasticsearch.node.Node][Test worker] waiting to join the cluster. timeout [30s] -[11:30:51,669][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] processing [initial_join]: execute -[11:30:51,670][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] processing [initial_join]: took [0s] no change in cluster_state -[11:30:54,675][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[QHkWj5x][generic][T#1]] filtered ping responses: (ignore_non_masters [false]) - --> ping_response{node [{QHkWj5x}{QHkWj5xaR6WKhgt-6zzIZw}{j8xbWB8JRiKlk56pTses4g}{local}{local[2]}], id[14], master [null],cluster_state_version [-1], cluster_name[test-helper-cluster--joerg-1]} -[11:30:54,676][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[QHkWj5x][generic][T#1]] elected as master, waiting for incoming joins ([0] needed) -[11:30:54,677][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: execute -[11:30:54,678][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] cluster state updated, version [1], source [zen-disco-elected-as-master ([0] nodes joined)] -[11:30:54,678][INFO ][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] new_master {QHkWj5x}{QHkWj5xaR6WKhgt-6zzIZw}{j8xbWB8JRiKlk56pTses4g}{local}{local[2]}, reason: zen-disco-elected-as-master ([0] nodes joined) -[11:30:54,678][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] publishing cluster state version [1] -[11:30:54,678][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] set local cluster state to version 1 -[11:30:54,679][INFO ][org.elasticsearch.node.Node][Test worker] started -[11:30:54,679][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: took [2ms] done applying updated cluster_state (version: 1, uuid: 0k0iTRoKTU2uk7EQjjO4Xg) -[11:30:54,681][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: execute -[11:30:54,681][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] cluster state updated, version [2], source [local-gateway-elected-state] -[11:30:54,682][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] publishing cluster state version [2] -[11:30:54,682][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] set local cluster state to version 2 -[11:30:54,684][INFO 
][org.elasticsearch.gateway.GatewayService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] recovered [0] indices into cluster_state -[11:30:54,685][INFO ][test ][Test worker] nodes are started -[11:30:54,685][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[QHkWj5x][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: took [3ms] done applying updated cluster_state (version: 2, uuid: r5OBMZm5SYKlSfoDj4izKA) -[11:30:55,185][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] -[11:30:55,186][INFO ][test ][Test worker] stopping nodes -[11:30:55,186][INFO ][org.elasticsearch.node.Node][Test worker] stopping ... -[11:30:55,188][INFO ][org.elasticsearch.node.Node][Test worker] stopped -[11:30:55,188][INFO ][org.elasticsearch.node.Node][Test worker] closing ... -[11:30:55,191][INFO ][org.elasticsearch.node.Node][Test worker] closed -[11:30:55,192][INFO ][test ][Test worker] data files wiped +[16:15:53,091][DEBUG][org.elasticsearch.monitor.jvm.JvmGcMonitorService][Test worker] enabled [true], interval [1s], gc_threshold [{default=GcThreshold{name='default', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}, young=GcThreshold{name='young', warnThreshold=1000, infoThreshold=700, debugThreshold=400}, old=GcThreshold{name='old', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}}], overhead [50, 25, 10] +[16:15:53,092][DEBUG][org.elasticsearch.monitor.os.OsService][Test worker] using refresh_interval [1s] +[16:15:53,092][DEBUG][org.elasticsearch.monitor.process.ProcessService][Test worker] using refresh_interval [1s] +[16:15:53,092][DEBUG][org.elasticsearch.monitor.jvm.JvmService][Test worker] using refresh_interval [1s] +[16:15:53,092][DEBUG][org.elasticsearch.monitor.fs.FsService][Test worker] using refresh_interval [1s] +[16:15:53,092][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider][Test worker] using [cluster.routing.allocation.allow_rebalance] with [indices_all_active] +[16:15:53,093][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider][Test worker] using [cluster_concurrent_rebalance] with [2] +[16:15:53,093][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider][Test worker] using node_concurrent_outgoing_recoveries [2], node_concurrent_incoming_recoveries [2], node_initial_primaries_recoveries [4] +[16:15:53,095][DEBUG][org.elasticsearch.index.store.IndexStoreConfig][Test worker] using indices.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [0b] +[16:15:53,096][DEBUG][org.elasticsearch.indices.IndicesQueryCache][Test worker] using [node] query cache with size [364mb] max filter count [10000] +[16:15:53,096][DEBUG][org.elasticsearch.indices.IndexingMemoryController][Test worker] using indexing buffer size [364mb] with indices.memory.shard_inactive_time [5m], indices.memory.interval [5s] +[16:15:53,097][DEBUG][org.elasticsearch.transport.local.LocalTransport][Test worker] creating [8] workers, queue_size [-1] +[16:15:53,098][DEBUG][org.elasticsearch.discovery.zen.UnicastZenPing][Test worker] using initial hosts [0.0.0.0], with concurrent_connects [10], resolve_timeout [5s] 
+[16:15:53,099][DEBUG][org.elasticsearch.discovery.zen.ElectMasterService][Test worker] using minimum_master_nodes [-1] +[16:15:53,100][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][Test worker] using ping_timeout [3s], join.timeout [1m], master_election.ignore_non_master [false] +[16:15:53,100][DEBUG][org.elasticsearch.discovery.zen.MasterFaultDetection][Test worker] [master] uses ping_interval [1s], ping_timeout [30s], ping_retries [3] +[16:15:53,101][DEBUG][org.elasticsearch.discovery.zen.NodesFaultDetection][Test worker] [node ] uses ping_interval [1s], ping_timeout [30s], ping_retries [3] +[16:15:53,132][DEBUG][org.elasticsearch.indices.recovery.RecoverySettings][Test worker] using max_bytes_per_sec[40mb] +[16:15:53,146][DEBUG][org.elasticsearch.gateway.GatewayAllocator$InternalPrimaryShardAllocator][Test worker] using initial_shards [quorum] +[16:15:53,317][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] +[16:15:53,324][DEBUG][org.elasticsearch.gateway.GatewayMetaState][Test worker] took 0s to load state +[16:15:53,325][INFO ][org.elasticsearch.node.Node][Test worker] initialized +[16:15:53,325][INFO ][org.elasticsearch.node.Node][Test worker] starting ... +[16:15:53,326][INFO ][org.elasticsearch.transport.TransportService][Test worker] publish_address {local[2]}, bound_addresses {local[2]} +[16:15:53,327][DEBUG][org.elasticsearch.node.Node][Test worker] waiting to join the cluster. timeout [30s] +[16:15:53,327][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] processing [initial_join]: execute +[16:15:53,328][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] processing [initial_join]: took [0s] no change in cluster_state +[16:15:56,332][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[Cu9MbJQ][generic][T#1]] filtered ping responses: (ignore_non_masters [false]) + --> ping_response{node [{Cu9MbJQ}{Cu9MbJQaQ6yK_lE9o5Kj9Q}{vcqlyfm5RQy0B9pDRCDhhw}{local}{local[2]}], id[14], master [null],cluster_state_version [-1], cluster_name[test-helper-cluster--joerg-1]} +[16:15:56,333][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[Cu9MbJQ][generic][T#1]] elected as master, waiting for incoming joins ([0] needed) +[16:15:56,333][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: execute +[16:15:56,334][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] cluster state updated, version [1], source [zen-disco-elected-as-master ([0] nodes joined)] +[16:15:56,334][INFO ][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] new_master {Cu9MbJQ}{Cu9MbJQaQ6yK_lE9o5Kj9Q}{vcqlyfm5RQy0B9pDRCDhhw}{local}{local[2]}, reason: zen-disco-elected-as-master ([0] nodes joined) +[16:15:56,334][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] publishing cluster state version [1] 
+[16:15:56,335][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] set local cluster state to version 1 +[16:15:56,335][INFO ][org.elasticsearch.node.Node][Test worker] started +[16:15:56,336][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: took [2ms] done applying updated cluster_state (version: 1, uuid: 4DxornPUTw-_Fcvg3x73fw) +[16:15:56,337][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: execute +[16:15:56,337][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] cluster state updated, version [2], source [local-gateway-elected-state] +[16:15:56,337][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] publishing cluster state version [2] +[16:15:56,337][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] set local cluster state to version 2 +[16:15:56,341][INFO ][test ][Test worker] nodes are started +[16:15:56,341][INFO ][org.elasticsearch.gateway.GatewayService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] recovered [0] indices into cluster_state +[16:15:56,341][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: took [4ms] done applying updated cluster_state (version: 2, uuid: nj3qDzE3QM27TjZgz1zLKQ) +[16:15:57,174][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] +[16:15:57,176][INFO ][test ][Test worker] stopping nodes +[16:15:57,176][INFO ][org.elasticsearch.node.Node][Test worker] stopping ... +[16:15:57,177][INFO ][org.elasticsearch.node.Node][Test worker] stopped +[16:15:57,178][INFO ][org.elasticsearch.node.Node][Test worker] closing ... +[16:15:57,180][INFO ][org.elasticsearch.node.Node][Test worker] closed +[16:15:57,181][INFO ][test ][Test worker] data files wiped
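
    For orientation, the test output above and below follows a fixed pattern: the helper prints the node settings, starts a node with the local in-JVM transport, the langdetect plugin reports its language detection service as installed, and the node is finally stopped and its data files wiped. As a hedged sketch, assuming a helper class of this shape (the class and method names are illustrative; only the setting keys and values are taken from the logged `settings={...}` line), the settings for such a local test node could be built like this:

        import org.elasticsearch.common.settings.Settings;

        public class TestNodeSettingsSketch {
            // Settings equivalent to the "settings={cluster.name=..., http.enabled=false,
            // path.home=..., transport.type=local}" line printed by the test helper.
            public static Settings localTestNodeSettings(String clusterName, String homePath) {
                return Settings.builder()
                        .put("cluster.name", clusterName)  // e.g. "test-helper-cluster--joerg-1"
                        .put("http.enabled", false)        // the test node exposes no HTTP layer
                        .put("path.home", homePath)        // project directory used as the node home
                        .put("transport.type", "local")    // in-JVM transport
                        .build();
            }
        }

    With HTTP disabled and the local transport, the node runs entirely inside the test JVM, which is why the log shows addresses like `publish_address {local[2]}` rather than a network socket.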
    @@ -473,7 +464,7 @@

    Standard output

    Generated by
    -Gradle 3.2.1 at 26.01.2017 11:31:14
    +Gradle 3.2.1 at 26.01.2017 16:16:15

    diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectBinaryTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectBinaryTest.html
    index f422329..0261732 100644
    --- a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectBinaryTest.html
    +++ b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectBinaryTest.html
    @@ -41,7 +41,7 @@

    Class org.xbib.elasticsearch.index.mapper.langdetect.LangDetectBinaryTest
    -6.790s
    +6.736s

    duration

    @@ -79,7 +79,7 @@

    Tests

    testLangDetectBinary
    -6.790s
    +6.736s
    passed
    @@ -87,36 +87,36 @@

    Tests

    Standard output

    -[11:30:57,198][INFO ][test                     ][Test worker] settings cluster name
    -[11:30:57,198][INFO ][test                     ][Test worker] starting nodes
    -[11:30:57,198][INFO ][test                     ][Test worker] settings={cluster.name=test-helper-cluster--joerg-1, http.enabled=false, path.home=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect, transport.type=local}
    -[11:30:57,200][INFO ][org.elasticsearch.node.Node][Test worker] initializing ...
    -[11:30:57,205][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] using node location [[NodePath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, spins=null}]], local_lock_id [0]
    -[11:30:57,205][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] node data locations details:
    - -> /Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, free_space [138.5gb], usable_space [138.3gb], total_space [931gb], spins? [unknown], mount [/ (/dev/disk0s2)], type [hfs]
    -[11:30:57,205][INFO ][org.elasticsearch.env.NodeEnvironment][Test worker] heap size [3.5gb], compressed ordinary object pointers [true]
    -[11:30:57,207][INFO ][org.elasticsearch.node.Node][Test worker] node name [UDgdZ4w] derived from node ID [UDgdZ4wIRhaOBtqKJUSqGw]; set [node.name] to override
    -[11:30:57,208][INFO ][org.elasticsearch.node.Node][Test worker] version[5.1.2], pid[7248], build[c8c4c16/2017-01-11T20:18:39.146Z], OS[Mac OS X/10.9.5/x86_64], JVM[Azul Systems, Inc./OpenJDK 64-Bit Server VM/1.8.0_112/25.112-b16]
    -[11:30:57,208][DEBUG][org.elasticsearch.node.Node][Test worker] using config [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/config], data [[/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data]], logs [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/logs], plugins [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins]
    -[11:30:57,209][DEBUG][org.elasticsearch.plugins.PluginsService][Test worker] [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins] directory does not exist.
    -[11:30:57,210][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] no modules loaded
    -[11:30:57,210][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] loaded plugin [org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin]
    -[11:30:57,212][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [force_merge], size [1], queue size [unbounded]
    -[11:30:57,212][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_started], core [1], max [16], keep alive [5m]
    -[11:30:57,213][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [listener], size [4], queue size [unbounded]
    -[11:30:57,213][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [index], size [8], queue size [200]
    -[11:30:57,213][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [refresh], core [1], max [4], keep alive [5m]
    -[11:30:57,214][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [generic], core [4], max [128], keep alive [30s]
    -[11:30:57,214][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [warmer], core [1], max [4], keep alive [5m]
    -[11:30:57,215][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [search], size [13], queue size [1k]
    -[11:30:57,215][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [flush], core [1], max [4], keep alive [5m]
    -[11:30:57,216][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_store], core [1], max [16], keep alive [5m]
    -[11:30:57,216][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [management], core [1], max [5], keep alive [5m]
    -[11:30:57,217][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [get], size [8], queue size [1k]
    -[11:30:57,217][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [bulk], size [8], queue size [50]
    -[11:30:57,217][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [snapshot], core [1], max [4], keep alive [5m]
    -[11:30:57,218][DEBUG][org.elasticsearch.script.ScriptService][Test worker] using script cache with max_size [100], expire [0s]
    -[11:30:57,223][DEBUG][org.elasticsearch.common.network.IfConfig][Test worker] configuration:
    +[16:15:59,186][INFO ][test                     ][Test worker] settings cluster name
    +[16:15:59,187][INFO ][test                     ][Test worker] starting nodes
    +[16:15:59,187][INFO ][test                     ][Test worker] settings={cluster.name=test-helper-cluster--joerg-1, http.enabled=false, path.home=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect, transport.type=local}
    +[16:15:59,190][INFO ][org.elasticsearch.node.Node][Test worker] initializing ...
    +[16:15:59,197][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] using node location [[NodePath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, spins=null}]], local_lock_id [0]
    +[16:15:59,197][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] node data locations details:
    + -> /Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, free_space [138.1gb], usable_space [137.8gb], total_space [931gb], spins? [unknown], mount [/ (/dev/disk0s2)], type [hfs]
    +[16:15:59,197][INFO ][org.elasticsearch.env.NodeEnvironment][Test worker] heap size [3.5gb], compressed ordinary object pointers [true]
    +[16:15:59,198][INFO ][org.elasticsearch.node.Node][Test worker] node name [_vWPEAD] derived from node ID [_vWPEAD8R3q57wu4rbdE0A]; set [node.name] to override
    +[16:15:59,198][INFO ][org.elasticsearch.node.Node][Test worker] version[5.1.2], pid[9098], build[c8c4c16/2017-01-11T20:18:39.146Z], OS[Mac OS X/10.9.5/x86_64], JVM[Azul Systems, Inc./OpenJDK 64-Bit Server VM/1.8.0_112/25.112-b16]
    +[16:15:59,198][DEBUG][org.elasticsearch.node.Node][Test worker] using config [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/config], data [[/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data]], logs [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/logs], plugins [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins]
    +[16:15:59,198][DEBUG][org.elasticsearch.plugins.PluginsService][Test worker] [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins] directory does not exist.
    +[16:15:59,199][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] no modules loaded
    +[16:15:59,199][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] loaded plugin [org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin]
    +[16:15:59,200][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [force_merge], size [1], queue size [unbounded]
    +[16:15:59,200][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_started], core [1], max [16], keep alive [5m]
    +[16:15:59,200][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [listener], size [4], queue size [unbounded]
    +[16:15:59,200][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [index], size [8], queue size [200]
    +[16:15:59,200][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [refresh], core [1], max [4], keep alive [5m]
    +[16:15:59,200][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [generic], core [4], max [128], keep alive [30s]
    +[16:15:59,201][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [warmer], core [1], max [4], keep alive [5m]
    +[16:15:59,201][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [search], size [13], queue size [1k]
    +[16:15:59,201][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [flush], core [1], max [4], keep alive [5m]
    +[16:15:59,201][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_store], core [1], max [16], keep alive [5m]
    +[16:15:59,202][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [management], core [1], max [5], keep alive [5m]
    +[16:15:59,202][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [get], size [8], queue size [1k]
    +[16:15:59,202][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [bulk], size [8], queue size [50]
    +[16:15:59,202][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [snapshot], core [1], max [4], keep alive [5m]
    +[16:15:59,203][DEBUG][org.elasticsearch.script.ScriptService][Test worker] using script cache with max_size [100], expire [0s]
    +[16:15:59,208][DEBUG][org.elasticsearch.common.network.IfConfig][Test worker] configuration:
     
     lo0
             inet 127.0.0.1 netmask:255.0.0.0 scope:host
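
    Further down in this report, the line `adding mapping [someType], source [{"someType":{"properties":{"content":...}}}]` shows the mapping that LangDetectBinaryTest installs: a `content` text field with a `language` sub-field of type `langdetect`, analyzed with `_keyword`, excluded from `_all`, and with the `binary` option enabled (presumably so the test can pass base64-encoded content). A hedged sketch of building that mapping with the XContent API follows; the class and method names are illustrative, while the field options are copied from the logged mapping source.

        import org.elasticsearch.common.xcontent.XContentBuilder;
        import org.elasticsearch.common.xcontent.XContentFactory;

        public class LangdetectBinaryMappingSketch {
            // Rebuilds the "someType" mapping seen in the test log below.
            public static XContentBuilder someTypeMapping() throws Exception {
                return XContentFactory.jsonBuilder()
                        .startObject()
                          .startObject("someType")
                            .startObject("properties")
                              .startObject("content")
                                .field("type", "text")
                                .startObject("fields")
                                  .startObject("language")
                                    .field("type", "langdetect")
                                    .field("analyzer", "_keyword")
                                    .field("include_in_all", false)
                                    .field("binary", "true")
                                  .endObject()
                                .endObject()
                              .endObject()
                            .endObject()
                          .endObject()
                        .endObject();
            }
        }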
    @@ -130,230 +130,227 @@ 

    Standard output

    hardware 68:5B:35:BC:46:72 UP MULTICAST mtu:1500 index:9 -[11:30:57,225][DEBUG][org.elasticsearch.monitor.jvm.JvmGcMonitorService][Test worker] enabled [true], interval [1s], gc_threshold [{default=GcThreshold{name='default', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}, young=GcThreshold{name='young', warnThreshold=1000, infoThreshold=700, debugThreshold=400}, old=GcThreshold{name='old', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}}], overhead [50, 25, 10] -[11:30:57,225][DEBUG][org.elasticsearch.monitor.os.OsService][Test worker] using refresh_interval [1s] -[11:30:57,225][DEBUG][org.elasticsearch.monitor.process.ProcessService][Test worker] using refresh_interval [1s] -[11:30:57,225][DEBUG][org.elasticsearch.monitor.jvm.JvmService][Test worker] using refresh_interval [1s] -[11:30:57,226][DEBUG][org.elasticsearch.monitor.fs.FsService][Test worker] using refresh_interval [1s] -[11:30:57,226][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider][Test worker] using [cluster.routing.allocation.allow_rebalance] with [indices_all_active] -[11:30:57,226][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider][Test worker] using [cluster_concurrent_rebalance] with [2] -[11:30:57,227][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider][Test worker] using node_concurrent_outgoing_recoveries [2], node_concurrent_incoming_recoveries [2], node_initial_primaries_recoveries [4] -[11:30:57,230][DEBUG][org.elasticsearch.index.store.IndexStoreConfig][Test worker] using indices.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [0b] -[11:30:57,230][DEBUG][org.elasticsearch.indices.IndicesQueryCache][Test worker] using [node] query cache with size [364mb] max filter count [10000] -[11:30:57,231][DEBUG][org.elasticsearch.indices.IndexingMemoryController][Test worker] using indexing buffer size [364mb] with indices.memory.shard_inactive_time [5m], indices.memory.interval [5s] -[11:30:57,231][DEBUG][org.elasticsearch.transport.local.LocalTransport][Test worker] creating [8] workers, queue_size [-1] -[11:30:57,232][DEBUG][org.elasticsearch.discovery.zen.UnicastZenPing][Test worker] using initial hosts [0.0.0.0], with concurrent_connects [10], resolve_timeout [5s] -[11:30:57,232][DEBUG][org.elasticsearch.discovery.zen.ElectMasterService][Test worker] using minimum_master_nodes [-1] -[11:30:57,233][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][Test worker] using ping_timeout [3s], join.timeout [1m], master_election.ignore_non_master [false] -[11:30:57,233][DEBUG][org.elasticsearch.discovery.zen.MasterFaultDetection][Test worker] [master] uses ping_interval [1s], ping_timeout [30s], ping_retries [3] -[11:30:57,233][DEBUG][org.elasticsearch.discovery.zen.NodesFaultDetection][Test worker] [node ] uses ping_interval [1s], ping_timeout [30s], ping_retries [3] -[11:30:57,273][DEBUG][org.elasticsearch.indices.recovery.RecoverySettings][Test worker] using max_bytes_per_sec[40mb] -[11:30:57,283][DEBUG][org.elasticsearch.gateway.GatewayAllocator$InternalPrimaryShardAllocator][Test worker] using initial_shards [quorum] -[11:30:57,519][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] 
-[11:30:57,532][DEBUG][org.elasticsearch.gateway.GatewayMetaState][Test worker] took 0s to load state -[11:30:57,534][INFO ][org.elasticsearch.node.Node][Test worker] initialized -[11:30:57,534][INFO ][org.elasticsearch.node.Node][Test worker] starting ... -[11:30:57,535][INFO ][org.elasticsearch.transport.TransportService][Test worker] publish_address {local[3]}, bound_addresses {local[3]} -[11:30:57,536][DEBUG][org.elasticsearch.node.Node][Test worker] waiting to join the cluster. timeout [30s] -[11:30:57,536][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [initial_join]: execute -[11:30:57,537][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [initial_join]: took [0s] no change in cluster_state -[11:31:00,545][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[UDgdZ4w][generic][T#1]] filtered ping responses: (ignore_non_masters [false]) - --> ping_response{node [{UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]}], id[21], master [null],cluster_state_version [-1], cluster_name[test-helper-cluster--joerg-1]} -[11:31:00,546][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[UDgdZ4w][generic][T#1]] elected as master, waiting for incoming joins ([0] needed) -[11:31:00,546][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: execute -[11:31:00,547][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] cluster state updated, version [1], source [zen-disco-elected-as-master ([0] nodes joined)] -[11:31:00,547][INFO ][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] new_master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]}, reason: zen-disco-elected-as-master ([0] nodes joined) -[11:31:00,547][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] publishing cluster state version [1] -[11:31:00,547][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] set local cluster state to version 1 -[11:31:00,548][INFO ][org.elasticsearch.node.Node][Test worker] started -[11:31:00,548][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: took [1ms] done applying updated cluster_state (version: 1, uuid: dVRYFnVnTqmbiRZnrxbK7g) -[11:31:00,550][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: execute -[11:31:00,550][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] cluster state updated, version [2], source [local-gateway-elected-state] -[11:31:00,550][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] publishing cluster state version [2] -[11:31:00,550][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] set local cluster state to version 2 -[11:31:00,553][INFO 
][org.elasticsearch.gateway.GatewayService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] recovered [0] indices into cluster_state -[11:31:00,553][INFO ][test ][Test worker] nodes are started -[11:31:00,553][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: took [3ms] done applying updated cluster_state (version: 2, uuid: 2blnlc2FQwaO5YYITuhDSw) -[11:31:00,554][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: execute -[11:31:00,557][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] creating Index [[test/NQAmgttOR0y0HHLtTzzWmQ]], shards [5]/[1] - reason [create index] -[11:31:00,558][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null] -[11:31:00,559][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] using dynamic[true] -[11:31:01,082][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] -[11:31:01,302][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] -[11:31:01,307][INFO ][org.elasticsearch.cluster.metadata.MetaDataCreateIndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test] creating index, cause [api], templates [], shards [5]/[1], mappings [someType] -[11:31:01,312][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test] closing ... (reason [cleaning up after validating index on master]) -[11:31:01,312][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test/NQAmgttOR0y0HHLtTzzWmQ] closing index service (reason [cleaning up after validating index on master]) -[11:31:01,313][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] clearing all bitsets because [close] -[11:31:01,313][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] full cache clear, reason [close] -[11:31:01,313][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] clearing all bitsets because [close] -[11:31:01,314][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test/NQAmgttOR0y0HHLtTzzWmQ] closed... 
(reason [cleaning up after validating index on master]) -[11:31:01,315][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] cluster state updated, version [3], source [create-index [test], cause [api]] -[11:31:01,315][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] publishing cluster state version [3] -[11:31:01,316][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] set local cluster state to version 3 -[11:31:01,316][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [[test/NQAmgttOR0y0HHLtTzzWmQ]] creating index -[11:31:01,319][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] creating Index [[test/NQAmgttOR0y0HHLtTzzWmQ]], shards [5]/[1] - reason [create index] -[11:31:01,320][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null] -[11:31:01,322][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] using dynamic[true] -[11:31:01,324][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [[test/NQAmgttOR0y0HHLtTzzWmQ]] adding mapping [someType], source [{"someType":{"properties":{"content":{"type":"text","fields":{"language":{"type":"langdetect","analyzer":"_keyword","include_in_all":false,"binary":"true"}}}}}}] -[11:31:01,542][INFO ][org.elasticsearch.monitor.jvm.JvmGcMonitorService][elasticsearch[UDgdZ4w][scheduler][T#1]] [gc][4] overhead, spent [315ms] collecting in the last [1s] -[11:31:01,627][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] -[11:31:01,819][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] -[11:31:01,820][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][1] creating shard -[11:31:01,821][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][1] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/NQAmgttOR0y0HHLtTzzWmQ/1, shard=[test][1]}] -[11:31:01,821][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] creating shard_id [test][1] -[11:31:01,823][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] -[11:31:01,823][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED] 
-[11:31:01,825][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] -[11:31:01,826][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][3] creating shard -[11:31:01,826][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#3]] starting recovery from store ... -[11:31:01,826][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][3] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/NQAmgttOR0y0HHLtTzzWmQ/3, shard=[test][3]}] -[11:31:01,826][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] creating shard_id [test][3] -[11:31:01,827][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] -[11:31:01,827][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED] -[11:31:01,828][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[UDgdZ4w][generic][T#3]] wipe translog location - creating new translog -[11:31:01,828][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] -[11:31:01,828][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][2] creating shard -[11:31:01,828][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#1]] starting recovery from store ... -[11:31:01,829][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][2] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/NQAmgttOR0y0HHLtTzzWmQ/2, shard=[test][2]}] -[11:31:01,829][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] creating shard_id [test][2] -[11:31:01,829][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[UDgdZ4w][generic][T#3]] no translog ID present in the current generation - creating one -[11:31:01,830][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] -[11:31:01,830][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED] -[11:31:01,831][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[UDgdZ4w][generic][T#1]] wipe translog location - creating new translog -[11:31:01,831][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] -[11:31:01,831][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][0] creating shard -[11:31:01,831][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#2]] starting recovery from store ... 
-[11:31:01,832][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][0] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/NQAmgttOR0y0HHLtTzzWmQ/0, shard=[test][0]}] -[11:31:01,832][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] creating shard_id [test][0] -[11:31:01,832][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[UDgdZ4w][generic][T#1]] no translog ID present in the current generation - creating one -[11:31:01,833][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] -[11:31:01,833][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED] -[11:31:01,834][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[UDgdZ4w][generic][T#2]] wipe translog location - creating new translog -[11:31:01,835][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] -[11:31:01,835][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[UDgdZ4w][generic][T#2]] no translog ID present in the current generation - creating one -[11:31:01,835][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#3]] recovery completed from [shard_store], took [14ms] -[11:31:01,835][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#3]] [test][1] sending [internal:cluster/shard/started] to [UDgdZ4wIRhaOBtqKJUSqGw] for shard entry [shard id [[test][1]], allocation id [PvEmAya9S7GLCSadvFn3Kw], primary term [0], message [after new shard recovery]] -[11:31:01,836][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#3]] [test][1] received shard started for [shard id [[test][1]], allocation id [PvEmAya9S7GLCSadvFn3Kw], primary term [0], message [after new shard recovery]] -[11:31:01,836][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] -[11:31:01,837][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#4]] starting recovery from store ... 
-[11:31:01,838][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: took [1.2s] done applying updated cluster_state (version: 3, uuid: 0rsqtdJTQFaAocueOUZOMw) -[11:31:01,838][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][1]], allocation id [PvEmAya9S7GLCSadvFn3Kw], primary term [0], message [after new shard recovery]]]: execute -[11:31:01,838][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][1] starting shard [test][1], node[UDgdZ4wIRhaOBtqKJUSqGw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=PvEmAya9S7GLCSadvFn3Kw], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:31:01.307Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][1]], allocation id [PvEmAya9S7GLCSadvFn3Kw], primary term [0], message [after new shard recovery]]) -[11:31:01,839][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[UDgdZ4w][generic][T#4]] wipe translog location - creating new translog -[11:31:01,840][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[UDgdZ4w][generic][T#4]] no translog ID present in the current generation - creating one -[11:31:01,841][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] cluster state updated, version [4], source [shard-started[shard id [[test][1]], allocation id [PvEmAya9S7GLCSadvFn3Kw], primary term [0], message [after new shard recovery]]] -[11:31:01,841][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] publishing cluster state version [4] -[11:31:01,841][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] set local cluster state to version 4 -[11:31:01,842][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#1]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] -[11:31:01,843][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#1]] recovery completed from [shard_store], took [16ms] -[11:31:01,843][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#2]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] -[11:31:01,843][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#2]] recovery completed from [shard_store], took [14ms] -[11:31:01,843][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#2]] [test][2] sending [internal:cluster/shard/started] to [UDgdZ4wIRhaOBtqKJUSqGw] for shard entry [shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [after new shard recovery]] -[11:31:01,843][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#2]] [test][2] received shard started for [shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [after new shard recovery]] -[11:31:01,844][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#1]] [test][3] sending [internal:cluster/shard/started] to [UDgdZ4wIRhaOBtqKJUSqGw] for shard entry [shard id [[test][3]], 
allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [after new shard recovery]] -[11:31:01,844][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#1]] [test][3] received shard started for [shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [after new shard recovery]] -[11:31:01,844][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#4]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] -[11:31:01,845][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] -[11:31:01,845][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#4]] recovery completed from [shard_store], took [13ms] -[11:31:01,845][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#4]] [test][0] sending [internal:cluster/shard/started] to [UDgdZ4wIRhaOBtqKJUSqGw] for shard entry [shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [after new shard recovery]] -[11:31:01,845][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][3] sending [internal:cluster/shard/started] to [UDgdZ4wIRhaOBtqKJUSqGw] for shard entry [shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] -[11:31:01,846][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#4]] [test][0] received shard started for [shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [after new shard recovery]] -[11:31:01,846][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][3] received shard started for [shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] -[11:31:01,846][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][2] sending [internal:cluster/shard/started] to [UDgdZ4wIRhaOBtqKJUSqGw] for shard entry [shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] -[11:31:01,846][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][2] received shard started for [shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] -[11:31:01,846][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][4] creating shard 
-[11:31:01,847][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][4] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/NQAmgttOR0y0HHLtTzzWmQ/4, shard=[test][4]}] -[11:31:01,847][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] creating shard_id [test][4] -[11:31:01,848][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] -[11:31:01,848][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED] -[11:31:01,849][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] -[11:31:01,849][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][0] sending [internal:cluster/shard/started] to [UDgdZ4wIRhaOBtqKJUSqGw] for shard entry [shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] -[11:31:01,849][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][0] received shard started for [shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] -[11:31:01,849][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#3]] starting recovery from store ... 
-[11:31:01,851][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[UDgdZ4w][generic][T#3]] wipe translog location - creating new translog -[11:31:01,851][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][1]], allocation id [PvEmAya9S7GLCSadvFn3Kw], primary term [0], message [after new shard recovery]]]: took [12ms] done applying updated cluster_state (version: 4, uuid: J52kfurGRQG6HnSDT9gwfg) -[11:31:01,852][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[UDgdZ4w][generic][T#3]] no translog ID present in the current generation - creating one -[11:31:01,853][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: execute -[11:31:01,853][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][2] starting shard [test][2], node[UDgdZ4wIRhaOBtqKJUSqGw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=RW0O8x5KTa6DQA72OILqEA], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:31:01.307Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [after new shard recovery]]) -[11:31:01,853][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][3] starting shard [test][3], node[UDgdZ4wIRhaOBtqKJUSqGw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=wQKNtekeTOeThRmBtA53FA], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:31:01.307Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [after new shard recovery]]) -[11:31:01,854][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][0] starting shard [test][0], node[UDgdZ4wIRhaOBtqKJUSqGw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=v4bvgxqFQcSIhooSCHaCYg], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:31:01.307Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][0]], allocation id 
[v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [after new shard recovery]]) -[11:31:01,855][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] cluster state updated, version [5], source [shard-started[shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]] -[11:31:01,855][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] publishing cluster state version [5] -[11:31:01,856][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] set local cluster state to version 5 -[11:31:01,857][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] -[11:31:01,861][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][generic][T#3]] recovery completed from [shard_store], took [10ms] -[11:31:01,857][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] -[11:31:01,862][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#3]] [test][4] sending [internal:cluster/shard/started] to [UDgdZ4wIRhaOBtqKJUSqGw] for shard entry [shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [after new shard recovery]] -[11:31:01,862][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][generic][T#3]] [test][4] received shard started for [shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [after new shard recovery]] -[11:31:01,862][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] -[11:31:01,862][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][4] sending [internal:cluster/shard/started] to [UDgdZ4wIRhaOBtqKJUSqGw] for shard entry [shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] 
-[11:31:01,862][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][4] received shard started for [shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] -[11:31:01,863][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] -[11:31:01,865][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [wQKNtekeTOeThRmBtA53FA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][2]], allocation id [RW0O8x5KTa6DQA72OILqEA], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [v4bvgxqFQcSIhooSCHaCYg], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: took [11ms] done applying updated cluster_state (version: 5, uuid: 4cGR_70QQYOUucWv-kZHLQ) -[11:31:01,865][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: execute -[11:31:01,865][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] [test][4] starting shard [test][4], node[UDgdZ4wIRhaOBtqKJUSqGw], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=2nCQ414fRp2el29G3WKNgw], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T10:31:01.307Z], delayed=false, allocation_status[deciders_throttled]] (shard started task: [shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [after new shard recovery]]) -[11:31:01,867][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] cluster state updated, version [6], source [shard-started[shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [master 
{UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]] -[11:31:01,867][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] publishing cluster state version [6] -[11:31:01,867][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] set local cluster state to version 6 -[11:31:01,868][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] -[11:31:01,870][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[UDgdZ4w][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [2nCQ414fRp2el29G3WKNgw], primary term [0], message [master {UDgdZ4w}{UDgdZ4wIRhaOBtqKJUSqGw}{nmtV_Nr5R8munoeE3iUwBQ}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: took [4ms] done applying updated cluster_state (version: 6, uuid: i3ag2a4aRieMexeNmrDaTA) -[11:31:01,943][INFO ][test ][Test worker] stopping nodes -[11:31:01,943][INFO ][org.elasticsearch.node.Node][Test worker] stopping ... -[11:31:01,946][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test] closing ... (reason [shutdown]) -[11:31:01,947][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/NQAmgttOR0y0HHLtTzzWmQ] closing index service (reason [shutdown]) -[11:31:01,947][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closing... (reason: [shutdown]) -[11:31:01,947][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] -[11:31:01,947][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk -[11:31:01,947][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock -[11:31:01,947][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock -[11:31:01,948][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed -[11:31:01,949][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] -[11:31:01,949][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 -[11:31:01,949][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closed (reason: [shutdown]) -[11:31:01,949][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closing... 
(reason: [shutdown]) -[11:31:01,949][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] -[11:31:01,949][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk -[11:31:01,949][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock -[11:31:01,949][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock -[11:31:01,950][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed -[11:31:01,951][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] -[11:31:01,951][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 -[11:31:01,951][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closed (reason: [shutdown]) -[11:31:01,951][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closing... (reason: [shutdown]) -[11:31:01,951][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] -[11:31:01,952][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk -[11:31:01,952][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock -[11:31:01,952][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock -[11:31:01,953][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed -[11:31:01,957][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] -[11:31:01,958][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 -[11:31:01,958][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closed (reason: [shutdown]) -[11:31:01,958][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closing... (reason: [shutdown]) -[11:31:01,958][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] -[11:31:01,958][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk -[11:31:01,970][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock -[11:31:01,971][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock -[11:31:01,971][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed -[11:31:01,972][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] -[11:31:01,972][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 -[11:31:01,973][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closed (reason: [shutdown]) -[11:31:01,973][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closing... 
(reason: [shutdown]) -[11:31:01,973][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] -[11:31:01,973][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk -[11:31:01,974][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock -[11:31:01,974][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock -[11:31:01,974][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed -[11:31:01,976][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] -[11:31:01,976][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 -[11:31:01,976][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closed (reason: [shutdown]) -[11:31:01,976][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close] -[11:31:01,976][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][indices_shutdown[T#1]] full cache clear, reason [close] -[11:31:01,976][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close] -[11:31:01,977][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/NQAmgttOR0y0HHLtTzzWmQ] closed... (reason [shutdown]) -[11:31:01,977][INFO ][org.elasticsearch.node.Node][Test worker] stopped -[11:31:01,977][INFO ][org.elasticsearch.node.Node][Test worker] closing ... -[11:31:01,979][INFO ][org.elasticsearch.node.Node][Test worker] closed -[11:31:01,985][INFO ][test ][Test worker] data files wiped +[16:15:59,210][DEBUG][org.elasticsearch.monitor.jvm.JvmGcMonitorService][Test worker] enabled [true], interval [1s], gc_threshold [{default=GcThreshold{name='default', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}, young=GcThreshold{name='young', warnThreshold=1000, infoThreshold=700, debugThreshold=400}, old=GcThreshold{name='old', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}}], overhead [50, 25, 10] +[16:15:59,210][DEBUG][org.elasticsearch.monitor.os.OsService][Test worker] using refresh_interval [1s] +[16:15:59,211][DEBUG][org.elasticsearch.monitor.process.ProcessService][Test worker] using refresh_interval [1s] +[16:15:59,211][DEBUG][org.elasticsearch.monitor.jvm.JvmService][Test worker] using refresh_interval [1s] +[16:15:59,211][DEBUG][org.elasticsearch.monitor.fs.FsService][Test worker] using refresh_interval [1s] +[16:15:59,212][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider][Test worker] using [cluster.routing.allocation.allow_rebalance] with [indices_all_active] +[16:15:59,212][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider][Test worker] using [cluster_concurrent_rebalance] with [2] +[16:15:59,213][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider][Test worker] using node_concurrent_outgoing_recoveries [2], node_concurrent_incoming_recoveries [2], node_initial_primaries_recoveries [4] +[16:15:59,215][DEBUG][org.elasticsearch.index.store.IndexStoreConfig][Test worker] using indices.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [0b] +[16:15:59,216][DEBUG][org.elasticsearch.indices.IndicesQueryCache][Test 
worker] using [node] query cache with size [364mb] max filter count [10000] +[16:15:59,216][DEBUG][org.elasticsearch.indices.IndexingMemoryController][Test worker] using indexing buffer size [364mb] with indices.memory.shard_inactive_time [5m], indices.memory.interval [5s] +[16:15:59,217][DEBUG][org.elasticsearch.transport.local.LocalTransport][Test worker] creating [8] workers, queue_size [-1] +[16:15:59,218][DEBUG][org.elasticsearch.discovery.zen.UnicastZenPing][Test worker] using initial hosts [0.0.0.0], with concurrent_connects [10], resolve_timeout [5s] +[16:15:59,218][DEBUG][org.elasticsearch.discovery.zen.ElectMasterService][Test worker] using minimum_master_nodes [-1] +[16:15:59,218][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][Test worker] using ping_timeout [3s], join.timeout [1m], master_election.ignore_non_master [false] +[16:15:59,219][DEBUG][org.elasticsearch.discovery.zen.MasterFaultDetection][Test worker] [master] uses ping_interval [1s], ping_timeout [30s], ping_retries [3] +[16:15:59,219][DEBUG][org.elasticsearch.discovery.zen.NodesFaultDetection][Test worker] [node ] uses ping_interval [1s], ping_timeout [30s], ping_retries [3] +[16:15:59,255][DEBUG][org.elasticsearch.indices.recovery.RecoverySettings][Test worker] using max_bytes_per_sec[40mb] +[16:15:59,274][DEBUG][org.elasticsearch.gateway.GatewayAllocator$InternalPrimaryShardAllocator][Test worker] using initial_shards [quorum] +[16:16:00,026][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] +[16:16:00,033][DEBUG][org.elasticsearch.gateway.GatewayMetaState][Test worker] took 0s to load state +[16:16:00,035][INFO ][org.elasticsearch.node.Node][Test worker] initialized +[16:16:00,035][INFO ][org.elasticsearch.node.Node][Test worker] starting ... +[16:16:00,036][INFO ][org.elasticsearch.transport.TransportService][Test worker] publish_address {local[3]}, bound_addresses {local[3]} +[16:16:00,037][DEBUG][org.elasticsearch.node.Node][Test worker] waiting to join the cluster. 
timeout [30s] +[16:16:00,037][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [initial_join]: execute +[16:16:00,038][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [initial_join]: took [0s] no change in cluster_state +[16:16:03,042][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[_vWPEAD][generic][T#1]] filtered ping responses: (ignore_non_masters [false]) + --> ping_response{node [{_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]}], id[21], master [null],cluster_state_version [-1], cluster_name[test-helper-cluster--joerg-1]} +[16:16:03,044][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[_vWPEAD][generic][T#1]] elected as master, waiting for incoming joins ([0] needed) +[16:16:03,045][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: execute +[16:16:03,046][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] cluster state updated, version [1], source [zen-disco-elected-as-master ([0] nodes joined)] +[16:16:03,046][INFO ][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] new_master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]}, reason: zen-disco-elected-as-master ([0] nodes joined) +[16:16:03,046][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] publishing cluster state version [1] +[16:16:03,046][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] set local cluster state to version 1 +[16:16:03,047][INFO ][org.elasticsearch.node.Node][Test worker] started +[16:16:03,047][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: took [1ms] done applying updated cluster_state (version: 1, uuid: oJ4rhd9NQDSDvIwR3F29ng) +[16:16:03,048][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: execute +[16:16:03,048][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] cluster state updated, version [2], source [local-gateway-elected-state] +[16:16:03,048][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] publishing cluster state version [2] +[16:16:03,048][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] set local cluster state to version 2 +[16:16:03,050][INFO ][org.elasticsearch.gateway.GatewayService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] recovered [0] indices into cluster_state +[16:16:03,050][INFO ][test ][Test worker] nodes are started +[16:16:03,050][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: took [2ms] done applying updated cluster_state (version: 2, uuid: aw8v7MD1SpeuLF9dnx3Tog) 
+[16:16:03,051][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: execute +[16:16:03,052][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] creating Index [[test/HfBDjEv4TJGXdFIpMdLtdQ]], shards [5]/[1] - reason [create index] +[16:16:03,053][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null] +[16:16:03,054][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] using dynamic[true] +[16:16:03,250][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] +[16:16:03,424][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] +[16:16:03,426][INFO ][org.elasticsearch.cluster.metadata.MetaDataCreateIndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test] creating index, cause [api], templates [], shards [5]/[1], mappings [someType] +[16:16:03,429][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test] closing ... (reason [cleaning up after validating index on master]) +[16:16:03,429][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test/HfBDjEv4TJGXdFIpMdLtdQ] closing index service (reason [cleaning up after validating index on master]) +[16:16:03,429][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] clearing all bitsets because [close] +[16:16:03,429][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] full cache clear, reason [close] +[16:16:03,430][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] clearing all bitsets because [close] +[16:16:03,430][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test/HfBDjEv4TJGXdFIpMdLtdQ] closed... 
(reason [cleaning up after validating index on master]) +[16:16:03,430][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] cluster state updated, version [3], source [create-index [test], cause [api]] +[16:16:03,430][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] publishing cluster state version [3] +[16:16:03,430][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] set local cluster state to version 3 +[16:16:03,430][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [[test/HfBDjEv4TJGXdFIpMdLtdQ]] creating index +[16:16:03,431][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] creating Index [[test/HfBDjEv4TJGXdFIpMdLtdQ]], shards [5]/[1] - reason [create index] +[16:16:03,431][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null] +[16:16:03,432][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] using dynamic[true] +[16:16:03,433][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [[test/HfBDjEv4TJGXdFIpMdLtdQ]] adding mapping [someType], source [{"someType":{"properties":{"content":{"type":"text","fields":{"language":{"type":"langdetect","analyzer":"_keyword","include_in_all":false,"binary":"true"}}}}}}] +[16:16:03,612][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] +[16:16:03,813][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw] +[16:16:03,814][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][2] creating shard +[16:16:03,815][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][2] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/HfBDjEv4TJGXdFIpMdLtdQ/2, shard=[test][2]}] +[16:16:03,815][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] creating shard_id [test][2] +[16:16:03,816][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] +[16:16:03,816][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED] +[16:16:03,818][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] 
+[16:16:03,818][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][3] creating shard +[16:16:03,819][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#3]] starting recovery from store ... +[16:16:03,819][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][3] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/HfBDjEv4TJGXdFIpMdLtdQ/3, shard=[test][3]}] +[16:16:03,819][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] creating shard_id [test][3] +[16:16:03,819][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] +[16:16:03,820][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED] +[16:16:03,820][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] +[16:16:03,820][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[_vWPEAD][generic][T#3]] wipe translog location - creating new translog +[16:16:03,821][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][1] creating shard +[16:16:03,821][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#1]] starting recovery from store ... +[16:16:03,821][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][1] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/HfBDjEv4TJGXdFIpMdLtdQ/1, shard=[test][1]}] +[16:16:03,821][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] creating shard_id [test][1] +[16:16:03,822][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[_vWPEAD][generic][T#3]] no translog ID present in the current generation - creating one +[16:16:03,822][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] +[16:16:03,822][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[_vWPEAD][generic][T#1]] wipe translog location - creating new translog +[16:16:03,823][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED] +[16:16:03,824][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[_vWPEAD][generic][T#1]] no translog ID present in the current generation - creating one +[16:16:03,825][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] +[16:16:03,825][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][0] creating shard +[16:16:03,825][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#2]] starting recovery from store ... 
+[16:16:03,825][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][0] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/HfBDjEv4TJGXdFIpMdLtdQ/0, shard=[test][0]}] +[16:16:03,826][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] creating shard_id [test][0] +[16:16:03,827][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] +[16:16:03,827][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED] +[16:16:03,827][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[_vWPEAD][generic][T#2]] wipe translog location - creating new translog +[16:16:03,828][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] +[16:16:03,828][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#3]] recovery completed from [shard_store], took [13ms] +[16:16:03,828][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#3]] [test][2] sending [internal:cluster/shard/started] to [_vWPEAD8R3q57wu4rbdE0A] for shard entry [shard id [[test][2]], allocation id [xYCz6wGXRs2ejBJ85L1cTA], primary term [0], message [after new shard recovery]] +[16:16:03,828][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#3]] [test][2] received shard started for [shard id [[test][2]], allocation id [xYCz6wGXRs2ejBJ85L1cTA], primary term [0], message [after new shard recovery]] +[16:16:03,828][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[_vWPEAD][generic][T#2]] no translog ID present in the current generation - creating one +[16:16:03,828][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] +[16:16:03,828][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#1]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] +[16:16:03,829][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#1]] recovery completed from [shard_store], took [9ms] +[16:16:03,829][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#1]] [test][3] sending [internal:cluster/shard/started] to [_vWPEAD8R3q57wu4rbdE0A] for shard entry [shard id [[test][3]], allocation id [C88dd9azRtmPqXJyx_yrQA], primary term [0], message [after new shard recovery]] +[16:16:03,829][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#1]] [test][3] received shard started for [shard id [[test][3]], allocation id [C88dd9azRtmPqXJyx_yrQA], primary term [0], message [after new shard recovery]] +[16:16:03,829][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#4]] starting recovery from store ... 
+[16:16:03,830][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: took [778ms] done applying updated cluster_state (version: 3, uuid: S-H00SMrR66kjfh_m5yK5A) +[16:16:03,830][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][2]], allocation id [xYCz6wGXRs2ejBJ85L1cTA], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [C88dd9azRtmPqXJyx_yrQA], primary term [0], message [after new shard recovery]]]: execute +[16:16:03,830][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][2] starting shard [test][2], node[_vWPEAD8R3q57wu4rbdE0A], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=xYCz6wGXRs2ejBJ85L1cTA], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:03.426Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][2]], allocation id [xYCz6wGXRs2ejBJ85L1cTA], primary term [0], message [after new shard recovery]]) +[16:16:03,831][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][3] starting shard [test][3], node[_vWPEAD8R3q57wu4rbdE0A], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=C88dd9azRtmPqXJyx_yrQA], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:03.426Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][3]], allocation id [C88dd9azRtmPqXJyx_yrQA], primary term [0], message [after new shard recovery]]) +[16:16:03,831][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[_vWPEAD][generic][T#4]] wipe translog location - creating new translog +[16:16:03,831][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#2]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] +[16:16:03,831][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#2]] recovery completed from [shard_store], took [10ms] +[16:16:03,831][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#2]] [test][1] sending [internal:cluster/shard/started] to [_vWPEAD8R3q57wu4rbdE0A] for shard entry [shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [after new shard recovery]] +[16:16:03,831][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#2]] [test][1] received shard started for [shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [after new shard recovery]] +[16:16:03,832][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[_vWPEAD][generic][T#4]] no translog ID present in the current generation - creating one +[16:16:03,833][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] cluster state updated, version [4], source [shard-started[shard id [[test][2]], allocation id [xYCz6wGXRs2ejBJ85L1cTA], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [C88dd9azRtmPqXJyx_yrQA], primary term [0], message [after new shard recovery]]] 
+[16:16:03,833][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] publishing cluster state version [4] +[16:16:03,833][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] set local cluster state to version 4 +[16:16:03,834][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#4]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] +[16:16:03,834][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] +[16:16:03,834][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#4]] recovery completed from [shard_store], took [8ms] +[16:16:03,835][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] +[16:16:03,835][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#4]] [test][0] sending [internal:cluster/shard/started] to [_vWPEAD8R3q57wu4rbdE0A] for shard entry [shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [after new shard recovery]] +[16:16:03,835][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#4]] [test][0] received shard started for [shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [after new shard recovery]] +[16:16:03,835][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][1] sending [internal:cluster/shard/started] to [_vWPEAD8R3q57wu4rbdE0A] for shard entry [shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] +[16:16:03,835][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][1] received shard started for [shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] +[16:16:03,835][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][4] creating shard +[16:16:03,836][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][4] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/HfBDjEv4TJGXdFIpMdLtdQ/4, shard=[test][4]}] +[16:16:03,836][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] creating shard_id [test][4] +[16:16:03,838][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] +[16:16:03,839][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED] 
+[16:16:03,840][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] +[16:16:03,841][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#3]] starting recovery from store ... +[16:16:03,841][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][0] sending [internal:cluster/shard/started] to [_vWPEAD8R3q57wu4rbdE0A] for shard entry [shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] +[16:16:03,841][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][0] received shard started for [shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] +[16:16:03,842][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[_vWPEAD][generic][T#3]] wipe translog location - creating new translog +[16:16:03,843][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][2]], allocation id [xYCz6wGXRs2ejBJ85L1cTA], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [C88dd9azRtmPqXJyx_yrQA], primary term [0], message [after new shard recovery]]]: took [12ms] done applying updated cluster_state (version: 4, uuid: Honx4OlmQaWwV9ZHUsiHkw) +[16:16:03,843][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [after new shard recovery], shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: execute +[16:16:03,843][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][1] starting shard [test][1], node[_vWPEAD8R3q57wu4rbdE0A], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=fnYoFX7cQTuHJRBAbm4hfA], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:03.426Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [after new shard recovery]]) +[16:16:03,844][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[_vWPEAD][generic][T#3]] no translog ID present in the current generation - creating one 
+[16:16:03,844][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][0] starting shard [test][0], node[_vWPEAD8R3q57wu4rbdE0A], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=JtIlGmldRCCgVgwUHXhpmQ], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:03.426Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [after new shard recovery]]) +[16:16:03,845][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] cluster state updated, version [5], source [shard-started[shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [after new shard recovery], shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]] +[16:16:03,845][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] publishing cluster state version [5] +[16:16:03,845][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] set local cluster state to version 5 +[16:16:03,846][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] +[16:16:03,846][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] +[16:16:03,847][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#3]] recovery completed from [shard_store], took [10ms] +[16:16:03,847][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#3]] [test][4] sending [internal:cluster/shard/started] to [_vWPEAD8R3q57wu4rbdE0A] for shard entry [shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [after new shard recovery]] +[16:16:03,847][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][4] sending [internal:cluster/shard/started] to [_vWPEAD8R3q57wu4rbdE0A] for shard entry [shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] +[16:16:03,847][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#3]] [test][4] received shard started for [shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [after new shard recovery]] 
+[16:16:03,847][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][4] received shard started for [shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] +[16:16:03,847][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] +[16:16:03,849][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [after new shard recovery], shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: took [5ms] done applying updated cluster_state (version: 5, uuid: yHGFA87MTFSc1tsQGGF_2g) +[16:16:03,849][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: execute +[16:16:03,849][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][4] starting shard [test][4], node[_vWPEAD8R3q57wu4rbdE0A], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=yEvk4BTMTy2k6kuoHnuZIA], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:03.426Z], delayed=false, allocation_status[deciders_throttled]] (shard started task: [shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [after new shard recovery]]) +[16:16:03,850][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] cluster state updated, version [6], source [shard-started[shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]] +[16:16:03,850][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] publishing cluster state version [6] 
+[16:16:03,850][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] set local cluster state to version 6 +[16:16:03,851][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] +[16:16:03,852][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: took [3ms] done applying updated cluster_state (version: 6, uuid: BzKsuzFxRVGzzpH_StdMLg) +[16:16:03,893][INFO ][test ][Test worker] stopping nodes +[16:16:03,893][INFO ][org.elasticsearch.node.Node][Test worker] stopping ... +[16:16:03,894][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test] closing ... (reason [shutdown]) +[16:16:03,895][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/HfBDjEv4TJGXdFIpMdLtdQ] closing index service (reason [shutdown]) +[16:16:03,895][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closing... (reason: [shutdown]) +[16:16:03,896][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] +[16:16:03,896][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk +[16:16:03,896][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock +[16:16:03,896][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock +[16:16:03,897][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed +[16:16:03,898][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] +[16:16:03,899][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 +[16:16:03,899][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closed (reason: [shutdown]) +[16:16:03,899][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closing... 
(reason: [shutdown]) +[16:16:03,899][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] +[16:16:03,899][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk +[16:16:03,899][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock +[16:16:03,899][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock +[16:16:03,900][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed +[16:16:03,901][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] +[16:16:03,901][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 +[16:16:03,901][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closed (reason: [shutdown]) +[16:16:03,901][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closing... (reason: [shutdown]) +[16:16:03,901][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] +[16:16:03,901][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk +[16:16:03,901][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock +[16:16:03,901][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock +[16:16:03,902][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed +[16:16:03,903][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] +[16:16:03,903][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 +[16:16:03,903][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closed (reason: [shutdown]) +[16:16:03,903][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closing... (reason: [shutdown]) +[16:16:03,903][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] +[16:16:03,903][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk +[16:16:03,910][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock +[16:16:03,910][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock +[16:16:03,910][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed +[16:16:03,911][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] +[16:16:03,911][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 +[16:16:03,911][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closed (reason: [shutdown]) +[16:16:03,911][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closing... 
(reason: [shutdown]) +[16:16:03,911][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] +[16:16:03,911][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk +[16:16:03,911][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock +[16:16:03,911][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock +[16:16:03,911][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed +[16:16:03,912][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] +[16:16:03,912][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 +[16:16:03,912][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closed (reason: [shutdown]) +[16:16:03,912][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close] +[16:16:03,912][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][indices_shutdown[T#1]] full cache clear, reason [close] +[16:16:03,913][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close] +[16:16:03,913][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/HfBDjEv4TJGXdFIpMdLtdQ] closed... (reason [shutdown]) +[16:16:03,913][INFO ][org.elasticsearch.node.Node][Test worker] stopped +[16:16:03,913][INFO ][org.elasticsearch.node.Node][Test worker] closing ... +[16:16:03,915][INFO ][org.elasticsearch.node.Node][Test worker] closed +[16:16:03,922][INFO ][test ][Test worker] data files wiped
    @@ -365,7 +362,7 @@

    Standard output

    Generated by
    -Gradle 3.2.1 at 26.01.2017 11:31:14
    +Gradle 3.2.1 at 26.01.2017 16:16:15
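    The log captured in the diff above also records the mapping that the langdetect mapper registers while the [test] index is created (the MapperService line "adding mapping [someType]"). Pretty-printed purely for readability, with every key and value copied verbatim from that log line, the mapping reads:

    {
      "someType": {
        "properties": {
          "content": {
            "type": "text",
            "fields": {
              "language": {
                "type": "langdetect",
                "analyzer": "_keyword",
                "include_in_all": false,
                "binary": "true"
              }
            }
          }
        }
      }
    }

    The "language" multi-field carries the field type contributed by the LangdetectPlugin; the rest of the hunk is ordinary node bootstrap, shard recovery, and shutdown logging from the test run.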

    diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectChineseTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectChineseTest.html
    index bbd7c5d..04a0ddc 100644
    --- a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectChineseTest.html
    +++ b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectChineseTest.html
    @@ -41,7 +41,7 @@

    Class org.xbib.elasticsearch.index.mapper.langdetect.LangDetectChineseTest
    -6.620s
    +6.117s

    duration

    @@ -79,7 +79,7 @@

    Tests

    testChineseLanguageCode
    -6.620s
    +6.117s
    passed
    @@ -87,36 +87,36 @@

    Tests

    Standard output

    -[11:31:03,991][INFO ][test                     ][Test worker] settings cluster name
    -[11:31:03,991][INFO ][test                     ][Test worker] starting nodes
    -[11:31:03,992][INFO ][test                     ][Test worker] settings={cluster.name=test-helper-cluster--joerg-1, http.enabled=false, path.home=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect, transport.type=local}
    -[11:31:03,997][INFO ][org.elasticsearch.node.Node][Test worker] initializing ...
    -[11:31:04,003][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] using node location [[NodePath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, spins=null}]], local_lock_id [0]
    -[11:31:04,003][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] node data locations details:
    - -> /Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, free_space [138.5gb], usable_space [138.3gb], total_space [931gb], spins? [unknown], mount [/ (/dev/disk0s2)], type [hfs]
    -[11:31:04,003][INFO ][org.elasticsearch.env.NodeEnvironment][Test worker] heap size [3.5gb], compressed ordinary object pointers [true]
    -[11:31:04,004][INFO ][org.elasticsearch.node.Node][Test worker] node name [Z_hILMV] derived from node ID [Z_hILMVpQCKzHo_-OnlBjg]; set [node.name] to override
    -[11:31:04,004][INFO ][org.elasticsearch.node.Node][Test worker] version[5.1.2], pid[7248], build[c8c4c16/2017-01-11T20:18:39.146Z], OS[Mac OS X/10.9.5/x86_64], JVM[Azul Systems, Inc./OpenJDK 64-Bit Server VM/1.8.0_112/25.112-b16]
    -[11:31:04,004][DEBUG][org.elasticsearch.node.Node][Test worker] using config [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/config], data [[/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data]], logs [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/logs], plugins [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins]
    -[11:31:04,005][DEBUG][org.elasticsearch.plugins.PluginsService][Test worker] [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins] directory does not exist.
    -[11:31:04,005][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] no modules loaded
    -[11:31:04,006][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] loaded plugin [org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin]
    -[11:31:04,009][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [force_merge], size [1], queue size [unbounded]
    -[11:31:04,010][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_started], core [1], max [16], keep alive [5m]
    -[11:31:04,010][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [listener], size [4], queue size [unbounded]
    -[11:31:04,010][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [index], size [8], queue size [200]
    -[11:31:04,010][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [refresh], core [1], max [4], keep alive [5m]
    -[11:31:04,010][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [generic], core [4], max [128], keep alive [30s]
    -[11:31:04,011][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [warmer], core [1], max [4], keep alive [5m]
    -[11:31:04,011][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [search], size [13], queue size [1k]
    -[11:31:04,011][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [flush], core [1], max [4], keep alive [5m]
    -[11:31:04,011][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_store], core [1], max [16], keep alive [5m]
    -[11:31:04,011][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [management], core [1], max [5], keep alive [5m]
    -[11:31:04,012][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [get], size [8], queue size [1k]
    -[11:31:04,012][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [bulk], size [8], queue size [50]
    -[11:31:04,012][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [snapshot], core [1], max [4], keep alive [5m]
    -[11:31:04,014][DEBUG][org.elasticsearch.script.ScriptService][Test worker] using script cache with max_size [100], expire [0s]
    -[11:31:04,019][DEBUG][org.elasticsearch.common.network.IfConfig][Test worker] configuration:
    +[16:16:05,929][INFO ][test                     ][Test worker] settings cluster name
    +[16:16:05,929][INFO ][test                     ][Test worker] starting nodes
    +[16:16:05,929][INFO ][test                     ][Test worker] settings={cluster.name=test-helper-cluster--joerg-1, http.enabled=false, path.home=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect, transport.type=local}
    +[16:16:05,930][INFO ][org.elasticsearch.node.Node][Test worker] initializing ...
    +[16:16:05,933][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] using node location [[NodePath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, spins=null}]], local_lock_id [0]
    +[16:16:05,933][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] node data locations details:
    + -> /Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, free_space [138.1gb], usable_space [137.8gb], total_space [931gb], spins? [unknown], mount [/ (/dev/disk0s2)], type [hfs]
    +[16:16:05,933][INFO ][org.elasticsearch.env.NodeEnvironment][Test worker] heap size [3.5gb], compressed ordinary object pointers [true]
    +[16:16:05,933][INFO ][org.elasticsearch.node.Node][Test worker] node name [Z1dFIC3] derived from node ID [Z1dFIC3HQWqRSaRc5uCucQ]; set [node.name] to override
    +[16:16:05,933][INFO ][org.elasticsearch.node.Node][Test worker] version[5.1.2], pid[9098], build[c8c4c16/2017-01-11T20:18:39.146Z], OS[Mac OS X/10.9.5/x86_64], JVM[Azul Systems, Inc./OpenJDK 64-Bit Server VM/1.8.0_112/25.112-b16]
    +[16:16:05,933][DEBUG][org.elasticsearch.node.Node][Test worker] using config [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/config], data [[/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data]], logs [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/logs], plugins [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins]
    +[16:16:05,934][DEBUG][org.elasticsearch.plugins.PluginsService][Test worker] [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins] directory does not exist.
    +[16:16:05,934][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] no modules loaded
    +[16:16:05,934][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] loaded plugin [org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin]
    +[16:16:05,935][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [force_merge], size [1], queue size [unbounded]
    +[16:16:05,935][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_started], core [1], max [16], keep alive [5m]
    +[16:16:05,935][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [listener], size [4], queue size [unbounded]
    +[16:16:05,935][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [index], size [8], queue size [200]
    +[16:16:05,935][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [refresh], core [1], max [4], keep alive [5m]
    +[16:16:05,935][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [generic], core [4], max [128], keep alive [30s]
    +[16:16:05,935][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [warmer], core [1], max [4], keep alive [5m]
    +[16:16:05,935][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [search], size [13], queue size [1k]
    +[16:16:05,936][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [flush], core [1], max [4], keep alive [5m]
    +[16:16:05,936][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_store], core [1], max [16], keep alive [5m]
    +[16:16:05,936][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [management], core [1], max [5], keep alive [5m]
    +[16:16:05,936][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [get], size [8], queue size [1k]
    +[16:16:05,936][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [bulk], size [8], queue size [50]
    +[16:16:05,936][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [snapshot], core [1], max [4], keep alive [5m]
    +[16:16:05,937][DEBUG][org.elasticsearch.script.ScriptService][Test worker] using script cache with max_size [100], expire [0s]
    +[16:16:05,939][DEBUG][org.elasticsearch.common.network.IfConfig][Test worker] configuration:
     
     lo0
             inet 127.0.0.1 netmask:255.0.0.0 scope:host
    @@ -130,229 +130,229 @@ 

    Standard output

 hardware 68:5B:35:BC:46:72 UP MULTICAST mtu:1500 index:9
-[... previous test run's standard output (timestamps 11:31:xx, node ID Z_hILMV): JVM/GC and monitor setup, zen discovery and master election, language detection service installation, creation of index [test] with the langdetect mapping on field [content.language], recovery and start of shards [0]-[4], and node shutdown/close ...]
+[... the same standard output captured from the new test run, differing only in timestamps (16:16:xx), node ID (Z1dFIC3), cluster-state UUIDs, and index UUID ...]
+[16:16:09,928][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][0] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/vk4gpyojS2iq8eEZ4MdMIw/0, shard=[test][0]}] +[16:16:09,928][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] creating shard_id [test][0] +[16:16:09,929][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] +[16:16:09,929][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#3]] recovery completed from [shard_store], took [9ms] +[16:16:09,929][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[Z1dFIC3][generic][T#1]] no translog ID present in the current generation - creating one +[16:16:09,929][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] +[16:16:09,929][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#3]] [test][2] sending [internal:cluster/shard/started] to [Z1dFIC3HQWqRSaRc5uCucQ] for shard entry [shard id [[test][2]], allocation id [-ap99kDrTiq7PDg6sGzmOA], primary term [0], message [after new shard recovery]] +[16:16:09,929][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#3]] [test][2] received shard started for [shard id [[test][2]], allocation id [-ap99kDrTiq7PDg6sGzmOA], primary term [0], message [after new shard recovery]] +[16:16:09,929][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [CREATED] +[16:16:09,930][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[Z1dFIC3][generic][T#2]] wipe translog location - creating new translog +[16:16:09,930][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] +[16:16:09,930][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#4]] starting recovery from store ... 
+[16:16:09,931][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[Z1dFIC3][generic][T#2]] no translog ID present in the current generation - creating one +[16:16:09,931][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: took [744ms] done applying updated cluster_state (version: 3, uuid: nGr0qRjISbCib9mIyeiTtw) +[16:16:09,932][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][2]], allocation id [-ap99kDrTiq7PDg6sGzmOA], primary term [0], message [after new shard recovery]]]: execute +[16:16:09,932][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#1]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] +[16:16:09,932][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[Z1dFIC3][generic][T#4]] wipe translog location - creating new translog +[16:16:09,932][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#1]] recovery completed from [shard_store], took [9ms] +[16:16:09,932][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][2] starting shard [test][2], node[Z1dFIC3HQWqRSaRc5uCucQ], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=-ap99kDrTiq7PDg6sGzmOA], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:09.545Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][2]], allocation id [-ap99kDrTiq7PDg6sGzmOA], primary term [0], message [after new shard recovery]]) +[16:16:09,932][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#1]] [test][3] sending [internal:cluster/shard/started] to [Z1dFIC3HQWqRSaRc5uCucQ] for shard entry [shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [after new shard recovery]] +[16:16:09,932][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#1]] [test][3] received shard started for [shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [after new shard recovery]] +[16:16:09,933][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[Z1dFIC3][generic][T#4]] no translog ID present in the current generation - creating one +[16:16:09,933][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#2]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] +[16:16:09,933][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#2]] recovery completed from [shard_store], took [7ms] +[16:16:09,933][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#2]] [test][1] sending [internal:cluster/shard/started] to [Z1dFIC3HQWqRSaRc5uCucQ] for shard entry [shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [after new shard recovery]] +[16:16:09,933][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#2]] [test][1] received shard started for [shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [after new shard recovery]] 
+[16:16:09,934][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] cluster state updated, version [4], source [shard-started[shard id [[test][2]], allocation id [-ap99kDrTiq7PDg6sGzmOA], primary term [0], message [after new shard recovery]]] +[16:16:09,934][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] publishing cluster state version [4] +[16:16:09,934][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] set local cluster state to version 4 +[16:16:09,935][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#4]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] +[16:16:09,935][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] +[16:16:09,935][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#4]] recovery completed from [shard_store], took [7ms] +[16:16:09,935][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][3] sending [internal:cluster/shard/started] to [Z1dFIC3HQWqRSaRc5uCucQ] for shard entry [shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] +[16:16:09,935][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#4]] [test][0] sending [internal:cluster/shard/started] to [Z1dFIC3HQWqRSaRc5uCucQ] for shard entry [shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [after new shard recovery]] +[16:16:09,935][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][3] received shard started for [shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] +[16:16:09,935][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#4]] [test][0] received shard started for [shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [after new shard recovery]] +[16:16:09,936][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][1] sending [internal:cluster/shard/started] to [Z1dFIC3HQWqRSaRc5uCucQ] for shard entry [shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] +[16:16:09,936][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][1] received shard started for [shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as 
initializing, but shard state is [POST_RECOVERY], mark shard as started]] +[16:16:09,936][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][4] creating shard +[16:16:09,936][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][4] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/vk4gpyojS2iq8eEZ4MdMIw/4, shard=[test][4]}] +[16:16:09,936][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] creating shard_id [test][4] +[16:16:09,937][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s] +[16:16:09,938][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [CREATED] +[16:16:09,939][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store] +[16:16:09,939][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#3]] starting recovery from store ... +[16:16:09,939][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][0] sending [internal:cluster/shard/started] to [Z1dFIC3HQWqRSaRc5uCucQ] for shard entry [shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] +[16:16:09,939][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][0] received shard started for [shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] +[16:16:09,940][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[Z1dFIC3][generic][T#3]] wipe translog location - creating new translog +[16:16:09,941][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][2]], allocation id [-ap99kDrTiq7PDg6sGzmOA], primary term [0], message [after new shard recovery]]]: took [8ms] done applying updated cluster_state (version: 4, uuid: Ve7La_RPQCmQdfMiBHlsEA) +[16:16:09,941][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [after new shard recovery], shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][1]], allocation id 
[LE9gdszmSEChyZPES-ybTQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: execute +[16:16:09,941][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][3] starting shard [test][3], node[Z1dFIC3HQWqRSaRc5uCucQ], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=eeSTCfSDS6q4q5GtYVmMvQ], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:09.545Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [after new shard recovery]]) +[16:16:09,942][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[Z1dFIC3][generic][T#3]] no translog ID present in the current generation - creating one +[16:16:09,942][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][1] starting shard [test][1], node[Z1dFIC3HQWqRSaRc5uCucQ], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=LE9gdszmSEChyZPES-ybTQ], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:09.545Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [after new shard recovery]]) +[16:16:09,942][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][0] starting shard [test][0], node[Z1dFIC3HQWqRSaRc5uCucQ], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=9TYM88MbQgKcZw0LsFP50g], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:09.545Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [after new shard recovery]]) +[16:16:09,944][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] cluster state updated, version [5], source [shard-started[shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [after new shard recovery], shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as 
initializing, but shard state is [POST_RECOVERY], mark shard as started]]] +[16:16:09,944][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] publishing cluster state version [5] +[16:16:09,944][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] set local cluster state to version 5 +[16:16:09,944][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store] +[16:16:09,944][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#3]] recovery completed from [shard_store], took [8ms] +[16:16:09,944][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#3]] [test][4] sending [internal:cluster/shard/started] to [Z1dFIC3HQWqRSaRc5uCucQ] for shard entry [shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [after new shard recovery]] +[16:16:09,944][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] +[16:16:09,944][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#3]] [test][4] received shard started for [shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [after new shard recovery]] +[16:16:09,945][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] +[16:16:09,945][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][4] sending [internal:cluster/shard/started] to [Z1dFIC3HQWqRSaRc5uCucQ] for shard entry [shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] +[16:16:09,945][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][4] received shard started for [shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]] +[16:16:09,945][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] +[16:16:09,946][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [after new shard recovery], shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but 
shard state is [POST_RECOVERY], mark shard as started], shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: took [4ms] done applying updated cluster_state (version: 5, uuid: oOvhqM7VQsWas0uMr-oqhA) +[16:16:09,946][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: execute +[16:16:09,946][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][4] starting shard [test][4], node[Z1dFIC3HQWqRSaRc5uCucQ], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=kiGZYLwnTVaEWyUf14wAfQ], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:09.545Z], delayed=false, allocation_status[deciders_throttled]] (shard started task: [shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [after new shard recovery]]) +[16:16:09,947][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] cluster state updated, version [6], source [shard-started[shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]] +[16:16:09,947][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] publishing cluster state version [6] +[16:16:09,948][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] set local cluster state to version 6 +[16:16:09,948][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]] +[16:16:09,949][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: took [2ms] done applying updated cluster_state (version: 6, uuid: KH-Kc4cnRDWwj85zwrcSdQ) +[16:16:10,019][INFO ][test ][Test worker] stopping nodes 
+[16:16:10,019][INFO ][org.elasticsearch.node.Node][Test worker] stopping ... +[16:16:10,020][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test] closing ... (reason [shutdown]) +[16:16:10,021][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/vk4gpyojS2iq8eEZ4MdMIw] closing index service (reason [shutdown]) +[16:16:10,021][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closing... (reason: [shutdown]) +[16:16:10,021][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] +[16:16:10,021][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk +[16:16:10,021][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock +[16:16:10,021][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock +[16:16:10,021][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed +[16:16:10,022][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] +[16:16:10,022][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 +[16:16:10,022][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closed (reason: [shutdown]) +[16:16:10,022][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closing... (reason: [shutdown]) +[16:16:10,022][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] +[16:16:10,023][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk +[16:16:10,023][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock +[16:16:10,023][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock +[16:16:10,023][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed +[16:16:10,024][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] +[16:16:10,024][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 +[16:16:10,024][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closed (reason: [shutdown]) +[16:16:10,024][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closing... 
(reason: [shutdown]) +[16:16:10,024][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] +[16:16:10,024][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk +[16:16:10,024][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock +[16:16:10,024][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock +[16:16:10,025][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed +[16:16:10,026][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] +[16:16:10,026][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 +[16:16:10,026][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closed (reason: [shutdown]) +[16:16:10,026][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closing... (reason: [shutdown]) +[16:16:10,027][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] +[16:16:10,027][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk +[16:16:10,033][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock +[16:16:10,033][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock +[16:16:10,033][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed +[16:16:10,034][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] +[16:16:10,034][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 +[16:16:10,034][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closed (reason: [shutdown]) +[16:16:10,034][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closing... 
(reason: [shutdown]) +[16:16:10,035][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown] +[16:16:10,035][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk +[16:16:10,035][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock +[16:16:10,035][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock +[16:16:10,035][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed +[16:16:10,036][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api] +[16:16:10,036][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0 +[16:16:10,036][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closed (reason: [shutdown]) +[16:16:10,036][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close] +[16:16:10,037][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][indices_shutdown[T#1]] full cache clear, reason [close] +[16:16:10,037][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close] +[16:16:10,037][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/vk4gpyojS2iq8eEZ4MdMIw] closed... (reason [shutdown]) +[16:16:10,037][INFO ][org.elasticsearch.node.Node][Test worker] stopped +[16:16:10,038][INFO ][org.elasticsearch.node.Node][Test worker] closing ... +[16:16:10,040][INFO ][org.elasticsearch.node.Node][Test worker] closed +[16:16:10,045][INFO ][test ][Test worker] data files wiped
    @@ -364,7 +364,7 @@

    Standard output

    Generated by -Gradle 3.2.1 at 26.01.2017 11:31:14

    +Gradle 3.2.1 at 26.01.2017 16:16:15

[The remaining hunks of this commit update the other regenerated Gradle test reports; in each file the durations and embedded log timestamps reflect the 16:16 run, and the footer changes from "Gradle 3.2.1 at 26.01.2017 11:31:14" to "Gradle 3.2.1 at 26.01.2017 16:16:15":
 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangProfileTest.html (index 324e477..465d759): class duration 0.001s to 0s
 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMappingTest.html (index 272e6f1..c68a3fe): class duration 3.185s to 2.649s, per-test timings updated, base64 JsonParseException traces in the standard output re-timestamped
 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LanguageTest.html (index fcbdbb9..d1b0bd5): footer timestamp only
 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.NGramTest.html (index 33dd1e7..60dcd10): footer timestamp only
 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.SimpleDetectorTest.html (index fadb992..741be9b): duration 0.245s to 0.171s
 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.SimpleHttpTest.html (index c418bb8..75c4ba1): duration 0s to 0.001s
 docs/test/index.html (index b7dadda..ea85d03): total duration 1m6.08s to 1m6.04s, per-class summary durations updated
 docs/test/packages/org.xbib.elasticsearch.index.mapper.langdetect.html (index df40b13..5ab9e1a): same duration updates as the summary page]
    From d9815c578c8d26a6a01d10c3f17819eb70550604 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=CC=88rg=20Prante?= Date: Thu, 2 Mar 2017 18:37:49 +0100 Subject: [PATCH 08/19] update to Elasticsearch 5.2.1 --- README.adoc | 39 +- build.gradle | 45 +- docs/javadoc/allclasses-frame.html | 35 - docs/javadoc/allclasses-noframe.html | 35 - docs/javadoc/coderay-asciidoctor.css | 89 --- docs/javadoc/constant-values.html | 192 ----- docs/javadoc/deprecated-list.html | 121 --- docs/javadoc/help-doc.html | 222 ------ docs/javadoc/index-all.html | 452 ----------- docs/javadoc/index.html | 75 -- .../action/langdetect/LangdetectAction.html | 325 -------- .../action/langdetect/LangdetectRequest.html | 423 ---------- .../langdetect/LangdetectRequestBuilder.html | 308 -------- .../action/langdetect/LangdetectResponse.html | 417 ---------- .../langdetect/TransportLangdetectAction.html | 329 -------- .../action/langdetect/package-frame.html | 23 - .../action/langdetect/package-summary.html | 155 ---- .../action/langdetect/package-tree.html | 178 ----- .../common/langdetect/LangProfile.html | 351 --------- .../common/langdetect/LangdetectService.html | 369 --------- .../common/langdetect/Language.html | 345 --------- .../LanguageDetectionException.html | 264 ------- .../common/langdetect/NGram.html | 334 -------- .../common/langdetect/package-frame.html | 26 - .../common/langdetect/package-summary.html | 166 ---- .../common/langdetect/package-tree.html | 150 ---- .../langdetect/LangdetectMapper.Builder.html | 601 -------------- .../langdetect/LangdetectMapper.Defaults.html | 274 ------- .../LangdetectMapper.LanguageTo.Builder.html | 300 ------- .../LangdetectMapper.LanguageTo.html | 290 ------- .../LangdetectMapper.TypeParser.html | 300 ------- .../mapper/langdetect/LangdetectMapper.html | 501 ------------ .../mapper/langdetect/package-frame.html | 24 - .../mapper/langdetect/package-summary.html | 159 ---- .../index/mapper/langdetect/package-tree.html | 159 ---- .../plugin/langdetect/LangdetectPlugin.html | 352 --------- .../plugin/langdetect/package-frame.html | 19 - .../plugin/langdetect/package-summary.html | 139 ---- .../plugin/langdetect/package-tree.html | 138 ---- .../langdetect/RestLangdetectAction.html | 352 --------- .../rest/action/langdetect/package-frame.html | 19 - .../action/langdetect/package-summary.html | 139 ---- .../rest/action/langdetect/package-tree.html | 142 ---- docs/javadoc/overview-frame.html | 24 - docs/javadoc/overview-summary.html | 162 ---- docs/javadoc/overview-tree.html | 235 ------ docs/javadoc/package-list | 5 - docs/javadoc/script.js | 30 - docs/javadoc/serialized-form.html | 138 ---- docs/javadoc/stylesheet.css | 732 ------------------ ....mapper.langdetect.DetectLanguageTest.html | 124 --- ....index.mapper.langdetect.DetectorTest.html | 116 --- ...apper.langdetect.LangDetectActionTest.html | 471 ----------- ...apper.langdetect.LangDetectBinaryTest.html | 369 --------- ...pper.langdetect.LangDetectChineseTest.html | 371 --------- ...dex.mapper.langdetect.LangProfileTest.html | 121 --- ...pper.langdetect.LangdetectMappingTest.html | 348 --------- ....index.mapper.langdetect.LanguageTest.html | 96 --- ...rch.index.mapper.langdetect.NGramTest.html | 111 --- ....mapper.langdetect.SimpleDetectorTest.html | 106 --- ...ndex.mapper.langdetect.SimpleHttpTest.html | 101 --- docs/test/css/base-style.css | 179 ----- docs/test/css/style.css | 84 -- docs/test/index.html | 238 ------ docs/test/js/report.js | 194 ----- ...elasticsearch.index.mapper.langdetect.html | 219 ------ 
gradle.properties | 8 +- gradle/ext.gradle | 10 + .../common/langdetect/LangProfile.java | 2 +- .../common/langdetect/LangdetectService.java | 11 +- .../mapper/langdetect/LangdetectMapper.java | 20 +- .../xbib/elasticsearch/MapperTestUtils.java | 29 +- .../langdetect/LangdetectMappingTest.java | 2 +- .../mapper/langdetect/base64-2-mapping.json | 2 +- .../mapper/langdetect/base64-mapping.json | 17 +- .../mapper/langdetect/mapping-to-fields.json | 2 +- .../index/mapper/langdetect/settings.json | 40 +- .../mapper/langdetect/short-text-mapping.json | 16 +- .../mapper/langdetect/simple-mapping.json | 16 +- 79 files changed, 138 insertions(+), 13987 deletions(-) delete mode 100644 docs/javadoc/allclasses-frame.html delete mode 100644 docs/javadoc/allclasses-noframe.html delete mode 100644 docs/javadoc/coderay-asciidoctor.css delete mode 100644 docs/javadoc/constant-values.html delete mode 100644 docs/javadoc/deprecated-list.html delete mode 100644 docs/javadoc/help-doc.html delete mode 100644 docs/javadoc/index-all.html delete mode 100644 docs/javadoc/index.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectAction.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectRequest.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectRequestBuilder.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/action/langdetect/TransportLangdetectAction.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-frame.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-summary.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-tree.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/common/langdetect/LangProfile.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/common/langdetect/LangdetectService.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/common/langdetect/Language.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/common/langdetect/LanguageDetectionException.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/common/langdetect/NGram.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-frame.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-summary.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-tree.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.Builder.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.Defaults.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.LanguageTo.Builder.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.LanguageTo.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.TypeParser.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-frame.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-summary.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-tree.html delete mode 
100644 docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/LangdetectPlugin.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-frame.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-summary.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-tree.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-frame.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-summary.html delete mode 100644 docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-tree.html delete mode 100644 docs/javadoc/overview-frame.html delete mode 100644 docs/javadoc/overview-summary.html delete mode 100644 docs/javadoc/overview-tree.html delete mode 100644 docs/javadoc/package-list delete mode 100644 docs/javadoc/script.js delete mode 100644 docs/javadoc/serialized-form.html delete mode 100644 docs/javadoc/stylesheet.css delete mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectLanguageTest.html delete mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectorTest.html delete mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectActionTest.html delete mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectBinaryTest.html delete mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectChineseTest.html delete mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangProfileTest.html delete mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMappingTest.html delete mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LanguageTest.html delete mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.NGramTest.html delete mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.SimpleDetectorTest.html delete mode 100644 docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.SimpleHttpTest.html delete mode 100644 docs/test/css/base-style.css delete mode 100644 docs/test/css/style.css delete mode 100644 docs/test/index.html delete mode 100644 docs/test/js/report.js delete mode 100644 docs/test/packages/org.xbib.elasticsearch.index.mapper.langdetect.html create mode 100644 gradle/ext.gradle diff --git a/README.adoc b/README.adoc index 31d0522..ba9df09 100644 --- a/README.adoc +++ b/README.adoc @@ -89,26 +89,27 @@ Here is a list of languages code recognized: [frame="all"] |=== | Plugin version | Elasticsearch version | Release date +| 5.2.1.0 | 5.2.1 | Mar 2, 2017 | 5.1.2.0 | 5.1.2 | Jan 26, 2017 -| 2.4.4.1 | 2.4.4 | Jan 25, 2017 -| 2.3.3.0 | 2.3.3 | Jun 11, 2016 -| 2.3.2.0 | 2.3.2 | Jun 11, 2016 -| 2.3.1.0 | 2.3.1 | Apr 11, 2016 -| 2.2.1.0 | 2.2.1 | Apr 11, 2016 -| 2.2.0.2 | 2.2.0 | Mar 25, 2016 -| 2.2.0.1 | 2.2.0 | Mar 6, 2016 -| 2.1.1.0 | 2.1.1 | Dec 20, 2015 -| 2.1.0.0 | 2.1.0 | Dec 15, 2015 -| 2.0.1.0 | 2.0.1 | Dec 15, 2015 -| 2.0.0.0 | 2.0.0 | Nov 12, 2015 -| 1.6.0.0 | 1.6.0 | Jul 1, 2015 -| 1.4.4.1 | 1.4.4 | Apr 3, 2015 -| 1.4.4.1 | 1.4.4 | Mar 4, 2015 -| 1.4.0.2 | 1.4.0 | Nov 26, 2014 -| 1.4.0.1 | 1.4.0 | Nov 20, 2014 -| 1.4.0.0 | 1.4.0 | Nov 14, 2014 -| 1.3.1.0 | 1.3.0 | Jul 30, 2014 -| 1.2.1.1 | 1.2.1 | Jun 18, 2014 +| 2.4.4.1 | 2.4.4 | Jan 25, 
2017 +| 2.3.3.0 | 2.3.3 | Jun 11, 2016 +| 2.3.2.0 | 2.3.2 | Jun 11, 2016 +| 2.3.1.0 | 2.3.1 | Apr 11, 2016 +| 2.2.1.0 | 2.2.1 | Apr 11, 2016 +| 2.2.0.2 | 2.2.0 | Mar 25, 2016 +| 2.2.0.1 | 2.2.0 | Mar 6, 2016 +| 2.1.1.0 | 2.1.1 | Dec 20, 2015 +| 2.1.0.0 | 2.1.0 | Dec 15, 2015 +| 2.0.1.0 | 2.0.1 | Dec 15, 2015 +| 2.0.0.0 | 2.0.0 | Nov 12, 2015 +| 1.6.0.0 | 1.6.0 | Jul 1, 2015 +| 1.4.4.1 | 1.4.4 | Apr 3, 2015 +| 1.4.4.1 | 1.4.4 | Mar 4, 2015 +| 1.4.0.2 | 1.4.0 | Nov 26, 2014 +| 1.4.0.1 | 1.4.0 | Nov 20, 2014 +| 1.4.0.0 | 1.4.0 | Nov 14, 2014 +| 1.3.1.0 | 1.3.0 | Jul 30, 2014 +| 1.2.1.1 | 1.2.1 | Jun 18, 2014 |=== ## Installation diff --git a/build.gradle b/build.gradle index 3037448..f7876f5 100644 --- a/build.gradle +++ b/build.gradle @@ -16,22 +16,6 @@ printf "Host: %s\nOS: %s %s %s\nJVM: %s %s %s %s\nGroovy: %s\nGradle: %s\n" + GroovySystem.getVersion(), gradle.gradleVersion -ext { - pluginName = 'langdetect' - pluginClassname = 'org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin' - pluginDescription = 'Language detection for Elasticsearch' - user = 'jprante' - name = 'elasticsearch-langdetect' - scmUrl = 'https://github.com/' + user + '/' + name - scmConnection = 'scm:git:git://github.com/' + user + '/' + name + '.git' - scmDeveloperConnection = 'scm:git:git://github.com/' + user + '/' + name + '.git' - versions = [ - 'elasticsearch' : '5.1.2', - 'log4j': '2.7', - 'junit' : '4.12' - ] -} - apply plugin: 'java' apply plugin: 'maven' apply plugin: 'signing' @@ -42,7 +26,6 @@ apply plugin: "jacoco" apply plugin: 'org.xbib.gradle.plugin.asciidoctor' repositories { - mavenLocal() mavenCentral() } @@ -62,13 +45,18 @@ configurations { } } +apply from: 'gradle/ext.gradle' +apply from: 'gradle/sonarqube.gradle' +apply from: 'gradle/publish.gradle' + + dependencies { - compile "org.elasticsearch:elasticsearch:${versions.elasticsearch}" - compileOnly "org.apache.logging.log4j:log4j-core:${versions.log4j}" - testCompile "junit:junit:${versions.junit}" - testCompile "org.apache.logging.log4j:log4j-core:${versions.log4j}" - asciidoclet 'org.asciidoctor:asciidoclet:1.5.4' - wagon 'org.apache.maven.wagon:wagon-ssh-external:2.10' + compile "org.elasticsearch:elasticsearch:${project.property('elasticsearch.version')}" + compileOnly "org.apache.logging.log4j:log4j-core:${project.property('log4j.version')}" + testCompile "junit:junit:${project.property('junit.version')}" + testCompile "org.apache.logging.log4j:log4j-core:${project.property('log4j.version')}" + asciidoclet "org.asciidoctor:asciidoclet:${project.property('asciidoclet.version')}" + wagon "org.apache.maven.wagon:wagon-ssh-external:${project.property('wagon-ssh-external.version')}" distJars "${project.group}:${project.name}:${project.version}" } @@ -77,17 +65,15 @@ targetCompatibility = JavaVersion.VERSION_1_8 [compileJava, compileTestJava]*.options*.encoding = 'UTF-8' tasks.withType(JavaCompile) { - options.compilerArgs << "-Xlint:all" << "-profile" << "compact2" + options.compilerArgs << "-Xlint:all" << "-profile" << "compact1" } - test { systemProperties['path.home'] = System.getProperty("user.dir") testLogging { showStandardStreams = false exceptionFormat = 'full' } - reports.html.destination = "docs/test" } task makePluginDescriptor(type: Copy) { @@ -103,7 +89,7 @@ task makePluginDescriptor(type: Copy) { 'isolated': true, 'version': project.property('version'), 'javaVersion': project.property('targetCompatibility'), - 'elasticsearchVersion' : versions.elasticsearch + 'elasticsearchVersion' : project.property('elasticsearch.version') ] 
]) } @@ -129,7 +115,6 @@ clean { asciidoctor { backends 'html5' - outputDir = file('docs') separateOutputDirs = false attributes 'source-highlighter': 'coderay', toc : '', @@ -145,7 +130,6 @@ javadoc { options.addStringOption "-base-dir", "${projectDir}" options.addStringOption "-attribute", "name=${project.name},version=${project.version},title-link=https://github.com/${user}/${project.name}" - options.destinationDirectory(file("${projectDir}/docs/javadoc")) configure(options) { noTimestamp = true } @@ -172,6 +156,3 @@ if (project.hasProperty('signing.keyId')) { sign configurations.archives } } - -apply from: 'gradle/sonarqube.gradle' -apply from: 'gradle/publish.gradle' diff --git a/docs/javadoc/allclasses-frame.html b/docs/javadoc/allclasses-frame.html deleted file mode 100644 index 9259a8f..0000000 --- a/docs/javadoc/allclasses-frame.html +++ /dev/null @@ -1,35 +0,0 @@ - - - - - -All Classes (elasticsearch-langdetect 5.1.2.0 API) - - - - -

    All Classes

    - - - diff --git a/docs/javadoc/allclasses-noframe.html b/docs/javadoc/allclasses-noframe.html deleted file mode 100644 index 41b7ee7..0000000 --- a/docs/javadoc/allclasses-noframe.html +++ /dev/null @@ -1,35 +0,0 @@ - - - - - -All Classes (elasticsearch-langdetect 5.1.2.0 API) - - - - -

    All Classes

    - - - diff --git a/docs/javadoc/coderay-asciidoctor.css b/docs/javadoc/coderay-asciidoctor.css deleted file mode 100644 index ce7c72e..0000000 --- a/docs/javadoc/coderay-asciidoctor.css +++ /dev/null @@ -1,89 +0,0 @@ -/* Stylesheet for CodeRay to match GitHub theme | MIT License | http://foundation.zurb.com */ -/*pre.CodeRay {background-color:#f7f7f8;}*/ -.CodeRay .line-numbers{border-right:1px solid #d8d8d8;padding:0 0.5em 0 .25em} -.CodeRay span.line-numbers{display:inline-block;margin-right:.5em;color:rgba(0,0,0,.3)} -.CodeRay .line-numbers strong{font-weight: normal} -table.CodeRay{border-collapse:separate;border-spacing:0;margin-bottom:0;border:0;background:none} -table.CodeRay td{vertical-align: top} -table.CodeRay td.line-numbers{text-align:right} -table.CodeRay td.line-numbers>pre{padding:0;color:rgba(0,0,0,.3)} -table.CodeRay td.code{padding:0 0 0 .5em} -table.CodeRay td.code>pre{padding:0} -.CodeRay .debug{color:#fff !important;background:#000080 !important} -.CodeRay .annotation{color:#007} -.CodeRay .attribute-name{color:#000080} -.CodeRay .attribute-value{color:#700} -.CodeRay .binary{color:#509} -.CodeRay .comment{color:#998;font-style:italic} -.CodeRay .char{color:#04d} -.CodeRay .char .content{color:#04d} -.CodeRay .char .delimiter{color:#039} -.CodeRay .class{color:#458;font-weight:bold} -.CodeRay .complex{color:#a08} -.CodeRay .constant,.CodeRay .predefined-constant{color:#008080} -.CodeRay .color{color:#099} -.CodeRay .class-variable{color:#369} -.CodeRay .decorator{color:#b0b} -.CodeRay .definition{color:#099} -.CodeRay .delimiter{color:#000} -.CodeRay .doc{color:#970} -.CodeRay .doctype{color:#34b} -.CodeRay .doc-string{color:#d42} -.CodeRay .escape{color:#666} -.CodeRay .entity{color:#800} -.CodeRay .error{color:#808} -.CodeRay .exception{color:inherit} -.CodeRay .filename{color:#099} -.CodeRay .function{color:#900;font-weight:bold} -.CodeRay .global-variable{color:#008080} -.CodeRay .hex{color:#058} -.CodeRay .integer,.CodeRay .float{color:#099} -.CodeRay .include{color:#555} -.CodeRay .inline{color:#00} -.CodeRay .inline .inline{background:#ccc} -.CodeRay .inline .inline .inline{background:#bbb} -.CodeRay .inline .inline-delimiter{color:#d14} -.CodeRay .inline-delimiter{color:#d14} -.CodeRay .important{color:#555;font-weight:bold} -.CodeRay .interpreted{color:#b2b} -.CodeRay .instance-variable{color:#008080} -.CodeRay .label{color:#970} -.CodeRay .local-variable{color:#963} -.CodeRay .octal{color:#40e} -.CodeRay .predefined{color:#369} -.CodeRay .preprocessor{color:#579} -.CodeRay .pseudo-class{color:#555} -.CodeRay .directive{font-weight:bold} -.CodeRay .type{font-weight:bold} -.CodeRay .predefined-type{color:inherit} -.CodeRay .reserved,.CodeRay .keyword {color:#000;font-weight:bold} -.CodeRay .key{color:#808} -.CodeRay .key .delimiter{color:#606} -.CodeRay .key .char{color:#80f} -.CodeRay .value{color:#088} -.CodeRay .regexp .delimiter{color:#808} -.CodeRay .regexp .content{color:#808} -.CodeRay .regexp .modifier{color:#808} -.CodeRay .regexp .char{color:#d14} -.CodeRay .regexp .function{color:#404;font-weight:bold} -.CodeRay .string{color:#d20} -.CodeRay .string .string .string{background:#ffd0d0} -.CodeRay .string .content{color:#d14} -.CodeRay .string .char{color:#d14} -.CodeRay .string .delimiter{color:#d14} -.CodeRay .shell{color:#d14} -.CodeRay .shell .delimiter{color:#d14} -.CodeRay .symbol{color:#990073} -.CodeRay .symbol .content{color:#a60} -.CodeRay .symbol .delimiter{color:#630} -.CodeRay .tag{color:#008080} -.CodeRay .tag-special{color:#d70} 
-.CodeRay .variable{color:#036} -.CodeRay .insert{background:#afa} -.CodeRay .delete{background:#faa} -.CodeRay .change{color:#aaf;background:#007} -.CodeRay .head{color:#f8f;background:#505} -.CodeRay .insert .insert{color:#080} -.CodeRay .delete .delete{color:#800} -.CodeRay .change .change{color:#66f} -.CodeRay .head .head{color:#f4f} diff --git a/docs/javadoc/constant-values.html b/docs/javadoc/constant-values.html deleted file mode 100644 index 582f885..0000000 --- a/docs/javadoc/constant-values.html +++ /dev/null @@ -1,192 +0,0 @@ - - - - - -Constant Field Values (elasticsearch-langdetect 5.1.2.0 API) - - - - - - - - - - -
    -

    Constant Field Values

    -

    Contents

    - -
    -
    - - -

    org.xbib.*

    -
      -
    • - - - - - - - - - - - - - - -
      org.xbib.elasticsearch.action.langdetect.LangdetectAction 
      Modifier and TypeConstant FieldValue
      - -public static final java.lang.StringNAME"langdetect"
      -
    • -
    -
      -
    • - - - - - - - - - - - - - - -
      org.xbib.elasticsearch.common.langdetect.NGram 
      Modifier and TypeConstant FieldValue
      - -public static final intN_GRAM3
      -
    • -
    -
      -
    • - - - - - - - - - - - - - - -
      org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper 
      Modifier and TypeConstant FieldValue
      - -public static final java.lang.StringMAPPER_TYPE"langdetect"
      -
    • -
    -
    - - - - - - diff --git a/docs/javadoc/deprecated-list.html b/docs/javadoc/deprecated-list.html deleted file mode 100644 index 2f92db8..0000000 --- a/docs/javadoc/deprecated-list.html +++ /dev/null @@ -1,121 +0,0 @@ - - - - - -Deprecated List (elasticsearch-langdetect 5.1.2.0 API) - - - - - - - -
    - - - - - - - -
    - - -
    -

    Deprecated API

    -

    Contents

    -
    - -
    - - - - - - - -
    - - - - diff --git a/docs/javadoc/help-doc.html b/docs/javadoc/help-doc.html deleted file mode 100644 index 0c30ac8..0000000 --- a/docs/javadoc/help-doc.html +++ /dev/null @@ -1,222 +0,0 @@ - - - - - -API Help (elasticsearch-langdetect 5.1.2.0 API) - - - - - - - -
    - - - - - - - -
    - - -
    -

    How This API Document Is Organized

    -
    This API (Application Programming Interface) document has pages corresponding to the items in the navigation bar, described as follows.
    -
    -
    -
      -
    • -

      Overview

      -

      The Overview page is the front page of this API document and provides a list of all packages with a summary for each. This page can also contain an overall description of the set of packages.

      -
    • -
    • -

      Package

      -

      Each package has a page that contains a list of its classes and interfaces, with a summary for each. This page can contain six categories:

      -
        -
      • Interfaces (italic)
      • -
      • Classes
      • -
      • Enums
      • -
      • Exceptions
      • -
      • Errors
      • -
      • Annotation Types
      • -
      -
    • -
    • -

      Class/Interface

      -

      Each class, interface, nested class and nested interface has its own separate page. Each of these pages has three sections consisting of a class/interface description, summary tables, and detailed member descriptions:

      -
        -
      • Class inheritance diagram
      • -
      • Direct Subclasses
      • -
      • All Known Subinterfaces
      • -
      • All Known Implementing Classes
      • -
      • Class/interface declaration
      • -
      • Class/interface description
      • -
      -
        -
      • Nested Class Summary
      • -
      • Field Summary
      • -
      • Constructor Summary
      • -
      • Method Summary
      • -
      -
        -
      • Field Detail
      • -
      • Constructor Detail
      • -
      • Method Detail
      • -
      -

      Each summary entry contains the first sentence from the detailed description for that item. The summary entries are alphabetical, while the detailed descriptions are in the order they appear in the source code. This preserves the logical groupings established by the programmer.

      -
    • -
    • -

      Annotation Type

      -

      Each annotation type has its own separate page with the following sections:

      -
        -
      • Annotation Type declaration
      • -
      • Annotation Type description
      • -
      • Required Element Summary
      • -
      • Optional Element Summary
      • -
      • Element Detail
      • -
      -
    • -
    • -

      Enum

      -

      Each enum has its own separate page with the following sections:

      -
        -
      • Enum declaration
      • -
      • Enum description
      • -
      • Enum Constant Summary
      • -
      • Enum Constant Detail
      • -
      -
    • -
    • -

      Tree (Class Hierarchy)

      -

      There is a Class Hierarchy page for all packages, plus a hierarchy for each package. Each hierarchy page contains a list of classes and a list of interfaces. The classes are organized by inheritance structure starting with java.lang.Object. The interfaces do not inherit from java.lang.Object.

      -
        -
      • When viewing the Overview page, clicking on "Tree" displays the hierarchy for all packages.
      • -
      • When viewing a particular package, class or interface page, clicking "Tree" displays the hierarchy for only that package.
      • -
      -
    • -
    • -

      Deprecated API

      -

      The Deprecated API page lists all of the API that have been deprecated. A deprecated API is not recommended for use, generally due to improvements, and a replacement API is usually given. Deprecated APIs may be removed in future implementations.

      -
    • -
    • -

      Index

      -

      The Index contains an alphabetic list of all classes, interfaces, constructors, methods, and fields.

      -
    • -
    • -

      Prev/Next

      -

      These links take you to the next or previous class, interface, package, or related page.

      -
    • -
    • -

      Frames/No Frames

      -

      These links show and hide the HTML frames. All pages are available with or without frames.

      -
    • -
    • -

      All Classes

      -

      The All Classes link shows all classes and interfaces except non-static nested types.

      -
    • -
    • -

      Serialized Form

      -

      Each serializable or externalizable class has a description of its serialization fields and methods. This information is of interest to re-implementors, not to developers using the API. While there is no link in the navigation bar, you can get to this information by going to any serialized class and clicking "Serialized Form" in the "See also" section of the class description.

      -
    • -
    • -

      Constant Field Values

      -

      The Constant Field Values page lists the static final fields and their values.

      -
    • -
    -This help file applies to API documentation generated using the standard doclet.
    - -
    - - - - - - - -
    - - - - diff --git a/docs/javadoc/index-all.html b/docs/javadoc/index-all.html deleted file mode 100644 index db2dcec..0000000 --- a/docs/javadoc/index-all.html +++ /dev/null @@ -1,452 +0,0 @@ - - - - - -Index (elasticsearch-langdetect 5.1.2.0 API) - - - - - - - -
    - - - - - - - -
    - - -
    A B C D G I L M N O P R S T V W  - - -

    A

    -
    -
    add(String) - Method in class org.xbib.elasticsearch.common.langdetect.LangProfile
    -
     
    -
    add(String, String) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo.Builder
    -
     
    -
    add(Map<String, Object>) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo.Builder
    -
     
    -
    addChar(char) - Method in class org.xbib.elasticsearch.common.langdetect.NGram
    -
     
    -
    addProfile(LangProfile, int, int) - Method in class org.xbib.elasticsearch.common.langdetect.LangdetectService
    -
     
    -
    alpha(double) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    alphaWidth(double) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    - - - -

    B

    -
    -
    baseFreq(int) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    binary(boolean) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    build(Mapper.BuilderContext) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    build() - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo.Builder
    -
     
    -
    Builder(String) - Constructor for class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    builder() - Static method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo
    -
     
    -
    Builder() - Constructor for class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo.Builder
    -
     
    -
    - - - -

    C

    -
    -
    contentType() - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper
    -
     
    -
    convThreshold(double) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    - - - -

    D

    -
    -
    Defaults() - Constructor for class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Defaults
    -
     
    -
    detectAll(String) - Method in class org.xbib.elasticsearch.common.langdetect.LangdetectService
    -
     
    -
    doExecute(LangdetectRequest, ActionListener<LangdetectResponse>) - Method in class org.xbib.elasticsearch.action.langdetect.TransportLangdetectAction
    -
     
    -
    doXContentBody(XContentBuilder, boolean, ToXContent.Params) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper
    -
     
    -
    - - - -

    G

    -
    -
    get(int) - Method in class org.xbib.elasticsearch.common.langdetect.NGram
    -
     
    -
    getActions() - Method in class org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin
    -
     
    -
    getFreq() - Method in class org.xbib.elasticsearch.common.langdetect.LangProfile
    -
     
    -
    getLanguage() - Method in class org.xbib.elasticsearch.common.langdetect.Language
    -
     
    -
    getLanguages() - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectResponse
    -
     
    -
    getMappers() - Method in class org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin
    -
     
    -
    getName() - Method in class org.xbib.elasticsearch.common.langdetect.LangProfile
    -
     
    -
    getNWords() - Method in class org.xbib.elasticsearch.common.langdetect.LangProfile
    -
     
    -
    getProbability() - Method in class org.xbib.elasticsearch.common.langdetect.Language
    -
     
    -
    getProfile() - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectRequest
    -
     
    -
    getProfile() - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectResponse
    -
     
    -
    getProfile() - Method in class org.xbib.elasticsearch.common.langdetect.LangdetectService
    -
     
    -
    getRestHandlers() - Method in class org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin
    -
     
    -
    getSettings() - Method in class org.xbib.elasticsearch.common.langdetect.LangdetectService
    -
     
    -
    getText() - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectRequest
    -
     
    -
    - - - -

    I

    -
    -
    INSTANCE - Static variable in class org.xbib.elasticsearch.action.langdetect.LangdetectAction
    -
     
    -
    iterationLimit(int) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    - - - -

    L

    -
    -
    LANG_FIELD_TYPE - Static variable in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Defaults
    -
     
    -
    LangdetectAction - Class in org.xbib.elasticsearch.action.langdetect
    -
     
    -
    LangdetectMapper - Class in org.xbib.elasticsearch.index.mapper.langdetect
    -
     
    -
    LangdetectMapper(String, TextFieldMapper.TextFieldType, MappedFieldType, int, Settings, FieldMapper.MultiFields, FieldMapper.CopyTo, LangdetectMapper.LanguageTo, LangdetectService) - Constructor for class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper
    -
     
    -
    LangdetectMapper.Builder - Class in org.xbib.elasticsearch.index.mapper.langdetect
    -
     
    -
    LangdetectMapper.Defaults - Class in org.xbib.elasticsearch.index.mapper.langdetect
    -
     
    -
    LangdetectMapper.LanguageTo - Class in org.xbib.elasticsearch.index.mapper.langdetect
    -
     
    -
    LangdetectMapper.LanguageTo.Builder - Class in org.xbib.elasticsearch.index.mapper.langdetect
    -
     
    -
    LangdetectMapper.TypeParser - Class in org.xbib.elasticsearch.index.mapper.langdetect
    -
     
    -
    LangdetectPlugin - Class in org.xbib.elasticsearch.plugin.langdetect
    -
     
    -
    LangdetectPlugin() - Constructor for class org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin
    -
     
    -
    LangdetectRequest - Class in org.xbib.elasticsearch.action.langdetect
    -
     
    -
    LangdetectRequest() - Constructor for class org.xbib.elasticsearch.action.langdetect.LangdetectRequest
    -
     
    -
    LangdetectRequestBuilder - Class in org.xbib.elasticsearch.action.langdetect
    -
     
    -
    LangdetectRequestBuilder(ElasticsearchClient) - Constructor for class org.xbib.elasticsearch.action.langdetect.LangdetectRequestBuilder
    -
     
    -
    LangdetectResponse - Class in org.xbib.elasticsearch.action.langdetect
    -
     
    -
    LangdetectResponse() - Constructor for class org.xbib.elasticsearch.action.langdetect.LangdetectResponse
    -
     
    -
    LangdetectService - Class in org.xbib.elasticsearch.common.langdetect
    -
     
    -
    LangdetectService() - Constructor for class org.xbib.elasticsearch.common.langdetect.LangdetectService
    -
     
    -
    LangdetectService(Settings) - Constructor for class org.xbib.elasticsearch.common.langdetect.LangdetectService
    -
     
    -
    LangdetectService(Settings, String) - Constructor for class org.xbib.elasticsearch.common.langdetect.LangdetectService
    -
     
    -
    LangProfile - Class in org.xbib.elasticsearch.common.langdetect
    -
     
    -
    LangProfile() - Constructor for class org.xbib.elasticsearch.common.langdetect.LangProfile
    -
     
    -
    Language - Class in org.xbib.elasticsearch.common.langdetect
    -
     
    -
    Language(String, double) - Constructor for class org.xbib.elasticsearch.common.langdetect.Language
    -
     
    -
    LanguageDetectionException - Exception in org.xbib.elasticsearch.common.langdetect
    -
     
    -
    LanguageDetectionException(String) - Constructor for exception org.xbib.elasticsearch.common.langdetect.LanguageDetectionException
    -
     
    -
    languages(String[]) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    languageTo - Variable in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    languageTo(LangdetectMapper.LanguageTo) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    languageToFields() - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo
    -
     
    -
    loadProfileFromResource(String, int, int) - Method in class org.xbib.elasticsearch.common.langdetect.LangdetectService
    -
     
    -
    - - - -

    M

    -
    -
    map(Map<String, Object>) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    MAPPER_TYPE - Static variable in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper
    -
     
    -
    max(int) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    - - - -

    N

    -
    -
    N_GRAM - Static variable in class org.xbib.elasticsearch.common.langdetect.NGram
    -
     
    -
    NAME - Static variable in class org.xbib.elasticsearch.action.langdetect.LangdetectAction
    -
     
    -
    newRequestBuilder(ElasticsearchClient) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectAction
    -
     
    -
    newResponse() - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectAction
    -
     
    -
    NGram - Class in org.xbib.elasticsearch.common.langdetect
    -
     
    -
    NGram() - Constructor for class org.xbib.elasticsearch.common.langdetect.NGram
    -
     
    -
    normalize(char) - Static method in class org.xbib.elasticsearch.common.langdetect.NGram
    -
     
    -
    ntrials(int) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    - - - -

    O

    -
    -
    org.xbib.elasticsearch.action.langdetect - package org.xbib.elasticsearch.action.langdetect
    -
     
    -
    org.xbib.elasticsearch.common.langdetect - package org.xbib.elasticsearch.common.langdetect
    -
     
    -
    org.xbib.elasticsearch.index.mapper.langdetect - package org.xbib.elasticsearch.index.mapper.langdetect
    -
     
    -
    org.xbib.elasticsearch.plugin.langdetect - package org.xbib.elasticsearch.plugin.langdetect
    -
     
    -
    org.xbib.elasticsearch.rest.action.langdetect - package org.xbib.elasticsearch.rest.action.langdetect
    -
     
    -
    - - - -

    P

    -
    -
    parse(String, Map<String, Object>, Mapper.TypeParser.ParserContext) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.TypeParser
    -
     
    -
    parseCreateField(ParseContext, List<Field>) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper
    -
     
    -
    pattern(String) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    positionIncrementGap - Variable in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    positionIncrementGap(int) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    prepareRequest(RestRequest, NodeClient) - Method in class org.xbib.elasticsearch.rest.action.langdetect.RestLangdetectAction
    -
     
    -
    probThreshold(double) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    profile(String) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    - - - -

    R

    -
    -
    read(InputStream) - Method in class org.xbib.elasticsearch.common.langdetect.LangProfile
    -
     
    -
    readFrom(StreamInput) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectRequest
    -
     
    -
    readFrom(StreamInput) - Method in class org.xbib.elasticsearch.common.langdetect.Language
    -
     
    -
    RestLangdetectAction - Class in org.xbib.elasticsearch.rest.action.langdetect
    -
     
    -
    RestLangdetectAction(Settings, RestController) - Constructor for class org.xbib.elasticsearch.rest.action.langdetect.RestLangdetectAction
    -
     
    -
    - - - -

    S

    -
    -
    searchAnalyzer(NamedAnalyzer) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    searchQuotedAnalyzer(NamedAnalyzer) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    setFreq(Map<String, Integer>) - Method in class org.xbib.elasticsearch.common.langdetect.LangProfile
    -
     
    -
    setLanguages(List<Language>) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectResponse
    -
     
    -
    setName(String) - Method in class org.xbib.elasticsearch.common.langdetect.LangProfile
    -
     
    -
    setProfile(String) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectRequest
    -
     
    -
    setProfile(String) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectRequestBuilder
    -
     
    -
    setProfile(String) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectResponse
    -
     
    -
    setText(String) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectRequest
    -
     
    -
    setText(String) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectRequestBuilder
    -
     
    -
    settingsBuilder - Variable in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder
    -
     
    -
    status() - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectResponse
    -
     
    -
    - - - -

    T

    -
    -
    toString() - Method in class org.xbib.elasticsearch.common.langdetect.Language
    -
     
    -
    toXContent(XContentBuilder, ToXContent.Params) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectResponse
    -
     
    -
    toXContent(XContentBuilder, ToXContent.Params) - Method in class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.LanguageTo
    -
     
    -
    TransportLangdetectAction - Class in org.xbib.elasticsearch.action.langdetect
    -
     
    -
    TransportLangdetectAction(Settings, ThreadPool, ActionFilters, IndexNameExpressionResolver, TransportService) - Constructor for class org.xbib.elasticsearch.action.langdetect.TransportLangdetectAction
    -
     
    -
    TypeParser() - Constructor for class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.TypeParser
    -
     
    -
    - - - -

    V

    -
    -
    validate() - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectRequest
    -
     
    -
    - - - -

    W

    -
    -
    writeTo(StreamOutput) - Method in class org.xbib.elasticsearch.action.langdetect.LangdetectRequest
    -
     
    -
    writeTo(StreamOutput) - Method in class org.xbib.elasticsearch.common.langdetect.Language
    -
     
    -
    -A B C D G I L M N O P R S T V W 
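
The deleted index above still catalogs the plugin's detection API (LangdetectService, Language, LangProfile, NGram). As a rough orientation only, here is a minimal usage sketch against those signatures; it is not taken from the plugin's sources and assumes that detectAll(String) returns the same List<Language> that LangdetectResponse.getLanguages() exposes, and that empty Settings select the bundled default profile:

import java.util.List;

import org.elasticsearch.common.settings.Settings;
import org.xbib.elasticsearch.common.langdetect.LangdetectService;
import org.xbib.elasticsearch.common.langdetect.Language;

public final class DetectSketch {

    public static void main(String[] args) throws Exception {
        // Assumption: empty settings load the bundled language profiles,
        // matching the list logged by the SimpleDetectorTest output earlier in this patch.
        LangdetectService service = new LangdetectService(Settings.EMPTY);
        // Assumption: detectAll returns a List<Language> of candidates with probabilities.
        List<Language> candidates = service.detectAll("Dies ist ein kurzer deutscher Satz.");
        for (Language language : candidates) {
            System.out.println(language.getLanguage() + " " + language.getProbability());
        }
    }
}
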
    - -
    - - - - - - - -
    - - - - diff --git a/docs/javadoc/index.html b/docs/javadoc/index.html deleted file mode 100644 index 6656182..0000000 --- a/docs/javadoc/index.html +++ /dev/null @@ -1,75 +0,0 @@ - - - - - -elasticsearch-langdetect 5.1.2.0 API - - - - - - - - - -<noscript> -<div>JavaScript is disabled on your browser.</div> -</noscript> -<h2>Frame Alert</h2> -<p>This document is designed to be viewed using the frames feature. If you see this message, you are using a non-frame-capable web client. Link to <a href="overview-summary.html">Non-frame version</a>.</p> - - - diff --git a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectAction.html b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectAction.html deleted file mode 100644 index 2c906dc..0000000 --- a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectAction.html +++ /dev/null @@ -1,325 +0,0 @@ - - - - - -LangdetectAction (elasticsearch-langdetect 5.1.2.0 API) - - - - - - - - - - - -
    -
    org.xbib.elasticsearch.action.langdetect
    -

    Class LangdetectAction

    -
    -
    - -
    - -
    -
    - -
    -
    - -
    -
    - - - - - - - diff --git a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectRequest.html b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectRequest.html deleted file mode 100644 index 53f0d30..0000000 --- a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectRequest.html +++ /dev/null @@ -1,423 +0,0 @@ - - - - - -LangdetectRequest (elasticsearch-langdetect 5.1.2.0 API) - - - - - - - - - - - -
    -
    org.xbib.elasticsearch.action.langdetect
    -

    Class LangdetectRequest

    -
    -
    -
      -
    • java.lang.Object
    • -
    • -
        -
      • org.elasticsearch.transport.TransportMessage
      • -
      • -
          -
        • org.elasticsearch.transport.TransportRequest
        • -
        • -
            -
          • org.elasticsearch.action.ActionRequest
          • -
          • -
              -
            • org.xbib.elasticsearch.action.langdetect.LangdetectRequest
            • -
            -
          • -
          -
        • -
        -
      • -
      -
    • -
    -
    -
      -
    • -
      -
      All Implemented Interfaces:
      -
      org.elasticsearch.common.io.stream.Streamable
      -
      -
      -
      -
      public class LangdetectRequest
      -extends org.elasticsearch.action.ActionRequest
      -
    • -
    -
    -
    -
      -
    • - -
        -
      • - - -

        Nested Class Summary

        -
          -
        • - - -

          Nested classes/interfaces inherited from class org.elasticsearch.transport.TransportRequest

          -org.elasticsearch.transport.TransportRequest.Empty
        • -
        -
      • -
      - -
        -
      • - - -

        Constructor Summary

        - - - - - - - - -
        Constructors 
        Constructor and Description
        LangdetectRequest() 
        -
      • -
      - -
        -
      • - - -

        Method Summary

        - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        All Methods Instance Methods Concrete Methods 
        Modifier and TypeMethod and Description
        java.lang.StringgetProfile() 
        java.lang.StringgetText() 
        voidreadFrom(org.elasticsearch.common.io.stream.StreamInput in) 
        LangdetectRequestsetProfile(java.lang.String profile) 
        LangdetectRequestsetText(java.lang.String text) 
        org.elasticsearch.action.ActionRequestValidationExceptionvalidate() 
        voidwriteTo(org.elasticsearch.common.io.stream.StreamOutput out) 
        -
          -
        • - - -

          Methods inherited from class org.elasticsearch.action.ActionRequest

          -getShouldStoreResult
        • -
        -
          -
        • - - -

          Methods inherited from class org.elasticsearch.transport.TransportRequest

          -createTask, getDescription, getParentTask, setParentTask, setParentTask
        • -
        -
          -
        • - - -

          Methods inherited from class org.elasticsearch.transport.TransportMessage

          -remoteAddress, remoteAddress
        • -
        -
          -
        • - - -

          Methods inherited from class java.lang.Object

          -clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
        • -
        -
      • -
      -
    • -
    -
    -
    -
      -
    • - -
        -
      • - - -

        Constructor Detail

        - - - -
          -
        • -

          LangdetectRequest

          -
          public LangdetectRequest()
          -
        • -
        -
      • -
      - -
        -
      • - - -

        Method Detail

        - - - -
          -
        • -

          validate

          -
          public org.elasticsearch.action.ActionRequestValidationException validate()
          -
          -
          Specified by:
          -
          validate in class org.elasticsearch.action.ActionRequest
          -
          -
        • -
        - - - -
          -
        • -

          getProfile

          -
          public java.lang.String getProfile()
          -
        • -
        - - - - - - - -
          -
        • -

          getText

          -
          public java.lang.String getText()
          -
        • -
        - - - - - - - -
          -
        • -

          readFrom

          -
          public void readFrom(org.elasticsearch.common.io.stream.StreamInput in)
          -              throws java.io.IOException
          -
          -
          Specified by:
          -
          readFrom in interface org.elasticsearch.common.io.stream.Streamable
          -
          Overrides:
          -
          readFrom in class org.elasticsearch.action.ActionRequest
          -
          Throws:
          -
          java.io.IOException
          -
          -
        • -
        - - - -
          -
        • -

          writeTo

          -
          public void writeTo(org.elasticsearch.common.io.stream.StreamOutput out)
          -             throws java.io.IOException
          -
          -
          Specified by:
          -
          writeTo in interface org.elasticsearch.common.io.stream.Streamable
          -
          Overrides:
          -
          writeTo in class org.elasticsearch.action.ActionRequest
          -
          Throws:
          -
          java.io.IOException
          -
          -
        • -
        -
      • -
      -
    • -
    -
    -
    - - - - - - - diff --git a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectRequestBuilder.html b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectRequestBuilder.html deleted file mode 100644 index 3596489..0000000 --- a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectRequestBuilder.html +++ /dev/null @@ -1,308 +0,0 @@ - - - - - -LangdetectRequestBuilder (elasticsearch-langdetect 5.1.2.0 API) - - - - - - - - - - - -
    -
    org.xbib.elasticsearch.action.langdetect
    -

    Class LangdetectRequestBuilder

    -
    -
    - -
    - -
    -
    -
      -
    • - -
        -
      • - - -

        Field Summary

        -
          -
        • - - -

          Fields inherited from class org.elasticsearch.action.ActionRequestBuilder

          -action, client, request
        • -
        -
      • -
      - -
        -
      • - - -

        Constructor Summary

        - - - - - - - - -
        Constructors 
        Constructor and Description
        LangdetectRequestBuilder(org.elasticsearch.client.ElasticsearchClient client) 
        -
      • -
      - -
        -
      • - - -

        Method Summary

        - - - - - - - - - - - - - - -
        All Methods Instance Methods Concrete Methods 
        Modifier and TypeMethod and Description
        LangdetectRequestBuildersetProfile(java.lang.String string) 
        LangdetectRequestBuildersetText(java.lang.String string) 
        -
          -
        • - - -

          Methods inherited from class org.elasticsearch.action.ActionRequestBuilder

          -beforeExecute, execute, execute, get, get, get, request
        • -
        -
          -
        • - - -

          Methods inherited from class java.lang.Object

          -clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
        • -
        -
      • -
      -
    • -
    -
    -
    -
      -
    • - -
        -
      • - - -

        Constructor Detail

        - - - -
          -
        • -

          LangdetectRequestBuilder

          -
          public LangdetectRequestBuilder(org.elasticsearch.client.ElasticsearchClient client)
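
Together with the setText and setProfile setters summarized above, this constructor suggests the following round trip through the langdetect transport action. This is a hypothetical sketch, not code from the plugin's sources: it assumes a regular Client (which implements ElasticsearchClient) is already available and that the inherited get() returns the LangdetectResponse documented further below:

import org.elasticsearch.client.Client;
import org.xbib.elasticsearch.action.langdetect.LangdetectRequestBuilder;
import org.xbib.elasticsearch.action.langdetect.LangdetectResponse;
import org.xbib.elasticsearch.common.langdetect.Language;

public final class LangdetectActionSketch {

    // Hypothetical usage sketch; not part of the plugin's test code.
    public static void detect(Client client) {
        // Client extends ElasticsearchClient, which this builder constructor expects.
        LangdetectResponse response = new LangdetectRequestBuilder(client)
                .setText("This is an English sentence.") // setProfile(...) would switch the profile
                .get();
        for (Language language : response.getLanguages()) {
            System.out.println(language.getLanguage() + " " + language.getProbability());
        }
    }
}
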
          -
        • -
        -
      • -
      - - -
    • -
    -
    -
    - - - - - - - diff --git a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.html b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.html deleted file mode 100644 index 3659424..0000000 --- a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.html +++ /dev/null @@ -1,417 +0,0 @@ - - - - - -LangdetectResponse (elasticsearch-langdetect 5.1.2.0 API) - - - - - - - - - - - -
    -
    org.xbib.elasticsearch.action.langdetect
    -

    Class LangdetectResponse

    -
    -
    -
      -
    • java.lang.Object
    • -
    • -
        -
      • org.elasticsearch.transport.TransportMessage
      • -
      • -
          -
        • org.elasticsearch.transport.TransportResponse
        • -
        • -
            -
          • org.elasticsearch.action.ActionResponse
          • -
          • -
              -
            • org.xbib.elasticsearch.action.langdetect.LangdetectResponse
            • -
            -
          • -
          -
        • -
        -
      • -
      -
    • -
    -
    -
      -
    • -
      -
      All Implemented Interfaces:
      -
      org.elasticsearch.common.io.stream.Streamable, org.elasticsearch.common.xcontent.StatusToXContent, org.elasticsearch.common.xcontent.ToXContent
      -
      -
      -
public class LangdetectResponse
    extends org.elasticsearch.action.ActionResponse
    implements org.elasticsearch.common.xcontent.StatusToXContent

Constructors:
    LangdetectResponse()

Methods:
    java.lang.String                   getProfile()
    java.util.List<Language>           getLanguages()
    LangdetectResponse                 setProfile(java.lang.String profile)
    LangdetectResponse                 setLanguages(java.util.List<Language> languages)
    org.elasticsearch.rest.RestStatus  status()
        (specified by status in org.elasticsearch.common.xcontent.StatusToXContent)
    org.elasticsearch.common.xcontent.XContentBuilder
        toXContent(XContentBuilder builder, ToXContent.Params params) throws java.io.IOException
        (specified by toXContent in org.elasticsearch.common.xcontent.ToXContent)

Inherited members: TransportResponse.Empty; ToXContent.EMPTY_PARAMS;
readFrom, writeTo (from ActionResponse); remoteAddress (from TransportMessage).
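A minimal usage sketch of this response class, assuming the standard ES 5.x XContent helpers; the profile name and language values are made up for illustration:

    import java.util.Collections;

    import org.elasticsearch.common.xcontent.ToXContent;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentFactory;
    import org.xbib.elasticsearch.action.langdetect.LangdetectResponse;
    import org.xbib.elasticsearch.common.langdetect.Language;

    public class LangdetectResponseSketch {
        public static void main(String[] args) throws Exception {
            // setters return the response itself, so calls can be chained
            LangdetectResponse response = new LangdetectResponse()
                    .setProfile("short-text")   // hypothetical profile name
                    .setLanguages(Collections.singletonList(new Language("en", 0.99)));
            // render the response body as JSON; wrapping it in an object is assumed here
            XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
            response.toXContent(builder, ToXContent.EMPTY_PARAMS);
            builder.endObject();
            System.out.println(builder.string());   // XContentBuilder#string() is the ES 5.x accessor
            System.out.println(response.status());  // RestStatus used for the REST response
        }
    }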
diff --git a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/TransportLangdetectAction.html b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/TransportLangdetectAction.html
deleted file mode 100644
index 1591687..0000000

TransportLangdetectAction (elasticsearch-langdetect 5.1.2.0 API)
org.xbib.elasticsearch.action.langdetect
Class TransportLangdetectAction

    java.lang.Object
      org.elasticsearch.common.component.AbstractComponent
        org.elasticsearch.action.support.TransportAction
          ...
          org.xbib.elasticsearch.action.langdetect.TransportLangdetectAction

Constructors:
    @Inject
    TransportLangdetectAction(org.elasticsearch.common.settings.Settings settings,
                              org.elasticsearch.threadpool.ThreadPool threadPool,
                              org.elasticsearch.action.support.ActionFilters actionFilters,
                              org.elasticsearch.cluster.metadata.IndexNameExpressionResolver indexNameExpressionResolver,
                              org.elasticsearch.transport.TransportService transportService)

Methods:
    protected void doExecute(LangdetectRequest request,
                             org.elasticsearch.action.ActionListener<LangdetectResponse> listener)

Inherited members: actionName, indexNameExpressionResolver, parseFieldMatcher, taskManager,
threadPool, doExecute, execute (from TransportAction); deprecationLogger, logger, settings,
logDeprecatedSetting, logRemovedSetting, nodeName (from AbstractComponent).
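For orientation, a hedged sketch of how the transport action is normally reached from client code. LangdetectAction.INSTANCE is an assumption (the action class is not part of this excerpt), so read this as the usual ES action pattern rather than the plugin's confirmed API:

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.client.Client;
    import org.xbib.elasticsearch.action.langdetect.LangdetectAction;   // assumed registered action type
    import org.xbib.elasticsearch.action.langdetect.LangdetectRequest;
    import org.xbib.elasticsearch.action.langdetect.LangdetectResponse;
    import org.xbib.elasticsearch.common.langdetect.Language;

    public class LangdetectClientSketch {
        // doExecute(LangdetectRequest, ActionListener<LangdetectResponse>) runs on the node;
        // a client reaches it through the registered action type.
        static void detect(Client client, LangdetectRequest request) {
            client.execute(LangdetectAction.INSTANCE, request, new ActionListener<LangdetectResponse>() {
                @Override
                public void onResponse(LangdetectResponse response) {
                    for (Language language : response.getLanguages()) {
                        System.out.println(language.getLanguage() + " " + language.getProbability());
                    }
                }

                @Override
                public void onFailure(Exception e) {
                    e.printStackTrace();
                }
            });
        }
    }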
diff --git a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-frame.html b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-frame.html
deleted file mode 100644
index ec05823..0000000
(generated package frame for org.xbib.elasticsearch.action.langdetect)

diff --git a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-summary.html b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-summary.html
deleted file mode 100644
index 3f9bca1..0000000
(generated package summary for org.xbib.elasticsearch.action.langdetect)

diff --git a/docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-tree.html b/docs/javadoc/org/xbib/elasticsearch/action/langdetect/package-tree.html
deleted file mode 100644
index 95a252e..0000000
(generated class hierarchy for org.xbib.elasticsearch.action.langdetect:
 java.lang.Object > AbstractComponent > TransportAction<Request,Response>;
 Object > ActionRequestBuilder<Request,Response,RequestBuilder>;
 Object > GenericAction<Request,Response> > Action<Request,Response,RequestBuilder>;
 Object > TransportMessage (implements Streamable) > TransportRequest > ActionRequest;
 TransportMessage > TransportResponse > ActionResponse > LangdetectResponse (implements StatusToXContent))

diff --git a/docs/javadoc/org/xbib/elasticsearch/common/langdetect/LangProfile.html b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/LangProfile.html
deleted file mode 100644
index 9c931f4..0000000

LangProfile (elasticsearch-langdetect 5.1.2.0 API)
org.xbib.elasticsearch.common.langdetect
Class LangProfile

    java.lang.Object
      org.xbib.elasticsearch.common.langdetect.LangProfile

public class LangProfile
    extends java.lang.Object

Constructors:
    LangProfile()

Methods:
    void                                                add(java.lang.String gram)
    java.util.Map<java.lang.String,java.lang.Integer>   getFreq()
    java.lang.String                                    getName()
    java.util.List<java.lang.Integer>                   getNWords()
    void                                                read(java.io.InputStream input) throws java.io.IOException
    void                                                setFreq(java.util.Map<java.lang.String,java.lang.Integer> freq)
    void                                                setName(java.lang.String name)
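A small sketch of loading and inspecting a serialized language profile; the resource path is hypothetical and only illustrates the read(InputStream) contract:

    import java.io.InputStream;

    import org.xbib.elasticsearch.common.langdetect.LangProfile;

    public class LangProfileSketch {
        public static void main(String[] args) throws Exception {
            LangProfile profile = new LangProfile();
            // hypothetical classpath location of a bundled profile
            try (InputStream input = LangProfileSketch.class.getResourceAsStream("/langdetect/en")) {
                profile.read(input);
            }
            System.out.println(profile.getName());          // language code, e.g. "en"
            System.out.println(profile.getNWords());        // n-gram counts per gram length
            System.out.println(profile.getFreq().size());   // number of distinct n-grams
            profile.add("abc");                             // add a single n-gram to the profile
        }
    }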
diff --git a/docs/javadoc/org/xbib/elasticsearch/common/langdetect/LangdetectService.html b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/LangdetectService.html
deleted file mode 100644
index e6e58f1..0000000

LangdetectService (elasticsearch-langdetect 5.1.2.0 API)
org.xbib.elasticsearch.common.langdetect
Class LangdetectService

    java.lang.Object
      org.xbib.elasticsearch.common.langdetect.LangdetectService

public class LangdetectService
    extends java.lang.Object

Constructors:
    LangdetectService()
    LangdetectService(org.elasticsearch.common.settings.Settings settings)
    LangdetectService(org.elasticsearch.common.settings.Settings settings, java.lang.String profile)

Methods:
    void                                        addProfile(LangProfile profile, int index, int langsize) throws java.io.IOException
    java.util.List<Language>                    detectAll(java.lang.String text)
    java.lang.String                            getProfile()
    org.elasticsearch.common.settings.Settings  getSettings()
    void                                        loadProfileFromResource(java.lang.String resource, int index, int langsize) throws java.io.IOException
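A minimal sketch of direct service usage, assuming the service can be constructed stand-alone with empty settings and then falls back to its default profiles; the sample text is arbitrary:

    import org.elasticsearch.common.settings.Settings;
    import org.xbib.elasticsearch.common.langdetect.LangdetectService;
    import org.xbib.elasticsearch.common.langdetect.Language;

    public class LangdetectServiceSketch {
        public static void main(String[] args) throws Exception {
            // both the no-arg and the Settings-based constructor are available;
            // Settings.EMPTY keeps the sketch self-contained
            LangdetectService service = new LangdetectService(Settings.EMPTY);
            for (Language language : service.detectAll("Dies ist ein kurzer deutscher Beispieltext.")) {
                System.out.println(language.getLanguage() + " -> " + language.getProbability());
            }
            System.out.println(service.getProfile());   // the configured profile name, if any
        }
    }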
diff --git a/docs/javadoc/org/xbib/elasticsearch/common/langdetect/Language.html b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/Language.html
deleted file mode 100644
index acc2fbd..0000000

Language (elasticsearch-langdetect 5.1.2.0 API)
org.xbib.elasticsearch.common.langdetect
Class Language

    java.lang.Object
      org.xbib.elasticsearch.common.langdetect.Language

All Implemented Interfaces: org.elasticsearch.common.io.stream.Streamable

public class Language
    extends java.lang.Object
    implements org.elasticsearch.common.io.stream.Streamable

Constructors:
    Language(java.lang.String lang, double prob)

Methods:
    java.lang.String  getLanguage()
    double            getProbability()
    void              readFrom(org.elasticsearch.common.io.stream.StreamInput in) throws java.io.IOException
                          (specified by readFrom in Streamable)
    void              writeTo(org.elasticsearch.common.io.stream.StreamOutput out) throws java.io.IOException
                          (specified by writeTo in Streamable)
    java.lang.String  toString()
                          (overrides toString in java.lang.Object)
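Because Language implements Streamable, instances can be serialized over the transport layer. A hedged round-trip sketch with the ES 5.x stream classes (the placeholder constructor arguments are overwritten by readFrom):

    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.StreamInput;
    import org.xbib.elasticsearch.common.langdetect.Language;

    public class LanguageStreamSketch {
        public static void main(String[] args) throws Exception {
            Language original = new Language("de", 0.87);
            // write to an in-memory buffer, as the transport layer would
            BytesStreamOutput out = new BytesStreamOutput();
            original.writeTo(out);
            // read it back into a fresh instance; the placeholder values are replaced by readFrom
            Language copy = new Language(null, 0.0);
            try (StreamInput in = out.bytes().streamInput()) {
                copy.readFrom(in);
            }
            System.out.println(copy);   // toString() is overridden, so this prints the language and probability
        }
    }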
diff --git a/docs/javadoc/org/xbib/elasticsearch/common/langdetect/LanguageDetectionException.html b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/LanguageDetectionException.html
deleted file mode 100644
index 941bd68..0000000

LanguageDetectionException (elasticsearch-langdetect 5.1.2.0 API)
org.xbib.elasticsearch.common.langdetect
Class LanguageDetectionException

    java.lang.Object
      java.lang.Throwable
        java.lang.Exception
          java.io.IOException
            org.xbib.elasticsearch.common.langdetect.LanguageDetectionException

All Implemented Interfaces: java.io.Serializable

public class LanguageDetectionException
    extends java.io.IOException

See Also: Serialized Form

Constructors:
    LanguageDetectionException(java.lang.String message)

diff --git a/docs/javadoc/org/xbib/elasticsearch/common/langdetect/NGram.html b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/NGram.html
deleted file mode 100644
index ea6e5a1..0000000

NGram (elasticsearch-langdetect 5.1.2.0 API)
org.xbib.elasticsearch.common.langdetect
Class NGram

    java.lang.Object
      org.xbib.elasticsearch.common.langdetect.NGram

public class NGram
    extends java.lang.Object

Fields:
    static int  N_GRAM

Constructors:
    NGram()

Methods:
    void              addChar(char c)
    java.lang.String  get(int n)
    static char       normalize(char c)
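A sketch of driving the n-gram window one character at a time; the null check reflects the usual behaviour of the upstream langdetect library (no gram is available until the window holds n characters), which is assumed here:

    import org.xbib.elasticsearch.common.langdetect.NGram;

    public class NGramSketch {
        public static void main(String[] args) {
            NGram ngram = new NGram();
            for (char c : "Hallo Welt".toCharArray()) {
                ngram.addChar(c);                           // the gram window advances one character at a time
                for (int n = 1; n <= NGram.N_GRAM; n++) {   // N_GRAM is taken to be the maximum gram length
                    String gram = ngram.get(n);
                    if (gram != null) {                     // assumed: null until the window holds n characters
                        System.out.println(n + "-gram: " + gram);
                    }
                }
            }
            System.out.println(NGram.normalize('A'));       // static helper mapping a char to its normalized form
        }
    }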
diff --git a/docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-frame.html b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-frame.html
deleted file mode 100644
index 80edd73..0000000
(generated package frame for org.xbib.elasticsearch.common.langdetect)

diff --git a/docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-summary.html b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-summary.html
deleted file mode 100644
index 128e93b..0000000
(generated package summary for org.xbib.elasticsearch.common.langdetect)

diff --git a/docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-tree.html b/docs/javadoc/org/xbib/elasticsearch/common/langdetect/package-tree.html
deleted file mode 100644
index 7253968..0000000
(generated class hierarchy for org.xbib.elasticsearch.common.langdetect:
 java.lang.Object > LangdetectService, LangProfile, Language (implements Streamable), NGram;
 Object > Throwable (implements Serializable) > ... > LanguageDetectionException)

diff --git a/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.Builder.html b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.Builder.html
deleted file mode 100644
index 82f5bb2..0000000

LangdetectMapper.Builder (elasticsearch-langdetect 5.1.2.0 API)
org.xbib.elasticsearch.index.mapper.langdetect
Class LangdetectMapper.Builder

    java.lang.Object
      org.elasticsearch.index.mapper.Mapper.Builder<T,Y>
        org.elasticsearch.index.mapper.FieldMapper.Builder<LangdetectMapper.Builder,org.elasticsearch.index.mapper.TextFieldMapper>
          org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Builder

Enclosing class: LangdetectMapper

public static class LangdetectMapper.Builder
    extends org.elasticsearch.index.mapper.FieldMapper.Builder<LangdetectMapper.Builder,org.elasticsearch.index.mapper.TextFieldMapper>

diff --git a/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.Defaults.html b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.Defaults.html
deleted file mode 100644
index e83e24e..0000000

LangdetectMapper.Defaults (elasticsearch-langdetect 5.1.2.0 API)

org.xbib.elasticsearch.index.mapper.langdetect
Class LangdetectMapper.Defaults

Enclosing class: LangdetectMapper

public static class LangdetectMapper.Defaults
    extends java.lang.Object

Fields:
    public static final org.elasticsearch.index.mapper.MappedFieldType  LANG_FIELD_TYPE

Constructors:
    Defaults()

diff --git a/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.LanguageTo.Builder.html b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.LanguageTo.Builder.html
deleted file mode 100644
index 81dbb6a..0000000

LangdetectMapper.LanguageTo.Builder (elasticsearch-langdetect 5.1.2.0 API)

org.xbib.elasticsearch.index.mapper.langdetect
Class LangdetectMapper.LanguageTo.Builder

Enclosing class: LangdetectMapper.LanguageTo

public static class LangdetectMapper.LanguageTo.Builder
    extends java.lang.Object

diff --git a/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.LanguageTo.html b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.LanguageTo.html
deleted file mode 100644
index 673c918..0000000

LangdetectMapper.LanguageTo (elasticsearch-langdetect 5.1.2.0 API)

org.xbib.elasticsearch.index.mapper.langdetect
Class LangdetectMapper.LanguageTo

Enclosing class: LangdetectMapper

public static class LangdetectMapper.LanguageTo
    extends java.lang.Object

Methods:
    org.elasticsearch.common.xcontent.XContentBuilder
        toXContent(XContentBuilder builder, ToXContent.Params params) throws java.io.IOException
    java.util.Map<java.lang.String,java.lang.Object>
        languageToFields()

diff --git a/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.TypeParser.html b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.TypeParser.html
deleted file mode 100644
index 9d9201d..0000000

LangdetectMapper.TypeParser (elasticsearch-langdetect 5.1.2.0 API)
org.xbib.elasticsearch.index.mapper.langdetect
Class LangdetectMapper.TypeParser

    java.lang.Object
      org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.TypeParser

All Implemented Interfaces: org.elasticsearch.index.mapper.Mapper.TypeParser
Enclosing class: LangdetectMapper

public static class LangdetectMapper.TypeParser
    extends java.lang.Object
    implements org.elasticsearch.index.mapper.Mapper.TypeParser

Constructors:
    TypeParser()

Methods:
    org.elasticsearch.index.mapper.Mapper.Builder<?,?>
        parse(java.lang.String name,
              java.util.Map<java.lang.String,java.lang.Object> mapping,
              org.elasticsearch.index.mapper.Mapper.TypeParser.ParserContext parserContext)
        (specified by parse in org.elasticsearch.index.mapper.Mapper.TypeParser)

diff --git a/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.html b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.html
deleted file mode 100644
index 2299518..0000000

LangdetectMapper (elasticsearch-langdetect 5.1.2.0 API)
org.xbib.elasticsearch.index.mapper.langdetect
Class LangdetectMapper

    java.lang.Object
      org.elasticsearch.index.mapper.Mapper
        org.elasticsearch.index.mapper.FieldMapper
          org.elasticsearch.index.mapper.TextFieldMapper
            org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper

All Implemented Interfaces: java.lang.Cloneable, java.lang.Iterable<org.elasticsearch.index.mapper.Mapper>,
org.elasticsearch.common.xcontent.ToXContent

public class LangdetectMapper
    extends org.elasticsearch.index.mapper.TextFieldMapper

Nested classes:
    static class  LangdetectMapper.Builder
    static class  LangdetectMapper.Defaults
    static class  LangdetectMapper.LanguageTo
    static class  LangdetectMapper.TypeParser

Fields:
    static java.lang.String  MAPPER_TYPE

Constructors:
    LangdetectMapper(java.lang.String simpleName,
                     org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType fieldType,
                     org.elasticsearch.index.mapper.MappedFieldType defaultFieldType,
                     int positionIncrementGap,
                     org.elasticsearch.common.settings.Settings indexSettings,
                     org.elasticsearch.index.mapper.FieldMapper.MultiFields multiFields,
                     org.elasticsearch.index.mapper.FieldMapper.CopyTo copyTo,
                     LangdetectMapper.LanguageTo languageTo,
                     LangdetectService langdetectService)

Methods:
    protected java.lang.String  contentType()
        (overrides contentType in TextFieldMapper)
    protected void  parseCreateField(org.elasticsearch.index.mapper.ParseContext context,
                                     java.util.List<org.apache.lucene.document.Field> fields) throws java.io.IOException
        (overrides parseCreateField in TextFieldMapper)
    protected void  doXContentBody(org.elasticsearch.common.xcontent.XContentBuilder builder,
                                   boolean includeDefaults,
                                   org.elasticsearch.common.xcontent.ToXContent.Params params) throws java.io.IOException
        (overrides doXContentBody in TextFieldMapper)

Inherited members: CONTENT_TYPE, clone, doMerge, fieldType, getPositionIncrementGap (from TextFieldMapper);
COERCE_SETTING, copyTo, defaultFieldType, fieldType, IGNORE_MALFORMED_SETTING, multiFields, merge, name,
parse, toXContent, updateFieldType (from FieldMapper); simpleName (from Mapper); EMPTY_PARAMS (from ToXContent).
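From the index side, the mapper is used by mapping a field with the plugin's type; detection runs in parseCreateField at index time and doXContentBody writes the mapper's options back out when the mapping is fetched. The type name "langdetect" is an assumption based on the MAPPER_TYPE constant and is not confirmed by this excerpt; the mapping below is a sketch only:

    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentFactory;

    public class LangdetectMappingSketch {
        public static void main(String[] args) throws Exception {
            // hypothetical mapping: field "content" uses the langdetect mapper type
            XContentBuilder mapping = XContentFactory.jsonBuilder()
                    .startObject()
                        .startObject("properties")
                            .startObject("content")
                                .field("type", "langdetect")   // assumed value of LangdetectMapper.MAPPER_TYPE
                            .endObject()
                        .endObject()
                    .endObject();
            System.out.println(mapping.string());
        }
    }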
diff --git a/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-frame.html b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-frame.html
deleted file mode 100644
index 6e00802..0000000
(generated package frame for org.xbib.elasticsearch.index.mapper.langdetect)

diff --git a/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-summary.html b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-summary.html
deleted file mode 100644
index 80c0aa1..0000000
(generated package summary for org.xbib.elasticsearch.index.mapper.langdetect)

diff --git a/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-tree.html b/docs/javadoc/org/xbib/elasticsearch/index/mapper/langdetect/package-tree.html
deleted file mode 100644
index 858097c..0000000
(generated class hierarchy for org.xbib.elasticsearch.index.mapper.langdetect:
 java.lang.Object > LangdetectMapper.Defaults, LangdetectMapper.LanguageTo,
 LangdetectMapper.LanguageTo.Builder, LangdetectMapper.TypeParser (implements Mapper.TypeParser);
 Object > Mapper (implements Iterable<T>, ToXContent) > FieldMapper (implements Cloneable) > TextFieldMapper;
 Object > Mapper.Builder<T,Y> > FieldMapper.Builder<T,Y>)

diff --git a/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/LangdetectPlugin.html b/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/LangdetectPlugin.html
deleted file mode 100644
index 07e3335..0000000

LangdetectPlugin (elasticsearch-langdetect 5.1.2.0 API)
org.xbib.elasticsearch.plugin.langdetect
Class LangdetectPlugin

    java.lang.Object
      org.elasticsearch.plugins.Plugin
        org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin

All Implemented Interfaces: java.io.Closeable, java.lang.AutoCloseable,
org.elasticsearch.plugins.ActionPlugin, org.elasticsearch.plugins.MapperPlugin

public class LangdetectPlugin
    extends org.elasticsearch.plugins.Plugin
    implements org.elasticsearch.plugins.MapperPlugin, org.elasticsearch.plugins.ActionPlugin

Constructors:
    LangdetectPlugin()

Methods:
    java.util.List<org.elasticsearch.plugins.ActionPlugin.ActionHandler<? extends org.elasticsearch.action.ActionRequest,? extends org.elasticsearch.action.ActionResponse>>
        getActions()
        (specified by getActions in org.elasticsearch.plugins.ActionPlugin)
    java.util.Map<java.lang.String,org.elasticsearch.index.mapper.Mapper.TypeParser>
        getMappers()
        (specified by getMappers in org.elasticsearch.plugins.MapperPlugin)
    java.util.List<java.lang.Class<? extends org.elasticsearch.rest.RestHandler>>
        getRestHandlers()
        (specified by getRestHandlers in org.elasticsearch.plugins.ActionPlugin)
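One plausible shape of the plugin wiring, shown for getMappers() only; getActions() and getRestHandlers() would register the transport action and REST handler in the same style. This is a sketch of how a MapperPlugin usually implements the method, not a copy of the plugin's actual code:

    import java.util.Collections;
    import java.util.Map;

    import org.elasticsearch.index.mapper.Mapper;
    import org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper;

    public class MapperRegistrationSketch {
        // mirrors the documented signature of LangdetectPlugin#getMappers()
        public Map<String, Mapper.TypeParser> getMappers() {
            // MAPPER_TYPE is the documented public constant; TypeParser has a no-arg constructor
            return Collections.<String, Mapper.TypeParser>singletonMap(
                    LangdetectMapper.MAPPER_TYPE, new LangdetectMapper.TypeParser());
        }
    }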
diff --git a/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-frame.html b/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-frame.html
deleted file mode 100644
index e7855c6..0000000
(generated package frame for org.xbib.elasticsearch.plugin.langdetect)

diff --git a/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-summary.html b/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-summary.html
deleted file mode 100644
index be48689..0000000
(generated package summary for org.xbib.elasticsearch.plugin.langdetect; class summary: LangdetectPlugin)

diff --git a/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-tree.html b/docs/javadoc/org/xbib/elasticsearch/plugin/langdetect/package-tree.html
deleted file mode 100644
index 26b293f..0000000
(generated class hierarchy for org.xbib.elasticsearch.plugin.langdetect:
 java.lang.Object > org.elasticsearch.plugins.Plugin (implements java.io.Closeable)
 > LangdetectPlugin (implements ActionPlugin, MapperPlugin))

diff --git a/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.html b/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.html
deleted file mode 100644
index 0d365c8..0000000

RestLangdetectAction (elasticsearch-langdetect 5.1.2.0 API)
    -
[deleted generated Javadoc page "RestLangdetectAction (elasticsearch-langdetect 5.1.2.0 API)": public class org.xbib.elasticsearch.rest.action.langdetect.RestLangdetectAction extends org.elasticsearch.rest.BaseRestHandler; hierarchy: java.lang.Object -> org.elasticsearch.common.component.AbstractComponent -> org.elasticsearch.rest.BaseRestHandler -> RestLangdetectAction; implements org.elasticsearch.rest.RestHandler; constructor: @Inject RestLangdetectAction(org.elasticsearch.common.settings.Settings settings, org.elasticsearch.rest.RestController controller); method: protected org.elasticsearch.rest.BaseRestHandler.RestChannelConsumer prepareRequest(org.elasticsearch.rest.RestRequest request, org.elasticsearch.client.node.NodeClient client) throws java.io.IOException, specified by org.elasticsearch.rest.BaseRestHandler]
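For orientation only: a minimal, hypothetical sketch of an Elasticsearch 5.1.x BaseRestHandler with the constructor and prepareRequest signatures documented in the deleted page above. The class name, endpoint path, parameter name, and response body are assumptions for illustration, not the plugin's actual implementation.

    // Hypothetical sketch only; not the repository's RestLangdetectAction.
    package org.xbib.elasticsearch.rest.action.langdetect;

    import java.io.IOException;

    import org.elasticsearch.client.node.NodeClient;
    import org.elasticsearch.common.inject.Inject;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.rest.BaseRestHandler;
    import org.elasticsearch.rest.BytesRestResponse;
    import org.elasticsearch.rest.RestController;
    import org.elasticsearch.rest.RestRequest;
    import org.elasticsearch.rest.RestStatus;

    public class RestLangdetectSketchAction extends BaseRestHandler {

        @Inject
        public RestLangdetectSketchAction(Settings settings, RestController controller) {
            super(settings);
            // register this handler under an assumed endpoint path
            controller.registerHandler(RestRequest.Method.POST, "/_langdetect", this);
        }

        @Override
        protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
            // read request parameters up front, then answer asynchronously on the channel
            final String text = request.param("text", "");
            return channel -> channel.sendResponse(
                    new BytesRestResponse(RestStatus.OK, "received " + text.length() + " characters"));
        }
    }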
diff --git a/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-frame.html b/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-frame.html
deleted file mode 100644
index f1dfd3a..0000000
--- a/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-frame.html
+++ /dev/null
@@ -1,19 +0,0 @@
[deleted generated Javadoc frame "org.xbib.elasticsearch.rest.action.langdetect (elasticsearch-langdetect 5.1.2.0 API)": class listing for the package]
diff --git a/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-summary.html b/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-summary.html
deleted file mode 100644
index 4401256..0000000
--- a/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-summary.html
+++ /dev/null
@@ -1,139 +0,0 @@
[deleted generated Javadoc page "Package org.xbib.elasticsearch.rest.action.langdetect"]
diff --git a/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-tree.html b/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-tree.html
deleted file mode 100644
index 1664e45..0000000
--- a/docs/javadoc/org/xbib/elasticsearch/rest/action/langdetect/package-tree.html
+++ /dev/null
@@ -1,142 +0,0 @@
[deleted generated Javadoc page "org.xbib.elasticsearch.rest.action.langdetect Class Hierarchy (elasticsearch-langdetect 5.1.2.0 API)"; class hierarchy: java.lang.Object -> org.elasticsearch.common.component.AbstractComponent -> org.elasticsearch.rest.BaseRestHandler (implements org.elasticsearch.rest.RestHandler)]
diff --git a/docs/javadoc/overview-frame.html b/docs/javadoc/overview-frame.html
deleted file mode 100644
index 96a94b2..0000000
--- a/docs/javadoc/overview-frame.html
+++ /dev/null
@@ -1,24 +0,0 @@
[deleted generated Javadoc frame "Overview List (elasticsearch-langdetect 5.1.2.0 API)"]
diff --git a/docs/javadoc/overview-summary.html b/docs/javadoc/overview-summary.html
deleted file mode 100644
index a3bf32d..0000000
--- a/docs/javadoc/overview-summary.html
+++ /dev/null
@@ -1,162 +0,0 @@
[deleted generated Javadoc page "Overview (elasticsearch-langdetect 5.1.2.0 API)"; overview description: "Bibliographic entity processing for Java"]
diff --git a/docs/javadoc/overview-tree.html b/docs/javadoc/overview-tree.html
deleted file mode 100644
index 7a6aeb9..0000000
--- a/docs/javadoc/overview-tree.html
+++ /dev/null
@@ -1,235 +0,0 @@
[deleted generated Javadoc page "Class Hierarchy (elasticsearch-langdetect 5.1.2.0 API)"; the hierarchy rooted at java.lang.Object covered, among others:
  org.elasticsearch.common.component.AbstractComponent -> org.elasticsearch.rest.BaseRestHandler (implements org.elasticsearch.rest.RestHandler), org.elasticsearch.action.support.TransportAction<Request,Response>;
  org.elasticsearch.action.ActionRequestBuilder<Request,Response,RequestBuilder>;
  org.elasticsearch.action.GenericAction<Request,Response> -> org.elasticsearch.action.Action<Request,Response,RequestBuilder>;
  org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.Defaults, LangdetectMapper.LanguageTo, LangdetectMapper.LanguageTo.Builder, LangdetectMapper.TypeParser (implements org.elasticsearch.index.mapper.Mapper.TypeParser);
  org.xbib.elasticsearch.common.langdetect.LangdetectService, LangProfile, Language (implements org.elasticsearch.common.io.stream.Streamable), NGram;
  org.elasticsearch.index.mapper.Mapper (implements java.lang.Iterable<T>, org.elasticsearch.common.xcontent.ToXContent) -> FieldMapper (implements java.lang.Cloneable) -> TextFieldMapper;
  org.elasticsearch.index.mapper.Mapper.Builder<T,Y> -> FieldMapper.Builder<T,Y>;
  org.elasticsearch.plugins.Plugin (implements java.io.Closeable) -> org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin (implements org.elasticsearch.plugins.ActionPlugin, org.elasticsearch.plugins.MapperPlugin);
  java.lang.Throwable (implements java.io.Serializable);
  org.elasticsearch.transport.TransportMessage (implements org.elasticsearch.common.io.stream.Streamable) -> TransportRequest -> org.elasticsearch.action.ActionRequest, and TransportResponse -> ActionResponse -> org.xbib.elasticsearch.action.langdetect.LangdetectResponse (implements org.elasticsearch.common.xcontent.StatusToXContent)]
diff --git a/docs/javadoc/package-list b/docs/javadoc/package-list
deleted file mode 100644
index 1a4b64f..0000000
--- a/docs/javadoc/package-list
+++ /dev/null
@@ -1,5 +0,0 @@
-org.xbib.elasticsearch.action.langdetect
-org.xbib.elasticsearch.common.langdetect
-org.xbib.elasticsearch.index.mapper.langdetect
-org.xbib.elasticsearch.plugin.langdetect
-org.xbib.elasticsearch.rest.action.langdetect
diff --git a/docs/javadoc/script.js b/docs/javadoc/script.js
deleted file mode 100644
index b346356..0000000
--- a/docs/javadoc/script.js
+++ /dev/null
@@ -1,30 +0,0 @@
-function show(type)
-{
-    count = 0;
-    for (var key in methods) {
-        var row = document.getElementById(key);
-        if ((methods[key] & type) != 0) {
-            row.style.display = '';
-            row.className = (count++ % 2) ? rowColor : altColor;
-        }
-        else
-            row.style.display = 'none';
-    }
-    updateTabs(type);
-}
-
-function updateTabs(type)
-{
-    for (var value in tabs) {
-        var sNode = document.getElementById(tabs[value][0]);
-        var spanNode = sNode.firstChild;
-        if (value == type) {
-            sNode.className = activeTableTab;
-            spanNode.innerHTML = tabs[value][1];
-        }
-        else {
-            sNode.className = tableTab;
-            spanNode.innerHTML = "" + tabs[value][1] + "";
-        }
-    }
-}
diff --git a/docs/javadoc/serialized-form.html b/docs/javadoc/serialized-form.html
deleted file mode 100644
index 8588382..0000000
--- a/docs/javadoc/serialized-form.html
+++ /dev/null
@@ -1,138 +0,0 @@
[deleted generated Javadoc page "Serialized Form (elasticsearch-langdetect 5.1.2.0 API)"]
diff --git a/docs/javadoc/stylesheet.css b/docs/javadoc/stylesheet.css
deleted file mode 100644
index 3242ab9..0000000
--- a/docs/javadoc/stylesheet.css
+++ /dev/null
@@ -1,732 +0,0 @@
[deleted generated Javadoc stylesheet (732 lines): "Asciidoclet Java 7/8 javadoc stylesheet", based on http://docs.oracle.com/javase/8/docs/api/stylesheet.css with additional Asciidoctor styles, importing coderay-asciidoctor.css]
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectLanguageTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectLanguageTest.html
deleted file mode 100644
index 5ff514e..0000000
--- a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectLanguageTest.html
+++ /dev/null
@@ -1,124 +0,0 @@
[deleted generated test report "Test results - Class org.xbib.elasticsearch.index.mapper.langdetect.DetectLanguageTest": 4 tests, 0 failures, 0 ignored, 1.807s duration, 100% successful
  testChinese   0.172s  passed
  testEnglish   1.260s  passed
  testJapanese  0.190s  passed
  testKorean    0.185s  passed
  standard output: four DEBUG lines from org.xbib.elasticsearch.common.langdetect.LangdetectService, "language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]"]
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectorTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectorTest.html
deleted file mode 100644
index 887a6a9..0000000
--- a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.DetectorTest.html
+++ /dev/null
@@ -1,116 +0,0 @@
[deleted generated test report "Test results - Class org.xbib.elasticsearch.index.mapper.langdetect.DetectorTest": 5 tests, 0 failures, 0 ignored, 0s duration, 100% successful
  testDetector1    0s  passed
  testDetector2    0s  passed
  testDetector3    0s  passed
  testDetector4    0s  passed
  testPunctuation  0s  passed]
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectActionTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectActionTest.html
deleted file mode 100644
index 5f56fe5..0000000
--- a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectActionTest.html
+++ /dev/null
@@ -1,471 +0,0 @@
[deleted generated test report "Test results - Class org.xbib.elasticsearch.index.mapper.langdetect.LangDetectActionTest": 2 tests, 0 failures, 0 ignored, 48.559s duration, 100% successful
  testLangDetectProfile   6.109s  passed
  testSort               42.450s  passed
  standard output of the deleted report follows:]
      [16:15:10,624][INFO ][test                     ][Test worker] settings cluster name
      -[16:15:10,624][INFO ][test                     ][Test worker] starting nodes
      -[16:15:10,631][INFO ][test                     ][Test worker] settings={cluster.name=test-helper-cluster--joerg-1, http.enabled=false, path.home=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect, transport.type=local}
      -[16:15:10,793][INFO ][org.elasticsearch.node.Node][Test worker] initializing ...
      -[16:15:10,900][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] using node location [[NodePath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, spins=null}]], local_lock_id [0]
      -[16:15:10,907][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] node data locations details:
      - -> /Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, free_space [138.1gb], usable_space [137.8gb], total_space [931gb], spins? [unknown], mount [/ (/dev/disk0s2)], type [hfs]
      -[16:15:10,911][INFO ][org.elasticsearch.env.NodeEnvironment][Test worker] heap size [3.5gb], compressed ordinary object pointers [true]
      -[16:15:10,913][INFO ][org.elasticsearch.node.Node][Test worker] node name [bewR05Z] derived from node ID [bewR05ZwQo2sQ7yw5lONSg]; set [node.name] to override
      -[16:15:10,914][INFO ][org.elasticsearch.node.Node][Test worker] version[5.1.2], pid[9098], build[c8c4c16/2017-01-11T20:18:39.146Z], OS[Mac OS X/10.9.5/x86_64], JVM[Azul Systems, Inc./OpenJDK 64-Bit Server VM/1.8.0_112/25.112-b16]
      -[16:15:10,915][DEBUG][org.elasticsearch.node.Node][Test worker] using config [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/config], data [[/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data]], logs [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/logs], plugins [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins]
      -[16:15:10,927][DEBUG][org.elasticsearch.plugins.PluginsService][Test worker] [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins] directory does not exist.
      -[16:15:10,929][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] no modules loaded
      -[16:15:10,930][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] loaded plugin [org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin]
      -[16:15:10,964][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [force_merge], size [1], queue size [unbounded]
      -[16:15:10,966][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_started], core [1], max [16], keep alive [5m]
      -[16:15:10,967][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [listener], size [4], queue size [unbounded]
      -[16:15:10,971][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [index], size [8], queue size [200]
      -[16:15:10,971][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [refresh], core [1], max [4], keep alive [5m]
      -[16:15:10,971][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [generic], core [4], max [128], keep alive [30s]
      -[16:15:10,972][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [warmer], core [1], max [4], keep alive [5m]
      -[16:15:10,972][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [search], size [13], queue size [1k]
      -[16:15:10,973][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [flush], core [1], max [4], keep alive [5m]
      -[16:15:10,973][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_store], core [1], max [16], keep alive [5m]
      -[16:15:10,974][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [management], core [1], max [5], keep alive [5m]
      -[16:15:10,974][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [get], size [8], queue size [1k]
      -[16:15:10,974][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [bulk], size [8], queue size [50]
      -[16:15:10,975][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [snapshot], core [1], max [4], keep alive [5m]
      -[16:15:11,122][DEBUG][org.elasticsearch.script.ScriptService][Test worker] using script cache with max_size [100], expire [0s]
      -[16:15:11,953][DEBUG][org.elasticsearch.common.network.IfConfig][Test worker] configuration:
      -
      -lo0
      -        inet 127.0.0.1 netmask:255.0.0.0 scope:host
      -        inet6 fe80::1 prefixlen:64 scope:link
      -        inet6 ::1 prefixlen:128 scope:host
      -        UP MULTICAST LOOPBACK mtu:16384 index:1
      -
      -en4
      -        inet 10.1.1.42 netmask:255.255.0.0 broadcast:10.1.255.255 scope:site
      -        inet6 fe80::6a5b:35ff:febc:4672 prefixlen:64 scope:link
      -        hardware 68:5B:35:BC:46:72
      -        UP MULTICAST mtu:1500 index:9
      -
      -[16:15:12,012][DEBUG][org.elasticsearch.monitor.jvm.JvmGcMonitorService][Test worker] enabled [true], interval [1s], gc_threshold [{default=GcThreshold{name='default', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}, young=GcThreshold{name='young', warnThreshold=1000, infoThreshold=700, debugThreshold=400}, old=GcThreshold{name='old', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}}], overhead [50, 25, 10]
      -[16:15:12,028][DEBUG][org.elasticsearch.monitor.os.OsService][Test worker] using refresh_interval [1s]
      -[16:15:12,108][DEBUG][org.elasticsearch.monitor.process.ProcessService][Test worker] using refresh_interval [1s]
      -[16:15:12,128][DEBUG][org.elasticsearch.monitor.jvm.JvmService][Test worker] using refresh_interval [1s]
      -[16:15:12,129][DEBUG][org.elasticsearch.monitor.fs.FsService][Test worker] using refresh_interval [1s]
      -[16:15:12,135][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider][Test worker] using [cluster.routing.allocation.allow_rebalance] with [indices_all_active]
      -[16:15:12,138][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider][Test worker] using [cluster_concurrent_rebalance] with [2]
      -[16:15:12,241][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider][Test worker] using node_concurrent_outgoing_recoveries [2], node_concurrent_incoming_recoveries [2], node_initial_primaries_recoveries [4]
      -[16:15:13,548][DEBUG][org.elasticsearch.index.store.IndexStoreConfig][Test worker] using indices.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [0b]
      -[16:15:13,553][DEBUG][org.elasticsearch.indices.IndicesQueryCache][Test worker] using [node] query cache with size [364mb] max filter count [10000]
      -[16:15:13,567][DEBUG][org.elasticsearch.indices.IndexingMemoryController][Test worker] using indexing buffer size [364mb] with indices.memory.shard_inactive_time [5m], indices.memory.interval [5s]
      -[16:15:13,575][DEBUG][org.elasticsearch.transport.local.LocalTransport][Test worker] creating [8] workers, queue_size [-1]
      -[16:15:13,587][DEBUG][org.elasticsearch.discovery.zen.UnicastZenPing][Test worker] using initial hosts [0.0.0.0], with concurrent_connects [10], resolve_timeout [5s]
      -[16:15:13,593][DEBUG][org.elasticsearch.discovery.zen.ElectMasterService][Test worker] using minimum_master_nodes [-1]
      -[16:15:13,594][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][Test worker] using ping_timeout [3s], join.timeout [1m], master_election.ignore_non_master [false]
      -[16:15:13,598][DEBUG][org.elasticsearch.discovery.zen.MasterFaultDetection][Test worker] [master] uses ping_interval [1s], ping_timeout [30s], ping_retries [3]
      -[16:15:13,606][DEBUG][org.elasticsearch.discovery.zen.NodesFaultDetection][Test worker] [node  ] uses ping_interval [1s], ping_timeout [30s], ping_retries [3]
      -[16:15:13,884][DEBUG][org.elasticsearch.indices.recovery.RecoverySettings][Test worker] using max_bytes_per_sec[40mb]
      -[16:15:14,093][DEBUG][org.elasticsearch.gateway.GatewayAllocator$InternalPrimaryShardAllocator][Test worker] using initial_shards [quorum]
      -[16:15:14,391][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:15:14,456][DEBUG][org.elasticsearch.gateway.GatewayMetaState][Test worker] took 0s to load state
      -[16:15:14,465][INFO ][org.elasticsearch.node.Node][Test worker] initialized
      -[16:15:14,465][INFO ][org.elasticsearch.node.Node][Test worker] starting ...
      -[16:15:14,472][INFO ][org.elasticsearch.transport.TransportService][Test worker] publish_address {local[1]}, bound_addresses {local[1]}
      -[16:15:14,493][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [initial_join]: execute
      -[16:15:14,494][DEBUG][org.elasticsearch.node.Node][Test worker] waiting to join the cluster. timeout [30s]
      -[16:15:14,500][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [initial_join]: took [6ms] no change in cluster_state
      -[16:15:17,525][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[bewR05Z][generic][T#1]] filtered ping responses: (ignore_non_masters [false])
      -	--> ping_response{node [{bewR05Z}{bewR05ZwQo2sQ7yw5lONSg}{esS4HfhQRa2brUxmrNnN1A}{local}{local[1]}], id[7], master [null],cluster_state_version [-1], cluster_name[test-helper-cluster--joerg-1]}
      -[16:15:17,528][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[bewR05Z][generic][T#1]] elected as master, waiting for incoming joins ([0] needed)
      -[16:15:17,532][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: execute
      -[16:15:17,549][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] cluster state updated, version [1], source [zen-disco-elected-as-master ([0] nodes joined)]
      -[16:15:17,552][INFO ][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] new_master {bewR05Z}{bewR05ZwQo2sQ7yw5lONSg}{esS4HfhQRa2brUxmrNnN1A}{local}{local[1]}, reason: zen-disco-elected-as-master ([0] nodes joined)
      -[16:15:17,552][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] publishing cluster state version [1]
      -[16:15:17,555][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] set local cluster state to version 1
      -[16:15:17,559][INFO ][org.elasticsearch.node.Node][Test worker] started
      -[16:15:17,559][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: took [27ms] done applying updated cluster_state (version: 1, uuid: qYen7CDnT0uhEwnmnuvGyA)
      -[16:15:17,577][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: execute
      -[16:15:17,578][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] cluster state updated, version [2], source [local-gateway-elected-state]
      -[16:15:17,578][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] publishing cluster state version [2]
      -[16:15:17,578][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] set local cluster state to version 2
      -[16:15:17,583][INFO ][org.elasticsearch.gateway.GatewayService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] recovered [0] indices into cluster_state
      -[16:15:17,584][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: took [6ms] done applying updated cluster_state (version: 2, uuid: xj0xXEP0SVOTZrdzdOm9og)
      -[16:15:17,585][INFO ][test                     ][Test worker] nodes are started
      -[16:15:17,591][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: execute
      -[16:15:17,629][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] creating Index [[test/wuR-344MSUyxJc4_TzCJ4g]], shards [5]/[1] - reason [create index]
      -[16:15:17,646][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null]
      -[16:15:17,756][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] using dynamic[true]
      -[16:15:18,165][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:15:18,597][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:15:18,642][INFO ][org.elasticsearch.cluster.metadata.MetaDataCreateIndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test] creating index, cause [api], templates [], shards [5]/[1], mappings [article]
      -[16:15:18,683][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test] closing ... (reason [cleaning up after validating index on master])
      -[16:15:18,684][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test/wuR-344MSUyxJc4_TzCJ4g] closing index service (reason [cleaning up after validating index on master])
      -[16:15:18,685][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] clearing all bitsets because [close]
      -[16:15:18,687][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] full cache clear, reason [close]
      -[16:15:18,688][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] clearing all bitsets because [close]
      -[16:15:18,690][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test/wuR-344MSUyxJc4_TzCJ4g] closed... (reason [cleaning up after validating index on master])
      -[16:15:18,694][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] cluster state updated, version [3], source [create-index [test], cause [api]]
      -[16:15:18,695][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] publishing cluster state version [3]
      -[16:15:18,695][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] set local cluster state to version 3
      -[16:15:18,697][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [[test/wuR-344MSUyxJc4_TzCJ4g]] creating index
      -[16:15:18,699][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] creating Index [[test/wuR-344MSUyxJc4_TzCJ4g]], shards [5]/[1] - reason [create index]
      -[16:15:18,701][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null]
      -[16:15:18,704][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] using dynamic[true]
      -[16:15:18,706][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [[test/wuR-344MSUyxJc4_TzCJ4g]] adding mapping [article], source [{"article":{"properties":{"content":{"type":"langdetect","analyzer":"_keyword","include_in_all":false,"languages":["de","en","fr"]}}}}]
      -[16:15:18,925][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:15:19,158][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:15:19,162][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][2] creating shard
      -[16:15:19,175][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][2] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/wuR-344MSUyxJc4_TzCJ4g/2, shard=[test][2]}]
      -[16:15:19,176][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] creating shard_id [test][2]
      -[16:15:19,185][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
      -[16:15:19,281][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED]
      -[16:15:19,299][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
      -[16:15:19,301][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][3] creating shard
      -[16:15:19,304][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][3] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/wuR-344MSUyxJc4_TzCJ4g/3, shard=[test][3]}]
      -[16:15:19,305][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] creating shard_id [test][3]
      -[16:15:19,308][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
      -[16:15:19,309][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED]
      -[16:15:19,310][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#3]] starting recovery from store ...
      -[16:15:19,312][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
      -[16:15:19,313][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#1]] starting recovery from store ...
      -[16:15:19,313][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][1] creating shard
      -[16:15:19,315][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][1] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/wuR-344MSUyxJc4_TzCJ4g/1, shard=[test][1]}]
      -[16:15:19,316][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] creating shard_id [test][1]
      -[16:15:19,320][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
      -[16:15:19,321][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED]
      -[16:15:19,325][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
      -[16:15:19,325][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][0] creating shard
      -[16:15:19,325][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#2]] starting recovery from store ...
      -[16:15:19,327][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][0] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/wuR-344MSUyxJc4_TzCJ4g/0, shard=[test][0]}]
      -[16:15:19,328][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] creating shard_id [test][0]
      -[16:15:19,331][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
      -[16:15:19,333][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED]
      -[16:15:19,336][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
      -[16:15:19,337][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#4]] starting recovery from store ...
      -[16:15:19,350][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: took [1.7s] done applying updated cluster_state (version: 3, uuid: M3VF5uEPQ5ymwKcnmMoVJA)
      -[16:15:19,513][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[bewR05Z][generic][T#4]] wipe translog location - creating new translog
      -[16:15:19,514][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[bewR05Z][generic][T#2]] wipe translog location - creating new translog
      -[16:15:19,515][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[bewR05Z][generic][T#1]] wipe translog location - creating new translog
      -[16:15:19,515][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[bewR05Z][generic][T#3]] wipe translog location - creating new translog
      -[16:15:19,543][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[bewR05Z][generic][T#4]] no translog ID present in the current generation - creating one
      -[16:15:19,544][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[bewR05Z][generic][T#3]] no translog ID present in the current generation - creating one
      -[16:15:19,544][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[bewR05Z][generic][T#1]] no translog ID present in the current generation - creating one
      -[16:15:19,545][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[bewR05Z][generic][T#2]] no translog ID present in the current generation - creating one
      -[16:15:19,613][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
      -[16:15:19,613][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#1]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
      -[16:15:19,613][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#2]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
      -[16:15:19,614][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#3]] recovery completed from [shard_store], took [444ms]
      -[16:15:19,615][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#2]] recovery completed from [shard_store], took [298ms]
      -[16:15:19,615][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#3]] [test][2] sending [internal:cluster/shard/started] to [bewR05ZwQo2sQ7yw5lONSg] for shard entry [shard id [[test][2]], allocation id [hiFBgviCQHOMNmmVPZm12w], primary term [0], message [after new shard recovery]]
      -[16:15:19,616][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#2]] [test][1] sending [internal:cluster/shard/started] to [bewR05ZwQo2sQ7yw5lONSg] for shard entry [shard id [[test][1]], allocation id [bRvnNWcsQiOqqtjcoUnxkg], primary term [0], message [after new shard recovery]]
      -[16:15:19,616][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#1]] recovery completed from [shard_store], took [310ms]
      -[16:15:19,616][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#1]] [test][3] sending [internal:cluster/shard/started] to [bewR05ZwQo2sQ7yw5lONSg] for shard entry [shard id [[test][3]], allocation id [mvASi-1VQxi4bC6WlqN-4g], primary term [0], message [after new shard recovery]]
      -[16:15:19,613][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#4]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
      -[16:15:19,617][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#4]] recovery completed from [shard_store], took [287ms]
      -[16:15:19,618][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#4]] [test][0] sending [internal:cluster/shard/started] to [bewR05ZwQo2sQ7yw5lONSg] for shard entry [shard id [[test][0]], allocation id [fSMgvX2LTJqdEQwaWj3zfA], primary term [0], message [after new shard recovery]]
      -[16:15:19,619][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#2]] [test][1] received shard started for [shard id [[test][1]], allocation id [bRvnNWcsQiOqqtjcoUnxkg], primary term [0], message [after new shard recovery]]
      -[16:15:19,619][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#3]] [test][2] received shard started for [shard id [[test][2]], allocation id [hiFBgviCQHOMNmmVPZm12w], primary term [0], message [after new shard recovery]]
      -[16:15:19,619][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#1]] [test][3] received shard started for [shard id [[test][3]], allocation id [mvASi-1VQxi4bC6WlqN-4g], primary term [0], message [after new shard recovery]]
      -[16:15:19,619][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#4]] [test][0] received shard started for [shard id [[test][0]], allocation id [fSMgvX2LTJqdEQwaWj3zfA], primary term [0], message [after new shard recovery]]
      -[16:15:19,621][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][1]], allocation id [bRvnNWcsQiOqqtjcoUnxkg], primary term [0], message [after new shard recovery], shard id [[test][2]], allocation id [hiFBgviCQHOMNmmVPZm12w], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [fSMgvX2LTJqdEQwaWj3zfA], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [mvASi-1VQxi4bC6WlqN-4g], primary term [0], message [after new shard recovery]]]: execute
      -[16:15:19,665][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][1] starting shard [test][1], node[bewR05ZwQo2sQ7yw5lONSg], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=bRvnNWcsQiOqqtjcoUnxkg], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:15:18.647Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][1]], allocation id [bRvnNWcsQiOqqtjcoUnxkg], primary term [0], message [after new shard recovery]])
      -[16:15:19,667][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][2] starting shard [test][2], node[bewR05ZwQo2sQ7yw5lONSg], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=hiFBgviCQHOMNmmVPZm12w], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:15:18.647Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][2]], allocation id [hiFBgviCQHOMNmmVPZm12w], primary term [0], message [after new shard recovery]])
      -[16:15:19,667][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][0] starting shard [test][0], node[bewR05ZwQo2sQ7yw5lONSg], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=fSMgvX2LTJqdEQwaWj3zfA], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:15:18.647Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][0]], allocation id [fSMgvX2LTJqdEQwaWj3zfA], primary term [0], message [after new shard recovery]])
      -[16:15:19,668][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][3] starting shard [test][3], node[bewR05ZwQo2sQ7yw5lONSg], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=mvASi-1VQxi4bC6WlqN-4g], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:15:18.647Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][3]], allocation id [mvASi-1VQxi4bC6WlqN-4g], primary term [0], message [after new shard recovery]])
      -[16:15:19,683][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] cluster state updated, version [4], source [shard-started[shard id [[test][1]], allocation id [bRvnNWcsQiOqqtjcoUnxkg], primary term [0], message [after new shard recovery], shard id [[test][2]], allocation id [hiFBgviCQHOMNmmVPZm12w], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [fSMgvX2LTJqdEQwaWj3zfA], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [mvASi-1VQxi4bC6WlqN-4g], primary term [0], message [after new shard recovery]]]
      -[16:15:19,684][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] publishing cluster state version [4]
      -[16:15:19,685][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] set local cluster state to version 4
      -[16:15:19,688][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
      -[16:15:19,690][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
      -[16:15:19,692][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
      -[16:15:19,692][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][4] creating shard
      -[16:15:19,695][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][4] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/wuR-344MSUyxJc4_TzCJ4g/4, shard=[test][4]}]
      -[16:15:19,696][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] creating shard_id [test][4]
      -[16:15:19,699][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
      -[16:15:19,701][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED]
      -[16:15:19,705][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
      -[16:15:19,706][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#2]] starting recovery from store ...
      -[16:15:19,707][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
      -[16:15:19,712][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[bewR05Z][generic][T#2]] wipe translog location - creating new translog
      -[16:15:19,714][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][1]], allocation id [bRvnNWcsQiOqqtjcoUnxkg], primary term [0], message [after new shard recovery], shard id [[test][2]], allocation id [hiFBgviCQHOMNmmVPZm12w], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [fSMgvX2LTJqdEQwaWj3zfA], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [mvASi-1VQxi4bC6WlqN-4g], primary term [0], message [after new shard recovery]]]: took [91ms] done applying updated cluster_state (version: 4, uuid: -kx_a1VVT964oBybzZGUZQ)
      -[16:15:19,716][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[bewR05Z][generic][T#2]] no translog ID present in the current generation - creating one
      -[16:15:19,723][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#2]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
      -[16:15:19,723][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][generic][T#2]] recovery completed from [shard_store], took [30ms]
      -[16:15:19,724][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#2]] [test][4] sending [internal:cluster/shard/started] to [bewR05ZwQo2sQ7yw5lONSg] for shard entry [shard id [[test][4]], allocation id [O2Q9NrVtQKGkB0MwWXeqxw], primary term [0], message [after new shard recovery]]
      -[16:15:19,724][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][generic][T#2]] [test][4] received shard started for [shard id [[test][4]], allocation id [O2Q9NrVtQKGkB0MwWXeqxw], primary term [0], message [after new shard recovery]]
      -[16:15:19,725][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [O2Q9NrVtQKGkB0MwWXeqxw], primary term [0], message [after new shard recovery]]]: execute
      -[16:15:19,726][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test][4] starting shard [test][4], node[bewR05ZwQo2sQ7yw5lONSg], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=O2Q9NrVtQKGkB0MwWXeqxw], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:15:18.647Z], delayed=false, allocation_status[deciders_throttled]] (shard started task: [shard id [[test][4]], allocation id [O2Q9NrVtQKGkB0MwWXeqxw], primary term [0], message [after new shard recovery]])
      -[16:15:19,729][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] cluster state updated, version [5], source [shard-started[shard id [[test][4]], allocation id [O2Q9NrVtQKGkB0MwWXeqxw], primary term [0], message [after new shard recovery]]]
      -[16:15:19,730][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] publishing cluster state version [5]
      -[16:15:19,730][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] set local cluster state to version 5
      -[16:15:19,732][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
      -[16:15:19,743][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [O2Q9NrVtQKGkB0MwWXeqxw], primary term [0], message [after new shard recovery]]]: took [17ms] done applying updated cluster_state (version: 5, uuid: tt-EHy-QQ4qrQN4Ok8uHcw)
      -[16:15:47,568][INFO ][org.elasticsearch.cluster.routing.allocation.DiskThresholdMonitor][elasticsearch[bewR05Z][management][T#2]] low disk watermark [85%] exceeded on [bewR05ZwQo2sQ7yw5lONSg][bewR05Z][/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0] free: 137.8gb[14.8%], replicas will not be assigned to this node
      -[16:15:49,789][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [put-mapping[article]]: execute
      -[16:15:49,792][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] using dynamic[true]
      -[16:15:49,960][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:15:50,131][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:15:50,355][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:15:50,357][DEBUG][org.elasticsearch.cluster.metadata.MetaDataMappingService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [test/wuR-344MSUyxJc4_TzCJ4g] update_mapping [article] with source [{"article":{"properties":{"content":{"type":"langdetect","analyzer":"_keyword","include_in_all":false,"languages":["de","en","fr"]},"title":{"type":"text","fields":{"keyword":{"type":"keyword","ignore_above":256}}}}}}]
      -[16:15:50,358][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] cluster state updated, version [6], source [put-mapping[article]]
      -[16:15:50,358][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] publishing cluster state version [6]
      -[16:15:50,358][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] set local cluster state to version 6
      -[16:15:50,358][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] [[test/wuR-344MSUyxJc4_TzCJ4g]] updating mapping [article], source [{"article":{"properties":{"content":{"type":"langdetect","analyzer":"_keyword","include_in_all":false,"languages":["de","en","fr"]},"title":{"type":"text","fields":{"keyword":{"type":"keyword","ignore_above":256}}}}}}]
      -[16:15:50,508][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:15:50,700][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:15:50,707][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[bewR05Z][clusterService#updateTask][T#1]] processing [put-mapping[article]]: took [916ms] done applying updated cluster_state (version: 6, uuid: hccsN_0QQe-qiUaEJPa9mw)
      -[16:15:51,011][INFO ][test                     ][Test worker] stopping nodes
      -[16:15:51,012][INFO ][org.elasticsearch.node.Node][Test worker] stopping ...
      -[16:15:51,015][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test] closing ... (reason [shutdown])
      -[16:15:51,015][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/wuR-344MSUyxJc4_TzCJ4g] closing index service (reason [shutdown])
      -[16:15:51,015][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closing... (reason: [shutdown])
      -[16:15:51,016][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
      -[16:15:51,016][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
      -[16:15:51,017][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
      -[16:15:51,017][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
      -[16:15:51,017][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
      -[16:15:51,018][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
      -[16:15:51,019][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
      -[16:15:51,019][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closed (reason: [shutdown])
      -[16:15:51,019][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closing... (reason: [shutdown])
      -[16:15:51,019][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
      -[16:15:51,019][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
      -[16:15:51,019][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
      -[16:15:51,019][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
      -[16:15:51,020][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
      -[16:15:51,022][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
      -[16:15:51,022][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
      -[16:15:51,022][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closed (reason: [shutdown])
      -[16:15:51,023][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closing... (reason: [shutdown])
      -[16:15:51,023][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
      -[16:15:51,023][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
      -[16:15:51,038][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
      -[16:15:51,038][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
      -[16:15:51,038][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
      -[16:15:51,040][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
      -[16:15:51,040][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
      -[16:15:51,040][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closed (reason: [shutdown])
      -[16:15:51,040][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closing... (reason: [shutdown])
      -[16:15:51,040][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
      -[16:15:51,040][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
      -[16:15:51,047][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
      -[16:15:51,047][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
      -[16:15:51,047][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
      -[16:15:51,048][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
      -[16:15:51,048][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
      -[16:15:51,048][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closed (reason: [shutdown])
      -[16:15:51,048][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closing... (reason: [shutdown])
      -[16:15:51,048][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
      -[16:15:51,049][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
      -[16:15:51,054][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
      -[16:15:51,054][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
      -[16:15:51,055][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
      -[16:15:51,055][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
      -[16:15:51,056][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
      -[16:15:51,056][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closed (reason: [shutdown])
      -[16:15:51,056][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close]
      -[16:15:51,056][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][indices_shutdown[T#1]] full cache clear, reason [close]
      -[16:15:51,057][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close]
      -[16:15:51,058][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/wuR-344MSUyxJc4_TzCJ4g] closed... (reason [shutdown])
      -[16:15:51,058][INFO ][org.elasticsearch.node.Node][Test worker] stopped
      -[16:15:51,058][INFO ][org.elasticsearch.node.Node][Test worker] closing ...
      -[16:15:51,065][INFO ][org.elasticsearch.node.Node][Test worker] closed
      -[16:15:51,071][INFO ][test                     ][Test worker] data files wiped
      -[16:15:53,074][INFO ][test                     ][Test worker] settings cluster name
      -[16:15:53,075][INFO ][test                     ][Test worker] starting nodes
      -[16:15:53,077][INFO ][test                     ][Test worker] settings={cluster.name=test-helper-cluster--joerg-1, http.enabled=false, path.home=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect, transport.type=local}
      -[16:15:53,079][INFO ][org.elasticsearch.node.Node][Test worker] initializing ...
      -[16:15:53,082][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] using node location [[NodePath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, spins=null}]], local_lock_id [0]
      -[16:15:53,082][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] node data locations details:
      - -> /Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, free_space [138.1gb], usable_space [137.8gb], total_space [931gb], spins? [unknown], mount [/ (/dev/disk0s2)], type [hfs]
      -[16:15:53,082][INFO ][org.elasticsearch.env.NodeEnvironment][Test worker] heap size [3.5gb], compressed ordinary object pointers [true]
      -[16:15:53,083][INFO ][org.elasticsearch.node.Node][Test worker] node name [Cu9MbJQ] derived from node ID [Cu9MbJQaQ6yK_lE9o5Kj9Q]; set [node.name] to override
      -[16:15:53,083][INFO ][org.elasticsearch.node.Node][Test worker] version[5.1.2], pid[9098], build[c8c4c16/2017-01-11T20:18:39.146Z], OS[Mac OS X/10.9.5/x86_64], JVM[Azul Systems, Inc./OpenJDK 64-Bit Server VM/1.8.0_112/25.112-b16]
      -[16:15:53,083][DEBUG][org.elasticsearch.node.Node][Test worker] using config [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/config], data [[/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data]], logs [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/logs], plugins [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins]
      -[16:15:53,083][DEBUG][org.elasticsearch.plugins.PluginsService][Test worker] [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins] directory does not exist.
      -[16:15:53,084][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] no modules loaded
      -[16:15:53,084][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] loaded plugin [org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin]
      -[16:15:53,084][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [force_merge], size [1], queue size [unbounded]
      -[16:15:53,085][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_started], core [1], max [16], keep alive [5m]
      -[16:15:53,085][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [listener], size [4], queue size [unbounded]
      -[16:15:53,085][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [index], size [8], queue size [200]
      -[16:15:53,085][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [refresh], core [1], max [4], keep alive [5m]
      -[16:15:53,085][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [generic], core [4], max [128], keep alive [30s]
      -[16:15:53,085][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [warmer], core [1], max [4], keep alive [5m]
      -[16:15:53,086][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [search], size [13], queue size [1k]
      -[16:15:53,086][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [flush], core [1], max [4], keep alive [5m]
      -[16:15:53,086][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_store], core [1], max [16], keep alive [5m]
      -[16:15:53,086][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [management], core [1], max [5], keep alive [5m]
      -[16:15:53,086][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [get], size [8], queue size [1k]
      -[16:15:53,086][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [bulk], size [8], queue size [50]
      -[16:15:53,086][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [snapshot], core [1], max [4], keep alive [5m]
      -[16:15:53,087][DEBUG][org.elasticsearch.script.ScriptService][Test worker] using script cache with max_size [100], expire [0s]
      -[16:15:53,090][DEBUG][org.elasticsearch.common.network.IfConfig][Test worker] configuration:
      -
      -lo0
      -        inet 127.0.0.1 netmask:255.0.0.0 scope:host
      -        inet6 fe80::1 prefixlen:64 scope:link
      -        inet6 ::1 prefixlen:128 scope:host
      -        UP MULTICAST LOOPBACK mtu:16384 index:1
      -
      -en4
      -        inet 10.1.1.42 netmask:255.255.0.0 broadcast:10.1.255.255 scope:site
      -        inet6 fe80::6a5b:35ff:febc:4672 prefixlen:64 scope:link
      -        hardware 68:5B:35:BC:46:72
      -        UP MULTICAST mtu:1500 index:9
      -
      -[16:15:53,091][DEBUG][org.elasticsearch.monitor.jvm.JvmGcMonitorService][Test worker] enabled [true], interval [1s], gc_threshold [{default=GcThreshold{name='default', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}, young=GcThreshold{name='young', warnThreshold=1000, infoThreshold=700, debugThreshold=400}, old=GcThreshold{name='old', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}}], overhead [50, 25, 10]
      -[16:15:53,092][DEBUG][org.elasticsearch.monitor.os.OsService][Test worker] using refresh_interval [1s]
      -[16:15:53,092][DEBUG][org.elasticsearch.monitor.process.ProcessService][Test worker] using refresh_interval [1s]
      -[16:15:53,092][DEBUG][org.elasticsearch.monitor.jvm.JvmService][Test worker] using refresh_interval [1s]
      -[16:15:53,092][DEBUG][org.elasticsearch.monitor.fs.FsService][Test worker] using refresh_interval [1s]
      -[16:15:53,092][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider][Test worker] using [cluster.routing.allocation.allow_rebalance] with [indices_all_active]
      -[16:15:53,093][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider][Test worker] using [cluster_concurrent_rebalance] with [2]
      -[16:15:53,093][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider][Test worker] using node_concurrent_outgoing_recoveries [2], node_concurrent_incoming_recoveries [2], node_initial_primaries_recoveries [4]
      -[16:15:53,095][DEBUG][org.elasticsearch.index.store.IndexStoreConfig][Test worker] using indices.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [0b]
      -[16:15:53,096][DEBUG][org.elasticsearch.indices.IndicesQueryCache][Test worker] using [node] query cache with size [364mb] max filter count [10000]
      -[16:15:53,096][DEBUG][org.elasticsearch.indices.IndexingMemoryController][Test worker] using indexing buffer size [364mb] with indices.memory.shard_inactive_time [5m], indices.memory.interval [5s]
      -[16:15:53,097][DEBUG][org.elasticsearch.transport.local.LocalTransport][Test worker] creating [8] workers, queue_size [-1]
      -[16:15:53,098][DEBUG][org.elasticsearch.discovery.zen.UnicastZenPing][Test worker] using initial hosts [0.0.0.0], with concurrent_connects [10], resolve_timeout [5s]
      -[16:15:53,099][DEBUG][org.elasticsearch.discovery.zen.ElectMasterService][Test worker] using minimum_master_nodes [-1]
      -[16:15:53,100][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][Test worker] using ping_timeout [3s], join.timeout [1m], master_election.ignore_non_master [false]
      -[16:15:53,100][DEBUG][org.elasticsearch.discovery.zen.MasterFaultDetection][Test worker] [master] uses ping_interval [1s], ping_timeout [30s], ping_retries [3]
      -[16:15:53,101][DEBUG][org.elasticsearch.discovery.zen.NodesFaultDetection][Test worker] [node  ] uses ping_interval [1s], ping_timeout [30s], ping_retries [3]
      -[16:15:53,132][DEBUG][org.elasticsearch.indices.recovery.RecoverySettings][Test worker] using max_bytes_per_sec[40mb]
      -[16:15:53,146][DEBUG][org.elasticsearch.gateway.GatewayAllocator$InternalPrimaryShardAllocator][Test worker] using initial_shards [quorum]
      -[16:15:53,317][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:15:53,324][DEBUG][org.elasticsearch.gateway.GatewayMetaState][Test worker] took 0s to load state
      -[16:15:53,325][INFO ][org.elasticsearch.node.Node][Test worker] initialized
      -[16:15:53,325][INFO ][org.elasticsearch.node.Node][Test worker] starting ...
      -[16:15:53,326][INFO ][org.elasticsearch.transport.TransportService][Test worker] publish_address {local[2]}, bound_addresses {local[2]}
      -[16:15:53,327][DEBUG][org.elasticsearch.node.Node][Test worker] waiting to join the cluster. timeout [30s]
      -[16:15:53,327][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] processing [initial_join]: execute
      -[16:15:53,328][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] processing [initial_join]: took [0s] no change in cluster_state
      -[16:15:56,332][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[Cu9MbJQ][generic][T#1]] filtered ping responses: (ignore_non_masters [false])
      -	--> ping_response{node [{Cu9MbJQ}{Cu9MbJQaQ6yK_lE9o5Kj9Q}{vcqlyfm5RQy0B9pDRCDhhw}{local}{local[2]}], id[14], master [null],cluster_state_version [-1], cluster_name[test-helper-cluster--joerg-1]}
      -[16:15:56,333][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[Cu9MbJQ][generic][T#1]] elected as master, waiting for incoming joins ([0] needed)
      -[16:15:56,333][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: execute
      -[16:15:56,334][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] cluster state updated, version [1], source [zen-disco-elected-as-master ([0] nodes joined)]
      -[16:15:56,334][INFO ][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] new_master {Cu9MbJQ}{Cu9MbJQaQ6yK_lE9o5Kj9Q}{vcqlyfm5RQy0B9pDRCDhhw}{local}{local[2]}, reason: zen-disco-elected-as-master ([0] nodes joined)
      -[16:15:56,334][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] publishing cluster state version [1]
      -[16:15:56,335][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] set local cluster state to version 1
      -[16:15:56,335][INFO ][org.elasticsearch.node.Node][Test worker] started
      -[16:15:56,336][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: took [2ms] done applying updated cluster_state (version: 1, uuid: 4DxornPUTw-_Fcvg3x73fw)
      -[16:15:56,337][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: execute
      -[16:15:56,337][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] cluster state updated, version [2], source [local-gateway-elected-state]
      -[16:15:56,337][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] publishing cluster state version [2]
      -[16:15:56,337][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] set local cluster state to version 2
      -[16:15:56,341][INFO ][test                     ][Test worker] nodes are started
      -[16:15:56,341][INFO ][org.elasticsearch.gateway.GatewayService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] recovered [0] indices into cluster_state
      -[16:15:56,341][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Cu9MbJQ][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: took [4ms] done applying updated cluster_state (version: 2, uuid: nj3qDzE3QM27TjZgz1zLKQ)
      -[16:15:57,174][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:15:57,176][INFO ][test                     ][Test worker] stopping nodes
      -[16:15:57,176][INFO ][org.elasticsearch.node.Node][Test worker] stopping ...
      -[16:15:57,177][INFO ][org.elasticsearch.node.Node][Test worker] stopped
      -[16:15:57,178][INFO ][org.elasticsearch.node.Node][Test worker] closing ...
      -[16:15:57,180][INFO ][org.elasticsearch.node.Node][Test worker] closed
      -[16:15:57,181][INFO ][test                     ][Test worker] data files wiped
      -
      -
      -
      -
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectBinaryTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectBinaryTest.html
deleted file mode 100644
index 0261732..0000000
--- a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectBinaryTest.html
+++ /dev/null
@@ -1,369 +0,0 @@
      -Test results - Class org.xbib.elasticsearch.index.mapper.langdetect.LangDetectBinaryTest
      -Summary: 1 tests, 0 failures, 0 ignored, 6.736s duration, 100% successful
      -Tests: testLangDetectBinary | 6.736s | passed
      -Standard output:
      -[16:15:59,186][INFO ][test                     ][Test worker] settings cluster name
      -[16:15:59,187][INFO ][test                     ][Test worker] starting nodes
      -[16:15:59,187][INFO ][test                     ][Test worker] settings={cluster.name=test-helper-cluster--joerg-1, http.enabled=false, path.home=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect, transport.type=local}
      -[16:15:59,190][INFO ][org.elasticsearch.node.Node][Test worker] initializing ...
      -[16:15:59,197][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] using node location [[NodePath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, spins=null}]], local_lock_id [0]
      -[16:15:59,197][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] node data locations details:
      - -> /Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, free_space [138.1gb], usable_space [137.8gb], total_space [931gb], spins? [unknown], mount [/ (/dev/disk0s2)], type [hfs]
      -[16:15:59,197][INFO ][org.elasticsearch.env.NodeEnvironment][Test worker] heap size [3.5gb], compressed ordinary object pointers [true]
      -[16:15:59,198][INFO ][org.elasticsearch.node.Node][Test worker] node name [_vWPEAD] derived from node ID [_vWPEAD8R3q57wu4rbdE0A]; set [node.name] to override
      -[16:15:59,198][INFO ][org.elasticsearch.node.Node][Test worker] version[5.1.2], pid[9098], build[c8c4c16/2017-01-11T20:18:39.146Z], OS[Mac OS X/10.9.5/x86_64], JVM[Azul Systems, Inc./OpenJDK 64-Bit Server VM/1.8.0_112/25.112-b16]
      -[16:15:59,198][DEBUG][org.elasticsearch.node.Node][Test worker] using config [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/config], data [[/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data]], logs [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/logs], plugins [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins]
      -[16:15:59,198][DEBUG][org.elasticsearch.plugins.PluginsService][Test worker] [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins] directory does not exist.
      -[16:15:59,199][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] no modules loaded
      -[16:15:59,199][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] loaded plugin [org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin]
      -[16:15:59,200][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [force_merge], size [1], queue size [unbounded]
      -[16:15:59,200][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_started], core [1], max [16], keep alive [5m]
      -[16:15:59,200][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [listener], size [4], queue size [unbounded]
      -[16:15:59,200][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [index], size [8], queue size [200]
      -[16:15:59,200][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [refresh], core [1], max [4], keep alive [5m]
      -[16:15:59,200][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [generic], core [4], max [128], keep alive [30s]
      -[16:15:59,201][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [warmer], core [1], max [4], keep alive [5m]
      -[16:15:59,201][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [search], size [13], queue size [1k]
      -[16:15:59,201][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [flush], core [1], max [4], keep alive [5m]
      -[16:15:59,201][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_store], core [1], max [16], keep alive [5m]
      -[16:15:59,202][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [management], core [1], max [5], keep alive [5m]
      -[16:15:59,202][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [get], size [8], queue size [1k]
      -[16:15:59,202][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [bulk], size [8], queue size [50]
      -[16:15:59,202][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [snapshot], core [1], max [4], keep alive [5m]
      -[16:15:59,203][DEBUG][org.elasticsearch.script.ScriptService][Test worker] using script cache with max_size [100], expire [0s]
      -[16:15:59,208][DEBUG][org.elasticsearch.common.network.IfConfig][Test worker] configuration:
      -
      -lo0
      -        inet 127.0.0.1 netmask:255.0.0.0 scope:host
      -        inet6 fe80::1 prefixlen:64 scope:link
      -        inet6 ::1 prefixlen:128 scope:host
      -        UP MULTICAST LOOPBACK mtu:16384 index:1
      -
      -en4
      -        inet 10.1.1.42 netmask:255.255.0.0 broadcast:10.1.255.255 scope:site
      -        inet6 fe80::6a5b:35ff:febc:4672 prefixlen:64 scope:link
      -        hardware 68:5B:35:BC:46:72
      -        UP MULTICAST mtu:1500 index:9
      -
      -[16:15:59,210][DEBUG][org.elasticsearch.monitor.jvm.JvmGcMonitorService][Test worker] enabled [true], interval [1s], gc_threshold [{default=GcThreshold{name='default', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}, young=GcThreshold{name='young', warnThreshold=1000, infoThreshold=700, debugThreshold=400}, old=GcThreshold{name='old', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}}], overhead [50, 25, 10]
      -[16:15:59,210][DEBUG][org.elasticsearch.monitor.os.OsService][Test worker] using refresh_interval [1s]
      -[16:15:59,211][DEBUG][org.elasticsearch.monitor.process.ProcessService][Test worker] using refresh_interval [1s]
      -[16:15:59,211][DEBUG][org.elasticsearch.monitor.jvm.JvmService][Test worker] using refresh_interval [1s]
      -[16:15:59,211][DEBUG][org.elasticsearch.monitor.fs.FsService][Test worker] using refresh_interval [1s]
      -[16:15:59,212][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider][Test worker] using [cluster.routing.allocation.allow_rebalance] with [indices_all_active]
      -[16:15:59,212][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider][Test worker] using [cluster_concurrent_rebalance] with [2]
      -[16:15:59,213][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider][Test worker] using node_concurrent_outgoing_recoveries [2], node_concurrent_incoming_recoveries [2], node_initial_primaries_recoveries [4]
      -[16:15:59,215][DEBUG][org.elasticsearch.index.store.IndexStoreConfig][Test worker] using indices.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [0b]
      -[16:15:59,216][DEBUG][org.elasticsearch.indices.IndicesQueryCache][Test worker] using [node] query cache with size [364mb] max filter count [10000]
      -[16:15:59,216][DEBUG][org.elasticsearch.indices.IndexingMemoryController][Test worker] using indexing buffer size [364mb] with indices.memory.shard_inactive_time [5m], indices.memory.interval [5s]
      -[16:15:59,217][DEBUG][org.elasticsearch.transport.local.LocalTransport][Test worker] creating [8] workers, queue_size [-1]
      -[16:15:59,218][DEBUG][org.elasticsearch.discovery.zen.UnicastZenPing][Test worker] using initial hosts [0.0.0.0], with concurrent_connects [10], resolve_timeout [5s]
      -[16:15:59,218][DEBUG][org.elasticsearch.discovery.zen.ElectMasterService][Test worker] using minimum_master_nodes [-1]
      -[16:15:59,218][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][Test worker] using ping_timeout [3s], join.timeout [1m], master_election.ignore_non_master [false]
      -[16:15:59,219][DEBUG][org.elasticsearch.discovery.zen.MasterFaultDetection][Test worker] [master] uses ping_interval [1s], ping_timeout [30s], ping_retries [3]
      -[16:15:59,219][DEBUG][org.elasticsearch.discovery.zen.NodesFaultDetection][Test worker] [node  ] uses ping_interval [1s], ping_timeout [30s], ping_retries [3]
      -[16:15:59,255][DEBUG][org.elasticsearch.indices.recovery.RecoverySettings][Test worker] using max_bytes_per_sec[40mb]
      -[16:15:59,274][DEBUG][org.elasticsearch.gateway.GatewayAllocator$InternalPrimaryShardAllocator][Test worker] using initial_shards [quorum]
      -[16:16:00,026][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:00,033][DEBUG][org.elasticsearch.gateway.GatewayMetaState][Test worker] took 0s to load state
      -[16:16:00,035][INFO ][org.elasticsearch.node.Node][Test worker] initialized
      -[16:16:00,035][INFO ][org.elasticsearch.node.Node][Test worker] starting ...
      -[16:16:00,036][INFO ][org.elasticsearch.transport.TransportService][Test worker] publish_address {local[3]}, bound_addresses {local[3]}
      -[16:16:00,037][DEBUG][org.elasticsearch.node.Node][Test worker] waiting to join the cluster. timeout [30s]
      -[16:16:00,037][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [initial_join]: execute
      -[16:16:00,038][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [initial_join]: took [0s] no change in cluster_state
      -[16:16:03,042][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[_vWPEAD][generic][T#1]] filtered ping responses: (ignore_non_masters [false])
      -	--> ping_response{node [{_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]}], id[21], master [null],cluster_state_version [-1], cluster_name[test-helper-cluster--joerg-1]}
      -[16:16:03,044][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[_vWPEAD][generic][T#1]] elected as master, waiting for incoming joins ([0] needed)
      -[16:16:03,045][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: execute
      -[16:16:03,046][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] cluster state updated, version [1], source [zen-disco-elected-as-master ([0] nodes joined)]
      -[16:16:03,046][INFO ][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] new_master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]}, reason: zen-disco-elected-as-master ([0] nodes joined)
      -[16:16:03,046][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] publishing cluster state version [1]
      -[16:16:03,046][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] set local cluster state to version 1
      -[16:16:03,047][INFO ][org.elasticsearch.node.Node][Test worker] started
      -[16:16:03,047][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: took [1ms] done applying updated cluster_state (version: 1, uuid: oJ4rhd9NQDSDvIwR3F29ng)
      -[16:16:03,048][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: execute
      -[16:16:03,048][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] cluster state updated, version [2], source [local-gateway-elected-state]
      -[16:16:03,048][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] publishing cluster state version [2]
      -[16:16:03,048][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] set local cluster state to version 2
      -[16:16:03,050][INFO ][org.elasticsearch.gateway.GatewayService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] recovered [0] indices into cluster_state
      -[16:16:03,050][INFO ][test                     ][Test worker] nodes are started
      -[16:16:03,050][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: took [2ms] done applying updated cluster_state (version: 2, uuid: aw8v7MD1SpeuLF9dnx3Tog)
      -[16:16:03,051][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: execute
      -[16:16:03,052][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] creating Index [[test/HfBDjEv4TJGXdFIpMdLtdQ]], shards [5]/[1] - reason [create index]
      -[16:16:03,053][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null]
      -[16:16:03,054][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] using dynamic[true]
      -[16:16:03,250][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:03,424][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:03,426][INFO ][org.elasticsearch.cluster.metadata.MetaDataCreateIndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test] creating index, cause [api], templates [], shards [5]/[1], mappings [someType]
      -[16:16:03,429][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test] closing ... (reason [cleaning up after validating index on master])
      -[16:16:03,429][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test/HfBDjEv4TJGXdFIpMdLtdQ] closing index service (reason [cleaning up after validating index on master])
      -[16:16:03,429][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] clearing all bitsets because [close]
      -[16:16:03,429][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] full cache clear, reason [close]
      -[16:16:03,430][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] clearing all bitsets because [close]
      -[16:16:03,430][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test/HfBDjEv4TJGXdFIpMdLtdQ] closed... (reason [cleaning up after validating index on master])
      -[16:16:03,430][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] cluster state updated, version [3], source [create-index [test], cause [api]]
      -[16:16:03,430][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] publishing cluster state version [3]
      -[16:16:03,430][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] set local cluster state to version 3
      -[16:16:03,430][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [[test/HfBDjEv4TJGXdFIpMdLtdQ]] creating index
      -[16:16:03,431][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] creating Index [[test/HfBDjEv4TJGXdFIpMdLtdQ]], shards [5]/[1] - reason [create index]
      -[16:16:03,431][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null]
      -[16:16:03,432][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] using dynamic[true]
      -[16:16:03,433][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [[test/HfBDjEv4TJGXdFIpMdLtdQ]] adding mapping [someType], source [{"someType":{"properties":{"content":{"type":"text","fields":{"language":{"type":"langdetect","analyzer":"_keyword","include_in_all":false,"binary":"true"}}}}}}]
      -[16:16:03,612][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:03,813][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:03,814][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][2] creating shard
      -[16:16:03,815][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][2] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/HfBDjEv4TJGXdFIpMdLtdQ/2, shard=[test][2]}]
      -[16:16:03,815][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] creating shard_id [test][2]
      -[16:16:03,816][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
      -[16:16:03,816][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED]
      -[16:16:03,818][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
      -[16:16:03,818][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][3] creating shard
      -[16:16:03,819][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#3]] starting recovery from store ...
      -[16:16:03,819][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][3] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/HfBDjEv4TJGXdFIpMdLtdQ/3, shard=[test][3]}]
      -[16:16:03,819][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] creating shard_id [test][3]
      -[16:16:03,819][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
      -[16:16:03,820][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED]
      -[16:16:03,820][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
      -[16:16:03,820][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[_vWPEAD][generic][T#3]] wipe translog location - creating new translog
      -[16:16:03,821][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][1] creating shard
      -[16:16:03,821][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#1]] starting recovery from store ...
      -[16:16:03,821][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][1] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/HfBDjEv4TJGXdFIpMdLtdQ/1, shard=[test][1]}]
      -[16:16:03,821][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] creating shard_id [test][1]
      -[16:16:03,822][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[_vWPEAD][generic][T#3]] no translog ID present in the current generation - creating one
      -[16:16:03,822][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
      -[16:16:03,822][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[_vWPEAD][generic][T#1]] wipe translog location - creating new translog
      -[16:16:03,823][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED]
      -[16:16:03,824][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[_vWPEAD][generic][T#1]] no translog ID present in the current generation - creating one
      -[16:16:03,825][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
      -[16:16:03,825][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][0] creating shard
      -[16:16:03,825][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#2]] starting recovery from store ...
      -[16:16:03,825][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][0] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/HfBDjEv4TJGXdFIpMdLtdQ/0, shard=[test][0]}]
      -[16:16:03,826][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] creating shard_id [test][0]
      -[16:16:03,827][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
      -[16:16:03,827][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED]
      -[16:16:03,827][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[_vWPEAD][generic][T#2]] wipe translog location - creating new translog
      -[16:16:03,828][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
      -[16:16:03,828][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#3]] recovery completed from [shard_store], took [13ms]
      -[16:16:03,828][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#3]] [test][2] sending [internal:cluster/shard/started] to [_vWPEAD8R3q57wu4rbdE0A] for shard entry [shard id [[test][2]], allocation id [xYCz6wGXRs2ejBJ85L1cTA], primary term [0], message [after new shard recovery]]
      -[16:16:03,828][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#3]] [test][2] received shard started for [shard id [[test][2]], allocation id [xYCz6wGXRs2ejBJ85L1cTA], primary term [0], message [after new shard recovery]]
      -[16:16:03,828][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[_vWPEAD][generic][T#2]] no translog ID present in the current generation - creating one
      -[16:16:03,828][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
      -[16:16:03,828][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#1]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
      -[16:16:03,829][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#1]] recovery completed from [shard_store], took [9ms]
      -[16:16:03,829][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#1]] [test][3] sending [internal:cluster/shard/started] to [_vWPEAD8R3q57wu4rbdE0A] for shard entry [shard id [[test][3]], allocation id [C88dd9azRtmPqXJyx_yrQA], primary term [0], message [after new shard recovery]]
      -[16:16:03,829][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#1]] [test][3] received shard started for [shard id [[test][3]], allocation id [C88dd9azRtmPqXJyx_yrQA], primary term [0], message [after new shard recovery]]
      -[16:16:03,829][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#4]] starting recovery from store ...
      -[16:16:03,830][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: took [778ms] done applying updated cluster_state (version: 3, uuid: S-H00SMrR66kjfh_m5yK5A)
      -[16:16:03,830][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][2]], allocation id [xYCz6wGXRs2ejBJ85L1cTA], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [C88dd9azRtmPqXJyx_yrQA], primary term [0], message [after new shard recovery]]]: execute
      -[16:16:03,830][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][2] starting shard [test][2], node[_vWPEAD8R3q57wu4rbdE0A], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=xYCz6wGXRs2ejBJ85L1cTA], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:03.426Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][2]], allocation id [xYCz6wGXRs2ejBJ85L1cTA], primary term [0], message [after new shard recovery]])
      -[16:16:03,831][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][3] starting shard [test][3], node[_vWPEAD8R3q57wu4rbdE0A], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=C88dd9azRtmPqXJyx_yrQA], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:03.426Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][3]], allocation id [C88dd9azRtmPqXJyx_yrQA], primary term [0], message [after new shard recovery]])
      -[16:16:03,831][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[_vWPEAD][generic][T#4]] wipe translog location - creating new translog
      -[16:16:03,831][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#2]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
      -[16:16:03,831][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#2]] recovery completed from [shard_store], took [10ms]
      -[16:16:03,831][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#2]] [test][1] sending [internal:cluster/shard/started] to [_vWPEAD8R3q57wu4rbdE0A] for shard entry [shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [after new shard recovery]]
      -[16:16:03,831][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#2]] [test][1] received shard started for [shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [after new shard recovery]]
      -[16:16:03,832][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[_vWPEAD][generic][T#4]] no translog ID present in the current generation - creating one
      -[16:16:03,833][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] cluster state updated, version [4], source [shard-started[shard id [[test][2]], allocation id [xYCz6wGXRs2ejBJ85L1cTA], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [C88dd9azRtmPqXJyx_yrQA], primary term [0], message [after new shard recovery]]]
      -[16:16:03,833][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] publishing cluster state version [4]
      -[16:16:03,833][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] set local cluster state to version 4
      -[16:16:03,834][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#4]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
      -[16:16:03,834][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
      -[16:16:03,834][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#4]] recovery completed from [shard_store], took [8ms]
      -[16:16:03,835][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
      -[16:16:03,835][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#4]] [test][0] sending [internal:cluster/shard/started] to [_vWPEAD8R3q57wu4rbdE0A] for shard entry [shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [after new shard recovery]]
      -[16:16:03,835][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#4]] [test][0] received shard started for [shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [after new shard recovery]]
      -[16:16:03,835][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][1] sending [internal:cluster/shard/started] to [_vWPEAD8R3q57wu4rbdE0A] for shard entry [shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
      -[16:16:03,835][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][1] received shard started for [shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
      -[16:16:03,835][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][4] creating shard
      -[16:16:03,836][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][4] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/HfBDjEv4TJGXdFIpMdLtdQ/4, shard=[test][4]}]
      -[16:16:03,836][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] creating shard_id [test][4]
      -[16:16:03,838][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
      -[16:16:03,839][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED]
      -[16:16:03,840][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
      -[16:16:03,841][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#3]] starting recovery from store ...
      -[16:16:03,841][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][0] sending [internal:cluster/shard/started] to [_vWPEAD8R3q57wu4rbdE0A] for shard entry [shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
      -[16:16:03,841][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][0] received shard started for [shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
      -[16:16:03,842][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[_vWPEAD][generic][T#3]] wipe translog location - creating new translog
      -[16:16:03,843][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][2]], allocation id [xYCz6wGXRs2ejBJ85L1cTA], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [C88dd9azRtmPqXJyx_yrQA], primary term [0], message [after new shard recovery]]]: took [12ms] done applying updated cluster_state (version: 4, uuid: Honx4OlmQaWwV9ZHUsiHkw)
      -[16:16:03,843][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [after new shard recovery], shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: execute
      -[16:16:03,843][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][1] starting shard [test][1], node[_vWPEAD8R3q57wu4rbdE0A], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=fnYoFX7cQTuHJRBAbm4hfA], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:03.426Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [after new shard recovery]])
      -[16:16:03,844][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[_vWPEAD][generic][T#3]] no translog ID present in the current generation - creating one
      -[16:16:03,844][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][0] starting shard [test][0], node[_vWPEAD8R3q57wu4rbdE0A], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=JtIlGmldRCCgVgwUHXhpmQ], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:03.426Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [after new shard recovery]])
      -[16:16:03,845][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] cluster state updated, version [5], source [shard-started[shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [after new shard recovery], shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]
      -[16:16:03,845][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] publishing cluster state version [5]
      -[16:16:03,845][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] set local cluster state to version 5
      -[16:16:03,846][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
      -[16:16:03,846][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
      -[16:16:03,847][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][generic][T#3]] recovery completed from [shard_store], took [10ms]
      -[16:16:03,847][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#3]] [test][4] sending [internal:cluster/shard/started] to [_vWPEAD8R3q57wu4rbdE0A] for shard entry [shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [after new shard recovery]]
      -[16:16:03,847][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][4] sending [internal:cluster/shard/started] to [_vWPEAD8R3q57wu4rbdE0A] for shard entry [shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
      -[16:16:03,847][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][generic][T#3]] [test][4] received shard started for [shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [after new shard recovery]]
      -[16:16:03,847][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][4] received shard started for [shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
      -[16:16:03,847][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
      -[16:16:03,849][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [after new shard recovery], shard id [[test][1]], allocation id [fnYoFX7cQTuHJRBAbm4hfA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [JtIlGmldRCCgVgwUHXhpmQ], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: took [5ms] done applying updated cluster_state (version: 5, uuid: yHGFA87MTFSc1tsQGGF_2g)
      -[16:16:03,849][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: execute
      -[16:16:03,849][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] [test][4] starting shard [test][4], node[_vWPEAD8R3q57wu4rbdE0A], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=yEvk4BTMTy2k6kuoHnuZIA], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:03.426Z], delayed=false, allocation_status[deciders_throttled]] (shard started task: [shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [after new shard recovery]])
      -[16:16:03,850][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] cluster state updated, version [6], source [shard-started[shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]
      -[16:16:03,850][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] publishing cluster state version [6]
      -[16:16:03,850][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] set local cluster state to version 6
      -[16:16:03,851][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
      -[16:16:03,852][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[_vWPEAD][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [yEvk4BTMTy2k6kuoHnuZIA], primary term [0], message [master {_vWPEAD}{_vWPEAD8R3q57wu4rbdE0A}{sW76y73GQcWkHqk3sZXGeA}{local}{local[3]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: took [3ms] done applying updated cluster_state (version: 6, uuid: BzKsuzFxRVGzzpH_StdMLg)
      -[16:16:03,893][INFO ][test                     ][Test worker] stopping nodes
      -[16:16:03,893][INFO ][org.elasticsearch.node.Node][Test worker] stopping ...
      -[16:16:03,894][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test] closing ... (reason [shutdown])
      -[16:16:03,895][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/HfBDjEv4TJGXdFIpMdLtdQ] closing index service (reason [shutdown])
      -[16:16:03,895][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closing... (reason: [shutdown])
      -[16:16:03,896][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
      -[16:16:03,896][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
      -[16:16:03,896][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
      -[16:16:03,896][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
      -[16:16:03,897][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
      -[16:16:03,898][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
      -[16:16:03,899][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
      -[16:16:03,899][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closed (reason: [shutdown])
      -[16:16:03,899][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closing... (reason: [shutdown])
      -[16:16:03,899][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
      -[16:16:03,899][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
      -[16:16:03,899][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
      -[16:16:03,899][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
      -[16:16:03,900][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
      -[16:16:03,901][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
      -[16:16:03,901][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
      -[16:16:03,901][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closed (reason: [shutdown])
      -[16:16:03,901][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closing... (reason: [shutdown])
      -[16:16:03,901][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
      -[16:16:03,901][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
      -[16:16:03,901][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
      -[16:16:03,901][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
      -[16:16:03,902][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
      -[16:16:03,903][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
      -[16:16:03,903][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
      -[16:16:03,903][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closed (reason: [shutdown])
      -[16:16:03,903][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closing... (reason: [shutdown])
      -[16:16:03,903][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
      -[16:16:03,903][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
      -[16:16:03,910][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
      -[16:16:03,910][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
      -[16:16:03,910][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
      -[16:16:03,911][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
      -[16:16:03,911][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
      -[16:16:03,911][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closed (reason: [shutdown])
      -[16:16:03,911][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closing... (reason: [shutdown])
      -[16:16:03,911][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
      -[16:16:03,911][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
      -[16:16:03,911][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
      -[16:16:03,911][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
      -[16:16:03,911][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
      -[16:16:03,912][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
      -[16:16:03,912][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
      -[16:16:03,912][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closed (reason: [shutdown])
      -[16:16:03,912][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close]
      -[16:16:03,912][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][indices_shutdown[T#1]] full cache clear, reason [close]
      -[16:16:03,913][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close]
      -[16:16:03,913][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/HfBDjEv4TJGXdFIpMdLtdQ] closed... (reason [shutdown])
      -[16:16:03,913][INFO ][org.elasticsearch.node.Node][Test worker] stopped
      -[16:16:03,913][INFO ][org.elasticsearch.node.Node][Test worker] closing ...
      -[16:16:03,915][INFO ][org.elasticsearch.node.Node][Test worker] closed
      -[16:16:03,922][INFO ][test                     ][Test worker] data files wiped
      -
      -
      -
      -
-
-
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectChineseTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectChineseTest.html
deleted file mode 100644
index 04a0ddc..0000000
--- a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangDetectChineseTest.html
+++ /dev/null
@@ -1,371 +0,0 @@
-Test results - Class org.xbib.elasticsearch.index.mapper.langdetect.LangDetectChineseTest
-Class org.xbib.elasticsearch.index.mapper.langdetect.LangDetectChineseTest
-1 tests, 0 failures, 0 ignored, 6.117s duration, 100% successful
-Tests
-Test                      Duration  Result
-testChineseLanguageCode   6.117s    passed
-Standard output
-[16:16:05,929][INFO ][test                     ][Test worker] settings cluster name
      -[16:16:05,929][INFO ][test                     ][Test worker] starting nodes
      -[16:16:05,929][INFO ][test                     ][Test worker] settings={cluster.name=test-helper-cluster--joerg-1, http.enabled=false, path.home=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect, transport.type=local}
      -[16:16:05,930][INFO ][org.elasticsearch.node.Node][Test worker] initializing ...
      -[16:16:05,933][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] using node location [[NodePath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, spins=null}]], local_lock_id [0]
      -[16:16:05,933][DEBUG][org.elasticsearch.env.NodeEnvironment][Test worker] node data locations details:
      - -> /Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0, free_space [138.1gb], usable_space [137.8gb], total_space [931gb], spins? [unknown], mount [/ (/dev/disk0s2)], type [hfs]
      -[16:16:05,933][INFO ][org.elasticsearch.env.NodeEnvironment][Test worker] heap size [3.5gb], compressed ordinary object pointers [true]
      -[16:16:05,933][INFO ][org.elasticsearch.node.Node][Test worker] node name [Z1dFIC3] derived from node ID [Z1dFIC3HQWqRSaRc5uCucQ]; set [node.name] to override
      -[16:16:05,933][INFO ][org.elasticsearch.node.Node][Test worker] version[5.1.2], pid[9098], build[c8c4c16/2017-01-11T20:18:39.146Z], OS[Mac OS X/10.9.5/x86_64], JVM[Azul Systems, Inc./OpenJDK 64-Bit Server VM/1.8.0_112/25.112-b16]
      -[16:16:05,933][DEBUG][org.elasticsearch.node.Node][Test worker] using config [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/config], data [[/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data]], logs [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/logs], plugins [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins]
      -[16:16:05,934][DEBUG][org.elasticsearch.plugins.PluginsService][Test worker] [/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/plugins] directory does not exist.
      -[16:16:05,934][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] no modules loaded
      -[16:16:05,934][INFO ][org.elasticsearch.plugins.PluginsService][Test worker] loaded plugin [org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin]
      -[16:16:05,935][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [force_merge], size [1], queue size [unbounded]
      -[16:16:05,935][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_started], core [1], max [16], keep alive [5m]
      -[16:16:05,935][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [listener], size [4], queue size [unbounded]
      -[16:16:05,935][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [index], size [8], queue size [200]
      -[16:16:05,935][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [refresh], core [1], max [4], keep alive [5m]
      -[16:16:05,935][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [generic], core [4], max [128], keep alive [30s]
      -[16:16:05,935][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [warmer], core [1], max [4], keep alive [5m]
      -[16:16:05,935][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [search], size [13], queue size [1k]
      -[16:16:05,936][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [flush], core [1], max [4], keep alive [5m]
      -[16:16:05,936][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [fetch_shard_store], core [1], max [16], keep alive [5m]
      -[16:16:05,936][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [management], core [1], max [5], keep alive [5m]
      -[16:16:05,936][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [get], size [8], queue size [1k]
      -[16:16:05,936][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [bulk], size [8], queue size [50]
      -[16:16:05,936][DEBUG][org.elasticsearch.threadpool.ThreadPool][Test worker] created thread pool: name [snapshot], core [1], max [4], keep alive [5m]
      -[16:16:05,937][DEBUG][org.elasticsearch.script.ScriptService][Test worker] using script cache with max_size [100], expire [0s]
      -[16:16:05,939][DEBUG][org.elasticsearch.common.network.IfConfig][Test worker] configuration:
      -
      -lo0
      -        inet 127.0.0.1 netmask:255.0.0.0 scope:host
      -        inet6 fe80::1 prefixlen:64 scope:link
      -        inet6 ::1 prefixlen:128 scope:host
      -        UP MULTICAST LOOPBACK mtu:16384 index:1
      -
      -en4
      -        inet 10.1.1.42 netmask:255.255.0.0 broadcast:10.1.255.255 scope:site
      -        inet6 fe80::6a5b:35ff:febc:4672 prefixlen:64 scope:link
      -        hardware 68:5B:35:BC:46:72
      -        UP MULTICAST mtu:1500 index:9
      -
      -[16:16:05,941][DEBUG][org.elasticsearch.monitor.jvm.JvmGcMonitorService][Test worker] enabled [true], interval [1s], gc_threshold [{default=GcThreshold{name='default', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}, young=GcThreshold{name='young', warnThreshold=1000, infoThreshold=700, debugThreshold=400}, old=GcThreshold{name='old', warnThreshold=10000, infoThreshold=5000, debugThreshold=2000}}], overhead [50, 25, 10]
      -[16:16:05,941][DEBUG][org.elasticsearch.monitor.os.OsService][Test worker] using refresh_interval [1s]
      -[16:16:05,941][DEBUG][org.elasticsearch.monitor.process.ProcessService][Test worker] using refresh_interval [1s]
      -[16:16:05,942][DEBUG][org.elasticsearch.monitor.jvm.JvmService][Test worker] using refresh_interval [1s]
      -[16:16:05,942][DEBUG][org.elasticsearch.monitor.fs.FsService][Test worker] using refresh_interval [1s]
      -[16:16:05,942][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider][Test worker] using [cluster.routing.allocation.allow_rebalance] with [indices_all_active]
      -[16:16:05,942][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider][Test worker] using [cluster_concurrent_rebalance] with [2]
      -[16:16:05,943][DEBUG][org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider][Test worker] using node_concurrent_outgoing_recoveries [2], node_concurrent_incoming_recoveries [2], node_initial_primaries_recoveries [4]
      -[16:16:05,945][DEBUG][org.elasticsearch.index.store.IndexStoreConfig][Test worker] using indices.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [0b]
      -[16:16:05,946][DEBUG][org.elasticsearch.indices.IndicesQueryCache][Test worker] using [node] query cache with size [364mb] max filter count [10000]
      -[16:16:05,946][DEBUG][org.elasticsearch.indices.IndexingMemoryController][Test worker] using indexing buffer size [364mb] with indices.memory.shard_inactive_time [5m], indices.memory.interval [5s]
      -[16:16:05,946][DEBUG][org.elasticsearch.transport.local.LocalTransport][Test worker] creating [8] workers, queue_size [-1]
      -[16:16:05,947][DEBUG][org.elasticsearch.discovery.zen.UnicastZenPing][Test worker] using initial hosts [0.0.0.0], with concurrent_connects [10], resolve_timeout [5s]
      -[16:16:05,947][DEBUG][org.elasticsearch.discovery.zen.ElectMasterService][Test worker] using minimum_master_nodes [-1]
      -[16:16:05,947][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][Test worker] using ping_timeout [3s], join.timeout [1m], master_election.ignore_non_master [false]
      -[16:16:05,947][DEBUG][org.elasticsearch.discovery.zen.MasterFaultDetection][Test worker] [master] uses ping_interval [1s], ping_timeout [30s], ping_retries [3]
      -[16:16:05,948][DEBUG][org.elasticsearch.discovery.zen.NodesFaultDetection][Test worker] [node  ] uses ping_interval [1s], ping_timeout [30s], ping_retries [3]
      -[16:16:05,977][DEBUG][org.elasticsearch.indices.recovery.RecoverySettings][Test worker] using max_bytes_per_sec[40mb]
      -[16:16:05,985][DEBUG][org.elasticsearch.gateway.GatewayAllocator$InternalPrimaryShardAllocator][Test worker] using initial_shards [quorum]
      -[16:16:06,164][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:06,170][DEBUG][org.elasticsearch.gateway.GatewayMetaState][Test worker] took 0s to load state
      -[16:16:06,171][INFO ][org.elasticsearch.node.Node][Test worker] initialized
      -[16:16:06,171][INFO ][org.elasticsearch.node.Node][Test worker] starting ...
      -[16:16:06,171][INFO ][org.elasticsearch.transport.TransportService][Test worker] publish_address {local[4]}, bound_addresses {local[4]}
      -[16:16:06,172][DEBUG][org.elasticsearch.node.Node][Test worker] waiting to join the cluster. timeout [30s]
      -[16:16:06,172][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [initial_join]: execute
      -[16:16:06,173][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [initial_join]: took [0s] no change in cluster_state
      -[16:16:09,179][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[Z1dFIC3][generic][T#1]] filtered ping responses: (ignore_non_masters [false])
      -	--> ping_response{node [{Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]}], id[28], master [null],cluster_state_version [-1], cluster_name[test-helper-cluster--joerg-1]}
      -[16:16:09,180][DEBUG][org.elasticsearch.discovery.zen.ZenDiscovery][elasticsearch[Z1dFIC3][generic][T#1]] elected as master, waiting for incoming joins ([0] needed)
      -[16:16:09,180][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: execute
      -[16:16:09,181][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] cluster state updated, version [1], source [zen-disco-elected-as-master ([0] nodes joined)]
      -[16:16:09,181][INFO ][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] new_master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]}, reason: zen-disco-elected-as-master ([0] nodes joined)
      -[16:16:09,181][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] publishing cluster state version [1]
      -[16:16:09,182][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] set local cluster state to version 1
      -[16:16:09,182][INFO ][org.elasticsearch.node.Node][Test worker] started
      -[16:16:09,182][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [zen-disco-elected-as-master ([0] nodes joined)]: took [1ms] done applying updated cluster_state (version: 1, uuid: 8JUfDV_gT8imelkMADqEdQ)
      -[16:16:09,183][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: execute
      -[16:16:09,184][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] cluster state updated, version [2], source [local-gateway-elected-state]
      -[16:16:09,184][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] publishing cluster state version [2]
      -[16:16:09,184][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] set local cluster state to version 2
      -[16:16:09,186][INFO ][org.elasticsearch.gateway.GatewayService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] recovered [0] indices into cluster_state
      -[16:16:09,186][INFO ][test                     ][Test worker] nodes are started
      -[16:16:09,186][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [local-gateway-elected-state]: took [2ms] done applying updated cluster_state (version: 2, uuid: LmbsxwgVTIeGeIflcjrzHQ)
      -[16:16:09,187][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: execute
      -[16:16:09,188][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] creating Index [[test/vk4gpyojS2iq8eEZ4MdMIw]], shards [5]/[1] - reason [create index]
      -[16:16:09,188][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null]
      -[16:16:09,189][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] using dynamic[true]
      -[16:16:09,370][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:09,544][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:09,545][INFO ][org.elasticsearch.cluster.metadata.MetaDataCreateIndexService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test] creating index, cause [api], templates [], shards [5]/[1], mappings [someType]
      -[16:16:09,547][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test] closing ... (reason [cleaning up after validating index on master])
      -[16:16:09,547][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test/vk4gpyojS2iq8eEZ4MdMIw] closing index service (reason [cleaning up after validating index on master])
      -[16:16:09,547][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] clearing all bitsets because [close]
      -[16:16:09,547][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] full cache clear, reason [close]
      -[16:16:09,547][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] clearing all bitsets because [close]
      -[16:16:09,547][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test/vk4gpyojS2iq8eEZ4MdMIw] closed... (reason [cleaning up after validating index on master])
      -[16:16:09,547][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] cluster state updated, version [3], source [create-index [test], cause [api]]
      -[16:16:09,547][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] publishing cluster state version [3]
      -[16:16:09,548][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] set local cluster state to version 3
      -[16:16:09,548][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [[test/vk4gpyojS2iq8eEZ4MdMIw]] creating index
      -[16:16:09,548][DEBUG][org.elasticsearch.indices.IndicesService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] creating Index [[test/vk4gpyojS2iq8eEZ4MdMIw]], shards [5]/[1] - reason [create index]
      -[16:16:09,549][DEBUG][org.elasticsearch.index.store.IndexStore][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] using index.store.throttle.type [NONE], with index.store.throttle.max_bytes_per_sec [null]
      -[16:16:09,549][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] using dynamic[true]
      -[16:16:09,550][DEBUG][org.elasticsearch.index.mapper.MapperService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [[test/vk4gpyojS2iq8eEZ4MdMIw]] adding mapping [someType], source [{"someType":{"properties":{"content":{"type":"text","fields":{"language":{"type":"langdetect","analyzer":"_keyword","include_in_all":false,"languages":["zh-cn"]}}}}}}]
      -[16:16:09,750][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:09,918][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:09,919][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][2] creating shard
      -[16:16:09,920][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][2] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/vk4gpyojS2iq8eEZ4MdMIw/2, shard=[test][2]}]
      -[16:16:09,920][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] creating shard_id [test][2]
      -[16:16:09,921][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
      -[16:16:09,921][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [CREATED]
      -[16:16:09,922][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
      -[16:16:09,922][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][3] creating shard
      -[16:16:09,922][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#3]] starting recovery from store ...
      -[16:16:09,922][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][3] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/vk4gpyojS2iq8eEZ4MdMIw/3, shard=[test][3]}]
      -[16:16:09,923][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] creating shard_id [test][3]
      -[16:16:09,923][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
      -[16:16:09,923][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [CREATED]
      -[16:16:09,924][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[Z1dFIC3][generic][T#3]] wipe translog location - creating new translog
      -[16:16:09,925][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
      -[16:16:09,925][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][1] creating shard
      -[16:16:09,925][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#1]] starting recovery from store ...
      -[16:16:09,925][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[Z1dFIC3][generic][T#3]] no translog ID present in the current generation - creating one
      -[16:16:09,926][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][1] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/vk4gpyojS2iq8eEZ4MdMIw/1, shard=[test][1]}]
      -[16:16:09,926][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] creating shard_id [test][1]
      -[16:16:09,926][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
      -[16:16:09,927][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [CREATED]
      -[16:16:09,927][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[Z1dFIC3][generic][T#1]] wipe translog location - creating new translog
      -[16:16:09,928][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
      -[16:16:09,928][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][0] creating shard
      -[16:16:09,928][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#2]] starting recovery from store ...
      -[16:16:09,928][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][0] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/vk4gpyojS2iq8eEZ4MdMIw/0, shard=[test][0]}]
      -[16:16:09,928][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] creating shard_id [test][0]
      -[16:16:09,929][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
      -[16:16:09,929][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#3]] recovery completed from [shard_store], took [9ms]
      -[16:16:09,929][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[Z1dFIC3][generic][T#1]] no translog ID present in the current generation - creating one
      -[16:16:09,929][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
      -[16:16:09,929][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#3]] [test][2] sending [internal:cluster/shard/started] to [Z1dFIC3HQWqRSaRc5uCucQ] for shard entry [shard id [[test][2]], allocation id [-ap99kDrTiq7PDg6sGzmOA], primary term [0], message [after new shard recovery]]
      -[16:16:09,929][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#3]] [test][2] received shard started for [shard id [[test][2]], allocation id [-ap99kDrTiq7PDg6sGzmOA], primary term [0], message [after new shard recovery]]
      -[16:16:09,929][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [CREATED]
      -[16:16:09,930][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[Z1dFIC3][generic][T#2]] wipe translog location - creating new translog
      -[16:16:09,930][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
      -[16:16:09,930][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#4]] starting recovery from store ...
      -[16:16:09,931][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[Z1dFIC3][generic][T#2]] no translog ID present in the current generation - creating one
      -[16:16:09,931][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [create-index [test], cause [api]]: took [744ms] done applying updated cluster_state (version: 3, uuid: nGr0qRjISbCib9mIyeiTtw)
      -[16:16:09,932][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][2]], allocation id [-ap99kDrTiq7PDg6sGzmOA], primary term [0], message [after new shard recovery]]]: execute
      -[16:16:09,932][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#1]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
      -[16:16:09,932][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[Z1dFIC3][generic][T#4]] wipe translog location - creating new translog
      -[16:16:09,932][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#1]] recovery completed from [shard_store], took [9ms]
      -[16:16:09,932][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][2] starting shard [test][2], node[Z1dFIC3HQWqRSaRc5uCucQ], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=-ap99kDrTiq7PDg6sGzmOA], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:09.545Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][2]], allocation id [-ap99kDrTiq7PDg6sGzmOA], primary term [0], message [after new shard recovery]])
      -[16:16:09,932][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#1]] [test][3] sending [internal:cluster/shard/started] to [Z1dFIC3HQWqRSaRc5uCucQ] for shard entry [shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [after new shard recovery]]
      -[16:16:09,932][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#1]] [test][3] received shard started for [shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [after new shard recovery]]
      -[16:16:09,933][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[Z1dFIC3][generic][T#4]] no translog ID present in the current generation - creating one
      -[16:16:09,933][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#2]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
      -[16:16:09,933][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#2]] recovery completed from [shard_store], took [7ms]
      -[16:16:09,933][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#2]] [test][1] sending [internal:cluster/shard/started] to [Z1dFIC3HQWqRSaRc5uCucQ] for shard entry [shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [after new shard recovery]]
      -[16:16:09,933][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#2]] [test][1] received shard started for [shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [after new shard recovery]]
      -[16:16:09,934][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] cluster state updated, version [4], source [shard-started[shard id [[test][2]], allocation id [-ap99kDrTiq7PDg6sGzmOA], primary term [0], message [after new shard recovery]]]
      -[16:16:09,934][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] publishing cluster state version [4]
      -[16:16:09,934][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] set local cluster state to version 4
      -[16:16:09,935][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#4]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
      -[16:16:09,935][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
      -[16:16:09,935][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#4]] recovery completed from [shard_store], took [7ms]
      -[16:16:09,935][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][3] sending [internal:cluster/shard/started] to [Z1dFIC3HQWqRSaRc5uCucQ] for shard entry [shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
      -[16:16:09,935][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#4]] [test][0] sending [internal:cluster/shard/started] to [Z1dFIC3HQWqRSaRc5uCucQ] for shard entry [shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [after new shard recovery]]
      -[16:16:09,935][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][3] received shard started for [shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
      -[16:16:09,935][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#4]] [test][0] received shard started for [shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [after new shard recovery]]
      -[16:16:09,936][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][1] sending [internal:cluster/shard/started] to [Z1dFIC3HQWqRSaRc5uCucQ] for shard entry [shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
      -[16:16:09,936][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][1] received shard started for [shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
      -[16:16:09,936][DEBUG][org.elasticsearch.indices.cluster.IndicesClusterStateService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][4] creating shard
      -[16:16:09,936][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][4] creating using a new path [ShardPath{path=/Users/joerg/Projects/github/jprante/elasticsearch-analysis-langdetect/data/nodes/0/indices/vk4gpyojS2iq8eEZ4MdMIw/4, shard=[test][4]}]
      -[16:16:09,936][DEBUG][org.elasticsearch.index.IndexService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] creating shard_id [test][4]
      -[16:16:09,937][DEBUG][org.elasticsearch.index.store.Store][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] store stats are refreshed with refresh_interval [10s]
      -[16:16:09,938][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [CREATED]
      -[16:16:09,939][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [CREATED]->[RECOVERING], reason [from store]
      -[16:16:09,939][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#3]] starting recovery from store ...
      -[16:16:09,939][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][0] sending [internal:cluster/shard/started] to [Z1dFIC3HQWqRSaRc5uCucQ] for shard entry [shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
      -[16:16:09,939][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][0] received shard started for [shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
      -[16:16:09,940][DEBUG][org.elasticsearch.index.translog.Translog][elasticsearch[Z1dFIC3][generic][T#3]] wipe translog location - creating new translog
      -[16:16:09,941][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][2]], allocation id [-ap99kDrTiq7PDg6sGzmOA], primary term [0], message [after new shard recovery]]]: took [8ms] done applying updated cluster_state (version: 4, uuid: Ve7La_RPQCmQdfMiBHlsEA)
      -[16:16:09,941][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [after new shard recovery], shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: execute
      -[16:16:09,941][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][3] starting shard [test][3], node[Z1dFIC3HQWqRSaRc5uCucQ], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=eeSTCfSDS6q4q5GtYVmMvQ], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:09.545Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [after new shard recovery]])
      -[16:16:09,942][DEBUG][org.elasticsearch.index.engine.Engine][elasticsearch[Z1dFIC3][generic][T#3]] no translog ID present in the current generation - creating one
      -[16:16:09,942][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][1] starting shard [test][1], node[Z1dFIC3HQWqRSaRc5uCucQ], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=LE9gdszmSEChyZPES-ybTQ], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:09.545Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [after new shard recovery]])
      -[16:16:09,942][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][0] starting shard [test][0], node[Z1dFIC3HQWqRSaRc5uCucQ], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=9TYM88MbQgKcZw0LsFP50g], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:09.545Z], delayed=false, allocation_status[no_attempt]] (shard started task: [shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [after new shard recovery]])
      -[16:16:09,944][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] cluster state updated, version [5], source [shard-started[shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [after new shard recovery], shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]
      -[16:16:09,944][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] publishing cluster state version [5]
      -[16:16:09,944][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] set local cluster state to version 5
      -[16:16:09,944][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#3]] state: [RECOVERING]->[POST_RECOVERY], reason [post recovery from shard_store]
      -[16:16:09,944][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][generic][T#3]] recovery completed from [shard_store], took [8ms]
      -[16:16:09,944][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#3]] [test][4] sending [internal:cluster/shard/started] to [Z1dFIC3HQWqRSaRc5uCucQ] for shard entry [shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [after new shard recovery]]
      -[16:16:09,944][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
      -[16:16:09,944][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][generic][T#3]] [test][4] received shard started for [shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [after new shard recovery]]
      -[16:16:09,945][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
      -[16:16:09,945][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][4] sending [internal:cluster/shard/started] to [Z1dFIC3HQWqRSaRc5uCucQ] for shard entry [shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
      -[16:16:09,945][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][4] received shard started for [shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]
      -[16:16:09,945][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
      -[16:16:09,946][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [after new shard recovery], shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [after new shard recovery], shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [after new shard recovery], shard id [[test][3]], allocation id [eeSTCfSDS6q4q5GtYVmMvQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][1]], allocation id [LE9gdszmSEChyZPES-ybTQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started], shard id [[test][0]], allocation id [9TYM88MbQgKcZw0LsFP50g], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: took [4ms] done applying updated cluster_state (version: 5, uuid: oOvhqM7VQsWas0uMr-oqhA)
      -[16:16:09,946][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: execute
      -[16:16:09,946][DEBUG][org.elasticsearch.cluster.action.shard.ShardStateAction][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] [test][4] starting shard [test][4], node[Z1dFIC3HQWqRSaRc5uCucQ], [P], recovery_source[new shard recovery], s[INITIALIZING], a[id=kiGZYLwnTVaEWyUf14wAfQ], unassigned_info[[reason=INDEX_CREATED], at[2017-01-26T15:16:09.545Z], delayed=false, allocation_status[deciders_throttled]] (shard started task: [shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [after new shard recovery]])
      -[16:16:09,947][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] cluster state updated, version [6], source [shard-started[shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]
      -[16:16:09,947][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] publishing cluster state version [6]
      -[16:16:09,948][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] set local cluster state to version 6
      -[16:16:09,948][DEBUG][org.elasticsearch.index.shard.IndexShard][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] state: [POST_RECOVERY]->[STARTED], reason [global state is [STARTED]]
      -[16:16:09,949][DEBUG][org.elasticsearch.cluster.service.ClusterService][elasticsearch[Z1dFIC3][clusterService#updateTask][T#1]] processing [shard-started[shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [after new shard recovery], shard id [[test][4]], allocation id [kiGZYLwnTVaEWyUf14wAfQ], primary term [0], message [master {Z1dFIC3}{Z1dFIC3HQWqRSaRc5uCucQ}{z-sQkKoYTBum_AZD-Y6dXA}{local}{local[4]} marked shard as initializing, but shard state is [POST_RECOVERY], mark shard as started]]]: took [2ms] done applying updated cluster_state (version: 6, uuid: KH-Kc4cnRDWwj85zwrcSdQ)
      -[16:16:10,019][INFO ][test                     ][Test worker] stopping nodes
      -[16:16:10,019][INFO ][org.elasticsearch.node.Node][Test worker] stopping ...
      -[16:16:10,020][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test] closing ... (reason [shutdown])
      -[16:16:10,021][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/vk4gpyojS2iq8eEZ4MdMIw] closing index service (reason [shutdown])
      -[16:16:10,021][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closing... (reason: [shutdown])
      -[16:16:10,021][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
      -[16:16:10,021][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
      -[16:16:10,021][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
      -[16:16:10,021][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
      -[16:16:10,021][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
      -[16:16:10,022][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
      -[16:16:10,022][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
      -[16:16:10,022][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [0] closed (reason: [shutdown])
      -[16:16:10,022][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closing... (reason: [shutdown])
      -[16:16:10,022][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
      -[16:16:10,023][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
      -[16:16:10,023][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
      -[16:16:10,023][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
      -[16:16:10,023][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
      -[16:16:10,024][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
      -[16:16:10,024][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
      -[16:16:10,024][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [1] closed (reason: [shutdown])
      -[16:16:10,024][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closing... (reason: [shutdown])
      -[16:16:10,024][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
      -[16:16:10,024][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
      -[16:16:10,024][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
      -[16:16:10,024][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
      -[16:16:10,025][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
      -[16:16:10,026][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
      -[16:16:10,026][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
      -[16:16:10,026][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [2] closed (reason: [shutdown])
      -[16:16:10,026][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closing... (reason: [shutdown])
      -[16:16:10,027][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
      -[16:16:10,027][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
      -[16:16:10,033][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
      -[16:16:10,033][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
      -[16:16:10,033][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
      -[16:16:10,034][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
      -[16:16:10,034][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
      -[16:16:10,034][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [3] closed (reason: [shutdown])
      -[16:16:10,034][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closing... (reason: [shutdown])
      -[16:16:10,035][DEBUG][org.elasticsearch.index.shard.IndexShard][indices_shutdown[T#1]] state: [STARTED]->[CLOSED], reason [shutdown]
      -[16:16:10,035][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] flushing shard on close - this might take some time to sync files to disk
      -[16:16:10,035][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close now acquiring writeLock
      -[16:16:10,035][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] close acquired writeLock
      -[16:16:10,035][DEBUG][org.elasticsearch.index.translog.Translog][indices_shutdown[T#1]] translog closed
      -[16:16:10,036][DEBUG][org.elasticsearch.index.engine.Engine][indices_shutdown[T#1]] engine closed [api]
      -[16:16:10,036][DEBUG][org.elasticsearch.index.store.Store][indices_shutdown[T#1]] store reference count on close: 0
      -[16:16:10,036][DEBUG][org.elasticsearch.index.IndexService][indices_shutdown[T#1]] [4] closed (reason: [shutdown])
      -[16:16:10,036][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close]
      -[16:16:10,037][DEBUG][org.elasticsearch.index.cache.query.IndexQueryCache][indices_shutdown[T#1]] full cache clear, reason [close]
      -[16:16:10,037][DEBUG][org.elasticsearch.index.cache.bitset.BitsetFilterCache][indices_shutdown[T#1]] clearing all bitsets because [close]
      -[16:16:10,037][DEBUG][org.elasticsearch.indices.IndicesService][indices_shutdown[T#1]] [test/vk4gpyojS2iq8eEZ4MdMIw] closed... (reason [shutdown])
      -[16:16:10,037][INFO ][org.elasticsearch.node.Node][Test worker] stopped
      -[16:16:10,038][INFO ][org.elasticsearch.node.Node][Test worker] closing ...
      -[16:16:10,040][INFO ][org.elasticsearch.node.Node][Test worker] closed
      -[16:16:10,045][INFO ][test                     ][Test worker] data files wiped
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangProfileTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangProfileTest.html
deleted file mode 100644
index 465d759..0000000
--- a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangProfileTest.html
+++ /dev/null
@@ -1,121 +0,0 @@
-Test results - Class org.xbib.elasticsearch.index.mapper.langdetect.LangProfileTest
-6 tests, 0 failures, 0 ignored, 0s duration, 100% successful
-Tests:
-Test                      Duration  Result
-testAdd                   0s        passed
-testAddIllegally1         0s        passed
-testAddIllegally2         0s        passed
-testLangProfile           0s        passed
-testLangProfileStringInt  0s        passed
-testOmitLessFreq          0s        passed
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMappingTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMappingTest.html
deleted file mode 100644
index c68a3fe..0000000
--- a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMappingTest.html
+++ /dev/null
@@ -1,348 +0,0 @@
-Test results - Class org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMappingTest
-6 tests, 0 failures, 0 ignored, 2.649s duration, 100% successful
-Tests:
-Test                  Duration  Result
-testBinary            0.373s    passed
-testBinary2           0.374s    passed
-testCustomMappings    0.183s    passed
-testShortTextProfile  1.011s    passed
-testSimpleMappings    0.342s    passed
-testToFields          0.366s    passed
-Standard output:
-[16:16:12,060][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
      -[16:16:12,248][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:12,251][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
      -[16:16:12,413][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:12,419][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
      -[16:16:12,591][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:12,595][ERROR][org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper][Test worker] Illegal character '"' (code 0x22) in base64 content
      - at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper@5559ab11; line: 1, column: 73]
      -com.fasterxml.jackson.core.JsonParseException: Illegal character '"' (code 0x22) in base64 content
      - at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper@5559ab11; line: 1, column: 73]
      -	at com.fasterxml.jackson.core.JsonParser._constructError(JsonParser.java:1702) ~[jackson-core-2.8.1.jar:2.8.1]
      -	at com.fasterxml.jackson.core.base.ParserMinimalBase._reportError(ParserMinimalBase.java:558) ~[jackson-core-2.8.1.jar:2.8.1]
      -	at com.fasterxml.jackson.core.base.ParserMinimalBase._decodeBase64(ParserMinimalBase.java:422) ~[jackson-core-2.8.1.jar:2.8.1]
      -	at com.fasterxml.jackson.core.json.UTF8StreamJsonParser.getBinaryValue(UTF8StreamJsonParser.java:553) ~[jackson-core-2.8.1.jar:2.8.1]
      -	at com.fasterxml.jackson.core.JsonParser.getBinaryValue(JsonParser.java:1346) ~[jackson-core-2.8.1.jar:2.8.1]
      -	at org.elasticsearch.common.xcontent.json.JsonXContentParser.binaryValue(JsonXContentParser.java:182) ~[elasticsearch-5.1.2.jar:5.1.2]
      -	at org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.parseCreateField(LangdetectMapper.java:96) [main/:?]
      -	at org.elasticsearch.index.mapper.FieldMapper.parse(FieldMapper.java:286) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.FieldMapper$MultiFields.parse(FieldMapper.java:560) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.FieldMapper.parse(FieldMapper.java:299) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentParser.parseObjectOrField(DocumentParser.java:438) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentParser.parseValue(DocumentParser.java:564) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentParser.innerParseObject(DocumentParser.java:384) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentParser.parseObjectOrNested(DocumentParser.java:361) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentParser.internalParseDocument(DocumentParser.java:93) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentParser.parseDocument(DocumentParser.java:66) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentMapper.parse(DocumentMapper.java:275) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentMapper.parse(DocumentMapper.java:271) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMappingTest.testBinary2(LangdetectMappingTest.java:78) [test/:?]
      -	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
      -	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
      -	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
      -	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
      -	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50) [junit-4.12.jar:4.12]
      -	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.12.jar:4.12]
      -	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47) [junit-4.12.jar:4.12]
      -	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325) [junit-4.12.jar:4.12]
      -	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78) [junit-4.12.jar:4.12]
      -	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner.run(ParentRunner.java:363) [junit-4.12.jar:4.12]
      -	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecuter.runTestClass(JUnitTestClassExecuter.java:114) [gradle-testing-jvm-3.2.1.jar:3.2.1]
      -	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecuter.execute(JUnitTestClassExecuter.java:57) [gradle-testing-jvm-3.2.1.jar:3.2.1]
      -	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassProcessor.processTestClass(JUnitTestClassProcessor.java:66) [gradle-testing-jvm-3.2.1.jar:3.2.1]
      -	at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.processTestClass(SuiteTestClassProcessor.java:51) [gradle-testing-base-3.2.1.jar:3.2.1]
      -	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
      -	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
      -	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
      -	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
      -	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:35) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:32) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:93) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at com.sun.proxy.$Proxy2.processTestClass(Unknown Source) [?:?]
      -	at org.gradle.api.internal.tasks.testing.worker.TestWorker.processTestClass(TestWorker.java:109) [gradle-testing-base-3.2.1.jar:3.2.1]
      -	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
      -	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
      -	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
      -	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
      -	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:35) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.remote.internal.hub.MessageHub$Handler.run(MessageHub.java:377) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:54) [gradle-base-services-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.concurrent.StoppableExecutorImpl$1.run(StoppableExecutorImpl.java:40) [gradle-base-services-3.2.1.jar:3.2.1]
      -	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_112]
      -	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_112]
      -	at java.lang.Thread.run(Thread.java:745) [?:1.8.0_112]
      -[16:16:12,604][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
      -[16:16:12,787][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:12,788][ERROR][org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper][Test worker] Illegal character '"' (code 0x22) in base64 content
      - at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper@3ee35472; line: 1, column: 73]
      -com.fasterxml.jackson.core.JsonParseException: Illegal character '"' (code 0x22) in base64 content
      - at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper@3ee35472; line: 1, column: 73]
      -	at com.fasterxml.jackson.core.JsonParser._constructError(JsonParser.java:1702) ~[jackson-core-2.8.1.jar:2.8.1]
      -	at com.fasterxml.jackson.core.base.ParserMinimalBase._reportError(ParserMinimalBase.java:558) ~[jackson-core-2.8.1.jar:2.8.1]
      -	at com.fasterxml.jackson.core.base.ParserMinimalBase._decodeBase64(ParserMinimalBase.java:422) ~[jackson-core-2.8.1.jar:2.8.1]
      -	at com.fasterxml.jackson.core.json.UTF8StreamJsonParser.getBinaryValue(UTF8StreamJsonParser.java:553) ~[jackson-core-2.8.1.jar:2.8.1]
      -	at com.fasterxml.jackson.core.JsonParser.getBinaryValue(JsonParser.java:1346) ~[jackson-core-2.8.1.jar:2.8.1]
      -	at org.elasticsearch.common.xcontent.json.JsonXContentParser.binaryValue(JsonXContentParser.java:182) ~[elasticsearch-5.1.2.jar:5.1.2]
      -	at org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.parseCreateField(LangdetectMapper.java:96) [main/:?]
      -	at org.elasticsearch.index.mapper.FieldMapper.parse(FieldMapper.java:286) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.FieldMapper$MultiFields.parse(FieldMapper.java:560) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.FieldMapper.parse(FieldMapper.java:299) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentParser.parseObjectOrField(DocumentParser.java:438) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentParser.parseValue(DocumentParser.java:564) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentParser.innerParseObject(DocumentParser.java:384) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentParser.parseObjectOrNested(DocumentParser.java:361) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentParser.internalParseDocument(DocumentParser.java:93) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentParser.parseDocument(DocumentParser.java:66) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentMapper.parse(DocumentMapper.java:275) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentMapper.parse(DocumentMapper.java:271) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMappingTest.testBinary2(LangdetectMappingTest.java:88) [test/:?]
      -	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
      -	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
      -	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
      -	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
      -	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50) [junit-4.12.jar:4.12]
      -	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.12.jar:4.12]
      -	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47) [junit-4.12.jar:4.12]
      -	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325) [junit-4.12.jar:4.12]
      -	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78) [junit-4.12.jar:4.12]
      -	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner.run(ParentRunner.java:363) [junit-4.12.jar:4.12]
      -	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecuter.runTestClass(JUnitTestClassExecuter.java:114) [gradle-testing-jvm-3.2.1.jar:3.2.1]
      -	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecuter.execute(JUnitTestClassExecuter.java:57) [gradle-testing-jvm-3.2.1.jar:3.2.1]
      -	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassProcessor.processTestClass(JUnitTestClassProcessor.java:66) [gradle-testing-jvm-3.2.1.jar:3.2.1]
      -	at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.processTestClass(SuiteTestClassProcessor.java:51) [gradle-testing-base-3.2.1.jar:3.2.1]
      -	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
      -	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
      -	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
      -	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
      -	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:35) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:32) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:93) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at com.sun.proxy.$Proxy2.processTestClass(Unknown Source) [?:?]
      -	at org.gradle.api.internal.tasks.testing.worker.TestWorker.processTestClass(TestWorker.java:109) [gradle-testing-base-3.2.1.jar:3.2.1]
      -	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
      -	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
      -	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
      -	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
      -	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:35) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.remote.internal.hub.MessageHub$Handler.run(MessageHub.java:377) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:54) [gradle-base-services-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.concurrent.StoppableExecutorImpl$1.run(StoppableExecutorImpl.java:40) [gradle-base-services-3.2.1.jar:3.2.1]
      -	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_112]
      -	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_112]
      -	at java.lang.Thread.run(Thread.java:745) [?:1.8.0_112]
      -[16:16:12,794][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
      -[16:16:12,961][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:12,964][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
      -[16:16:13,130][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:13,137][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
      -[16:16:13,312][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:13,320][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
      -[16:16:13,491][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:13,497][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
      -[16:16:13,685][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:13,686][ERROR][org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper][Test worker] Illegal white space character (code 0x20) as character #3 of 4-char base64 unit: can only used between units
      - at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper@1f53f724; line: 1, column: 61]
      -com.fasterxml.jackson.core.JsonParseException: Illegal white space character (code 0x20) as character #3 of 4-char base64 unit: can only used between units
      - at [Source: org.elasticsearch.common.bytes.BytesReference$MarkSupportingStreamInputWrapper@1f53f724; line: 1, column: 61]
      -	at com.fasterxml.jackson.core.JsonParser._constructError(JsonParser.java:1702) ~[jackson-core-2.8.1.jar:2.8.1]
      -	at com.fasterxml.jackson.core.base.ParserMinimalBase._reportError(ParserMinimalBase.java:558) ~[jackson-core-2.8.1.jar:2.8.1]
      -	at com.fasterxml.jackson.core.base.ParserMinimalBase._decodeBase64(ParserMinimalBase.java:422) ~[jackson-core-2.8.1.jar:2.8.1]
      -	at com.fasterxml.jackson.core.json.UTF8StreamJsonParser.getBinaryValue(UTF8StreamJsonParser.java:553) ~[jackson-core-2.8.1.jar:2.8.1]
      -	at com.fasterxml.jackson.core.JsonParser.getBinaryValue(JsonParser.java:1346) ~[jackson-core-2.8.1.jar:2.8.1]
      -	at org.elasticsearch.common.xcontent.json.JsonXContentParser.binaryValue(JsonXContentParser.java:182) ~[elasticsearch-5.1.2.jar:5.1.2]
      -	at org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMapper.parseCreateField(LangdetectMapper.java:96) [main/:?]
      -	at org.elasticsearch.index.mapper.FieldMapper.parse(FieldMapper.java:286) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentParser.parseObjectOrField(DocumentParser.java:438) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentParser.parseValue(DocumentParser.java:564) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentParser.innerParseObject(DocumentParser.java:384) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentParser.parseObjectOrNested(DocumentParser.java:361) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentParser.internalParseDocument(DocumentParser.java:93) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentParser.parseDocument(DocumentParser.java:66) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentMapper.parse(DocumentMapper.java:275) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.elasticsearch.index.mapper.DocumentMapper.parse(DocumentMapper.java:271) [elasticsearch-5.1.2.jar:5.1.2]
      -	at org.xbib.elasticsearch.index.mapper.langdetect.LangdetectMappingTest.testBinary(LangdetectMappingTest.java:52) [test/:?]
      -	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
      -	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
      -	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
      -	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
      -	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50) [junit-4.12.jar:4.12]
      -	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.12.jar:4.12]
      -	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47) [junit-4.12.jar:4.12]
      -	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325) [junit-4.12.jar:4.12]
      -	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78) [junit-4.12.jar:4.12]
      -	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268) [junit-4.12.jar:4.12]
      -	at org.junit.runners.ParentRunner.run(ParentRunner.java:363) [junit-4.12.jar:4.12]
      -	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecuter.runTestClass(JUnitTestClassExecuter.java:114) [gradle-testing-jvm-3.2.1.jar:3.2.1]
      -	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecuter.execute(JUnitTestClassExecuter.java:57) [gradle-testing-jvm-3.2.1.jar:3.2.1]
      -	at org.gradle.api.internal.tasks.testing.junit.JUnitTestClassProcessor.processTestClass(JUnitTestClassProcessor.java:66) [gradle-testing-jvm-3.2.1.jar:3.2.1]
      -	at org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.processTestClass(SuiteTestClassProcessor.java:51) [gradle-testing-base-3.2.1.jar:3.2.1]
      -	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
      -	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
      -	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
      -	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
      -	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:35) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:32) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:93) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at com.sun.proxy.$Proxy2.processTestClass(Unknown Source) [?:?]
      -	at org.gradle.api.internal.tasks.testing.worker.TestWorker.processTestClass(TestWorker.java:109) [gradle-testing-base-3.2.1.jar:3.2.1]
      -	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_112]
      -	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_112]
      -	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
      -	at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
      -	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:35) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.remote.internal.hub.MessageHub$Handler.run(MessageHub.java:377) [gradle-messaging-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:54) [gradle-base-services-3.2.1.jar:3.2.1]
      -	at org.gradle.internal.concurrent.StoppableExecutorImpl$1.run(StoppableExecutorImpl.java:40) [gradle-base-services-3.2.1.jar:3.2.1]
      -	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_112]
      -	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_112]
      -	at java.lang.Thread.run(Thread.java:745) [?:1.8.0_112]
      -[16:16:13,691][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
      -[16:16:14,218][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
      -[16:16:14,223][DEBUG][org.elasticsearch.index.mapper.MapperService][Test worker] using dynamic[true]
      -[16:16:14,698][DEBUG][org.xbib.elasticsearch.common.langdetect.LangdetectService][Test worker] language detection service installed for [ar, bg, bn, cs, da, de, el, en, es, et, fa, fi, fr, gu, he, hi, hr, hu, id, it, ja, ko, lt, lv, mk, ml, nl, no, pa, pl, pt, ro, ru, sq, sv, ta, te, th, tl, tr, uk, ur, vi, zh-cn, zh-tw]
-
[remaining footer markup of the preceding report page omitted; the HTML of the deleted generated report files below did not survive extraction and is condensed to the diff headers and the recoverable summary figures]
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LanguageTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.LanguageTest.html
deleted file mode 100644
index d1b0bd5..0000000
[class page, 96 lines: 1 test (testLanguage), 0 failures, 0 ignored, 0s, 100% successful]
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.NGramTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.NGramTest.html
deleted file mode 100644
index 60dcd10..0000000
[class page, 111 lines: 4 tests (testConstants, testNGram, testNormalizeWithCJKKanji, testNormalizeWithLatin), 0 failures, 0 ignored, 0s, 100% successful]
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.SimpleDetectorTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.SimpleDetectorTest.html
deleted file mode 100644
index 741be9b..0000000
[class page, 106 lines: 1 test (testDetector), 0 failures, 0 ignored, 0.171s, 100% successful; standard output holds one LangdetectService DEBUG line listing the installed language profiles]
diff --git a/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.SimpleHttpTest.html b/docs/test/classes/org.xbib.elasticsearch.index.mapper.langdetect.SimpleHttpTest.html
deleted file mode 100644
index 75c4ba1..0000000
[class page, 101 lines: 2 tests (httpPost, httpPostShortProfile), both ignored, 0.001s]
diff --git a/docs/test/css/base-style.css b/docs/test/css/base-style.css
deleted file mode 100644
index 4afa73e..0000000
[report stylesheet, 179 lines]
diff --git a/docs/test/css/style.css b/docs/test/css/style.css
deleted file mode 100644
index 3dc4913..0000000
[report stylesheet, 84 lines]
diff --git a/docs/test/index.html b/docs/test/index.html
deleted file mode 100644
index ea85d03..0000000
[test summary page, 238 lines: 33 tests, 0 failures, 2 ignored, 1m6.04s, 100% successful]
diff --git a/docs/test/js/report.js b/docs/test/js/report.js
deleted file mode 100644
index 83bab4a..0000000
[tab-switching and line-wrapping script for the report pages, 194 lines]
diff --git a/docs/test/packages/org.xbib.elasticsearch.index.mapper.langdetect.html b/docs/test/packages/org.xbib.elasticsearch.index.mapper.langdetect.html
deleted file mode 100644
index 5ab9e1a..0000000
[package page, 219 lines: 33 tests, 0 failures, 2 ignored, 1m6.04s; per class: DetectLanguageTest 4 (1.807s), DetectorTest 5, LangDetectActionTest 2 (48.559s), LangDetectBinaryTest 1 (6.736s), LangDetectChineseTest 1 (6.117s), LangProfileTest 6, LangdetectMappingTest 6 (2.649s), LanguageTest 1, NGramTest 4, SimpleDetectorTest 1 (0.171s), SimpleHttpTest 2 (both ignored)]
diff --git a/gradle.properties b/gradle.properties
index c3bd1f2..08f4c1a 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -1,3 +1,9 @@
 group = org.xbib.elasticsearch.plugin
 name = elasticsearch-langdetect
-version = 5.1.2.0
+version = 5.2.1.0
+
+elasticsearch.version = 5.2.1
+log4j.version = 2.8
+junit.version = 4.12
+wagon-ssh-external.version = 2.10
+asciidoclet.version = 1.5.4
diff --git a/gradle/ext.gradle b/gradle/ext.gradle
new file mode 100644
index 0000000..b0f6d8f
--- /dev/null
+++ b/gradle/ext.gradle
@@ -0,0 +1,10 @@
+ext {
+    pluginName = 'langdetect'
+    pluginClassname = 'org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin'
+    pluginDescription = 'Language detection for Elasticsearch'
+    user = 'jprante'
+    name = 'elasticsearch-langdetect'
+    scmUrl = 'https://github.com/' + user + '/' + name
+    scmConnection = 'scm:git:git://github.com/' + user + '/' + name + '.git'
+    scmDeveloperConnection = 'scm:git:git://github.com/' + user + '/' + name + '.git'
+}
diff --git a/src/main/java/org/xbib/elasticsearch/common/langdetect/LangProfile.java b/src/main/java/org/xbib/elasticsearch/common/langdetect/LangProfile.java
index 5573dd0..d7deaab 100644
--- a/src/main/java/org/xbib/elasticsearch/common/langdetect/LangProfile.java
+++ b/src/main/java/org/xbib/elasticsearch/common/langdetect/LangProfile.java
@@ -68,7 +68,7 @@ public void setFreq(Map freq) {
 
     @SuppressWarnings("unchecked")
     public void read(InputStream input) throws IOException {
-        XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(input);
+        XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(null, input);
         Map map = parser.map();
         freq = (Map) map.get("freq");
         name = (String) map.get("name");
diff --git a/src/main/java/org/xbib/elasticsearch/common/langdetect/LangdetectService.java b/src/main/java/org/xbib/elasticsearch/common/langdetect/LangdetectService.java
index 91f7f62..c5f715e 100644
--- a/src/main/java/org/xbib/elasticsearch/common/langdetect/LangdetectService.java
+++ b/src/main/java/org/xbib/elasticsearch/common/langdetect/LangdetectService.java
@@ -133,9 +133,9 @@ private void load(Settings settings) {
             return;
         }
         try {
-            String[] keys = DEFAULT_LANGUAGES;
-            if (settings.get("languages") != null) {
-                keys = settings.get("languages").split(",");
+            String[] keys = settings.getAsArray("languages");
+            if (keys.length == 0) {
+                keys = DEFAULT_LANGUAGES;
             }
             int index = 0;
             int size = keys.length;
@@ -151,10 +151,7 @@
         }
         try {
             // map by settings
-            Settings map = Settings.EMPTY;
-            if (settings.getByPrefix("map.") != null) {
-                map = Settings.builder().put(settings.getByPrefix("map.")).build();
-            }
+            Settings map = Settings.builder().put(settings.getByPrefix("map.")).build();
             if (map.getAsMap().isEmpty()) {
                 // is in "map" a resource name?
                 String s = settings.get("map") != null ?
diff --git a/src/main/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.java b/src/main/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.java
index f834eb8..840083e 100644
--- a/src/main/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.java
+++ b/src/main/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.java
@@ -1,9 +1,11 @@
 package org.xbib.elasticsearch.index.mapper.langdetect;
 
+import com.fasterxml.jackson.core.JsonParseException;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.IndexableField;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -28,7 +30,7 @@
 import java.util.List;
 import java.util.Map;
 
-import static org.elasticsearch.index.mapper.TypeParsers.parseStore;
+import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
 
 /**
  *
@@ -46,7 +48,7 @@ public class LangdetectMapper extends TextFieldMapper {
     private final int positionIncrementGap;
 
     public LangdetectMapper(String simpleName,
-                            TextFieldType fieldType,
+                            MappedFieldType fieldType,
                             MappedFieldType defaultFieldType,
                             int positionIncrementGap,
                             Settings indexSettings,
@@ -67,7 +69,7 @@ protected String contentType() {
     }
 
     @Override
-    protected void parseCreateField(ParseContext context, List fields) throws IOException {
+    protected void parseCreateField(ParseContext context, List fields) throws IOException {
         if (context.externalValueSet()) {
             return;
         }
@@ -97,6 +99,8 @@ protected void parseCreateField(ParseContext context, List fields) throws
                 if (b != null && b.length > 0) {
                     value = new String(b, StandardCharsets.UTF_8);
                 }
+            } catch (JsonParseException e) {
+                logger.trace(e.getMessage(), e);
             } catch (Exception e) {
                 logger.error(e.getMessage(), e);
             }
@@ -175,7 +179,7 @@ private static void parseLanguageToFields(ParseContext originalContext, Object l
 
     public static class Defaults {
 
-        public static final MappedFieldType LANG_FIELD_TYPE = new TextFieldType();
+        public static final TextFieldType LANG_FIELD_TYPE = new TextFieldType();
 
         static {
             LANG_FIELD_TYPE.setStored(true);
@@ -308,7 +312,7 @@ public LangdetectMapper build(BuilderContext context) {
             setupFieldType(context);
             LangdetectService service = new LangdetectService(settingsBuilder.build());
             return new LangdetectMapper(name,
-                    (TextFieldType) fieldType(),
+                    fieldType(),
                     defaultFieldType,
                     positionIncrementGap,
                     context.indexSettings(),
@@ -360,7 +364,7 @@ public static class TypeParser implements Mapper.TypeParser {
                         iterator.remove();
                         break;
                     case "store":
-                        builder.store(parseStore(fieldName, fieldNode.toString(), parserContext));
+                        builder.store(parseStore(fieldNode.toString()));
                         iterator.remove();
                         break;
                     case "number_of_trials":
@@ -429,6 +433,10 @@ public static class TypeParser implements Mapper.TypeParser {
             }
             return builder;
         }
+
+        private static boolean parseStore(String store) throws MapperParsingException {
+            return !"no".equals(store) && ("yes".equals(store) || lenientNodeBooleanValue(store));
+        }
     }
 
     public static class LanguageTo {
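The new catch (JsonParseException e) branch above is what turns the base64 parse failures visible in the removed test report into trace-level noise: with "binary" : true the mapper first tries to read the field value as base64 and only falls back to plain text when decoding fails. The sketch below shows the round trip the mapping test drives. It assumes docMapper was built from base64-mapping.json through MapperTestUtils.newDocumentMapperParser(...); the classpath resource name is an assumption, while the remaining calls mirror LangdetectMappingTest.

[source,java]
----
// Sketch only, modelled on the testBinary flow; not a verbatim copy of the test.
String sampleBinary = copyToStringFromClasspath("base64.txt");   // assumed name of a base64-encoded sample
BytesReference json = jsonBuilder()
        .startObject()
        .field("someField", sampleBinary)     // decoded from base64, then run through language detection
        .endObject()
        .bytes();
ParseContext.Document doc = docMapper.parse("someIndex", "someType", "1", json).rootDoc();
// the detected language code is stored on the field; the English sample used by the tests yields "en"
assertEquals("en", doc.getFields("someField")[0].stringValue());
----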
diff --git a/src/test/java/org/xbib/elasticsearch/MapperTestUtils.java b/src/test/java/org/xbib/elasticsearch/MapperTestUtils.java
index 4f1b604..4b4b21e 100644
--- a/src/test/java/org/xbib/elasticsearch/MapperTestUtils.java
+++ b/src/test/java/org/xbib/elasticsearch/MapperTestUtils.java
@@ -1,9 +1,12 @@
 package org.xbib.elasticsearch;
 
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.apache.lucene.analysis.Analyzer;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.analysis.AnalysisRegistry;
@@ -31,6 +34,8 @@
  */
 public class MapperTestUtils {
 
+    private static final Logger logger = LogManager.getLogger(MapperTestUtils.class.getName());
+
     public static AnalysisRegistry analysisService(Settings customSettings) throws IOException {
         Settings settings = Settings.builder()
                 .put("path.home", System.getProperty("path.home", "/tmp"))
@@ -69,12 +74,17 @@ public static DocumentMapperParser newDocumentMapperParser(Settings customSettin
         Map tokenFilterFactoryMap = analysisRegistry.buildTokenFilterFactories(indexSettings);
         Map tokenizerFactoryMap = analysisRegistry.buildTokenizerFactories(indexSettings);
         Map> analyzerProviderMap = analysisRegistry.buildAnalyzerFactories(indexSettings);
-        IndexAnalyzers indexAnalyzers = analysisRegistry.build(indexSettings, analyzerProviderMap,
-                tokenizerFactoryMap, charFilterFactoryMap, tokenFilterFactoryMap);
-        MapperService mapperService = new MapperService(indexSettings, indexAnalyzers,
+        Map> normalizerProviderMap = analysisRegistry.buildNormalizerFactories(indexSettings);
+        IndexAnalyzers indexAnalyzers = analysisRegistry.build(indexSettings,
+                analyzerProviderMap,
+                normalizerProviderMap,
+                tokenizerFactoryMap,
+                charFilterFactoryMap,
+                tokenFilterFactoryMap);
+        MapperService mapperService = new MapperService(indexSettings, indexAnalyzers, NamedXContentRegistry.EMPTY,
+                similarityService, mapperRegistry, null);
+        return new DocumentMapperParser(indexSettings, mapperService, indexAnalyzers, NamedXContentRegistry.EMPTY, similarityService, mapperRegistry, null);
-        return new DocumentMapperParser(indexSettings,
-                mapperService, indexAnalyzers, similarityService, mapperRegistry, null);
     }
 
     public static Analyzer analyzer(String name) throws IOException {
@@ -93,7 +103,8 @@ public static Analyzer analyzer(String name) throws IOException {
         Map tokenFilterFactoryMap = analysisRegistry.buildTokenFilterFactories(indexSettings);
         Map tokenizerFactoryMap = analysisRegistry.buildTokenizerFactories(indexSettings);
         Map> analyzerProviderMap = analysisRegistry.buildAnalyzerFactories(indexSettings);
-        IndexAnalyzers indexAnalyzers = analysisRegistry.build(indexSettings, analyzerProviderMap,
+        Map> normalizerProviderMap = analysisRegistry.buildNormalizerFactories(indexSettings);
+        IndexAnalyzers indexAnalyzers = analysisRegistry.build(indexSettings, analyzerProviderMap, normalizerProviderMap,
                 tokenizerFactoryMap, charFilterFactoryMap, tokenFilterFactoryMap);
         Analyzer analyzer = indexAnalyzers.get(name) != null ?
                 indexAnalyzers.get(name) : analysisRegistry.getAnalyzer(name);
         assertNotNull(analyzer);
@@ -117,7 +128,8 @@ public static Analyzer analyzer(Settings customSettings, String name) throws IOE
         Map tokenFilterFactoryMap = analysisRegistry.buildTokenFilterFactories(indexSettings);
         Map tokenizerFactoryMap = analysisRegistry.buildTokenizerFactories(indexSettings);
         Map> analyzerProviderMap = analysisRegistry.buildAnalyzerFactories(indexSettings);
-        IndexAnalyzers indexAnalyzers = analysisRegistry.build(indexSettings, analyzerProviderMap,
+        Map> normalizerProviderMap = analysisRegistry.buildNormalizerFactories(indexSettings);
+        IndexAnalyzers indexAnalyzers = analysisRegistry.build(indexSettings, analyzerProviderMap, normalizerProviderMap,
                 tokenizerFactoryMap, charFilterFactoryMap, tokenFilterFactoryMap);
         Analyzer analyzer = indexAnalyzers.get(name) != null ? indexAnalyzers.get(name) : analysisRegistry.getAnalyzer(name);
         assertNotNull(analyzer);
@@ -141,7 +153,8 @@ public static Analyzer analyzer(String resource, String name) throws IOException
         Map tokenFilterFactoryMap = analysisRegistry.buildTokenFilterFactories(indexSettings);
         Map tokenizerFactoryMap = analysisRegistry.buildTokenizerFactories(indexSettings);
         Map> analyzerProviderMap = analysisRegistry.buildAnalyzerFactories(indexSettings);
-        IndexAnalyzers indexAnalyzers = analysisRegistry.build(indexSettings, analyzerProviderMap,
+        Map> normalizerProviderMap = analysisRegistry.buildNormalizerFactories(indexSettings);
+        IndexAnalyzers indexAnalyzers = analysisRegistry.build(indexSettings, analyzerProviderMap, normalizerProviderMap,
                 tokenizerFactoryMap, charFilterFactoryMap, tokenFilterFactoryMap);
         Analyzer analyzer = indexAnalyzers.get(name) != null ? indexAnalyzers.get(name) : analysisRegistry.getAnalyzer(name);
         assertNotNull(analyzer);
diff --git a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMappingTest.java b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMappingTest.java
index c42ccce..b40b329 100644
--- a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMappingTest.java
+++ b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMappingTest.java
@@ -43,7 +43,7 @@ public void testBinary() throws Exception {
         String sampleText = copyToStringFromClasspath("base64-decoded.txt");
         BytesReference json = jsonBuilder().startObject().field("someField", sampleBinary).endObject().bytes();
         ParseContext.Document doc = docMapper.parse("someIndex", "someType", "1", json).rootDoc();
-        assertEquals(1, doc.getFields("someField").length);
+        assertEquals(2, doc.getFields("someField").length);
         assertEquals("en", doc.getFields("someField")[0].stringValue());
         // re-parse it
         String builtMapping = docMapper.mappingSource().string();
diff --git a/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/base64-2-mapping.json b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/base64-2-mapping.json
index 3652b09..931e853 100644
--- a/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/base64-2-mapping.json
+++ b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/base64-2-mapping.json
@@ -12,4 +12,4 @@
       }
     }
   }
-}
\ No newline at end of file
+}
diff --git a/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/base64-mapping.json b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/base64-mapping.json
index 74aabdf..fdee673 100644
--- a/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/base64-mapping.json
+++ b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/base64-mapping.json
@@ -1,10 +1,11 @@
 {
-    "someType" : {
-        "properties" : {
-            "someField": {
-                "type" : "langdetect",
-                "binary" : true
-            }
-        }
+  "someType" : {
+    "properties" : {
+      "someField":{
+        "type" : "langdetect",
+        "languages": [ "en", "fr", "de", "it", "es" ],
+        "binary" : true
+      }
     }
-}
\ No newline at end of file
+  }
+}
diff --git a/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/mapping-to-fields.json b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/mapping-to-fields.json
index 9378754..f5c10f1 100644
--- a/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/mapping-to-fields.json
+++ b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/mapping-to-fields.json
@@ -19,4 +19,4 @@
       }
     }
   }
-}
\ No newline at end of file
+}
diff --git a/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/settings.json b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/settings.json
index 895aa32..4de283f 100644
--- a/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/settings.json
+++ b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/settings.json
@@ -1,24 +1,24 @@
 {
-    "index" : {
-        "analysis" : {
-            "analyzer" : {
-                "default" : {
-                    "type" : "standard"
-                }
-            }
+  "index" : {
+    "analysis" : {
+      "analyzer" : {
+        "default" : {
+          "type" : "standard"
         }
-    },
-    "mappings" : {
-        "someType" : {
-            "properties" : {
-                "someField":{
-                    "type" : "langdetect",
-                    "languages" : [ "de", "en", "fr", "nl", "it" ],
-                    "map" : {
-                        "de" : "ger"
-                    }
-                }
-            }
+      }
+    }
+  },
+  "mappings" : {
+    "someType" : {
+      "properties" : {
+        "someField":{
+          "type" : "langdetect",
+          "languages" : [ "de", "en", "fr", "nl", "it" ],
+          "map" : {
+            "de" : "ger"
+          }
         }
+      }
     }
-}
\ No newline at end of file
+  }
+}
diff --git a/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/short-text-mapping.json b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/short-text-mapping.json
index b0684f3..c7c3c96 100644
--- a/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/short-text-mapping.json
+++ b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/short-text-mapping.json
@@ -1,10 +1,10 @@
 {
-    "someType" : {
-        "properties" : {
-            "someField" : {
-                "type" : "langdetect",
-                "profile" : "short-text"
-            }
-        }
+  "someType" : {
+    "properties" : {
+      "someField" : {
+        "type" : "langdetect",
+        "profile" : "short-text"
+      }
     }
-}
\ No newline at end of file
+  }
+}
diff --git a/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/simple-mapping.json b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/simple-mapping.json
index 9c19524..3c36dd7 100644
--- a/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/simple-mapping.json
+++ b/src/test/resources/org/xbib/elasticsearch/index/mapper/langdetect/simple-mapping.json
@@ -1,10 +1,10 @@
 {
-    "someType" : {
-        "properties" : {
-            "someField": {
-                "type" : "langdetect",
-                "languages" : [ "de", "en", "fr", "nl", "it" ]
-            }
-        }
+  "someType" : {
+    "properties" : {
+      "someField": {
+        "type" : "langdetect",
+        "languages" : [ "de", "en", "fr", "nl", "it" ]
+      }
     }
-}
\ No newline at end of file
+  }
+}
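The reindented test mappings above also document the two per-field options that feed LangdetectService: languages restricts which profiles are loaded, and map rewrites a detected code before it is stored (settings.json maps de to ger). A short sketch of how those options reach the service after the getAsArray change earlier in this series; constructing the service directly from Settings is what LangdetectMapper.Builder#build does, and the concrete values here are only illustrative.

[source,java]
----
// Sketch: settings-driven configuration of the language detection service.
Settings settings = Settings.builder()
        .putArray("languages", "de", "en", "fr", "nl", "it")   // an empty array falls back to DEFAULT_LANGUAGES
        .put("map.de", "ger")                                   // read back via settings.getByPrefix("map.")
        .build();
LangdetectService service = new LangdetectService(settings);
----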
From 0338d4327ee974a2adba036f5aca684124f285da Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jo=CC=88rg=20Prante?=
Date: Thu, 2 Mar 2017 18:41:00 +0100
Subject: [PATCH 09/19] docs

---
 README.adoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.adoc b/README.adoc
index ba9df09..1efd627 100644
--- a/README.adoc
+++ b/README.adoc
@@ -118,7 +118,7 @@ Here is a list of languages code recognized:
 
 [source]
 ----
-./bin/elasticsearch-plugin install http://xbib.org/repository/org/xbib/elasticsearch/plugin/elasticsearch-langdetect/5.1.2.0/elasticsearch-langdetect-5.1.2.0-plugin.zip
+./bin/elasticsearch-plugin install http://xbib.org/repository/org/xbib/elasticsearch/plugin/elasticsearch-langdetect/5.2.1.0/elasticsearch-langdetect-5.2.1.0-plugin.zip
 ----
 
 ### Elasticsearch 2.x

From 9ab71cec39bb2fbb68460ee9a62e3dcc45679608 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jo=CC=88rg=20Prante?=
Date: Thu, 2 Mar 2017 18:47:44 +0100
Subject: [PATCH 10/19] update to Elasticsearch 5.2.2

---
 gradle.properties | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/gradle.properties b/gradle.properties
index 08f4c1a..58d532b 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -1,8 +1,8 @@
 group = org.xbib.elasticsearch.plugin
 name = elasticsearch-langdetect
-version = 5.2.1.0
+version = 5.2.2.0
 
-elasticsearch.version = 5.2.1
+elasticsearch.version = 5.2.2
 log4j.version = 2.8
 junit.version = 4.12
 wagon-ssh-external.version = 2.10

From 01293b055bba9a06949f68f6093b20751df9262e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jo=CC=88rg=20Prante?=
Date: Thu, 2 Mar 2017 18:48:29 +0100
Subject: [PATCH 11/19] docs

---
 README.adoc | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/README.adoc b/README.adoc
index 1efd627..b510de0 100644
--- a/README.adoc
+++ b/README.adoc
@@ -89,6 +89,7 @@ Here is a list of languages code recognized:
 [frame="all"]
 |===
 | Plugin version | Elasticsearch version | Release date
+| 5.2.2.0 | 5.2.2 | Mar 2, 2017
 | 5.2.1.0 | 5.2.1 | Mar 2, 2017
 | 5.1.2.0 | 5.1.2 | Jan 26, 2017
 | 2.4.4.1 | 2.4.4 | Jan 25, 2017
@@ -118,7 +119,7 @@ Here is a list of languages code recognized:
 
 [source]
 ----
-./bin/elasticsearch-plugin install http://xbib.org/repository/org/xbib/elasticsearch/plugin/elasticsearch-langdetect/5.2.1.0/elasticsearch-langdetect-5.2.1.0-plugin.zip
+./bin/elasticsearch-plugin install http://xbib.org/repository/org/xbib/elasticsearch/plugin/elasticsearch-langdetect/5.2.2.0/elasticsearch-langdetect-5.2.2.0-plugin.zip
 ----
 
 ### Elasticsearch 2.x

From dc4791317a95d0cfa68283018b893b7ccadf138b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jo=CC=88rg=20Prante?=
Date: Thu, 30 Mar 2017 23:07:52 +0200
Subject: [PATCH 12/19] update to Elasticsearch 5.3

---
 README.adoc                                      |  3 ++-
 gradle.properties                                |  4 ++--
 .../action/langdetect/LangdetectResponse.java    |  7 +++---
 .../mapper/langdetect/LangdetectMapper.java      |  2 +-
 .../plugin/langdetect/LangdetectPlugin.java      | 20 ++++++++++++++---
 .../java/org/elasticsearch/node/MockNode.java    |  1 -
 .../xbib/elasticsearch/MapperTestUtils.java      | 22 +++++++++----------
 .../org/xbib/elasticsearch/NodeTestUtils.java    |  4 ++--
 8 files changed, 39 insertions(+), 24 deletions(-)

diff --git a/README.adoc b/README.adoc
index b510de0..ba6028f 100644
--- a/README.adoc
+++ b/README.adoc
@@ -89,6 +89,7 @@ Here is a list of languages code recognized:
 [frame="all"]
 |===
 | Plugin version | Elasticsearch version | Release date
+| 5.3.0.0 | 5.3.0 | Mar 30, 2017
 | 5.2.2.0 | 5.2.2 | Mar 2, 2017
 | 5.2.1.0 | 5.2.1 | Mar 2, 2017
 | 5.1.2.0 | 5.1.2 | Jan 26, 2017
@@ -119,7 +120,7 @@ Here is a list of languages code recognized:
 
 [source]
 ----
-./bin/elasticsearch-plugin install http://xbib.org/repository/org/xbib/elasticsearch/plugin/elasticsearch-langdetect/5.2.2.0/elasticsearch-langdetect-5.2.2.0-plugin.zip
+./bin/elasticsearch-plugin install http://xbib.org/repository/org/xbib/elasticsearch/plugin/elasticsearch-langdetect/5.3.0.0/elasticsearch-langdetect-5.3.0.0-plugin.zip
 ----
 
 ### Elasticsearch 2.x
diff --git a/gradle.properties b/gradle.properties
index 58d532b..07f4efe 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -1,8 +1,8 @@
 group = org.xbib.elasticsearch.plugin
 name = elasticsearch-langdetect
-version = 5.2.2.0
+version = 5.3.0.0
 
-elasticsearch.version = 5.2.2
+elasticsearch.version = 5.3.0
 log4j.version = 2.8
 junit.version = 4.12
 wagon-ssh-external.version = 2.10
diff --git a/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.java b/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.java
index c36a8ab..a2c115f 100644
--- a/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.java
+++ b/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.java
@@ -2,7 +2,8 @@
 
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.xcontent.StatusToXContent;
+import org.elasticsearch.common.xcontent.StatusToXContentObject;
+import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.rest.RestStatus;
 import org.xbib.elasticsearch.common.langdetect.Language;
@@ -16,7 +17,7 @@
 /**
  *
  */
-public class LangdetectResponse extends ActionResponse implements StatusToXContent {
+public class LangdetectResponse extends ActionResponse implements StatusToXContentObject {
 
     private String profile;
 
@@ -41,7 +42,7 @@ public LangdetectResponse setLanguages(List languages) {
     }
 
     @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
         if (!Strings.isNullOrEmpty(profile)) {
             builder.field("profile", profile);
         }
diff --git a/src/main/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.java b/src/main/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.java
index 840083e..e6029a1 100644
--- a/src/main/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.java
+++ b/src/main/java/org/xbib/elasticsearch/index/mapper/langdetect/LangdetectMapper.java
@@ -435,7 +435,7 @@ public static class TypeParser implements Mapper.TypeParser {
         }
 
         private static boolean parseStore(String store) throws MapperParsingException {
-            return !"no".equals(store) && ("yes".equals(store) || lenientNodeBooleanValue(store));
+            return !"no".equals(store) && ("yes".equals(store) || lenientNodeBooleanValue(null, store, false));
         }
     }
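The parseStore update above only adapts to the three-argument lenientNodeBooleanValue signature that comes with Elasticsearch 5.3; the values accepted for the mapper's store option stay the same. Purely as an illustration of the helper shown above, with hypothetical calls that are not part of the source:

[source,java]
----
// Expected results of the private parseStore helper, assuming lenient boolean parsing
// treats "false"-like strings as false and everything else as true.
parseStore("yes");    // true  (legacy form)
parseStore("no");     // false (legacy form)
parseStore("true");   // true
parseStore("false");  // false
----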
diff --git a/src/main/java/org/xbib/elasticsearch/plugin/langdetect/LangdetectPlugin.java b/src/main/java/org/xbib/elasticsearch/plugin/langdetect/LangdetectPlugin.java
index 3de1722..678fbbb 100644
--- a/src/main/java/org/xbib/elasticsearch/plugin/langdetect/LangdetectPlugin.java
+++ b/src/main/java/org/xbib/elasticsearch/plugin/langdetect/LangdetectPlugin.java
@@ -2,10 +2,17 @@
 
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.settings.ClusterSettings;
+import org.elasticsearch.common.settings.IndexScopedSettings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.settings.SettingsFilter;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.plugins.ActionPlugin;
 import org.elasticsearch.plugins.MapperPlugin;
 import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestHandler;
 import org.xbib.elasticsearch.action.langdetect.LangdetectAction;
 import org.xbib.elasticsearch.action.langdetect.TransportLangdetectAction;
@@ -16,6 +23,7 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.function.Supplier;
 
 /**
  *
@@ -37,9 +45,15 @@ public Map getMappers() {
     }
 
     @Override
-    public List> getRestHandlers() {
-        List> extra = new ArrayList<>();
-        extra.add(RestLangdetectAction.class);
+    public List getRestHandlers(Settings settings,
+                                RestController restController,
+                                ClusterSettings clusterSettings,
+                                IndexScopedSettings indexScopedSettings,
+                                SettingsFilter settingsFilter,
+                                IndexNameExpressionResolver indexNameExpressionResolver,
+                                Supplier nodesInCluster) {
+        List extra = new ArrayList<>();
+        extra.add(new RestLangdetectAction(settings, restController));
         return extra;
     }
 }
diff --git a/src/test/java/org/elasticsearch/node/MockNode.java b/src/test/java/org/elasticsearch/node/MockNode.java
index 686fdec..4e7b7d8 100644
--- a/src/test/java/org/elasticsearch/node/MockNode.java
+++ b/src/test/java/org/elasticsearch/node/MockNode.java
@@ -1,7 +1,6 @@
 package org.elasticsearch.node;
 
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.node.internal.InternalSettingsPreparer;
 import org.elasticsearch.plugins.Plugin;
 
 import java.util.ArrayList;
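Most of the remaining test-utility hunks in this commit are mechanical: every helper that previously defaulted path.home to /tmp, or required -Dpath.home outright, now falls back to the JVM's working directory, presumably so the suite can run from a plain checkout without extra flags. In plain Java the new resolution order is simply the following; the snippet restates the builder change applied below and adds nothing new.

[source,java]
----
// An explicit -Dpath.home still wins; otherwise the working directory is used.
// The old fallback was the hard-coded "/tmp".
String pathHome = System.getProperty("path.home", System.getProperty("user.dir"));
Settings settings = Settings.builder()
        .put("path.home", pathHome)
        .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
        .build();
----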
Analyzer analyzer(String name) throws IOException { public static Analyzer analyzer(Settings customSettings, String name) throws IOException { Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", System.getProperty("path.home", "/tmp")) + .put("path.home", System.getProperty("path.home", System.getProperty("user.dir"))) .put(customSettings) .build(); AnalysisRegistry analysisRegistry = analysisService(settings); @@ -139,7 +139,7 @@ public static Analyzer analyzer(Settings customSettings, String name) throws IOE public static Analyzer analyzer(String resource, String name) throws IOException { Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", System.getProperty("path.home", "/tmp")) + .put("path.home", System.getProperty("path.home", System.getProperty("user.dir"))) .loadFromStream(resource, MapperTestUtils.class.getClassLoader().getResource(resource).openStream()) .build(); AnalysisRegistry analysisRegistry = analysisService(settings); @@ -164,7 +164,7 @@ public static Analyzer analyzer(String resource, String name) throws IOException public static TokenizerFactory tokenizerFactory(String name) throws IOException { Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", System.getProperty("path.home", "/tmp")) + .put("path.home", System.getProperty("path.home", System.getProperty("user.dir"))) .build(); AnalysisRegistry analysisRegistry = analysisService(settings); IndexMetaData indexMetaData = IndexMetaData.builder("test") @@ -183,7 +183,7 @@ public static TokenizerFactory tokenizerFactory(String name) throws IOException public static TokenizerFactory tokenizerFactory(String resource, String name) throws IOException { Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", System.getProperty("path.home")) + .put("path.home", System.getProperty("path.home", System.getProperty("user.dir"))) .loadFromStream(resource, MapperTestUtils.class.getClassLoader().getResource(resource).openStream()) .build(); Environment environment = new Environment(settings); @@ -204,7 +204,7 @@ public static TokenizerFactory tokenizerFactory(String resource, String name) th public static TokenFilterFactory tokenFilterFactory(String name) throws IOException { Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", System.getProperty("path.home", "/tmp")) + .put("path.home", System.getProperty("path.home", System.getProperty("user.dir"))) .build(); Environment environment = new Environment(settings); AnalysisRegistry analysisRegistry = analysisService(settings); @@ -222,7 +222,7 @@ public static TokenFilterFactory tokenFilterFactory(String name) throws IOExcept public static TokenFilterFactory tokenFilterFactory(String resource, String name) throws IOException { Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", System.getProperty("path.home", "/tmp")) + .put("path.home", System.getProperty("path.home", System.getProperty("user.dir"))) .loadFromStream(resource, MapperTestUtils.class.getClassLoader().getResource(resource).openStream()) .build(); Environment environment = new Environment(settings); @@ -241,7 +241,7 @@ public static TokenFilterFactory tokenFilterFactory(String resource, String name public static 
CharFilterFactory charFilterFactory(String name) throws IOException { Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", System.getProperty("path.home", "/tmp")) + .put("path.home", System.getProperty("path.home", System.getProperty("user.dir"))) .build(); Environment environment = new Environment(settings); AnalysisRegistry analysisRegistry = analysisService(settings); @@ -259,7 +259,7 @@ public static CharFilterFactory charFilterFactory(String name) throws IOExceptio public static CharFilterFactory charFilterFactory(String resource, String name) throws IOException { Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put("path.home", System.getProperty("path.home", "/tmp")) + .put("path.home", System.getProperty("path.home", System.getProperty("user.dir"))) .loadFromStream(resource, MapperTestUtils.class.getClassLoader().getResource(resource).openStream()) .build(); Environment environment = new Environment(settings); diff --git a/src/test/java/org/xbib/elasticsearch/NodeTestUtils.java b/src/test/java/org/xbib/elasticsearch/NodeTestUtils.java index ec733e2..95a7b9f 100644 --- a/src/test/java/org/xbib/elasticsearch/NodeTestUtils.java +++ b/src/test/java/org/xbib/elasticsearch/NodeTestUtils.java @@ -185,8 +185,8 @@ public Node buildNode() throws IOException { } - private static void deleteFiles() throws IOException { - Path directory = Paths.get(System.getProperty("path.home") + "/data"); + private void deleteFiles() throws IOException { + Path directory = Paths.get(getHome() + "/data"); Files.walkFileTree(directory, new SimpleFileVisitor() { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { From d202457d9c20d1aa7f9d7a2b8e8d6a76114f6c43 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=CC=88rg=20Prante?= Date: Sat, 1 Apr 2017 19:40:24 +0200 Subject: [PATCH 13/19] fixing REST action bug --- README.adoc | 3 ++- gradle.properties | 2 +- .../elasticsearch/action/langdetect/LangdetectResponse.java | 2 ++ .../rest/action/langdetect/RestLangdetectAction.java | 2 ++ 4 files changed, 7 insertions(+), 2 deletions(-) diff --git a/README.adoc b/README.adoc index ba6028f..9bc921b 100644 --- a/README.adoc +++ b/README.adoc @@ -89,6 +89,7 @@ Here is a list of languages code recognized: [frame="all"] |=== | Plugin version | Elasticsearch version | Release date +| 5.3.0.1 | 5.3.0 | Apr 1, 2017 | 5.3.0.0 | 5.3.0 | Mar 30, 2017 | 5.2.2.0 | 5.2.2 | Mar 2, 2017 | 5.2.1.0 | 5.2.1 | Mar 2, 2017 @@ -120,7 +121,7 @@ Here is a list of languages code recognized: [source] ---- -./bin/elasticsearch-plugin install http://xbib.org/repository/org/xbib/elasticsearch/plugin/elasticsearch-langdetect/5.3.0.0/elasticsearch-langdetect-5.3.0.0-plugin.zip +./bin/elasticsearch-plugin install http://xbib.org/repository/org/xbib/elasticsearch/plugin/elasticsearch-langdetect/5.3.0.1/elasticsearch-langdetect-5.3.0.1-plugin.zip ---- ### Elasticsearch 2.x diff --git a/gradle.properties b/gradle.properties index 07f4efe..449cf7e 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,6 +1,6 @@ group = org.xbib.elasticsearch.plugin name = elasticsearch-langdetect -version = 5.3.0.0 +version = 5.3.0.1 elasticsearch.version = 5.3.0 log4j.version = 2.8 diff --git a/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.java b/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.java index a2c115f..bdf265f 100644 --- 
a/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.java +++ b/src/main/java/org/xbib/elasticsearch/action/langdetect/LangdetectResponse.java @@ -43,6 +43,7 @@ public LangdetectResponse setLanguages(List languages) { @Override public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); if (!Strings.isNullOrEmpty(profile)) { builder.field("profile", profile); } @@ -52,6 +53,7 @@ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params par .field("probability", lang.getProbability()).endObject(); } builder.endArray(); + builder.endObject(); return builder; } diff --git a/src/main/java/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.java b/src/main/java/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.java index 7188eae..6f37fb3 100644 --- a/src/main/java/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.java +++ b/src/main/java/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.java @@ -12,6 +12,7 @@ import java.io.IOException; +import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; /** @@ -22,6 +23,7 @@ public class RestLangdetectAction extends BaseRestHandler { @Inject public RestLangdetectAction(Settings settings, RestController controller) { super(settings); + controller.registerHandler(GET, "/_langdetect", this); controller.registerHandler(POST, "/_langdetect", this); } From f24b20a34ee05c5d4b1989d1856e0b3d27a231d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=CC=88rg=20Prante?= Date: Mon, 3 Apr 2017 19:36:40 +0200 Subject: [PATCH 14/19] re-add HTTP testing --- build.gradle | 4 +--- gradle.properties | 2 +- .../langdetect/RestLangdetectAction.java | 6 +++++- .../org/xbib/elasticsearch/NodeTestUtils.java | 20 ++++++------------- .../mapper/langdetect/SimpleHttpTest.java | 16 ++++++++------- 5 files changed, 22 insertions(+), 26 deletions(-) diff --git a/build.gradle b/build.gradle index f7876f5..3f6fa0d 100644 --- a/build.gradle +++ b/build.gradle @@ -55,6 +55,7 @@ dependencies { compileOnly "org.apache.logging.log4j:log4j-core:${project.property('log4j.version')}" testCompile "junit:junit:${project.property('junit.version')}" testCompile "org.apache.logging.log4j:log4j-core:${project.property('log4j.version')}" + testCompile "org.elasticsearch.plugin:transport-netty4-client:${project.property('elasticsearch.version')}" asciidoclet "org.asciidoctor:asciidoclet:${project.property('asciidoclet.version')}" wagon "org.apache.maven.wagon:wagon-ssh-external:${project.property('wagon-ssh-external.version')}" distJars "${project.group}:${project.name}:${project.version}" @@ -84,9 +85,6 @@ task makePluginDescriptor(type: Copy) { 'name': pluginName, 'classname': pluginClassname, 'description': pluginDescription, - 'jvm': true, - 'site': false, - 'isolated': true, 'version': project.property('version'), 'javaVersion': project.property('targetCompatibility'), 'elasticsearchVersion' : project.property('elasticsearch.version') diff --git a/gradle.properties b/gradle.properties index 449cf7e..9d75493 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,6 +1,6 @@ group = org.xbib.elasticsearch.plugin name = elasticsearch-langdetect -version = 5.3.0.1 +version = 5.3.0.2 elasticsearch.version = 5.3.0 log4j.version = 2.8 diff --git a/src/main/java/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.java 
b/src/main/java/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.java index 6f37fb3..4e08794 100644 --- a/src/main/java/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.java +++ b/src/main/java/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.java @@ -24,13 +24,17 @@ public class RestLangdetectAction extends BaseRestHandler { public RestLangdetectAction(Settings settings, RestController controller) { super(settings); controller.registerHandler(GET, "/_langdetect", this); + controller.registerHandler(GET, "/_langdetect/{profile}", this); controller.registerHandler(POST, "/_langdetect", this); + controller.registerHandler(POST, "/_langdetect/{profile}", this); } @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + // read request.param early to "consume" parameter, avoiding HTTP 400 + final String profile = request.param("profile", ""); return channel -> client.execute(LangdetectAction.INSTANCE, new LangdetectRequest() - .setProfile(request.param("profile", "")) + .setProfile(profile) .setText(request.content().utf8ToString()), new RestStatusToXContentListener<>(channel)); } diff --git a/src/test/java/org/xbib/elasticsearch/NodeTestUtils.java b/src/test/java/org/xbib/elasticsearch/NodeTestUtils.java index 95a7b9f..b9707e0 100644 --- a/src/test/java/org/xbib/elasticsearch/NodeTestUtils.java +++ b/src/test/java/org/xbib/elasticsearch/NodeTestUtils.java @@ -17,6 +17,7 @@ import org.elasticsearch.node.MockNode; import org.elasticsearch.node.Node; import org.elasticsearch.node.NodeValidationException; +import org.elasticsearch.transport.Netty4Plugin; import org.xbib.elasticsearch.plugin.langdetect.LangdetectPlugin; import java.io.IOException; @@ -26,7 +27,7 @@ import java.nio.file.Paths; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; -import java.util.Collections; +import java.util.Arrays; import java.util.concurrent.atomic.AtomicInteger; /** @@ -97,18 +98,17 @@ protected String getClusterName() { } protected Settings getNodeSettings() { - //String hostname = NetworkUtils.getLocalAddress().getHostName(); return Settings.builder() .put("cluster.name", clustername) .put("transport.type", "local") - .put("http.enabled", false) + .put("http.enabled", true) + .put("http.type", "netty4") .put("path.home", getHome()) - //.put("node.max_local_storage_nodes", 1) .build(); } protected String getHome() { - return System.getProperty("path.home"); + return System.getProperty("path.home") != null ? 
System.getProperty("path.home") : System.getProperty("user.dir"); } public Node startNode() throws IOException { @@ -168,20 +168,12 @@ public static String findHttpAddress(Client client) { return null; } - public Node buildNodeWithoutPlugins() throws IOException { - Settings nodeSettings = Settings.builder() - .put(getNodeSettings()) - .build(); - logger.info("settings={}", nodeSettings.getAsMap()); - return new MockNode(nodeSettings, Collections.emptyList()); - } - public Node buildNode() throws IOException { Settings nodeSettings = Settings.builder() .put(getNodeSettings()) .build(); logger.info("settings={}", nodeSettings.getAsMap()); - return new MockNode(nodeSettings, Collections.singletonList(LangdetectPlugin.class)); + return new MockNode(nodeSettings, Arrays.asList(Netty4Plugin.class, LangdetectPlugin.class)); } diff --git a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleHttpTest.java b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleHttpTest.java index 5329663..4fe852d 100644 --- a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleHttpTest.java +++ b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleHttpTest.java @@ -1,7 +1,6 @@ package org.xbib.elasticsearch.index.mapper.langdetect; import org.elasticsearch.common.io.Streams; -import org.junit.Ignore; import org.junit.Test; import org.xbib.elasticsearch.NodeTestUtils; @@ -19,7 +18,6 @@ /** * */ -@Ignore public class SimpleHttpTest extends NodeTestUtils { @Test @@ -33,13 +31,15 @@ public void httpPost() throws IOException { URL base = new URL(httpAddress); URL url = new URL(base, "_langdetect"); HttpURLConnection connection = (HttpURLConnection) url.openConnection(); + connection.setRequestProperty("Content-Type", "application/json"); + connection.setRequestProperty("Accept", "application/json"); connection.setRequestMethod("POST"); connection.setDoOutput(true); connection.setDoInput(true); - Streams.copy(new StringReader("Das ist ein Text"), new OutputStreamWriter(connection.getOutputStream(), StandardCharsets.UTF_8)); + Streams.copy(new StringReader("{\"text\":\"Hallo, wie geht es Ihnen?\"}"), new OutputStreamWriter(connection.getOutputStream(), StandardCharsets.UTF_8)); StringWriter response = new StringWriter(); Streams.copy(new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8), response); - assertEquals("{\"languages\":[{\"language\":\"de\",\"probability\":0.9999967609942226}]}", response.toString()); + assertEquals("{\"languages\":[{\"language\":\"de\",\"probability\":0.9999968691386925}]}", response.toString()); } finally { stopCluster(); } @@ -54,15 +54,17 @@ public void httpPostShortProfile() throws IOException { throw new IllegalArgumentException("no HTTP address found"); } URL base = new URL(httpAddress); - URL url = new URL(base, "_langdetect?profile=short-text"); + URL url = new URL(base, "_langdetect/short-text"); HttpURLConnection connection = (HttpURLConnection) url.openConnection(); + connection.setRequestProperty("Content-Type", "application/json"); + connection.setRequestProperty("Accept", "application/json"); connection.setRequestMethod("POST"); connection.setDoOutput(true); connection.setDoInput(true); - Streams.copy(new StringReader("Das ist ein Text"), new OutputStreamWriter(connection.getOutputStream(), StandardCharsets.UTF_8)); + Streams.copy(new StringReader("{\"text\":\"Das ist ein Text\"}"), new OutputStreamWriter(connection.getOutputStream(), StandardCharsets.UTF_8)); StringWriter response = new 
StringWriter(); Streams.copy(new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8), response); - assertEquals("{\"profile\":\"short-text\",\"languages\":[{\"language\":\"de\",\"probability\":0.9999968539079941}]}", response.toString()); + assertEquals("{\"profile\":\"short-text\",\"languages\":[{\"language\":\"de\",\"probability\":0.9999951642381224}]}", response.toString()); } finally { stopCluster(); } From b412faa16d14787331534342901724f9d95dccd9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=CC=88rg=20Prante?= Date: Mon, 3 Apr 2017 21:46:19 +0200 Subject: [PATCH 15/19] parse JSON content body for 'text' and 'profile', fixing #60 --- .../langdetect/RestLangdetectAction.java | 40 ++++++++++++++++--- .../mapper/langdetect/SimpleHttpTest.java | 4 +- 2 files changed, 37 insertions(+), 7 deletions(-) diff --git a/src/main/java/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.java b/src/main/java/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.java index 4e08794..dd827b6 100644 --- a/src/main/java/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.java +++ b/src/main/java/org/xbib/elasticsearch/rest/action/langdetect/RestLangdetectAction.java @@ -1,8 +1,12 @@ package org.xbib.elasticsearch.rest.action.langdetect; import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.CheckedConsumer; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; @@ -31,11 +35,37 @@ public RestLangdetectAction(Settings settings, RestController controller) { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - // read request.param early to "consume" parameter, avoiding HTTP 400 - final String profile = request.param("profile", ""); - return channel -> client.execute(LangdetectAction.INSTANCE, new LangdetectRequest() - .setProfile(profile) - .setText(request.content().utf8ToString()), + final LangdetectRequest langdetectRequest = new LangdetectRequest(); + langdetectRequest.setText(request.param("text")); + langdetectRequest.setProfile(request.param("profile", "")); + withContent(request, parser -> { + if (parser != null) { + XContentParser.Token token; + while ((token = parser.nextToken()) != null) { + if (token == XContentParser.Token.VALUE_STRING) { + if ("text".equals(parser.currentName())) { + langdetectRequest.setText(parser.text()); + } else if ("profile".equals(parser.currentName())) { + langdetectRequest.setProfile(parser.text()); + } + } + } + } + }); + return channel -> client.execute(LangdetectAction.INSTANCE, langdetectRequest, new RestStatusToXContentListener<>(channel)); } + + private void withContent(RestRequest restRequest, CheckedConsumer withParser) + throws IOException { + BytesReference content = restRequest.content(); + XContentType xContentType = XContentType.JSON; + if (content.length() > 0) { + try (XContentParser parser = xContentType.xContent().createParser(restRequest.getXContentRegistry(), content)) { + withParser.accept(parser); + } + } else { + withParser.accept(null); + } + } } \ No newline at end of file diff --git 
a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleHttpTest.java b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleHttpTest.java index 4fe852d..46652a0 100644 --- a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleHttpTest.java +++ b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleHttpTest.java @@ -39,7 +39,7 @@ public void httpPost() throws IOException { Streams.copy(new StringReader("{\"text\":\"Hallo, wie geht es Ihnen?\"}"), new OutputStreamWriter(connection.getOutputStream(), StandardCharsets.UTF_8)); StringWriter response = new StringWriter(); Streams.copy(new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8), response); - assertEquals("{\"languages\":[{\"language\":\"de\",\"probability\":0.9999968691386925}]}", response.toString()); + assertEquals("{\"languages\":[{\"language\":\"de\",\"probability\":0.9999958804394111}]}", response.toString()); } finally { stopCluster(); } @@ -64,7 +64,7 @@ public void httpPostShortProfile() throws IOException { Streams.copy(new StringReader("{\"text\":\"Das ist ein Text\"}"), new OutputStreamWriter(connection.getOutputStream(), StandardCharsets.UTF_8)); StringWriter response = new StringWriter(); Streams.copy(new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8), response); - assertEquals("{\"profile\":\"short-text\",\"languages\":[{\"language\":\"de\",\"probability\":0.9999951642381224}]}", response.toString()); + assertEquals("{\"profile\":\"short-text\",\"languages\":[{\"language\":\"de\",\"probability\":0.9999968539079941}]}", response.toString()); } finally { stopCluster(); } From b0a130918aaf56f7a11bab91a592dd807063304b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=CC=88rg=20Prante?= Date: Mon, 3 Apr 2017 22:36:30 +0200 Subject: [PATCH 16/19] README --- README.adoc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.adoc b/README.adoc index 9bc921b..2b42402 100644 --- a/README.adoc +++ b/README.adoc @@ -89,6 +89,7 @@ Here is a list of languages code recognized: [frame="all"] |=== | Plugin version | Elasticsearch version | Release date +| 5.3.0.2 | 5.3.0 | Apr 3, 2017 | 5.3.0.1 | 5.3.0 | Apr 1, 2017 | 5.3.0.0 | 5.3.0 | Mar 30, 2017 | 5.2.2.0 | 5.2.2 | Mar 2, 2017 @@ -121,7 +122,7 @@ Here is a list of languages code recognized: [source] ---- -./bin/elasticsearch-plugin install http://xbib.org/repository/org/xbib/elasticsearch/plugin/elasticsearch-langdetect/5.3.0.1/elasticsearch-langdetect-5.3.0.1-plugin.zip +./bin/elasticsearch-plugin install http://xbib.org/repository/org/xbib/elasticsearch/plugin/elasticsearch-langdetect/5.3.0.2/elasticsearch-langdetect-5.3.0.2-plugin.zip ---- ### Elasticsearch 2.x From d38c79bab0a156984f2b649a8732ae764f28caf6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=CC=88rg=20Prante?= Date: Sun, 30 Apr 2017 20:25:01 +0200 Subject: [PATCH 17/19] update to Elasticsearch 5.3.1 --- build.gradle | 4 +- gradle.properties | 8 ++-- gradle/publish.gradle | 6 ++- .../common/langdetect/LangdetectService.java | 2 +- .../mapper/langdetect/SimpleHttpTest.java | 39 +++++++++++++++++-- 5 files changed, 48 insertions(+), 11 deletions(-) diff --git a/build.gradle b/build.gradle index 3f6fa0d..2377eb7 100644 --- a/build.gradle +++ b/build.gradle @@ -1,6 +1,7 @@ plugins { id "org.sonarqube" version "2.2" id "org.xbib.gradle.plugin.asciidoctor" version "1.5.4.1.0" + id "io.codearte.nexus-staging" version "0.7.0" } printf "Host: %s\nOS: %s %s %s\nJVM: %s %s %s %s\nGroovy: %s\nGradle: %s\n" + @@ -24,6 +25,7 
@@ apply plugin: 'pmd' apply plugin: 'checkstyle' apply plugin: "jacoco" apply plugin: 'org.xbib.gradle.plugin.asciidoctor' +apply plugin: "io.codearte.nexus-staging" repositories { mavenCentral() @@ -57,7 +59,7 @@ dependencies { testCompile "org.apache.logging.log4j:log4j-core:${project.property('log4j.version')}" testCompile "org.elasticsearch.plugin:transport-netty4-client:${project.property('elasticsearch.version')}" asciidoclet "org.asciidoctor:asciidoclet:${project.property('asciidoclet.version')}" - wagon "org.apache.maven.wagon:wagon-ssh-external:${project.property('wagon-ssh-external.version')}" + wagon "org.apache.maven.wagon:wagon-ssh:${project.property('wagon.version')}" distJars "${project.group}:${project.name}:${project.version}" } diff --git a/gradle.properties b/gradle.properties index 9d75493..aa2e35b 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,9 +1,9 @@ group = org.xbib.elasticsearch.plugin name = elasticsearch-langdetect -version = 5.3.0.2 +version = 5.3.1.0 -elasticsearch.version = 5.3.0 -log4j.version = 2.8 +elasticsearch.version = 5.3.1 +log4j.version = 2.8.2 junit.version = 4.12 -wagon-ssh-external.version = 2.10 asciidoclet.version = 1.5.4 +wagon.version = 2.12 diff --git a/gradle/publish.gradle b/gradle/publish.gradle index af1816c..2039063 100644 --- a/gradle/publish.gradle +++ b/gradle/publish.gradle @@ -6,7 +6,7 @@ task xbibUpload(type: Upload) { if (project.hasProperty('xbibUsername')) { mavenDeployer { configuration = configurations.wagon - repository(url: uri('scpexe://xbib.org/repository')) { + repository(url: uri('sftp://xbib.org/repository')) { authentication(userName: xbibUsername, privateKey: xbibPrivateKey) } } @@ -64,3 +64,7 @@ task sonatypeUpload(type: Upload) { } } } + +nexusStaging { + packageGroup = "org.xbib" +} diff --git a/src/main/java/org/xbib/elasticsearch/common/langdetect/LangdetectService.java b/src/main/java/org/xbib/elasticsearch/common/langdetect/LangdetectService.java index c5f715e..f5459ee 100644 --- a/src/main/java/org/xbib/elasticsearch/common/langdetect/LangdetectService.java +++ b/src/main/java/org/xbib/elasticsearch/common/langdetect/LangdetectService.java @@ -186,7 +186,7 @@ public void loadProfileFromResource(String resource, int index, int langsize) th String thisProfile = "/langdetect/" + (this.profile != null ? 
this.profile + "/" : ""); InputStream in = getClass().getResourceAsStream(thisProfile + resource); if (in == null) { - throw new IOException("profile '" + resource + "' not found"); + throw new IOException("profile '" + resource + "' not found, path = " + thisProfile + resource); } LangProfile langProfile = new LangProfile(); langProfile.read(in); diff --git a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleHttpTest.java b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleHttpTest.java index 46652a0..8913225 100644 --- a/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleHttpTest.java +++ b/src/test/java/org/xbib/elasticsearch/index/mapper/langdetect/SimpleHttpTest.java @@ -36,10 +36,12 @@ public void httpPost() throws IOException { connection.setRequestMethod("POST"); connection.setDoOutput(true); connection.setDoInput(true); - Streams.copy(new StringReader("{\"text\":\"Hallo, wie geht es Ihnen?\"}"), new OutputStreamWriter(connection.getOutputStream(), StandardCharsets.UTF_8)); + Streams.copy(new StringReader("{\"text\":\"Hallo, wie geht es Ihnen?\"}"), + new OutputStreamWriter(connection.getOutputStream(), StandardCharsets.UTF_8)); StringWriter response = new StringWriter(); Streams.copy(new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8), response); - assertEquals("{\"languages\":[{\"language\":\"de\",\"probability\":0.9999958804394111}]}", response.toString()); + assertEquals("{\"languages\":[{\"language\":\"de\",\"probability\":0.9999958804394111}]}", + response.toString()); } finally { stopCluster(); } @@ -61,10 +63,39 @@ public void httpPostShortProfile() throws IOException { connection.setRequestMethod("POST"); connection.setDoOutput(true); connection.setDoInput(true); - Streams.copy(new StringReader("{\"text\":\"Das ist ein Text\"}"), new OutputStreamWriter(connection.getOutputStream(), StandardCharsets.UTF_8)); + Streams.copy(new StringReader("{\"text\":\"Das ist ein Text\"}"), + new OutputStreamWriter(connection.getOutputStream(), StandardCharsets.UTF_8)); StringWriter response = new StringWriter(); Streams.copy(new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8), response); - assertEquals("{\"profile\":\"short-text\",\"languages\":[{\"language\":\"de\",\"probability\":0.9999968539079941}]}", response.toString()); + assertEquals("{\"profile\":\"short-text\",\"languages\":[{\"language\":\"de\",\"probability\":0.9999968539079941}]}", + response.toString()); + } finally { + stopCluster(); + } + } + + @Test + public void httpPostShortProfileInBody() throws IOException { + startCluster(); + try { + String httpAddress = findHttpAddress(client()); + if (httpAddress == null) { + throw new IllegalArgumentException("no HTTP address found"); + } + URL base = new URL(httpAddress); + URL url = new URL(base, "_langdetect"); + HttpURLConnection connection = (HttpURLConnection) url.openConnection(); + connection.setRequestProperty("Content-Type", "application/json"); + connection.setRequestProperty("Accept", "application/json"); + connection.setRequestMethod("POST"); + connection.setDoOutput(true); + connection.setDoInput(true); + Streams.copy(new StringReader("{\"text\":\"Das ist ein Text\",\"profile\":\"short-text\"}"), + new OutputStreamWriter(connection.getOutputStream(), StandardCharsets.UTF_8)); + StringWriter response = new StringWriter(); + Streams.copy(new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8), response); + 
assertEquals("{\"profile\":\"short-text\",\"languages\":[{\"language\":\"de\",\"probability\":0.9999968539079941}]}", + response.toString()); } finally { stopCluster(); } From ca0a2a6a2c1e595549adc5cc3565368cd83b8e3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=CC=88rg=20Prante?= Date: Sun, 30 Apr 2017 20:43:07 +0200 Subject: [PATCH 18/19] update to Elasticsearch 5.3.2 --- README.adoc | 4 +++- gradle.properties | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/README.adoc b/README.adoc index 2b42402..7d6fb12 100644 --- a/README.adoc +++ b/README.adoc @@ -89,6 +89,8 @@ Here is a list of languages code recognized: [frame="all"] |=== | Plugin version | Elasticsearch version | Release date +| 5.3.2.0 | 5.3.2 | Apr 30, 2017 +| 5.3.1.0 | 5.3.1 | Apr 30, 2017 | 5.3.0.2 | 5.3.0 | Apr 3, 2017 | 5.3.0.1 | 5.3.0 | Apr 1, 2017 | 5.3.0.0 | 5.3.0 | Mar 30, 2017 @@ -122,7 +124,7 @@ Here is a list of languages code recognized: [source] ---- -./bin/elasticsearch-plugin install http://xbib.org/repository/org/xbib/elasticsearch/plugin/elasticsearch-langdetect/5.3.0.2/elasticsearch-langdetect-5.3.0.2-plugin.zip +./bin/elasticsearch-plugin install http://xbib.org/repository/org/xbib/elasticsearch/plugin/elasticsearch-langdetect/5.3.2.0/elasticsearch-langdetect-5.3.2.0-plugin.zip ---- ### Elasticsearch 2.x diff --git a/gradle.properties b/gradle.properties index aa2e35b..5212660 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,8 +1,8 @@ group = org.xbib.elasticsearch.plugin name = elasticsearch-langdetect -version = 5.3.1.0 +version = 5.3.2.0 -elasticsearch.version = 5.3.1 +elasticsearch.version = 5.3.2 log4j.version = 2.8.2 junit.version = 4.12 asciidoclet.version = 1.5.4 From 0938773f7043b1845341c947a5935054a6e9bd6c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=CC=88rg=20Prante?= Date: Wed, 10 May 2017 21:40:38 +0200 Subject: [PATCH 19/19] update to Elasticsearch 5.4.0 --- README.adoc | 3 ++- gradle.properties | 4 ++-- gradle/wrapper/gradle-wrapper.jar | Bin 54227 -> 54783 bytes gradle/wrapper/gradle-wrapper.properties | 4 ++-- .../langdetect/LangDetectActionTest.java | 6 +++--- .../langdetect/LangDetectBinaryTest.java | 2 +- .../langdetect/LangDetectChineseTest.java | 2 +- 7 files changed, 11 insertions(+), 10 deletions(-) diff --git a/README.adoc b/README.adoc index 7d6fb12..3b3161a 100644 --- a/README.adoc +++ b/README.adoc @@ -89,6 +89,7 @@ Here is a list of languages code recognized: [frame="all"] |=== | Plugin version | Elasticsearch version | Release date +| 5.4.0.0 | 5.4.0 | May 10, 2017 | 5.3.2.0 | 5.3.2 | Apr 30, 2017 | 5.3.1.0 | 5.3.1 | Apr 30, 2017 | 5.3.0.2 | 5.3.0 | Apr 3, 2017 @@ -124,7 +125,7 @@ Here is a list of languages code recognized: [source] ---- -./bin/elasticsearch-plugin install http://xbib.org/repository/org/xbib/elasticsearch/plugin/elasticsearch-langdetect/5.3.2.0/elasticsearch-langdetect-5.3.2.0-plugin.zip +./bin/elasticsearch-plugin install http://xbib.org/repository/org/xbib/elasticsearch/plugin/elasticsearch-langdetect/5.4.0.0/elasticsearch-langdetect-5.4.0.0-plugin.zip ---- ### Elasticsearch 2.x diff --git a/gradle.properties b/gradle.properties index 5212660..a0e5349 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,8 +1,8 @@ group = org.xbib.elasticsearch.plugin name = elasticsearch-langdetect -version = 5.3.2.0 +version = 5.4.0.0 -elasticsearch.version = 5.3.2 +elasticsearch.version = 5.4.0 log4j.version = 2.8.2 junit.version = 4.12 asciidoclet.version = 1.5.4 diff --git a/gradle/wrapper/gradle-wrapper.jar 
b/gradle/wrapper/gradle-wrapper.jar
index 51288f9c2f05faf8d42e1a751a387ca7923882c3..ee6e791b56c4ffb3f255c0cb057b6b498f509455 100644
GIT binary patch
[base85-encoded binary deltas (9234 and 8682 bytes) for gradle/wrapper/gradle-wrapper.jar omitted]