From bee39a34671b8237c04b7212a846fd5a1d663b56 Mon Sep 17 00:00:00 2001 From: Kazuhiro Sera Date: Sat, 11 Aug 2018 03:00:09 +0900 Subject: [PATCH 1/6] Fix misspelled words in comments, error messages, and test code --- CONTRIBUTING.md | 2 +- TESTING.asciidoc | 2 +- build.gradle | 2 +- .../gradle/VersionCollection.groovy | 2 +- .../doc/RestTestsFromSnippetsTask.groovy | 2 +- .../elasticsearch/gradle/test/NodeInfo.groovy | 2 +- .../build.gradle | 4 +- .../tools/launchers/JvmOptionsParser.java | 2 +- docs/java-rest/low-level/usage.asciidoc | 2 +- docs/painless/painless-api-reference.asciidoc | 2 +- docs/plugins/repository-gcs.asciidoc | 2 +- docs/plugins/repository-hdfs.asciidoc | 8 ++-- docs/reference/commands/certutil.asciidoc | 2 +- .../mapping/types/percolator.asciidoc | 2 +- docs/reference/sql/concepts.asciidoc | 6 +-- docs/reference/sql/endpoints/jdbc.asciidoc | 4 +- .../painless/node/package-info.java | 4 +- .../painless/PainlessDocGenerator.java | 2 +- .../painless/PainlessExecuteRequestTests.java | 2 +- .../index/rankeval/RankEvalResponse.java | 4 +- .../reindex/AsyncBulkByScrollActionTests.java | 2 +- .../index/reindex/CancelTests.java | 2 +- .../ICUCollationKeywordFieldMapperIT.java | 48 +++++++++---------- .../test/custom-suggester/20_suggest.yml | 2 +- .../src/main/resources/regexes.yml | 4 +- .../test/old_cluster/10_basic.yml | 2 +- .../resources/packaging/tests/60_systemd.bats | 2 +- .../tests/module_and_plugin_test_cases.bash | 2 +- .../test/resources/packaging/utils/utils.bash | 2 +- .../resources/rest-api-spec/api/_common.json | 2 +- .../action/search/SearchPhaseController.java | 4 +- .../org/elasticsearch/bootstrap/Natives.java | 4 +- .../metadata/IndexNameExpressionResolver.java | 2 +- .../metadata/MetaDataIndexUpgradeService.java | 4 +- .../metadata/TemplateUpgradeService.java | 2 +- .../routing/IndexShardRoutingTable.java | 2 +- .../common/time/DateMathParser.java | 2 +- .../analysis/PreConfiguredCharFilter.java | 2 +- .../reindex/AbstractBulkByScrollRequest.java | 2 +- .../elasticsearch/index/shard/IndexShard.java | 2 +- .../org/elasticsearch/index/store/Store.java | 2 +- .../index/translog/TranslogStats.java | 4 +- .../cluster/RestGetRepositoriesAction.java | 2 +- .../filter/FilterAggregatorFactory.java | 2 +- .../highlight/FragmentBuilderHelper.java | 2 +- .../elasticsearch/transport/Transport.java | 2 +- .../TransportConnectionListener.java | 2 +- .../action/bulk/BulkRequestTests.java | 2 +- .../TransportReplicationActionTests.java | 2 +- .../routing/OperationRoutingTests.java | 2 +- .../decider/FilterAllocationDeciderTests.java | 2 +- .../mapper/MultiFieldCopyToMapperTests.java | 2 +- .../index/shard/IndexShardIT.java | 2 +- .../index/translog/TranslogTests.java | 2 +- .../monitor/process/ProcessProbeTests.java | 2 +- .../rest/BaseRestHandlerTests.java | 4 +- .../scripted/InternalScriptedMetricTests.java | 2 +- .../metrics/tophits/InternalTopHitsTests.java | 2 +- .../highlight/HighlighterSearchIT.java | 12 ++--- .../search/sort/FieldSortBuilderTests.java | 8 ++-- .../sort/GeoDistanceSortBuilderTests.java | 4 +- .../search/sort/ScriptSortBuilderTests.java | 2 +- .../action/bulk/simple-bulk7.json | 2 +- .../dynamictemplate/simple/test-mapping.json | 2 +- .../TestThreadInfoPatternConverterTests.java | 2 +- .../rest/yaml/section/DoSectionTests.java | 4 +- .../test/test/InternalTestClusterTests.java | 2 +- x-pack/docs/en/rest-api/ml/jobcounts.asciidoc | 2 +- .../en/rest-api/rollup/rollup-caps.asciidoc | 2 +- x-pack/docs/en/watcher/actions/email.asciidoc | 2 +- 
.../elasticsearch/xpack/core/XPackPlugin.java | 2 +- .../xpack/core/graph/action/Hop.java | 2 +- .../xpack/core/ml/job/config/Detector.java | 2 +- .../core/watcher/history/WatchRecord.java | 2 +- .../action/TransportGraphExploreAction.java | 2 +- .../categorization/GrokPatternCreator.java | 2 +- .../rest/results/RestGetCategoriesAction.java | 2 +- .../xpack/ml/MlSingleNodeTestCase.java | 2 +- .../integration/BasicDistributedJobsIT.java | 2 +- .../xpack/rollup/job/RollupJobTask.java | 4 +- .../xpack/rollup/job/RollupJobTaskTests.java | 4 +- .../authc/ldap/LdapSessionFactory.java | 2 +- .../authc/support/UserRoleMapper.java | 2 +- .../transport/ServerTransportFilter.java | 2 +- .../xpack/security/TemplateUpgraderTests.java | 2 +- .../support/LdapMetaDataResolverTests.java | 4 +- .../authc/saml/SamlAuthenticatorTests.java | 8 ++-- .../filter/IpFilteringUpdateTests.java | 2 +- .../security/authz/store/invalid_roles.yml | 2 +- .../xpack/sql/jdbc/debug/Debug.java | 2 +- .../function/FunctionDefinition.java | 4 +- .../scalar/string/StringFunctionUtils.java | 2 +- .../xpack/sql/parser/ExpressionBuilder.java | 6 +-- .../BinaryProcessorDefinitionTests.java | 4 +- .../rest-api-spec/test/ml/datafeeds_crud.yml | 2 +- .../test/monitoring/bulk/20_privileges.yml | 6 +-- .../rest-api-spec/test/rollup/delete_job.yml | 2 +- .../rest-api-spec/test/rollup/start_job.yml | 2 +- .../rest-api-spec/test/rollup/stop_job.yml | 2 +- .../10_small_users_one_index.yml | 2 +- .../30_reset_ack_after_unmet_condition.yml | 2 +- .../xpack/upgrade/IndexUpgradeIT.java | 2 +- .../watcher/WatcherIndexingListener.java | 2 +- .../watcher/execution/ExecutionService.java | 4 +- .../actions/slack/SlackActionTests.java | 2 +- .../watcher/common/http/HttpClientTests.java | 2 +- .../action/delete/DeleteWatchTests.java | 2 +- .../action/execute/ExecuteWatchTests.java | 2 +- .../xpack/ml/job/config/Detector.java | 2 +- .../xpack/ml/integration/MlJobIT.java | 2 +- .../test/20_small_users_one_index.yml | 4 +- .../qa/sql/security/RestSqlSecurityIT.java | 2 +- .../qa/sql/security/SqlSecurityTestCase.java | 2 +- 113 files changed, 175 insertions(+), 175 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c591459f01bb7..9dfa0598a4880 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -320,7 +320,7 @@ have to test Elasticsearch. #### Configurations Gradle organizes dependencies and build artifacts into "configurations" and -allows you to use these configurations arbitrarilly. Here are some of the most +allows you to use these configurations arbitrarily. Here are some of the most common configurations in our build and how we use them:
diff --git a/TESTING.asciidoc b/TESTING.asciidoc index 5e5207b279eff..2120c6603421b 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -250,7 +250,7 @@ Pass arbitrary jvm arguments. Running backwards compatibility tests is disabled by default since it requires a release version of elasticsearch to be present on the test system. -To run backwards compatibilty tests untar or unzip a release and run the tests +To run backwards compatibility tests untar or unzip a release and run the tests with the following command: --------------------------------------------------------------------------- diff --git a/build.gradle b/build.gradle index 3674e0a540bf8..c1d5412948fe9 100644 --- a/build.gradle +++ b/build.gradle @@ -588,7 +588,7 @@ static void assertLinesInFile(final Path path, final List expectedLines) /* * Check that all generated JARs have our NOTICE.txt and an appropriate - * LICENSE.txt in them. We configurate this in gradle but we'd like to + * LICENSE.txt in them. We configure this in gradle but we'd like to * be extra paranoid. */ subprojects { project -> diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionCollection.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionCollection.groovy index 7d5b793254fe4..c69958ddad4b3 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionCollection.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionCollection.groovy @@ -122,7 +122,7 @@ class VersionCollection { if (isReleased(version) == false) { // caveat 1 - This should only ever contain 2 non released branches in flight. An example is 6.x is frozen, // and 6.2 is cut but not yet released there is some simple logic to make sure that in the case of more than 2, - // it will bail. The order is that the minor snapshot is fufilled first, and then the staged minor snapshot + // it will bail. The order is that the minor snapshot is fulfilled first, and then the staged minor snapshot if (nextMinorSnapshot == null) { // it has not been set yet nextMinorSnapshot = replaceAsSnapshot(version) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy index f2105086f2553..ac258f76e3322 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy @@ -72,7 +72,7 @@ public class RestTestsFromSnippetsTask extends SnippetsTask { /** * Root directory containing all the files generated by this task. It is - * contained withing testRoot. + * contained within testRoot. 
*/ File outputRoot() { return new File(testRoot, '/rest-api-spec/test') diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index 0dd56b863324f..5041ba9b1d097 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -331,7 +331,7 @@ class NodeInfo { case 'deb': return new File(baseDir, "${distro}-extracted/etc/elasticsearch") default: - throw new InvalidUserDataException("Unkown distribution: ${distro}") + throw new InvalidUserDataException("Unknown distribution: ${distro}") } } } diff --git a/buildSrc/src/testKit/elasticsearch-build-resources/build.gradle b/buildSrc/src/testKit/elasticsearch-build-resources/build.gradle index 95d1453025e92..c87c097e6beb6 100644 --- a/buildSrc/src/testKit/elasticsearch-build-resources/build.gradle +++ b/buildSrc/src/testKit/elasticsearch-build-resources/build.gradle @@ -22,7 +22,7 @@ task sample { // dependsOn buildResources.outputDir // for now it's just dependsOn buildResources - // we have to refference it at configuration time in order to be picked up + // we have to reference it at configuration time in order to be picked up ext.checkstyle_suppressions = buildResources.copy('checkstyle_suppressions.xml') doLast { println "This task is using ${file(checkstyle_suppressions)}" @@ -35,4 +35,4 @@ task noConfigAfterExecution { println "This should cause an error because we are refferencing " + "${buildResources.copy('checkstyle_suppressions.xml')} after the `buildResources` task has ran." } -} \ No newline at end of file +} diff --git a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java index c19c12cfe4444..d74f106c50ba4 100644 --- a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java +++ b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java @@ -228,7 +228,7 @@ static void parse( // no range is present, apply the JVM option to the specified major version only upper = lower; } else if (end == null) { - // a range of the form \\d+- is present, apply the JVM option to all major versions larger than the specifed one + // a range of the form \\d+- is present, apply the JVM option to all major versions larger than the specified one upper = Integer.MAX_VALUE; } else { // a range of the form \\d+-\\d+ is present, apply the JVM option to the specified range of major versions diff --git a/docs/java-rest/low-level/usage.asciidoc b/docs/java-rest/low-level/usage.asciidoc index 71fadd98988a3..38104215720d7 100644 --- a/docs/java-rest/low-level/usage.asciidoc +++ b/docs/java-rest/low-level/usage.asciidoc @@ -307,7 +307,7 @@ You can also customize the response consumer used to buffer the asynchronous responses. The default consumer will buffer up to 100MB of response on the JVM heap. If the response is larger then the request will fail. You could, for example, lower the maximum size which might be useful if you are running -in a heap constrained environment like the exmaple above. +in a heap constrained environment like the example above. 
Once you've created the singleton you can use it when making requests: diff --git a/docs/painless/painless-api-reference.asciidoc b/docs/painless/painless-api-reference.asciidoc index 54b1f20977b61..814824b0db9a2 100644 --- a/docs/painless/painless-api-reference.asciidoc +++ b/docs/painless/painless-api-reference.asciidoc @@ -3,7 +3,7 @@ Painless has a strict whitelist for methods and classes to ensure all painless scripts are secure. Most of these methods are exposed directly -from the Java Runtime Enviroment (JRE) while others are part of +from the Java Runtime Environment (JRE) while others are part of Elasticsearch or Painless itself. Below is a list of all available classes grouped with their respected methods. Clicking on the method name takes you to the documentation for that specific method. Methods diff --git a/docs/plugins/repository-gcs.asciidoc b/docs/plugins/repository-gcs.asciidoc index 8292325738506..e8fd3e23d8703 100644 --- a/docs/plugins/repository-gcs.asciidoc +++ b/docs/plugins/repository-gcs.asciidoc @@ -185,7 +185,7 @@ are marked as `Secure`. `project_id`:: - The Google Cloud project id. This will be automatically infered from the credentials file but + The Google Cloud project id. This will be automatically inferred from the credentials file but can be specified explicitly. For example, it can be used to switch between projects when the same credentials are usable for both the production and the development projects. diff --git a/docs/plugins/repository-hdfs.asciidoc b/docs/plugins/repository-hdfs.asciidoc index ffd5ecebc2536..1b975ef761d4a 100644 --- a/docs/plugins/repository-hdfs.asciidoc +++ b/docs/plugins/repository-hdfs.asciidoc @@ -32,7 +32,7 @@ PUT _snapshot/my_hdfs_repository "type": "hdfs", "settings": { "uri": "hdfs://namenode:8020/", - "path": "elasticsearch/respositories/my_hdfs_repository", + "path": "elasticsearch/repositories/my_hdfs_repository", "conf.dfs.client.read.shortcircuit": "true" } } @@ -149,7 +149,7 @@ PUT _snapshot/my_hdfs_repository "type": "hdfs", "settings": { "uri": "hdfs://namenode:8020/", - "path": "/user/elasticsearch/respositories/my_hdfs_repository", + "path": "/user/elasticsearch/repositories/my_hdfs_repository", "security.principal": "elasticsearch@REALM" } } @@ -167,7 +167,7 @@ PUT _snapshot/my_hdfs_repository "type": "hdfs", "settings": { "uri": "hdfs://namenode:8020/", - "path": "/user/elasticsearch/respositories/my_hdfs_repository", + "path": "/user/elasticsearch/repositories/my_hdfs_repository", "security.principal": "elasticsearch/_HOST@REALM" } } @@ -186,4 +186,4 @@ extracts for file access checks will be `elasticsearch`. NOTE: The repository plugin makes no assumptions of what Elasticsearch's principal name is. The main fragment of the Kerberos principal is not required to be `elasticsearch`. If you have a principal or service name that works better -for you or your organization then feel free to use it instead! \ No newline at end of file +for you or your organization then feel free to use it instead! diff --git a/docs/reference/commands/certutil.asciidoc b/docs/reference/commands/certutil.asciidoc index e0c6c701e31fe..4b04f95445ef6 100644 --- a/docs/reference/commands/certutil.asciidoc +++ b/docs/reference/commands/certutil.asciidoc @@ -72,7 +72,7 @@ parameter or in the `filename` field in an input YAML file. You can optionally provide IP addresses or DNS names for each instance. 
If neither IP addresses nor DNS names are specified, the Elastic stack products cannot perform hostname verification and you might need to configure the -`verfication_mode` security setting to `certificate` only. For more information +`verification_mode` security setting to `certificate` only. For more information about this setting, see <>. All certificates that are generated by this command are signed by a CA. You can diff --git a/docs/reference/mapping/types/percolator.asciidoc b/docs/reference/mapping/types/percolator.asciidoc index 066d3ce1ac597..5aee15813b939 100644 --- a/docs/reference/mapping/types/percolator.asciidoc +++ b/docs/reference/mapping/types/percolator.asciidoc @@ -372,7 +372,7 @@ GET /test_index/_search "percolate" : { "field" : "query", "document" : { - "body" : "Bycicles are missing" + "body" : "Bicycles are missing" } } } diff --git a/docs/reference/sql/concepts.asciidoc b/docs/reference/sql/concepts.asciidoc index 1dc23e391fab1..7ebc336a5cbae 100644 --- a/docs/reference/sql/concepts.asciidoc +++ b/docs/reference/sql/concepts.asciidoc @@ -9,7 +9,7 @@ NOTE: This documentation while trying to be complete, does assume the reader has As a general rule, {es-sql} as the name indicates provides a SQL interface to {es}. As such, it follows the SQL terminology and conventions first, whenever possible. However the backing engine itself is {es} for which {es-sql} was purposely created hence why features or concepts that are not available, or cannot be mapped correctly, in SQL appear in {es-sql}. -Last but not least, {es-sql} tries to obey the https://en.wikipedia.org/wiki/Principle_of_least_astonishment[principle of least suprise], though as all things in the world, everything is relative. +Last but not least, {es-sql} tries to obey the https://en.wikipedia.org/wiki/Principle_of_least_astonishment[principle of least surprise], though as all things in the world, everything is relative. === Mapping concepts across SQL and {es} @@ -43,7 +43,7 @@ Notice that in {es} a field can contain _multiple_ values of the same type (esen |`catalog` or `database` |`cluster` instance -|In SQL, `catalog` or `database` are used interchangebly and represent a set of schemas that is, a number of tables. +|In SQL, `catalog` or `database` are used interchangeably and represent a set of schemas that is, a number of tables. In {es} the set of indices available are grouped in a `cluster`. The semantics also differ a bit; a `database` is essentially yet another namespace (which can have some implications on the way data is stored) while an {es} `cluster` is a runtime instance, or rather a set of at least one {es} instance (typically running distributed). In practice this means that while in SQL one can potentially have multiple catalogs inside an instance, in {es} one is restricted to only _one_. @@ -62,4 +62,4 @@ Multiple clusters, each with its own namespace, connected to each other in a fed |=== -As one can see while the mapping between the concepts are not exactly one to one and the semantics somewhat different, there are more things in common than differences. In fact, thanks to SQL declarative nature, many concepts can move across {es} transparently and the terminology of the two likely to be used interchangebly through-out the rest of the material. \ No newline at end of file +As one can see while the mapping between the concepts are not exactly one to one and the semantics somewhat different, there are more things in common than differences. 
In fact, thanks to SQL declarative nature, many concepts can move across {es} transparently and the terminology of the two likely to be used interchangeably through-out the rest of the material. diff --git a/docs/reference/sql/endpoints/jdbc.asciidoc b/docs/reference/sql/endpoints/jdbc.asciidoc index 6a8793f7e24e2..91e6dd571ebf5 100644 --- a/docs/reference/sql/endpoints/jdbc.asciidoc +++ b/docs/reference/sql/endpoints/jdbc.asciidoc @@ -38,7 +38,7 @@ from `artifacts.elastic.co/maven` by adding it to the repositories list: === Setup The driver main class is `org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcDriver`. -Note the driver implements the JDBC 4.0 +Service Provider+ mechanism meaning it is registerd automatically +Note the driver implements the JDBC 4.0 +Service Provider+ mechanism meaning it is registered automatically as long as its available in the classpath. Once registered, the driver understands the following syntax as an URL: @@ -176,4 +176,4 @@ connection. For example: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{jdbc-tests}/SimpleExampleTestCase.java[simple_example] --------------------------------------------------- \ No newline at end of file +-------------------------------------------------- diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java index b8a1af073bf53..8ba8b79b74a92 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/package-info.java @@ -143,7 +143,7 @@ * described by later documentation. *

* Storebable nodes have three methods for writing -- setup, load, and store. These methods - * are used in conjuction with a parent node aware of the storeable node (lhs) that has a node + * are used in conjunction with a parent node aware of the storeable node (lhs) that has a node * representing a value to store (rhs). The setup method is always once called before a store * to give storeable nodes a chance to write any prefixes they may have and any values such as * array indices before the store happens. Load is called on a storeable node that must also @@ -152,7 +152,7 @@ * Sub nodes are partial nodes that require a parent to work correctly. These nodes can really * represent anything the parent node would like to split up into logical pieces and don't really * have any distinct set of rules. The currently existing subnodes all have ANode as a super class - * somewhere in their class heirachy so the parent node can defer some analysis and writing to + * somewhere in their class hierarchy so the parent node can defer some analysis and writing to * the sub node. */ package org.elasticsearch.painless.node; diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java index 1460d5f2359b6..444d1cd4f9400 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java @@ -434,7 +434,7 @@ private static String javadocRoot(Class clazz) { if (classPackage.startsWith("org.apache.lucene")) { return "lucene-core"; } - throw new IllegalArgumentException("Unrecognized packge: " + classPackage); + throw new IllegalArgumentException("Unrecognized package: " + classPackage); } private static void emitGeneratedWarning(PrintStream stream) { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java index 44cd6b5304dc4..e70d728091fab 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java @@ -83,7 +83,7 @@ private static ContextSetup randomContextSetup() { QueryBuilder query = randomBoolean() ? new MatchAllQueryBuilder() : null; // TODO: pass down XContextType to createTestInstance() method. // otherwise the document itself is different causing test failures. - // This should be done in a seperate change as the test instance is created before xcontent type is randomly picked and + // This should be done in a separate change as the test instance is created before xcontent type is randomly picked and // all the createTestInstance() methods need to be changed, which will make this a big chnage // BytesReference doc = randomBoolean() ? 
new BytesArray("{}") : null; BytesReference doc = null; diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java index 6efff154b6253..991d97b61968d 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalResponse.java @@ -42,8 +42,8 @@ /** * Returns the results for a {@link RankEvalRequest}.
- * The repsonse contains a detailed section for each evaluation query in the request and - * possible failures that happened when executin individual queries. + * The response contains a detailed section for each evaluation query in the request and + * possible failures that happened when executing individual queries. **/ public class RankEvalResponse extends ActionResponse implements ToXContentObject { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java index 6a5610de37a01..b991509f49bad 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -481,7 +481,7 @@ public ScheduledFuture schedule(TimeValue delay, String name, Runnable comman /** * Execute a bulk retry test case. The total number of failures is random and the number of retries attempted is set to - * testRequest.getMaxRetries and controled by the failWithRejection parameter. + * testRequest.getMaxRetries and controlled by the failWithRejection parameter. */ private void bulkRetryTestCase(boolean failWithRejection) throws Exception { int totalFailures = randomIntBetween(1, testRequest.getMaxRetries()); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTests.java index d2110c5cded14..0266462e26e88 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTests.java @@ -122,7 +122,7 @@ private void testCancel(String action, AbstractBulkByScrollRequestBuilder logger.debug("waiting for updates to be blocked"); boolean blocked = awaitBusy( () -> ALLOWED_OPERATIONS.hasQueuedThreads() && ALLOWED_OPERATIONS.availablePermits() == 0, - 1, TimeUnit.MINUTES); // 10 seconds is usually fine but on heavilly loaded machines this can wake a while + 1, TimeUnit.MINUTES); // 10 seconds is usually fine but on heavily loaded machines this can wake a while assertTrue("updates blocked", blocked); // Status should show the task running diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperIT.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperIT.java index ce8a635ffb6f1..3bcefe0cf5680 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperIT.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperIT.java @@ -60,7 +60,7 @@ public void testBasicUsage() throws Exception { String index = "foo"; String type = "mytype"; - String[] equilavent = {"I WİLL USE TURKİSH CASING", "ı will use turkish casıng"}; + String[] equivalent = {"I WİLL USE TURKİSH CASING", "ı will use turkish casıng"}; XContentBuilder builder = jsonBuilder() .startObject().startObject("properties") @@ -75,8 +75,8 @@ public void testBasicUsage() throws Exception { // both values should collate to same value indexRandom(true, - client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equilavent[0] + "\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equilavent[1] + "\"}", XContentType.JSON) + 
client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) ); // searching for either of the terms should return both results since they collate to the same value @@ -85,7 +85,7 @@ public void testBasicUsage() throws Exception { .types(type) .source(new SearchSourceBuilder() .fetchSource(false) - .query(QueryBuilders.termQuery("collate", randomBoolean() ? equilavent[0] : equilavent[1])) + .query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1])) .sort("collate") .sort("_id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value ); @@ -100,7 +100,7 @@ public void testMultipleValues() throws Exception { String index = "foo"; String type = "mytype"; - String[] equilavent = {"a", "C", "a", "B"}; + String[] equivalent = {"a", "C", "a", "B"}; XContentBuilder builder = jsonBuilder() .startObject().startObject("properties") @@ -114,9 +114,9 @@ public void testMultipleValues() throws Exception { // everything should be indexed fine, no exceptions indexRandom(true, - client().prepareIndex(index, type, "1").setSource("{\"collate\":[\"" + equilavent[0] + "\", \"" - + equilavent[1] + "\"]}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equilavent[2] + "\"}", XContentType.JSON) + client().prepareIndex(index, type, "1").setSource("{\"collate\":[\"" + equivalent[0] + "\", \"" + + equivalent[1] + "\"]}", XContentType.JSON), + client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equivalent[2] + "\"}", XContentType.JSON) ); // using sort mode = max, values B and C will be used for the sort @@ -161,7 +161,7 @@ public void testNormalization() throws Exception { String index = "foo"; String type = "mytype"; - String[] equilavent = {"I W\u0049\u0307LL USE TURKİSH CASING", "ı will use turkish casıng"}; + String[] equivalent = {"I W\u0049\u0307LL USE TURKİSH CASING", "ı will use turkish casıng"}; XContentBuilder builder = jsonBuilder() .startObject().startObject("properties") @@ -176,8 +176,8 @@ public void testNormalization() throws Exception { assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); indexRandom(true, - client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equilavent[0] + "\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equilavent[1] + "\"}", XContentType.JSON) + client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) ); // searching for either of the terms should return both results since they collate to the same value @@ -186,7 +186,7 @@ public void testNormalization() throws Exception { .types(type) .source(new SearchSourceBuilder() .fetchSource(false) - .query(QueryBuilders.termQuery("collate", randomBoolean() ? equilavent[0] : equilavent[1])) + .query(QueryBuilders.termQuery("collate", randomBoolean() ? 
equivalent[0] : equivalent[1])) .sort("collate") .sort("_id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value ); @@ -204,7 +204,7 @@ public void testSecondaryStrength() throws Exception { String index = "foo"; String type = "mytype"; - String[] equilavent = {"TESTING", "testing"}; + String[] equivalent = {"TESTING", "testing"}; XContentBuilder builder = jsonBuilder() .startObject().startObject("properties") @@ -219,8 +219,8 @@ public void testSecondaryStrength() throws Exception { assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); indexRandom(true, - client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equilavent[0] + "\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equilavent[1] + "\"}", XContentType.JSON) + client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest() @@ -228,7 +228,7 @@ public void testSecondaryStrength() throws Exception { .types(type) .source(new SearchSourceBuilder() .fetchSource(false) - .query(QueryBuilders.termQuery("collate", randomBoolean() ? equilavent[0] : equilavent[1])) + .query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1])) .sort("collate") .sort("_id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value ); @@ -247,7 +247,7 @@ public void testIgnorePunctuation() throws Exception { String index = "foo"; String type = "mytype"; - String[] equilavent = {"foo-bar", "foo bar"}; + String[] equivalent = {"foo-bar", "foo bar"}; XContentBuilder builder = jsonBuilder() .startObject().startObject("properties") @@ -262,8 +262,8 @@ public void testIgnorePunctuation() throws Exception { assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); indexRandom(true, - client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equilavent[0] + "\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equilavent[1] + "\"}", XContentType.JSON) + client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest() @@ -271,7 +271,7 @@ public void testIgnorePunctuation() throws Exception { .types(type) .source(new SearchSourceBuilder() .fetchSource(false) - .query(QueryBuilders.termQuery("collate", randomBoolean() ? equilavent[0] : equilavent[1])) + .query(QueryBuilders.termQuery("collate", randomBoolean() ? 
equivalent[0] : equivalent[1])) .sort("collate") .sort("_id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value ); @@ -467,7 +467,7 @@ public void testCustomRules() throws Exception { RuleBasedCollator tailoredCollator = new RuleBasedCollator(baseCollator.getRules() + DIN5007_2_tailorings); String tailoredRules = tailoredCollator.getRules(); - String[] equilavent = {"Töne", "Toene"}; + String[] equivalent = {"Töne", "Toene"}; XContentBuilder builder = jsonBuilder() .startObject().startObject("properties") @@ -481,8 +481,8 @@ public void testCustomRules() throws Exception { assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); indexRandom(true, - client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equilavent[0] + "\"}", XContentType.JSON), - client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equilavent[1] + "\"}", XContentType.JSON) + client().prepareIndex(index, type, "1").setSource("{\"collate\":\"" + equivalent[0] + "\"}", XContentType.JSON), + client().prepareIndex(index, type, "2").setSource("{\"collate\":\"" + equivalent[1] + "\"}", XContentType.JSON) ); SearchRequest request = new SearchRequest() @@ -490,7 +490,7 @@ public void testCustomRules() throws Exception { .types(type) .source(new SearchSourceBuilder() .fetchSource(false) - .query(QueryBuilders.termQuery("collate", randomBoolean() ? equilavent[0] : equilavent[1])) + .query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1])) .sort("collate", SortOrder.ASC) .sort("_id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value ); diff --git a/plugins/examples/custom-suggester/src/test/resources/rest-api-spec/test/custom-suggester/20_suggest.yml b/plugins/examples/custom-suggester/src/test/resources/rest-api-spec/test/custom-suggester/20_suggest.yml index 3731a8b211268..bac4e1014ef15 100644 --- a/plugins/examples/custom-suggester/src/test/resources/rest-api-spec/test/custom-suggester/20_suggest.yml +++ b/plugins/examples/custom-suggester/src/test/resources/rest-api-spec/test/custom-suggester/20_suggest.yml @@ -1,7 +1,7 @@ # tests that the custom suggester works # the issue that prompted serializing Suggestion as a registered named writeable was not revealed until -# a user found that it would fail when reducing suggestions in a multi node envrionment +# a user found that it would fail when reducing suggestions in a multi node environment # https://github.com/elastic/elasticsearch/issues/26585 "test custom suggester": - do: diff --git a/plugins/ingest-user-agent/src/main/resources/regexes.yml b/plugins/ingest-user-agent/src/main/resources/regexes.yml index 6c3369dc2f769..d9c16403cb8d2 100644 --- a/plugins/ingest-user-agent/src/main/resources/regexes.yml +++ b/plugins/ingest-user-agent/src/main/resources/regexes.yml @@ -748,7 +748,7 @@ os_parsers: # possibility of false positive when different marketing names share same NT kernel # e.g. windows server 2003 and windows xp # lots of ua strings have Windows NT 4.1 !?!?!?!? !?!? !? !????!?! !!! ??? !?!?! ? 
- # (very) roughly ordered in terms of frequency of occurence of regex (win xp currently most frequent, etc) + # (very) roughly ordered in terms of frequency of occurrence of regex (win xp currently most frequent, etc) ########## # ie mobile desktop mode @@ -2848,7 +2848,7 @@ device_parsers: device_replacement: 'Micromax $1' brand_replacement: 'Micromax' model_replacement: '$1' - # be carefull here with Acer e.g. A500 + # be careful here with Acer e.g. A500 - regex: '; *(A\d{2}|A[12]\d{2}|A90S|A110Q) Build' regex_flag: 'i' device_replacement: 'Micromax $1' diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/10_basic.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/10_basic.yml index 04d85eb607835..9b899fe800ce2 100644 --- a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/10_basic.yml +++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/10_basic.yml @@ -1,5 +1,5 @@ --- -"Create things in the cluster state that we'll validate are there after the ugprade": +"Create things in the cluster state that we'll validate are there after the upgrade": - do: snapshot.create_repository: repository: my_repo diff --git a/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats b/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats index a7628d08bbaff..db062eb337e74 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats +++ b/qa/vagrant/src/test/resources/packaging/tests/60_systemd.bats @@ -190,7 +190,7 @@ setup() { @test "[SYSTEMD] start Elasticsearch with custom JVM options" { assert_file_exist $ESENVFILE # The custom config directory is not under /tmp or /var/tmp because - # systemd's private temp directory functionaly means different + # systemd's private temp directory functionally means different # processes can have different views of what's in these directories local temp=`mktemp -p /etc -d` cp "$ESCONFIG"/elasticsearch.yml "$temp" diff --git a/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash index 8fd6bd9ad3f15..d7cf400963254 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash @@ -97,7 +97,7 @@ fi rm -rf "$ESPLUGINS" # The custom plugins directory is not under /tmp or /var/tmp because - # systemd's private temp directory functionaly means different + # systemd's private temp directory functionally means different # processes can have different views of what's in these directories local es_plugins=$(mktemp -p /var -d -t 'plugins.XXXX') chown -R elasticsearch:elasticsearch "$es_plugins" diff --git a/qa/vagrant/src/test/resources/packaging/utils/utils.bash b/qa/vagrant/src/test/resources/packaging/utils/utils.bash index cb71e9e6ec1a0..f5a9f25df16f3 100644 --- a/qa/vagrant/src/test/resources/packaging/utils/utils.bash +++ b/qa/vagrant/src/test/resources/packaging/utils/utils.bash @@ -556,7 +556,7 @@ run_elasticsearch_tests() { move_config() { local oldConfig="$ESCONFIG" # The custom config directory is not under /tmp or /var/tmp because - # systemd's private temp directory functionaly means different + # systemd's private temp directory functionally means different # processes can have different views of what's in these directories export ESCONFIG="${1:-$(mktemp -p /etc -d -t 'config.XXXX')}" echo "Moving configuration directory from 
$oldConfig to $ESCONFIG" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/_common.json b/rest-api-spec/src/main/resources/rest-api-spec/api/_common.json index 6edd6d80320af..69a1f8fb8ce3c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/_common.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/_common.json @@ -23,7 +23,7 @@ }, "filter_path": { "type": "list", - "description": "A comma-separated list of filters used to reduce the respone." + "description": "A comma-separated list of filters used to reduce the response." } } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index fb450b2ce8359..1c67d7cbb6811 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -325,7 +325,7 @@ public InternalSearchResponse merge(boolean ignoreFrom, ReducedQueryPhase reduce SearchPhaseResult searchResultProvider = resultsLookup.apply(shardDoc.shardIndex); if (searchResultProvider == null) { // this can happen if we are hitting a shard failure during the fetch phase - // in this case we referenced the shard result via teh ScoreDoc but never got a + // in this case we referenced the shard result via the ScoreDoc but never got a // result from fetch. // TODO it would be nice to assert this in the future continue; @@ -377,7 +377,7 @@ private SearchHits getHits(ReducedQueryPhase reducedQueryPhase, boolean ignoreFr SearchPhaseResult fetchResultProvider = resultsLookup.apply(shardDoc.shardIndex); if (fetchResultProvider == null) { // this can happen if we are hitting a shard failure during the fetch phase - // in this case we referenced the shard result via teh ScoreDoc but never got a + // in this case we referenced the shard result via the ScoreDoc but never got a // result from fetch. 
// TODO it would be nice to assert this in the future continue; diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Natives.java b/server/src/main/java/org/elasticsearch/bootstrap/Natives.java index e428df66f1447..9bd2a5c2f3dc7 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Natives.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Natives.java @@ -84,7 +84,7 @@ static void tryVirtualLock() { */ static String getShortPathName(final String path) { if (!JNA_AVAILABLE) { - logger.warn("cannot obtain short path for [{}] because JNA is not avilable", path); + logger.warn("cannot obtain short path for [{}] because JNA is not available", path); return path; } return JNANatives.getShortPathName(path); @@ -123,7 +123,7 @@ static void trySetMaxNumberOfThreads() { static void trySetMaxSizeVirtualMemory() { if (!JNA_AVAILABLE) { - logger.warn("cannot getrlimit RLIMIT_AS beacuse JNA is not available"); + logger.warn("cannot getrlimit RLIMIT_AS because JNA is not available"); return; } JNANatives.trySetMaxSizeVirtualMemory(); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 1f6a9fe027d1b..9a940466ff140 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -366,7 +366,7 @@ public String[] indexAliases(ClusterState state, String index, Predicate2014-11-18||-2y substracts two years from the input date + * 2014-11-18||-2y subtracts two years from the input date * now/m rounds the current time to minute granularity * * Supported rounding units are diff --git a/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredCharFilter.java b/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredCharFilter.java index 84eb0c4c3498c..a3fddce3e060f 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredCharFilter.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredCharFilter.java @@ -41,7 +41,7 @@ public static PreConfiguredCharFilter singleton(String name, boolean useFilterFo } /** - * Create a pre-configured char filter that may not vary at all, provide access to the elasticsearch verison + * Create a pre-configured char filter that may not vary at all, provide access to the elasticsearch version */ public static PreConfiguredCharFilter singletonWithVersion(String name, boolean useFilterForMultitermQueries, BiFunction create) { diff --git a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java index 8536337bfdbc2..0a793b2613685 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java @@ -150,7 +150,7 @@ public ActionRequestValidationException validate() { e = addValidationError("stored_fields is not supported in this context", e); } if (maxRetries < 0) { - e = addValidationError("retries cannnot be negative", e); + e = addValidationError("retries cannot be negative", e); } if (false == (size == -1 || size > 0)) { e = addValidationError( diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java 
b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 08a0111fb4dc5..2e1dda859b927 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -779,7 +779,7 @@ private Engine.DeleteResult applyDeleteOperation(long seqNo, long opPrimaryTerm, + "]"; ensureWriteAllowed(origin); // When there is a single type, the unique identifier is only composed of the _id, - // so there is no way to differenciate foo#1 from bar#1. This is especially an issue + // so there is no way to differentiate foo#1 from bar#1. This is especially an issue // if a user first deletes foo#1 and then indexes bar#1: since we do not encode the // _type in the uid it might look like we are reindexing the same document, which // would fail if bar#1 is indexed with a lower version than foo#1 was deleted with. diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index 001e263ea8ffb..ec213f1fdcb97 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -1063,7 +1063,7 @@ public Map getCommitUserData() { } /** - * returns the history uuid the store points at, or null if not existant. + * returns the history uuid the store points at, or null if not existent. */ public String getHistoryUUID() { return commitUserData.get(Engine.HISTORY_UUID_KEY); diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogStats.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogStats.java index 8dd5ddcee3be3..75cd605538094 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogStats.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogStats.java @@ -85,12 +85,12 @@ public int estimatedNumberOfOperations() { return numberOfOperations; } - /** the size of the generations in the translog that weren't yet to comitted to lucene */ + /** the size of the generations in the translog that weren't yet committed to lucene */ public long getUncommittedSizeInBytes() { return uncommittedSizeInBytes; } - /** the number of operations in generations of the translog that weren't yet to comitted to lucene */ + /** the number of operations in generations of the translog that weren't yet committed to lucene */ public int getUncommittedOperations() { return uncommittedOperations; } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java index ccb10b603d3b2..5d8c93ca4337c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java @@ -51,7 +51,7 @@ public RestGetRepositoriesAction(Settings settings, RestController controller, S @Override public String getName() { - return "get_respositories_action"; + return "get_repositories_action"; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java index 4b54dccbf96c1..cb187f5ee4033 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java @@ -49,7 +49,7 @@ public FilterAggregatorFactory(String name, QueryBuilder filterBuilder, SearchCo * Returns the {@link Weight} for this filter aggregation, creating it if * necessary. This is done lazily so that the {@link Weight} is only created * if the aggregation collects documents reducing the overhead of the - * aggregation in teh case where no documents are collected. + * aggregation in the case where no documents are collected. * * Note that as aggregations are initialsed and executed in a serial manner, * no concurrency considerations are necessary here. diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java index 0b3204e193b07..583516c5cd4c2 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java @@ -53,7 +53,7 @@ public static WeightedFragInfo fixWeightedFragInfo(MappedFieldType fieldType, Fi if (!fragInfo.getSubInfos().isEmpty() && containsBrokenAnalysis(fieldType.indexAnalyzer())) { /* This is a special case where broken analysis like WDF is used for term-vector creation at index-time * which can potentially mess up the offsets. To prevent a SAIIOBException we need to resort - * the fragments based on their offsets rather than using soley the positions as it is done in + * the fragments based on their offsets rather than using solely the positions as it is done in * the FastVectorHighlighter. Yet, this is really a lucene problem and should be fixed in lucene rather * than in this hack... aka. "we are are working on in!" */ final List subInfos = fragInfo.getSubInfos(); diff --git a/server/src/main/java/org/elasticsearch/transport/Transport.java b/server/src/main/java/org/elasticsearch/transport/Transport.java index 9538119f43b8a..1d3a3b6e26f98 100644 --- a/server/src/main/java/org/elasticsearch/transport/Transport.java +++ b/server/src/main/java/org/elasticsearch/transport/Transport.java @@ -159,7 +159,7 @@ default Object getCacheKey() { } /** - * This class represents a response context that encapsulates the actual response handler, the action and the conneciton it was + * This class represents a response context that encapsulates the actual response handler, the action and the connection it was * executed on. */ final class ResponseContext { diff --git a/server/src/main/java/org/elasticsearch/transport/TransportConnectionListener.java b/server/src/main/java/org/elasticsearch/transport/TransportConnectionListener.java index 0ee2ed5828d44..1f90291a54514 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportConnectionListener.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportConnectionListener.java @@ -78,7 +78,7 @@ default void onConnectionClosed(Transport.Connection connection) {} /** * Called for every response received - * @param requestId the request id for this reponse + * @param requestId the request id for this response * @param context the response context or null if the context was already processed ie. due to a timeout. 
*/ default void onResponseReceived(long requestId, Transport.ResponseContext context) {} diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index f1842b5b0dd1d..1b2c238098e50 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -149,7 +149,7 @@ public void testSimpleBulk7() throws Exception { IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null, XContentType.JSON)); assertThat(exc.getMessage(), - containsString("Malformed action/metadata line [5], expected a simple value for field [_unkown] but found [START_ARRAY]")); + containsString("Malformed action/metadata line [5], expected a simple value for field [_unknown] but found [START_ARRAY]")); } public void testSimpleBulk8() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index 7f1b4adf8df1e..d9df2a06a3eff 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -1017,7 +1017,7 @@ protected ReplicaResult shardOperationOnReplica(Request request, IndexShard repl // publish a new state (same as the old state with the version incremented) setState(clusterService, stateWithNodes); - // Assert that the request was retried, this time successfull + // Assert that the request was retried, this time successful assertTrue("action should have been successfully called on retry but was not", calledSuccessfully.get()); transportService.stop(); } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java index 98c8dc1b2caff..a2f55897455f5 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java @@ -600,7 +600,7 @@ public void testAdaptiveReplicaSelection() throws Exception { collector.addNodeStatistics("node_1", 4, TimeValue.timeValueMillis(300).nanos(), TimeValue.timeValueMillis(250).nanos()); groupIterator = opRouting.searchShards(state, indexNames, null, null, collector, outstandingRequests); shardChoice = groupIterator.get(0).nextOrNull(); - // finally, node 2 is choosen instead + // finally, node 2 is chosen instead assertThat(shardChoice.currentNodeId(), equalTo("node_2")); IOUtils.close(clusterService); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java index b1fa8346e2c54..d91493c2a2578 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDeciderTests.java @@ -63,7 +63,7 @@ public void testFilterInitialRecovery() { "node2").build()); RoutingTable routingTable = 
state.routingTable(); - // we can initally only allocate on node2 + // we can initially only allocate on node2 assertEquals(routingTable.index("idx").shard(0).shards().get(0).state(), INITIALIZING); assertEquals(routingTable.index("idx").shard(0).shards().get(0).currentNodeId(), "node2"); routingTable = service.applyFailedShard(state, routingTable.index("idx").shard(0).shards().get(0), randomBoolean()).routingTable(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java index 9e31bd76c3016..b17abcc17b359 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java @@ -37,7 +37,7 @@ public class MultiFieldCopyToMapperTests extends ESTestCase { public void testExceptionForCopyToInMultiFields() throws IOException { XContentBuilder mapping = createMappinmgWithCopyToInMultiField(); - // first check that for newer versions we throw exception if copy_to is found withing multi field + // first check that for newer versions we throw exception if copy_to is found within multi field MapperService mapperService = MapperTestUtils.newMapperService(xContentRegistry(), createTempDir(), Settings.EMPTY, "test"); try { mapperService.parse("type", new CompressedXContent(Strings.toString(mapping)), true); diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java index bda6de8aa7d61..4d1bab0185171 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -573,7 +573,7 @@ public void testCircuitBreakerIncrementedByIndexShard() throws Exception { // Generate a couple of segments client().prepareIndex("test", "_doc", "1").setSource("{\"foo\":\"" + randomAlphaOfLength(100) + "\"}", XContentType.JSON) .setRefreshPolicy(IMMEDIATE).get(); - // Use routing so 2 documents are guarenteed to be on the same shard + // Use routing so 2 documents are guaranteed to be on the same shard String routing = randomAlphaOfLength(5); client().prepareIndex("test", "_doc", "2").setSource("{\"foo\":\"" + randomAlphaOfLength(100) + "\"}", XContentType.JSON) .setRefreshPolicy(IMMEDIATE).setRouting(routing).get(); diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 1c27a59e0ecbe..bfae5b124df48 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -2247,7 +2247,7 @@ ChannelFactory getChannelFactory() { @Override void deleteReaderFiles(TranslogReader reader) { if (fail.fail()) { - // simulate going OOM and dieing just at the wrong moment. + // simulate going OOM and dying just at the wrong moment. 
throw new RuntimeException("simulated"); } else { super.deleteReaderFiles(reader); diff --git a/server/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java b/server/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java index 4a44c518051a2..aceee1474c60e 100644 --- a/server/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java +++ b/server/src/test/java/org/elasticsearch/monitor/process/ProcessProbeTests.java @@ -69,7 +69,7 @@ public void testProcessStats() { ProcessStats.Mem mem = stats.getMem(); assertNotNull(mem); - // Commited total virtual memory can return -1 if not supported, let's see which platforms fail + // Committed total virtual memory can return -1 if not supported, let's see which platforms fail assertThat(mem.getTotalVirtual().getBytes(), greaterThan(0L)); } } diff --git a/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java b/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java index 835dd7cd9fab0..327d5f95f7b81 100644 --- a/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java @@ -119,7 +119,7 @@ public String getName() { final HashMap params = new HashMap<>(); params.put("consumed", randomAlphaOfLength(8)); params.put("flied", randomAlphaOfLength(8)); - params.put("respones_param", randomAlphaOfLength(8)); + params.put("response_param", randomAlphaOfLength(8)); params.put("tokenzier", randomAlphaOfLength(8)); params.put("very_close_to_parametre", randomAlphaOfLength(8)); params.put("very_far_from_every_consumed_parameter", randomAlphaOfLength(8)); @@ -132,7 +132,7 @@ public String getName() { hasToString(containsString( "request [/] contains unrecognized parameters: " + "[flied] -> did you mean [field]?, " + - "[respones_param] -> did you mean [response_param]?, " + + "[response_param] -> did you mean [response_param]?, " + "[tokenzier] -> did you mean [tokenizer]?, " + "[very_close_to_parametre] -> did you mean any of [very_close_to_parameter_1, very_close_to_parameter_2]?, " + "[very_far_from_every_consumed_parameter]"))); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetricTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetricTests.java index 584208af4177c..bd7ce5e96c1a5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetricTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetricTests.java @@ -113,7 +113,7 @@ private static Object randomValue(Supplier[] valueTypes, int level) { */ @Override protected ScriptService mockScriptService() { - // mock script always retuns the size of the input aggs list as result + // mock script always returns the size of the input aggs list as result @SuppressWarnings("unchecked") MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, Collections.singletonMap(REDUCE_SCRIPT_NAME, script -> ((List) script.get("_aggs")).size())); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java index 3289c5a7f6424..3462f485de080 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java @@ -164,7 +164,7 @@ private Object randomOfType(SortField.Type type) { case STRING_VAL: return new BytesRef(randomAlphaOfLength(5)); default: - throw new UnsupportedOperationException("Unkown SortField.Type: " + type); + throw new UnsupportedOperationException("Unknown SortField.Type: " + type); } } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 069c72c10b496..89ad2759d5937 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -334,29 +334,29 @@ public void testEnsureNoNegativeOffsets() throws Exception { "long_term", "type=text,term_vector=with_positions_offsets")); client().prepareIndex("test", "type1", "1") - .setSource("no_long_term", "This is a test where foo is highlighed and should be highlighted", - "long_term", "This is a test thisisaverylongwordandmakessurethisfails where foo is highlighed " + .setSource("no_long_term", "This is a test where foo is highlighted and should be highlighted", + "long_term", "This is a test thisisaverylongwordandmakessurethisfails where foo is highlighted " + "and should be highlighted") .get(); refresh(); SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("long_term", "thisisaverylongwordandmakessurethisfails foo highlighed")) + .setQuery(matchQuery("long_term", "thisisaverylongwordandmakessurethisfails foo highlighted")) .highlighter(new HighlightBuilder().field("long_term", 18, 1).highlighterType("fvh")) .get(); assertHighlight(search, 0, "long_term", 0, 1, equalTo("thisisaverylongwordandmakessurethisfails")); search = client().prepareSearch() - .setQuery(matchPhraseQuery("no_long_term", "test foo highlighed").slop(3)) + .setQuery(matchPhraseQuery("no_long_term", "test foo highlighted").slop(3)) .highlighter(new HighlightBuilder().field("no_long_term", 18, 1).highlighterType("fvh").postTags("").preTags("")) .get(); assertNotHighlighted(search, 0, "no_long_term"); search = client().prepareSearch() - .setQuery(matchPhraseQuery("no_long_term", "test foo highlighed").slop(3)) + .setQuery(matchPhraseQuery("no_long_term", "test foo highlighted").slop(3)) .highlighter(new HighlightBuilder().field("no_long_term", 30, 1).highlighterType("fvh").postTags("").preTags("")) .get(); - assertHighlight(search, 0, "no_long_term", 0, 1, equalTo("a test where foo is highlighed and")); + assertHighlight(search, 0, "no_long_term", 0, 1, equalTo("a test where foo is highlighted and")); } public void testSourceLookupHighlightingUsingPlainHighlighter() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index 268f4aeb26d65..fc32abe18c76b 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -159,7 +159,7 @@ protected void sortFieldAssertions(FieldSortBuilder builder, SortField sortField } /** - * Test that missing values get transfered correctly to the SortField + * Test that missing values get transferred correctly to the SortField */ public void 
testBuildSortFieldMissingValue() throws IOException { QueryShardContext shardContextMock = createMockShardContext(); @@ -190,7 +190,7 @@ public void testBuildSortFieldMissingValue() throws IOException { } /** - * Test that the sort builder order gets transfered correctly to the SortField + * Test that the sort builder order gets transferred correctly to the SortField */ public void testBuildSortFieldOrder() throws IOException { QueryShardContext shardContextMock = createMockShardContext(); @@ -214,7 +214,7 @@ public void testBuildSortFieldOrder() throws IOException { } /** - * Test that the sort builder mode gets transfered correctly to the SortField + * Test that the sort builder mode gets transferred correctly to the SortField */ public void testMultiValueMode() throws IOException { QueryShardContext shardContextMock = createMockShardContext(); @@ -249,7 +249,7 @@ public void testMultiValueMode() throws IOException { comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource(); assertEquals(MultiValueMode.MEDIAN, comparatorSource.sortMode()); - // sort mode should also be set by build() implicitely to MIN or MAX if not set explicitely on builder + // sort mode should also be set by build() implicitly to MIN or MAX if not set explicitly on builder sortBuilder = new FieldSortBuilder("value"); sortField = sortBuilder.build(shardContextMock).field; assertThat(sortField, instanceOf(SortedNumericSortField.class)); diff --git a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index 7ffedbf43ec2c..ebe1118cc6f69 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -445,7 +445,7 @@ public void testCommonCaseIsOptimized() throws IOException { } /** - * Test that the sort builder order gets transfered correctly to the SortField + * Test that the sort builder order gets transferred correctly to the SortField */ public void testBuildSortFieldOrder() throws IOException { QueryShardContext shardContextMock = createMockShardContext(); @@ -460,7 +460,7 @@ public void testBuildSortFieldOrder() throws IOException { } /** - * Test that the sort builder mode gets transfered correctly to the SortField + * Test that the sort builder mode gets transferred correctly to the SortField */ public void testMultiValueMode() throws IOException { QueryShardContext shardContextMock = createMockShardContext(); diff --git a/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index 0f19b709a4fed..3017a2e0c067d 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java @@ -267,7 +267,7 @@ public void testBadSortMode() throws IOException { } /** - * Test that the sort builder mode gets transfered correctly to the SortField + * Test that the sort builder mode gets transferred correctly to the SortField */ public void testMultiValueMode() throws IOException { QueryShardContext shardContextMock = createMockShardContext(); diff --git a/server/src/test/resources/org/elasticsearch/action/bulk/simple-bulk7.json b/server/src/test/resources/org/elasticsearch/action/bulk/simple-bulk7.json index a642d9ce4fe57..669bfd10798e9 100644 --- 
a/server/src/test/resources/org/elasticsearch/action/bulk/simple-bulk7.json +++ b/server/src/test/resources/org/elasticsearch/action/bulk/simple-bulk7.json @@ -2,5 +2,5 @@ {"field1": "value0"} {"index": {"_index": "test", "_type": "doc", "_id": 1}} {"field1": "value1"} -{"index": {"_index": "test", "_type": "doc", "_id": 2, "_unkown": ["foo", "bar"]}} +{"index": {"_index": "test", "_type": "doc", "_id": 2, "_unknown": ["foo", "bar"]}} {"field1": "value2"} diff --git a/server/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json b/server/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json index dd544527f525a..4b91bcfb36b5f 100644 --- a/server/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json +++ b/server/src/test/resources/org/elasticsearch/index/mapper/dynamictemplate/simple/test-mapping.json @@ -2,7 +2,7 @@ "person":{ "dynamic_templates":[ { - "tempalte_1":{ + "template_1":{ "match":"multi*", "mapping":{ "type":"{dynamic_type}", diff --git a/test/framework/src/test/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverterTests.java b/test/framework/src/test/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverterTests.java index 8a98b867a46f5..3ada3cc93d980 100644 --- a/test/framework/src/test/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverterTests.java +++ b/test/framework/src/test/java/org/elasticsearch/common/logging/TestThreadInfoPatternConverterTests.java @@ -43,7 +43,7 @@ public void testThreadInfo() { // Test threads get the test name assertEquals(getTestName(), threadInfo(Thread.currentThread().getName())); - // Suite initalization gets "suite" + // Suite initialization gets "suite" assertEquals("suite", suiteInfo); // And stuff that doesn't match anything gets wrapped in [] so we can see it diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java index 4c714c7e9aa14..a55470d67a69a 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java @@ -482,7 +482,7 @@ public void testParseDoSectionExpectedWarnings() throws Exception { " type: test_type\n" + "warnings:\n" + " - some test warning they are typically pretty long\n" + - " - some other test warning somtimes they have [in] them" + " - some other test warning sometimes they have [in] them" ); DoSection doSection = DoSection.parse(parser); @@ -496,7 +496,7 @@ public void testParseDoSectionExpectedWarnings() throws Exception { assertThat(doSection.getApiCallSection().getBodies().size(), equalTo(0)); assertThat(doSection.getExpectedWarningHeaders(), equalTo(Arrays.asList( "some test warning they are typically pretty long", - "some other test warning somtimes they have [in] them"))); + "some other test warning sometimes they have [in] them"))); parser = createParser(YamlXContent.yamlXContent, "indices.get_field_mapping:\n" + diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java index 23f44c560baeb..00ab93b816032 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java +++ 
b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java @@ -99,7 +99,7 @@ public void testInitializiationIsConsistent() { /** * a set of settings that are expected to have different values betweem clusters, even they have been initialized with the same - * base settins. + * base settings. */ static final Set clusterUniqueSettings = new HashSet<>(); diff --git a/x-pack/docs/en/rest-api/ml/jobcounts.asciidoc b/x-pack/docs/en/rest-api/ml/jobcounts.asciidoc index d343cc23ae0ad..740c76852174c 100644 --- a/x-pack/docs/en/rest-api/ml/jobcounts.asciidoc +++ b/x-pack/docs/en/rest-api/ml/jobcounts.asciidoc @@ -206,7 +206,7 @@ The `forecasts_stats` object shows statistics about forecasts. It has the follow (object) Counts per forecast status, for example: {"finished" : 2}. NOTE: `memory_bytes`, `records`, `processing_time_ms` and `status` require at least 1 forecast, otherwise -these fields are ommitted. +these fields are omitted. [float] [[ml-stats-node]] diff --git a/x-pack/docs/en/rest-api/rollup/rollup-caps.asciidoc b/x-pack/docs/en/rest-api/rollup/rollup-caps.asciidoc index f770adf1f0d1c..fc7750b76ea15 100644 --- a/x-pack/docs/en/rest-api/rollup/rollup-caps.asciidoc +++ b/x-pack/docs/en/rest-api/rollup/rollup-caps.asciidoc @@ -44,7 +44,7 @@ For more information, see ==== Examples Imagine we have an index named `sensor-1` full of raw data. We know that the data will grow over time, so there -will be a `sensor-2`, `sensor-3`, etc. Let's create a Rollup job that targets the index pattern `sensor-*` to accomodate +will be a `sensor-2`, `sensor-3`, etc. Let's create a Rollup job that targets the index pattern `sensor-*` to accommodate this future scaling: [source,js] diff --git a/x-pack/docs/en/watcher/actions/email.asciidoc b/x-pack/docs/en/watcher/actions/email.asciidoc index 0da028fcc7b1e..43dcaf86753dd 100644 --- a/x-pack/docs/en/watcher/actions/email.asciidoc +++ b/x-pack/docs/en/watcher/actions/email.asciidoc @@ -61,7 +61,7 @@ configuring the `http` attachment type, you must specify the request URL. The `reporting` attachment type is a special type to include PDF rendered dashboards from kibana. This type is consistently polling the kibana app if the dashboard rendering is done, preventing long running HTTP connections, that are potentially -killed by firewalls or load balancers inbetween. +killed by firewalls or load balancers in-between. [source,js] -------------------------------------------------- diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java index aaa3effcfe8bb..661d90b2d8075 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java @@ -114,7 +114,7 @@ public Void run() { //private final Environment env; protected boolean transportClientMode; protected final Licensing licensing; - // These should not be directly accessed as they cannot be overriden in tests. Please use the getters so they can be overridden. + // These should not be directly accessed as they cannot be overridden in tests. Please use the getters so they can be overridden. 
private static final SetOnce licenseState = new SetOnce<>(); private static final SetOnce sslService = new SetOnce<>(); private static final SetOnce licenseService = new SetOnce<>(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/Hop.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/Hop.java index 8ba7005f15fcf..d73005bb1b12d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/Hop.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/Hop.java @@ -33,7 +33,7 @@ *

* Optionally, each hop can contain a "guiding query" that further limits the set of documents considered. * In our weblog example above we might choose to constrain the second hop to only look at log records that - * had a reponse code of 404. + * had a response code of 404. *

*

* If absent, the list of {@link VertexRequest}s is inherited from the prior Hop's list to avoid repeating diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java index 93aa5495c409e..cb8dd4a2afcbe 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java @@ -393,7 +393,7 @@ public boolean isUseNull() { } /** - * Excludes frequently-occuring metrics from the analysis; + * Excludes frequently-occurring metrics from the analysis; * can apply to 'by' field, 'over' field, or both * * @return the value that the user set diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java index 2b28c2f15c9c7..b972781695a65 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java @@ -106,7 +106,7 @@ public static ExecutionState getState(WatchExecutionResult executionResult) { } if (executionResult.conditionResult().met()) { final Collection values = executionResult.actionsResults().values(); - // acknowledged as state wins because the user had explicitely set this, where as throttled may happen due to execution + // acknowledged as state wins because the user had explicitly set this, where as throttled may happen due to execution if (values.stream().anyMatch((r) -> r.action().status() == Action.Result.Status.ACKNOWLEDGED)) { return ExecutionState.ACKNOWLEDGED; } else if (values.stream().anyMatch((r) -> r.action().status() == Action.Result.Status.THROTTLED)) { diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java index 4eb136040e988..43af0c41225fe 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java @@ -193,7 +193,7 @@ synchronized void expand() { // A single sample pool of docs is built at the root of the aggs tree. // For quality's sake it might have made more sense to sample top docs // for each of the terms from the previous hop (e.g. an initial query for "beatles" - // may have seperate doc-sample pools for significant root terms "john", "paul", "yoko" etc) + // may have separate doc-sample pools for significant root terms "john", "paul", "yoko" etc) // but I found this dramatically slowed down execution - each pool typically had different docs which // each had non-overlapping sets of terms that needed frequencies looking up for significant terms. 
// A common sample pool reduces the specialization that can be given to each root term but diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreator.java index 04280261b2634..115741455f492 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreator.java @@ -60,7 +60,7 @@ public final class GrokPatternCreator { // minus sign is not a "word" character) new GrokPatternCandidate("NUMBER", "field", "(? lis updatePersistentTaskState(state, ActionListener.wrap( (task) -> { - logger.debug("Succesfully updated state for rollup job [" + job.getConfig().getId() + "] to [" + logger.debug("Successfully updated state for rollup job [" + job.getConfig().getId() + "] to [" + state.getIndexerState() + "][" + state.getPosition() + "]"); listener.onResponse(new StartRollupJobAction.Response(true)); }, @@ -308,7 +308,7 @@ public synchronized void stop(ActionListener liste updatePersistentTaskState(state, ActionListener.wrap( (task) -> { - logger.debug("Succesfully updated state for rollup job [" + job.getConfig().getId() + logger.debug("Successfully updated state for rollup job [" + job.getConfig().getId() + "] to [" + state.getIndexerState() + "]"); listener.onResponse(new StopRollupJobAction.Response(true)); }, diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java index 13290f09e8eb8..f398a892d2631 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java @@ -198,7 +198,7 @@ public void updatePersistentTaskState(PersistentTaskState taskState, } else if (c == 1) { assertThat(((RollupJobStatus) taskState).getIndexerState(), equalTo(IndexerState.STOPPED)); } else { - fail("Should not have updated persistent statuse > 2 times"); + fail("Should not have updated persistent statuses > 2 times"); } listener.onResponse(new PersistentTasksCustomMetaData.PersistentTask<>("foo", RollupField.TASK_NAME, job, 1, new PersistentTasksCustomMetaData.Assignment("foo", "foo"))); @@ -684,7 +684,7 @@ public void updatePersistentTaskState(PersistentTaskState taskState, } else if (c == 2) { assertThat(((RollupJobStatus) taskState).getIndexerState(), equalTo(IndexerState.STOPPED)); } else { - fail("Should not have updated persistent statuse > 3 times"); + fail("Should not have updated persistent statuses > 3 times"); } listener.onResponse(new PersistentTasksCustomMetaData.PersistentTask<>("foo", RollupField.TASK_NAME, job, 1, new PersistentTasksCustomMetaData.Assignment("foo", "foo"))); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactory.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactory.java index 36d14aa67c0de..246de7268625e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactory.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactory.java @@ -115,7 +115,7 @@ void loop() { * Securely escapes the username and 
inserts it into the template using MessageFormat * * @param username username to insert into the DN template. Any commas, equals or plus will be escaped. - * @return DN (distinquished name) build from the template. + * @return DN (distinguished name) build from the template. */ String buildDnFromTemplate(String username, String template) { //this value must be escaped to avoid manipulation of the template DN. diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java index 8c60e565e681a..2c728fa002c3c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java @@ -151,7 +151,7 @@ public RealmConfig getRealm() { * {@link ExpressionModel} class can take a custom {@code Predicate} that tests whether the data in the model * matches the {@link FieldExpression.FieldValue value} in the expression. * - * The string constructor parameter may or may not actaully parse as a DN - the "dn" field should + * The string constructor parameter may or may not actually parse as a DN - the "dn" field should * always be a DN, however groups will be a DN if they're from an LDAP/AD realm, but often won't be for a SAML realm. * * Because the {@link FieldExpression.FieldValue} might be a pattern ({@link CharacterRunAutomaton automaton}), diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java index 761af81b08ec5..a88e1d470d858 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java @@ -54,7 +54,7 @@ void inbound(String action, TransportRequest request, TransportChannel transport throws IOException; /** - * The server trasnport filter that should be used in nodes as it ensures that an incoming + * The server transport filter that should be used in nodes as it ensures that an incoming * request is properly authenticated and authorized */ class NodeProfile implements ServerTransportFilter { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/TemplateUpgraderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/TemplateUpgraderTests.java index 7c254723868c1..88b6247282923 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/TemplateUpgraderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/TemplateUpgraderTests.java @@ -31,7 +31,7 @@ /** * This test ensures, that the plugin template upgrader can add and remove * templates when started within security, as this requires certain - * system priviliges + * system privileges */ @ClusterScope(maxNumDataNodes = 1, scope = Scope.SUITE, numClientNodes = 0) public class TemplateUpgraderTests extends SecurityIntegTestCase { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetaDataResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetaDataResolverTests.java index 
bddfd3f4bcfca..74502b4e2b33b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetaDataResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/support/LdapMetaDataResolverTests.java @@ -40,7 +40,7 @@ public void testResolveSingleValuedAttributeFromCachedAttributes() throws Except new Attribute("cn", "Clint Barton"), new Attribute("uid", "hawkeye"), new Attribute("email", "clint.barton@shield.gov"), - new Attribute("memberOf", "cn=staff,ou=groups,dc=exmaple,dc=com", "cn=admin,ou=groups,dc=exmaple,dc=com") + new Attribute("memberOf", "cn=staff,ou=groups,dc=example,dc=com", "cn=admin,ou=groups,dc=example,dc=com") ); final Map map = resolve(attributes); assertThat(map.size(), equalTo(2)); @@ -75,4 +75,4 @@ private Map resolve(Collection attributes) throws Exc resolver.resolve(null, HAWKEYE_DN, TimeValue.timeValueSeconds(1), logger, attributes, future); return future.get(); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java index 5a7015a4e8dfa..8a126f4a73ed4 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java @@ -1645,7 +1645,7 @@ public void testSignatureWrappingAttackSeven() throws Exception { /* Permutation 7 - Mangle the contents of the response to be - + @@ -1654,16 +1654,16 @@ public void testSignatureWrappingAttackSeven() throws Exception { */ final Element response = (Element) legitimateDocument. getElementsByTagNameNS(SAML20P_NS, "Response").item(0); - final Element extentions = legitimateDocument.createElement("Extensions"); + final Element extensions = legitimateDocument.createElement("Extensions"); final Element assertion = (Element) legitimateDocument. getElementsByTagNameNS(SAML20_NS, "Assertion").item(0); - response.insertBefore(extentions, assertion); + response.insertBefore(extensions, assertion); final Element forgedAssertion = (Element) assertion.cloneNode(true); forgedAssertion.setAttribute("ID", "_forged_assertion_id"); final Element forgedSignature = (Element) forgedAssertion. 
getElementsByTagNameNS("http://www.w3.org/2000/09/xmldsig#", "Signature").item(0); forgedAssertion.removeChild(forgedSignature); - extentions.appendChild(forgedAssertion); + extensions.appendChild(forgedAssertion); final SamlToken forgedToken = token(SamlUtils.toString((legitimateDocument.getDocumentElement()))); final ElasticsearchSecurityException exception = expectSamlException(() -> authenticator.authenticate(forgedToken)); assertThat(exception.getMessage(), containsString("Failed to parse SAML")); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/filter/IpFilteringUpdateTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/filter/IpFilteringUpdateTests.java index 9a960eae2d6f6..65a5fb080cdb0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/filter/IpFilteringUpdateTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/filter/IpFilteringUpdateTests.java @@ -121,7 +121,7 @@ public void testThatIpFilterConfigurationCanBeChangedDynamically() throws Except } } - // issue #762, occured because in the above test we use HTTP and transport + // issue #762, occurred because in the above test we use HTTP and transport public void testThatDisablingIpFilterWorksAsExpected() throws Exception { Settings settings = Settings.builder() .put("xpack.security.transport.filter.deny", "127.0.0.8") diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml index 11657750c5104..1bef00f737185 100644 --- a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authz/store/invalid_roles.yml @@ -23,7 +23,7 @@ role3: cluster: ALL indices: '*': ALL -# invalid role indices privilegs +# invalid role indices privileges role4: cluster: ALL indices: diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/debug/Debug.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/debug/Debug.java index ccba7429a4bb2..831b3177f562b 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/debug/Debug.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/debug/Debug.java @@ -43,7 +43,7 @@ * This class tries to cater to both audiences - use the legacy, Writer way if needed though strive to use the * proper typical approach, that of specifying intention and output (file) in the URL. * - * For this reason the {@link System#out} and {@link System#err} are being refered in this class though are used only + * For this reason the {@link System#out} and {@link System#err} are being referred in this class though are used only * when needed. 
*/ public final class Debug { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionDefinition.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionDefinition.java index ec76b6ab34ab0..d513ca07df4ae 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionDefinition.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionDefinition.java @@ -23,7 +23,7 @@ public interface Builder { private final List aliases; private final Class clazz; /** - * Is this a datetime function comaptible with {@code EXTRACT}. + * Is this a datetime function compatible with {@code EXTRACT}. */ private final boolean datetime; private final Builder builder; @@ -60,7 +60,7 @@ Builder builder() { } /** - * Is this a datetime function comaptible with {@code EXTRACT}. + * Is this a datetime function compatible with {@code EXTRACT}. */ boolean datetime() { return datetime; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionUtils.java index 1f38456cba131..33eb61012eac4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionUtils.java @@ -15,7 +15,7 @@ abstract class StringFunctionUtils { * * @param s the original String * @param start starting position for the substring within the original string. 0-based index position - * @param length length in characters of the substracted substring + * @param length length in characters of the subtracted substring * @return the resulting String */ static String substring(String s, int start, int length) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index e202803b2610a..9949df48d117e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -286,7 +286,7 @@ public Object visitArithmeticUnary(ArithmeticUnaryContext ctx) { case SqlBaseParser.MINUS: return new Neg(source(ctx.operator), value); default: - throw new ParsingException(loc, "Unknown arithemtic {}", ctx.operator.getText()); + throw new ParsingException(loc, "Unknown arithmetic {}", ctx.operator.getText()); } } @@ -309,7 +309,7 @@ public Object visitArithmeticBinary(ArithmeticBinaryContext ctx) { case SqlBaseParser.MINUS: return new Sub(loc, left, right); default: - throw new ParsingException(loc, "Unknown arithemtic {}", ctx.operator.getText()); + throw new ParsingException(loc, "Unknown arithmetic {}", ctx.operator.getText()); } } @@ -638,4 +638,4 @@ public Literal visitGuidEscapedLiteral(GuidEscapedLiteralContext ctx) { return new Literal(source(ctx), string, DataType.KEYWORD); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/BinaryProcessorDefinitionTests.java 
b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/BinaryProcessorDefinitionTests.java index 110c482916228..8d71ce9003f14 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/BinaryProcessorDefinitionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/processor/definition/BinaryProcessorDefinitionTests.java @@ -68,7 +68,7 @@ public void collectFields(SqlSourceBuilder sourceBuilder) { } /** - * Returns {@code true} if the processor defintion builds a query that + * Returns {@code true} if the processor definition builds a query that * tracks scores, {@code false} otherwise. Used for testing * {@link ProcessorDefinition#collectFields(SqlSourceBuilder)}. */ @@ -151,4 +151,4 @@ public ProcessorDefinition resolveAttributes(AttributeResolver resolver) { public void collectFields(SqlSourceBuilder sourceBuilder) { } } -} \ No newline at end of file +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/datafeeds_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/datafeeds_crud.yml index a0f79b7cabad4..bf2f3bcec1cd8 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/datafeeds_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/datafeeds_crud.yml @@ -172,7 +172,7 @@ setup: "job_id":"datafeeds-crud-1", "indexes":["index-foo"], "types":["type-bar"], - "query":{"match_all_mispelled":{}} + "query":{"match_all_misspelled":{}} } --- diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/20_privileges.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/20_privileges.yml index b1d4158ac1550..9f065bb55224f 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/20_privileges.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/monitoring/bulk/20_privileges.yml @@ -34,7 +34,7 @@ setup: # read the monitoring indices. 
- do: xpack.security.put_role: - name: "unkown_agent_role" + name: "unknown_agent_role" body: > { "cluster": ["monitor"], @@ -51,7 +51,7 @@ setup: body: > { "password": "s3krit", - "roles" : [ "unkown_agent_role" ] + "roles" : [ "unknown_agent_role" ] } --- @@ -70,7 +70,7 @@ teardown: ignore: 404 - do: xpack.security.delete_role: - name: "unkown_agent_role" + name: "unknown_agent_role" ignore: 404 --- diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml index 298cf27fa2f9d..e5c7c76234022 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/delete_job.yml @@ -177,7 +177,7 @@ setup: - is_true: acknowledged --- -"Test delete non-existant job": +"Test delete non-existent job": - do: catch: /the task with id does_not_exist doesn't exist/ diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/start_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/start_job.yml index 7adba9035ebd5..0aea0b874d5a7 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/start_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/start_job.yml @@ -37,7 +37,7 @@ setup: } --- -"Test start non-existant job": +"Test start non-existent job": - do: catch: /Task for Rollup Job \[does_not_exist\] not found/ diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/stop_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/stop_job.yml index 42a1dea8163fb..8dfc050919f33 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/stop_job.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/stop_job.yml @@ -37,7 +37,7 @@ setup: } --- -"Test stop non-existant job": +"Test stop non-existent job": - do: catch: /Task for Rollup Job \[does_not_exist\] not found/ diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/set_security_user/10_small_users_one_index.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/set_security_user/10_small_users_one_index.yml index 0e42a13b8fd2a..24ea7c03c802e 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/set_security_user/10_small_users_one_index.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/set_security_user/10_small_users_one_index.yml @@ -91,7 +91,7 @@ teardown: ignore: 404 --- -"Test shared index seperating user by using DLS": +"Test shared index separating user by using DLS": - do: headers: Authorization: "Basic am9lOngtcGFjay10ZXN0LXBhc3N3b3Jk" diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/ack_watch/30_reset_ack_after_unmet_condition.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/ack_watch/30_reset_ack_after_unmet_condition.yml index 5b2d00235c5c7..2f3a815346484 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/ack_watch/30_reset_ack_after_unmet_condition.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/ack_watch/30_reset_ack_after_unmet_condition.yml @@ -12,7 +12,7 @@ teardown: ignore: 404 --- -"Ensure that ack status is reset after unsuccesful execution": +"Ensure that ack status is reset after unsuccessful execution": - do: xpack.watcher.put_watch: diff --git a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java 
b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java index 74eb029e916e5..56a17674b494e 100644 --- a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java +++ b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeIT.java @@ -47,7 +47,7 @@ public void testIndexUpgradeInfo() { public void testIndexUpgradeInfoLicense() throws Exception { // This test disables all licenses and generates a new one using dev private key - // in non-snapshot builds we are using produciton public key for license verification + // in non-snapshot builds we are using production public key for license verification // which makes this test to fail assumeTrue("License is only valid when tested against snapshot/test keys", Build.CURRENT.isSnapshot()); assertAcked(client().admin().indices().prepareCreate("test").get()); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java index f3b77b922aa89..086528054bcf3 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java @@ -143,7 +143,7 @@ public Engine.Index preIndex(ShardId shardId, Engine.Index operation) { * * @param shardId The shard id object of the document being processed * @param index The index operation - * @param ex The exception occured during indexing + * @param ex The exception occurred during indexing */ @Override public void postIndex(ShardId shardId, Engine.Index index, Exception ex) { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java index 7b77afb225e4b..247db12e9a7fa 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java @@ -122,7 +122,7 @@ public void unPause() { /** * Pause the execution of the watcher executor, and empty the state. - * Pausing means, that no new watch executions will be done unless this pausing is explicitely unset. + * Pausing means, that no new watch executions will be done unless this pausing is explicitly unset. * This is important when watcher is stopped, so that scheduled watches do not accidentally get executed. 
* This should not be used when we need to reload watcher based on some cluster state changes, then just calling * {@link #clearExecutionsAndQueue()} is the way to go @@ -341,7 +341,7 @@ record = createWatchRecord(record, ctx, e); public void updateWatchStatus(Watch watch) throws IOException { // at the moment we store the status together with the watch, // so we just need to update the watch itself - // we do not want to update the status.state field, as it might have been deactivated inbetween + // we do not want to update the status.state field, as it might have been deactivated in-between Map parameters = MapBuilder.newMapBuilder() .put(Watch.INCLUDE_STATUS_KEY, "true") .put(WatchStatus.INCLUDE_STATE, "false") diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/slack/SlackActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/slack/SlackActionTests.java index 29eaece9037e5..ce7ed9fdbded8 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/slack/SlackActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/slack/SlackActionTests.java @@ -117,7 +117,7 @@ public void testExecute() throws Exception { hasError = true; break; case 1: - when(response.status()).thenReturn(randomIntBetween(300, 600)); // error reponse + when(response.status()).thenReturn(randomIntBetween(300, 600)); // error response messages.add(SentMessages.SentMessage.responded(randomAlphaOfLength(10), message, request, response)); hasError = true; break; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java index 03dcd7947155e..60418357d4d28 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java @@ -528,7 +528,7 @@ public void testThatHttpClientFailsOnNonHttpResponse() throws Exception { }); HttpRequest request = HttpRequest.builder("localhost", serverSocket.getLocalPort()).path("/").build(); expectThrows(ClientProtocolException.class, () -> httpClient.execute(request)); - assertThat("A server side exception occured, but shouldn't", hasExceptionHappened.get(), is(nullValue())); + assertThat("A server side exception occurred, but shouldn't", hasExceptionHappened.get(), is(nullValue())); } finally { terminate(executor); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java index 2f502cb95aac3..065981a426039 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java @@ -83,7 +83,7 @@ public void testWatchDeletionDuringExecutionWorks() throws Exception { // watch has been executed successfully String state = ObjectPath.eval("state", source); assertThat(state, is("executed")); - // no exception occured + // no exception occurred assertThat(source, not(hasKey("exception"))); } } diff --git 
a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/execute/ExecuteWatchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/execute/ExecuteWatchTests.java index 53d5458706a63..c743922a4d715 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/execute/ExecuteWatchTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/action/execute/ExecuteWatchTests.java @@ -105,7 +105,7 @@ public void testExecuteActionMode() throws Exception { } if (mode.force()) { - // since we're forcing, lets ack the action, such that it'd suppoed to be throttled + // since we're forcing, lets ack the action, such that it'd supposed to be throttled // but forcing will ignore the throttling // lets wait for the watch to be ackable diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Detector.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Detector.java index 3274b03877f14..d2317f78b7db7 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Detector.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/config/Detector.java @@ -215,7 +215,7 @@ public boolean isUseNull() { } /** - * Excludes frequently-occuring metrics from the analysis; + * Excludes frequently-occurring metrics from the analysis; * can apply to 'by' field, 'over' field, or both * * @return the value that the user set diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java index 07529acdb8815..1923e961cf743 100644 --- a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java +++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java @@ -573,7 +573,7 @@ public void testMultiIndexDelete() throws Exception { } public void testDelete_multipleRequest() throws Exception { - String jobId = "delete-job-mulitple-times"; + String jobId = "delete-job-multiple-times"; createFarequoteJob(jobId); ConcurrentMapLong responses = ConcurrentCollections.newConcurrentMapLong(); diff --git a/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/20_small_users_one_index.yml b/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/20_small_users_one_index.yml index a015a88a315fa..ff5fad0e82d32 100644 --- a/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/20_small_users_one_index.yml +++ b/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/20_small_users_one_index.yml @@ -62,7 +62,7 @@ teardown: ignore: 404 --- -"Test shared index seperating user by using DLS role query with user's username": +"Test shared index separating user by using DLS role query with user's username": - do: xpack.security.put_role: name: "small_companies_role" @@ -130,7 +130,7 @@ teardown: - match: { hits.hits.0._source.user.username: john} --- -"Test shared index seperating user by using DLS role query with user's metadata": +"Test shared index separating user by using DLS role query with user's metadata": - do: xpack.security.put_role: name: "small_companies_role" diff --git 
a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/RestSqlSecurityIT.java b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/RestSqlSecurityIT.java index cb8afc876a418..3ef13ed71439c 100644 --- a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/RestSqlSecurityIT.java +++ b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/RestSqlSecurityIT.java @@ -213,7 +213,7 @@ protected AuditLogAsserter createAuditLogAsserter() { /** * Test the hijacking a scroll fails. This test is only implemented for * REST because it is the only API where it is simple to hijack a scroll. - * It should excercise the same code as the other APIs but if we were truly + * It should exercise the same code as the other APIs but if we were truly * paranoid we'd hack together something to test the others as well. */ public void testHijackScrollFails() throws Exception { diff --git a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/SqlSecurityTestCase.java b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/SqlSecurityTestCase.java index 83047c93da327..a8f51ea4f827a 100644 --- a/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/SqlSecurityTestCase.java +++ b/x-pack/qa/sql/security/src/test/java/org/elasticsearch/xpack/qa/sql/security/SqlSecurityTestCase.java @@ -263,7 +263,7 @@ public void testScrollWithSingleFieldGranted() throws Exception { createAuditLogAsserter() .expectSqlCompositeAction("test_admin", "test") /* Scrolling doesn't have to access the index again, at least not through sql. - * If we asserted query and scroll logs then we would see the scoll. */ + * If we asserted query and scroll logs then we would see the scroll. */ .expect(true, SQL_ACTION_NAME, "test_admin", empty()) .expect(true, SQL_ACTION_NAME, "test_admin", empty()) .expectSqlCompositeAction("only_a", "test") From 51822cd5ab10b039bfa6e5972aafae1d9a5d28d3 Mon Sep 17 00:00:00 2001 From: Kazuhiro Sera Date: Tue, 14 Aug 2018 15:43:44 +0900 Subject: [PATCH 2/6] Re-fix / revert a portion of the PR #32792 changes, following the review --- server/src/main/java/org/elasticsearch/index/store/Store.java | 2 +- .../java/org/elasticsearch/index/translog/TranslogStats.java | 4 ++-- .../rest/action/admin/cluster/RestGetRepositoriesAction.java | 3 ++- .../java/org/elasticsearch/rest/BaseRestHandlerTests.java | 2 +- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index ec213f1fdcb97..4ecd9983961a4 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -1063,7 +1063,7 @@ public Map getCommitUserData() { } /** - * returns the history uuid the store points at, or null if not existent. + * returns the history uuid the store points at, or null if nonexistent. 
*/ public String getHistoryUUID() { return commitUserData.get(Engine.HISTORY_UUID_KEY); diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogStats.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogStats.java index 75cd605538094..1f87792745572 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogStats.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogStats.java @@ -85,12 +85,12 @@ public int estimatedNumberOfOperations() { return numberOfOperations; } - /** the size of the generations in the translog that weren't yet to committe to lucene */ + /** the size of the generations in the translog that weren't yet to committed to lucene */ public long getUncommittedSizeInBytes() { return uncommittedSizeInBytes; } - /** the number of operations in generations of the translog that weren't yet to committe to lucene */ + /** the number of operations in generations of the translog that weren't yet to committed to lucene */ public int getUncommittedOperations() { return uncommittedOperations; } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java index 5d8c93ca4337c..9e4b50fcd4131 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java @@ -51,7 +51,8 @@ public RestGetRepositoriesAction(Settings settings, RestController controller, S @Override public String getName() { - return "get_repositories_action"; + // FIXIME: the typo, just modifying the response will bring a breaking change to the REST Usage API, see also PR #32792 + return "get_respositories_action"; } @Override diff --git a/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java b/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java index 327d5f95f7b81..6fc7ac4a96b09 100644 --- a/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java @@ -132,7 +132,7 @@ public String getName() { hasToString(containsString( "request [/] contains unrecognized parameters: " + "[flied] -> did you mean [field]?, " + - "[response_param] -> did you mean [response_param]?, " + + "[respones_param] -> did you mean [response_param]?, " + "[tokenzier] -> did you mean [tokenizer]?, " + "[very_close_to_parametre] -> did you mean any of [very_close_to_parameter_1, very_close_to_parameter_2]?, " + "[very_far_from_every_consumed_parameter]"))); From ce6fc356e3075b8bfe4b9361c028637373901fcd Mon Sep 17 00:00:00 2001 From: Kazuhiro Sera Date: Tue, 14 Aug 2018 16:17:56 +0900 Subject: [PATCH 3/6] Remove an unnecessary comment --- .../rest/action/admin/cluster/RestGetRepositoriesAction.java | 1 - 1 file changed, 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java index 9e4b50fcd4131..ccb10b603d3b2 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetRepositoriesAction.java @@ -51,7 +51,6 @@ public RestGetRepositoriesAction(Settings settings, RestController controller, S 
 
     @Override
     public String getName() {
-        // FIXIME: the typo, just modifying the response will bring a breaking change to the REST Usage API, see also PR #32792
         return "get_respositories_action";
     }
 

From 14b66de59eb1072e6f658f93ce256dcf15c38698 Mon Sep 17 00:00:00 2001
From: Kazuhiro Sera
Date: Tue, 14 Aug 2018 23:32:22 +0900
Subject: [PATCH 4/6] Re-fix / revert a portion of the PR #32792 changes, following the review

---
 .../test/java/org/elasticsearch/index/reindex/CancelTests.java | 2 +-
 .../test/java/org/elasticsearch/rest/BaseRestHandlerTests.java | 2 +-
 .../xpack/ml/rest/results/RestGetCategoriesAction.java         | 2 +-
 .../src/test/resources/rest-api-spec/test/rollup/start_job.yml | 2 +-
 .../src/test/resources/rest-api-spec/test/rollup/stop_job.yml  | 2 +-
 5 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTests.java
index 0266462e26e88..6b7b21a55148d 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTests.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/CancelTests.java
@@ -122,7 +122,7 @@ private void testCancel(String action, AbstractBulkByScrollRequestBuilder
         logger.debug("waiting for updates to be blocked");
         boolean blocked = awaitBusy(
             () -> ALLOWED_OPERATIONS.hasQueuedThreads() && ALLOWED_OPERATIONS.availablePermits() == 0,
-            1, TimeUnit.MINUTES); // 10 seconds is usually fine but on heavily loaded machines this can wake a while
+            1, TimeUnit.MINUTES); // 10 seconds is usually fine but on heavily loaded machines this can take a while
         assertTrue("updates blocked", blocked);
 
         // Status should show the task running
diff --git a/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java b/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java
index 6fc7ac4a96b09..835dd7cd9fab0 100644
--- a/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java
@@ -119,7 +119,7 @@ public String getName() {
         final HashMap params = new HashMap<>();
         params.put("consumed", randomAlphaOfLength(8));
         params.put("flied", randomAlphaOfLength(8));
-        params.put("response_param", randomAlphaOfLength(8));
+        params.put("respones_param", randomAlphaOfLength(8));
         params.put("tokenzier", randomAlphaOfLength(8));
         params.put("very_close_to_parametre", randomAlphaOfLength(8));
         params.put("very_far_from_every_consumed_parameter", randomAlphaOfLength(8));
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetCategoriesAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetCategoriesAction.java
index f6d024270fa2b..e8a74575197b7 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetCategoriesAction.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/results/RestGetCategoriesAction.java
@@ -39,7 +39,7 @@ public RestGetCategoriesAction(Settings settings, RestController controller) {
 
     @Override
     public String getName() {
-        return "xpack_ml_get_categories_action";
+        return "xpack_ml_get_catagories_action";
     }
 
     @Override
diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/start_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/start_job.yml
index 0aea0b874d5a7..38a357bcd6825 100644
--- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/start_job.yml
+++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/start_job.yml
@@ -37,7 +37,7 @@ setup:
           }
 
 ---
-"Test start non-existent job":
+"Test start nonexistent job":
 
   - do:
       catch: /Task for Rollup Job \[does_not_exist\] not found/
diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/stop_job.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/stop_job.yml
index 8dfc050919f33..849aca3332dfe 100644
--- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/stop_job.yml
+++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/stop_job.yml
@@ -37,7 +37,7 @@ setup:
           }
 
 ---
-"Test stop non-existent job":
+"Test stop nonexistent job":
 
   - do:
       catch: /Task for Rollup Job \[does_not_exist\] not found/

From 916fa9a8a08afe320ea043335951e9208ba939db Mon Sep 17 00:00:00 2001
From: David Turner
Date: Tue, 21 Aug 2018 08:56:24 +0100
Subject: [PATCH 5/6] Back out changes to HighlighterSearchIT

---
 .../subphase/highlight/HighlighterSearchIT.java | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
index 89ad2759d5937..069c72c10b496 100644
--- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
+++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
@@ -334,29 +334,29 @@ public void testEnsureNoNegativeOffsets() throws Exception {
                 "long_term", "type=text,term_vector=with_positions_offsets"));
 
         client().prepareIndex("test", "type1", "1")
-                .setSource("no_long_term", "This is a test where foo is highlighted and should be highlighted",
-                        "long_term", "This is a test thisisaverylongwordandmakessurethisfails where foo is highlighted "
+                .setSource("no_long_term", "This is a test where foo is highlighed and should be highlighted",
+                        "long_term", "This is a test thisisaverylongwordandmakessurethisfails where foo is highlighed "
                                 + "and should be highlighted")
                 .get();
         refresh();
 
         SearchResponse search = client().prepareSearch()
-                .setQuery(matchQuery("long_term", "thisisaverylongwordandmakessurethisfails foo highlighted"))
+                .setQuery(matchQuery("long_term", "thisisaverylongwordandmakessurethisfails foo highlighed"))
                 .highlighter(new HighlightBuilder().field("long_term", 18, 1).highlighterType("fvh"))
                 .get();
         assertHighlight(search, 0, "long_term", 0, 1, equalTo("thisisaverylongwordandmakessurethisfails"));
 
         search = client().prepareSearch()
-                .setQuery(matchPhraseQuery("no_long_term", "test foo highlighted").slop(3))
+                .setQuery(matchPhraseQuery("no_long_term", "test foo highlighed").slop(3))
                 .highlighter(new HighlightBuilder().field("no_long_term", 18, 1).highlighterType("fvh").postTags("").preTags(""))
                 .get();
         assertNotHighlighted(search, 0, "no_long_term");
 
         search = client().prepareSearch()
-                .setQuery(matchPhraseQuery("no_long_term", "test foo highlighted").slop(3))
+                .setQuery(matchPhraseQuery("no_long_term", "test foo highlighed").slop(3))
                 .highlighter(new HighlightBuilder().field("no_long_term", 30, 1).highlighterType("fvh").postTags("").preTags(""))
                 .get();
-        assertHighlight(search, 0, "no_long_term", 0, 1, equalTo("a test where foo is highlighted and"));
+        assertHighlight(search, 0, "no_long_term", 0, 1, equalTo("a test where foo is highlighed and"));
     }
 
     public void testSourceLookupHighlightingUsingPlainHighlighter() throws Exception {

From c67057fa6a3b17635adf9ff1cc3b668c3c16287b Mon Sep 17 00:00:00 2001
From: David Turner
Date: Wed, 3 Oct 2018 12:33:13 +0100
Subject: [PATCH 6/6] Revert change to percolator.asciidoc due to failing test

---
 docs/reference/mapping/types/percolator.asciidoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/reference/mapping/types/percolator.asciidoc b/docs/reference/mapping/types/percolator.asciidoc
index cb9fc171ecd2e..e4502d37360c9 100644
--- a/docs/reference/mapping/types/percolator.asciidoc
+++ b/docs/reference/mapping/types/percolator.asciidoc
@@ -372,7 +372,7 @@ GET /test_index/_search
       "percolate" : {
         "field" : "query",
         "document" : {
-          "body" : "Bicycles are missing"
+          "body" : "Bycicles are missing"
         }
       }
     }