From 033ba725af26b940226aaa9b1ce88349bc433af6 Mon Sep 17 00:00:00 2001
From: Boaz Leskes
Date: Tue, 5 Feb 2019 20:53:35 +0100
Subject: [PATCH 01/19] Remove support for internal versioning for concurrency control (#38254)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Elasticsearch has long [supported](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html#index-versioning) compare and set (a.k.a. optimistic concurrency control) operations using internal document versioning. Sadly, that approach is flawed and can sometimes do the wrong thing. Here's the relevant excerpt from the resiliency status page:

> When a primary has been partitioned away from the cluster there is a short period of time until it detects this. During that time it will continue indexing writes locally, thereby updating document versions. When it tries to replicate the operation, however, it will discover that it is partitioned away. It won’t acknowledge the write and will wait until the partition is resolved to negotiate with the master on how to proceed. The master will decide to either fail any replicas which failed to index the operations on the primary or tell the primary that it has to step down because a new primary has been chosen in the meantime. Since the old primary has already written documents, clients may already have read from the old primary before it shuts itself down. The version numbers of these reads may not be unique if the new primary has already accepted writes for the same document.

We recently [introduced](https://www.elastic.co/guide/en/elasticsearch/reference/6.x/optimistic-concurrency-control.html) a new sequence-number-based approach that doesn't suffer from this dirty-reads problem. This commit removes support for internal versioning as a concurrency control mechanism in favor of the sequence number approach.
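To illustrate the replacement from a caller's perspective, here is a minimal sketch against the high-level REST client (not part of the diff below; the client instance, index `posts` and id `1` are illustrative): the `_seq_no` and `_primary_term` returned by a previous write are passed back via `setIfSeqNo`/`setIfPrimaryTerm` instead of conditioning on the internal `version`.

```java
import java.io.IOException;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.rest.RestStatus;

public class SeqNoCasSketch {
    // Sketch only: "posts"/"1" mirror the documentation tests touched by this patch.
    static void overwriteIfUnchanged(RestHighLevelClient client) throws IOException {
        // Unconditional first write; capture the seq_no and primary term it was assigned.
        IndexResponse first = client.index(
            new IndexRequest("posts").id("1").source("field", "value1"), RequestOptions.DEFAULT);

        // Conditional second write: only applied if the document is still the one we wrote.
        IndexRequest conditional = new IndexRequest("posts").id("1")
            .source("field", "value2")
            .setIfSeqNo(first.getSeqNo())
            .setIfPrimaryTerm(first.getPrimaryTerm());
        try {
            client.index(conditional, RequestOptions.DEFAULT);
        } catch (ElasticsearchException e) {
            if (e.status() == RestStatus.CONFLICT) {
                // another writer changed the document in between; re-read before deciding what to do
            }
        }
    }
}
```

The same pair of parameters is what the reindex and delete-by-query changes later in the diff thread through from each scrolled hit.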
Relates to #1078 --- .../client/WatcherRequestConverters.java | 1 - .../client/watcher/PutWatchRequest.java | 11 - .../client/RequestConvertersTests.java | 11 +- .../client/WatcherRequestConvertersTests.java | 11 +- .../documentation/CRUDDocumentationIT.java | 16 +- .../high-level/document/update.asciidoc | 5 +- .../migration/migrate_7_0/api.asciidoc | 16 ++ .../reindex/AsyncDeleteByQueryAction.java | 22 +- .../reindex/TransportDeleteByQueryAction.java | 2 +- .../reindex/TransportUpdateByQueryAction.java | 10 +- .../UpdateByQueryWhileModifyingTests.java | 4 +- .../resources/rest-api-spec/api/update.json | 9 - .../test/create/30_internal_version.yml | 36 ---- .../create/31_internal_version_with_types.yml | 35 --- .../{20_internal_version.yml => 20_cas.yml} | 10 +- .../test/delete/21_cas_with_types.yml | 30 +++ .../delete/21_internal_version_with_types.yml | 28 --- .../test/index/30_internal_version.yml | 36 ---- .../index/31_internal_version_with_types.yml | 36 ---- .../test/update/30_internal_version.yml | 31 --- .../update/31_internal_version_with_types.yml | 30 --- .../test/update/35_other_versions.yml | 29 --- .../update/36_other_versions_with_types.yml | 27 --- .../elasticsearch/action/DocWriteRequest.java | 17 +- .../action/bulk/BulkRequest.java | 14 +- .../action/search/SearchRequestBuilder.java | 2 +- .../action/update/UpdateHelper.java | 2 +- .../action/update/UpdateRequest.java | 61 +++--- .../index/get/ShardGetService.java | 7 +- .../action/document/RestUpdateAction.java | 2 - .../action/bulk/BulkRequestTests.java | 11 +- .../action/bulk/BulkWithUpdatesIT.java | 27 +-- .../action/update/UpdateRequestTests.java | 8 +- .../index/shard/ShardGetServiceTests.java | 26 +-- .../indices/settings/UpdateSettingsIT.java | 18 +- .../versioning/SimpleVersioningIT.java | 84 ++------ .../ml/action/TransportOpenJobAction.java | 2 +- .../action/TransportUpdateDatafeedAction.java | 2 +- .../action/TransportUpdateFilterAction.java | 29 +-- .../persistence/DatafeedConfigProvider.java | 21 +- .../xpack/ml/job/JobManager.java | 4 +- .../ml/job/persistence/JobConfigProvider.java | 22 +- .../integration/DatafeedConfigProviderIT.java | 7 +- .../ml/integration/JobConfigProviderIT.java | 9 +- .../xpack/security/authc/TokenService.java | 8 +- .../80_put_get_watch_with_passwords.yml | 201 ------------------ 46 files changed, 225 insertions(+), 805 deletions(-) delete mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/create/30_internal_version.yml delete mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/create/31_internal_version_with_types.yml rename rest-api-spec/src/main/resources/rest-api-spec/test/delete/{20_internal_version.yml => 20_cas.yml} (71%) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/delete/21_cas_with_types.yml delete mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/delete/21_internal_version_with_types.yml delete mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/index/30_internal_version.yml delete mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/index/31_internal_version_with_types.yml delete mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/update/30_internal_version.yml delete mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/update/31_internal_version_with_types.yml delete mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/update/35_other_versions.yml delete mode 100644 
rest-api-spec/src/main/resources/rest-api-spec/test/update/36_other_versions_with_types.yml diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java index 34fb826d62382..9718607d8b80e 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java @@ -70,7 +70,6 @@ static Request putWatch(PutWatchRequest putWatchRequest) { Request request = new Request(HttpPut.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(request) - .withVersion(putWatchRequest.getVersion()) .withIfSeqNo(putWatchRequest.ifSeqNo()) .withIfPrimaryTerm(putWatchRequest.ifPrimaryTerm()); if (putWatchRequest.isActive() == false) { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/PutWatchRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/PutWatchRequest.java index 8b83970723dd2..1d13a77e06cf9 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/PutWatchRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/PutWatchRequest.java @@ -21,7 +21,6 @@ import org.elasticsearch.client.Validatable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.seqno.SequenceNumbers; @@ -43,11 +42,9 @@ public final class PutWatchRequest implements Validatable { private final BytesReference source; private final XContentType xContentType; private boolean active = true; - private long version = Versions.MATCH_ANY; private long ifSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; private long ifPrimaryTerm = UNASSIGNED_PRIMARY_TERM; - public PutWatchRequest(String id, BytesReference source, XContentType xContentType) { Objects.requireNonNull(id, "watch id is missing"); if (isValidId(id) == false) { @@ -95,14 +92,6 @@ public XContentType xContentType() { return xContentType; } - public long getVersion() { - return version; - } - - public void setVersion(long version) { - this.version = version; - } - /** * only performs this put request if the watch's last modification was assigned the given * sequence number. 
Must be used in combination with {@link #setIfPrimaryTerm(long)} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 9364e2ce2d57c..e3807b6067960 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -768,8 +768,6 @@ public void testUpdate() throws IOException { } } setRandomWaitForActiveShards(updateRequest::waitForActiveShards, expectedParams); - setRandomVersion(updateRequest, expectedParams); - setRandomVersionType(updateRequest::versionType, expectedParams); setRandomIfSeqNoAndTerm(updateRequest, new HashMap<>()); // if* params are passed in the body if (randomBoolean()) { int retryOnConflict = randomIntBetween(0, 5); @@ -911,14 +909,7 @@ public void testBulk() throws IOException { if (randomBoolean()) { docWriteRequest.routing(randomAlphaOfLength(10)); } - if (randomBoolean()) { - if (randomBoolean()) { - docWriteRequest.version(randomNonNegativeLong()); - } - if (randomBoolean()) { - docWriteRequest.versionType(randomFrom(VersionType.values())); - } - } else if (randomBoolean()) { + if (opType != DocWriteRequest.OpType.UPDATE && randomBoolean()) { docWriteRequest.setIfSeqNo(randomNonNegativeLong()); docWriteRequest.setIfPrimaryTerm(randomLongBetween(1, 200)); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java index a31206bee88cc..19483cc201a5f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java @@ -29,8 +29,8 @@ import org.elasticsearch.client.watcher.DeactivateWatchRequest; import org.elasticsearch.client.watcher.DeleteWatchRequest; import org.elasticsearch.client.watcher.ExecuteWatchRequest; -import org.elasticsearch.client.watcher.PutWatchRequest; import org.elasticsearch.client.watcher.GetWatchRequest; +import org.elasticsearch.client.watcher.PutWatchRequest; import org.elasticsearch.client.watcher.StartWatchServiceRequest; import org.elasticsearch.client.watcher.StopWatchServiceRequest; import org.elasticsearch.client.watcher.WatcherStatsRequest; @@ -88,9 +88,12 @@ public void testPutWatch() throws Exception { } if (randomBoolean()) { - long version = randomLongBetween(10, 100); - putWatchRequest.setVersion(version); - expectedParams.put("version", String.valueOf(version)); + long seqNo = randomNonNegativeLong(); + long ifPrimaryTerm = randomLongBetween(1, 200); + putWatchRequest.setIfSeqNo(seqNo); + putWatchRequest.setIfPrimaryTerm(ifPrimaryTerm); + expectedParams.put("if_seq_no", String.valueOf(seqNo)); + expectedParams.put("if_primary_term", String.valueOf(ifPrimaryTerm)); } Request request = WatcherRequestConverters.putWatch(putWatchRequest); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 53524a6a5c215..1af9593e77f87 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -170,7 +170,6 @@ public void testIndex() throws Exception { // tag::index-response String index = indexResponse.getIndex(); String id = indexResponse.getId(); - long version = indexResponse.getVersion(); if (indexResponse.getResult() == DocWriteResponse.Result.CREATED) { // <1> } else if (indexResponse.getResult() == DocWriteResponse.Result.UPDATED) { @@ -220,7 +219,8 @@ public void testIndex() throws Exception { IndexRequest request = new IndexRequest("posts") .id("1") .source("field", "value") - .version(1); + .setIfSeqNo(10L) + .setIfPrimaryTerm(20); try { IndexResponse response = client.index(request, RequestOptions.DEFAULT); } catch(ElasticsearchException e) { @@ -432,7 +432,8 @@ public void testUpdate() throws Exception { // tag::update-conflict UpdateRequest request = new UpdateRequest("posts", "1") .doc("field", "value") - .version(1); + .setIfSeqNo(101L) + .setIfPrimaryTerm(200L); try { UpdateResponse updateResponse = client.update( request, RequestOptions.DEFAULT); @@ -499,9 +500,10 @@ public void testUpdate() throws Exception { request.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL); // <1> request.setRefreshPolicy("wait_for"); // <2> // end::update-request-refresh - // tag::update-request-version - request.version(2); // <1> - // end::update-request-version + // tag::update-request-cas + request.setIfSeqNo(2L); // <1> + request.setIfPrimaryTerm(1L); // <2> + // end::update-request-cas // tag::update-request-detect-noop request.detectNoop(false); // <1> // end::update-request-detect-noop @@ -630,7 +632,7 @@ public void testDelete() throws Exception { // tag::delete-conflict try { DeleteResponse deleteResponse = client.delete( - new DeleteRequest("posts", "1").version(2), + new DeleteRequest("posts", "1").setIfSeqNo(100).setIfPrimaryTerm(2), RequestOptions.DEFAULT); } catch (ElasticsearchException exception) { if (exception.status() == RestStatus.CONFLICT) { diff --git a/docs/java-rest/high-level/document/update.asciidoc b/docs/java-rest/high-level/document/update.asciidoc index 3112d85512217..35300512dfc3e 100644 --- a/docs/java-rest/high-level/document/update.asciidoc +++ b/docs/java-rest/high-level/document/update.asciidoc @@ -140,9 +140,10 @@ include-tagged::{doc-tests-file}[{api}-request-source-exclude] ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests-file}[{api}-request-version] +include-tagged::{doc-tests-file}[{api}-request-cas] -------------------------------------------------- -<1> Version +<1> ifSeqNo +<2> ifPrimaryTerm ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- diff --git a/docs/reference/migration/migrate_7_0/api.asciidoc b/docs/reference/migration/migrate_7_0/api.asciidoc index bb151edb778e2..6c1d03760f904 100644 --- a/docs/reference/migration/migrate_7_0/api.asciidoc +++ b/docs/reference/migration/migrate_7_0/api.asciidoc @@ -2,6 +2,22 @@ [[breaking_70_api_changes]] === API changes +[float] +==== Internal Versioning is no longer supported for optimistic concurrency control + +Elasticsearch maintains a numeric version field for each document it stores. That field +is incremented by one with every change to the document. Until 7.0.0 the API allowed using +that field for optimistic concurrency control, i.e., making a write operation conditional +on the current document version. 
Sadly, that approach is flawed because the value of the +version doesn't always uniquely represent a change to the document. If a primary fails +while handling a write operation, it may expose a version that will then be reused by the +new primary. + +Due to that issue, internal versioning can no longer be used and is replaced by a new +method based on sequence numbers. See <<optimistic-concurrency-control>> for more details. + +Note that the `external` versioning type is still fully supported. + [float] ==== Camel case and underscore parameters deprecated in 6.x have been removed A number of duplicate parameters deprecated in 6.x have been removed from diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AsyncDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AsyncDeleteByQueryAction.java index b317ea06d9f35..f7d8d037fcddb 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AsyncDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AsyncDeleteByQueryAction.java @@ -20,11 +20,9 @@ package org.elasticsearch.index.reindex; import org.apache.logging.log4j.Logger; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.client.ParentTaskAssigningClient; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; @@ -33,18 +31,10 @@ */ public class AsyncDeleteByQueryAction extends AbstractAsyncBulkByScrollAction { - private final boolean useSeqNoForCAS; - public AsyncDeleteByQueryAction(BulkByScrollTask task, Logger logger, ParentTaskAssigningClient client, ThreadPool threadPool, TransportDeleteByQueryAction action, DeleteByQueryRequest request, - ScriptService scriptService, ClusterState clusterState, ActionListener listener) { - super(task, - // not all nodes support sequence number powered optimistic concurrency control, we fall back to version - clusterState.nodes().getMinNodeVersion().onOrAfter(Version.V_6_7_0) == false, - // all nodes support sequence number powered optimistic concurrency control and we can use it - clusterState.nodes().getMinNodeVersion().onOrAfter(Version.V_6_7_0), - logger, client, threadPool, action, request, listener); - useSeqNoForCAS = clusterState.nodes().getMinNodeVersion().onOrAfter(Version.V_6_7_0); + ScriptService scriptService, ActionListener listener) { + super(task, false, true, logger, client, threadPool, action, request, listener); } @Override @@ -60,12 +50,8 @@ protected RequestWrapper buildRequest(ScrollableHitSource.Hit doc delete.index(doc.getIndex()); delete.type(doc.getType()); delete.id(doc.getId()); - if (useSeqNoForCAS) { - delete.setIfSeqNo(doc.getSeqNo()); - delete.setIfPrimaryTerm(doc.getPrimaryTerm()); - } else { - delete.version(doc.getVersion()); - } + delete.setIfSeqNo(doc.getSeqNo()); + delete.setIfPrimaryTerm(doc.getPrimaryTerm()); return wrap(delete); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java index 08538b335535d..d7959f0058974 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java @@ -61,7 +61,7 @@ public void doExecute(Task task, 
DeleteByQueryRequest request, ActionListener buildRequest(ScrollableHitSource.Hit doc) index.type(doc.getType()); index.id(doc.getId()); index.source(doc.getSource(), doc.getXContentType()); - if (useSeqNoForCAS) { - index.setIfSeqNo(doc.getSeqNo()); - index.setIfPrimaryTerm(doc.getPrimaryTerm()); - } else { - index.versionType(VersionType.INTERNAL); - index.version(doc.getVersion()); - } + index.setIfSeqNo(doc.getSeqNo()); + index.setIfPrimaryTerm(doc.getPrimaryTerm()); index.setPipeline(mainRequest.getPipeline()); return wrap(index); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWhileModifyingTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWhileModifyingTests.java index 987830ddd3bc9..1c3456fe20c5f 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWhileModifyingTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWhileModifyingTests.java @@ -67,7 +67,7 @@ public void testUpdateWhileReindexing() throws Exception { IndexRequestBuilder index = client().prepareIndex("test", "test", "test").setSource("test", value.get()) .setRefreshPolicy(IMMEDIATE); /* - * Update by query increments the version number so concurrent + * Update by query changes the document so concurrent * indexes might get version conflict exceptions so we just * blindly retry. */ @@ -75,7 +75,7 @@ public void testUpdateWhileReindexing() throws Exception { while (true) { attempts++; try { - index.setVersion(get.getVersion()).get(); + index.setIfSeqNo(get.getSeqNo()).setIfPrimaryTerm(get.getPrimaryTerm()).get(); break; } catch (VersionConflictEngineException e) { if (attempts >= MAX_ATTEMPTS) { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update.json index 92f1013a317c3..106b29b252ad3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update.json @@ -70,15 +70,6 @@ "if_primary_term" : { "type" : "number", "description" : "only perform the update operation if the last operation that has changed the document has the specified primary term" - }, - "version": { - "type": "number", - "description": "Explicit version number for concurrency control" - }, - "version_type": { - "type": "enum", - "options": ["internal", "force"], - "description": "Specific version type" } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/30_internal_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/30_internal_version.yml deleted file mode 100644 index 52e8e464da094..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/30_internal_version.yml +++ /dev/null @@ -1,36 +0,0 @@ ---- -"Internal version": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: - create: - index: test_1 - id: 1 - body: { foo: bar } - - - match: { _version: 1} - - - do: - catch: conflict - create: - index: test_1 - id: 1 - body: { foo: bar } - ---- -"Internal versioning with explicit version": - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - do: - catch: bad_request - create: - index: test - id: 3 - body: { foo: bar } - version: 5 - - - match: { status: 400 } - - match: { error.type: action_request_validation_exception } - - match: { error.reason: "Validation Failed: 1: create operations do not support 
explicit versions. use index instead;" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/31_internal_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/31_internal_version_with_types.yml deleted file mode 100644 index 83772828bc8f4..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/31_internal_version_with_types.yml +++ /dev/null @@ -1,35 +0,0 @@ ---- -"Internal version": - - - do: - create: - index: test_1 - type: test - id: 1 - body: { foo: bar } - - - match: { _version: 1} - - - do: - catch: conflict - create: - index: test_1 - type: test - id: 1 - body: { foo: bar } - ---- -"Internal versioning with explicit version": - - - do: - catch: bad_request - create: - index: test - type: test - id: 3 - body: { foo: bar } - version: 5 - - - match: { status: 400 } - - match: { error.type: action_request_validation_exception } - - match: { error.reason: "Validation Failed: 1: create operations do not support explicit versions. use index instead;" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/20_internal_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/20_cas.yml similarity index 71% rename from rest-api-spec/src/main/resources/rest-api-spec/test/delete/20_internal_version.yml rename to rest-api-spec/src/main/resources/rest-api-spec/test/delete/20_cas.yml index afe69b4fe82e5..f3c7b0acbcccd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/20_internal_version.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/20_cas.yml @@ -11,19 +11,21 @@ id: 1 body: { foo: bar } - - match: { _version: 1} + - match: { _seq_no: 0 } - do: catch: conflict delete: index: test_1 id: 1 - version: 2 + if_seq_no: 2 + if_primary_term: 1 - do: delete: index: test_1 id: 1 - version: 1 + if_seq_no: 0 + if_primary_term: 1 - - match: { _version: 2 } + - match: { _seq_no: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/21_cas_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/21_cas_with_types.yml new file mode 100644 index 0000000000000..ef352a9bad6b1 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/21_cas_with_types.yml @@ -0,0 +1,30 @@ +--- +"Internal version": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + + - match: { _seq_no: 0 } + + - do: + catch: conflict + delete: + index: test_1 + type: test + id: 1 + if_seq_no: 2 + if_primary_term: 1 + + - do: + delete: + index: test_1 + type: test + id: 1 + if_seq_no: 0 + if_primary_term: 1 + + - match: { _seq_no: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/21_internal_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/21_internal_version_with_types.yml deleted file mode 100644 index 3d9ddb79366f7..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/21_internal_version_with_types.yml +++ /dev/null @@ -1,28 +0,0 @@ ---- -"Internal version": - - - do: - index: - index: test_1 - type: test - id: 1 - body: { foo: bar } - - - match: { _version: 1} - - - do: - catch: conflict - delete: - index: test_1 - type: test - id: 1 - version: 2 - - - do: - delete: - index: test_1 - type: test - id: 1 - version: 1 - - - match: { _version: 2 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/30_internal_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/30_internal_version.yml 
deleted file mode 100644 index adc4f3f4b15c0..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/30_internal_version.yml +++ /dev/null @@ -1,36 +0,0 @@ ---- -"Internal version": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - - do: - index: - index: test_1 - id: 1 - body: { foo: bar } - - match: { _version: 1} - - - do: - index: - index: test_1 - id: 1 - body: { foo: bar } - - match: { _version: 2} - - - do: - catch: conflict - index: - index: test_1 - id: 1 - body: { foo: bar } - version: 1 - - do: - index: - index: test_1 - id: 1 - body: { foo: bar } - version: 2 - - - match: { _version: 3 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/31_internal_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/31_internal_version_with_types.yml deleted file mode 100644 index 1767fbebbf966..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/31_internal_version_with_types.yml +++ /dev/null @@ -1,36 +0,0 @@ ---- -"Internal version": - - - do: - index: - index: test_1 - type: test - id: 1 - body: { foo: bar } - - match: { _version: 1} - - - do: - index: - index: test_1 - type: test - id: 1 - body: { foo: bar } - - match: { _version: 2} - - - do: - catch: conflict - index: - index: test_1 - type: test - id: 1 - body: { foo: bar } - version: 1 - - do: - index: - index: test_1 - type: test - id: 1 - body: { foo: bar } - version: 2 - - - match: { _version: 3 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/30_internal_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/30_internal_version.yml deleted file mode 100644 index 7b474d6bc09dc..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/30_internal_version.yml +++ /dev/null @@ -1,31 +0,0 @@ ---- -"Internal version": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - - do: - catch: missing - update: - index: test_1 - id: 1 - version: 1 - body: - doc: { foo: baz } - - - do: - index: - index: test_1 - id: 1 - body: - doc: { foo: baz } - - - do: - catch: conflict - update: - index: test_1 - id: 1 - version: 2 - body: - doc: { foo: baz } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/31_internal_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/31_internal_version_with_types.yml deleted file mode 100644 index 17c4806c693ac..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/31_internal_version_with_types.yml +++ /dev/null @@ -1,30 +0,0 @@ ---- -"Internal version": - - - do: - catch: missing - update: - index: test_1 - type: test - id: 1 - version: 1 - body: - doc: { foo: baz } - - - do: - index: - index: test_1 - type: test - id: 1 - body: - doc: { foo: baz } - - - do: - catch: conflict - update: - index: test_1 - type: test - id: 1 - version: 2 - body: - doc: { foo: baz } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/35_other_versions.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/35_other_versions.yml deleted file mode 100644 index 9740aa39edeb3..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/35_other_versions.yml +++ /dev/null @@ -1,29 +0,0 @@ ---- -"Not supported versions": - - - skip: - version: " - 6.99.99" - reason: types are required in requests before 7.0.0 - - - do: - catch: /Validation|Invalid/ - update: - index: test_1 - 
id: 1 - version: 2 - version_type: external - body: - doc: { foo: baz } - upsert: { foo: bar } - - - do: - catch: /Validation|Invalid/ - update: - index: test_1 - id: 1 - version: 2 - version_type: external_gte - body: - doc: { foo: baz } - upsert: { foo: bar } - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/36_other_versions_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/36_other_versions_with_types.yml deleted file mode 100644 index c0ec082b91a4f..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/36_other_versions_with_types.yml +++ /dev/null @@ -1,27 +0,0 @@ ---- -"Not supported versions": - - - do: - catch: /Validation|Invalid/ - update: - index: test_1 - type: test - id: 1 - version: 2 - version_type: external - body: - doc: { foo: baz } - upsert: { foo: bar } - - - do: - catch: /Validation|Invalid/ - update: - index: test_1 - type: test - id: 1 - version: 2 - version_type: external_gte - body: - doc: { foo: baz } - upsert: { foo: bar } - diff --git a/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java b/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java index d8a9a3503a617..373dfaa5c7416 100644 --- a/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java +++ b/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java @@ -257,16 +257,23 @@ static void writeDocumentRequest(StreamOutput out, DocWriteRequest request) static ActionRequestValidationException validateSeqNoBasedCASParams( DocWriteRequest request, ActionRequestValidationException validationException) { - if (request.versionType().validateVersionForWrites(request.version()) == false) { - validationException = addValidationError("illegal version value [" + request.version() + "] for version type [" - + request.versionType().name() + "]", validationException); + final long version = request.version(); + final VersionType versionType = request.versionType(); + if (versionType.validateVersionForWrites(version) == false) { + validationException = addValidationError("illegal version value [" + version + "] for version type [" + + versionType.name() + "]", validationException); } - if (request.versionType() == VersionType.FORCE) { + if (versionType == VersionType.FORCE) { validationException = addValidationError("version type [force] may no longer be used", validationException); } + if (versionType == VersionType.INTERNAL && version != Versions.MATCH_ANY && version != Versions.MATCH_DELETED) { + validationException = addValidationError("internal versioning can not be used for optimistic concurrency control. 
" + + "Please use `if_seq_no` and `if_primary_term` instead", validationException); + } + if (request.ifSeqNo() != UNASSIGNED_SEQ_NO && ( - request.versionType() != VersionType.INTERNAL || request.version() != Versions.MATCH_ANY + versionType != VersionType.INTERNAL || version != Versions.MATCH_ANY )) { validationException = addValidationError("compare and write operations can not use versioning", validationException); } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index b5c786ab2df6d..42f569c0a9bda 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -46,9 +46,9 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.rest.action.document.RestBulkAction; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; @@ -501,8 +501,11 @@ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Null .create(true).setPipeline(pipeline).setIfSeqNo(ifSeqNo).setIfPrimaryTerm(ifPrimaryTerm) .source(sliceTrimmingCarriageReturn(data, from, nextMarker, xContentType), xContentType), payload); } else if ("update".equals(action)) { + if (version != Versions.MATCH_ANY || versionType != VersionType.INTERNAL) { + throw new IllegalArgumentException("Update requests do not support versioning. " + + "Please use `if_seq_no` and `if_primary_term` instead"); + } UpdateRequest updateRequest = new UpdateRequest(index, type, id).routing(routing).retryOnConflict(retryOnConflict) - .version(version).versionType(versionType) .setIfSeqNo(ifSeqNo).setIfPrimaryTerm(ifPrimaryTerm) .routing(routing); // EMPTY is safe here because we never call namedObject @@ -516,15 +519,8 @@ public BulkRequest add(BytesReference data, @Nullable String defaultIndex, @Null } IndexRequest upsertRequest = updateRequest.upsertRequest(); if (upsertRequest != null) { - upsertRequest.version(version); - upsertRequest.versionType(versionType); upsertRequest.setPipeline(defaultPipeline); } - IndexRequest doc = updateRequest.doc(); - if (doc != null) { - doc.version(version); - doc.versionType(versionType); - } internalAdd(updateRequest, payload); } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index 4e9c598ba9c00..96c93c974cabb 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -224,7 +224,7 @@ public SearchRequestBuilder setVersion(boolean version) { sourceBuilder().version(version); return this; } - + /** * Should each {@link org.elasticsearch.search.SearchHit} be returned with the * sequence number and primary term of the last modification of the document. 
diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index 8cd6146768fff..54cd38aa0b960 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -70,7 +70,7 @@ public UpdateHelper(ScriptService scriptService) { */ public Result prepare(UpdateRequest request, IndexShard indexShard, LongSupplier nowInMillis) { final GetResult getResult = indexShard.getService().getForUpdate( - request.type(), request.id(), request.version(), request.versionType(), request.ifSeqNo(), request.ifPrimaryTerm()); + request.type(), request.id(), request.ifSeqNo(), request.ifPrimaryTerm()); return prepare(indexShard.shardId(), request, getResult, nowInMillis); } diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java index c85f73d90ec3d..3693975ddab08 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java @@ -108,8 +108,6 @@ public class UpdateRequest extends InstanceShardOperationRequest private FetchSourceContext fetchSourceContext; - private long version = Versions.MATCH_ANY; - private VersionType versionType = VersionType.INTERNAL; private int retryOnConflict = 0; private long ifSeqNo = UNASSIGNED_SEQ_NO; private long ifPrimaryTerm = UNASSIGNED_PRIMARY_TERM; @@ -150,9 +148,6 @@ public UpdateRequest(String index, String type, String id) { @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = super.validate(); - if (version != Versions.MATCH_ANY && upsertRequest != null) { - validationException = addValidationError("can't provide both upsert request and a version", validationException); - } if(upsertRequest != null && upsertRequest.version() != Versions.MATCH_ANY) { validationException = addValidationError("can't provide version in upsert request", validationException); } @@ -163,30 +158,20 @@ public ActionRequestValidationException validate() { validationException = addValidationError("id is missing", validationException); } - if (versionType != VersionType.INTERNAL) { - validationException = addValidationError("version type [" + versionType + "] is not supported by the update API", - validationException); - } else { + validationException = DocWriteRequest.validateSeqNoBasedCASParams(this, validationException); - if (version != Versions.MATCH_ANY && retryOnConflict > 0) { - validationException = addValidationError("can't provide both retry_on_conflict and a specific version", - validationException); + if (ifSeqNo != UNASSIGNED_SEQ_NO) { + if (retryOnConflict > 0) { + validationException = addValidationError("compare and write operations can not be retried", validationException); } - if (!versionType.validateVersionForWrites(version)) { - validationException = addValidationError("illegal version value [" + version + "] for version type [" + - versionType.name() + "]", validationException); + if (docAsUpsert) { + validationException = addValidationError("compare and write operations can not be used with upsert", validationException); + } + if (upsertRequest != null) { + validationException = + addValidationError("upsert requests don't support `if_seq_no` and `if_primary_term`", validationException); } - } - - validationException = 
DocWriteRequest.validateSeqNoBasedCASParams(this, validationException); - - if (ifSeqNo != UNASSIGNED_SEQ_NO && retryOnConflict > 0) { - validationException = addValidationError("compare and write operations can not be retried", validationException); - } - - if (ifSeqNo != UNASSIGNED_SEQ_NO && docAsUpsert) { - validationException = addValidationError("compare and write operations can not be used with upsert", validationException); } if (script == null && doc == null) { @@ -530,24 +515,22 @@ public int retryOnConflict() { @Override public UpdateRequest version(long version) { - this.version = version; - return this; + throw new UnsupportedOperationException("update requests do not support versioning"); } @Override public long version() { - return this.version; + return Versions.MATCH_ANY; } @Override public UpdateRequest versionType(VersionType versionType) { - this.versionType = versionType; - return this; + throw new UnsupportedOperationException("update requests do not support versioning"); } @Override public VersionType versionType() { - return this.versionType; + return VersionType.INTERNAL; } /** @@ -877,8 +860,14 @@ public void readFrom(StreamInput in) throws IOException { upsertRequest.readFrom(in); } docAsUpsert = in.readBoolean(); - version = in.readLong(); - versionType = VersionType.fromValue(in.readByte()); + if (in.getVersion().before(Version.V_7_0_0)) { + long version = in.readLong(); + VersionType versionType = VersionType.readFromStream(in); + if (version != Versions.MATCH_ANY || versionType != VersionType.INTERNAL) { + throw new UnsupportedOperationException( + "versioned update requests have been removed in 7.0. Use if_seq_no and if_primary_term"); + } + } ifSeqNo = in.readZLong(); ifPrimaryTerm = in.readVLong(); detectNoop = in.readBoolean(); @@ -930,8 +919,10 @@ public void writeTo(StreamOutput out) throws IOException { upsertRequest.writeTo(out); } out.writeBoolean(docAsUpsert); - out.writeLong(version); - out.writeByte(versionType.getValue()); + if (out.getVersion().before(Version.V_7_0_0)) { + out.writeLong(Versions.MATCH_ANY); + out.writeByte(VersionType.INTERNAL.getValue()); + } out.writeZLong(ifSeqNo); out.writeVLong(ifPrimaryTerm); out.writeBoolean(detectNoop); diff --git a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java index 9fb1cb804946f..3c85fe40c5ba7 100644 --- a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndVersion; import org.elasticsearch.common.metrics.CounterMetric; import org.elasticsearch.common.metrics.MeanMetric; @@ -102,9 +103,9 @@ private GetResult get(String type, String id, String[] gFields, boolean realtime } } - public GetResult getForUpdate(String type, String id, long version, VersionType versionType, long ifSeqNo, long ifPrimaryTerm) { - return get(type, id, new String[]{RoutingFieldMapper.NAME}, true, version, versionType, ifSeqNo, ifPrimaryTerm, - FetchSourceContext.FETCH_SOURCE, true); + public GetResult getForUpdate(String type, String id, long ifSeqNo, long ifPrimaryTerm) { + return get(type, id, new String[]{RoutingFieldMapper.NAME}, true, + 
Versions.MATCH_ANY, VersionType.INTERNAL, ifSeqNo, ifPrimaryTerm, FetchSourceContext.FETCH_SOURCE, true); } /** diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java index 463a18ea6b802..804fa61fc53b2 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java @@ -83,8 +83,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC } updateRequest.retryOnConflict(request.paramAsInt("retry_on_conflict", updateRequest.retryOnConflict())); - updateRequest.version(RestActions.parseVersion(request)); - updateRequest.versionType(VersionType.fromString(request.param("version_type"), updateRequest.versionType())); updateRequest.setIfSeqNo(request.paramAsLong("if_seq_no", updateRequest.ifSeqNo())); updateRequest.setIfPrimaryTerm(request.paramAsLong("if_primary_term", updateRequest.ifPrimaryTerm())); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 75701e0685290..6d3e4c04c13d7 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -311,7 +311,7 @@ public void testSmileIsSupported() throws IOException { assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } - public void testToValidateUpsertRequestAndVersionInBulkRequest() throws IOException { + public void testToValidateUpsertRequestAndCASInBulkRequest() throws IOException { XContentType xContentType = XContentType.SMILE; BytesReference data; try (BytesStreamOutput out = new BytesStreamOutput()) { @@ -321,7 +321,8 @@ public void testToValidateUpsertRequestAndVersionInBulkRequest() throws IOExcept builder.field("_index", "index"); builder.field("_type", "type"); builder.field("_id", "id"); - builder.field("version", 1L); + builder.field("if_seq_no", 1L); + builder.field("if_primary_term", 100L); builder.endObject(); builder.endObject(); } @@ -330,7 +331,8 @@ public void testToValidateUpsertRequestAndVersionInBulkRequest() throws IOExcept builder.startObject(); builder.startObject("doc").endObject(); Map values = new HashMap<>(); - values.put("version", 2L); + values.put("if_seq_no", 1L); + values.put("if_primary_term", 100L); values.put("_index", "index"); values.put("_type", "type"); builder.field("upsert", values); @@ -341,8 +343,7 @@ public void testToValidateUpsertRequestAndVersionInBulkRequest() throws IOExcept } BulkRequest bulkRequest = new BulkRequest(); bulkRequest.add(data, null, null, xContentType); - assertThat(bulkRequest.validate().validationErrors(), contains("can't provide both upsert request and a version", - "can't provide version in upsert request")); + assertThat(bulkRequest.validate().validationErrors(), contains("upsert requests don't support `if_seq_no` and `if_primary_term`")); //This test's JSON contains outdated references to types assertWarnings(RestBulkAction.TYPES_DEPRECATION_MESSAGE); } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java index 277c130cebb1b..f74137d4a418d 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java +++ 
b/server/src/test/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Requests; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; @@ -195,8 +196,8 @@ public void testBulkUpdateSimple() throws Exception { assertThat(((Number) getResponse.getSource().get("field")).longValue(), equalTo(4L)); } - public void testBulkVersioning() throws Exception { - createIndex("test"); + public void testBulkWithCAS() throws Exception { + createIndex("test", Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).build()); ensureGreen(); BulkResponse bulkResponse = client().prepareBulk() .add(client().prepareIndex("test", "type", "1").setCreate(true).setSource("field", "1")) @@ -204,20 +205,22 @@ public void testBulkVersioning() throws Exception { .add(client().prepareIndex("test", "type", "1").setSource("field", "2")).get(); assertEquals(DocWriteResponse.Result.CREATED, bulkResponse.getItems()[0].getResponse().getResult()); - assertThat(bulkResponse.getItems()[0].getResponse().getVersion(), equalTo(1L)); + assertThat(bulkResponse.getItems()[0].getResponse().getSeqNo(), equalTo(0L)); assertEquals(DocWriteResponse.Result.CREATED, bulkResponse.getItems()[1].getResponse().getResult()); - assertThat(bulkResponse.getItems()[1].getResponse().getVersion(), equalTo(1L)); + assertThat(bulkResponse.getItems()[1].getResponse().getSeqNo(), equalTo(1L)); assertEquals(DocWriteResponse.Result.UPDATED, bulkResponse.getItems()[2].getResponse().getResult()); - assertThat(bulkResponse.getItems()[2].getResponse().getVersion(), equalTo(2L)); + assertThat(bulkResponse.getItems()[2].getResponse().getSeqNo(), equalTo(2L)); bulkResponse = client().prepareBulk() - .add(client().prepareUpdate("test", "type", "1").setVersion(4L).setDoc(Requests.INDEX_CONTENT_TYPE, "field", "2")) + .add(client().prepareUpdate("test", "type", "1").setIfSeqNo(40L).setIfPrimaryTerm(20) + .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "2")) .add(client().prepareUpdate("test", "type", "2").setDoc(Requests.INDEX_CONTENT_TYPE, "field", "2")) - .add(client().prepareUpdate("test", "type", "1").setVersion(2L).setDoc(Requests.INDEX_CONTENT_TYPE, "field", "3")).get(); + .add(client().prepareUpdate("test", "type", "1").setIfSeqNo(2L).setIfPrimaryTerm(1) + .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "3")).get(); assertThat(bulkResponse.getItems()[0].getFailureMessage(), containsString("version conflict")); - assertThat(bulkResponse.getItems()[1].getResponse().getVersion(), equalTo(2L)); - assertThat(bulkResponse.getItems()[2].getResponse().getVersion(), equalTo(3L)); + assertThat(bulkResponse.getItems()[1].getResponse().getSeqNo(), equalTo(3L)); + assertThat(bulkResponse.getItems()[2].getResponse().getSeqNo(), equalTo(4L)); bulkResponse = client().prepareBulk() .add(client().prepareIndex("test", "type", "e1") @@ -237,9 +240,9 @@ public void testBulkVersioning() throws Exception { bulkResponse = client().prepareBulk() .add(client().prepareUpdate("test", "type", "e1") - .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "2").setVersion(10)) // INTERNAL + .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "2").setIfSeqNo(10L).setIfPrimaryTerm(1)) .add(client().prepareUpdate("test", "type", "e1") - .setDoc(Requests.INDEX_CONTENT_TYPE, "field", 
"3").setVersion(13).setVersionType(VersionType.INTERNAL)) + .setDoc(Requests.INDEX_CONTENT_TYPE, "field", "3").setIfSeqNo(20L).setIfPrimaryTerm(1)) .get(); assertThat(bulkResponse.getItems()[0].getFailureMessage(), containsString("version conflict")); @@ -471,7 +474,7 @@ public void testFailingVersionedUpdatedOnBulk() throws Exception { return; } BulkRequestBuilder requestBuilder = client().prepareBulk(); - requestBuilder.add(client().prepareUpdate("test", "type", "1").setVersion(1) + requestBuilder.add(client().prepareUpdate("test", "type", "1").setIfSeqNo(0L).setIfPrimaryTerm(1) .setDoc(Requests.INDEX_CONTENT_TYPE, "field", threadID)); responses[threadID] = requestBuilder.get(); diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index 5a734352eafb2..642d14e2258cb 100644 --- a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -500,12 +500,14 @@ public void testToAndFromXContent() throws IOException { assertToXContentEquivalent(originalBytes, finalBytes, xContentType); } - public void testToValidateUpsertRequestAndVersion() { + public void testToValidateUpsertRequestAndCAS() { UpdateRequest updateRequest = new UpdateRequest("index", "type", "id"); - updateRequest.version(1L); + updateRequest.setIfSeqNo(1L); + updateRequest.setIfPrimaryTerm(1L); updateRequest.doc("{}", XContentType.JSON); updateRequest.upsert(new IndexRequest("index","type", "id")); - assertThat(updateRequest.validate().validationErrors(), contains("can't provide both upsert request and a version")); + assertThat(updateRequest.validate().validationErrors(), + contains("upsert requests don't support `if_seq_no` and `if_primary_term`")); } public void testToValidateUpsertRequestWithVersion() { diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java index 496221ca9fc4e..5492b8bf7c672 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.get.GetResult; @@ -31,7 +30,6 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; -import static org.elasticsearch.common.lucene.uid.Versions.MATCH_ANY; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; @@ -51,8 +49,7 @@ public void testGetForUpdate() throws IOException { recoverShardFromStore(primary); Engine.IndexResult test = indexDoc(primary, "test", "0", "{\"foo\" : \"bar\"}"); assertTrue(primary.getEngine().refreshNeeded()); - GetResult testGet = primary.getService().getForUpdate( - "test", "0", test.getVersion(), VersionType.INTERNAL, UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); + GetResult testGet = primary.getService().getForUpdate("test", "0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); 
assertFalse(testGet.getFields().containsKey(RoutingFieldMapper.NAME)); assertEquals(new String(testGet.source(), StandardCharsets.UTF_8), "{\"foo\" : \"bar\"}"); try (Engine.Searcher searcher = primary.getEngine().acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { @@ -61,8 +58,7 @@ public void testGetForUpdate() throws IOException { Engine.IndexResult test1 = indexDoc(primary, "test", "1", "{\"foo\" : \"baz\"}", XContentType.JSON, "foobar"); assertTrue(primary.getEngine().refreshNeeded()); - GetResult testGet1 = primary.getService().getForUpdate( - "test", "1", test1.getVersion(), VersionType.INTERNAL, UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); + GetResult testGet1 = primary.getService().getForUpdate("test", "1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertEquals(new String(testGet1.source(), StandardCharsets.UTF_8), "{\"foo\" : \"baz\"}"); assertTrue(testGet1.getFields().containsKey(RoutingFieldMapper.NAME)); assertEquals("foobar", testGet1.getFields().get(RoutingFieldMapper.NAME).getValue()); @@ -77,20 +73,19 @@ public void testGetForUpdate() throws IOException { // now again from the reader Engine.IndexResult test2 = indexDoc(primary, "test", "1", "{\"foo\" : \"baz\"}", XContentType.JSON, "foobar"); assertTrue(primary.getEngine().refreshNeeded()); - testGet1 = primary.getService().getForUpdate("test", "1", test2.getVersion(), VersionType.INTERNAL, - UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); + testGet1 = primary.getService().getForUpdate("test", "1", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertEquals(new String(testGet1.source(), StandardCharsets.UTF_8), "{\"foo\" : \"baz\"}"); assertTrue(testGet1.getFields().containsKey(RoutingFieldMapper.NAME)); assertEquals("foobar", testGet1.getFields().get(RoutingFieldMapper.NAME).getValue()); final long primaryTerm = primary.getOperationPrimaryTerm(); - testGet1 = primary.getService().getForUpdate("test", "1", MATCH_ANY, VersionType.INTERNAL, test2.getSeqNo(), primaryTerm); + testGet1 = primary.getService().getForUpdate("test", "1", test2.getSeqNo(), primaryTerm); assertEquals(new String(testGet1.source(), StandardCharsets.UTF_8), "{\"foo\" : \"baz\"}"); expectThrows(VersionConflictEngineException.class, () -> - primary.getService().getForUpdate("test", "1", MATCH_ANY, VersionType.INTERNAL, test2.getSeqNo() + 1, primaryTerm)); + primary.getService().getForUpdate("test", "1", test2.getSeqNo() + 1, primaryTerm)); expectThrows(VersionConflictEngineException.class, () -> - primary.getService().getForUpdate("test", "1", MATCH_ANY, VersionType.INTERNAL, test2.getSeqNo(), primaryTerm + 1)); + primary.getService().getForUpdate("test", "1", test2.getSeqNo(), primaryTerm + 1)); closeShards(primary); } @@ -108,16 +103,13 @@ public void testTypelessGetForUpdate() throws IOException { Engine.IndexResult indexResult = indexDoc(shard, "some_type", "0", "{\"foo\" : \"bar\"}"); assertTrue(indexResult.isCreated()); - GetResult getResult = shard.getService().getForUpdate( - "some_type", "0", MATCH_ANY, VersionType.INTERNAL, UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); + GetResult getResult = shard.getService().getForUpdate("some_type", "0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertTrue(getResult.isExists()); - getResult = shard.getService().getForUpdate( - "some_other_type", "0", MATCH_ANY, VersionType.INTERNAL, UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); + getResult = shard.getService().getForUpdate("some_other_type", "0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertFalse(getResult.isExists()); - getResult = 
shard.getService().getForUpdate( - "_doc", "0", MATCH_ANY, VersionType.INTERNAL, UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); + getResult = shard.getService().getForUpdate("_doc", "0", UNASSIGNED_SEQ_NO, UNASSIGNED_PRIMARY_TERM); assertTrue(getResult.isExists()); closeShards(shard); diff --git a/server/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java b/server/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java index fb3eac28b6793..d749ce367cf0b 100644 --- a/server/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java +++ b/server/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; +import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Setting; @@ -436,17 +437,20 @@ public void testOpenCloseUpdateSettings() throws Exception { public void testEngineGCDeletesSetting() throws InterruptedException { createIndex("test"); - client().prepareIndex("test", "type", "1").setSource("f", 1).get(); // set version to 1 - client().prepareDelete("test", "type", "1").get(); // sets version to 2 - // delete is still in cache this should work & set version to 3 - client().prepareIndex("test", "type", "1").setSource("f", 2).setVersion(2).get(); + client().prepareIndex("test", "type", "1").setSource("f", 1).get(); + DeleteResponse response = client().prepareDelete("test", "type", "1").get(); + long seqNo = response.getSeqNo(); + long primaryTerm = response.getPrimaryTerm(); + // delete is still in cache this should work + client().prepareIndex("test", "type", "1").setSource("f", 2).setIfSeqNo(seqNo).setIfPrimaryTerm(primaryTerm).get(); client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.gc_deletes", 0)).get(); - client().prepareDelete("test", "type", "1").get(); // sets version to 4 + response = client().prepareDelete("test", "type", "1").get(); + seqNo = response.getSeqNo(); Thread.sleep(300); // wait for cache time to change TODO: this needs to be solved better. To be discussed. 
// delete is should not be in cache - assertThrows(client().prepareIndex("test", "type", "1").setSource("f", 3) - .setVersion(4), VersionConflictEngineException.class); + assertThrows(client().prepareIndex("test", "type", "1").setSource("f", 3).setIfSeqNo(seqNo).setIfPrimaryTerm(primaryTerm), + VersionConflictEngineException.class); } diff --git a/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java b/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java index f562ace967820..ffd876383ad9e 100644 --- a/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java +++ b/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java @@ -213,11 +213,11 @@ public void testRequireUnitsOnUpdateSettings() throws Exception { } } - public void testInternalVersioningInitialDelete() throws Exception { + public void testCompareAndSetInitialDelete() throws Exception { createIndex("test"); ensureGreen(); - assertThrows(client().prepareDelete("test", "type", "1").setVersion(17).execute(), + assertThrows(client().prepareDelete("test", "type", "1").setIfSeqNo(17).setIfPrimaryTerm(10).execute(), VersionConflictEngineException.class); IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1") @@ -225,63 +225,6 @@ public void testInternalVersioningInitialDelete() throws Exception { assertThat(indexResponse.getVersion(), equalTo(1L)); } - public void testInternalVersioning() throws Exception { - createIndex("test"); - ensureGreen(); - - IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").execute().actionGet(); - assertThat(indexResponse.getVersion(), equalTo(1L)); - - indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").setVersion(1).execute().actionGet(); - assertThat(indexResponse.getVersion(), equalTo(2L)); - - assertThrows( - client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(1).execute(), - VersionConflictEngineException.class); - - assertThrows( - client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(1).execute(), - VersionConflictEngineException.class); - - assertThrows( - client().prepareIndex("test", "type", "1").setCreate(true).setSource("field1", "value1_1").execute(), - VersionConflictEngineException.class); - - - assertThrows(client().prepareDelete("test", "type", "1").setVersion(1).execute(), VersionConflictEngineException.class); - assertThrows(client().prepareDelete("test", "type", "1").setVersion(1).execute(), VersionConflictEngineException.class); - - client().admin().indices().prepareRefresh().execute().actionGet(); - for (int i = 0; i < 10; i++) { - assertThat(client().prepareGet("test", "type", "1").execute().actionGet().getVersion(), equalTo(2L)); - } - - // search with versioning - for (int i = 0; i < 10; i++) { - SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setVersion(true).execute().actionGet(); - assertThat(searchResponse.getHits().getAt(0).getVersion(), equalTo(2L)); - } - - // search without versioning - for (int i = 0; i < 10; i++) { - SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).execute().actionGet(); - assertThat(searchResponse.getHits().getAt(0).getVersion(), equalTo(Versions.NOT_FOUND)); - } - - DeleteResponse deleteResponse = client().prepareDelete("test", "type", "1").setVersion(2).execute().actionGet(); - 
assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); - assertThat(deleteResponse.getVersion(), equalTo(3L)); - - assertThrows(client().prepareDelete("test", "type", "1").setVersion(2).execute(), VersionConflictEngineException.class); - - - // This is intricate - the object was deleted but a delete transaction was with the right version. We add another one - // and thus the transaction is increased. - deleteResponse = client().prepareDelete("test", "type", "1").setVersion(3).execute().actionGet(); - assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult()); - assertThat(deleteResponse.getVersion(), equalTo(4L)); - } - public void testCompareAndSet() { createIndex("test"); ensureGreen(); @@ -290,7 +233,7 @@ public void testCompareAndSet() { assertThat(indexResponse.getSeqNo(), equalTo(0L)); assertThat(indexResponse.getPrimaryTerm(), equalTo(1L)); - indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").setVersion(1).execute().actionGet(); + indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").setIfSeqNo(0L).setIfPrimaryTerm(1).get(); assertThat(indexResponse.getSeqNo(), equalTo(1L)); assertThat(indexResponse.getPrimaryTerm(), equalTo(1L)); @@ -353,25 +296,21 @@ public void testSimpleVersioningWithFlush() throws Exception { createIndex("test"); ensureGreen(); - IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").execute().actionGet(); - assertThat(indexResponse.getVersion(), equalTo(1L)); + IndexResponse indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").get(); + assertThat(indexResponse.getSeqNo(), equalTo(0L)); client().admin().indices().prepareFlush().execute().actionGet(); - indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").setVersion(1).execute().actionGet(); - assertThat(indexResponse.getVersion(), equalTo(2L)); + indexResponse = client().prepareIndex("test", "type", "1").setSource("field1", "value1_2").setIfSeqNo(0).setIfPrimaryTerm(1).get(); + assertThat(indexResponse.getSeqNo(), equalTo(1L)); client().admin().indices().prepareFlush().execute().actionGet(); - assertThrows(client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(1).execute(), - VersionConflictEngineException.class); - - assertThrows(client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setVersion(1).execute(), + assertThrows(client().prepareIndex("test", "type", "1").setSource("field1", "value1_1").setIfSeqNo(0).setIfPrimaryTerm(1), VersionConflictEngineException.class); - assertThrows(client().prepareIndex("test", "type", "1").setCreate(true).setSource("field1", "value1_1").execute(), + assertThrows(client().prepareIndex("test", "type", "1").setCreate(true).setSource("field1", "value1_1"), VersionConflictEngineException.class); - assertThrows(client().prepareDelete("test", "type", "1").setVersion(1).execute(), VersionConflictEngineException.class); - assertThrows(client().prepareDelete("test", "type", "1").setVersion(1).execute(), VersionConflictEngineException.class); + assertThrows(client().prepareDelete("test", "type", "1").setIfSeqNo(0).setIfPrimaryTerm(1), VersionConflictEngineException.class); for (int i = 0; i < 10; i++) { assertThat(client().prepareGet("test", "type", "1").execute().actionGet().getVersion(), equalTo(2L)); @@ -380,10 +319,11 @@ public void testSimpleVersioningWithFlush() throws Exception { 
client().admin().indices().prepareRefresh().execute().actionGet(); for (int i = 0; i < 10; i++) { - SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setVersion(true). + SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setVersion(true).seqNoAndPrimaryTerm(true). execute().actionGet(); assertHitCount(searchResponse, 1); assertThat(searchResponse.getHits().getAt(0).getVersion(), equalTo(2L)); + assertThat(searchResponse.getHits().getAt(0).getSeqNo(), equalTo(1L)); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java index bfe0cdef41596..ac4f435da130d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java @@ -495,7 +495,7 @@ public void onTimeout(TimeValue timeout) { private void clearJobFinishedTime(String jobId, ActionListener listener) { JobUpdate update = new JobUpdate.Builder(jobId).setClearFinishTime(true).build(); - jobConfigProvider.updateJob(jobId, update, null, clusterService.state().nodes().getMinNodeVersion(), ActionListener.wrap( + jobConfigProvider.updateJob(jobId, update, null, ActionListener.wrap( job -> listener.onResponse(new AcknowledgedResponse(true)), e -> { logger.error("[" + jobId + "] Failed to clear finished_time", e); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java index 443e66b81e785..09a8f219afcf4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java @@ -87,7 +87,7 @@ protected void masterOperation(UpdateDatafeedAction.Request request, ClusterStat CheckedConsumer updateConsumer = ok -> { datafeedConfigProvider.updateDatefeedConfig(request.getUpdate().getId(), request.getUpdate(), headers, - jobConfigProvider::validateDatafeedJob, clusterService.state().nodes().getMinNodeVersion(), + jobConfigProvider::validateDatafeedJob, ActionListener.wrap( updatedConfig -> listener.onResponse(new PutDatafeedAction.Response(updatedConfig)), listener::onFailure diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java index d8d5fe216b2a4..fe5ae7eb6e8bf 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.ml.action; import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.get.GetAction; import org.elasticsearch.action.get.GetRequest; @@ -54,7 +53,6 @@ public class TransportUpdateFilterAction extends HandledTransportAction) UpdateFilterAction.Request::new); this.client = client; this.jobManager = jobManager; - this.clusterService = clusterService; } @Override protected void doExecute(Task task, UpdateFilterAction.Request request, 
ActionListener listener) { - ActionListener filterListener = ActionListener.wrap(filterWithVersion -> { + ActionListener filterListener = ActionListener.wrap(filterWithVersion -> { updateFilter(filterWithVersion, request, listener); }, listener::onFailure); getFilterWithVersion(request.getFilterId(), filterListener); } - private void updateFilter(FilterWithVersion filterWithVersion, UpdateFilterAction.Request request, + private void updateFilter(FilterWithSeqNo filterWithVersion, UpdateFilterAction.Request request, ActionListener listener) { MlFilter filter = filterWithVersion.filter; @@ -100,19 +97,15 @@ private void updateFilter(FilterWithVersion filterWithVersion, UpdateFilterActio MlFilter updatedFilter = MlFilter.builder(filter.getId()).setDescription(description).setItems(items).build(); indexUpdatedFilter( - updatedFilter, filterWithVersion.version, filterWithVersion.seqNo, filterWithVersion.primaryTerm, request, listener); + updatedFilter, filterWithVersion.seqNo, filterWithVersion.primaryTerm, request, listener); } - private void indexUpdatedFilter(MlFilter filter, final long version, final long seqNo, final long primaryTerm, + private void indexUpdatedFilter(MlFilter filter, final long seqNo, final long primaryTerm, UpdateFilterAction.Request request, ActionListener listener) { IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, filter.documentId()); - if (clusterService.state().nodes().getMinNodeVersion().onOrAfter(Version.V_6_7_0)) { - indexRequest.setIfSeqNo(seqNo); - indexRequest.setIfPrimaryTerm(primaryTerm); - } else { - indexRequest.version(version); - } + indexRequest.setIfSeqNo(seqNo); + indexRequest.setIfPrimaryTerm(primaryTerm); indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); try (XContentBuilder builder = XContentFactory.jsonBuilder()) { @@ -145,7 +138,7 @@ public void onFailure(Exception e) { }); } - private void getFilterWithVersion(String filterId, ActionListener listener) { + private void getFilterWithVersion(String filterId, ActionListener listener) { GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, MlFilter.documentId(filterId)); executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener() { @Override @@ -157,7 +150,7 @@ public void onResponse(GetResponse getDocResponse) { XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { MlFilter filter = MlFilter.LENIENT_PARSER.apply(parser, null).build(); - listener.onResponse(new FilterWithVersion(filter, getDocResponse)); + listener.onResponse(new FilterWithSeqNo(filter, getDocResponse)); } } else { this.onFailure(new ResourceNotFoundException(Messages.getMessage(Messages.FILTER_NOT_FOUND, filterId))); @@ -174,16 +167,14 @@ public void onFailure(Exception e) { }); } - private static class FilterWithVersion { + private static class FilterWithSeqNo { private final MlFilter filter; - private final long version; private final long seqNo; private final long primaryTerm; - private FilterWithVersion(MlFilter filter, GetResponse getDocResponse) { + private FilterWithSeqNo(MlFilter filter, GetResponse getDocResponse) { this.filter = filter; - this.version = getDocResponse.getVersion(); this.seqNo = getDocResponse.getSeqNo(); this.primaryTerm = getDocResponse.getPrimaryTerm(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java index 7d11173e258b1..7237ab0eb9818 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java @@ -8,7 +8,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; @@ -264,13 +263,11 @@ public void onFailure(Exception e) { * @param headers Datafeed headers applied with the update * @param validator BiConsumer that accepts the updated config and can perform * extra validations. {@code validator} must call the passed listener - * @param minClusterNodeVersion minimum version of nodes in cluster * @param updatedConfigListener Updated datafeed config listener */ public void updateDatefeedConfig(String datafeedId, DatafeedUpdate update, Map headers, - BiConsumer> validator, - Version minClusterNodeVersion, - ActionListener updatedConfigListener) { + BiConsumer> validator, + ActionListener updatedConfigListener) { GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), ElasticsearchMappings.DOC_TYPE, DatafeedConfig.documentId(datafeedId)); @@ -304,7 +301,7 @@ public void onResponse(GetResponse getResponse) { ActionListener validatedListener = ActionListener.wrap( ok -> { - indexUpdatedConfig(updatedConfig, version, seqNo, primaryTerm, minClusterNodeVersion, ActionListener.wrap( + indexUpdatedConfig(updatedConfig, seqNo, primaryTerm, ActionListener.wrap( indexResponse -> { assert indexResponse.getResult() == DocWriteResponse.Result.UPDATED; updatedConfigListener.onResponse(updatedConfig); @@ -324,8 +321,8 @@ public void onFailure(Exception e) { }); } - private void indexUpdatedConfig(DatafeedConfig updatedConfig, long version, long seqNo, long primaryTerm, - Version minClusterNodeVersion, ActionListener listener) { + private void indexUpdatedConfig(DatafeedConfig updatedConfig, long seqNo, long primaryTerm, + ActionListener listener) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) { XContentBuilder updatedSource = updatedConfig.toXContent(builder, new ToXContent.MapParams(TO_XCONTENT_PARAMS)); IndexRequestBuilder indexRequest = client.prepareIndex(AnomalyDetectorsIndex.configIndexName(), @@ -333,12 +330,8 @@ private void indexUpdatedConfig(DatafeedConfig updatedConfig, long version, long .setSource(updatedSource) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - if (minClusterNodeVersion.onOrAfter(Version.V_6_7_0)) { - indexRequest.setIfSeqNo(seqNo); - indexRequest.setIfPrimaryTerm(primaryTerm); - } else { - indexRequest.setVersion(version); - } + indexRequest.setIfSeqNo(seqNo); + indexRequest.setIfPrimaryTerm(primaryTerm); executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest.request(), listener); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index 6696bfe1ad96a..ccd0d594eb382 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -333,7 +333,7 
@@ public void updateJob(UpdateJobAction.Request request, ActionListener { jobConfigProvider.updateJobWithValidation(request.getJobId(), request.getJobUpdate(), maxModelMemoryLimit, - this::validate, clusterService.state().nodes().getMinNodeVersion(), ActionListener.wrap( + this::validate, ActionListener.wrap( updatedJob -> postJobUpdate(request, updatedJob, actionListener), actionListener::onFailure )); @@ -603,7 +603,7 @@ public void revertSnapshot(RevertModelSnapshotAction.Request request, ActionList .setModelSnapshotId(modelSnapshot.getSnapshotId()) .build(); - jobConfigProvider.updateJob(request.getJobId(), update, maxModelMemoryLimit, clusterService.state().nodes().getMinNodeVersion(), + jobConfigProvider.updateJob(request.getJobId(), update, maxModelMemoryLimit, ActionListener.wrap(job -> { auditor.info(request.getJobId(), Messages.getMessage(Messages.JOB_AUDIT_REVERTED, modelSnapshot.getDescription())); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java index e5ee8855969a3..9423768b8ed4f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java @@ -10,7 +10,6 @@ import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; @@ -227,11 +226,9 @@ public void onFailure(Exception e) { * @param maxModelMemoryLimit The maximum model memory allowed. This can be {@code null} * if the job's {@link org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits} * are not changed. - * @param minClusterNodeVersion the minimum version of nodes in the cluster * @param updatedJobListener Updated job listener */ public void updateJob(String jobId, JobUpdate update, ByteSizeValue maxModelMemoryLimit, - Version minClusterNodeVersion, ActionListener updatedJobListener) { GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), ElasticsearchMappings.DOC_TYPE, Job.documentId(jobId)); @@ -266,7 +263,7 @@ public void onResponse(GetResponse getResponse) { return; } - indexUpdatedJob(updatedJob, version, seqNo, primaryTerm, minClusterNodeVersion, updatedJobListener); + indexUpdatedJob(updatedJob, seqNo, primaryTerm, updatedJobListener); } @Override @@ -287,18 +284,17 @@ public interface UpdateValidator { } /** - * Similar to {@link #updateJob(String, JobUpdate, ByteSizeValue, Version, ActionListener)} but + * Similar to {@link #updateJob(String, JobUpdate, ByteSizeValue, ActionListener)} but * with an extra validation step which is called before the updated is applied. 
* * @param jobId The Id of the job to update * @param update The job update * @param maxModelMemoryLimit The maximum model memory allowed * @param validator The job update validator - * @param minClusterNodeVersion the minimum version of a node ifn the cluster * @param updatedJobListener Updated job listener */ public void updateJobWithValidation(String jobId, JobUpdate update, ByteSizeValue maxModelMemoryLimit, - UpdateValidator validator, Version minClusterNodeVersion, ActionListener updatedJobListener) { + UpdateValidator validator, ActionListener updatedJobListener) { GetRequest getRequest = new GetRequest(AnomalyDetectorsIndex.configIndexName(), ElasticsearchMappings.DOC_TYPE, Job.documentId(jobId)); @@ -334,7 +330,7 @@ public void onResponse(GetResponse getResponse) { return; } - indexUpdatedJob(updatedJob, version, seqNo, primaryTerm, minClusterNodeVersion, updatedJobListener); + indexUpdatedJob(updatedJob, seqNo, primaryTerm, updatedJobListener); }, updatedJobListener::onFailure )); @@ -347,7 +343,7 @@ public void onFailure(Exception e) { }); } - private void indexUpdatedJob(Job updatedJob, long version, long seqNo, long primaryTerm, Version minClusterNodeVersion, + private void indexUpdatedJob(Job updatedJob, long seqNo, long primaryTerm, ActionListener updatedJobListener) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) { XContentBuilder updatedSource = updatedJob.toXContent(builder, ToXContent.EMPTY_PARAMS); @@ -355,12 +351,8 @@ private void indexUpdatedJob(Job updatedJob, long version, long seqNo, long prim ElasticsearchMappings.DOC_TYPE, Job.documentId(updatedJob.getId())) .setSource(updatedSource) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - if (minClusterNodeVersion.onOrAfter(Version.V_6_7_0)) { - indexRequest.setIfSeqNo(seqNo); - indexRequest.setIfPrimaryTerm(primaryTerm); - } else { - indexRequest.setVersion(version); - } + indexRequest.setIfSeqNo(seqNo); + indexRequest.setIfPrimaryTerm(primaryTerm); executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest.request(), ActionListener.wrap( indexResponse -> { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedConfigProviderIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedConfigProviderIT.java index 00d62b7e0a933..9496f4ca0d8f2 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedConfigProviderIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedConfigProviderIT.java @@ -7,7 +7,6 @@ import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteResponse; @@ -87,7 +86,7 @@ public void testCrud() throws InterruptedException { AtomicReference configHolder = new AtomicReference<>(); blockingCall(actionListener -> datafeedConfigProvider.updateDatefeedConfig(datafeedId, update.build(), updateHeaders, - (updatedConfig, listener) -> listener.onResponse(Boolean.TRUE), Version.CURRENT, actionListener), + (updatedConfig, listener) -> listener.onResponse(Boolean.TRUE), actionListener), configHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertThat(configHolder.get().getIndices(), equalTo(updateIndices)); @@ -168,7 +167,7 @@ public void testUpdateWhenApplyingTheUpdateThrows() throws Exception { 
AtomicReference configHolder = new AtomicReference<>(); blockingCall(actionListener -> datafeedConfigProvider.updateDatefeedConfig(datafeedId, update.build(), Collections.emptyMap(), - (updatedConfig, listener) -> listener.onResponse(Boolean.TRUE), Version.CURRENT, actionListener), + (updatedConfig, listener) -> listener.onResponse(Boolean.TRUE), actionListener), configHolder, exceptionHolder); assertNull(configHolder.get()); assertNotNull(exceptionHolder.get()); @@ -194,7 +193,7 @@ public void testUpdateWithValidatorFunctionThatErrors() throws Exception { AtomicReference exceptionHolder = new AtomicReference<>(); blockingCall(actionListener -> datafeedConfigProvider.updateDatefeedConfig(datafeedId, update.build(), Collections.emptyMap(), - validateErrorFunction, Version.CURRENT, actionListener), + validateErrorFunction, actionListener), configHolder, exceptionHolder); assertNull(configHolder.get()); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobConfigProviderIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobConfigProviderIT.java index 3e20bdd73de07..f6ff80edeec02 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobConfigProviderIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/JobConfigProviderIT.java @@ -8,7 +8,6 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.Version; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexResponse; @@ -149,7 +148,7 @@ public void testCrud() throws InterruptedException { AtomicReference updateJobResponseHolder = new AtomicReference<>(); blockingCall(actionListener -> jobConfigProvider.updateJob - (jobId, jobUpdate, new ByteSizeValue(32), Version.CURRENT, actionListener), updateJobResponseHolder, exceptionHolder); + (jobId, jobUpdate, new ByteSizeValue(32), actionListener), updateJobResponseHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertEquals("This job has been updated", updateJobResponseHolder.get().getDescription()); @@ -206,7 +205,7 @@ public void testUpdateWithAValidationError() throws Exception { .build(); AtomicReference updateJobResponseHolder = new AtomicReference<>(); - blockingCall(actionListener -> jobConfigProvider.updateJob(jobId, invalidUpdate, new ByteSizeValue(32), Version.CURRENT, + blockingCall(actionListener -> jobConfigProvider.updateJob(jobId, invalidUpdate, new ByteSizeValue(32), actionListener), updateJobResponseHolder, exceptionHolder); assertNull(updateJobResponseHolder.get()); assertNotNull(exceptionHolder.get()); @@ -231,7 +230,7 @@ public void testUpdateWithValidator() throws Exception { AtomicReference updateJobResponseHolder = new AtomicReference<>(); // update with the no-op validator blockingCall(actionListener -> jobConfigProvider.updateJobWithValidation( - jobId, jobUpdate, new ByteSizeValue(32), validator, Version.CURRENT, actionListener), updateJobResponseHolder, exceptionHolder); + jobId, jobUpdate, new ByteSizeValue(32), validator, actionListener), updateJobResponseHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertNotNull(updateJobResponseHolder.get()); @@ -244,7 +243,7 @@ public void testUpdateWithValidator() throws Exception { updateJobResponseHolder.set(null); // Update with a validator that errors 
blockingCall(actionListener -> jobConfigProvider.updateJobWithValidation(jobId, jobUpdate, new ByteSizeValue(32), - validatorWithAnError, Version.CURRENT, actionListener), + validatorWithAnError, actionListener), updateJobResponseHolder, exceptionHolder); assertNull(updateJobResponseHolder.get()); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 522b1a6d4b97a..8b9fda5b9c3f3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -750,12 +750,8 @@ private void innerRefresh(String tokenDocId, Authentication userAuth, ActionList client.prepareUpdate(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId) .setDoc("refresh_token", Collections.singletonMap("refreshed", true)) .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL); - if (clusterService.state().nodes().getMinNodeVersion().onOrAfter(Version.V_6_7_0)) { - updateRequest.setIfSeqNo(response.getSeqNo()); - updateRequest.setIfPrimaryTerm(response.getPrimaryTerm()); - } else { - updateRequest.setVersion(response.getVersion()); - } + updateRequest.setIfSeqNo(response.getSeqNo()); + updateRequest.setIfPrimaryTerm(response.getPrimaryTerm()); executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, updateRequest.request(), ActionListener.wrap( updateResponse -> createUserToken(authentication, userAuth, listener, metadata, true), diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/put_watch/80_put_get_watch_with_passwords.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/put_watch/80_put_get_watch_with_passwords.yml index 74b3a20f5cff8..ebef6c87d7022 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/put_watch/80_put_get_watch_with_passwords.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/put_watch/80_put_get_watch_with_passwords.yml @@ -115,112 +115,6 @@ setup: } ---- -"Test putting a watch with a redacted password with old version returns an error": - - # version 1 - - do: - xpack.watcher.put_watch: - id: "watch_old_version" - body: > - { - "trigger": { - "schedule" : { "cron" : "0 0 0 1 * ? 2099" } - }, - "input": { - "http" : { - "request" : { - "host" : "host.domain", - "port" : 9200, - "path" : "/myservice", - "auth" : { - "basic" : { - "username" : "user", - "password" : "pass" - } - } - } - } - }, - "actions": { - "logging": { - "logging": { - "text": "Log me Amadeus!" - } - } - } - } - - # version 2 - - do: - xpack.watcher.put_watch: - id: "watch_old_version" - body: > - { - "trigger": { - "schedule" : { "cron" : "0 0 0 1 * ? 2099" } - }, - "input": { - "http" : { - "request" : { - "host" : "host.domain", - "port" : 9200, - "path" : "/myservice", - "auth" : { - "basic" : { - "username" : "user", - "password" : "pass" - } - } - } - } - }, - "actions": { - "logging": { - "logging": { - "text": "Log me Amadeus!" - } - } - } - } - - - # using optimistic concurrency control, this one will loose - # as if two users in the watch UI tried to update the same watch - - do: - catch: conflict - xpack.watcher.put_watch: - id: "watch_old_version" - version: 1 - body: > - { - "trigger": { - "schedule" : { "cron" : "0 0 0 1 * ? 
2099" } - }, - "input": { - "http" : { - "request" : { - "host" : "host.domain", - "port" : 9200, - "path" : "/myservice", - "auth" : { - "basic" : { - "username" : "user", - "password" : "::es_redacted::" - } - } - } - } - }, - "actions": { - "logging": { - "logging": { - "text": "Log me Amadeus!" - } - } - } - } - --- "Test putting a watch with a redacted password with old seq no returns an error": - skip: @@ -390,98 +284,3 @@ setup: - match: { hits.hits.0._source.input.http.request.auth.basic.username: "new_user" } - match: { hits.hits.0._source.input.http.request.auth.basic.password: "pass" } ---- -"Test putting a watch with a redacted password with current version works": - - - do: - xpack.watcher.put_watch: - id: "my_watch_with_version" - body: > - { - "trigger": { - "schedule" : { "cron" : "0 0 0 1 * ? 2099" } - }, - "input": { - "http" : { - "request" : { - "host" : "host.domain", - "port" : 9200, - "path" : "/myservice", - "auth" : { - "basic" : { - "username" : "user", - "password" : "pass" - } - } - } - } - }, - "actions": { - "logging": { - "logging": { - "text": "Log me Amadeus!" - } - } - } - } - - - match: { _id: "my_watch_with_version" } - - match: { _version: 1 } - - # this resembles the exact update from the UI and thus should work, no password change, any change in the watch - # but correct version provided - - do: - xpack.watcher.put_watch: - id: "my_watch_with_version" - version: 1 - body: > - { - "trigger": { - "schedule" : { "cron" : "0 0 0 1 * ? 2099" } - }, - "input": { - "http" : { - "request" : { - "host" : "host.domain", - "port" : 9200, - "path" : "/myservice", - "auth" : { - "basic" : { - "username" : "user", - "password" : "::es_redacted::" - } - } - } - } - }, - "actions": { - "logging": { - "logging": { - "text": "Log me Amadeus!" 
- } - } - } - } - - - match: { _id: "my_watch_with_version" } - - match: { _version: 2 } - - - do: - search: - rest_total_hits_as_int: true - index: .watches - body: > - { - "query": { - "term": { - "_id": { - "value": "my_watch_with_version" - } - } - } - } - - - match: { hits.total: 1 } - - match: { hits.hits.0._id: "my_watch_with_version" } - - match: { hits.hits.0._source.input.http.request.auth.basic.password: "pass" } - From 8ad9a07b87432f3f8586f974e9a74f9ab25bd209 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Tue, 5 Feb 2019 21:27:28 +0100 Subject: [PATCH 02/19] CRUDDocumentationIT fix documentation references --- .../elasticsearch/client/documentation/CRUDDocumentationIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 1af9593e77f87..fe003d691a830 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -503,7 +503,7 @@ public void testUpdate() throws Exception { // tag::update-request-cas request.setIfSeqNo(2L); // <1> request.setIfPrimaryTerm(1L); // <2> - // end::update-request-request-cas + // end::update-request-cas // tag::update-request-detect-noop request.detectNoop(false); // <1> // end::update-request-detect-noop From 7ca5495d86093cd4673f7a96f6752406ac82cfda Mon Sep 17 00:00:00 2001 From: Jay Modi Date: Tue, 5 Feb 2019 13:39:29 -0700 Subject: [PATCH 03/19] Allow custom authorization with an authorization engine (#38358) For some users, the built in authorization mechanism does not fit their needs and no feature that we offer would allow them to control the authorization process to meet their needs. In order to support this, a concept of an AuthorizationEngine is being introduced, which can be provided using the security extension mechanism. An AuthorizationEngine is responsible for making the authorization decisions about a request. The engine is responsible for knowing how to authorize and can be backed by whatever mechanism a user wants. The default mechanism is one backed by roles to provide the authorization decisions. The AuthorizationEngine will be called by the AuthorizationService, which handles more of the internal workings that apply in general to authorization within Elasticsearch. In order to support external authorization services that would back an authorization engine, the entire authorization process has become asynchronous, which also includes all calls to the AuthorizationEngine. The use of roles also leaked out of the AuthorizationService in our existing code that is not specifically related to roles so this also needed to be addressed. RequestInterceptor instances sometimes used a role to ensure a user was not attempting to escalate their privileges. Addressing this leakage of roles meant that the RequestInterceptor execution needed to move within the AuthorizationService and that AuthorizationEngines needed to support detection of whether a user has more privileges on a name than another. The second area where roles leaked to the user is in the handling of a few privilege APIs that could be used to retrieve the user's privileges or ask if a user has privileges to perform an action. 
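For orientation, a minimal sketch of how an engine is supplied through the SecurityExtension SPI, mirroring the example plugin this change adds (the extension class is registered in META-INF/services/org.elasticsearch.xpack.core.security.SecurityExtension, and CustomAuthorizationEngine is the example implementation shipped with that plugin):

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.xpack.core.security.SecurityExtension;
    import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine;

    public class ExampleAuthorizationEngineExtension implements SecurityExtension {
        @Override
        public AuthorizationEngine getAuthorizationEngine(Settings settings) {
            // the returned engine makes the authorization decisions for requests
            return new CustomAuthorizationEngine();
        }
    }
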
To remove the leakage of roles from these actions, the AuthorizationService and AuthorizationEngine gained methods that enabled an AuthorizationEngine to return the response for these APIs. Ultimately this feature is the work included in: #37785 #37495 #37328 #36245 #38137 #38219 Closes #32435 --- build.gradle | 3 + .../build.gradle | 46 + .../example/AuthorizationEnginePlugin.java | 30 + .../example/CustomAuthorizationEngine.java | 238 +++++ .../ExampleAuthorizationEngineExtension.java | 35 + ...arch.xpack.core.security.SecurityExtension | 1 + .../example/CustomAuthorizationEngineIT.java | 163 ++++ .../CustomAuthorizationEngineTests.java | 188 ++++ x-pack/docs/build.gradle | 1 - ...asciidoc => custom-authorization.asciidoc} | 91 +- .../authorization/managing-roles.asciidoc | 2 +- .../license/XPackLicenseState.java | 20 +- .../core/security/SecurityExtension.java | 13 + .../security/authz/AuthorizationEngine.java | 338 +++++++ .../core/security/authz/ResolvedIndices.java | 111 +++ .../accesscontrol/IndicesAccessControl.java | 1 + .../authz/permission/IndicesPermission.java | 7 +- .../authz/permission/LimitedRole.java | 49 +- .../core/security/authz/permission/Role.java | 28 +- .../core/security/support/Exceptions.java | 6 +- .../license/XPackLicenseStateTests.java | 6 +- .../authz/permission/LimitedRoleTests.java | 24 +- .../authz/store/ReservedRolesStoreTests.java | 18 +- .../xpack/security/Security.java | 73 +- .../action/TransportCreateApiKeyAction.java | 12 +- .../action/filter/SecurityActionFilter.java | 23 +- ...cumentLevelSecurityRequestInterceptor.java | 61 -- .../IndicesAliasesRequestInterceptor.java | 90 -- .../interceptor/RequestInterceptor.java | 28 - .../interceptor/ResizeRequestInterceptor.java | 75 -- .../TransportGetUserPrivilegesAction.java | 85 +- .../user/TransportHasPrivilegesAction.java | 66 +- .../xpack/security/audit/AuditTrail.java | 16 +- .../security/audit/AuditTrailService.java | 26 +- .../audit/logfile/LoggingAuditTrail.java | 66 +- .../xpack/security/authc/ApiKeyService.java | 68 +- .../security/authc/AuthenticationService.java | 6 +- .../security/authz/AuthorizationService.java | 821 ++++++++++-------- .../security/authz/AuthorizationUtils.java | 62 -- .../security/authz/AuthorizedIndices.java | 55 -- .../authz/IndicesAndAliasesResolver.java | 113 +-- .../xpack/security/authz/RBACEngine.java | 552 ++++++++++++ .../SecuritySearchOperationListener.java | 10 +- .../BulkShardRequestInterceptor.java | 40 +- ...cumentLevelSecurityRequestInterceptor.java | 68 ++ .../IndicesAliasesRequestInterceptor.java | 105 +++ .../authz/interceptor/RequestInterceptor.java | 24 + .../interceptor/ResizeRequestInterceptor.java | 85 ++ .../interceptor/SearchRequestInterceptor.java | 28 +- .../interceptor/UpdateRequestInterceptor.java | 16 +- .../authz/store/CompositeRolesStore.java | 71 +- .../transport/ServerTransportFilter.java | 15 +- .../DocumentLevelSecurityTests.java | 2 +- .../integration/FieldLevelSecurityTests.java | 2 +- .../filter/SecurityActionFilterTests.java | 55 +- ...TransportGetUserPrivilegesActionTests.java | 86 -- .../TransportHasPrivilegesActionTests.java | 575 ------------ .../audit/AuditTrailServiceTests.java | 17 +- .../logfile/LoggingAuditTrailFilterTests.java | 227 ++--- .../audit/logfile/LoggingAuditTrailTests.java | 70 +- .../security/authc/ApiKeyServiceTests.java | 70 +- .../authc/AuthenticationServiceTests.java | 11 +- .../authz/AuthorizationServiceTests.java | 727 ++++++++-------- .../authz/AuthorizedIndicesTests.java | 49 +- 
.../authz/IndicesAndAliasesResolverTests.java | 118 ++- .../xpack/security/authz/RBACEngineTests.java | 750 ++++++++++++++++ .../SecuritySearchOperationListenerTests.java | 48 +- .../accesscontrol/IndicesPermissionTests.java | 48 +- ...IndicesAliasesRequestInterceptorTests.java | 69 +- .../ResizeRequestInterceptorTests.java | 61 +- .../authz/store/CompositeRolesStoreTests.java | 249 +++++- .../transport/ServerTransportFilterTests.java | 30 +- .../build.gradle | 2 +- 73 files changed, 4739 insertions(+), 2706 deletions(-) create mode 100644 plugins/examples/security-authorization-engine/build.gradle create mode 100644 plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/AuthorizationEnginePlugin.java create mode 100644 plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java create mode 100644 plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/ExampleAuthorizationEngineExtension.java create mode 100644 plugins/examples/security-authorization-engine/src/main/resources/META-INF/services/org.elasticsearch.xpack.core.security.SecurityExtension create mode 100644 plugins/examples/security-authorization-engine/src/test/java/org/elasticsearch/example/CustomAuthorizationEngineIT.java create mode 100644 plugins/examples/security-authorization-engine/src/test/java/org/elasticsearch/example/CustomAuthorizationEngineTests.java rename x-pack/docs/en/security/authorization/{custom-roles-provider.asciidoc => custom-authorization.asciidoc} (51%) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/AuthorizationEngine.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/ResolvedIndices.java delete mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/FieldAndDocumentLevelSecurityRequestInterceptor.java delete mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/IndicesAliasesRequestInterceptor.java delete mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/RequestInterceptor.java delete mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/ResizeRequestInterceptor.java delete mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizedIndices.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java rename x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/{action => authz}/interceptor/BulkShardRequestInterceptor.java (58%) create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/FieldAndDocumentLevelSecurityRequestInterceptor.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/IndicesAliasesRequestInterceptor.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/RequestInterceptor.java create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptor.java rename x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/{action => authz}/interceptor/SearchRequestInterceptor.java (50%) rename 
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/{action => authz}/interceptor/UpdateRequestInterceptor.java (65%) delete mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUserPrivilegesActionTests.java delete mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java rename x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/{action => authz}/interceptor/IndicesAliasesRequestInterceptorTests.java (65%) rename x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/{action => authz}/interceptor/ResizeRequestInterceptorTests.java (61%) diff --git a/build.gradle b/build.gradle index e5bc1ab3ba986..d50801bd207f4 100644 --- a/build.gradle +++ b/build.gradle @@ -233,6 +233,9 @@ allprojects { "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}": ':modules:aggs-matrix-stats', "org.elasticsearch.plugin:percolator-client:${version}": ':modules:percolator', "org.elasticsearch.plugin:rank-eval-client:${version}": ':modules:rank-eval', + // for security example plugins + "org.elasticsearch.plugin:x-pack-core:${version}": ':x-pack:plugin:core', + "org.elasticsearch.client.x-pack-transport:${version}": ':x-pack:transport-client' ] /* diff --git a/plugins/examples/security-authorization-engine/build.gradle b/plugins/examples/security-authorization-engine/build.gradle new file mode 100644 index 0000000000000..d0d227e221b68 --- /dev/null +++ b/plugins/examples/security-authorization-engine/build.gradle @@ -0,0 +1,46 @@ +apply plugin: 'elasticsearch.esplugin' + +esplugin { + name 'security-authorization-engine' + description 'An example spi extension plugin for security that implements an Authorization Engine' + classname 'org.elasticsearch.example.AuthorizationEnginePlugin' + extendedPlugins = ['x-pack-security'] +} + +dependencies { + compileOnly "org.elasticsearch.plugin:x-pack-core:${version}" + testCompile "org.elasticsearch.client.x-pack-transport:${version}" +} + + +integTestRunner { + systemProperty 'tests.security.manager', 'false' +} + +integTestCluster { + dependsOn buildZip + setting 'xpack.security.enabled', 'true' + setting 'xpack.ilm.enabled', 'false' + setting 'xpack.ml.enabled', 'false' + setting 'xpack.monitoring.enabled', 'false' + setting 'xpack.license.self_generated.type', 'trial' + + // This is important, so that all the modules are available too. + // There are index templates that use token filters that are in analysis-module and + // processors are being used that are in ingest-common module. 
+ distribution = 'default' + + setupCommand 'setupDummyUser', + 'bin/elasticsearch-users', 'useradd', 'test_user', '-p', 'x-pack-test-password', '-r', 'custom_superuser' + waitCondition = { node, ant -> + File tmpFile = new File(node.cwd, 'wait.success') + ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow", + dest: tmpFile.toString(), + username: 'test_user', + password: 'x-pack-test-password', + ignoreerrors: true, + retries: 10) + return tmpFile.exists() + } +} +check.dependsOn integTest diff --git a/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/AuthorizationEnginePlugin.java b/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/AuthorizationEnginePlugin.java new file mode 100644 index 0000000000000..1878bb90a0c85 --- /dev/null +++ b/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/AuthorizationEnginePlugin.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.example; + +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; + +/** + * Plugin class that is required so that the code contained here may be loaded as a plugin. + * Additional items such as settings and actions can be registered using this plugin class. + */ +public class AuthorizationEnginePlugin extends Plugin implements ActionPlugin { +} diff --git a/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java b/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java new file mode 100644 index 0000000000000..9916eb5dfed37 --- /dev/null +++ b/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/CustomAuthorizationEngine.java @@ -0,0 +1,238 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.example; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.metadata.AliasOrIndex; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse.Indices; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; +import org.elasticsearch.xpack.core.security.authz.ResolvedIndices; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; +import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; +import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl.IndexAccessControl; +import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions; +import org.elasticsearch.xpack.core.security.authz.permission.ResourcePrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; +import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; +import org.elasticsearch.xpack.core.security.user.User; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * A custom implementation of an authorization engine. This engine is extremely basic in that it + * authorizes based upon the name of a single role. If users have this role they are granted access. 
+ */ +public class CustomAuthorizationEngine implements AuthorizationEngine { + + @Override + public void resolveAuthorizationInfo(RequestInfo requestInfo, ActionListener listener) { + final Authentication authentication = requestInfo.getAuthentication(); + if (authentication.getUser().isRunAs()) { + final CustomAuthorizationInfo authenticatedUserAuthzInfo = + new CustomAuthorizationInfo(authentication.getUser().authenticatedUser().roles(), null); + listener.onResponse(new CustomAuthorizationInfo(authentication.getUser().roles(), authenticatedUserAuthzInfo)); + } else { + listener.onResponse(new CustomAuthorizationInfo(authentication.getUser().roles(), null)); + } + } + + @Override + public void authorizeRunAs(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, ActionListener listener) { + if (isSuperuser(requestInfo.getAuthentication().getUser().authenticatedUser())) { + listener.onResponse(AuthorizationResult.granted()); + } else { + listener.onResponse(AuthorizationResult.deny()); + } + } + + @Override + public void authorizeClusterAction(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, + ActionListener listener) { + if (isSuperuser(requestInfo.getAuthentication().getUser())) { + listener.onResponse(AuthorizationResult.granted()); + } else { + listener.onResponse(AuthorizationResult.deny()); + } + } + + @Override + public void authorizeIndexAction(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, + AsyncSupplier indicesAsyncSupplier, + Map aliasOrIndexLookup, + ActionListener listener) { + if (isSuperuser(requestInfo.getAuthentication().getUser())) { + indicesAsyncSupplier.getAsync(ActionListener.wrap(resolvedIndices -> { + Map indexAccessControlMap = new HashMap<>(); + for (String name : resolvedIndices.getLocal()) { + indexAccessControlMap.put(name, new IndexAccessControl(true, FieldPermissions.DEFAULT, null)); + } + IndicesAccessControl indicesAccessControl = + new IndicesAccessControl(true, Collections.unmodifiableMap(indexAccessControlMap)); + listener.onResponse(new IndexAuthorizationResult(true, indicesAccessControl)); + }, listener::onFailure)); + } else { + listener.onResponse(new IndexAuthorizationResult(true, IndicesAccessControl.DENIED)); + } + } + + @Override + public void loadAuthorizedIndices(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, + Map aliasOrIndexLookup, ActionListener> listener) { + if (isSuperuser(requestInfo.getAuthentication().getUser())) { + listener.onResponse(new ArrayList<>(aliasOrIndexLookup.keySet())); + } else { + listener.onResponse(Collections.emptyList()); + } + } + + @Override + public void validateIndexPermissionsAreSubset(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, + Map> indexNameToNewNames, + ActionListener listener) { + if (isSuperuser(requestInfo.getAuthentication().getUser())) { + listener.onResponse(AuthorizationResult.granted()); + } else { + listener.onResponse(AuthorizationResult.deny()); + } + } + + @Override + public void checkPrivileges(Authentication authentication, AuthorizationInfo authorizationInfo, + HasPrivilegesRequest hasPrivilegesRequest, + Collection applicationPrivilegeDescriptors, + ActionListener listener) { + if (isSuperuser(authentication.getUser())) { + listener.onResponse(getHasPrivilegesResponse(authentication, hasPrivilegesRequest, true)); + } else { + listener.onResponse(getHasPrivilegesResponse(authentication, hasPrivilegesRequest, false)); + } + } + + @Override + public void getUserPrivileges(Authentication authentication, AuthorizationInfo 
authorizationInfo, GetUserPrivilegesRequest request, + ActionListener listener) { + if (isSuperuser(authentication.getUser())) { + listener.onResponse(getUserPrivilegesResponse(true)); + } else { + listener.onResponse(getUserPrivilegesResponse(false)); + } + } + + private HasPrivilegesResponse getHasPrivilegesResponse(Authentication authentication, HasPrivilegesRequest hasPrivilegesRequest, + boolean authorized) { + Map clusterPrivMap = new HashMap<>(); + for (String clusterPriv : hasPrivilegesRequest.clusterPrivileges()) { + clusterPrivMap.put(clusterPriv, authorized); + } + final Map indices = new LinkedHashMap<>(); + for (IndicesPrivileges check : hasPrivilegesRequest.indexPrivileges()) { + for (String index : check.getIndices()) { + final Map privileges = new HashMap<>(); + final ResourcePrivileges existing = indices.get(index); + if (existing != null) { + privileges.putAll(existing.getPrivileges()); + } + for (String privilege : check.getPrivileges()) { + privileges.put(privilege, authorized); + } + indices.put(index, ResourcePrivileges.builder(index).addPrivileges(privileges).build()); + } + } + final Map> privilegesByApplication = new HashMap<>(); + Set applicationNames = Arrays.stream(hasPrivilegesRequest.applicationPrivileges()) + .map(RoleDescriptor.ApplicationResourcePrivileges::getApplication) + .collect(Collectors.toSet()); + for (String applicationName : applicationNames) { + final Map appPrivilegesByResource = new LinkedHashMap<>(); + for (RoleDescriptor.ApplicationResourcePrivileges p : hasPrivilegesRequest.applicationPrivileges()) { + if (applicationName.equals(p.getApplication())) { + for (String resource : p.getResources()) { + final Map privileges = new HashMap<>(); + final ResourcePrivileges existing = appPrivilegesByResource.get(resource); + if (existing != null) { + privileges.putAll(existing.getPrivileges()); + } + for (String privilege : p.getPrivileges()) { + privileges.put(privilege, authorized); + } + appPrivilegesByResource.put(resource, ResourcePrivileges.builder(resource).addPrivileges(privileges).build()); + } + } + } + privilegesByApplication.put(applicationName, appPrivilegesByResource.values()); + } + return new HasPrivilegesResponse(authentication.getUser().principal(), authorized, clusterPrivMap, indices.values(), + privilegesByApplication); + } + + private GetUserPrivilegesResponse getUserPrivilegesResponse(boolean isSuperuser) { + final Set cluster = isSuperuser ? Collections.singleton("ALL") : Collections.emptySet(); + final Set conditionalCluster = Collections.emptySet(); + final Set indices = isSuperuser ? Collections.singleton(new Indices(Collections.singleton("*"), + Collections.singleton("*"), Collections.emptySet(), Collections.emptySet(), true)) : Collections.emptySet(); + + final Set application = isSuperuser ? + Collections.singleton( + RoleDescriptor.ApplicationResourcePrivileges.builder().application("*").privileges("*").resources("*").build()) : + Collections.emptySet(); + final Set runAs = isSuperuser ? 
Collections.singleton("*") : Collections.emptySet(); + return new GetUserPrivilegesResponse(cluster, conditionalCluster, indices, application, runAs); + } + + public static class CustomAuthorizationInfo implements AuthorizationInfo { + + private final String[] roles; + private final CustomAuthorizationInfo authenticatedAuthzInfo; + + CustomAuthorizationInfo(String[] roles, CustomAuthorizationInfo authenticatedAuthzInfo) { + this.roles = roles; + this.authenticatedAuthzInfo = authenticatedAuthzInfo; + } + + @Override + public Map asMap() { + return Collections.singletonMap("roles", roles); + } + + @Override + public CustomAuthorizationInfo getAuthenticatedUserAuthorizationInfo() { + return authenticatedAuthzInfo; + } + } + + private boolean isSuperuser(User user) { + return Arrays.asList(user.roles()).contains("custom_superuser"); + } +} diff --git a/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/ExampleAuthorizationEngineExtension.java b/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/ExampleAuthorizationEngineExtension.java new file mode 100644 index 0000000000000..cba064fae27ad --- /dev/null +++ b/plugins/examples/security-authorization-engine/src/main/java/org/elasticsearch/example/ExampleAuthorizationEngineExtension.java @@ -0,0 +1,35 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.example; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xpack.core.security.SecurityExtension; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; + +/** + * Security extension class that registers the custom authorization engine to be used + */ +public class ExampleAuthorizationEngineExtension implements SecurityExtension { + + @Override + public AuthorizationEngine getAuthorizationEngine(Settings settings) { + return new CustomAuthorizationEngine(); + } +} diff --git a/plugins/examples/security-authorization-engine/src/main/resources/META-INF/services/org.elasticsearch.xpack.core.security.SecurityExtension b/plugins/examples/security-authorization-engine/src/main/resources/META-INF/services/org.elasticsearch.xpack.core.security.SecurityExtension new file mode 100644 index 0000000000000..73029aef8fd63 --- /dev/null +++ b/plugins/examples/security-authorization-engine/src/main/resources/META-INF/services/org.elasticsearch.xpack.core.security.SecurityExtension @@ -0,0 +1 @@ +org.elasticsearch.example.ExampleAuthorizationEngineExtension \ No newline at end of file diff --git a/plugins/examples/security-authorization-engine/src/test/java/org/elasticsearch/example/CustomAuthorizationEngineIT.java b/plugins/examples/security-authorization-engine/src/test/java/org/elasticsearch/example/CustomAuthorizationEngineIT.java new file mode 100644 index 0000000000000..9daf9bd01a8bc --- /dev/null +++ b/plugins/examples/security-authorization-engine/src/test/java/org/elasticsearch/example/CustomAuthorizationEngineIT.java @@ -0,0 +1,163 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.example; + +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xpack.core.XPackClientPlugin; +import org.elasticsearch.xpack.core.security.authc.support.Hasher; +import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; +import org.elasticsearch.xpack.core.security.client.SecurityClient; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Base64; +import java.util.Collection; +import java.util.Collections; + +import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +/** + * Integration tests for the custom authorization engine. These tests are meant to be run against + * an external cluster with the custom authorization plugin installed to validate the functionality + * when running as a plugin + */ +public class CustomAuthorizationEngineIT extends ESIntegTestCase { + + @Override + protected Settings externalClusterClientSettings() { + final String token = "Basic " + + Base64.getEncoder().encodeToString(("test_user:x-pack-test-password").getBytes(StandardCharsets.UTF_8)); + return Settings.builder() + .put(ThreadContext.PREFIX + ".Authorization", token) + .put(NetworkModule.TRANSPORT_TYPE_KEY, "security4") + .build(); + } + + @Override + protected Collection> transportClientPlugins() { + return Collections.singleton(XPackClientPlugin.class); + } + + public void testClusterAction() throws IOException { + SecurityClient securityClient = new SecurityClient(client()); + securityClient.preparePutUser("custom_user", "x-pack-test-password".toCharArray(), Hasher.BCRYPT, "custom_superuser").get(); + + { + RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder(); + options.addHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, + basicAuthHeaderValue("custom_user", new SecureString("x-pack-test-password".toCharArray()))); + Request request = new Request("GET", "_cluster/health"); + request.setOptions(options); + Response response = getRestClient().performRequest(request); + assertThat(response.getStatusLine().getStatusCode(), is(200)); + } + + { + securityClient.preparePutUser("custom_user2", "x-pack-test-password".toCharArray(), Hasher.BCRYPT, "not_superuser").get(); + RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder(); + options.addHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, + basicAuthHeaderValue("custom_user2", new SecureString("x-pack-test-password".toCharArray()))); + Request request = new Request("GET", "_cluster/health"); + request.setOptions(options); + ResponseException e = expectThrows(ResponseException.class, () -> getRestClient().performRequest(request)); + assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); + } + } + + public void testIndexAction() throws IOException { + SecurityClient securityClient = new SecurityClient(client()); + securityClient.preparePutUser("custom_user", 
"x-pack-test-password".toCharArray(), Hasher.BCRYPT, "custom_superuser").get(); + + { + RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder(); + options.addHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, + basicAuthHeaderValue("custom_user", new SecureString("x-pack-test-password".toCharArray()))); + Request request = new Request("PUT", "/index"); + request.setOptions(options); + Response response = getRestClient().performRequest(request); + assertThat(response.getStatusLine().getStatusCode(), is(200)); + } + + { + securityClient.preparePutUser("custom_user2", "x-pack-test-password".toCharArray(), Hasher.BCRYPT, "not_superuser").get(); + RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder(); + options.addHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, + basicAuthHeaderValue("custom_user2", new SecureString("x-pack-test-password".toCharArray()))); + Request request = new Request("PUT", "/index"); + request.setOptions(options); + ResponseException e = expectThrows(ResponseException.class, () -> getRestClient().performRequest(request)); + assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); + } + } + + public void testRunAs() throws IOException { + SecurityClient securityClient = new SecurityClient(client()); + securityClient.preparePutUser("custom_user", "x-pack-test-password".toCharArray(), Hasher.BCRYPT, "custom_superuser").get(); + securityClient.preparePutUser("custom_user2", "x-pack-test-password".toCharArray(), Hasher.BCRYPT, "custom_superuser").get(); + securityClient.preparePutUser("custom_user3", "x-pack-test-password".toCharArray(), Hasher.BCRYPT, "not_superuser").get(); + + { + RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder(); + options.addHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, + basicAuthHeaderValue("custom_user", new SecureString("x-pack-test-password".toCharArray()))); + options.addHeader("es-security-runas-user", "custom_user2"); + Request request = new Request("GET", "/_security/_authenticate"); + request.setOptions(options); + Response response = getRestClient().performRequest(request); + assertThat(response.getStatusLine().getStatusCode(), is(200)); + String responseStr = EntityUtils.toString(response.getEntity()); + assertThat(responseStr, containsString("custom_user2")); + } + + { + RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder(); + options.addHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, + basicAuthHeaderValue("custom_user", new SecureString("x-pack-test-password".toCharArray()))); + options.addHeader("es-security-runas-user", "custom_user3"); + Request request = new Request("PUT", "/index"); + request.setOptions(options); + ResponseException e = expectThrows(ResponseException.class, () -> getRestClient().performRequest(request)); + assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); + } + + { + RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder(); + options.addHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, + basicAuthHeaderValue("custom_user3", new SecureString("x-pack-test-password".toCharArray()))); + options.addHeader("es-security-runas-user", "custom_user2"); + Request request = new Request("PUT", "/index"); + request.setOptions(options); + ResponseException e = expectThrows(ResponseException.class, () -> getRestClient().performRequest(request)); + assertThat(e.getResponse().getStatusLine().getStatusCode(), is(403)); + } + } +} diff --git 
a/plugins/examples/security-authorization-engine/src/test/java/org/elasticsearch/example/CustomAuthorizationEngineTests.java b/plugins/examples/security-authorization-engine/src/test/java/org/elasticsearch/example/CustomAuthorizationEngineTests.java new file mode 100644 index 0000000000000..a4f3e90208695 --- /dev/null +++ b/plugins/examples/security-authorization-engine/src/test/java/org/elasticsearch/example/CustomAuthorizationEngineTests.java @@ -0,0 +1,188 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.example; + +import org.elasticsearch.Version; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.metadata.AliasOrIndex; +import org.elasticsearch.cluster.metadata.AliasOrIndex.Index; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationResult; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.IndexAuthorizationResult; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.RequestInfo; +import org.elasticsearch.xpack.core.security.authz.ResolvedIndices; +import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; +import org.elasticsearch.xpack.core.security.user.User; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +/** + * Unit tests for the custom authorization engine. 
These are basic tests that validate the + * engine's functionality outside of being used by the AuthorizationService + */ +public class CustomAuthorizationEngineTests extends ESTestCase { + + public void testGetAuthorizationInfo() { + PlainActionFuture future = new PlainActionFuture<>(); + CustomAuthorizationEngine engine = new CustomAuthorizationEngine(); + engine.resolveAuthorizationInfo(getRequestInfo(), future); + assertNotNull(future.actionGet()); + } + + public void testAuthorizeRunAs() { + final String action = "cluster:monitor/foo"; + final TransportRequest request = new TransportRequest() {}; + CustomAuthorizationEngine engine = new CustomAuthorizationEngine(); + // unauthorized + { + Authentication authentication = + new Authentication(new User("joe", new String[]{"custom_superuser"}, new User("bar", "not_superuser")), + new RealmRef("test", "test", "node"), new RealmRef("test", "test", "node")); + RequestInfo info = new RequestInfo(authentication, request, action); + PlainActionFuture future = new PlainActionFuture<>(); + engine.resolveAuthorizationInfo(info, future); + AuthorizationInfo authzInfo = future.actionGet(); + + PlainActionFuture resultFuture = new PlainActionFuture<>(); + engine.authorizeRunAs(info, authzInfo, resultFuture); + AuthorizationResult result = resultFuture.actionGet(); + assertThat(result.isGranted(), is(false)); + assertThat(result.isAuditable(), is(true)); + } + + // authorized + { + Authentication authentication = + new Authentication(new User("joe", new String[]{"not_superuser"}, new User("bar", "custom_superuser")), + new RealmRef("test", "test", "node"), new RealmRef("test", "test", "node")); + RequestInfo info = new RequestInfo(authentication, request, action); + PlainActionFuture future = new PlainActionFuture<>(); + engine.resolveAuthorizationInfo(info, future); + AuthorizationInfo authzInfo = future.actionGet(); + PlainActionFuture resultFuture = new PlainActionFuture<>(); + engine.authorizeRunAs(info, authzInfo, resultFuture); + AuthorizationResult result = resultFuture.actionGet(); + assertThat(result.isGranted(), is(true)); + assertThat(result.isAuditable(), is(true)); + } + } + + public void testAuthorizeClusterAction() { + CustomAuthorizationEngine engine = new CustomAuthorizationEngine(); + RequestInfo requestInfo = getRequestInfo(); + // authorized + { + PlainActionFuture future = new PlainActionFuture<>(); + engine.resolveAuthorizationInfo(requestInfo, future); + AuthorizationInfo authzInfo = future.actionGet(); + + PlainActionFuture resultFuture = new PlainActionFuture<>(); + engine.authorizeClusterAction(requestInfo, authzInfo, resultFuture); + AuthorizationResult result = resultFuture.actionGet(); + assertThat(result.isGranted(), is(true)); + assertThat(result.isAuditable(), is(true)); + } + + // unauthorized + { + RequestInfo unauthReqInfo = + new RequestInfo(new Authentication(new User("joe", "not_superuser"), new RealmRef("test", "test", "node"), null), + requestInfo.getRequest(), requestInfo.getAction()); + PlainActionFuture future = new PlainActionFuture<>(); + engine.resolveAuthorizationInfo(unauthReqInfo, future); + AuthorizationInfo authzInfo = future.actionGet(); + + PlainActionFuture resultFuture = new PlainActionFuture<>(); + engine.authorizeClusterAction(unauthReqInfo, authzInfo, resultFuture); + AuthorizationResult result = resultFuture.actionGet(); + assertThat(result.isGranted(), is(false)); + assertThat(result.isAuditable(), is(true)); + } + } + + public void testAuthorizeIndexAction() { + CustomAuthorizationEngine 
engine = new CustomAuthorizationEngine(); + Map aliasOrIndexMap = new HashMap<>(); + aliasOrIndexMap.put("index", new Index(IndexMetaData.builder("index") + .settings(Settings.builder().put("index.version.created", Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .build())); + // authorized + { + RequestInfo requestInfo = + new RequestInfo(new Authentication(new User("joe", "custom_superuser"), new RealmRef("test", "test", "node"), null), + new SearchRequest(), "indices:data/read/search"); + PlainActionFuture future = new PlainActionFuture<>(); + engine.resolveAuthorizationInfo(requestInfo, future); + AuthorizationInfo authzInfo = future.actionGet(); + + PlainActionFuture resultFuture = new PlainActionFuture<>(); + engine.authorizeIndexAction(requestInfo, authzInfo, + listener -> listener.onResponse(new ResolvedIndices(Collections.singletonList("index"), Collections.emptyList())), + aliasOrIndexMap, resultFuture); + IndexAuthorizationResult result = resultFuture.actionGet(); + assertThat(result.isGranted(), is(true)); + assertThat(result.isAuditable(), is(true)); + IndicesAccessControl indicesAccessControl = result.getIndicesAccessControl(); + assertNotNull(indicesAccessControl.getIndexPermissions("index")); + assertThat(indicesAccessControl.getIndexPermissions("index").isGranted(), is(true)); + } + + // unauthorized + { + RequestInfo requestInfo = + new RequestInfo(new Authentication(new User("joe", "not_superuser"), new RealmRef("test", "test", "node"), null), + new SearchRequest(), "indices:data/read/search"); + PlainActionFuture future = new PlainActionFuture<>(); + engine.resolveAuthorizationInfo(requestInfo, future); + AuthorizationInfo authzInfo = future.actionGet(); + + PlainActionFuture resultFuture = new PlainActionFuture<>(); + engine.authorizeIndexAction(requestInfo, authzInfo, + listener -> listener.onResponse(new ResolvedIndices(Collections.singletonList("index"), Collections.emptyList())), + aliasOrIndexMap, resultFuture); + IndexAuthorizationResult result = resultFuture.actionGet(); + assertThat(result.isGranted(), is(false)); + assertThat(result.isAuditable(), is(true)); + IndicesAccessControl indicesAccessControl = result.getIndicesAccessControl(); + assertNull(indicesAccessControl.getIndexPermissions("index")); + } + } + + private RequestInfo getRequestInfo() { + final String action = "cluster:monitor/foo"; + final TransportRequest request = new TransportRequest() {}; + final Authentication authentication = + new Authentication(new User("joe", "custom_superuser"), new RealmRef("test", "test", "node"), null); + return new RequestInfo(authentication, request, action); + } +} diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 0e5cd633a9061..518628e9fd0fb 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -13,7 +13,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/security/authentication/user-cache.asciidoc', 'en/security/authorization/run-as-privilege.asciidoc', 'en/security/ccs-clients-integrations/http.asciidoc', - 'en/security/authorization/custom-roles-provider.asciidoc', 'en/rest-api/watcher/stats.asciidoc', 'en/watcher/example-watches/watching-time-series-data.asciidoc', ] diff --git a/x-pack/docs/en/security/authorization/custom-roles-provider.asciidoc b/x-pack/docs/en/security/authorization/custom-authorization.asciidoc similarity index 51% rename from x-pack/docs/en/security/authorization/custom-roles-provider.asciidoc rename to x-pack/docs/en/security/authorization/custom-authorization.asciidoc index 
bb8942985b701..735fb26cc58a3 100644 --- a/x-pack/docs/en/security/authorization/custom-roles-provider.asciidoc +++ b/x-pack/docs/en/security/authorization/custom-authorization.asciidoc @@ -1,23 +1,23 @@ [role="xpack"] -[[custom-roles-provider]] -=== Custom roles provider extension +[[custom-roles-authorization]] +=== Customizing roles and authorization If you need to retrieve user roles from a system not supported out-of-the-box -by the {es} {security-features}, you can create a custom roles provider to -retrieve and resolve -roles. You implement a custom roles provider as an SPI loaded security extension -as part of an ordinary elasticsearch plugin. +or if the authorization system that is provided by the {es} {security-features} +does not meet your needs, a SPI loaded security extension can be implemented to +customize role retrieval and/or the authorization system. The SPI loaded +security extension is part of an ordinary elasticsearch plugin. [[implementing-custom-roles-provider]] ==== Implementing a custom roles provider -To create a custom roles provider: +To create a custom roles provider: . Implement the interface `BiConsumer, ActionListener>>`. That is to say, the implementation consists of one method that takes a set of strings, which are the role names to resolve, and an ActionListener, on which the set of resolved role descriptors are passed on as the response. -. The custom roles provider implementation must take special care to not block on any I/O +. The custom roles provider implementation must take special care to not block on any I/O operations. It is the responsibility of the implementation to ensure asynchronous behavior and non-blocking calls, which is made easier by the fact that the `ActionListener` is provided on which to send the response when the roles have been resolved and the response @@ -32,7 +32,7 @@ To package your custom roles provider as a plugin: [source,java] ---------------------------------------------------- @Override -public List, ActionListener>>> +public List, ActionListener>>> getRolesProviders(Settings settings, ResourceWatcherService resourceWatcherService) { ... } @@ -41,50 +41,81 @@ getRolesProviders(Settings settings, ResourceWatcherService resourceWatcherServi The `getRolesProviders` method is used to provide a list of custom roles providers that will be used to resolve role names, if the role names could not be resolved by the reserved roles or native roles stores. The list should be returned in the order that the custom role -providers should be invoked to resolve roles. For example, if `getRolesProviders` returns two -instances of roles providers, and both of them are able to resolve role `A`, then the resolved -role descriptor that will be used for role `A` will be the one resolved by the first roles +providers should be invoked to resolve roles. For example, if `getRolesProviders` returns two +instances of roles providers, and both of them are able to resolve role `A`, then the resolved +role descriptor that will be used for role `A` will be the one resolved by the first roles provider in the list. + +[[implementing-authorization-engine]] +==== Implementing an authorization engine + +To create an authorization engine, you need to: + +. Implement the `org.elasticsearch.xpack.core.security.authz.AuthorizationEngine` + interface in a class with the desired authorization behavior. +. 
Implement the `org.elasticsearch.xpack.core.security.authz.Authorization.AuthorizationInfo` + interface in a class that contains the necessary information to authorize the request. + +To package your authorization engine as a plugin: + +. Implement an extension class for your authorization engine that extends + `org.elasticsearch.xpack.core.security.SecurityExtension`. There you need to + override the following method: + [source,java] ---------------------------------------------------- @Override -public List getSettingsFilter() { +public AuthorizationEngine getAuthorizationEngine(Settings settings) { ... } ---------------------------------------------------- + -The `Plugin#getSettingsFilter` method returns a list of setting names that should be -filtered from the settings APIs as they may contain sensitive credentials. Note this method is not -part of the `SecurityExtension` interface, it's available as part of the elasticsearch plugin main class. +The `getAuthorizationEngine` method is used to provide the authorization engine +implementation. + +Sample code that illustrates the structure and implementation of a custom +authorization engine is provided in the +https://github.com/elastic/elasticsearch/tree/master/plugin/examples/security-example-authorization-engine[elasticsearch] +repository on GitHub. You can use this code as a starting point for creating your +own authorization engine. + +[[packing-extension-plugin]] +==== Implement an elasticsearch plugin + +In order to register the security extension for your custom roles provider or +authorization engine, you need to also implement an elasticsearch plugin that +contains the extension: +. Implement a plugin class that extends `org.elasticsearch.plugins.Plugin` . Create a build configuration file for the plugin; Gradle is our recommendation. +. Create a `plugin-descriptor.properties` file as described in + {plugins}/plugin-authors.html[Help for plugin authors]. . Create a `META-INF/services/org.elasticsearch.xpack.core.security.SecurityExtension` descriptor file for the extension that contains the fully qualified class name of your `org.elasticsearch.xpack.core.security.SecurityExtension` implementation . Bundle all in a single zip file. -[[using-custom-roles-provider]] -==== Using a custom roles provider to resolve roles +[[using-security-extension]] +==== Using the security extension -To use a custom roles provider: +To use a security extension: -. Install the roles provider extension on each node in the cluster. You run +. Install the plugin with the extension on each node in the cluster. You run `bin/elasticsearch-plugin` with the `install` sub-command and specify the URL pointing to the zip file that contains the extension. For example: + [source,shell] ---------------------------------------- -bin/elasticsearch-plugin install file:////my-roles-provider-1.0.zip +bin/elasticsearch-plugin install file:////my-extension-plugin-1.0.zip ---------------------------------------- -. Add any configuration parameters for any of the custom roles provider implementations -to `elasticsearch.yml`. The settings are not namespaced and you have access to any -settings when constructing the custom roles providers, although it is recommended to -have a namespacing convention for custom roles providers to keep your `elasticsearch.yml` -configuration easy to understand. +. Add any configuration parameters for implementations in the extension to the +`elasticsearch.yml` file. 
The settings are not namespaced and you have access to any +settings when constructing the extensions, although it is recommended to have a +namespacing convention for extensions to keep your `elasticsearch.yml` +configuration easy to understand. + -For example, if you have a custom roles provider that -resolves roles from reading a blob in an S3 bucket on AWS, then you would specify settings +For example, if you have a custom roles provider that +resolves roles from reading a blob in an S3 bucket on AWS, then you would specify settings in `elasticsearch.yml` such as: + [source,js] @@ -94,8 +125,8 @@ custom_roles_provider.s3_roles_provider.region: us-east-1 custom_roles_provider.s3_roles_provider.secret_key: xxx custom_roles_provider.s3_roles_provider.access_key: xxx ---------------------------------------- +// NOTCONSOLE + -These settings will be available as the first parameter in the `getRolesProviders` method, from -where you will create and return the custom roles provider instances. +These settings are passed as arguments to the methods in the `SecurityExtension` interface. . Restart Elasticsearch. diff --git a/x-pack/docs/en/security/authorization/managing-roles.asciidoc b/x-pack/docs/en/security/authorization/managing-roles.asciidoc index cac4eaac1fbfa..04fb12e19d75b 100644 --- a/x-pack/docs/en/security/authorization/managing-roles.asciidoc +++ b/x-pack/docs/en/security/authorization/managing-roles.asciidoc @@ -179,7 +179,7 @@ There are two available mechanisms to define roles: using the _Role Management A or in local files on the {es} nodes. You can also implement custom roles providers. If you need to integrate with another system to retrieve user roles, you can build a custom roles provider plugin. For more information, -see <>. +see <>. [float] [[roles-management-ui]] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index 84dc4c9a5887b..7cb04a9e57a4b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -103,7 +103,8 @@ private static String[] securityAcknowledgementMessages(OperationMode currentMod "The following X-Pack security functionality will be disabled: authentication, authorization, " + "ip filtering, and auditing. Please restart your node after applying the license.", "Field and document level access control will be disabled.", - "Custom realms will be ignored." + "Custom realms will be ignored.", + "A custom authorization engine will be ignored." }; } break; @@ -116,7 +117,8 @@ private static String[] securityAcknowledgementMessages(OperationMode currentMod case PLATINUM: return new String[] { "Field and document level access control will be disabled.", - "Custom realms will be ignored." + "Custom realms will be ignored.", + "A custom authorization engine will be ignored." }; } break; @@ -131,7 +133,8 @@ private static String[] securityAcknowledgementMessages(OperationMode currentMod "Authentication will be limited to the native realms.", "IP filtering and auditing will be disabled.", "Field and document level access control will be disabled.", - "Custom realms will be ignored." + "Custom realms will be ignored.", + "A custom authorization engine will be ignored." 
}; } } @@ -433,6 +436,17 @@ public synchronized boolean isAuthorizationRealmAllowed() { && status.active; } + /** + * @return whether a custom authorization engine is allowed based on the license {@link OperationMode} + * @see org.elasticsearch.xpack.core.security.authc.support.DelegatedAuthorizationSettings + */ + public synchronized boolean isAuthorizationEngineAllowed() { + final boolean isSecurityCurrentlyEnabled = + isSecurityEnabled(status.mode, isSecurityExplicitlyEnabled, isSecurityEnabled); + return isSecurityCurrentlyEnabled && (status.mode == OperationMode.PLATINUM || status.mode == OperationMode.TRIAL) + && status.active; + } + /** * Determine if Watcher is available based on the current license. *
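The `XPackLicenseState` change above restricts custom authorization engines to platinum and trial licenses via the new `isAuthorizationEngineAllowed()` method. The sketch below is illustrative only and is not part of this patch: the `resolveEngine` helper and the fall-back to a caller-supplied default engine are assumptions about how such a check might be consumed.

[source,java]
----
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine;

class EngineSelection {
    /**
     * Hypothetical helper (not from this patch): prefer a custom engine only while the
     * license allows it, otherwise keep using the default engine supplied by the caller.
     */
    static AuthorizationEngine resolveEngine(XPackLicenseState licenseState,
                                             AuthorizationEngine customEngine,
                                             AuthorizationEngine defaultEngine) {
        if (customEngine != null && licenseState.isAuthorizationEngineAllowed()) {
            return customEngine;
        }
        return defaultEngine;
    }
}
----

If the license is later downgraded, the acknowledgement messages added in the same hunk warn that "A custom authorization engine will be ignored."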

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityExtension.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityExtension.java index 82d31aa8a2993..9f0eb474a59c8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityExtension.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityExtension.java @@ -11,6 +11,7 @@ import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.core.security.authc.AuthenticationFailureHandler; import org.elasticsearch.xpack.core.security.authc.Realm; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult; @@ -79,6 +80,18 @@ default AuthenticationFailureHandler getAuthenticationFailureHandler() { return Collections.emptyList(); } + /** + * Returns a authorization engine for authorizing requests, or null to use the default authorization mechanism. + * + * Only one installed extension may have an authorization engine. If more than + * one extension returns a non-null authorization engine, an error is raised. + * + * @param settings The configured settings for the node + */ + default AuthorizationEngine getAuthorizationEngine(Settings settings) { + return null; + } + /** * Loads the XPackSecurityExtensions from the given class loader */ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/AuthorizationEngine.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/AuthorizationEngine.java new file mode 100644 index 0000000000000..fd5e6fba9c55e --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/AuthorizationEngine.java @@ -0,0 +1,338 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.security.authz; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.metadata.AliasOrIndex; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; +import org.elasticsearch.xpack.core.security.user.User; + +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +/** + *

+ * An AuthorizationEngine is responsible for making the core decisions about whether a request
+ * should be authorized or not. The engine can and usually will be called multiple times during
+ * the authorization of a request. Security categorizes requests into a few different buckets
+ * and uses the action name as the indicator of what a request will perform. Internally, the
+ * action name is used to map a {@link TransportRequest} to the actual
+ * {@link org.elasticsearch.action.support.TransportAction} that will handle the request.
+ * <p>
+ * Requests can be a cluster request or an indices request. Cluster requests
+ * are requests that tend to be global in nature; they could affect the whole cluster.
+ * Indices requests are those that deal with specific indices; the actions could have the effect
+ * of reading data, modifying data, creating an index, deleting an index, or modifying metadata.
+ * <p>
+ * Each call to the engine will contain a {@link RequestInfo} object that contains the request,
+ * action name, and the authentication associated with the request. This data is provided to the
+ * engine so that all information about the request can be used to make the authorization decision.
+ * <p>
+ * The methods of the engine will be called in the following order:
+ * <ol>
+ *   <li>{@link #resolveAuthorizationInfo(RequestInfo, ActionListener)} to retrieve information
+ *       necessary to authorize the given user. It is important to note that the {@link RequestInfo}
+ *       may contain an {@link Authentication} object that actually has two users when the
+ *       run as feature is used and this method should resolve the information for both.
+ *       To check for the presence of run as, use the {@link User#isRunAs()} method on the user
+ *       retrieved using the {@link Authentication#getUser()} method.</li>
+ *   <li>{@link #authorizeRunAs(RequestInfo, AuthorizationInfo, ActionListener)} if the request
+ *       is making use of the run as feature. This method is used to ensure the authenticated user
+ *       can actually impersonate the user running the request.</li>
+ *   <li>{@link #authorizeClusterAction(RequestInfo, AuthorizationInfo, ActionListener)} if the
+ *       request is a cluster level operation.</li>
+ *   <li>{@link #authorizeIndexAction(RequestInfo, AuthorizationInfo, AsyncSupplier, Map, ActionListener)} if
+ *       the request is an index action. This method may be called multiple times for a single
+ *       request as the request may be made up of sub-requests that also need to be authorized. The async supplier
+ *       for resolved indices will invoke the
+ *       {@link #loadAuthorizedIndices(RequestInfo, AuthorizationInfo, Map, ActionListener)} method
+ *       if it is used as part of the authorization process.</li>
+ * </ol>
+ * <p>
+ * NOTE: the {@link #loadAuthorizedIndices(RequestInfo, AuthorizationInfo, Map, ActionListener)}
+ * method may be called prior to {@link #authorizeIndexAction(RequestInfo, AuthorizationInfo, AsyncSupplier, Map, ActionListener)}
+ * in cases where wildcards need to be expanded.
+ * <p>
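+ * For illustration only (not part of this interface's contract), a caller could drive a simple
+ * cluster-action authorization as follows, assuming {@code engine}, {@code requestInfo} and an
+ * outer {@code listener} are supplied by the caller:
+ * <pre>{@code
+ * engine.resolveAuthorizationInfo(requestInfo, ActionListener.wrap(authzInfo ->
+ *     engine.authorizeClusterAction(requestInfo, authzInfo, ActionListener.wrap(result -> {
+ *         if (result.isGranted()) {
+ *             // proceed with the action
+ *         } else {
+ *             // deny the request; result.isAuditable() indicates whether to audit the denial
+ *         }
+ *     }, listener::onFailure)),
+ *     listener::onFailure));
+ * }</pre>
+ * <p>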


+ * Authorization engines can be called from various threads including network threads that should + * not be blocked waiting for I/O. Network threads in elasticsearch are limited and we rely on + * asynchronous processing to ensure optimal use of network threads; this is unlike many other Java + * based servers that have a thread for each concurrent request and blocking operations could take + * place on those threads. Given this it is imperative that the implementations used here do not + * block when calling out to an external service or waiting on some data. + */ +public interface AuthorizationEngine { + + /** + * Asynchronously resolves any necessary information to authorize the given user(s). This could + * include retrieval of permissions from an index or external system. + * + * @param requestInfo object contain the request and associated information such as the action + * and associated user(s) + * @param listener the listener to be notified of success using {@link ActionListener#onResponse(Object)} + * or failure using {@link ActionListener#onFailure(Exception)} + */ + void resolveAuthorizationInfo(RequestInfo requestInfo, ActionListener listener); + + /** + * Asynchronously authorizes an attempt for a user to run as another user. + * + * @param requestInfo object contain the request and associated information such as the action + * and associated user(s) + * @param authorizationInfo information needed from authorization that was previously retrieved + * from {@link #resolveAuthorizationInfo(RequestInfo, ActionListener)} + * @param listener the listener to be notified of the authorization result + */ + void authorizeRunAs(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, ActionListener listener); + + /** + * Asynchronously authorizes a cluster action. + * + * @param requestInfo object contain the request and associated information such as the action + * and associated user(s) + * @param authorizationInfo information needed from authorization that was previously retrieved + * from {@link #resolveAuthorizationInfo(RequestInfo, ActionListener)} + * @param listener the listener to be notified of the authorization result + */ + void authorizeClusterAction(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, ActionListener listener); + + /** + * Asynchronously authorizes an action that operates on an index. The indices and aliases that + * the request is attempting to operate on can be retrieved using the {@link AsyncSupplier} for + * {@link ResolvedIndices}. The resolved indices will contain the exact list of indices and aliases + * that the request is attempting to take action on; in other words this supplier handles wildcard + * expansion and datemath expressions. 
+ * + * @param requestInfo object contain the request and associated information such as the action + * and associated user(s) + * @param authorizationInfo information needed from authorization that was previously retrieved + * from {@link #resolveAuthorizationInfo(RequestInfo, ActionListener)} + * @param indicesAsyncSupplier the asynchronous supplier for the indices that this request is + * attempting to operate on + * @param aliasOrIndexLookup a map of a string name to the cluster metadata specific to that + * alias or index + * @param listener the listener to be notified of the authorization result + */ + void authorizeIndexAction(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, + AsyncSupplier indicesAsyncSupplier, Map aliasOrIndexLookup, + ActionListener listener); + + /** + * Asynchronously loads a list of alias and index names for which the user is authorized + * to execute the requested action. + * + * @param requestInfo object contain the request and associated information such as the action + * and associated user(s) + * @param authorizationInfo information needed from authorization that was previously retrieved + * from {@link #resolveAuthorizationInfo(RequestInfo, ActionListener)} + * @param aliasOrIndexLookup a map of a string name to the cluster metadata specific to that + * alias or index + * @param listener the listener to be notified of the authorization result + */ + void loadAuthorizedIndices(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, + Map aliasOrIndexLookup, ActionListener> listener); + + + /** + * Asynchronously checks that the permissions a user would have for a given list of names do + * not exceed their permissions for a given name. This is used to ensure that a user cannot + * perform operations that would escalate their privileges over the data. Some examples include + * adding an alias to gain more permissions to a given index and/or resizing an index in order + * to gain more privileges on the data since the index name changes. + * + * @param requestInfo object contain the request and associated information such as the action + * and associated user(s) + * @param authorizationInfo information needed from authorization that was previously retrieved + * from {@link #resolveAuthorizationInfo(RequestInfo, ActionListener)} + * @param indexNameToNewNames A map of an existing index/alias name to a one or more names of + * an index/alias that the user is requesting to create. The method + * should validate that none of the names have more permissions than + * the name in the key would have. + * @param listener the listener to be notified of the authorization result + */ + void validateIndexPermissionsAreSubset(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, + Map> indexNameToNewNames, ActionListener listener); + + /** + * Checks the current user's privileges against those that being requested to check in the + * request. This provides a way for an application to ask if a user has permission to perform + * an action or if they have permissions to an application resource. 
+ * + * @param authentication the authentication that is associated with this request + * @param authorizationInfo information needed from authorization that was previously retrieved + * from {@link #resolveAuthorizationInfo(RequestInfo, ActionListener)} + * @param hasPrivilegesRequest the request that contains the privileges to check for the user + * @param applicationPrivilegeDescriptors a collection of application privilege descriptors + * @param listener the listener to be notified of the has privileges response + */ + void checkPrivileges(Authentication authentication, AuthorizationInfo authorizationInfo, HasPrivilegesRequest hasPrivilegesRequest, + Collection applicationPrivilegeDescriptors, + ActionListener listener); + + /** + * Retrieve's the current user's privileges in a standard format that can be rendered via an + * API for an application to understand the privileges that the current user has. + * + * @param authentication the authentication that is associated with this request + * @param authorizationInfo information needed from authorization that was previously retrieved + * from {@link #resolveAuthorizationInfo(RequestInfo, ActionListener)} + * @param request the request for retrieving the user's privileges + * @param listener the listener to be notified of the has privileges response + */ + void getUserPrivileges(Authentication authentication, AuthorizationInfo authorizationInfo, GetUserPrivilegesRequest request, + ActionListener listener); + + /** + * Interface for objects that contains the information needed to authorize a request + */ + interface AuthorizationInfo { + + /** + * @return a map representation of the authorization information. This map will be used to + * augment the data that is audited, so in the case of RBAC this map could contain the + * role names. + */ + Map asMap(); + + /** + * This method should be overridden in case of run as. Authorization info is only retrieved + * a single time and should represent the information to authorize both run as and the + * operation being performed. + */ + default AuthorizationInfo getAuthenticatedUserAuthorizationInfo() { + return this; + } + } + + /** + * Implementation of authorization info that is used in cases where we were not able to resolve + * the authorization info + */ + final class EmptyAuthorizationInfo implements AuthorizationInfo { + + public static final EmptyAuthorizationInfo INSTANCE = new EmptyAuthorizationInfo(); + + private EmptyAuthorizationInfo() {} + + @Override + public Map asMap() { + return Collections.emptyMap(); + } + } + + /** + * A class that encapsulates information about the request that is being authorized including + * the actual transport request, the authentication, and the action being invoked. + */ + final class RequestInfo { + + private final Authentication authentication; + private final TransportRequest request; + private final String action; + + public RequestInfo(Authentication authentication, TransportRequest request, String action) { + this.authentication = authentication; + this.request = request; + this.action = action; + } + + public String getAction() { + return action; + } + + public Authentication getAuthentication() { + return authentication; + } + + public TransportRequest getRequest() { + return request; + } + } + + /** + * Represents the result of authorization. This includes whether the actions should be granted + * and if this should be considered an auditable event. 
+ */ + class AuthorizationResult { + + private final boolean granted; + private final boolean auditable; + + /** + * Create an authorization result with the provided granted value that is auditable + */ + public AuthorizationResult(boolean granted) { + this(granted, true); + } + + public AuthorizationResult(boolean granted, boolean auditable) { + this.granted = granted; + this.auditable = auditable; + } + + public boolean isGranted() { + return granted; + } + + public boolean isAuditable() { + return auditable; + } + + /** + * Returns a new authorization result that is granted and auditable + */ + public static AuthorizationResult granted() { + return new AuthorizationResult(true); + } + + /** + * Returns a new authorization result that is denied and auditable + */ + public static AuthorizationResult deny() { + return new AuthorizationResult(false); + } + } + + /** + * An extension of {@link AuthorizationResult} that is specific to index requests. Index requests + * need to return a {@link IndicesAccessControl} object representing the users permissions to indices + * that are being operated on. + */ + class IndexAuthorizationResult extends AuthorizationResult { + + private final IndicesAccessControl indicesAccessControl; + + public IndexAuthorizationResult(boolean auditable, IndicesAccessControl indicesAccessControl) { + super(indicesAccessControl == null || indicesAccessControl.isGranted(), auditable); + this.indicesAccessControl = indicesAccessControl; + } + + public IndicesAccessControl getIndicesAccessControl() { + return indicesAccessControl; + } + } + + @FunctionalInterface + interface AsyncSupplier { + + /** + * Asynchronously retrieves the value that is being supplied and notifies the listener upon + * completion. + */ + void getAsync(ActionListener listener); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/ResolvedIndices.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/ResolvedIndices.java new file mode 100644 index 0000000000000..f74a94cbaa665 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/ResolvedIndices.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.security.authz; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.xpack.core.security.authz.IndicesAndAliasesResolverField.NO_INDEX_PLACEHOLDER; + +/** + * Stores a collection of index names separated into "local" and "remote". + * This allows the resolution and categorization to take place exactly once per-request. + */ +public final class ResolvedIndices { + private final List local; + private final List remote; + + public ResolvedIndices(List local, List remote) { + this.local = Collections.unmodifiableList(local); + this.remote = Collections.unmodifiableList(remote); + } + + /** + * Returns the collection of index names that have been stored as "local" indices. + * This is a List because order may be important. For example [ "a*" , "-a1" ] is interpreted differently + * to [ "-a1", "a*" ]. As a consequence, this list may contain duplicates. 
+ */ + public List getLocal() { + return local; + } + + /** + * Returns the collection of index names that have been stored as "remote" indices. + */ + public List getRemote() { + return remote; + } + + /** + * @return true if both the {@link #getLocal() local} and {@link #getRemote() remote} index lists are empty. + */ + public boolean isEmpty() { + return local.isEmpty() && remote.isEmpty(); + } + + /** + * @return true if the {@link #getRemote() remote} index lists is empty, and the local index list contains the + * {@link IndicesAndAliasesResolverField#NO_INDEX_PLACEHOLDER no-index-placeholder} and nothing else. + */ + public boolean isNoIndicesPlaceholder() { + return remote.isEmpty() && local.size() == 1 && local.contains(NO_INDEX_PLACEHOLDER); + } + + public String[] toArray() { + final String[] array = new String[local.size() + remote.size()]; + int i = 0; + for (String index : local) { + array[i++] = index; + } + for (String index : remote) { + array[i++] = index; + } + return array; + } + + /** + * Builder class for ResolvedIndices that allows for the building of a list of indices + * without the need to construct new objects and merging them together + */ + public static class Builder { + + private final List local = new ArrayList<>(); + private final List remote = new ArrayList<>(); + + /** add a local index name */ + public void addLocal(String index) { + local.add(index); + } + + /** adds the array of local index names */ + public void addLocal(String[] indices) { + local.addAll(Arrays.asList(indices)); + } + + /** adds the list of local index names */ + public void addLocal(List indices) { + local.addAll(indices); + } + + /** adds the list of remote index names */ + public void addRemote(List indices) { + remote.addAll(indices); + } + + /** @return true if both the local and remote index lists are empty. 
*/ + public boolean isEmpty() { + return local.isEmpty() && remote.isEmpty(); + } + + /** @return a immutable ResolvedIndices instance with the local and remote index lists */ + public ResolvedIndices build() { + return new ResolvedIndices(local, remote); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/IndicesAccessControl.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/IndicesAccessControl.java index 8cdf099e676d8..604508f95c5af 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/IndicesAccessControl.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/IndicesAccessControl.java @@ -25,6 +25,7 @@ public class IndicesAccessControl { public static final IndicesAccessControl ALLOW_NO_INDICES = new IndicesAccessControl(true, Collections.singletonMap(IndicesAndAliasesResolverField.NO_INDEX_PLACEHOLDER, new IndicesAccessControl.IndexAccessControl(true, new FieldPermissions(), DocumentPermissions.allowAll()))); + public static final IndicesAccessControl DENIED = new IndicesAccessControl(false, Collections.emptyMap()); private final boolean granted; private final Map indexPermissions; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java index 006c6661d2c4c..d15bf966276a0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java @@ -12,7 +12,6 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -31,7 +30,6 @@ import java.util.Map; import java.util.Objects; import java.util.Set; -import java.util.SortedMap; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.function.Predicate; @@ -182,11 +180,10 @@ public Automaton allowedActionsMatcher(String index) { * Authorizes the provided action against the provided indices, given the current cluster metadata */ public Map authorize(String action, Set requestedIndicesOrAliases, - MetaData metaData, FieldPermissionsCache fieldPermissionsCache) { + Map allAliasesAndIndices, + FieldPermissionsCache fieldPermissionsCache) { // now... 
every index that is associated with the request, must be granted // by at least one indices permission group - - SortedMap allAliasesAndIndices = metaData.getAliasAndIndexLookup(); Map> fieldPermissionsByIndex = new HashMap<>(); Map roleQueriesByIndex = new HashMap<>(); Map grantedBuilder = new HashMap<>(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java index 809b95965340e..8c7491d0a9a3d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java @@ -6,20 +6,23 @@ package org.elasticsearch.xpack.core.security.authz.permission; -import org.elasticsearch.cluster.metadata.MetaData; +import org.apache.lucene.util.automaton.Automaton; +import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; +import org.elasticsearch.xpack.core.security.support.Automatons; import java.util.Collection; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.function.Predicate; /** * A {@link Role} limited by another role.
- * The effective permissions returned on {@link #authorize(String, Set, MetaData, FieldPermissionsCache)} call would be limited by the + * The effective permissions returned on {@link #authorize(String, Set, Map, FieldPermissionsCache)} call would be limited by the * provided role. */ public final class LimitedRole extends Role { @@ -37,10 +40,32 @@ public Role limitedBy() { } @Override - public IndicesAccessControl authorize(String action, Set requestedIndicesOrAliases, MetaData metaData, + public ClusterPermission cluster() { + throw new UnsupportedOperationException("cannot retrieve cluster permission on limited role"); + } + + @Override + public IndicesPermission indices() { + throw new UnsupportedOperationException("cannot retrieve indices permission on limited role"); + } + + @Override + public ApplicationPermission application() { + throw new UnsupportedOperationException("cannot retrieve application permission on limited role"); + } + + @Override + public RunAsPermission runAs() { + throw new UnsupportedOperationException("cannot retrieve run as permission on limited role"); + } + + @Override + public IndicesAccessControl authorize(String action, Set requestedIndicesOrAliases, + Map aliasAndIndexLookup, FieldPermissionsCache fieldPermissionsCache) { - IndicesAccessControl indicesAccessControl = super.authorize(action, requestedIndicesOrAliases, metaData, fieldPermissionsCache); - IndicesAccessControl limitedByIndicesAccessControl = limitedBy.authorize(action, requestedIndicesOrAliases, metaData, + IndicesAccessControl indicesAccessControl = + super.authorize(action, requestedIndicesOrAliases, aliasAndIndexLookup, fieldPermissionsCache); + IndicesAccessControl limitedByIndicesAccessControl = limitedBy.authorize(action, requestedIndicesOrAliases, aliasAndIndexLookup, fieldPermissionsCache); return indicesAccessControl.limitIndicesAccessControl(limitedByIndicesAccessControl); @@ -52,11 +77,18 @@ public IndicesAccessControl authorize(String action, Set requestedIndice */ @Override public Predicate allowedIndicesMatcher(String action) { - Predicate predicate = indices().allowedIndicesMatcher(action); + Predicate predicate = super.indices().allowedIndicesMatcher(action); predicate = predicate.and(limitedBy.indices().allowedIndicesMatcher(action)); return predicate; } + @Override + public Automaton allowedActionsMatcher(String index) { + final Automaton allowedMatcher = super.allowedActionsMatcher(index); + final Automaton limitedByMatcher = limitedBy.allowedActionsMatcher(index); + return Automatons.intersectAndMinimize(allowedMatcher, limitedByMatcher); + } + /** * Check if indices permissions allow for the given action, also checks whether the limited by role allows the given actions * @@ -137,6 +169,11 @@ public ResourcePrivilegesMap checkApplicationResourcePrivileges(final String app return ResourcePrivilegesMap.intersection(resourcePrivilegesMap, resourcePrivilegesMapForLimitedRole); } + @Override + public boolean checkRunAs(String runAs) { + return super.checkRunAs(runAs) && limitedBy.checkRunAs(runAs); + } + /** * Create a new role defined by given role and the limited role.
* diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java index 570fa02a9b5ba..817a9e41eab71 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java @@ -5,7 +5,8 @@ */ package org.elasticsearch.xpack.core.security.authz.permission; -import org.elasticsearch.cluster.metadata.MetaData; +import org.apache.lucene.util.automaton.Automaton; +import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; @@ -82,7 +83,15 @@ public static Builder builder(RoleDescriptor rd, FieldPermissionsCache fieldPerm * has the privilege for executing the given action on. */ public Predicate allowedIndicesMatcher(String action) { - return indices().allowedIndicesMatcher(action); + return indices.allowedIndicesMatcher(action); + } + + public Automaton allowedActionsMatcher(String index) { + return indices.allowedActionsMatcher(index); + } + + public boolean checkRunAs(String runAsName) { + return runAs.check(runAsName); } /** @@ -92,7 +101,7 @@ public Predicate allowedIndicesMatcher(String action) { * @return {@code true} if action is allowed else returns {@code false} */ public boolean checkIndicesAction(String action) { - return indices().check(action); + return indices.check(action); } @@ -108,7 +117,7 @@ public boolean checkIndicesAction(String action) { */ public ResourcePrivilegesMap checkIndicesPrivileges(Set checkForIndexPatterns, boolean allowRestrictedIndices, Set checkForPrivileges) { - return indices().checkResourcePrivileges(checkForIndexPatterns, allowRestrictedIndices, checkForPrivileges); + return indices.checkResourcePrivileges(checkForIndexPatterns, allowRestrictedIndices, checkForPrivileges); } /** @@ -119,7 +128,7 @@ public ResourcePrivilegesMap checkIndicesPrivileges(Set checkForIndexPat * @return {@code true} if action is allowed else returns {@code false} */ public boolean checkClusterAction(String action, TransportRequest request) { - return cluster().check(action, request); + return cluster.check(action, request); } /** @@ -129,7 +138,7 @@ public boolean checkClusterAction(String action, TransportRequest request) { * @return {@code true} if cluster privilege is allowed else returns {@code false} */ public boolean grants(ClusterPrivilege clusterPrivilege) { - return cluster().grants(clusterPrivilege); + return cluster.grants(clusterPrivilege); } /** @@ -147,7 +156,7 @@ public boolean grants(ClusterPrivilege clusterPrivilege) { public ResourcePrivilegesMap checkApplicationResourcePrivileges(final String applicationName, Set checkForResources, Set checkForPrivilegeNames, Collection storedPrivileges) { - return application().checkResourcePrivileges(applicationName, checkForResources, checkForPrivilegeNames, storedPrivileges); + return application.checkResourcePrivileges(applicationName, checkForResources, checkForPrivilegeNames, storedPrivileges); } /** @@ -155,10 +164,11 @@ public ResourcePrivilegesMap checkApplicationResourcePrivileges(final String app * specified action with the requested indices/aliases. At the same time if field and/or document level security * is configured for any group also the allowed fields and role queries are resolved. 
*/ - public IndicesAccessControl authorize(String action, Set requestedIndicesOrAliases, MetaData metaData, + public IndicesAccessControl authorize(String action, Set requestedIndicesOrAliases, + Map aliasAndIndexLookup, FieldPermissionsCache fieldPermissionsCache) { Map indexPermissions = indices.authorize( - action, requestedIndicesOrAliases, metaData, fieldPermissionsCache + action, requestedIndicesOrAliases, aliasAndIndexLookup, fieldPermissionsCache ); // At least one role / indices permission set need to match with all the requested indices/aliases: diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Exceptions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Exceptions.java index 9cf09482a5268..18638b15335bc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Exceptions.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Exceptions.java @@ -27,6 +27,10 @@ public static ElasticsearchSecurityException authenticationError(String msg, Obj } public static ElasticsearchSecurityException authorizationError(String msg, Object... args) { - return new ElasticsearchSecurityException(msg, RestStatus.FORBIDDEN, args); + return new ElasticsearchSecurityException(msg, RestStatus.FORBIDDEN, null, args); + } + + public static ElasticsearchSecurityException authorizationError(String msg, Exception cause, Object... args) { + return new ElasticsearchSecurityException(msg, RestStatus.FORBIDDEN, cause, args); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java index bbd5d950c8b9b..8ad42d5afe636 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java @@ -226,17 +226,17 @@ public void testSecurityAckAnyToTrialOrPlatinum() { } public void testSecurityAckTrialStandardGoldOrPlatinumToBasic() { - assertAckMesssages(XPackField.SECURITY, randomTrialStandardGoldOrPlatinumMode(), BASIC, 3); + assertAckMesssages(XPackField.SECURITY, randomTrialStandardGoldOrPlatinumMode(), BASIC, 4); } public void testSecurityAckAnyToStandard() { OperationMode from = randomFrom(BASIC, GOLD, PLATINUM, TRIAL); - assertAckMesssages(XPackField.SECURITY, from, STANDARD, 4); + assertAckMesssages(XPackField.SECURITY, from, STANDARD, 5); } public void testSecurityAckBasicStandardTrialOrPlatinumToGold() { OperationMode from = randomFrom(BASIC, PLATINUM, TRIAL, STANDARD); - assertAckMesssages(XPackField.SECURITY, from, GOLD, 2); + assertAckMesssages(XPackField.SECURITY, from, GOLD, 3); } public void testMonitoringAckBasicToAny() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java index eef358271b61a..56807d23913ba 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java @@ -69,12 +69,14 @@ public void testAuthorize() { Role fromRole = Role.builder("a-role").cluster(Collections.singleton(ClusterPrivilegeName.MANAGE_SECURITY), Collections.emptyList()) 
.add(IndexPrivilege.ALL, "_index").add(IndexPrivilege.CREATE_INDEX, "_index1").build(); - IndicesAccessControl iac = fromRole.authorize(SearchAction.NAME, Sets.newHashSet("_index", "_alias1"), md, fieldPermissionsCache); + IndicesAccessControl iac = fromRole.authorize(SearchAction.NAME, Sets.newHashSet("_index", "_alias1"), md.getAliasAndIndexLookup(), + fieldPermissionsCache); assertThat(iac.getIndexPermissions("_index"), is(notNullValue())); assertThat(iac.getIndexPermissions("_index").isGranted(), is(true)); assertThat(iac.getIndexPermissions("_index1"), is(notNullValue())); assertThat(iac.getIndexPermissions("_index1").isGranted(), is(false)); - iac = fromRole.authorize(CreateIndexAction.NAME, Sets.newHashSet("_index", "_index1"), md, fieldPermissionsCache); + iac = fromRole.authorize(CreateIndexAction.NAME, Sets.newHashSet("_index", "_index1"), md.getAliasAndIndexLookup(), + fieldPermissionsCache); assertThat(iac.getIndexPermissions("_index"), is(notNullValue())); assertThat(iac.getIndexPermissions("_index").isGranted(), is(true)); assertThat(iac.getIndexPermissions("_index1"), is(notNullValue())); @@ -84,34 +86,40 @@ public void testAuthorize() { Role limitedByRole = Role.builder("limited-role") .cluster(Collections.singleton(ClusterPrivilegeName.ALL), Collections.emptyList()).add(IndexPrivilege.READ, "_index") .add(IndexPrivilege.NONE, "_index1").build(); - iac = limitedByRole.authorize(SearchAction.NAME, Sets.newHashSet("_index", "_alias1"), md, fieldPermissionsCache); + iac = limitedByRole.authorize(SearchAction.NAME, Sets.newHashSet("_index", "_alias1"), md.getAliasAndIndexLookup(), + fieldPermissionsCache); assertThat(iac.getIndexPermissions("_index"), is(notNullValue())); assertThat(iac.getIndexPermissions("_index").isGranted(), is(true)); assertThat(iac.getIndexPermissions("_index1"), is(notNullValue())); assertThat(iac.getIndexPermissions("_index1").isGranted(), is(false)); - iac = limitedByRole.authorize(DeleteIndexAction.NAME, Sets.newHashSet("_index", "_alias1"), md, fieldPermissionsCache); + iac = limitedByRole.authorize(DeleteIndexAction.NAME, Sets.newHashSet("_index", "_alias1"), md.getAliasAndIndexLookup(), + fieldPermissionsCache); assertThat(iac.getIndexPermissions("_index"), is(notNullValue())); assertThat(iac.getIndexPermissions("_index").isGranted(), is(false)); assertThat(iac.getIndexPermissions("_index1"), is(notNullValue())); assertThat(iac.getIndexPermissions("_index1").isGranted(), is(false)); - iac = limitedByRole.authorize(CreateIndexAction.NAME, Sets.newHashSet("_index", "_alias1"), md, fieldPermissionsCache); + iac = limitedByRole.authorize(CreateIndexAction.NAME, Sets.newHashSet("_index", "_alias1"), md.getAliasAndIndexLookup(), + fieldPermissionsCache); assertThat(iac.getIndexPermissions("_index"), is(notNullValue())); assertThat(iac.getIndexPermissions("_index").isGranted(), is(false)); assertThat(iac.getIndexPermissions("_index1"), is(notNullValue())); assertThat(iac.getIndexPermissions("_index1").isGranted(), is(false)); Role role = LimitedRole.createLimitedRole(fromRole, limitedByRole); - iac = role.authorize(SearchAction.NAME, Sets.newHashSet("_index", "_alias1"), md, fieldPermissionsCache); + iac = role.authorize(SearchAction.NAME, Sets.newHashSet("_index", "_alias1"), md.getAliasAndIndexLookup(), + fieldPermissionsCache); assertThat(iac.getIndexPermissions("_index"), is(notNullValue())); assertThat(iac.getIndexPermissions("_index").isGranted(), is(true)); assertThat(iac.getIndexPermissions("_index1"), is(notNullValue())); 
assertThat(iac.getIndexPermissions("_index1").isGranted(), is(false)); - iac = role.authorize(DeleteIndexAction.NAME, Sets.newHashSet("_index", "_alias1"), md, fieldPermissionsCache); + iac = role.authorize(DeleteIndexAction.NAME, Sets.newHashSet("_index", "_alias1"), md.getAliasAndIndexLookup(), + fieldPermissionsCache); assertThat(iac.getIndexPermissions("_index"), is(notNullValue())); assertThat(iac.getIndexPermissions("_index").isGranted(), is(false)); assertThat(iac.getIndexPermissions("_index1"), is(notNullValue())); assertThat(iac.getIndexPermissions("_index1").isGranted(), is(false)); - iac = role.authorize(CreateIndexAction.NAME, Sets.newHashSet("_index", "_index1"), md, fieldPermissionsCache); + iac = role.authorize(CreateIndexAction.NAME, Sets.newHashSet("_index", "_index1"), md.getAliasAndIndexLookup(), + fieldPermissionsCache); assertThat(iac.getIndexPermissions("_index"), is(notNullValue())); assertThat(iac.getIndexPermissions("_index").isGranted(), is(false)); assertThat(iac.getIndexPermissions("_index1"), is(notNullValue())); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index 195ec3973f8f3..42144535a2f06 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -41,6 +41,7 @@ import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.settings.Settings; @@ -147,6 +148,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.SortedMap; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.is; @@ -586,8 +588,8 @@ private void assertMonitoringOnRestrictedIndices(Role role) { GetSettingsAction.NAME, IndicesShardStoresAction.NAME, UpgradeStatusAction.NAME, RecoveryAction.NAME); for (final String indexMonitoringActionName : indexMonitoringActionNamesList) { final Map authzMap = role.indices().authorize(indexMonitoringActionName, - Sets.newHashSet(RestrictedIndicesNames.INTERNAL_SECURITY_INDEX, RestrictedIndicesNames.SECURITY_INDEX_NAME), metaData, - fieldPermissionsCache); + Sets.newHashSet(RestrictedIndicesNames.INTERNAL_SECURITY_INDEX, RestrictedIndicesNames.SECURITY_INDEX_NAME), + metaData.getAliasAndIndexLookup(), fieldPermissionsCache); assertThat(authzMap.get(RestrictedIndicesNames.INTERNAL_SECURITY_INDEX).isGranted(), is(true)); assertThat(authzMap.get(RestrictedIndicesNames.SECURITY_INDEX_NAME).isGranted(), is(true)); } @@ -704,22 +706,24 @@ public void testSuperuserRole() { .build(); FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); + SortedMap lookup = metaData.getAliasAndIndexLookup(); Map authzMap = - superuserRole.indices().authorize(SearchAction.NAME, Sets.newHashSet("a1", "ba"), metaData, fieldPermissionsCache); + superuserRole.indices().authorize(SearchAction.NAME, Sets.newHashSet("a1", "ba"), lookup, fieldPermissionsCache); assertThat(authzMap.get("a1").isGranted(), is(true)); 
assertThat(authzMap.get("b").isGranted(), is(true)); - authzMap = superuserRole.indices().authorize(DeleteIndexAction.NAME, Sets.newHashSet("a1", "ba"), metaData, fieldPermissionsCache); + authzMap = + superuserRole.indices().authorize(DeleteIndexAction.NAME, Sets.newHashSet("a1", "ba"), lookup, fieldPermissionsCache); assertThat(authzMap.get("a1").isGranted(), is(true)); assertThat(authzMap.get("b").isGranted(), is(true)); - authzMap = superuserRole.indices().authorize(IndexAction.NAME, Sets.newHashSet("a2", "ba"), metaData, fieldPermissionsCache); + authzMap = superuserRole.indices().authorize(IndexAction.NAME, Sets.newHashSet("a2", "ba"), lookup, fieldPermissionsCache); assertThat(authzMap.get("a2").isGranted(), is(true)); assertThat(authzMap.get("b").isGranted(), is(true)); authzMap = superuserRole.indices() - .authorize(UpdateSettingsAction.NAME, Sets.newHashSet("aaaaaa", "ba"), metaData, fieldPermissionsCache); + .authorize(UpdateSettingsAction.NAME, Sets.newHashSet("aaaaaa", "ba"), lookup, fieldPermissionsCache); assertThat(authzMap.get("aaaaaa").isGranted(), is(true)); assertThat(authzMap.get("b").isGranted(), is(true)); authzMap = superuserRole.indices().authorize(randomFrom(IndexAction.NAME, DeleteIndexAction.NAME, SearchAction.NAME), - Sets.newHashSet(RestrictedIndicesNames.SECURITY_INDEX_NAME), metaData, fieldPermissionsCache); + Sets.newHashSet(RestrictedIndicesNames.SECURITY_INDEX_NAME), lookup, fieldPermissionsCache); assertThat(authzMap.get(RestrictedIndicesNames.SECURITY_INDEX_NAME).isGranted(), is(true)); assertThat(authzMap.get(RestrictedIndicesNames.INTERNAL_SECURITY_INDEX).isGranted(), is(true)); assertTrue(superuserRole.indices().check(SearchAction.NAME)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index b5514c8ad98fc..bf4a7080e6d71 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -114,6 +114,7 @@ import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.RealmSettings; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; import org.elasticsearch.xpack.core.security.authz.accesscontrol.SecurityIndexSearcherWrapper; @@ -134,12 +135,6 @@ import org.elasticsearch.xpack.security.action.TransportGetApiKeyAction; import org.elasticsearch.xpack.security.action.TransportInvalidateApiKeyAction; import org.elasticsearch.xpack.security.action.filter.SecurityActionFilter; -import org.elasticsearch.xpack.security.action.interceptor.BulkShardRequestInterceptor; -import org.elasticsearch.xpack.security.action.interceptor.IndicesAliasesRequestInterceptor; -import org.elasticsearch.xpack.security.action.interceptor.RequestInterceptor; -import org.elasticsearch.xpack.security.action.interceptor.ResizeRequestInterceptor; -import org.elasticsearch.xpack.security.action.interceptor.SearchRequestInterceptor; -import org.elasticsearch.xpack.security.action.interceptor.UpdateRequestInterceptor; import org.elasticsearch.xpack.security.action.privilege.TransportDeletePrivilegesAction; import 
org.elasticsearch.xpack.security.action.privilege.TransportGetPrivilegesAction; import org.elasticsearch.xpack.security.action.privilege.TransportPutPrivilegesAction; @@ -180,6 +175,12 @@ import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.SecuritySearchOperationListener; import org.elasticsearch.xpack.security.authz.accesscontrol.OptOutQueryCache; +import org.elasticsearch.xpack.security.authz.interceptor.BulkShardRequestInterceptor; +import org.elasticsearch.xpack.security.authz.interceptor.IndicesAliasesRequestInterceptor; +import org.elasticsearch.xpack.security.authz.interceptor.RequestInterceptor; +import org.elasticsearch.xpack.security.authz.interceptor.ResizeRequestInterceptor; +import org.elasticsearch.xpack.security.authz.interceptor.SearchRequestInterceptor; +import org.elasticsearch.xpack.security.authz.interceptor.UpdateRequestInterceptor; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; import org.elasticsearch.xpack.security.authz.store.FileRolesStore; import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore; @@ -436,36 +437,22 @@ Collection createComponents(Client client, ThreadPool threadPool, Cluste for (SecurityExtension extension : securityExtensions) { rolesProviders.addAll(extension.getRolesProviders(settings, resourceWatcherService)); } - final CompositeRolesStore allRolesStore = new CompositeRolesStore(settings, fileRolesStore, nativeRolesStore, - reservedRolesStore, privilegeStore, rolesProviders, threadPool.getThreadContext(), getLicenseState(), fieldPermissionsCache); + + final ApiKeyService apiKeyService = new ApiKeyService(settings, Clock.systemUTC(), client, securityIndex.get(), clusterService); + components.add(apiKeyService); + final CompositeRolesStore allRolesStore = new CompositeRolesStore(settings, fileRolesStore, nativeRolesStore, reservedRolesStore, + privilegeStore, rolesProviders, threadPool.getThreadContext(), getLicenseState(), fieldPermissionsCache, apiKeyService); securityIndex.get().addIndexStateListener(allRolesStore::onSecurityIndexStateChange); // to keep things simple, just invalidate all cached entries on license change. 
this happens so rarely that the impact should be // minimal getLicenseState().addListener(allRolesStore::invalidateAll); - final ApiKeyService apiKeyService = new ApiKeyService(settings, Clock.systemUTC(), client, securityIndex.get(), clusterService, - allRolesStore); - components.add(apiKeyService); - final AuthenticationFailureHandler failureHandler = createAuthenticationFailureHandler(realms); authcService.set(new AuthenticationService(settings, realms, auditTrailService, failureHandler, threadPool, anonymousUser, tokenService, apiKeyService)); components.add(authcService.get()); securityIndex.get().addIndexStateListener(authcService.get()::onSecurityIndexStateChange); - final AuthorizationService authzService = new AuthorizationService(settings, allRolesStore, clusterService, - auditTrailService, failureHandler, threadPool, anonymousUser, apiKeyService, fieldPermissionsCache); - components.add(nativeRolesStore); // used by roles actions - components.add(reservedRolesStore); // used by roles actions - components.add(allRolesStore); // for SecurityFeatureSet and clear roles cache - components.add(authzService); - - ipFilter.set(new IPFilter(settings, auditTrailService, clusterService.getClusterSettings(), getLicenseState())); - components.add(ipFilter.get()); - DestructiveOperations destructiveOperations = new DestructiveOperations(settings, clusterService.getClusterSettings()); - securityInterceptor.set(new SecurityServerTransportInterceptor(settings, threadPool, authcService.get(), - authzService, getLicenseState(), getSslService(), securityContext.get(), destructiveOperations, clusterService)); - Set requestInterceptors = Sets.newHashSet( new ResizeRequestInterceptor(threadPool, getLicenseState(), auditTrailService), new IndicesAliasesRequestInterceptor(threadPool.getThreadContext(), getLicenseState(), auditTrailService)); @@ -478,12 +465,46 @@ Collection createComponents(Client client, ThreadPool threadPool, Cluste } requestInterceptors = Collections.unmodifiableSet(requestInterceptors); + final AuthorizationService authzService = new AuthorizationService(settings, allRolesStore, clusterService, + auditTrailService, failureHandler, threadPool, anonymousUser, getAuthorizationEngine(), requestInterceptors, + getLicenseState()); + + components.add(nativeRolesStore); // used by roles actions + components.add(reservedRolesStore); // used by roles actions + components.add(allRolesStore); // for SecurityFeatureSet and clear roles cache + components.add(authzService); + + ipFilter.set(new IPFilter(settings, auditTrailService, clusterService.getClusterSettings(), getLicenseState())); + components.add(ipFilter.get()); + DestructiveOperations destructiveOperations = new DestructiveOperations(settings, clusterService.getClusterSettings()); + securityInterceptor.set(new SecurityServerTransportInterceptor(settings, threadPool, authcService.get(), + authzService, getLicenseState(), getSslService(), securityContext.get(), destructiveOperations, clusterService)); + securityActionFilter.set(new SecurityActionFilter(authcService.get(), authzService, getLicenseState(), - requestInterceptors, threadPool, securityContext.get(), destructiveOperations)); + threadPool, securityContext.get(), destructiveOperations)); return components; } + private AuthorizationEngine getAuthorizationEngine() { + AuthorizationEngine authorizationEngine = null; + String extensionName = null; + for (SecurityExtension extension : securityExtensions) { + final AuthorizationEngine extensionEngine = 
extension.getAuthorizationEngine(settings); + if (extensionEngine != null && authorizationEngine != null) { + throw new IllegalStateException("Extensions [" + extensionName + "] and [" + extension.toString() + "] " + + "both set an authorization engine"); + } + authorizationEngine = extensionEngine; + extensionName = extension.toString(); + } + + if (authorizationEngine != null) { + logger.debug("Using authorization engine from extension [" + extensionName + "]"); + } + return authorizationEngine; + } + private AuthenticationFailureHandler createAuthenticationFailureHandler(final Realms realms) { AuthenticationFailureHandler failureHandler = null; String extensionName = null; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportCreateApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportCreateApiKeyAction.java index 53f9209ff2d7e..09612c5e01f0f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportCreateApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportCreateApiKeyAction.java @@ -19,6 +19,10 @@ import org.elasticsearch.xpack.core.security.action.CreateApiKeyResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.security.authc.ApiKeyService; +import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; + +import java.util.Arrays; +import java.util.HashSet; /** * Implementation of the action needed to create an API key @@ -27,13 +31,15 @@ public final class TransportCreateApiKeyAction extends HandledTransportAction) CreateApiKeyRequest::new); this.apiKeyService = apiKeyService; this.securityContext = context; + this.rolesStore = rolesStore; } @Override @@ -42,7 +48,9 @@ protected void doExecute(Task task, CreateApiKeyRequest request, ActionListener< if (authentication == null) { listener.onFailure(new IllegalStateException("authentication is required")); } else { - apiKeyService.createApiKey(authentication, request, listener); + rolesStore.getRoleDescriptors(new HashSet<>(Arrays.asList(authentication.getUser().roles())), + ActionListener.wrap(roleDescriptors -> apiKeyService.createApiKey(authentication, request, roleDescriptors, listener), + listener::onFailure)); } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java index a0ab370e6dba2..06d6446057bf3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilter.java @@ -32,13 +32,11 @@ import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.xpack.security.action.SecurityActionMapper; -import org.elasticsearch.xpack.security.action.interceptor.RequestInterceptor; import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.AuthorizationUtils; import java.io.IOException; -import java.util.Set; import java.util.function.Predicate; public class SecurityActionFilter implements ActionFilter { @@ -50,19 +48,17 @@ 
public class SecurityActionFilter implements ActionFilter { private final AuthenticationService authcService; private final AuthorizationService authzService; private final SecurityActionMapper actionMapper = new SecurityActionMapper(); - private final Set requestInterceptors; private final XPackLicenseState licenseState; private final ThreadContext threadContext; private final SecurityContext securityContext; private final DestructiveOperations destructiveOperations; public SecurityActionFilter(AuthenticationService authcService, AuthorizationService authzService, - XPackLicenseState licenseState, Set requestInterceptors, ThreadPool threadPool, + XPackLicenseState licenseState, ThreadPool threadPool, SecurityContext securityContext, DestructiveOperations destructiveOperations) { this.authcService = authcService; this.authzService = authzService; this.licenseState = licenseState; - this.requestInterceptors = requestInterceptors; this.threadContext = threadPool.getThreadContext(); this.securityContext = securityContext; this.destructiveOperations = destructiveOperations; @@ -164,21 +160,8 @@ private void authorizeRequest(Authentication aut if (authentication == null) { listener.onFailure(new IllegalArgumentException("authentication must be non null for authorization")); } else { - final AuthorizationUtils.AsyncAuthorizer asyncAuthorizer = new AuthorizationUtils.AsyncAuthorizer(authentication, listener, - (userRoles, runAsRoles) -> { - authzService.authorize(authentication, securityAction, request, userRoles, runAsRoles); - /* - * We use a separate concept for code that needs to be run after authentication and authorization that could - * affect the running of the action. This is done to make it more clear of the state of the request. - */ - for (RequestInterceptor interceptor : requestInterceptors) { - if (interceptor.supports(request)) { - interceptor.intercept(request, authentication, runAsRoles != null ? runAsRoles : userRoles, securityAction); - } - } - listener.onResponse(null); - }); - asyncAuthorizer.authorize(authzService); + authzService.authorize(authentication, securityAction, request, ActionListener.wrap(ignore -> listener.onResponse(null), + listener::onFailure)); } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/FieldAndDocumentLevelSecurityRequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/FieldAndDocumentLevelSecurityRequestInterceptor.java deleted file mode 100644 index c9acd02e74cde..0000000000000 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/FieldAndDocumentLevelSecurityRequestInterceptor.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.security.action.interceptor; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.action.IndicesRequest; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.xpack.core.security.authc.Authentication; -import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; -import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; -import org.elasticsearch.xpack.core.security.authz.permission.Role; - -/** - * Base class for interceptors that disables features when field level security is configured for indices a request - * is going to execute on. - */ -abstract class FieldAndDocumentLevelSecurityRequestInterceptor implements - RequestInterceptor { - - private final ThreadContext threadContext; - private final XPackLicenseState licenseState; - private final Logger logger; - - FieldAndDocumentLevelSecurityRequestInterceptor(ThreadContext threadContext, XPackLicenseState licenseState) { - this.threadContext = threadContext; - this.licenseState = licenseState; - this.logger = LogManager.getLogger(getClass()); - } - - @Override - public void intercept(Request request, Authentication authentication, Role userPermissions, String action) { - if (licenseState.isDocumentAndFieldLevelSecurityAllowed()) { - final IndicesAccessControl indicesAccessControl = threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); - for (String index : request.indices()) { - IndicesAccessControl.IndexAccessControl indexAccessControl = indicesAccessControl.getIndexPermissions(index); - if (indexAccessControl != null) { - boolean fieldLevelSecurityEnabled = indexAccessControl.getFieldPermissions().hasFieldLevelSecurity(); - boolean documentLevelSecurityEnabled = indexAccessControl.getDocumentPermissions().hasDocumentLevelPermissions(); - if (fieldLevelSecurityEnabled || documentLevelSecurityEnabled) { - if (fieldLevelSecurityEnabled || documentLevelSecurityEnabled) { - logger.trace("intercepted request for index [{}] with field level access controls [{}] document level access " + - "controls [{}]. disabling conflicting features", index, fieldLevelSecurityEnabled, - documentLevelSecurityEnabled); - } - disableFeatures(request, fieldLevelSecurityEnabled, documentLevelSecurityEnabled); - return; - } - } - logger.trace("intercepted request for index [{}] without field or document level access controls", index); - } - } - } - - protected abstract void disableFeatures(Request request, boolean fieldLevelSecurityEnabled, boolean documentLevelSecurityEnabled); - -} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/IndicesAliasesRequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/IndicesAliasesRequestInterceptor.java deleted file mode 100644 index 10c9ab0365341..0000000000000 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/IndicesAliasesRequestInterceptor.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.security.action.interceptor; - -import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; -import org.elasticsearch.ElasticsearchSecurityException; -import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.xpack.core.security.authc.Authentication; -import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; -import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; -import org.elasticsearch.xpack.core.security.authz.permission.Role; -import org.elasticsearch.xpack.core.security.support.Exceptions; -import org.elasticsearch.xpack.security.audit.AuditTrailService; -import org.elasticsearch.xpack.security.audit.AuditUtil; - -import java.util.HashMap; -import java.util.Map; - -public final class IndicesAliasesRequestInterceptor implements RequestInterceptor { - - private final ThreadContext threadContext; - private final XPackLicenseState licenseState; - private final AuditTrailService auditTrailService; - - public IndicesAliasesRequestInterceptor(ThreadContext threadContext, XPackLicenseState licenseState, - AuditTrailService auditTrailService) { - this.threadContext = threadContext; - this.licenseState = licenseState; - this.auditTrailService = auditTrailService; - } - - @Override - public void intercept(IndicesAliasesRequest request, Authentication authentication, Role userPermissions, String action) { - final XPackLicenseState frozenLicenseState = licenseState.copyCurrentLicenseState(); - if (frozenLicenseState.isAuthAllowed()) { - if (frozenLicenseState.isDocumentAndFieldLevelSecurityAllowed()) { - IndicesAccessControl indicesAccessControl = threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); - for (IndicesAliasesRequest.AliasActions aliasAction : request.getAliasActions()) { - if (aliasAction.actionType() == IndicesAliasesRequest.AliasActions.Type.ADD) { - for (String index : aliasAction.indices()) { - IndicesAccessControl.IndexAccessControl indexAccessControl = indicesAccessControl.getIndexPermissions(index); - if (indexAccessControl != null) { - final boolean fls = indexAccessControl.getFieldPermissions().hasFieldLevelSecurity(); - final boolean dls = indexAccessControl.getDocumentPermissions().hasDocumentLevelPermissions(); - if (fls || dls) { - throw new ElasticsearchSecurityException("Alias requests are not allowed for users who have " + - "field or document level security enabled on one of the indices", RestStatus.BAD_REQUEST); - } - } - } - } - } - } - - Map permissionsMap = new HashMap<>(); - for (IndicesAliasesRequest.AliasActions aliasAction : request.getAliasActions()) { - if (aliasAction.actionType() == IndicesAliasesRequest.AliasActions.Type.ADD) { - for (String index : aliasAction.indices()) { - Automaton indexPermissions = - permissionsMap.computeIfAbsent(index, userPermissions.indices()::allowedActionsMatcher); - for (String alias : aliasAction.aliases()) { - Automaton aliasPermissions = - permissionsMap.computeIfAbsent(alias, userPermissions.indices()::allowedActionsMatcher); - if (Operations.subsetOf(aliasPermissions, indexPermissions) == false) { - // TODO we've already audited a access granted event so this is going to look ugly - 
auditTrailService.accessDenied(AuditUtil.extractRequestId(threadContext), authentication, action, request, - userPermissions.names()); - throw Exceptions.authorizationError("Adding an alias is not allowed when the alias " + - "has more permissions than any of the indices"); - } - } - } - } - } - } - } - - @Override - public boolean supports(TransportRequest request) { - return request instanceof IndicesAliasesRequest; - } -} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/RequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/RequestInterceptor.java deleted file mode 100644 index c994626a7f402..0000000000000 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/RequestInterceptor.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.action.interceptor; - -import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.xpack.core.security.authc.Authentication; -import org.elasticsearch.xpack.core.security.authz.permission.Role; - -/** - * A request interceptor can introspect a request and modify it. - */ -public interface RequestInterceptor { - - /** - * If {@link #supports(TransportRequest)} returns true this interceptor will introspect the request - * and potentially modify it. - */ - void intercept(Request request, Authentication authentication, Role userPermissions, String action); - - /** - * Returns whether this request interceptor should intercept the specified request. - */ - boolean supports(TransportRequest request); - -} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/ResizeRequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/ResizeRequestInterceptor.java deleted file mode 100644 index 8f2e99d7defde..0000000000000 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/ResizeRequestInterceptor.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.security.action.interceptor; - -import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; -import org.elasticsearch.ElasticsearchSecurityException; -import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.xpack.core.security.authc.Authentication; -import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; -import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; -import org.elasticsearch.xpack.core.security.authz.permission.Role; -import org.elasticsearch.xpack.core.security.support.Exceptions; -import org.elasticsearch.xpack.security.audit.AuditTrailService; - -import static org.elasticsearch.xpack.security.audit.AuditUtil.extractRequestId; - -public final class ResizeRequestInterceptor implements RequestInterceptor { - - private final ThreadContext threadContext; - private final XPackLicenseState licenseState; - private final AuditTrailService auditTrailService; - - public ResizeRequestInterceptor(ThreadPool threadPool, XPackLicenseState licenseState, - AuditTrailService auditTrailService) { - this.threadContext = threadPool.getThreadContext(); - this.licenseState = licenseState; - this.auditTrailService = auditTrailService; - } - - @Override - public void intercept(ResizeRequest request, Authentication authentication, Role userPermissions, String action) { - final XPackLicenseState frozenLicenseState = licenseState.copyCurrentLicenseState(); - if (frozenLicenseState.isAuthAllowed()) { - if (frozenLicenseState.isDocumentAndFieldLevelSecurityAllowed()) { - IndicesAccessControl indicesAccessControl = - threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); - IndicesAccessControl.IndexAccessControl indexAccessControl = - indicesAccessControl.getIndexPermissions(request.getSourceIndex()); - if (indexAccessControl != null) { - final boolean fls = indexAccessControl.getFieldPermissions().hasFieldLevelSecurity(); - final boolean dls = indexAccessControl.getDocumentPermissions().hasDocumentLevelPermissions(); - if (fls || dls) { - throw new ElasticsearchSecurityException("Resize requests are not allowed for users when " + - "field or document level security is enabled on the source index", RestStatus.BAD_REQUEST); - } - } - } - - // ensure that the user would have the same level of access OR less on the target index - final Automaton sourceIndexPermissions = userPermissions.indices().allowedActionsMatcher(request.getSourceIndex()); - final Automaton targetIndexPermissions = - userPermissions.indices().allowedActionsMatcher(request.getTargetIndexRequest().index()); - if (Operations.subsetOf(targetIndexPermissions, sourceIndexPermissions) == false) { - // TODO we've already audited a access granted event so this is going to look ugly - auditTrailService.accessDenied(extractRequestId(threadContext), authentication, action, request, userPermissions.names()); - throw Exceptions.authorizationError("Resizing an index is not allowed when the target index " + - "has more permissions than the source index"); - } - } - } - - @Override - public boolean supports(TransportRequest request) { - return request instanceof ResizeRequest; - } -} diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUserPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUserPrivilegesAction.java index 15060570538af..00232033b89ef 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUserPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUserPrivilegesAction.java @@ -5,13 +5,9 @@ */ package org.elasticsearch.xpack.security.action.user; -import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -20,26 +16,9 @@ import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesRequest; import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition; -import org.elasticsearch.xpack.core.security.authz.permission.IndicesPermission; -import org.elasticsearch.xpack.core.security.authz.permission.Role; -import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.Privilege; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.authz.AuthorizationService; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.Set; -import java.util.TreeSet; - -import static org.elasticsearch.common.Strings.arrayToCommaDelimitedString; - /** * Transport action for {@link GetUserPrivilegesAction} */ @@ -67,68 +46,6 @@ protected void doExecute(Task task, GetUserPrivilegesRequest request, ActionList return; } - authorizationService.roles(user, authentication, ActionListener.wrap( - role -> listener.onResponse(buildResponseObject(role)), - listener::onFailure)); - } - - // package protected for testing - GetUserPrivilegesResponse buildResponseObject(Role userRole) { - logger.trace(() -> new ParameterizedMessage("List privileges for role [{}]", arrayToCommaDelimitedString(userRole.names()))); - - // We use sorted sets for Strings because they will typically be small, and having a predictable order allows for simpler testing - final Set cluster = new TreeSet<>(); - // But we don't have a meaningful ordering for objects like ConditionalClusterPrivilege, so the tests work with "random" ordering - final Set conditionalCluster = new HashSet<>(); - for (Tuple tup : userRole.cluster().privileges()) { - if (tup.v2() == null) { - if (ClusterPrivilege.NONE.equals(tup.v1()) == false) { - cluster.addAll(tup.v1().name()); - } - } else { - conditionalCluster.add(tup.v2()); - } - } - - final Set 
indices = new LinkedHashSet<>(); - for (IndicesPermission.Group group : userRole.indices().groups()) { - final Set queries = group.getQuery() == null ? Collections.emptySet() : group.getQuery(); - final Set fieldSecurity = group.getFieldPermissions().hasFieldLevelSecurity() - ? group.getFieldPermissions().getFieldPermissionsDefinition().getFieldGrantExcludeGroups() : Collections.emptySet(); - indices.add(new GetUserPrivilegesResponse.Indices( - Arrays.asList(group.indices()), - group.privilege().name(), - fieldSecurity, - queries, - group.allowRestrictedIndices() - )); - } - - final Set application = new LinkedHashSet<>(); - for (String applicationName : userRole.application().getApplicationNames()) { - for (ApplicationPrivilege privilege : userRole.application().getPrivileges(applicationName)) { - final Set resources = userRole.application().getResourcePatterns(privilege); - if (resources.isEmpty()) { - logger.trace("No resources defined in application privilege {}", privilege); - } else { - application.add(RoleDescriptor.ApplicationResourcePrivileges.builder() - .application(applicationName) - .privileges(privilege.name()) - .resources(resources) - .build()); - } - } - } - - final Privilege runAsPrivilege = userRole.runAs().getPrivilege(); - final Set runAs; - if (Operations.isEmpty(runAsPrivilege.getAutomaton())) { - runAs = Collections.emptySet(); - } else { - runAs = runAsPrivilege.name(); - } - - return new GetUserPrivilegesResponse(cluster, conditionalCluster, indices, application, runAs); + authorizationService.retrieveUserPrivileges(authentication, request, listener); } - } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java index 9164d9bb7df32..ae400172bf110 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java @@ -5,13 +5,9 @@ */ package org.elasticsearch.xpack.security.action.user; -import com.google.common.collect.Sets; - -import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -21,20 +17,13 @@ import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.permission.ResourcePrivileges; -import org.elasticsearch.xpack.core.security.authz.permission.ResourcePrivilegesMap; -import org.elasticsearch.xpack.core.security.authz.permission.Role; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; -import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.authz.AuthorizationService; import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore; import java.util.Arrays; import 
java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; import java.util.Set; import java.util.stream.Collectors; @@ -69,10 +58,8 @@ protected void doExecute(Task task, HasPrivilegesRequest request, ActionListener return; } - authorizationService.roles(user, authentication, ActionListener.wrap( - role -> resolveApplicationPrivileges(request, ActionListener.wrap( - applicationPrivilegeLookup -> checkPrivileges(request, role, applicationPrivilegeLookup, listener), - listener::onFailure)), + resolveApplicationPrivileges(request, ActionListener.wrap(applicationPrivilegeDescriptors -> + authorizationService.checkPrivileges(authentication, request, applicationPrivilegeDescriptors, listener), listener::onFailure)); } @@ -82,56 +69,9 @@ private void resolveApplicationPrivileges(HasPrivilegesRequest request, privilegeStore.getPrivileges(applications, null, listener); } - private Set getApplicationNames(HasPrivilegesRequest request) { + public static Set getApplicationNames(HasPrivilegesRequest request) { return Arrays.stream(request.applicationPrivileges()) .map(RoleDescriptor.ApplicationResourcePrivileges::getApplication) .collect(Collectors.toSet()); } - - private void checkPrivileges(HasPrivilegesRequest request, Role userRole, - Collection applicationPrivileges, - ActionListener listener) { - logger.trace(() -> new ParameterizedMessage("Check whether role [{}] has privileges cluster=[{}] index=[{}] application=[{}]", - Strings.arrayToCommaDelimitedString(userRole.names()), - Strings.arrayToCommaDelimitedString(request.clusterPrivileges()), - Strings.arrayToCommaDelimitedString(request.indexPrivileges()), - Strings.arrayToCommaDelimitedString(request.applicationPrivileges()) - )); - - Map cluster = new HashMap<>(); - for (String checkAction : request.clusterPrivileges()) { - final ClusterPrivilege checkPrivilege = ClusterPrivilege.get(Collections.singleton(checkAction)); - cluster.put(checkAction, userRole.grants(checkPrivilege)); - } - boolean allMatch = cluster.values().stream().allMatch(Boolean::booleanValue); - - ResourcePrivilegesMap.Builder combineIndicesResourcePrivileges = ResourcePrivilegesMap.builder(); - for (RoleDescriptor.IndicesPrivileges check : request.indexPrivileges()) { - ResourcePrivilegesMap resourcePrivileges = userRole.checkIndicesPrivileges(Sets.newHashSet(check.getIndices()), - check.allowRestrictedIndices(), Sets.newHashSet(check.getPrivileges())); - allMatch = allMatch && resourcePrivileges.allAllowed(); - combineIndicesResourcePrivileges.addResourcePrivilegesMap(resourcePrivileges); - } - ResourcePrivilegesMap allIndices = combineIndicesResourcePrivileges.build(); - allMatch = allMatch && allIndices.allAllowed(); - - final Map> privilegesByApplication = new HashMap<>(); - for (String applicationName : getApplicationNames(request)) { - ResourcePrivilegesMap.Builder builder = ResourcePrivilegesMap.builder(); - for (RoleDescriptor.ApplicationResourcePrivileges p : request.applicationPrivileges()) { - if (applicationName.equals(p.getApplication())) { - ResourcePrivilegesMap appPrivsByResourceMap = userRole.checkApplicationResourcePrivileges(applicationName, - Sets.newHashSet(p.getResources()), Sets.newHashSet(p.getPrivileges()), applicationPrivileges); - builder.addResourcePrivilegesMap(appPrivsByResourceMap); - } - } - ResourcePrivilegesMap resourcePrivsForApplication = builder.build(); - allMatch = allMatch && resourcePrivsForApplication.allAllowed(); - privilegesByApplication.put(applicationName, 
resourcePrivsForApplication.getResourceToResourcePrivileges().values()); - } - - listener.onResponse(new HasPrivilegesResponse(request.username(), allMatch, cluster, - allIndices.getResourceToResourcePrivileges().values(), privilegesByApplication)); - } - } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditTrail.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditTrail.java index 4f5413c30d1f9..e99b822e1dca1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditTrail.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditTrail.java @@ -10,6 +10,7 @@ import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; import org.elasticsearch.xpack.security.transport.filter.SecurityIpFilterRule; import java.net.InetAddress; @@ -40,9 +41,11 @@ public interface AuditTrail { void authenticationFailed(String requestId, String realm, AuthenticationToken token, RestRequest request); - void accessGranted(String requestId, Authentication authentication, String action, TransportMessage message, String[] roleNames); + void accessGranted(String requestId, Authentication authentication, String action, TransportMessage message, + AuthorizationInfo authorizationInfo); - void accessDenied(String requestId, Authentication authentication, String action, TransportMessage message, String[] roleNames); + void accessDenied(String requestId, Authentication authentication, String action, TransportMessage message, + AuthorizationInfo authorizationInfo); void tamperedRequest(String requestId, RestRequest request); @@ -60,10 +63,13 @@ public interface AuditTrail { void connectionDenied(InetAddress inetAddress, String profile, SecurityIpFilterRule rule); - void runAsGranted(String requestId, Authentication authentication, String action, TransportMessage message, String[] roleNames); + void runAsGranted(String requestId, Authentication authentication, String action, TransportMessage message, + AuthorizationInfo authorizationInfo); - void runAsDenied(String requestId, Authentication authentication, String action, TransportMessage message, String[] roleNames); + void runAsDenied(String requestId, Authentication authentication, String action, TransportMessage message, + AuthorizationInfo authorizationInfo); - void runAsDenied(String requestId, Authentication authentication, RestRequest request, String[] roleNames); + void runAsDenied(String requestId, Authentication authentication, RestRequest request, + AuthorizationInfo authorizationInfo); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditTrailService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditTrailService.java index d6645227f8eb6..38bb93d8bcf50 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditTrailService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditTrailService.java @@ -11,6 +11,7 @@ import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; import org.elasticsearch.xpack.core.security.user.User; +import 
org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; import org.elasticsearch.xpack.security.transport.filter.SecurityIpFilterRule; import java.net.InetAddress; @@ -128,19 +129,21 @@ public void authenticationFailed(String requestId, String realm, AuthenticationT } @Override - public void accessGranted(String requestId, Authentication authentication, String action, TransportMessage msg, String[] roleNames) { + public void accessGranted(String requestId, Authentication authentication, String action, TransportMessage msg, + AuthorizationInfo authorizationInfo) { if (licenseState.isAuditingAllowed()) { for (AuditTrail auditTrail : auditTrails) { - auditTrail.accessGranted(requestId, authentication, action, msg, roleNames); + auditTrail.accessGranted(requestId, authentication, action, msg, authorizationInfo); } } } @Override - public void accessDenied(String requestId, Authentication authentication, String action, TransportMessage message, String[] roleNames) { + public void accessDenied(String requestId, Authentication authentication, String action, TransportMessage message, + AuthorizationInfo authorizationInfo) { if (licenseState.isAuditingAllowed()) { for (AuditTrail auditTrail : auditTrails) { - auditTrail.accessDenied(requestId, authentication, action, message, roleNames); + auditTrail.accessDenied(requestId, authentication, action, message, authorizationInfo); } } } @@ -191,28 +194,31 @@ public void connectionDenied(InetAddress inetAddress, String profile, SecurityIp } @Override - public void runAsGranted(String requestId, Authentication authentication, String action, TransportMessage message, String[] roleNames) { + public void runAsGranted(String requestId, Authentication authentication, String action, TransportMessage message, + AuthorizationInfo authorizationInfo) { if (licenseState.isAuditingAllowed()) { for (AuditTrail auditTrail : auditTrails) { - auditTrail.runAsGranted(requestId, authentication, action, message, roleNames); + auditTrail.runAsGranted(requestId, authentication, action, message, authorizationInfo); } } } @Override - public void runAsDenied(String requestId, Authentication authentication, String action, TransportMessage message, String[] roleNames) { + public void runAsDenied(String requestId, Authentication authentication, String action, TransportMessage message, + AuthorizationInfo authorizationInfo) { if (licenseState.isAuditingAllowed()) { for (AuditTrail auditTrail : auditTrails) { - auditTrail.runAsDenied(requestId, authentication, action, message, roleNames); + auditTrail.runAsDenied(requestId, authentication, action, message, authorizationInfo); } } } @Override - public void runAsDenied(String requestId, Authentication authentication, RestRequest request, String[] roleNames) { + public void runAsDenied(String requestId, Authentication authentication, RestRequest request, + AuthorizationInfo authorizationInfo) { if (licenseState.isAuditingAllowed()) { for (AuditTrail auditTrail : auditTrails) { - auditTrail.runAsDenied(requestId, authentication, request, roleNames); + auditTrail.runAsDenied(requestId, authentication, request, authorizationInfo); } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java index d75eba4a42a0a..03d1d5045262f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java 
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.core.security.user.XPackUser; import org.elasticsearch.xpack.security.audit.AuditLevel; import org.elasticsearch.xpack.security.audit.AuditTrail; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; import org.elasticsearch.xpack.security.rest.RemoteHostHeader; import org.elasticsearch.xpack.security.transport.filter.IPFilter; import org.elasticsearch.xpack.security.transport.filter.SecurityIpFilterRule; @@ -50,6 +51,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Objects; import java.util.Optional; import java.util.TreeMap; @@ -415,13 +417,14 @@ public void authenticationFailed(String requestId, String realm, AuthenticationT } @Override - public void accessGranted(String requestId, Authentication authentication, String action, TransportMessage msg, String[] roleNames) { + public void accessGranted(String requestId, Authentication authentication, String action, TransportMessage msg, + AuthorizationInfo authorizationInfo) { final User user = authentication.getUser(); final boolean isSystem = SystemUser.is(user) || XPackUser.is(user); if ((isSystem && events.contains(SYSTEM_ACCESS_GRANTED)) || ((isSystem == false) && events.contains(ACCESS_GRANTED))) { final Optional indices = indices(msg); if (eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(user), - Optional.of(effectiveRealmName(authentication)), Optional.of(roleNames), indices)) == false) { + Optional.of(effectiveRealmName(authentication)), Optional.of(authorizationInfo), indices)) == false) { final StringMapMessage logEntry = new LogEntryBuilder() .with(EVENT_TYPE_FIELD_NAME, TRANSPORT_ORIGIN_FIELD_VALUE) .with(EVENT_ACTION_FIELD_NAME, "access_granted") @@ -431,9 +434,9 @@ public void accessGranted(String requestId, Authentication authentication, Strin .withSubject(authentication) .withRestOrTransportOrigin(msg, threadContext) .with(INDICES_FIELD_NAME, indices.orElse(null)) - .with(PRINCIPAL_ROLES_FIELD_NAME, roleNames) .withOpaqueId(threadContext) .withXForwardedFor(threadContext) + .with(authorizationInfo.asMap()) .build(); logger.info(logEntry); } @@ -441,11 +444,12 @@ public void accessGranted(String requestId, Authentication authentication, Strin } @Override - public void accessDenied(String requestId, Authentication authentication, String action, TransportMessage message, String[] roleNames) { + public void accessDenied(String requestId, Authentication authentication, String action, TransportMessage message, + AuthorizationInfo authorizationInfo) { if (events.contains(ACCESS_DENIED)) { final Optional indices = indices(message); if (eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(authentication.getUser()), - Optional.of(effectiveRealmName(authentication)), Optional.of(roleNames), indices)) == false) { + Optional.of(effectiveRealmName(authentication)), Optional.of(authorizationInfo), indices)) == false) { final StringMapMessage logEntry = new LogEntryBuilder() .with(EVENT_TYPE_FIELD_NAME, TRANSPORT_ORIGIN_FIELD_VALUE) .with(EVENT_ACTION_FIELD_NAME, "access_denied") @@ -455,7 +459,7 @@ public void accessDenied(String requestId, Authentication authentication, String .withSubject(authentication) .withRestOrTransportOrigin(message, threadContext) .with(INDICES_FIELD_NAME, indices.orElse(null)) - 
.with(PRINCIPAL_ROLES_FIELD_NAME, roleNames) + .with(authorizationInfo.asMap()) .withOpaqueId(threadContext) .withXForwardedFor(threadContext) .build(); @@ -563,11 +567,12 @@ public void connectionDenied(InetAddress inetAddress, String profile, SecurityIp } @Override - public void runAsGranted(String requestId, Authentication authentication, String action, TransportMessage message, String[] roleNames) { + public void runAsGranted(String requestId, Authentication authentication, String action, TransportMessage message, + AuthorizationInfo authorizationInfo) { if (events.contains(RUN_AS_GRANTED)) { final Optional indices = indices(message); if (eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(authentication.getUser()), - Optional.of(effectiveRealmName(authentication)), Optional.of(roleNames), indices)) == false) { + Optional.of(effectiveRealmName(authentication)), Optional.of(authorizationInfo), indices)) == false) { final StringMapMessage logEntry = new LogEntryBuilder() .with(EVENT_TYPE_FIELD_NAME, TRANSPORT_ORIGIN_FIELD_VALUE) .with(EVENT_ACTION_FIELD_NAME, "run_as_granted") @@ -577,7 +582,7 @@ public void runAsGranted(String requestId, Authentication authentication, String .withRunAsSubject(authentication) .withRestOrTransportOrigin(message, threadContext) .with(INDICES_FIELD_NAME, indices.orElse(null)) - .with(PRINCIPAL_ROLES_FIELD_NAME, roleNames) + .with(authorizationInfo.asMap()) .withOpaqueId(threadContext) .withXForwardedFor(threadContext) .build(); @@ -587,11 +592,12 @@ public void runAsGranted(String requestId, Authentication authentication, String } @Override - public void runAsDenied(String requestId, Authentication authentication, String action, TransportMessage message, String[] roleNames) { + public void runAsDenied(String requestId, Authentication authentication, String action, TransportMessage message, + AuthorizationInfo authorizationInfo) { if (events.contains(RUN_AS_DENIED)) { final Optional indices = indices(message); if (eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(authentication.getUser()), - Optional.of(effectiveRealmName(authentication)), Optional.of(roleNames), indices)) == false) { + Optional.of(effectiveRealmName(authentication)), Optional.of(authorizationInfo), indices)) == false) { final StringMapMessage logEntry = new LogEntryBuilder() .with(EVENT_TYPE_FIELD_NAME, TRANSPORT_ORIGIN_FIELD_VALUE) .with(EVENT_ACTION_FIELD_NAME, "run_as_denied") @@ -601,7 +607,7 @@ public void runAsDenied(String requestId, Authentication authentication, String .withRunAsSubject(authentication) .withRestOrTransportOrigin(message, threadContext) .with(INDICES_FIELD_NAME, indices.orElse(null)) - .with(PRINCIPAL_ROLES_FIELD_NAME, roleNames) + .with(authorizationInfo.asMap()) .withOpaqueId(threadContext) .withXForwardedFor(threadContext) .build(); @@ -611,14 +617,14 @@ public void runAsDenied(String requestId, Authentication authentication, String } @Override - public void runAsDenied(String requestId, Authentication authentication, RestRequest request, String[] roleNames) { + public void runAsDenied(String requestId, Authentication authentication, RestRequest request, AuthorizationInfo authorizationInfo) { if (events.contains(RUN_AS_DENIED) && eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(authentication.getUser()), - Optional.of(effectiveRealmName(authentication)), Optional.of(roleNames), Optional.empty())) == false) { + Optional.of(effectiveRealmName(authentication)), 
Optional.of(authorizationInfo), Optional.empty())) == false) { final StringMapMessage logEntry = new LogEntryBuilder() .with(EVENT_TYPE_FIELD_NAME, REST_ORIGIN_FIELD_VALUE) .with(EVENT_ACTION_FIELD_NAME, "run_as_denied") - .with(PRINCIPAL_ROLES_FIELD_NAME, roleNames) + .with(authorizationInfo.asMap()) .withRestUriAndMethod(request) .withRunAsSubject(authentication) .withRestOrigin(request) @@ -762,29 +768,40 @@ LogEntryBuilder with(String key, String[] values) { return this; } + LogEntryBuilder with(Map map) { + for (Entry entry : map.entrySet()) { + Object value = entry.getValue(); + if (value.getClass().isArray()) { + logEntry.with(entry.getKey(), toQuotedJsonArray((Object[]) value)); + } else { + logEntry.with(entry.getKey(), value); + } + } + return this; + } + StringMapMessage build() { return logEntry; } - String toQuotedJsonArray(String[] values) { + String toQuotedJsonArray(Object[] values) { assert values != null; final StringBuilder stringBuilder = new StringBuilder(); final JsonStringEncoder jsonStringEncoder = JsonStringEncoder.getInstance(); stringBuilder.append("["); - for (final String value : values) { + for (final Object value : values) { if (value != null) { if (stringBuilder.length() > 1) { stringBuilder.append(","); } stringBuilder.append("\""); - jsonStringEncoder.quoteAsString(value, stringBuilder); + jsonStringEncoder.quoteAsString(value.toString(), stringBuilder); stringBuilder.append("\""); } } stringBuilder.append("]"); return stringBuilder.toString(); } - } @@ -979,7 +996,8 @@ static final class AuditEventMetaInfo { * user field (such as `anonymous_access_denied`) as well as events from the * "elastic" username. */ - AuditEventMetaInfo(Optional user, Optional realm, Optional roles, Optional indices) { + AuditEventMetaInfo(Optional user, Optional realm, Optional authorizationInfo, + Optional indices) { this.principal = user.map(u -> u.principal()).orElse(""); this.realm = realm.orElse(""); // Supplier indirection and lazy generation of Streams serves 2 purposes: @@ -987,8 +1005,12 @@ static final class AuditEventMetaInfo { // conditions on the `principal` and `realm` fields // 2. 
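With this change the audit trail renders whatever key/value pairs the AuthorizationInfo exposes, and LogEntryBuilder.with(Map) quotes array values the way role names used to be quoted. A self-contained sketch of that rendering path (AuthzInfo stands in for AuthorizationEngine.AuthorizationInfo, and the escaping is a simplification of what JsonStringEncoder does in the real code):

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

class AuditEntrySketch {
    // The only contract the audit trail needs from the authorization engine.
    interface AuthzInfo { Map<String, Object> asMap(); }

    // Mirrors LogEntryBuilder.with(Map): object arrays become quoted JSON-style arrays, scalars pass through.
    static Map<String, Object> toLogFields(AuthzInfo info) {
        Map<String, Object> fields = new LinkedHashMap<>();
        for (Map.Entry<String, Object> e : info.asMap().entrySet()) {
            Object v = e.getValue();
            fields.put(e.getKey(), v.getClass().isArray() ? quotedArray((Object[]) v) : v);
        }
        return fields;
    }

    // Mirrors toQuotedJsonArray: null elements are skipped, the rest are quoted and comma separated.
    static String quotedArray(Object[] values) {
        StringBuilder sb = new StringBuilder("[");
        for (Object v : values) {
            if (v == null) {
                continue;
            }
            if (sb.length() > 1) {
                sb.append(',');
            }
            sb.append('"').append(v.toString().replace("\"", "\\\"")).append('"'); // simplified escaping
        }
        return sb.append(']').toString();
    }

    public static void main(String[] args) {
        AuthzInfo rbac = () -> Collections.singletonMap("user.roles", new String[] { "kibana_user", "monitor" });
        System.out.println(toLogFields(rbac)); // {user.roles=["kibana_user","monitor"]}
    }
}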
reusability of the AuditEventMetaInfo instance: in this case Streams have // to be regenerated as they cannot be operated upon twice - this.roles = () -> roles.filter(r -> r.length > 0).filter(a -> Arrays.stream(a).anyMatch(Objects::nonNull)) - .map(Arrays::stream).orElse(Stream.of("")); + this.roles = () -> authorizationInfo.filter(info -> { + final Object value = info.asMap().get("user.roles"); + return value instanceof String[] && + ((String[]) value).length != 0 && + Arrays.stream((String[]) value).anyMatch(Objects::nonNull); + }).map(info -> Arrays.stream((String[]) info.asMap().get("user.roles"))).orElse(Stream.of("")); this.indices = () -> indices.filter(i -> i.length > 0).filter(a -> Arrays.stream(a).anyMatch(Objects::nonNull)) .map(Arrays::stream).orElse(Stream.of("")); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index c2972b013ff05..0ca87adc28109 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -6,8 +6,6 @@ package org.elasticsearch.xpack.security.authc; -import com.google.common.collect.Sets; - import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; @@ -62,12 +60,10 @@ import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.permission.Role; -import org.elasticsearch.xpack.core.security.authz.permission.LimitedRole; import org.elasticsearch.xpack.core.security.user.User; -import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; import org.elasticsearch.xpack.security.support.SecurityIndexManager; +import javax.crypto.SecretKeyFactory; import java.io.Closeable; import java.io.IOException; import java.io.UncheckedIOException; @@ -84,11 +80,10 @@ import java.util.Locale; import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; -import javax.crypto.SecretKeyFactory; - import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; @@ -132,12 +127,11 @@ public class ApiKeyService { private final Settings settings; private final ExpiredApiKeysRemover expiredApiKeysRemover; private final TimeValue deleteInterval; - private final CompositeRolesStore compositeRolesStore; private volatile long lastExpirationRunMs; - public ApiKeyService(Settings settings, Clock clock, Client client, SecurityIndexManager securityIndex, ClusterService clusterService, - CompositeRolesStore compositeRolesStore) { + public ApiKeyService(Settings settings, Clock clock, Client client, SecurityIndexManager securityIndex, + ClusterService clusterService) { this.clock = clock; this.client = client; this.securityIndex = securityIndex; @@ -147,16 +141,17 @@ public ApiKeyService(Settings settings, Clock clock, Client client, SecurityInde this.settings = settings; this.deleteInterval = DELETE_INTERVAL.get(settings); this.expiredApiKeysRemover = 
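Because the audit event filter still matches on role names, AuditEventMetaInfo now digs them out of the authorization-info map under the "user.roles" key (the same key the hunk above reads), guarding against a missing or wrongly typed value. The extraction on its own, with plain JDK types:

import java.util.Arrays;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Supplier;
import java.util.stream.Stream;

class RoleStreamSketch {
    // Returns a re-creatable stream of role names, or a single empty string when none are usable,
    // so the filter policy predicates always have something to match against.
    static Supplier<Stream<String>> roleStream(Optional<Map<String, Object>> authzInfo) {
        return () -> authzInfo
            .map(m -> m.get("user.roles"))
            .filter(v -> v instanceof String[]
                && ((String[]) v).length != 0
                && Arrays.stream((String[]) v).anyMatch(Objects::nonNull))
            .map(v -> Arrays.stream((String[]) v))
            .orElse(Stream.of(""));
    }
}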
new ExpiredApiKeysRemover(settings, client); - this.compositeRolesStore = compositeRolesStore; } /** * Asynchronously creates a new API key based off of the request and authentication * @param authentication the authentication that this api key should be based off of * @param request the request to create the api key included any permission restrictions + * @param roleDescriptorSet the user's actual roles that we always enforce * @param listener the listener that will be used to notify of completion */ - public void createApiKey(Authentication authentication, CreateApiKeyRequest request, ActionListener listener) { + public void createApiKey(Authentication authentication, CreateApiKeyRequest request, Set roleDescriptorSet, + ActionListener listener) { ensureEnabled(); if (authentication == null) { listener.onFailure(new IllegalArgumentException("authentication must be provided")); @@ -209,13 +204,10 @@ public void createApiKey(Authentication authentication, CreateApiKeyRequest requ // Save limited_by_role_descriptors builder.startObject("limited_by_role_descriptors"); - compositeRolesStore.getRoleDescriptors(Sets.newHashSet(authentication.getUser().roles()), - ActionListener.wrap(rdSet -> { - for (RoleDescriptor descriptor : rdSet) { - builder.field(descriptor.getName(), - (contentBuilder, params) -> descriptor.toXContent(contentBuilder, params, true)); - } - }, listener::onFailure)); + for (RoleDescriptor descriptor : roleDescriptorSet) { + builder.field(descriptor.getName(), + (contentBuilder, params) -> descriptor.toXContent(contentBuilder, params, true)); + } builder.endObject(); builder.field("name", request.getName()) @@ -294,10 +286,9 @@ void authenticateWithApiKeyIfPresent(ThreadContext ctx, ActionListener listener) { + public void getRoleForApiKey(Authentication authentication, ActionListener listener) { if (authentication.getAuthenticationType() != Authentication.AuthenticationType.API_KEY) { throw new IllegalStateException("authentication type must be api key but is " + authentication.getAuthenticationType()); } @@ -312,18 +303,37 @@ public void getRoleForApiKey(Authentication authentication, CompositeRolesStore listener.onFailure(new ElasticsearchSecurityException("no role descriptors found for API key")); } else if (roleDescriptors == null || roleDescriptors.isEmpty()) { final List authnRoleDescriptorsList = parseRoleDescriptors(apiKeyId, authnRoleDescriptors); - rolesStore.buildAndCacheRoleFromDescriptors(authnRoleDescriptorsList, apiKeyId, listener); + listener.onResponse(new ApiKeyRoleDescriptors(apiKeyId, authnRoleDescriptorsList, null)); } else { final List roleDescriptorList = parseRoleDescriptors(apiKeyId, roleDescriptors); final List authnRoleDescriptorsList = parseRoleDescriptors(apiKeyId, authnRoleDescriptors); - rolesStore.buildAndCacheRoleFromDescriptors(roleDescriptorList, apiKeyId, ActionListener.wrap(role -> { - rolesStore.buildAndCacheRoleFromDescriptors(authnRoleDescriptorsList, apiKeyId, ActionListener.wrap(limitedByRole -> { - Role finalRole = LimitedRole.createLimitedRole(role, limitedByRole); - listener.onResponse(finalRole); - }, listener::onFailure)); - }, listener::onFailure)); + listener.onResponse(new ApiKeyRoleDescriptors(apiKeyId, roleDescriptorList, authnRoleDescriptorsList)); } + } + public static class ApiKeyRoleDescriptors { + + private final String apiKeyId; + private final List roleDescriptors; + private final List limitedByRoleDescriptors; + + public ApiKeyRoleDescriptors(String apiKeyId, List roleDescriptors, List limitedByDescriptors) { + 
this.apiKeyId = apiKeyId; + this.roleDescriptors = roleDescriptors; + this.limitedByRoleDescriptors = limitedByDescriptors; + } + + public String getApiKeyId() { + return apiKeyId; + } + + public List getRoleDescriptors() { + return roleDescriptors; + } + + public List getLimitedByRoleDescriptors() { + return limitedByRoleDescriptors; + } } private List parseRoleDescriptors(final String apiKeyId, final Map roleDescriptors) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java index 365b2e43188a9..8fb5abda10c54 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/AuthenticationService.java @@ -35,7 +35,6 @@ import org.elasticsearch.xpack.core.security.authc.AuthenticationServiceField; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; import org.elasticsearch.xpack.core.security.authc.Realm; -import org.elasticsearch.xpack.core.security.authz.permission.Role; import org.elasticsearch.xpack.core.security.support.Exceptions; import org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.security.user.SystemUser; @@ -44,6 +43,7 @@ import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.audit.AuditUtil; import org.elasticsearch.xpack.security.authc.support.RealmUserLookup; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.EmptyAuthorizationInfo; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.ArrayList; @@ -643,7 +643,7 @@ ElasticsearchSecurityException anonymousAccessDenied() { @Override ElasticsearchSecurityException runAsDenied(Authentication authentication, AuthenticationToken token) { - auditTrail.runAsDenied(requestId, authentication, action, message, Role.EMPTY.names()); + auditTrail.runAsDenied(requestId, authentication, action, message, EmptyAuthorizationInfo.INSTANCE); return failureHandler.failedAuthentication(message, token, action, threadContext); } @@ -707,7 +707,7 @@ ElasticsearchSecurityException anonymousAccessDenied() { @Override ElasticsearchSecurityException runAsDenied(Authentication authentication, AuthenticationToken token) { - auditTrail.runAsDenied(requestId, authentication, request, Role.EMPTY.names()); + auditTrail.runAsDenied(requestId, authentication, request, EmptyAuthorizationInfo.INSTANCE); return failureHandler.failedAuthentication(request, token, threadContext); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 24df5b62c4980..acaf152628e8b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -3,59 +3,59 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
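ApiKeyService no longer builds a Role itself: as the ApiKeyRoleDescriptors class above shows, it hands back the key's own descriptors plus the creator's limited-by descriptors and leaves the combining step (what the removed code did with LimitedRole.createLimitedRole) to the authorization engine. A stripped-down version of that holder, with RoleDescriptor replaced by String so the sketch stands alone:

import java.util.Collections;
import java.util.List;

class ApiKeyRoleDescriptorsSketch {
    private final String apiKeyId;
    private final List<String> roleDescriptors;          // descriptors attached to the key itself
    private final List<String> limitedByRoleDescriptors; // the creator's roles, enforced as a ceiling; may be null

    ApiKeyRoleDescriptorsSketch(String apiKeyId, List<String> roleDescriptors, List<String> limitedBy) {
        this.apiKeyId = apiKeyId;
        this.roleDescriptors = Collections.unmodifiableList(roleDescriptors);
        this.limitedByRoleDescriptors = limitedBy == null ? null : Collections.unmodifiableList(limitedBy);
    }

    String getApiKeyId() { return apiKeyId; }
    List<String> getRoleDescriptors() { return roleDescriptors; }
    List<String> getLimitedByRoleDescriptors() { return limitedByRoleDescriptors; }
}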
*/ + package org.elasticsearch.xpack.security.authz; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.CompositeIndicesRequest; import org.elasticsearch.action.DocWriteRequest; -import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.StepListener; import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemRequest; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.bulk.TransportShardBulkAction; import org.elasticsearch.action.delete.DeleteAction; -import org.elasticsearch.action.get.MultiGetAction; import org.elasticsearch.action.index.IndexAction; -import org.elasticsearch.action.search.ClearScrollAction; -import org.elasticsearch.action.search.MultiSearchAction; -import org.elasticsearch.action.search.SearchScrollAction; -import org.elasticsearch.action.search.SearchTransportService; +import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.replication.TransportReplicationAction.ConcreteShardRequest; -import org.elasticsearch.action.termvectors.MultiTermVectorsAction; import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportActionProxy; import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction; -import org.elasticsearch.xpack.core.security.action.user.ChangePasswordAction; -import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesAction; -import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; -import org.elasticsearch.xpack.core.security.action.user.UserRequest; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationFailureHandler; -import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; +import org.elasticsearch.xpack.core.security.authc.esnative.ClientReservedRealm; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; +import 
org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AsyncSupplier; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationResult; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.EmptyAuthorizationInfo; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.IndexAuthorizationResult; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.RequestInfo; import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; +import org.elasticsearch.xpack.core.security.authz.ResolvedIndices; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; -import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache; -import org.elasticsearch.xpack.core.security.authz.permission.Role; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; -import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; -import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.xpack.core.security.user.User; @@ -63,54 +63,55 @@ import org.elasticsearch.xpack.core.security.user.XPackUser; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.audit.AuditUtil; -import org.elasticsearch.xpack.security.authc.ApiKeyService; -import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; -import org.elasticsearch.xpack.security.authz.IndicesAndAliasesResolver.ResolvedIndices; +import org.elasticsearch.xpack.security.authz.interceptor.RequestInterceptor; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; +import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; -import java.util.function.Predicate; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; +import static org.elasticsearch.action.support.ContextPreservingActionListener.wrapPreservingContext; import static org.elasticsearch.xpack.core.security.SecurityField.setting; import static org.elasticsearch.xpack.core.security.support.Exceptions.authorizationError; +import static org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail.PRINCIPAL_ROLES_FIELD_NAME; public class AuthorizationService { public static final Setting ANONYMOUS_AUTHORIZATION_EXCEPTION_SETTING = Setting.boolSetting(setting("authc.anonymous.authz_exception"), true, Property.NodeScope); public static final String ORIGINATING_ACTION_KEY = "_originating_action_name"; - public static final String ROLE_NAMES_KEY = "_effective_role_names"; - - private static final Predicate SAME_USER_PRIVILEGE = Automatons.predicate( - ChangePasswordAction.NAME, AuthenticateAction.NAME, HasPrivilegesAction.NAME, GetUserPrivilegesAction.NAME); + public static final String AUTHORIZATION_INFO_KEY = "_authz_info"; + private static final AuthorizationInfo SYSTEM_AUTHZ_INFO = + () -> 
Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, new String[] { SystemUser.ROLE_NAME }); - private static final String INDEX_SUB_REQUEST_PRIMARY = IndexAction.NAME + "[p]"; - private static final String INDEX_SUB_REQUEST_REPLICA = IndexAction.NAME + "[r]"; - private static final String DELETE_SUB_REQUEST_PRIMARY = DeleteAction.NAME + "[p]"; - private static final String DELETE_SUB_REQUEST_REPLICA = DeleteAction.NAME + "[r]"; private static final Logger logger = LogManager.getLogger(AuthorizationService.class); + private final Settings settings; private final ClusterService clusterService; - private final CompositeRolesStore rolesStore; private final AuditTrailService auditTrail; private final IndicesAndAliasesResolver indicesAndAliasesResolver; private final AuthenticationFailureHandler authcFailureHandler; private final ThreadContext threadContext; private final AnonymousUser anonymousUser; - private final FieldPermissionsCache fieldPermissionsCache; - private final ApiKeyService apiKeyService; + private final AuthorizationEngine rbacEngine; + private final AuthorizationEngine authorizationEngine; + private final Set requestInterceptors; + private final XPackLicenseState licenseState; private final boolean isAnonymousEnabled; private final boolean anonymousAuthzExceptionEnabled; public AuthorizationService(Settings settings, CompositeRolesStore rolesStore, ClusterService clusterService, AuditTrailService auditTrail, AuthenticationFailureHandler authcFailureHandler, - ThreadPool threadPool, AnonymousUser anonymousUser, ApiKeyService apiKeyService, - FieldPermissionsCache fieldPermissionsCache) { - this.rolesStore = rolesStore; + ThreadPool threadPool, AnonymousUser anonymousUser, @Nullable AuthorizationEngine authorizationEngine, + Set requestInterceptors, XPackLicenseState licenseState) { this.clusterService = clusterService; this.auditTrail = auditTrail; this.indicesAndAliasesResolver = new IndicesAndAliasesResolver(settings, clusterService); @@ -119,8 +120,27 @@ public AuthorizationService(Settings settings, CompositeRolesStore rolesStore, C this.anonymousUser = anonymousUser; this.isAnonymousEnabled = AnonymousUser.isAnonymousEnabled(settings); this.anonymousAuthzExceptionEnabled = ANONYMOUS_AUTHORIZATION_EXCEPTION_SETTING.get(settings); - this.fieldPermissionsCache = fieldPermissionsCache; - this.apiKeyService = apiKeyService; + this.rbacEngine = new RBACEngine(settings, rolesStore); + this.authorizationEngine = authorizationEngine == null ? 
this.rbacEngine : authorizationEngine; + this.requestInterceptors = requestInterceptors; + this.settings = settings; + this.licenseState = licenseState; + } + + public void checkPrivileges(Authentication authentication, HasPrivilegesRequest request, + Collection applicationPrivilegeDescriptors, + ActionListener listener) { + getAuthorizationEngine(authentication).checkPrivileges(authentication, getAuthorizationInfoFromContext(), request, + applicationPrivilegeDescriptors, wrapPreservingContext(listener, threadContext)); + } + + public void retrieveUserPrivileges(Authentication authentication, GetUserPrivilegesRequest request, + ActionListener listener) { + getAuthorizationEngine(authentication).getUserPrivileges(authentication, getAuthorizationInfoFromContext(), request, listener); + } + + private AuthorizationInfo getAuthorizationInfoFromContext() { + return Objects.requireNonNull(threadContext.getTransient(AUTHORIZATION_INFO_KEY), "authorization info is missing from context"); } /** @@ -128,14 +148,16 @@ public AuthorizationService(Settings settings, CompositeRolesStore rolesStore, C * have the appropriate privileges for this action/request, an {@link ElasticsearchSecurityException} * will be thrown. * - * @param authentication The authentication information - * @param action The action - * @param request The request + * @param authentication The authentication information + * @param action The action + * @param originalRequest The request + * @param listener The listener that gets called. A call to {@link ActionListener#onResponse(Object)} indicates success * @throws ElasticsearchSecurityException If the given user is no allowed to execute the given request */ - public void authorize(Authentication authentication, String action, TransportRequest request, Role userRole, - Role runAsRole) throws ElasticsearchSecurityException { - final TransportRequest originalRequest = request; + public void authorize(final Authentication authentication, final String action, final TransportRequest originalRequest, + final ActionListener listener) throws ElasticsearchSecurityException { + // prior to doing any authorization lets set the originating action in the context only + putTransientIfNonExisting(ORIGINATING_ACTION_KEY, action); String auditId = AuditUtil.extractRequestId(threadContext); if (auditId == null) { @@ -144,212 +166,259 @@ public void authorize(Authentication authentication, String action, TransportReq if (isInternalUser(authentication.getUser()) != false) { auditId = AuditUtil.getOrGenerateRequestId(threadContext); } else { - auditTrail.tamperedRequest(null, authentication.getUser(), action, request); + auditTrail.tamperedRequest(null, authentication.getUser(), action, originalRequest); final String message = "Attempt to authorize action [" + action + "] for [" + authentication.getUser().principal() + "] without an existing request-id"; assert false : message; - throw new ElasticsearchSecurityException(message); - } - } - - if (request instanceof ConcreteShardRequest) { - request = ((ConcreteShardRequest) request).getRequest(); - assert TransportActionProxy.isProxyRequest(request) == false : "expected non-proxy request for action: " + action; - } else { - request = TransportActionProxy.unwrapRequest(request); - if (TransportActionProxy.isProxyRequest(originalRequest) && TransportActionProxy.isProxyAction(action) == false) { - throw new IllegalStateException("originalRequest is a proxy request for: [" + request + "] but action: [" - + action + "] isn't"); + listener.onFailure(new 
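Because AuthorizationInfo is effectively a single-method contract (asMap()), constants such as SYSTEM_AUTHZ_INFO above can be plain lambdas, and the constructor falls back to the built-in RBAC engine whenever no custom engine is plugged in. A minimal sketch of that wiring with stand-in types (the role-name literal is a placeholder for SystemUser.ROLE_NAME):

import java.util.Collections;
import java.util.Map;

class EngineWiringSketch {
    interface AuthzInfo { Map<String, Object> asMap(); }   // stand-in for AuthorizationEngine.AuthorizationInfo
    interface Engine { /* resolveAuthorizationInfo, authorizeClusterAction, ... elided */ }

    // A constant info whose only audit field is the synthetic system role (cf. SYSTEM_AUTHZ_INFO).
    static final AuthzInfo SYSTEM_AUTHZ_INFO =
        () -> Collections.singletonMap("user.roles", new String[] { "_system_role_placeholder" });

    private final Engine rbacEngine;
    private final Engine authorizationEngine;

    // A null pluggable engine means "use the built-in RBAC engine" (cf. the constructor above).
    EngineWiringSketch(Engine rbacEngine, Engine customEngine) {
        this.rbacEngine = rbacEngine;
        this.authorizationEngine = customEngine == null ? rbacEngine : customEngine;
    }

    Engine engineFor(boolean reservedOrInternalUser, boolean licensedForCustomEngine) {
        // Mirrors getAuthorizationEngineForUser further down: reserved/internal users and
        // unlicensed clusters always use the RBAC engine; everyone else gets the custom engine.
        if (authorizationEngine != rbacEngine && licensedForCustomEngine && reservedOrInternalUser == false) {
            return authorizationEngine;
        }
        return rbacEngine;
    }
}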
ElasticsearchSecurityException(message)); } } - // prior to doing any authorization lets set the originating action in the context only - putTransientIfNonExisting(ORIGINATING_ACTION_KEY, action); - // first we need to check if the user is the system. If it is, we'll just authorize the system access + // sometimes a request might be wrapped within another, which is the case for proxied + // requests and concrete shard requests + final TransportRequest unwrappedRequest = maybeUnwrapRequest(authentication, originalRequest, action, auditId); if (SystemUser.is(authentication.getUser())) { - if (SystemUser.isAuthorized(action)) { - putTransientIfNonExisting(AuthorizationServiceField.INDICES_PERMISSIONS_KEY, IndicesAccessControl.ALLOW_ALL); - putTransientIfNonExisting(ROLE_NAMES_KEY, new String[] { SystemUser.ROLE_NAME }); - auditTrail.accessGranted(auditId, authentication, action, request, new String[] { SystemUser.ROLE_NAME }); - return; - } - throw denial(auditId, authentication, action, request, new String[] { SystemUser.ROLE_NAME }); + // this never goes async so no need to wrap the listener + authorizeSystemUser(authentication, action, auditId, unwrappedRequest, listener); + } else { + final String finalAuditId = auditId; + final RequestInfo requestInfo = new RequestInfo(authentication, unwrappedRequest, action); + final ActionListener authzInfoListener = wrapPreservingContext(ActionListener.wrap( + authorizationInfo -> { + putTransientIfNonExisting(AUTHORIZATION_INFO_KEY, authorizationInfo); + maybeAuthorizeRunAs(requestInfo, finalAuditId, authorizationInfo, listener); + }, listener::onFailure), threadContext); + getAuthorizationEngine(authentication).resolveAuthorizationInfo(requestInfo, authzInfoListener); } + } - // get the roles of the authenticated user, which may be different than the effective - Role permission = userRole; - - // check if the request is a run as request + private void maybeAuthorizeRunAs(final RequestInfo requestInfo, final String requestId, final AuthorizationInfo authzInfo, + final ActionListener listener) { + final Authentication authentication = requestInfo.getAuthentication(); + final TransportRequest request = requestInfo.getRequest(); + final String action = requestInfo.getAction(); final boolean isRunAs = authentication.getUser().isRunAs(); if (isRunAs) { - // if we are running as a user we looked up then the authentication must contain a lookedUpBy. 
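authorize() now stores the resolved AuthorizationInfo as a thread-context transient under _authz_info (putTransientIfNonExisting above), and checkPrivileges / retrieveUserPrivileges read it back with a hard null check. A small sketch of that handshake, with a map-backed stand-in for Elasticsearch's ThreadContext:

import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;

class AuthzContextSketch {
    // Very small stand-in for the transient storage that ThreadContext provides.
    static final class Context {
        private final Map<String, Object> transients = new ConcurrentHashMap<>();
        void putTransientIfAbsent(String key, Object value) { transients.putIfAbsent(key, value); }
        Object getTransient(String key) { return transients.get(key); }
    }

    static final String AUTHORIZATION_INFO_KEY = "_authz_info"; // same key the patch introduces

    // cf. getAuthorizationInfoFromContext(): the privilege APIs assume authorize() already ran
    // on this context, so a missing transient is a programming error, not a normal denial.
    static Object authorizationInfo(Context ctx) {
        return Objects.requireNonNull(ctx.getTransient(AUTHORIZATION_INFO_KEY),
            "authorization info is missing from context");
    }
}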
If it doesn't then this user - // doesn't really exist but the authc service allowed it through to avoid leaking users that exist in the system - if (authentication.getLookedUpBy() == null) { - throw denyRunAs(auditId, authentication, action, request, permission.names()); - } else if (permission.runAs().check(authentication.getUser().principal())) { - auditTrail.runAsGranted(auditId, authentication, action, request, permission.names()); - permission = runAsRole; - } else { - throw denyRunAs(auditId, authentication, action, request, permission.names()); - } + ActionListener runAsListener = wrapPreservingContext(ActionListener.wrap(result -> { + if (result.isGranted()) { + if (result.isAuditable()) { + auditTrail.runAsGranted(requestId, authentication, action, request, + authzInfo.getAuthenticatedUserAuthorizationInfo()); + } + authorizeAction(requestInfo, requestId, authzInfo, listener); + } else { + if (result.isAuditable()) { + auditTrail.runAsDenied(requestId, authentication, action, request, + authzInfo.getAuthenticatedUserAuthorizationInfo()); + } + listener.onFailure(denialException(authentication, action, null)); + } + }, e -> { + auditTrail.runAsDenied(requestId, authentication, action, request, + authzInfo.getAuthenticatedUserAuthorizationInfo()); + listener.onFailure(denialException(authentication, action, null)); + }), threadContext); + authorizeRunAs(requestInfo, authzInfo, runAsListener); + } else { + authorizeAction(requestInfo, requestId, authzInfo, listener); } - putTransientIfNonExisting(ROLE_NAMES_KEY, permission.names()); + } - // first, we'll check if the action is a cluster action. If it is, we'll only check it against the cluster permissions + private void authorizeAction(final RequestInfo requestInfo, final String requestId, final AuthorizationInfo authzInfo, + final ActionListener listener) { + final Authentication authentication = requestInfo.getAuthentication(); + final TransportRequest request = requestInfo.getRequest(); + final String action = requestInfo.getAction(); + final AuthorizationEngine authzEngine = getAuthorizationEngine(authentication); if (ClusterPrivilege.ACTION_MATCHER.test(action)) { - if (permission.checkClusterAction(action, request) || checkSameUserPermissions(action, request, authentication)) { - putTransientIfNonExisting(AuthorizationServiceField.INDICES_PERMISSIONS_KEY, IndicesAccessControl.ALLOW_ALL); - auditTrail.accessGranted(auditId, authentication, action, request, permission.names()); - return; - } - throw denial(auditId, authentication, action, request, permission.names()); + final ActionListener clusterAuthzListener = + wrapPreservingContext(new AuthorizationResultListener<>(result -> { + putTransientIfNonExisting(AuthorizationServiceField.INDICES_PERMISSIONS_KEY, IndicesAccessControl.ALLOW_ALL); + listener.onResponse(null); + }, listener::onFailure, requestInfo, requestId, authzInfo), threadContext); + authzEngine.authorizeClusterAction(requestInfo, authzInfo, clusterAuthzListener); + } else if (IndexPrivilege.ACTION_MATCHER.test(action)) { + final MetaData metaData = clusterService.state().metaData(); + final AsyncSupplier> authorizedIndicesSupplier = new CachingAsyncSupplier<>(authzIndicesListener -> + authzEngine.loadAuthorizedIndices(requestInfo, authzInfo, metaData.getAliasAndIndexLookup(), + authzIndicesListener)); + final AsyncSupplier resolvedIndicesAsyncSupplier = new CachingAsyncSupplier<>((resolvedIndicesListener) -> { + authorizedIndicesSupplier.getAsync(ActionListener.wrap(authorizedIndices -> { + 
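Both the index-action authorization above and the later bulk-item checks need the authorized-indices and resolved-indices values, so the patch wraps those lookups in CachingAsyncSupplier instances that compute once and replay the result to every caller. The patch does not show that class itself, so the memoisation below is only a plausible sketch with plain JDK types (ActionListener replaced by Consumer, failure propagation elided):

import java.util.concurrent.CompletableFuture;
import java.util.function.Consumer;

class CachingAsyncSupplierSketch<V> {
    // Something that produces its value asynchronously, e.g. the list of authorized indices.
    interface AsyncSupplier<T> { void getAsync(Consumer<T> onResponse); }

    private final AsyncSupplier<V> delegate;
    private volatile CompletableFuture<V> future; // computed at most once, then shared

    CachingAsyncSupplierSketch(AsyncSupplier<V> delegate) { this.delegate = delegate; }

    void getAsync(Consumer<V> onResponse) {
        CompletableFuture<V> f = future;
        if (f == null) {
            synchronized (this) {
                if (future == null) {
                    future = new CompletableFuture<>();
                    delegate.getAsync(future::complete); // kick off the single computation
                }
                f = future;
            }
        }
        f.thenAccept(onResponse); // every caller observes the same cached value
    }
}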
resolveIndexNames(request, metaData, authorizedIndices, resolvedIndicesListener); + }, e -> { + auditTrail.accessDenied(requestId, authentication, action, request, authzInfo); + if (e instanceof IndexNotFoundException) { + listener.onFailure(e); + } else { + listener.onFailure(denialException(authentication, action, e)); + } + })); + }); + authzEngine.authorizeIndexAction(requestInfo, authzInfo, resolvedIndicesAsyncSupplier, + metaData.getAliasAndIndexLookup(), wrapPreservingContext(new AuthorizationResultListener<>(result -> + handleIndexActionAuthorizationResult(result, requestInfo, requestId, authzInfo, authzEngine, authorizedIndicesSupplier, + resolvedIndicesAsyncSupplier, metaData, listener), + listener::onFailure, requestInfo, requestId, authzInfo), threadContext)); + } else { + logger.warn("denying access as action [{}] is not an index or cluster action", action); + auditTrail.accessDenied(requestId, authentication, action, request, authzInfo); + listener.onFailure(denialException(authentication, action, null)); } + } - // ok... this is not a cluster action, let's verify it's an indices action - if (!IndexPrivilege.ACTION_MATCHER.test(action)) { - throw denial(auditId, authentication, action, request, permission.names()); + private void handleIndexActionAuthorizationResult(final IndexAuthorizationResult result, final RequestInfo requestInfo, + final String requestId, final AuthorizationInfo authzInfo, + final AuthorizationEngine authzEngine, + final AsyncSupplier> authorizedIndicesSupplier, + final AsyncSupplier resolvedIndicesAsyncSupplier, + final MetaData metaData, + final ActionListener listener) { + final Authentication authentication = requestInfo.getAuthentication(); + final TransportRequest request = requestInfo.getRequest(); + final String action = requestInfo.getAction(); + if (result.getIndicesAccessControl() != null) { + putTransientIfNonExisting(AuthorizationServiceField.INDICES_PERMISSIONS_KEY, + result.getIndicesAccessControl()); } - - //composite actions are explicitly listed and will be authorized at the sub-request / shard level - if (isCompositeAction(action)) { - if (request instanceof CompositeIndicesRequest == false) { - throw new IllegalStateException("Composite actions must implement " + CompositeIndicesRequest.class.getSimpleName() - + ", " + request.getClass().getSimpleName() + " doesn't"); - } - // we check if the user can execute the action, without looking at indices, which will be authorized at the shard level - if (permission.checkIndicesAction(action)) { - auditTrail.accessGranted(auditId, authentication, action, request, permission.names()); - return; - } - throw denial(auditId, authentication, action, request, permission.names()); - } else if (isTranslatedToBulkAction(action)) { - if (request instanceof CompositeIndicesRequest == false) { - throw new IllegalStateException("Bulk translated actions must implement " + CompositeIndicesRequest.class.getSimpleName() - + ", " + request.getClass().getSimpleName() + " doesn't"); - } - // we check if the user can execute the action, without looking at indices, which will be authorized at the shard level - if (permission.checkIndicesAction(action)) { - auditTrail.accessGranted(auditId, authentication, action, request, permission.names()); - return; - } - throw denial(auditId, authentication, action, request, permission.names()); - } else if (TransportActionProxy.isProxyAction(action)) { - // we authorize proxied actions once they are "unwrapped" on the next node - if 
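runRequestInterceptors chains the registered RequestInterceptors with StepListener so they run strictly one after another and the caller's listener fires only after the last one succeeds. The same sequencing can be sketched with plain callbacks and recursion (Interceptor stands in for RequestInterceptor):

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.function.Consumer;

class InterceptorChainSketch {
    interface Interceptor { void intercept(Runnable onSuccess, Consumer<Exception> onFailure); }

    // Run the interceptors one after another; any failure short-circuits the chain.
    static void runAll(Iterator<Interceptor> it, Runnable onSuccess, Consumer<Exception> onFailure) {
        if (it.hasNext() == false) {
            onSuccess.run();
        } else {
            it.next().intercept(() -> runAll(it, onSuccess, onFailure), onFailure);
        }
    }

    public static void main(String[] args) {
        List<Interceptor> interceptors = Arrays.asList(
            (ok, fail) -> { System.out.println("first interceptor"); ok.run(); },
            (ok, fail) -> { System.out.println("second interceptor"); ok.run(); });
        runAll(interceptors.iterator(), () -> System.out.println("request allowed"), Throwable::printStackTrace);
    }
}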
(TransportActionProxy.isProxyRequest(originalRequest) == false) { - throw new IllegalStateException("originalRequest is not a proxy request: [" + originalRequest + "] but action: [" - + action + "] is a proxy action"); - } - if (permission.checkIndicesAction(action)) { - auditTrail.accessGranted(auditId, authentication, action, request, permission.names()); - return; + //if we are creating an index we need to authorize potential aliases created at the same time + if (IndexPrivilege.CREATE_INDEX_MATCHER.test(action)) { + assert request instanceof CreateIndexRequest; + Set aliases = ((CreateIndexRequest) request).aliases(); + if (aliases.isEmpty()) { + runRequestInterceptors(requestInfo, authzInfo, authorizationEngine, listener); } else { - // we do this here in addition to the denial below since we might run into an assertion on scroll request below if we - // don't have permission to read cross cluster but wrap a scroll request. - throw denial(auditId, authentication, action, request, permission.names()); + final RequestInfo aliasesRequestInfo = new RequestInfo(authentication, request, IndicesAliasesAction.NAME); + authzEngine.authorizeIndexAction(aliasesRequestInfo, authzInfo, + ril -> { + resolvedIndicesAsyncSupplier.getAsync(ActionListener.wrap(resolvedIndices -> { + List aliasesAndIndices = new ArrayList<>(resolvedIndices.getLocal()); + for (Alias alias : aliases) { + aliasesAndIndices.add(alias.name()); + } + ResolvedIndices withAliases = new ResolvedIndices(aliasesAndIndices, Collections.emptyList()); + ril.onResponse(withAliases); + }, ril::onFailure)); + }, + metaData.getAliasAndIndexLookup(), + wrapPreservingContext(new AuthorizationResultListener<>( + authorizationResult -> runRequestInterceptors(requestInfo, authzInfo, authorizationEngine, listener), + listener::onFailure, aliasesRequestInfo, requestId, authzInfo), threadContext)); } + } else if (action.equals(TransportShardBulkAction.ACTION_NAME)) { + // if this is performing multiple actions on the index, then check each of those actions. + assert request instanceof BulkShardRequest + : "Action " + action + " requires " + BulkShardRequest.class + " but was " + request.getClass(); + authorizeBulkItems(requestInfo, authzInfo, authzEngine, resolvedIndicesAsyncSupplier, authorizedIndicesSupplier, + metaData, requestId, + ActionListener.wrap(ignore -> runRequestInterceptors(requestInfo, authzInfo, authorizationEngine, listener), + listener::onFailure)); + } else { + runRequestInterceptors(requestInfo, authzInfo, authorizationEngine, listener); } + } - // some APIs are indices requests that are not actually associated with indices. For example, - // search scroll request, is categorized under the indices context, but doesn't hold indices names - // (in this case, the security check on the indices was done on the search request that initialized - // the scroll. Given that scroll is implemented using a context on the node holding the shard, we - // piggyback on it and enhance the context with the original authentication. This serves as our method - // to validate the scroll id only stays with the same user! - if (request instanceof IndicesRequest == false && request instanceof IndicesAliasesRequest == false) { - //note that clear scroll shard level actions can originate from a clear scroll all, which doesn't require any - //indices permission as it's categorized under cluster. This is why the scroll check is performed - //even before checking if the user has any indices permission. 
- if (isScrollRelatedAction(action)) { - // if the action is a search scroll action, we first authorize that the user can execute the action for some - // index and if they cannot, we can fail the request early before we allow the execution of the action and in - // turn the shard actions - if (SearchScrollAction.NAME.equals(action) && permission.checkIndicesAction(action) == false) { - throw denial(auditId, authentication, action, request, permission.names()); - } else { - // we store the request as a transient in the ThreadContext in case of a authorization failure at the shard - // level. If authorization fails we will audit a access_denied message and will use the request to retrieve - // information such as the index and the incoming address of the request - auditTrail.accessGranted(auditId, authentication, action, request, permission.names()); - return; - } - } else { - assert false : - "only scroll related requests are known indices api that don't support retrieving the indices they relate to"; - throw denial(auditId, authentication, action, request, permission.names()); + private void runRequestInterceptors(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, + AuthorizationEngine authorizationEngine, ActionListener listener) { + if (requestInterceptors.isEmpty()) { + listener.onResponse(null); + } else { + Iterator requestInterceptorIterator = requestInterceptors.iterator(); + final StepListener firstStepListener = new StepListener<>(); + final RequestInterceptor first = requestInterceptorIterator.next(); + + StepListener prevListener = firstStepListener; + while (requestInterceptorIterator.hasNext()) { + final RequestInterceptor nextInterceptor = requestInterceptorIterator.next(); + final StepListener current = new StepListener<>(); + prevListener.whenComplete(v -> nextInterceptor.intercept(requestInfo, authorizationEngine, authorizationInfo, current), + listener::onFailure); + prevListener = current; } + + prevListener.whenComplete(v -> listener.onResponse(null), listener::onFailure); + first.intercept(requestInfo, authorizationEngine, authorizationInfo, firstStepListener); } + } - final boolean allowsRemoteIndices = request instanceof IndicesRequest - && IndicesAndAliasesResolver.allowsRemoteIndices((IndicesRequest) request); - // If this request does not allow remote indices - // then the user must have permission to perform this action on at least 1 local index - if (allowsRemoteIndices == false && permission.checkIndicesAction(action) == false) { - throw denial(auditId, authentication, action, request, permission.names()); - } + // pkg-private for testing + AuthorizationEngine getRunAsAuthorizationEngine(final Authentication authentication) { + return getAuthorizationEngineForUser(authentication.getUser().authenticatedUser()); + } - final MetaData metaData = clusterService.state().metaData(); - final AuthorizedIndices authorizedIndices = new AuthorizedIndices(permission, action, metaData); - final ResolvedIndices resolvedIndices = resolveIndexNames(auditId, authentication, action, request, metaData, - authorizedIndices, permission); - assert !resolvedIndices.isEmpty() - : "every indices request needs to have its indices set thus the resolved indices must not be empty"; - - // If this request does reference any remote indices - // then the user must have permission to perform this action on at least 1 local index - if (resolvedIndices.getRemote().isEmpty() && permission.checkIndicesAction(action) == false) { - throw denial(auditId, authentication, action, request, 
permission.names()); - } + // pkg-private for testing + AuthorizationEngine getAuthorizationEngine(final Authentication authentication) { + return getAuthorizationEngineForUser(authentication.getUser()); + } - //all wildcard expressions have been resolved and only the security plugin could have set '-*' here. - //'-*' matches no indices so we allow the request to go through, which will yield an empty response - if (resolvedIndices.isNoIndicesPlaceholder()) { - putTransientIfNonExisting(AuthorizationServiceField.INDICES_PERMISSIONS_KEY, IndicesAccessControl.ALLOW_NO_INDICES); - auditTrail.accessGranted(auditId, authentication, action, request, permission.names()); - return; + private AuthorizationEngine getAuthorizationEngineForUser(final User user) { + if (rbacEngine != authorizationEngine && licenseState.isAuthorizationEngineAllowed()) { + if (ClientReservedRealm.isReserved(user.principal(), settings) || isInternalUser(user)) { + return rbacEngine; + } else { + return authorizationEngine; + } + } else { + return rbacEngine; } + } - final Set localIndices = new HashSet<>(resolvedIndices.getLocal()); - IndicesAccessControl indicesAccessControl = permission.authorize(action, localIndices, metaData, fieldPermissionsCache); - if (indicesAccessControl.isGranted()) { - putTransientIfNonExisting(AuthorizationServiceField.INDICES_PERMISSIONS_KEY, indicesAccessControl); + private void authorizeSystemUser(final Authentication authentication, final String action, final String requestId, + final TransportRequest request, final ActionListener listener) { + if (SystemUser.isAuthorized(action)) { + putTransientIfNonExisting(AuthorizationServiceField.INDICES_PERMISSIONS_KEY, IndicesAccessControl.ALLOW_ALL); + putTransientIfNonExisting(AUTHORIZATION_INFO_KEY, SYSTEM_AUTHZ_INFO); + auditTrail.accessGranted(requestId, authentication, action, request, SYSTEM_AUTHZ_INFO); + listener.onResponse(null); } else { - throw denial(auditId, authentication, action, request, permission.names()); + auditTrail.accessDenied(requestId, authentication, action, request, SYSTEM_AUTHZ_INFO); + listener.onFailure(denialException(authentication, action, null)); } + } - //if we are creating an index we need to authorize potential aliases created at the same time - if (IndexPrivilege.CREATE_INDEX_MATCHER.test(action)) { - assert request instanceof CreateIndexRequest; - Set aliases = ((CreateIndexRequest) request).aliases(); - if (!aliases.isEmpty()) { - Set aliasesAndIndices = Sets.newHashSet(localIndices); - for (Alias alias : aliases) { - aliasesAndIndices.add(alias.name()); - } - indicesAccessControl = permission.authorize("indices:admin/aliases", aliasesAndIndices, metaData, fieldPermissionsCache); - if (!indicesAccessControl.isGranted()) { - throw denial(auditId, authentication, "indices:admin/aliases", request, permission.names()); - } - // no need to re-add the indicesAccessControl in the context, - // because the create index call doesn't do anything FLS or DLS + private TransportRequest maybeUnwrapRequest(Authentication authentication, TransportRequest originalRequest, String action, + String requestId) { + final TransportRequest request; + if (originalRequest instanceof ConcreteShardRequest) { + request = ((ConcreteShardRequest) originalRequest).getRequest(); + assert TransportActionProxy.isProxyRequest(request) == false : "expected non-proxy request for action: " + action; + } else { + request = TransportActionProxy.unwrapRequest(originalRequest); + final boolean isOriginalRequestProxyRequest = 
TransportActionProxy.isProxyRequest(originalRequest); + final boolean isProxyAction = TransportActionProxy.isProxyAction(action); + if (isProxyAction && isOriginalRequestProxyRequest == false) { + IllegalStateException cause = new IllegalStateException("originalRequest is not a proxy request: [" + originalRequest + + "] but action: [" + action + "] is a proxy action"); + auditTrail.accessDenied(requestId, authentication, action, request, EmptyAuthorizationInfo.INSTANCE); + throw denialException(authentication, action, cause); + } + if (TransportActionProxy.isProxyRequest(originalRequest) && TransportActionProxy.isProxyAction(action) == false) { + IllegalStateException cause = new IllegalStateException("originalRequest is a proxy request for: [" + request + + "] but action: [" + action + "] isn't"); + auditTrail.accessDenied(requestId, authentication, action, request, EmptyAuthorizationInfo.INSTANCE); + throw denialException(authentication, action, cause); } } - - if (action.equals(TransportShardBulkAction.ACTION_NAME)) { - // is this is performing multiple actions on the index, then check each of those actions. - assert request instanceof BulkShardRequest - : "Action " + action + " requires " + BulkShardRequest.class + " but was " + request.getClass(); - - authorizeBulkItems(auditId, authentication, (BulkShardRequest) request, permission, metaData, localIndices, authorizedIndices); - } - - auditTrail.accessGranted(auditId, authentication, action, request, permission.names()); + return request; } private boolean isInternalUser(User user) { return SystemUser.is(user) || XPackUser.is(user) || XPackSecurityUser.is(user); } + private void authorizeRunAs(final RequestInfo requestInfo, final AuthorizationInfo authzInfo, + final ActionListener listener) { + final Authentication authentication = requestInfo.getAuthentication(); + if (authentication.getLookedUpBy() == null) { + // this user did not really exist + // TODO(jaymode) find a better way to indicate lookup failed for a user and we need to fail authz + listener.onResponse(AuthorizationResult.deny()); + } else { + final AuthorizationEngine runAsAuthzEngine = getRunAsAuthorizationEngine(authentication); + runAsAuthzEngine.authorizeRunAs(requestInfo, authzInfo, listener); + } + } + /** * Performs authorization checks on the items within a {@link BulkShardRequest}. * This inspects the {@link BulkItemRequest items} within the request, computes @@ -357,48 +426,99 @@ private boolean isInternalUser(User user) { * and then checks whether that action is allowed on the targeted index. Items * that fail this checks are {@link BulkItemRequest#abort(String, Exception) * aborted}, with an - * {@link #denial(String, Authentication, String, TransportRequest, String[]) access + * {@link #denialException(Authentication, String, Exception) access * denied} exception. Because a shard level request is for exactly 1 index, and * there are a small number of possible item {@link DocWriteRequest.OpType * types}, the number of distinct authorization checks that need to be performed * is very small, but the results must be cached, to avoid adding a high * overhead to each bulk request. 
*/ - private void authorizeBulkItems(String auditRequestId, Authentication authentication, BulkShardRequest request, Role permission, - MetaData metaData, Set indices, AuthorizedIndices authorizedIndices) { + private void authorizeBulkItems(RequestInfo requestInfo, AuthorizationInfo authzInfo, + AuthorizationEngine authzEngine, AsyncSupplier resolvedIndicesAsyncSupplier, + AsyncSupplier> authorizedIndicesSupplier, + MetaData metaData, String requestId, ActionListener listener) { + final Authentication authentication = requestInfo.getAuthentication(); + final BulkShardRequest request = (BulkShardRequest) requestInfo.getRequest(); // Maps original-index -> expanded-index-name (expands date-math, but not aliases) final Map resolvedIndexNames = new HashMap<>(); - // Maps (resolved-index , action) -> is-granted - final Map, Boolean> indexActionAuthority = new HashMap<>(); - for (BulkItemRequest item : request.items()) { - String resolvedIndex = resolvedIndexNames.computeIfAbsent(item.index(), key -> { - final ResolvedIndices resolvedIndices = indicesAndAliasesResolver.resolveIndicesAndAliases(item.request(), metaData, - authorizedIndices); - if (resolvedIndices.getRemote().size() != 0) { - throw illegalArgument("Bulk item should not write to remote indices, but request writes to " - + String.join(",", resolvedIndices.getRemote())); - } - if (resolvedIndices.getLocal().size() != 1) { - throw illegalArgument("Bulk item should write to exactly 1 index, but request writes to " - + String.join(",", resolvedIndices.getLocal())); + // Maps action -> resolved indices set + final Map> actionToIndicesMap = new HashMap<>(); + + authorizedIndicesSupplier.getAsync(ActionListener.wrap(authorizedIndices -> { + resolvedIndicesAsyncSupplier.getAsync(ActionListener.wrap(overallResolvedIndices -> { + final Set localIndices = new HashSet<>(overallResolvedIndices.getLocal()); + for (BulkItemRequest item : request.items()) { + String resolvedIndex = resolvedIndexNames.computeIfAbsent(item.index(), key -> { + final ResolvedIndices resolvedIndices = + indicesAndAliasesResolver.resolveIndicesAndAliases(item.request(), metaData, authorizedIndices); + if (resolvedIndices.getRemote().size() != 0) { + throw illegalArgument("Bulk item should not write to remote indices, but request writes to " + + String.join(",", resolvedIndices.getRemote())); + } + if (resolvedIndices.getLocal().size() != 1) { + throw illegalArgument("Bulk item should write to exactly 1 index, but request writes to " + + String.join(",", resolvedIndices.getLocal())); + } + final String resolved = resolvedIndices.getLocal().get(0); + if (localIndices.contains(resolved) == false) { + throw illegalArgument("Found bulk item that writes to index " + resolved + " but the request writes to " + + localIndices); + } + return resolved; + }); + + final String itemAction = getAction(item); + actionToIndicesMap.compute(itemAction, (key, resolvedIndicesSet) -> { + final Set localSet = resolvedIndicesSet != null ? 
resolvedIndicesSet : new HashSet<>(); + localSet.add(resolvedIndex); + return localSet; + }); } - final String resolved = resolvedIndices.getLocal().get(0); - if (indices.contains(resolved) == false) { - throw illegalArgument("Found bulk item that writes to index " + resolved + " but the request writes to " + indices); - } - return resolved; - }); - final String itemAction = getAction(item); - final Tuple indexAndAction = new Tuple<>(resolvedIndex, itemAction); - final boolean granted = indexActionAuthority.computeIfAbsent(indexAndAction, key -> { - final IndicesAccessControl itemAccessControl = permission.authorize(itemAction, Collections.singleton(resolvedIndex), - metaData, fieldPermissionsCache); - return itemAccessControl.isGranted(); - }); - if (granted == false) { - item.abort(resolvedIndex, denial(auditRequestId, authentication, itemAction, request, permission.names())); - } - } + + final ActionListener>> bulkAuthzListener = + ActionListener.wrap(collection -> { + final Map actionToIndicesAccessControl = new HashMap<>(); + final AtomicBoolean audit = new AtomicBoolean(false); + collection.forEach(tuple -> { + final IndicesAccessControl existing = + actionToIndicesAccessControl.putIfAbsent(tuple.v1(), tuple.v2().getIndicesAccessControl()); + if (existing != null) { + throw new IllegalStateException("a value already exists for action " + tuple.v1()); + } + if (tuple.v2().isAuditable()) { + audit.set(true); + } + }); + + for (BulkItemRequest item : request.items()) { + final String resolvedIndex = resolvedIndexNames.get(item.index()); + final String itemAction = getAction(item); + final IndicesAccessControl indicesAccessControl = actionToIndicesAccessControl.get(getAction(item)); + final IndicesAccessControl.IndexAccessControl indexAccessControl + = indicesAccessControl.getIndexPermissions(resolvedIndex); + if (indexAccessControl == null || indexAccessControl.isGranted() == false) { + auditTrail.accessDenied(requestId, authentication, itemAction, request, authzInfo); + item.abort(resolvedIndex, denialException(authentication, itemAction, null)); + } else if (audit.get()) { + auditTrail.accessGranted(requestId, authentication, itemAction, request, authzInfo); + } + } + listener.onResponse(null); + }, listener::onFailure); + final ActionListener> groupedActionListener = wrapPreservingContext( + new GroupedActionListener<>(bulkAuthzListener, actionToIndicesMap.size(), Collections.emptyList()), threadContext); + + actionToIndicesMap.forEach((bulkItemAction, indices) -> { + final RequestInfo bulkItemInfo = + new RequestInfo(requestInfo.getAuthentication(), requestInfo.getRequest(), bulkItemAction); + authzEngine.authorizeIndexAction(bulkItemInfo, authzInfo, + ril -> ril.onResponse(new ResolvedIndices(new ArrayList<>(indices), Collections.emptyList())), + metaData.getAliasAndIndexLookup(), ActionListener.wrap(indexAuthorizationResult -> + groupedActionListener.onResponse(new Tuple<>(bulkItemAction, indexAuthorizationResult)), + groupedActionListener::onFailure)); + }); + }, listener::onFailure)); + }, listener::onFailure)); } private IllegalArgumentException illegalArgument(String message) { @@ -420,14 +540,9 @@ private static String getAction(BulkItemRequest item) { throw new IllegalArgumentException("No equivalent action for opType [" + docWriteRequest.opType() + "]"); } - private ResolvedIndices resolveIndexNames(String auditRequestId, Authentication authentication, String action, TransportRequest request, - MetaData metaData, AuthorizedIndices authorizedIndices, Role permission) { - try 
{ - return indicesAndAliasesResolver.resolve(request, metaData, authorizedIndices); - } catch (Exception e) { - auditTrail.accessDenied(auditRequestId, authentication, action, request, permission.names()); - throw e; - } + private void resolveIndexNames(TransportRequest request, MetaData metaData, List authorizedIndices, + ActionListener listener) { + listener.onResponse(indicesAndAliasesResolver.resolve(request, metaData, authorizedIndices)); } private void putTransientIfNonExisting(String key, Object value) { @@ -437,155 +552,93 @@ private void putTransientIfNonExisting(String key, Object value) { } } - public void roles(User user, Authentication authentication, ActionListener roleActionListener) { - // we need to special case the internal users in this method, if we apply the anonymous roles to every user including these system - // user accounts then we run into the chance of a deadlock because then we need to get a role that we may be trying to get as the - // internal user. The SystemUser is special cased as it has special privileges to execute internal actions and should never be - // passed into this method. The XPackUser has the Superuser role and we can simply return that - if (SystemUser.is(user)) { - throw new IllegalArgumentException("the user [" + user.principal() + "] is the system user and we should never try to get its" + - " roles"); + private ElasticsearchSecurityException denialException(Authentication authentication, String action, Exception cause) { + final User authUser = authentication.getUser().authenticatedUser(); + // Special case for anonymous user + if (isAnonymousEnabled && anonymousUser.equals(authUser)) { + if (anonymousAuthzExceptionEnabled == false) { + return authcFailureHandler.authenticationRequired(action, threadContext); + } } - if (XPackUser.is(user)) { - assert XPackUser.INSTANCE.roles().length == 1; - roleActionListener.onResponse(XPackUser.ROLE); - return; + // check for run as + if (authentication.getUser().isRunAs()) { + logger.debug("action [{}] is unauthorized for user [{}] run as [{}]", action, authUser.principal(), + authentication.getUser().principal()); + return authorizationError("action [{}] is unauthorized for user [{}] run as [{}]", cause, action, authUser.principal(), + authentication.getUser().principal()); } - if (XPackSecurityUser.is(user)) { - roleActionListener.onResponse(ReservedRolesStore.SUPERUSER_ROLE); - return; + logger.debug("action [{}] is unauthorized for user [{}]", action, authUser.principal()); + return authorizationError("action [{}] is unauthorized for user [{}]", cause, action, authUser.principal()); + } + + private class AuthorizationResultListener implements ActionListener { + + private final Consumer responseConsumer; + private final Consumer failureConsumer; + private final RequestInfo requestInfo; + private final String requestId; + private final AuthorizationInfo authzInfo; + + private AuthorizationResultListener(Consumer responseConsumer, Consumer failureConsumer, RequestInfo requestInfo, + String requestId, AuthorizationInfo authzInfo) { + this.responseConsumer = responseConsumer; + this.failureConsumer = failureConsumer; + this.requestInfo = requestInfo; + this.requestId = requestId; + this.authzInfo = authzInfo; } - final Authentication.AuthenticationType authType = authentication.getAuthenticationType(); - if (authType == Authentication.AuthenticationType.API_KEY) { - apiKeyService.getRoleForApiKey(authentication, rolesStore, roleActionListener); - } else { - Set roleNames = new HashSet<>(); - 
Collections.addAll(roleNames, user.roles()); - if (isAnonymousEnabled && anonymousUser.equals(user) == false) { - if (anonymousUser.roles().length == 0) { - throw new IllegalStateException("anonymous is only enabled when the anonymous user has roles"); + @Override + public void onResponse(T result) { + if (result.isGranted()) { + if (result.isAuditable()) { + auditTrail.accessGranted(requestId, requestInfo.getAuthentication(), requestInfo.getAction(), requestInfo.getRequest(), + authzInfo); + } + try { + responseConsumer.accept(result); + } catch (Exception e) { + failureConsumer.accept(e); } - Collections.addAll(roleNames, anonymousUser.roles()); - } - - if (roleNames.isEmpty()) { - roleActionListener.onResponse(Role.EMPTY); - } else if (roleNames.contains(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName())) { - roleActionListener.onResponse(ReservedRolesStore.SUPERUSER_ROLE); } else { - rolesStore.roles(roleNames, roleActionListener); + handleFailure(result.isAuditable(), null); } } - } - - private static boolean isCompositeAction(String action) { - return action.equals(BulkAction.NAME) || - action.equals(MultiGetAction.NAME) || - action.equals(MultiTermVectorsAction.NAME) || - action.equals(MultiSearchAction.NAME) || - action.equals("indices:data/read/mpercolate") || - action.equals("indices:data/read/msearch/template") || - action.equals("indices:data/read/search/template") || - action.equals("indices:data/write/reindex") || - action.equals("indices:data/read/sql") || - action.equals("indices:data/read/sql/translate"); - } - - private static boolean isTranslatedToBulkAction(String action) { - return action.equals(IndexAction.NAME) || - action.equals(DeleteAction.NAME) || - action.equals(INDEX_SUB_REQUEST_PRIMARY) || - action.equals(INDEX_SUB_REQUEST_REPLICA) || - action.equals(DELETE_SUB_REQUEST_PRIMARY) || - action.equals(DELETE_SUB_REQUEST_REPLICA); - } - private static boolean isScrollRelatedAction(String action) { - return action.equals(SearchScrollAction.NAME) || - action.equals(SearchTransportService.FETCH_ID_SCROLL_ACTION_NAME) || - action.equals(SearchTransportService.QUERY_FETCH_SCROLL_ACTION_NAME) || - action.equals(SearchTransportService.QUERY_SCROLL_ACTION_NAME) || - action.equals(SearchTransportService.FREE_CONTEXT_SCROLL_ACTION_NAME) || - action.equals(ClearScrollAction.NAME) || - action.equals("indices:data/read/sql/close_cursor") || - action.equals(SearchTransportService.CLEAR_SCROLL_CONTEXTS_ACTION_NAME); - } + @Override + public void onFailure(Exception e) { + handleFailure(true, e); + } - static boolean checkSameUserPermissions(String action, TransportRequest request, Authentication authentication) { - final boolean actionAllowed = SAME_USER_PRIVILEGE.test(action); - if (actionAllowed) { - if (request instanceof UserRequest == false) { - assert false : "right now only a user request should be allowed"; - return false; - } - UserRequest userRequest = (UserRequest) request; - String[] usernames = userRequest.usernames(); - if (usernames == null || usernames.length != 1 || usernames[0] == null) { - assert false : "this role should only be used for actions to apply to a single user"; - return false; - } - final String username = usernames[0]; - final boolean sameUsername = authentication.getUser().principal().equals(username); - if (sameUsername && ChangePasswordAction.NAME.equals(action)) { - return checkChangePasswordAction(authentication); + private void handleFailure(boolean audit, @Nullable Exception e) { + if (audit) { + auditTrail.accessDenied(requestId, 
requestInfo.getAuthentication(), requestInfo.getAction(), requestInfo.getRequest(), + authzInfo); } - - assert AuthenticateAction.NAME.equals(action) || HasPrivilegesAction.NAME.equals(action) - || GetUserPrivilegesAction.NAME.equals(action) || sameUsername == false - : "Action '" + action + "' should not be possible when sameUsername=" + sameUsername; - return sameUsername; + failureConsumer.accept(denialException(requestInfo.getAuthentication(), requestInfo.getAction(), e)); } - return false; } - private static boolean checkChangePasswordAction(Authentication authentication) { - // we need to verify that this user was authenticated by or looked up by a realm type that support password changes - // otherwise we open ourselves up to issues where a user in a different realm could be created with the same username - // and do malicious things - final boolean isRunAs = authentication.getUser().isRunAs(); - final String realmType; - if (isRunAs) { - realmType = authentication.getLookedUpBy().getType(); - } else { - realmType = authentication.getAuthenticatedBy().getType(); - } - - assert realmType != null; - // ensure the user was authenticated by a realm that we can change a password for. The native realm is an internal realm and - // right now only one can exist in the realm configuration - if this changes we should update this check - return ReservedRealm.TYPE.equals(realmType) || NativeRealmSettings.TYPE.equals(realmType); - } + private static class CachingAsyncSupplier implements AsyncSupplier { - ElasticsearchSecurityException denial(String auditRequestId, Authentication authentication, String action, TransportRequest request, - String[] roleNames) { - auditTrail.accessDenied(auditRequestId, authentication, action, request, roleNames); - return denialException(authentication, action); - } + private final AsyncSupplier asyncSupplier; + private V value = null; - private ElasticsearchSecurityException denyRunAs(String auditRequestId, Authentication authentication, String action, - TransportRequest request, String[] roleNames) { - auditTrail.runAsDenied(auditRequestId, authentication, action, request, roleNames); - return denialException(authentication, action); - } + private CachingAsyncSupplier(AsyncSupplier supplier) { + this.asyncSupplier = supplier; + } - private ElasticsearchSecurityException denialException(Authentication authentication, String action) { - final User authUser = authentication.getUser().authenticatedUser(); - // Special case for anonymous user - if (isAnonymousEnabled && anonymousUser.equals(authUser)) { - if (anonymousAuthzExceptionEnabled == false) { - throw authcFailureHandler.authenticationRequired(action, threadContext); + @Override + public synchronized void getAsync(ActionListener listener) { + if (value == null) { + asyncSupplier.getAsync(ActionListener.wrap(loaded -> { + value = loaded; + listener.onResponse(value); + }, listener::onFailure)); + } else { + listener.onResponse(value); } } - // check for run as - if (authentication.getUser().isRunAs()) { - logger.debug("action [{}] is unauthorized for user [{}] run as [{}]", action, authUser.principal(), - authentication.getUser().principal()); - return authorizationError("action [{}] is unauthorized for user [{}] run as [{}]", action, authUser.principal(), - authentication.getUser().principal()); - } - logger.debug("action [{}] is unauthorized for user [{}]", action, authUser.principal()); - return authorizationError("action [{}] is unauthorized for user [{}]", action, authUser.principal()); } public static 
void addSettings(List> settings) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java index 3b141a43b4b80..0397fac1027ea 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java @@ -6,20 +6,15 @@ package org.elasticsearch.xpack.security.authz; import org.elasticsearch.Version; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationField; -import org.elasticsearch.xpack.core.security.authz.permission.Role; import org.elasticsearch.xpack.core.security.support.Automatons; -import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.xpack.core.security.user.XPackSecurityUser; import org.elasticsearch.xpack.core.security.user.XPackUser; -import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Predicate; @@ -126,61 +121,4 @@ public static void switchUserBasedOnActionOriginAndExecute(ThreadContext threadC private static boolean isInternalAction(String action) { return INTERNAL_PREDICATE.test(action); } - - /** - * A base class to authorize authorize a given {@link Authentication} against it's users or run-as users roles. - * This class fetches the roles for the users asynchronously and then authenticates the in the callback. - */ - public static class AsyncAuthorizer { - - private final ActionListener listener; - private final BiConsumer consumer; - private final Authentication authentication; - private volatile Role userRoles; - private volatile Role runAsRoles; - private CountDown countDown = new CountDown(2); // we expect only two responses!! 
- - public AsyncAuthorizer(Authentication authentication, ActionListener listener, BiConsumer consumer) { - this.consumer = consumer; - this.listener = listener; - this.authentication = authentication; - } - - public void authorize(AuthorizationService service) { - if (SystemUser.is(authentication.getUser().authenticatedUser())) { - assert authentication.getUser().isRunAs() == false; - setUserRoles(null); // we can inform the listener immediately - nothing to fetch for us on system user - setRunAsRoles(null); - } else { - service.roles(authentication.getUser().authenticatedUser(), authentication, - ActionListener.wrap(this::setUserRoles, listener::onFailure)); - if (authentication.getUser().isRunAs()) { - service.roles(authentication.getUser(), authentication, ActionListener.wrap(this::setRunAsRoles, listener::onFailure)); - } else { - setRunAsRoles(null); - } - } - } - - private void setUserRoles(Role roles) { - this.userRoles = roles; - maybeRun(); - } - - private void setRunAsRoles(Role roles) { - this.runAsRoles = roles; - maybeRun(); - } - - private void maybeRun() { - if (countDown.countDown()) { - try { - consumer.accept(userRoles, runAsRoles); - } catch (Exception e) { - listener.onFailure(e); - } - } - } - - } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizedIndices.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizedIndices.java deleted file mode 100644 index 40fa0e2ff9938..0000000000000 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizedIndices.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.security.authz; - -import org.elasticsearch.cluster.metadata.AliasOrIndex; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.xpack.core.security.authz.permission.Role; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.function.Predicate; - -/** - * Abstraction used to make sure that we lazily load authorized indices only when requested and only maximum once per request. Also - * makes sure that authorized indices don't get updated throughout the same request for the same user. - */ -class AuthorizedIndices { - private final String action; - private final MetaData metaData; - private final Role userRoles; - private List authorizedIndices; - - AuthorizedIndices(Role userRoles, String action, MetaData metaData) { - this.userRoles = userRoles; - this.action = action; - this.metaData = metaData; - } - - List get() { - if (authorizedIndices == null) { - authorizedIndices = load(); - } - return authorizedIndices; - } - - private List load() { - Predicate predicate = userRoles.allowedIndicesMatcher(action); - - List indicesAndAliases = new ArrayList<>(); - // TODO: can this be done smarter? I think there are usually more indices/aliases in the cluster then indices defined a roles? 
- for (Map.Entry entry : metaData.getAliasAndIndexLookup().entrySet()) { - String aliasOrIndex = entry.getKey(); - if (predicate.test(aliasOrIndex)) { - indicesAndAliases.add(aliasOrIndex); - } - } - - return Collections.unmodifiableList(indicesAndAliases); - } -} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java index aa1461b189a39..03c78ed903e81 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java @@ -28,7 +28,7 @@ import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.xpack.core.security.authz.IndicesAndAliasesResolverField; +import org.elasticsearch.xpack.core.security.authz.ResolvedIndices; import java.util.ArrayList; import java.util.Arrays; @@ -87,7 +87,7 @@ class IndicesAndAliasesResolver { * Otherwise, N will be added to the local index list. */ - ResolvedIndices resolve(TransportRequest request, MetaData metaData, AuthorizedIndices authorizedIndices) { + ResolvedIndices resolve(TransportRequest request, MetaData metaData, List authorizedIndices) { if (request instanceof IndicesAliasesRequest) { ResolvedIndices.Builder resolvedIndicesBuilder = new ResolvedIndices.Builder(); IndicesAliasesRequest indicesAliasesRequest = (IndicesAliasesRequest) request; @@ -107,7 +107,7 @@ ResolvedIndices resolve(TransportRequest request, MetaData metaData, AuthorizedI } - ResolvedIndices resolveIndicesAndAliases(IndicesRequest indicesRequest, MetaData metaData, AuthorizedIndices authorizedIndices) { + ResolvedIndices resolveIndicesAndAliases(IndicesRequest indicesRequest, MetaData metaData, List authorizedIndices) { final ResolvedIndices.Builder resolvedIndicesBuilder = new ResolvedIndices.Builder(); boolean indicesReplacedWithNoIndices = false; if (indicesRequest instanceof PutMappingRequest && ((PutMappingRequest) indicesRequest).getConcreteIndex() != null) { @@ -134,7 +134,7 @@ ResolvedIndices resolveIndicesAndAliases(IndicesRequest indicesRequest, MetaData // check for all and return list of authorized indices if (IndexNameExpressionResolver.isAllIndices(indicesList(indicesRequest.indices()))) { if (replaceWildcards) { - for (String authorizedIndex : authorizedIndices.get()) { + for (String authorizedIndex : authorizedIndices) { if (isIndexVisible(authorizedIndex, indicesOptions, metaData)) { resolvedIndicesBuilder.addLocal(authorizedIndex); } @@ -150,11 +150,11 @@ ResolvedIndices resolveIndicesAndAliases(IndicesRequest indicesRequest, MetaData split = new ResolvedIndices(Arrays.asList(indicesRequest.indices()), Collections.emptyList()); } List replaced = replaceWildcardsWithAuthorizedIndices(split.getLocal(), indicesOptions, metaData, - authorizedIndices.get(), replaceWildcards); + authorizedIndices, replaceWildcards); if (indicesOptions.ignoreUnavailable()) { //out of all the explicit names (expanded from wildcards and original ones that were left untouched) //remove all the ones that the current user is not authorized for and ignore them - replaced = replaced.stream().filter(authorizedIndices.get()::contains).collect(Collectors.toList()); + replaced = 
replaced.stream().filter(authorizedIndices::contains).collect(Collectors.toList()); } resolvedIndicesBuilder.addLocal(replaced); resolvedIndicesBuilder.addRemote(split.getRemote()); @@ -195,7 +195,7 @@ ResolvedIndices resolveIndicesAndAliases(IndicesRequest indicesRequest, MetaData AliasesRequest aliasesRequest = (AliasesRequest) indicesRequest; if (aliasesRequest.expandAliasesWildcards()) { List aliases = replaceWildcardsWithAuthorizedAliases(aliasesRequest.aliases(), - loadAuthorizedAliases(authorizedIndices.get(), metaData)); + loadAuthorizedAliases(authorizedIndices, metaData)); aliasesRequest.replaceAliases(aliases.toArray(new String[aliases.size()])); } if (indicesReplacedWithNoIndices) { @@ -226,9 +226,8 @@ ResolvedIndices resolveIndicesAndAliases(IndicesRequest indicesRequest, MetaData * request's concrete index is not in the list of authorized indices, then we need to look to * see if this can be authorized against an alias */ - static String getPutMappingIndexOrAlias(PutMappingRequest request, AuthorizedIndices authorizedIndices, MetaData metaData) { + static String getPutMappingIndexOrAlias(PutMappingRequest request, List authorizedIndicesList, MetaData metaData) { final String concreteIndexName = request.getConcreteIndex().getName(); - final List authorizedIndicesList = authorizedIndices.get(); // validate that the concrete index exists, otherwise there is no remapping that we could do final AliasOrIndex aliasOrIndex = metaData.getAliasAndIndexLookup().get(concreteIndexName); @@ -457,100 +456,4 @@ ResolvedIndices splitLocalAndRemoteIndexNames(String... indices) { } } - /** - * Stores a collection of index names separated into "local" and "remote". - * This allows the resolution and categorization to take place exactly once per-request. - */ - public static class ResolvedIndices { - private final List local; - private final List remote; - - ResolvedIndices(List local, List remote) { - this.local = Collections.unmodifiableList(local); - this.remote = Collections.unmodifiableList(remote); - } - - /** - * Returns the collection of index names that have been stored as "local" indices. - * This is a List because order may be important. For example [ "a*" , "-a1" ] is interpreted differently - * to [ "-a1", "a*" ]. As a consequence, this list may contain duplicates. - */ - public List getLocal() { - return local; - } - - /** - * Returns the collection of index names that have been stored as "remote" indices. - */ - public List getRemote() { - return remote; - } - - /** - * @return true if both the {@link #getLocal() local} and {@link #getRemote() remote} index lists are empty. - */ - public boolean isEmpty() { - return local.isEmpty() && remote.isEmpty(); - } - - /** - * @return true if the {@link #getRemote() remote} index lists is empty, and the local index list contains the - * {@link IndicesAndAliasesResolverField#NO_INDEX_PLACEHOLDER no-index-placeholder} and nothing else. 
- */ - public boolean isNoIndicesPlaceholder() { - return remote.isEmpty() && local.size() == 1 && local.contains(NO_INDEX_PLACEHOLDER); - } - - private String[] toArray() { - final String[] array = new String[local.size() + remote.size()]; - int i = 0; - for (String index : local) { - array[i++] = index; - } - for (String index : remote) { - array[i++] = index; - } - return array; - } - - /** - * Builder class for ResolvedIndices that allows for the building of a list of indices - * without the need to construct new objects and merging them together - */ - private static class Builder { - - private final List local = new ArrayList<>(); - private final List remote = new ArrayList<>(); - - /** add a local index name */ - private void addLocal(String index) { - local.add(index); - } - - /** adds the array of local index names */ - private void addLocal(String[] indices) { - local.addAll(Arrays.asList(indices)); - } - - /** adds the list of local index names */ - private void addLocal(List indices) { - local.addAll(indices); - } - - /** adds the list of remote index names */ - private void addRemote(List indices) { - remote.addAll(indices); - } - - /** @return true if both the local and remote index lists are empty. */ - private boolean isEmpty() { - return local.isEmpty() && remote.isEmpty(); - } - - /** @return a immutable ResolvedIndices instance with the local and remote index lists */ - private ResolvedIndices build() { - return new ResolvedIndices(local, remote); - } - } - } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java new file mode 100644 index 0000000000000..e2824e74ecafe --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java @@ -0,0 +1,552 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.security.authz; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.Operations; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.CompositeIndicesRequest; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.BulkShardRequest; +import org.elasticsearch.action.delete.DeleteAction; +import org.elasticsearch.action.get.MultiGetAction; +import org.elasticsearch.action.index.IndexAction; +import org.elasticsearch.action.search.ClearScrollAction; +import org.elasticsearch.action.search.MultiSearchAction; +import org.elasticsearch.action.search.SearchScrollAction; +import org.elasticsearch.action.search.SearchTransportService; +import org.elasticsearch.action.termvectors.MultiTermVectorsAction; +import org.elasticsearch.cluster.metadata.AliasOrIndex; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.transport.TransportActionProxy; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction; +import org.elasticsearch.xpack.core.security.action.user.ChangePasswordAction; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesAction; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; +import org.elasticsearch.xpack.core.security.action.user.UserRequest; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; +import org.elasticsearch.xpack.core.security.authz.ResolvedIndices; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; +import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache; +import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition; +import org.elasticsearch.xpack.core.security.authz.permission.IndicesPermission; +import org.elasticsearch.xpack.core.security.authz.permission.ResourcePrivileges; +import org.elasticsearch.xpack.core.security.authz.permission.ResourcePrivilegesMap; +import org.elasticsearch.xpack.core.security.authz.permission.Role; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; 
+import org.elasticsearch.xpack.core.security.authz.privilege.Privilege; +import org.elasticsearch.xpack.core.security.support.Automatons; +import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; +import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.TreeSet; +import java.util.function.Predicate; + +import static org.elasticsearch.common.Strings.arrayToCommaDelimitedString; +import static org.elasticsearch.xpack.security.action.user.TransportHasPrivilegesAction.getApplicationNames; +import static org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail.PRINCIPAL_ROLES_FIELD_NAME; + +public class RBACEngine implements AuthorizationEngine { + + private static final Predicate SAME_USER_PRIVILEGE = Automatons.predicate( + ChangePasswordAction.NAME, AuthenticateAction.NAME, HasPrivilegesAction.NAME, GetUserPrivilegesAction.NAME); + private static final String INDEX_SUB_REQUEST_PRIMARY = IndexAction.NAME + "[p]"; + private static final String INDEX_SUB_REQUEST_REPLICA = IndexAction.NAME + "[r]"; + private static final String DELETE_SUB_REQUEST_PRIMARY = DeleteAction.NAME + "[p]"; + private static final String DELETE_SUB_REQUEST_REPLICA = DeleteAction.NAME + "[r]"; + + private static final Logger logger = LogManager.getLogger(RBACEngine.class); + + private final CompositeRolesStore rolesStore; + private final FieldPermissionsCache fieldPermissionsCache; + + public RBACEngine(Settings settings, CompositeRolesStore rolesStore) { + this.rolesStore = rolesStore; + this.fieldPermissionsCache = new FieldPermissionsCache(settings); + } + + @Override + public void resolveAuthorizationInfo(RequestInfo requestInfo, ActionListener listener) { + final Authentication authentication = requestInfo.getAuthentication(); + getRoles(authentication.getUser(), authentication, ActionListener.wrap(role -> { + if (authentication.getUser().isRunAs()) { + getRoles(authentication.getUser().authenticatedUser(), authentication, ActionListener.wrap( + authenticatedUserRole -> listener.onResponse(new RBACAuthorizationInfo(role, authenticatedUserRole)), + listener::onFailure)); + } else { + listener.onResponse(new RBACAuthorizationInfo(role, role)); + } + }, listener::onFailure)); + } + + private void getRoles(User user, Authentication authentication, ActionListener listener) { + rolesStore.getRoles(user, authentication, listener); + } + + @Override + public void authorizeRunAs(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, ActionListener listener) { + if (authorizationInfo instanceof RBACAuthorizationInfo) { + final Role role = ((RBACAuthorizationInfo) authorizationInfo).getAuthenticatedUserAuthorizationInfo().getRole(); + listener.onResponse( + new AuthorizationResult(role.checkRunAs(requestInfo.getAuthentication().getUser().principal()))); + } else { + listener.onFailure(new IllegalArgumentException("unsupported authorization info:" + + authorizationInfo.getClass().getSimpleName())); + } + } + + @Override + public void authorizeClusterAction(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, + ActionListener listener) { + if (authorizationInfo instanceof RBACAuthorizationInfo) { + final 
Role role = ((RBACAuthorizationInfo) authorizationInfo).getRole(); + if (role.checkClusterAction(requestInfo.getAction(), requestInfo.getRequest())) { + listener.onResponse(AuthorizationResult.granted()); + } else if (checkSameUserPermissions(requestInfo.getAction(), requestInfo.getRequest(), requestInfo.getAuthentication())) { + listener.onResponse(AuthorizationResult.granted()); + } else { + listener.onResponse(AuthorizationResult.deny()); + } + } else { + listener.onFailure(new IllegalArgumentException("unsupported authorization info:" + + authorizationInfo.getClass().getSimpleName())); + } + } + + // pkg private for testing + boolean checkSameUserPermissions(String action, TransportRequest request, Authentication authentication) { + final boolean actionAllowed = SAME_USER_PRIVILEGE.test(action); + if (actionAllowed) { + if (request instanceof UserRequest == false) { + assert false : "right now only a user request should be allowed"; + return false; + } + UserRequest userRequest = (UserRequest) request; + String[] usernames = userRequest.usernames(); + if (usernames == null || usernames.length != 1 || usernames[0] == null) { + assert false : "this role should only be used for actions to apply to a single user"; + return false; + } + final String username = usernames[0]; + final boolean sameUsername = authentication.getUser().principal().equals(username); + if (sameUsername && ChangePasswordAction.NAME.equals(action)) { + return checkChangePasswordAction(authentication); + } + + assert AuthenticateAction.NAME.equals(action) || HasPrivilegesAction.NAME.equals(action) + || GetUserPrivilegesAction.NAME.equals(action) || sameUsername == false + : "Action '" + action + "' should not be possible when sameUsername=" + sameUsername; + return sameUsername; + } + return false; + } + + private static boolean shouldAuthorizeIndexActionNameOnly(String action, TransportRequest request) { + switch (action) { + case BulkAction.NAME: + case IndexAction.NAME: + case DeleteAction.NAME: + case INDEX_SUB_REQUEST_PRIMARY: + case INDEX_SUB_REQUEST_REPLICA: + case DELETE_SUB_REQUEST_PRIMARY: + case DELETE_SUB_REQUEST_REPLICA: + case MultiGetAction.NAME: + case MultiTermVectorsAction.NAME: + case MultiSearchAction.NAME: + case "indices:data/read/mpercolate": + case "indices:data/read/msearch/template": + case "indices:data/read/search/template": + case "indices:data/write/reindex": + case "indices:data/read/sql": + case "indices:data/read/sql/translate": + if (request instanceof BulkShardRequest) { + return false; + } + if (request instanceof CompositeIndicesRequest == false) { + throw new IllegalStateException("Composite and bulk actions must implement " + + CompositeIndicesRequest.class.getSimpleName() + ", " + request.getClass().getSimpleName() + " doesn't. 
Action " + + action); + } + return true; + default: + return false; + } + } + + @Override + public void authorizeIndexAction(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, + AsyncSupplier indicesAsyncSupplier, + Map aliasOrIndexLookup, + ActionListener listener) { + final String action = requestInfo.getAction(); + final TransportRequest request = requestInfo.getRequest(); + final Authentication authentication = requestInfo.getAuthentication(); + if (TransportActionProxy.isProxyAction(action) || shouldAuthorizeIndexActionNameOnly(action, request)) { + // we've already validated that the request is a proxy request so we can skip that but we still + // need to validate that the action is allowed and then move on + authorizeIndexActionName(action, authorizationInfo, null, listener); + } else if (request instanceof IndicesRequest == false && request instanceof IndicesAliasesRequest == false) { + // scroll is special + // some APIs are indices requests that are not actually associated with indices. For example, + // search scroll request, is categorized under the indices context, but doesn't hold indices names + // (in this case, the security check on the indices was done on the search request that initialized + // the scroll. Given that scroll is implemented using a context on the node holding the shard, we + // piggyback on it and enhance the context with the original authentication. This serves as our method + // to validate the scroll id only stays with the same user! + // note that clear scroll shard level actions can originate from a clear scroll all, which doesn't require any + // indices permission as it's categorized under cluster. This is why the scroll check is performed + // even before checking if the user has any indices permission. + if (isScrollRelatedAction(action)) { + // if the action is a search scroll action, we first authorize that the user can execute the action for some + // index and if they cannot, we can fail the request early before we allow the execution of the action and in + // turn the shard actions + if (SearchScrollAction.NAME.equals(action)) { + authorizeIndexActionName(action, authorizationInfo, null, listener); + } else { + // we store the request as a transient in the ThreadContext in case of a authorization failure at the shard + // level. If authorization fails we will audit a access_denied message and will use the request to retrieve + // information such as the index and the incoming address of the request + listener.onResponse(new IndexAuthorizationResult(true, IndicesAccessControl.ALLOW_NO_INDICES)); + } + } else { + assert false : + "only scroll related requests are known indices api that don't support retrieving the indices they relate to"; + listener.onFailure(new IllegalStateException("only scroll related requests are known indices api that don't support " + + "retrieving the indices they relate to")); + } + } else if (request instanceof IndicesRequest && + IndicesAndAliasesResolver.allowsRemoteIndices((IndicesRequest) request)) { + // remote indices are allowed + indicesAsyncSupplier.getAsync(ActionListener.wrap(resolvedIndices -> { + assert !resolvedIndices.isEmpty() + : "every indices request needs to have its indices set thus the resolved indices must not be empty"; + //all wildcard expressions have been resolved and only the security plugin could have set '-*' here. 
+ //'-*' matches no indices so we allow the request to go through, which will yield an empty response + if (resolvedIndices.isNoIndicesPlaceholder()) { + // check action name + authorizeIndexActionName(action, authorizationInfo, IndicesAccessControl.ALLOW_NO_INDICES, listener); + } else { + buildIndicesAccessControl(authentication, action, authorizationInfo, + Sets.newHashSet(resolvedIndices.getLocal()), aliasOrIndexLookup, listener); + } + }, listener::onFailure)); + } else { + authorizeIndexActionName(action, authorizationInfo, IndicesAccessControl.ALLOW_NO_INDICES, + ActionListener.wrap(indexAuthorizationResult -> { + if (indexAuthorizationResult.isGranted()) { + indicesAsyncSupplier.getAsync(ActionListener.wrap(resolvedIndices -> { + assert !resolvedIndices.isEmpty() + : "every indices request needs to have its indices set thus the resolved indices must not be empty"; + //all wildcard expressions have been resolved and only the security plugin could have set '-*' here. + //'-*' matches no indices so we allow the request to go through, which will yield an empty response + if (resolvedIndices.isNoIndicesPlaceholder()) { + listener.onResponse(new IndexAuthorizationResult(true, IndicesAccessControl.ALLOW_NO_INDICES)); + } else { + buildIndicesAccessControl(authentication, action, authorizationInfo, + Sets.newHashSet(resolvedIndices.getLocal()), aliasOrIndexLookup, listener); + } + }, listener::onFailure)); + } else { + listener.onResponse(indexAuthorizationResult); + } + }, listener::onFailure)); + } + } + + private void authorizeIndexActionName(String action, AuthorizationInfo authorizationInfo, IndicesAccessControl grantedValue, + ActionListener listener) { + if (authorizationInfo instanceof RBACAuthorizationInfo) { + final Role role = ((RBACAuthorizationInfo) authorizationInfo).getRole(); + if (role.checkIndicesAction(action)) { + listener.onResponse(new IndexAuthorizationResult(true, grantedValue)); + } else { + listener.onResponse(new IndexAuthorizationResult(true, IndicesAccessControl.DENIED)); + } + } else { + listener.onFailure(new IllegalArgumentException("unsupported authorization info:" + + authorizationInfo.getClass().getSimpleName())); + } + } + + @Override + public void loadAuthorizedIndices(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, + Map aliasOrIndexLookup, ActionListener> listener) { + if (authorizationInfo instanceof RBACAuthorizationInfo) { + final Role role = ((RBACAuthorizationInfo) authorizationInfo).getRole(); + listener.onResponse(resolveAuthorizedIndicesFromRole(role, requestInfo.getAction(), aliasOrIndexLookup)); + } else { + listener.onFailure( + new IllegalArgumentException("unsupported authorization info:" + authorizationInfo.getClass().getSimpleName())); + } + } + + @Override + public void validateIndexPermissionsAreSubset(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, + Map> indexNameToNewNames, + ActionListener listener) { + if (authorizationInfo instanceof RBACAuthorizationInfo) { + final Role role = ((RBACAuthorizationInfo) authorizationInfo).getRole(); + Map permissionMap = new HashMap<>(); + for (Entry> entry : indexNameToNewNames.entrySet()) { + Automaton existingPermissions = permissionMap.computeIfAbsent(entry.getKey(), role::allowedActionsMatcher); + for (String alias : entry.getValue()) { + Automaton newNamePermissions = permissionMap.computeIfAbsent(alias, role::allowedActionsMatcher); + if (Operations.subsetOf(newNamePermissions, existingPermissions) == false) { + listener.onResponse(AuthorizationResult.deny()); + 
return; + } + } + } + listener.onResponse(AuthorizationResult.granted()); + } else { + listener.onFailure( + new IllegalArgumentException("unsupported authorization info:" + authorizationInfo.getClass().getSimpleName())); + } + } + + @Override + public void checkPrivileges(Authentication authentication, AuthorizationInfo authorizationInfo, + HasPrivilegesRequest request, + Collection applicationPrivileges, + ActionListener listener) { + if (authorizationInfo instanceof RBACAuthorizationInfo == false) { + listener.onFailure( + new IllegalArgumentException("unsupported authorization info:" + authorizationInfo.getClass().getSimpleName())); + return; + } + final Role userRole = ((RBACAuthorizationInfo) authorizationInfo).getRole(); + logger.trace(() -> new ParameterizedMessage("Check whether role [{}] has privileges cluster=[{}] index=[{}] application=[{}]", + Strings.arrayToCommaDelimitedString(userRole.names()), + Strings.arrayToCommaDelimitedString(request.clusterPrivileges()), + Strings.arrayToCommaDelimitedString(request.indexPrivileges()), + Strings.arrayToCommaDelimitedString(request.applicationPrivileges()) + )); + + Map cluster = new HashMap<>(); + for (String checkAction : request.clusterPrivileges()) { + final ClusterPrivilege checkPrivilege = ClusterPrivilege.get(Collections.singleton(checkAction)); + cluster.put(checkAction, userRole.grants(checkPrivilege)); + } + boolean allMatch = cluster.values().stream().allMatch(Boolean::booleanValue); + ResourcePrivilegesMap.Builder combineIndicesResourcePrivileges = ResourcePrivilegesMap.builder(); + for (RoleDescriptor.IndicesPrivileges check : request.indexPrivileges()) { + ResourcePrivilegesMap resourcePrivileges = userRole.checkIndicesPrivileges(Sets.newHashSet(check.getIndices()), + check.allowRestrictedIndices(), Sets.newHashSet(check.getPrivileges())); + allMatch = allMatch && resourcePrivileges.allAllowed(); + combineIndicesResourcePrivileges.addResourcePrivilegesMap(resourcePrivileges); + } + ResourcePrivilegesMap allIndices = combineIndicesResourcePrivileges.build(); + allMatch = allMatch && allIndices.allAllowed(); + + final Map> privilegesByApplication = new HashMap<>(); + for (String applicationName : getApplicationNames(request)) { + logger.debug("Checking privileges for application {}", applicationName); + ResourcePrivilegesMap.Builder builder = ResourcePrivilegesMap.builder(); + for (RoleDescriptor.ApplicationResourcePrivileges p : request.applicationPrivileges()) { + if (applicationName.equals(p.getApplication())) { + ResourcePrivilegesMap appPrivsByResourceMap = userRole.checkApplicationResourcePrivileges(applicationName, + Sets.newHashSet(p.getResources()), Sets.newHashSet(p.getPrivileges()), applicationPrivileges); + builder.addResourcePrivilegesMap(appPrivsByResourceMap); + } + } + ResourcePrivilegesMap resourcePrivsForApplication = builder.build(); + allMatch = allMatch && resourcePrivsForApplication.allAllowed(); + privilegesByApplication.put(applicationName, resourcePrivsForApplication.getResourceToResourcePrivileges().values()); + } + + listener.onResponse(new HasPrivilegesResponse(request.username(), allMatch, cluster, + allIndices.getResourceToResourcePrivileges().values(), privilegesByApplication)); + } + + + @Override + public void getUserPrivileges(Authentication authentication, AuthorizationInfo authorizationInfo, GetUserPrivilegesRequest request, + ActionListener listener) { + if (authorizationInfo instanceof RBACAuthorizationInfo == false) { + listener.onFailure( + new IllegalArgumentException("unsupported 
authorization info:" + authorizationInfo.getClass().getSimpleName())); + } else { + final Role role = ((RBACAuthorizationInfo) authorizationInfo).getRole(); + listener.onResponse(buildUserPrivilegesResponseObject(role)); + } + } + + GetUserPrivilegesResponse buildUserPrivilegesResponseObject(Role userRole) { + logger.trace(() -> new ParameterizedMessage("List privileges for role [{}]", arrayToCommaDelimitedString(userRole.names()))); + + // We use sorted sets for Strings because they will typically be small, and having a predictable order allows for simpler testing + final Set cluster = new TreeSet<>(); + // But we don't have a meaningful ordering for objects like ConditionalClusterPrivilege, so the tests work with "random" ordering + final Set conditionalCluster = new HashSet<>(); + for (Tuple tup : userRole.cluster().privileges()) { + if (tup.v2() == null) { + if (ClusterPrivilege.NONE.equals(tup.v1()) == false) { + cluster.addAll(tup.v1().name()); + } + } else { + conditionalCluster.add(tup.v2()); + } + } + + final Set indices = new LinkedHashSet<>(); + for (IndicesPermission.Group group : userRole.indices().groups()) { + final Set queries = group.getQuery() == null ? Collections.emptySet() : group.getQuery(); + final Set fieldSecurity = group.getFieldPermissions().hasFieldLevelSecurity() + ? group.getFieldPermissions().getFieldPermissionsDefinition().getFieldGrantExcludeGroups() : Collections.emptySet(); + indices.add(new GetUserPrivilegesResponse.Indices( + Arrays.asList(group.indices()), + group.privilege().name(), + fieldSecurity, + queries, + group.allowRestrictedIndices() + )); + } + + final Set application = new LinkedHashSet<>(); + for (String applicationName : userRole.application().getApplicationNames()) { + for (ApplicationPrivilege privilege : userRole.application().getPrivileges(applicationName)) { + final Set resources = userRole.application().getResourcePatterns(privilege); + if (resources.isEmpty()) { + logger.trace("No resources defined in application privilege {}", privilege); + } else { + application.add(RoleDescriptor.ApplicationResourcePrivileges.builder() + .application(applicationName) + .privileges(privilege.name()) + .resources(resources) + .build()); + } + } + } + + final Privilege runAsPrivilege = userRole.runAs().getPrivilege(); + final Set runAs; + if (Operations.isEmpty(runAsPrivilege.getAutomaton())) { + runAs = Collections.emptySet(); + } else { + runAs = runAsPrivilege.name(); + } + + return new GetUserPrivilegesResponse(cluster, conditionalCluster, indices, application, runAs); + } + + static List resolveAuthorizedIndicesFromRole(Role role, String action, Map aliasAndIndexLookup) { + Predicate predicate = role.allowedIndicesMatcher(action); + + List indicesAndAliases = new ArrayList<>(); + // TODO: can this be done smarter? I think there are usually more indices/aliases in the cluster then indices defined a roles? 
+ for (Map.Entry<String, AliasOrIndex> entry : aliasAndIndexLookup.entrySet()) { + String aliasOrIndex = entry.getKey(); + if (predicate.test(aliasOrIndex)) { + indicesAndAliases.add(aliasOrIndex); + } + } + return Collections.unmodifiableList(indicesAndAliases); + } + + private void buildIndicesAccessControl(Authentication authentication, String action, + AuthorizationInfo authorizationInfo, Set<String> indices, + Map<String, AliasOrIndex> aliasAndIndexLookup, + ActionListener<IndexAuthorizationResult> listener) { + if (authorizationInfo instanceof RBACAuthorizationInfo) { + final Role role = ((RBACAuthorizationInfo) authorizationInfo).getRole(); + final IndicesAccessControl accessControl = role.authorize(action, indices, aliasAndIndexLookup, fieldPermissionsCache); + listener.onResponse(new IndexAuthorizationResult(true, accessControl)); + } else { + listener.onFailure(new IllegalArgumentException("unsupported authorization info:" + + authorizationInfo.getClass().getSimpleName())); + } + } + + private static boolean checkChangePasswordAction(Authentication authentication) { + // we need to verify that this user was authenticated by or looked up by a realm type that supports password changes + // otherwise we open ourselves up to issues where a user in a different realm could be created with the same username + // and do malicious things + final boolean isRunAs = authentication.getUser().isRunAs(); + final String realmType; + if (isRunAs) { + realmType = authentication.getLookedUpBy().getType(); + } else { + realmType = authentication.getAuthenticatedBy().getType(); + } + + assert realmType != null; + // ensure the user was authenticated by a realm that we can change a password for. The native realm is an internal realm and + // right now only one can exist in the realm configuration - if this changes we should update this check + return ReservedRealm.TYPE.equals(realmType) || NativeRealmSettings.TYPE.equals(realmType); + } + + static class RBACAuthorizationInfo implements AuthorizationInfo { + + private final Role role; + private final Map<String, Object> info; + private final RBACAuthorizationInfo authenticatedUserAuthorizationInfo; + + RBACAuthorizationInfo(Role role, Role authenticatedUserRole) { + this.role = role; + this.info = Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, role.names()); + this.authenticatedUserAuthorizationInfo = + authenticatedUserRole == null ? 
this : new RBACAuthorizationInfo(authenticatedUserRole, null); + } + + Role getRole() { + return role; + } + + @Override + public Map asMap() { + return info; + } + + @Override + public RBACAuthorizationInfo getAuthenticatedUserAuthorizationInfo() { + return authenticatedUserAuthorizationInfo; + } + } + + private static boolean isScrollRelatedAction(String action) { + return action.equals(SearchScrollAction.NAME) || + action.equals(SearchTransportService.FETCH_ID_SCROLL_ACTION_NAME) || + action.equals(SearchTransportService.QUERY_FETCH_SCROLL_ACTION_NAME) || + action.equals(SearchTransportService.QUERY_SCROLL_ACTION_NAME) || + action.equals(SearchTransportService.FREE_CONTEXT_SCROLL_ACTION_NAME) || + action.equals(ClearScrollAction.NAME) || + action.equals("indices:data/read/sql/close_cursor") || + action.equals(SearchTransportService.CLEAR_SCROLL_CONTEXTS_ACTION_NAME); + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListener.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListener.java index 044552d9d7710..5e0c2945caadc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListener.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListener.java @@ -16,9 +16,10 @@ import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationField; import org.elasticsearch.xpack.security.audit.AuditUtil; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; +import static org.elasticsearch.xpack.security.authz.AuthorizationService.AUTHORIZATION_INFO_KEY; import static org.elasticsearch.xpack.security.authz.AuthorizationService.ORIGINATING_ACTION_KEY; -import static org.elasticsearch.xpack.security.authz.AuthorizationService.ROLE_NAMES_KEY; /** * A {@link SearchOperationListener} that is used to provide authorization for scroll requests. @@ -64,7 +65,7 @@ public void validateSearchContext(SearchContext searchContext, TransportRequest final Authentication current = Authentication.getAuthentication(threadContext); final String action = threadContext.getTransient(ORIGINATING_ACTION_KEY); ensureAuthenticatedUserIsSame(originalAuth, current, auditTrailService, searchContext.id(), action, request, - AuditUtil.extractRequestId(threadContext), threadContext.getTransient(ROLE_NAMES_KEY)); + AuditUtil.extractRequestId(threadContext), threadContext.getTransient(AUTHORIZATION_INFO_KEY)); } } } @@ -76,7 +77,8 @@ public void validateSearchContext(SearchContext searchContext, TransportRequest * (or lookup) realm. To work around this we compare the username and the originating realm type. */ static void ensureAuthenticatedUserIsSame(Authentication original, Authentication current, AuditTrailService auditTrailService, - long id, String action, TransportRequest request, String requestId, String[] roleNames) { + long id, String action, TransportRequest request, String requestId, + AuthorizationInfo authorizationInfo) { // this is really a best effort attempt since we cannot guarantee principal uniqueness // and realm names can change between nodes. 
final boolean samePrincipal = original.getUser().principal().equals(current.getUser().principal()); @@ -95,7 +97,7 @@ static void ensureAuthenticatedUserIsSame(Authentication original, Authenticatio final boolean sameUser = samePrincipal && sameRealmType; if (sameUser == false) { - auditTrailService.accessDenied(requestId, current, action, request, roleNames); + auditTrailService.accessDenied(requestId, current, action, request, authorizationInfo); throw new SearchContextMissingException(id); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/BulkShardRequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/BulkShardRequestInterceptor.java similarity index 58% rename from x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/BulkShardRequestInterceptor.java rename to x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/BulkShardRequestInterceptor.java index 6cbd47e475bea..21253f5b4bb37 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/BulkShardRequestInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/BulkShardRequestInterceptor.java @@ -3,11 +3,12 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.security.action.interceptor; +package org.elasticsearch.xpack.security.authz.interceptor; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.bulk.BulkItemRequest; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.update.UpdateRequest; @@ -15,16 +16,16 @@ import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportRequest; -import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.RequestInfo; import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; -import org.elasticsearch.xpack.core.security.authz.permission.Role; /** * Similar to {@link UpdateRequestInterceptor}, but checks if there are update requests embedded in a bulk request. 
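// With the change below, a bulk request is no longer rejected outright when field or document level
// security applies; only the embedded update items are aborted and the remaining items continue.
// A minimal, self-contained sketch of that per-item filtering, using a hypothetical Item type rather
// than the real BulkItemRequest API:
import java.util.ArrayList;
import java.util.List;

final class PerItemAbortSketch {
    static final class Item {
        final String index;
        final boolean isUpdate;
        Exception abortedWith;
        Item(String index, boolean isUpdate) { this.index = index; this.isUpdate = isUpdate; }
    }

    /** Aborts update items on indices protected by FLS/DLS; all other items are left untouched. */
    static List<Item> abortProtectedUpdates(List<Item> items, java.util.function.Predicate<String> hasFlsOrDls) {
        List<Item> aborted = new ArrayList<>();
        for (Item item : items) {
            if (item.isUpdate && hasFlsOrDls.test(item.index)) {
                item.abortedWith = new IllegalStateException(
                    "updates are not allowed when field or document level security is enabled");
                aborted.add(item);
            }
        }
        return aborted;
    }
}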
*/ -public class BulkShardRequestInterceptor implements RequestInterceptor { +public class BulkShardRequestInterceptor implements RequestInterceptor { private static final Logger logger = LogManager.getLogger(BulkShardRequestInterceptor.class); @@ -37,31 +38,36 @@ public BulkShardRequestInterceptor(ThreadPool threadPool, XPackLicenseState lice } @Override - public void intercept(BulkShardRequest request, Authentication authentication, Role userPermissions, String action) { - if (licenseState.isDocumentAndFieldLevelSecurityAllowed()) { + public void intercept(RequestInfo requestInfo, AuthorizationEngine authzEngine, AuthorizationInfo authorizationInfo, + ActionListener listener) { + if (requestInfo.getRequest() instanceof BulkShardRequest && licenseState.isDocumentAndFieldLevelSecurityAllowed()) { IndicesAccessControl indicesAccessControl = threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); - for (BulkItemRequest bulkItemRequest : request.items()) { + final BulkShardRequest bulkShardRequest = (BulkShardRequest) requestInfo.getRequest(); + for (BulkItemRequest bulkItemRequest : bulkShardRequest.items()) { IndicesAccessControl.IndexAccessControl indexAccessControl = indicesAccessControl.getIndexPermissions(bulkItemRequest.index()); + boolean found = false; if (indexAccessControl != null) { boolean fls = indexAccessControl.getFieldPermissions().hasFieldLevelSecurity(); boolean dls = indexAccessControl.getDocumentPermissions().hasDocumentLevelPermissions(); if (fls || dls) { if (bulkItemRequest.request() instanceof UpdateRequest) { - throw new ElasticsearchSecurityException("Can't execute a bulk request with update requests embedded if " + - "field or document level security is enabled", RestStatus.BAD_REQUEST); + found = true; + logger.trace("aborting bulk item update request for index [{}]", bulkItemRequest.index()); + bulkItemRequest.abort(bulkItemRequest.index(), new ElasticsearchSecurityException("Can't execute a bulk " + + "item request with update requests embedded if field or document level security is enabled", + RestStatus.BAD_REQUEST)); } } } - logger.trace("intercepted bulk request for index [{}] without any update requests, continuing execution", - bulkItemRequest.index()); + + if (found == false) { + logger.trace("intercepted bulk request for index [{}] without any update requests, continuing execution", + bulkItemRequest.index()); + } } } - } - - @Override - public boolean supports(TransportRequest request) { - return request instanceof BulkShardRequest; + listener.onResponse(null); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/FieldAndDocumentLevelSecurityRequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/FieldAndDocumentLevelSecurityRequestInterceptor.java new file mode 100644 index 0000000000000..cb2c1a5bb9315 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/FieldAndDocumentLevelSecurityRequestInterceptor.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.authz.interceptor; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.RequestInfo; +import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; +import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; + +/** + * Base class for interceptors that disables features when field level security is configured for indices a request + * is going to execute on. + */ +abstract class FieldAndDocumentLevelSecurityRequestInterceptor implements RequestInterceptor { + + private final ThreadContext threadContext; + private final XPackLicenseState licenseState; + private final Logger logger; + + FieldAndDocumentLevelSecurityRequestInterceptor(ThreadContext threadContext, XPackLicenseState licenseState) { + this.threadContext = threadContext; + this.licenseState = licenseState; + this.logger = LogManager.getLogger(getClass()); + } + + @Override + public void intercept(RequestInfo requestInfo, AuthorizationEngine authorizationEngine, AuthorizationInfo authorizationInfo, + ActionListener listener) { + if (requestInfo.getRequest() instanceof IndicesRequest) { + IndicesRequest indicesRequest = (IndicesRequest) requestInfo.getRequest(); + if (supports(indicesRequest) && licenseState.isDocumentAndFieldLevelSecurityAllowed()) { + final IndicesAccessControl indicesAccessControl = + threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); + for (String index : indicesRequest.indices()) { + IndicesAccessControl.IndexAccessControl indexAccessControl = indicesAccessControl.getIndexPermissions(index); + if (indexAccessControl != null) { + boolean fieldLevelSecurityEnabled = indexAccessControl.getFieldPermissions().hasFieldLevelSecurity(); + boolean documentLevelSecurityEnabled = indexAccessControl.getDocumentPermissions().hasDocumentLevelPermissions(); + if (fieldLevelSecurityEnabled || documentLevelSecurityEnabled) { + logger.trace("intercepted request for index [{}] with field level access controls [{}] " + + "document level access controls [{}]. 
disabling conflicting features", + index, fieldLevelSecurityEnabled, documentLevelSecurityEnabled); + disableFeatures(indicesRequest, fieldLevelSecurityEnabled, documentLevelSecurityEnabled, listener); + return; + } + } + logger.trace("intercepted request for index [{}] without field or document level access controls", index); + } + } + } + listener.onResponse(null); + } + + abstract void disableFeatures(IndicesRequest request, boolean fieldLevelSecurityEnabled, boolean documentLevelSecurityEnabled, + ActionListener listener); + + abstract boolean supports(IndicesRequest request); +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/IndicesAliasesRequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/IndicesAliasesRequestInterceptor.java new file mode 100644 index 0000000000000..cefa42eb0b9bd --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/IndicesAliasesRequestInterceptor.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.authz.interceptor; + +import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.RequestInfo; +import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; +import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; +import org.elasticsearch.xpack.core.security.support.Exceptions; +import org.elasticsearch.xpack.security.audit.AuditTrailService; +import org.elasticsearch.xpack.security.audit.AuditUtil; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.elasticsearch.action.support.ContextPreservingActionListener.wrapPreservingContext; + +public final class IndicesAliasesRequestInterceptor implements RequestInterceptor { + + private final ThreadContext threadContext; + private final XPackLicenseState licenseState; + private final AuditTrailService auditTrailService; + + public IndicesAliasesRequestInterceptor(ThreadContext threadContext, XPackLicenseState licenseState, + AuditTrailService auditTrailService) { + this.threadContext = threadContext; + this.licenseState = licenseState; + this.auditTrailService = auditTrailService; + } + + @Override + public void intercept(RequestInfo requestInfo, AuthorizationEngine authorizationEngine, AuthorizationInfo authorizationInfo, + ActionListener listener) { + if (requestInfo.getRequest() instanceof IndicesAliasesRequest) { + final IndicesAliasesRequest request = (IndicesAliasesRequest) requestInfo.getRequest(); + final XPackLicenseState frozenLicenseState = licenseState.copyCurrentLicenseState(); + if 
(frozenLicenseState.isAuthAllowed()) { + if (frozenLicenseState.isDocumentAndFieldLevelSecurityAllowed()) { + IndicesAccessControl indicesAccessControl = + threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); + for (IndicesAliasesRequest.AliasActions aliasAction : request.getAliasActions()) { + if (aliasAction.actionType() == IndicesAliasesRequest.AliasActions.Type.ADD) { + for (String index : aliasAction.indices()) { + IndicesAccessControl.IndexAccessControl indexAccessControl = + indicesAccessControl.getIndexPermissions(index); + if (indexAccessControl != null) { + final boolean fls = indexAccessControl.getFieldPermissions().hasFieldLevelSecurity(); + final boolean dls = indexAccessControl.getDocumentPermissions().hasDocumentLevelPermissions(); + if (fls || dls) { + listener.onFailure(new ElasticsearchSecurityException("Alias requests are not allowed for " + + "users who have field or document level security enabled on one of the indices", + RestStatus.BAD_REQUEST)); + return; + } + } + } + } + } + } + + Map> indexToAliasesMap = request.getAliasActions().stream() + .filter(aliasAction -> aliasAction.actionType() == IndicesAliasesRequest.AliasActions.Type.ADD) + .flatMap(aliasActions -> + Arrays.stream(aliasActions.indices()) + .map(indexName -> new Tuple<>(indexName, Arrays.asList(aliasActions.aliases())))) + .collect(Collectors.toMap(Tuple::v1, Tuple::v2, (existing, toMerge) -> { + List list = new ArrayList<>(existing.size() + toMerge.size()); + list.addAll(existing); + list.addAll(toMerge); + return list; + })); + authorizationEngine.validateIndexPermissionsAreSubset(requestInfo, authorizationInfo, indexToAliasesMap, + wrapPreservingContext(ActionListener.wrap(authzResult -> { + if (authzResult.isGranted()) { + // do not audit success again + listener.onResponse(null); + } else { + auditTrailService.accessDenied(AuditUtil.extractRequestId(threadContext), requestInfo.getAuthentication(), + requestInfo.getAction(), request, authorizationInfo); + listener.onFailure(Exceptions.authorizationError("Adding an alias is not allowed when the alias " + + "has more permissions than any of the indices")); + } + }, listener::onFailure), threadContext)); + } else { + listener.onResponse(null); + } + } else { + listener.onResponse(null); + } + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/RequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/RequestInterceptor.java new file mode 100644 index 0000000000000..cfda99653f69d --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/RequestInterceptor.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.authz.interceptor; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.RequestInfo; + +/** + * A request interceptor can introspect a request and modify it. 
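// The RequestInterceptor contract introduced here swaps the old synchronous
// intercept(request, authentication, role, action) call for a callback, so an interceptor can make its
// own asynchronous authorization checks before completing the listener. A small, self-contained sketch
// of how a caller might chain such interceptors; Listener and Interceptor are simplified stand-ins,
// not the real ActionListener<Void>/RequestInterceptor types:
import java.util.Iterator;
import java.util.List;

final class InterceptorChainSketch {
    interface Listener { void onResponse(); void onFailure(Exception e); }
    interface Interceptor<R> { void intercept(R request, Listener listener); }

    /** Runs each interceptor in turn; the first failure aborts the whole chain. */
    static <R> void run(R request, List<Interceptor<R>> interceptors, Listener done) {
        Iterator<Interceptor<R>> it = interceptors.iterator();
        Listener step = new Listener() {
            @Override public void onResponse() {
                if (it.hasNext()) {
                    it.next().intercept(request, this); // this listener is reused for the next step
                } else {
                    done.onResponse();
                }
            }
            @Override public void onFailure(Exception e) { done.onFailure(e); }
        };
        step.onResponse(); // kick off the chain as if a zeroth step had succeeded
    }
}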
+ */ +public interface RequestInterceptor { + + /** + * This interceptor will introspect the request and potentially modify it. If the interceptor does not apply + * to the request then the request will not be modified. + */ + void intercept(RequestInfo requestInfo, AuthorizationEngine authorizationEngine, AuthorizationInfo authorizationInfo, + ActionListener listener); +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptor.java new file mode 100644 index 0000000000000..ba4f7a61faf1e --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptor.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.authz.interceptor; + +import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.RequestInfo; +import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; +import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; +import org.elasticsearch.xpack.core.security.support.Exceptions; +import org.elasticsearch.xpack.security.audit.AuditTrailService; + +import java.util.Collections; + +import static org.elasticsearch.action.support.ContextPreservingActionListener.wrapPreservingContext; +import static org.elasticsearch.xpack.security.audit.AuditUtil.extractRequestId; + +public final class ResizeRequestInterceptor implements RequestInterceptor { + + private final ThreadContext threadContext; + private final XPackLicenseState licenseState; + private final AuditTrailService auditTrailService; + + public ResizeRequestInterceptor(ThreadPool threadPool, XPackLicenseState licenseState, + AuditTrailService auditTrailService) { + this.threadContext = threadPool.getThreadContext(); + this.licenseState = licenseState; + this.auditTrailService = auditTrailService; + } + + @Override + public void intercept(RequestInfo requestInfo, AuthorizationEngine authorizationEngine, AuthorizationInfo authorizationInfo, + ActionListener listener) { + if (requestInfo.getRequest() instanceof ResizeRequest) { + final ResizeRequest request = (ResizeRequest) requestInfo.getRequest(); + final XPackLicenseState frozenLicenseState = licenseState.copyCurrentLicenseState(); + if (frozenLicenseState.isAuthAllowed()) { + if (frozenLicenseState.isDocumentAndFieldLevelSecurityAllowed()) { + IndicesAccessControl indicesAccessControl = + threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); + IndicesAccessControl.IndexAccessControl indexAccessControl = + 
indicesAccessControl.getIndexPermissions(request.getSourceIndex()); + if (indexAccessControl != null) { + final boolean fls = indexAccessControl.getFieldPermissions().hasFieldLevelSecurity(); + final boolean dls = indexAccessControl.getDocumentPermissions().hasDocumentLevelPermissions(); + if (fls || dls) { + listener.onFailure(new ElasticsearchSecurityException("Resize requests are not allowed for users when " + + "field or document level security is enabled on the source index", RestStatus.BAD_REQUEST)); + return; + } + } + } + + authorizationEngine.validateIndexPermissionsAreSubset(requestInfo, authorizationInfo, + Collections.singletonMap(request.getSourceIndex(), Collections.singletonList(request.getTargetIndexRequest().index())), + wrapPreservingContext(ActionListener.wrap(authzResult -> { + if (authzResult.isGranted()) { + listener.onResponse(null); + } else { + if (authzResult.isAuditable()) { + auditTrailService.accessDenied(extractRequestId(threadContext), requestInfo.getAuthentication(), + requestInfo.getAction(), request, authorizationInfo); + } + listener.onFailure(Exceptions.authorizationError("Resizing an index is not allowed when the target index " + + "has more permissions than the source index")); + } + }, listener::onFailure), threadContext)); + } else { + listener.onResponse(null); + } + } else { + listener.onResponse(null); + } + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/SearchRequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestInterceptor.java similarity index 50% rename from x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/SearchRequestInterceptor.java rename to x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestInterceptor.java index 5738d3eef5051..14084b963c3a1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/SearchRequestInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/SearchRequestInterceptor.java @@ -3,42 +3,48 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.security.action.interceptor; +package org.elasticsearch.xpack.security.authz.interceptor; import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportRequest; /** * If field level security is enabled this interceptor disables the request cache for search requests. 
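// The request cache is keyed on the shard-level request, not on which fields or documents the caller
// is allowed to see, so a cached entry built for one user could be served to another; the interceptor
// below therefore turns the cache off and, under document level security, rejects suggest and profile.
// A compact, self-contained sketch of that decision with a hypothetical SearchFlags stand-in for the
// real SearchRequest:
final class SearchInterceptSketch {
    static final class SearchFlags {
        boolean requestCache = true;
        boolean usesSuggest;
        boolean usesProfile;
    }

    /** Returns an error message if the search must be rejected, otherwise null; mutates the cache flag. */
    static String apply(SearchFlags search, boolean fls, boolean dls) {
        if (fls || dls) {
            search.requestCache = false; // never serve one user's cached results to another
        }
        if (dls && search.usesSuggest) {
            return "suggest is not supported when document level security is enabled";
        }
        if (dls && search.usesProfile) {
            return "profiling is not supported when document level security is enabled";
        }
        return null;
    }
}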
*/ -public class SearchRequestInterceptor extends FieldAndDocumentLevelSecurityRequestInterceptor { +public class SearchRequestInterceptor extends FieldAndDocumentLevelSecurityRequestInterceptor { public SearchRequestInterceptor(ThreadPool threadPool, XPackLicenseState licenseState) { super(threadPool.getThreadContext(), licenseState); } @Override - public void disableFeatures(SearchRequest request, boolean fieldLevelSecurityEnabled, boolean documentLevelSecurityEnabled) { + public void disableFeatures(IndicesRequest indicesRequest, boolean fieldLevelSecurityEnabled, boolean documentLevelSecurityEnabled, + ActionListener listener) { + final SearchRequest request = (SearchRequest) indicesRequest; request.requestCache(false); if (documentLevelSecurityEnabled) { if (request.source() != null && request.source().suggest() != null) { - throw new ElasticsearchSecurityException("Suggest isn't supported if document level security is enabled", - RestStatus.BAD_REQUEST); - } - if (request.source() != null && request.source().profile()) { - throw new ElasticsearchSecurityException("A search request cannot be profiled if document level security is enabled", - RestStatus.BAD_REQUEST); + listener.onFailure(new ElasticsearchSecurityException("Suggest isn't supported if document level security is enabled", + RestStatus.BAD_REQUEST)); + } else if (request.source() != null && request.source().profile()) { + listener.onFailure(new ElasticsearchSecurityException("A search request cannot be profiled if document level security " + + "is enabled", RestStatus.BAD_REQUEST)); + } else { + listener.onResponse(null); } + } else { + listener.onResponse(null); } } @Override - public boolean supports(TransportRequest request) { + public boolean supports(IndicesRequest request) { return request instanceof SearchRequest; } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/UpdateRequestInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/UpdateRequestInterceptor.java similarity index 65% rename from x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/UpdateRequestInterceptor.java rename to x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/UpdateRequestInterceptor.java index db265333e6965..ba0c44eb4e5df 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/interceptor/UpdateRequestInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/interceptor/UpdateRequestInterceptor.java @@ -3,14 +3,15 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.security.action.interceptor; +package org.elasticsearch.xpack.security.authz.interceptor; import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportRequest; /** * A request interceptor that fails update request if field or document level security is enabled. 
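// Updates are rejected here because a partial update is applied through the user's filtered view of the
// document, as the javadoc goes on to explain: any field hidden by field level security would silently
// disappear on the write-back. A tiny self-contained illustration of that loss, with plain maps standing
// in for document sources:
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

final class FilteredUpdateSketch {
    static Map<String, Object> mergeThroughFilteredView(Map<String, Object> stored, Set<String> visibleFields,
                                                        Map<String, Object> changes) {
        Map<String, Object> visible = new HashMap<>();
        stored.forEach((k, v) -> { if (visibleFields.contains(k)) visible.put(k, v); }); // hidden fields vanish here
        visible.putAll(changes);
        return visible; // writing this back would drop every field the user was not allowed to see
    }
}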
@@ -19,20 +20,21 @@ * because only the fields that a role can see would be used to perform the update and without knowing the user may * remove the other fields, not visible for him, from the document being updated. */ -public class UpdateRequestInterceptor extends FieldAndDocumentLevelSecurityRequestInterceptor { +public class UpdateRequestInterceptor extends FieldAndDocumentLevelSecurityRequestInterceptor { public UpdateRequestInterceptor(ThreadPool threadPool, XPackLicenseState licenseState) { super(threadPool.getThreadContext(), licenseState); } @Override - protected void disableFeatures(UpdateRequest updateRequest, boolean fieldLevelSecurityEnabled, boolean documentLevelSecurityEnabled) { - throw new ElasticsearchSecurityException("Can't execute an update request if field or document level security is enabled", - RestStatus.BAD_REQUEST); + protected void disableFeatures(IndicesRequest updateRequest, boolean fieldLevelSecurityEnabled, boolean documentLevelSecurityEnabled, + ActionListener listener) { + listener.onFailure(new ElasticsearchSecurityException("Can't execute an update request if field or document level security " + + "is enabled", RestStatus.BAD_REQUEST)); } @Override - public boolean supports(TransportRequest request) { + public boolean supports(IndicesRequest request) { return request instanceof UpdateRequest; } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java index 622617e42201e..2d1d4a98b4ba6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java @@ -25,11 +25,13 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.common.IteratingActionListener; +import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition.FieldGrantExcludeGroup; +import org.elasticsearch.xpack.core.security.authz.permission.LimitedRole; import org.elasticsearch.xpack.core.security.authz.permission.Role; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; @@ -37,6 +39,12 @@ import org.elasticsearch.xpack.core.security.authz.privilege.Privilege; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult; +import org.elasticsearch.xpack.core.security.user.AnonymousUser; +import org.elasticsearch.xpack.core.security.user.SystemUser; +import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.core.security.user.XPackSecurityUser; +import org.elasticsearch.xpack.core.security.user.XPackUser; +import org.elasticsearch.xpack.security.authc.ApiKeyService; import 
org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.ArrayList; @@ -97,19 +105,24 @@ public class CompositeRolesStore { private final Cache negativeLookupCache; private final ThreadContext threadContext; private final AtomicLong numInvalidation = new AtomicLong(); + private final AnonymousUser anonymousUser; + private final ApiKeyService apiKeyService; + private final boolean isAnonymousEnabled; private final List, ActionListener>> builtInRoleProviders; private final List, ActionListener>> allRoleProviders; public CompositeRolesStore(Settings settings, FileRolesStore fileRolesStore, NativeRolesStore nativeRolesStore, ReservedRolesStore reservedRolesStore, NativePrivilegeStore privilegeStore, List, ActionListener>> rolesProviders, - ThreadContext threadContext, XPackLicenseState licenseState, FieldPermissionsCache fieldPermissionsCache) { + ThreadContext threadContext, XPackLicenseState licenseState, FieldPermissionsCache fieldPermissionsCache, + ApiKeyService apiKeyService) { this.fileRolesStore = fileRolesStore; fileRolesStore.addListener(this::invalidate); this.nativeRolesStore = nativeRolesStore; this.privilegeStore = privilegeStore; this.licenseState = licenseState; this.fieldPermissionsCache = fieldPermissionsCache; + this.apiKeyService = apiKeyService; CacheBuilder builder = CacheBuilder.builder(); final int cacheSize = CACHE_SIZE_SETTING.get(settings); if (cacheSize >= 0) { @@ -133,6 +146,8 @@ public CompositeRolesStore(Settings settings, FileRolesStore fileRolesStore, Nat allList.addAll(rolesProviders); this.allRoleProviders = Collections.unmodifiableList(allList); } + this.anonymousUser = new AnonymousUser(settings); + this.isAnonymousEnabled = AnonymousUser.isAnonymousEnabled(settings); } public void roles(Set roleNames, ActionListener roleActionListener) { @@ -164,6 +179,60 @@ public void roles(Set roleNames, ActionListener roleActionListener } } + public void getRoles(User user, Authentication authentication, ActionListener roleActionListener) { + // we need to special case the internal users in this method, if we apply the anonymous roles to every user including these system + // user accounts then we run into the chance of a deadlock because then we need to get a role that we may be trying to get as the + // internal user. The SystemUser is special cased as it has special privileges to execute internal actions and should never be + // passed into this method. 
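// For ordinary users the effective role set is the user's own roles plus the anonymous roles when
// anonymous access is enabled; internal users and API keys are special-cased before that point, as the
// surrounding code shows. A small, self-contained sketch of just that union step, under those assumptions:
import java.util.LinkedHashSet;
import java.util.Set;

final class EffectiveRolesSketch {
    static Set<String> effectiveRoleNames(Set<String> userRoles, boolean anonymousEnabled, Set<String> anonymousRoles) {
        Set<String> names = new LinkedHashSet<>(userRoles);
        if (anonymousEnabled) {
            if (anonymousRoles.isEmpty()) {
                throw new IllegalStateException("anonymous access is only meaningful when the anonymous user has roles");
            }
            names.addAll(anonymousRoles); // anonymous roles are layered on top of the user's own roles
        }
        return names;
    }
}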
The XPackUser has the Superuser role and we can simply return that + if (SystemUser.is(user)) { + throw new IllegalArgumentException("the user [" + user.principal() + "] is the system user and we should never try to get its" + + " roles"); + } + if (XPackUser.is(user)) { + assert XPackUser.INSTANCE.roles().length == 1; + roleActionListener.onResponse(XPackUser.ROLE); + return; + } + if (XPackSecurityUser.is(user)) { + roleActionListener.onResponse(ReservedRolesStore.SUPERUSER_ROLE); + return; + } + + final Authentication.AuthenticationType authType = authentication.getAuthenticationType(); + if (authType == Authentication.AuthenticationType.API_KEY) { + apiKeyService.getRoleForApiKey(authentication, ActionListener.wrap(apiKeyRoleDescriptors -> { + final List descriptors = apiKeyRoleDescriptors.getRoleDescriptors(); + if (descriptors == null) { + roleActionListener.onFailure(new IllegalStateException("missing role descriptors")); + } else if (apiKeyRoleDescriptors.getLimitedByRoleDescriptors() == null) { + buildAndCacheRoleFromDescriptors(descriptors, apiKeyRoleDescriptors.getApiKeyId() + "_role_desc", roleActionListener); + } else { + buildAndCacheRoleFromDescriptors(descriptors, apiKeyRoleDescriptors.getApiKeyId() + "_role_desc", + ActionListener.wrap(role -> buildAndCacheRoleFromDescriptors(apiKeyRoleDescriptors.getLimitedByRoleDescriptors(), + apiKeyRoleDescriptors.getApiKeyId() + "_limited_role_desc", ActionListener.wrap( + limitedBy -> roleActionListener.onResponse(LimitedRole.createLimitedRole(role, limitedBy)), + roleActionListener::onFailure)), roleActionListener::onFailure)); + } + }, roleActionListener::onFailure)); + } else { + Set roleNames = new HashSet<>(Arrays.asList(user.roles())); + if (isAnonymousEnabled && anonymousUser.equals(user) == false) { + if (anonymousUser.roles().length == 0) { + throw new IllegalStateException("anonymous is only enabled when the anonymous user has roles"); + } + Collections.addAll(roleNames, anonymousUser.roles()); + } + + if (roleNames.isEmpty()) { + roleActionListener.onResponse(Role.EMPTY); + } else if (roleNames.contains(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName())) { + roleActionListener.onResponse(ReservedRolesStore.SUPERUSER_ROLE); + } else { + roles(roleNames, roleActionListener); + } + } + } + public void buildAndCacheRoleFromDescriptors(Collection roleDescriptors, String source, ActionListener listener) { if (ROLES_STORE_SOURCE.equals(source)) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java index 40ad10b8acb88..29ea8838f58e6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java @@ -30,7 +30,6 @@ import org.elasticsearch.xpack.security.action.SecurityActionMapper; import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authz.AuthorizationService; -import org.elasticsearch.xpack.security.authz.AuthorizationUtils; import java.io.IOException; @@ -121,20 +120,10 @@ requests from all the nodes are attached with a user (either a serialize SystemUser.is(authentication.getUser()) == false) { securityContext.executeAsUser(SystemUser.INSTANCE, (ctx) -> { final Authentication replaced = 
Authentication.getAuthentication(threadContext); - final AuthorizationUtils.AsyncAuthorizer asyncAuthorizer = - new AuthorizationUtils.AsyncAuthorizer(replaced, listener, (userRoles, runAsRoles) -> { - authzService.authorize(replaced, securityAction, request, userRoles, runAsRoles); - listener.onResponse(null); - }); - asyncAuthorizer.authorize(authzService); + authzService.authorize(replaced, securityAction, request, listener); }, version); } else { - final AuthorizationUtils.AsyncAuthorizer asyncAuthorizer = - new AuthorizationUtils.AsyncAuthorizer(authentication, listener, (userRoles, runAsRoles) -> { - authzService.authorize(authentication, securityAction, request, userRoles, runAsRoles); - listener.onResponse(null); - }); - asyncAuthorizer.authorize(authzService); + authzService.authorize(authentication, securityAction, request, listener); } }, listener::onFailure)); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java index 8d25f0d836139..77f5b6c57b4c3 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java @@ -841,7 +841,7 @@ public void testUpdateApiIsBlocked() throws Exception { ElasticsearchSecurityException securityException = (ElasticsearchSecurityException) bulkItem.getFailure().getCause(); assertThat(securityException.status(), equalTo(RestStatus.BAD_REQUEST)); assertThat(securityException.getMessage(), - equalTo("Can't execute a bulk request with update requests embedded if field or document level security is enabled")); + equalTo("Can't execute a bulk item request with update requests embedded if field or document level security is enabled")); assertThat(client().prepareGet("test", "type", "1").get().getSource().get("field1").toString(), equalTo("value2")); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java index 54832519d8576..3055d1b0f456b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java @@ -1448,7 +1448,7 @@ public void testUpdateApiIsBlocked() throws Exception { ElasticsearchSecurityException securityException = (ElasticsearchSecurityException) bulkItem.getFailure().getCause(); assertThat(securityException.status(), equalTo(RestStatus.BAD_REQUEST)); assertThat(securityException.getMessage(), - equalTo("Can't execute a bulk request with update requests embedded if field or document level security is enabled")); + equalTo("Can't execute a bulk item request with update requests embedded if field or document level security is enabled")); assertThat(client().prepareGet("test", "type", "1").get().getSource().get("field2").toString(), equalTo("value2")); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java index 80d5e3f85281e..78d6e22ac3645 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java @@ -27,11 +27,11 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.core.security.authc.AuthenticationField; -import org.elasticsearch.xpack.core.security.authz.permission.Role; import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.authc.AuthenticationService; @@ -39,7 +39,6 @@ import org.junit.Before; import java.util.Collections; -import java.util.HashSet; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; @@ -83,8 +82,7 @@ public void init() throws Exception { when(state.nodes()).thenReturn(nodes); SecurityContext securityContext = new SecurityContext(settings, threadContext); - filter = new SecurityActionFilter(authcService, authzService, - licenseState, new HashSet<>(), threadPool, securityContext, destructiveOperations); + filter = new SecurityActionFilter(authcService, authzService, licenseState, threadPool, securityContext, destructiveOperations); } public void testApply() throws Exception { @@ -100,15 +98,14 @@ public void testApply() throws Exception { callback.onResponse(authentication); return Void.TYPE; }).when(authcService).authenticate(eq("_action"), eq(request), eq(SystemUser.INSTANCE), any(ActionListener.class)); - final Role empty = Role.EMPTY; doAnswer((i) -> { - ActionListener callback = - (ActionListener) i.getArguments()[2]; - callback.onResponse(empty); + ActionListener callback = (ActionListener) i.getArguments()[3]; + callback.onResponse(null); return Void.TYPE; - }).when(authzService).roles(any(User.class), any(Authentication.class), any(ActionListener.class)); + }).when(authzService) + .authorize(any(Authentication.class), any(String.class), any(TransportRequest.class), any(ActionListener.class)); filter.apply(task, "_action", request, listener, chain); - verify(authzService).authorize(authentication, "_action", request, empty, null); + verify(authzService).authorize(eq(authentication), eq("_action"), eq(request), any(ActionListener.class)); verify(chain).proceed(eq(task), eq("_action"), eq(request), isA(ContextPreservingActionListener.class)); } @@ -127,20 +124,18 @@ public void testApplyRestoresThreadContext() throws Exception { callback.onResponse(authentication); return Void.TYPE; }).when(authcService).authenticate(eq("_action"), eq(request), eq(SystemUser.INSTANCE), any(ActionListener.class)); - final Role empty = Role.EMPTY; doAnswer((i) -> { - ActionListener callback = - (ActionListener) i.getArguments()[2]; - assertEquals(authentication, threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY)); - callback.onResponse(empty); + ActionListener callback = (ActionListener) i.getArguments()[3]; + callback.onResponse(null); return Void.TYPE; - }).when(authzService).roles(any(User.class), any(Authentication.class), any(ActionListener.class)); + }).when(authzService) + .authorize(any(Authentication.class), any(String.class), 
any(TransportRequest.class), any(ActionListener.class)); assertNull(threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY)); filter.apply(task, "_action", request, listener, chain); assertNull(threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY)); - verify(authzService).authorize(authentication, "_action", request, empty, null); + verify(authzService).authorize(eq(authentication), eq("_action"), eq(request), any(ActionListener.class)); verify(chain).proceed(eq(task), eq("_action"), eq(request), isA(ContextPreservingActionListener.class)); } @@ -169,6 +164,12 @@ public void testApplyAsSystemUser() throws Exception { callback.onResponse(threadContext.getTransient(AuthenticationField.AUTHENTICATION_KEY)); return Void.TYPE; }).when(authcService).authenticate(eq(action), eq(request), eq(SystemUser.INSTANCE), any(ActionListener.class)); + doAnswer((i) -> { + ActionListener callback = (ActionListener) i.getArguments()[3]; + callback.onResponse(null); + return Void.TYPE; + }).when(authzService) + .authorize(any(Authentication.class), any(String.class), any(TransportRequest.class), any(ActionListener.class)); filter.apply(task, action, request, listener, chain); @@ -198,19 +199,18 @@ public void testApplyDestructiveOperations() throws Exception { callback.onResponse(authentication); return Void.TYPE; }).when(authcService).authenticate(eq(action), eq(request), eq(SystemUser.INSTANCE), any(ActionListener.class)); - final Role empty = Role.EMPTY; doAnswer((i) -> { - ActionListener callback = - (ActionListener) i.getArguments()[2]; - callback.onResponse(empty); + ActionListener callback = (ActionListener) i.getArguments()[3]; + callback.onResponse(null); return Void.TYPE; - }).when(authzService).roles(any(User.class), any(Authentication.class), any(ActionListener.class)); + }).when(authzService) + .authorize(any(Authentication.class), any(String.class), any(TransportRequest.class), any(ActionListener.class)); filter.apply(task, action, request, listener, chain); if (failDestructiveOperations) { verify(listener).onFailure(isA(IllegalArgumentException.class)); verifyNoMoreInteractions(authzService, chain); } else { - verify(authzService).authorize(authentication, action, request, empty, null); + verify(authzService).authorize(eq(authentication), eq(action), eq(request), any(ActionListener.class)); verify(chain).proceed(eq(task), eq(action), eq(request), isA(ContextPreservingActionListener.class)); } } @@ -229,14 +229,7 @@ public void testActionProcessException() throws Exception { callback.onResponse(authentication); return Void.TYPE; }).when(authcService).authenticate(eq("_action"), eq(request), eq(SystemUser.INSTANCE), any(ActionListener.class)); - doAnswer((i) -> { - ActionListener callback = - (ActionListener) i.getArguments()[2]; - callback.onResponse(Role.EMPTY); - return Void.TYPE; - }).when(authzService).roles(any(User.class), any(Authentication.class), any(ActionListener.class)); - doThrow(exception).when(authzService).authorize(eq(authentication), eq("_action"), eq(request), any(Role.class), - any(Role.class)); + doThrow(exception).when(authzService).authorize(eq(authentication), eq("_action"), eq(request), any(ActionListener.class)); filter.apply(task, "_action", request, listener, chain); verify(listener).onFailure(exception); verifyNoMoreInteractions(chain); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUserPrivilegesActionTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUserPrivilegesActionTests.java deleted file mode 100644 index 40f467fcc1832..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUserPrivilegesActionTests.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -package org.elasticsearch.xpack.security.action.user; - -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions; -import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition; -import org.elasticsearch.xpack.core.security.authz.permission.Role; -import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges.ManageApplicationPrivileges; -import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.Privilege; -import org.elasticsearch.xpack.security.authz.AuthorizationService; - -import java.util.Collections; -import java.util.Set; - -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.emptyIterable; -import static org.hamcrest.Matchers.iterableWithSize; -import static org.mockito.Mockito.mock; - -public class TransportGetUserPrivilegesActionTests extends ESTestCase { - - public void testBuildResponseObject() { - final ManageApplicationPrivileges manageApplicationPrivileges = new ManageApplicationPrivileges(Sets.newHashSet("app01", "app02")); - final BytesArray query = new BytesArray("{\"term\":{\"public\":true}}"); - final Role role = Role.builder("test", "role") - .cluster(Sets.newHashSet("monitor", "manage_watcher"), Collections.singleton(manageApplicationPrivileges)) - .add(IndexPrivilege.get(Sets.newHashSet("read", "write")), "index-1") - .add(IndexPrivilege.ALL, "index-2", "index-3") - .add( - new FieldPermissions(new FieldPermissionsDefinition(new String[]{ "public.*" }, new String[0])), - Collections.singleton(query), - IndexPrivilege.READ, randomBoolean(), "index-4", "index-5") - .addApplicationPrivilege(new ApplicationPrivilege("app01", "read", "data:read"), Collections.singleton("*")) - .runAs(new Privilege(Sets.newHashSet("user01", "user02"), "user01", "user02")) - .build(); - - final TransportGetUserPrivilegesAction action = new TransportGetUserPrivilegesAction(mock(ThreadPool.class), - mock(TransportService.class), mock(ActionFilters.class), mock(AuthorizationService.class)); - final GetUserPrivilegesResponse response = action.buildResponseObject(role); - - assertThat(response.getClusterPrivileges(), containsInAnyOrder("monitor", "manage_watcher")); - assertThat(response.getConditionalClusterPrivileges(), containsInAnyOrder(manageApplicationPrivileges)); - - 
assertThat(response.getIndexPrivileges(), iterableWithSize(3)); - final GetUserPrivilegesResponse.Indices index1 = findIndexPrivilege(response.getIndexPrivileges(), "index-1"); - assertThat(index1.getIndices(), containsInAnyOrder("index-1")); - assertThat(index1.getPrivileges(), containsInAnyOrder("read", "write")); - assertThat(index1.getFieldSecurity(), emptyIterable()); - assertThat(index1.getQueries(), emptyIterable()); - final GetUserPrivilegesResponse.Indices index2 = findIndexPrivilege(response.getIndexPrivileges(), "index-2"); - assertThat(index2.getIndices(), containsInAnyOrder("index-2", "index-3")); - assertThat(index2.getPrivileges(), containsInAnyOrder("all")); - assertThat(index2.getFieldSecurity(), emptyIterable()); - assertThat(index2.getQueries(), emptyIterable()); - final GetUserPrivilegesResponse.Indices index4 = findIndexPrivilege(response.getIndexPrivileges(), "index-4"); - assertThat(index4.getIndices(), containsInAnyOrder("index-4", "index-5")); - assertThat(index4.getPrivileges(), containsInAnyOrder("read")); - assertThat(index4.getFieldSecurity(), containsInAnyOrder( - new FieldPermissionsDefinition.FieldGrantExcludeGroup(new String[]{ "public.*" }, new String[0]))); - assertThat(index4.getQueries(), containsInAnyOrder(query)); - - assertThat(response.getApplicationPrivileges(), containsInAnyOrder( - RoleDescriptor.ApplicationResourcePrivileges.builder().application("app01").privileges("read").resources("*").build()) - ); - - assertThat(response.getRunAs(), containsInAnyOrder("user01", "user02")); - } - - private GetUserPrivilegesResponse.Indices findIndexPrivilege(Set indices, String name) { - return indices.stream().filter(i -> i.getIndices().contains(name)).findFirst().get(); - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java deleted file mode 100644 index 670c2366d65bf..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java +++ /dev/null @@ -1,575 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.security.action.user; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; -import org.elasticsearch.action.delete.DeleteAction; -import org.elasticsearch.action.index.IndexAction; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.MapBuilder; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.mock.orig.Mockito; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.junit.annotations.TestLogging; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest; -import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; -import org.elasticsearch.xpack.core.security.authc.Authentication; -import org.elasticsearch.xpack.core.security.authc.AuthenticationField; -import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.permission.LimitedRole; -import org.elasticsearch.xpack.core.security.authz.permission.ResourcePrivileges; -import org.elasticsearch.xpack.core.security.authz.permission.Role; -import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; -import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; -import org.elasticsearch.xpack.core.security.user.User; -import org.elasticsearch.xpack.security.authz.AuthorizationService; -import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore; -import org.hamcrest.Matchers; -import org.junit.Before; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Locale; -import java.util.Set; - -import static java.util.Collections.emptyMap; -import static org.elasticsearch.common.util.set.Sets.newHashSet; -import static org.hamcrest.Matchers.arrayWithSize; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.iterableWithSize; -import static org.hamcrest.Matchers.notNullValue; -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -@TestLogging("org.elasticsearch.xpack.security.action.user.TransportHasPrivilegesAction:TRACE," + - "org.elasticsearch.xpack.core.security.authz.permission.ApplicationPermission:DEBUG") -public class TransportHasPrivilegesActionTests extends ESTestCase { - - private User user; - private Role role; - private TransportHasPrivilegesAction action; - private List applicationPrivileges; - - @Before - public void setup() { - user = new User(randomAlphaOfLengthBetween(4, 12)); - final ThreadPool threadPool = 
mock(ThreadPool.class); - final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - final TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null, - TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - - final Authentication authentication = mock(Authentication.class); - threadContext.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication); - when(threadPool.getThreadContext()).thenReturn(threadContext); - - when(authentication.getUser()).thenReturn(user); - - AuthorizationService authorizationService = mock(AuthorizationService.class); - Mockito.doAnswer(invocationOnMock -> { - ActionListener<Role> listener = (ActionListener<Role>) invocationOnMock.getArguments()[2]; - listener.onResponse(role); - return null; - }).when(authorizationService).roles(eq(user), any(Authentication.class), any(ActionListener.class)); - - applicationPrivileges = new ArrayList<>(); - NativePrivilegeStore privilegeStore = mock(NativePrivilegeStore.class); - Mockito.doAnswer(inv -> { - assertThat(inv.getArguments(), arrayWithSize(3)); - ActionListener<Collection<ApplicationPrivilegeDescriptor>> listener - = (ActionListener<Collection<ApplicationPrivilegeDescriptor>>) inv.getArguments()[2]; - logger.info("Privileges for ({}) are {}", Arrays.toString(inv.getArguments()), applicationPrivileges); - listener.onResponse(applicationPrivileges); - return null; - }).when(privilegeStore).getPrivileges(any(Collection.class), any(Collection.class), any(ActionListener.class)); - - action = new TransportHasPrivilegesAction(threadPool, transportService, mock(ActionFilters.class), authorizationService, - privilegeStore); - } - - /** - * This tests that action names in the request are considered "matched" by the relevant named privilege - * (in this case that {@link DeleteAction} and {@link IndexAction} are satisfied by {@link IndexPrivilege#WRITE}). - */ - public void testNamedIndexPrivilegesMatchApplicableActions() throws Exception { - role = Role.builder("test1") - .cluster(Collections.singleton("all"), Collections.emptyList()) - .add(IndexPrivilege.WRITE, "academy") - .build(); - - final HasPrivilegesRequest request = new HasPrivilegesRequest(); - request.username(user.principal()); - request.clusterPrivileges(ClusterHealthAction.NAME); - request.indexPrivileges(RoleDescriptor.IndicesPrivileges.builder() - .indices("academy") - .privileges(DeleteAction.NAME, IndexAction.NAME) - .build()); - request.applicationPrivileges(new RoleDescriptor.ApplicationResourcePrivileges[0]); - final PlainActionFuture<HasPrivilegesResponse> future = new PlainActionFuture<>(); - action.doExecute(mock(Task.class), request, future); - - final HasPrivilegesResponse response = future.get(); - assertThat(response, notNullValue()); - assertThat(response.getUsername(), is(user.principal())); - assertThat(response.isCompleteMatch(), is(true)); - - assertThat(response.getClusterPrivileges().size(), equalTo(1)); - assertThat(response.getClusterPrivileges().get(ClusterHealthAction.NAME), equalTo(true)); - - assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(1)); - final ResourcePrivileges result = response.getIndexPrivileges().iterator().next(); - assertThat(result.getResource(), equalTo("academy")); - assertThat(result.getPrivileges().size(), equalTo(2)); - assertThat(result.getPrivileges().get(DeleteAction.NAME), equalTo(true)); - assertThat(result.getPrivileges().get(IndexAction.NAME), equalTo(true)); - } - - /** - * This tests that the action responds correctly when the user/role has some, but not all - of the privileges being checked. 
- */ - public void testMatchSubsetOfPrivileges() throws Exception { - role = Role.builder("test2") - .cluster(ClusterPrivilege.MONITOR) - .add(IndexPrivilege.INDEX, "academy") - .add(IndexPrivilege.WRITE, "initiative") - .build(); - - final HasPrivilegesRequest request = new HasPrivilegesRequest(); - request.username(user.principal()); - request.clusterPrivileges("monitor", "manage"); - request.indexPrivileges(RoleDescriptor.IndicesPrivileges.builder() - .indices("academy", "initiative", "school") - .privileges("delete", "index", "manage") - .build()); - request.applicationPrivileges(new RoleDescriptor.ApplicationResourcePrivileges[0]); - final PlainActionFuture future = new PlainActionFuture<>(); - action.doExecute(mock(Task.class), request, future); - - final HasPrivilegesResponse response = future.get(); - assertThat(response, notNullValue()); - assertThat(response.getUsername(), is(user.principal())); - assertThat(response.isCompleteMatch(), is(false)); - assertThat(response.getClusterPrivileges().size(), equalTo(2)); - assertThat(response.getClusterPrivileges().get("monitor"), equalTo(true)); - assertThat(response.getClusterPrivileges().get("manage"), equalTo(false)); - assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(3)); - - final Iterator indexPrivilegesIterator = response.getIndexPrivileges().iterator(); - final ResourcePrivileges academy = indexPrivilegesIterator.next(); - final ResourcePrivileges initiative = indexPrivilegesIterator.next(); - final ResourcePrivileges school = indexPrivilegesIterator.next(); - - assertThat(academy.getResource(), equalTo("academy")); - assertThat(academy.getPrivileges().size(), equalTo(3)); - assertThat(academy.getPrivileges().get("index"), equalTo(true)); // explicit - assertThat(academy.getPrivileges().get("delete"), equalTo(false)); - assertThat(academy.getPrivileges().get("manage"), equalTo(false)); - - assertThat(initiative.getResource(), equalTo("initiative")); - assertThat(initiative.getPrivileges().size(), equalTo(3)); - assertThat(initiative.getPrivileges().get("index"), equalTo(true)); // implied by write - assertThat(initiative.getPrivileges().get("delete"), equalTo(true)); // implied by write - assertThat(initiative.getPrivileges().get("manage"), equalTo(false)); - - assertThat(school.getResource(), equalTo("school")); - assertThat(school.getPrivileges().size(), equalTo(3)); - assertThat(school.getPrivileges().get("index"), equalTo(false)); - assertThat(school.getPrivileges().get("delete"), equalTo(false)); - assertThat(school.getPrivileges().get("manage"), equalTo(false)); - } - - /** - * This tests that the action responds correctly when the user/role has none - * of the privileges being checked. 
- */ - public void testMatchNothing() throws Exception { - role = Role.builder("test3") - .cluster(ClusterPrivilege.MONITOR) - .build(); - - final HasPrivilegesResponse response = hasPrivileges(RoleDescriptor.IndicesPrivileges.builder() - .indices("academy") - .privileges("read", "write") - .build(), Strings.EMPTY_ARRAY); - assertThat(response.getUsername(), is(user.principal())); - assertThat(response.isCompleteMatch(), is(false)); - assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(1)); - final ResourcePrivileges result = response.getIndexPrivileges().iterator().next(); - assertThat(result.getResource(), equalTo("academy")); - assertThat(result.getPrivileges().size(), equalTo(2)); - assertThat(result.getPrivileges().get("read"), equalTo(false)); - assertThat(result.getPrivileges().get("write"), equalTo(false)); - } - - /** - * Wildcards in the request are treated as - * does the user have ___ privilege on every possible index that matches this pattern? - * Or, expressed differently, - * does the user have ___ privilege on a wildcard that covers (is a superset of) this pattern? - */ - public void testWildcardHandling() throws Exception { - final ApplicationPrivilege kibanaRead = defineApplicationPrivilege("kibana", "read", - "data:read/*", "action:login", "action:view/dashboard"); - final ApplicationPrivilege kibanaWrite = defineApplicationPrivilege("kibana", "write", - "data:write/*", "action:login", "action:view/dashboard"); - final ApplicationPrivilege kibanaAdmin = defineApplicationPrivilege("kibana", "admin", - "action:login", "action:manage/*"); - final ApplicationPrivilege kibanaViewSpace = defineApplicationPrivilege("kibana", "view-space", - "action:login", "space:view/*"); - role = Role.builder("test3") - .add(IndexPrivilege.ALL, "logstash-*", "foo?") - .add(IndexPrivilege.READ, "abc*") - .add(IndexPrivilege.WRITE, "*xyz") - .addApplicationPrivilege(kibanaRead, Collections.singleton("*")) - .addApplicationPrivilege(kibanaViewSpace, newHashSet("space/engineering/*", "space/builds")) - .build(); - - final HasPrivilegesRequest request = new HasPrivilegesRequest(); - request.username(user.principal()); - request.clusterPrivileges(Strings.EMPTY_ARRAY); - request.indexPrivileges( - RoleDescriptor.IndicesPrivileges.builder() - .indices("logstash-2016-*") - .privileges("write") // Yes, because (ALL,"logstash-*") - .build(), - RoleDescriptor.IndicesPrivileges.builder() - .indices("logstash-*") - .privileges("read") // Yes, because (ALL,"logstash-*") - .build(), - RoleDescriptor.IndicesPrivileges.builder() - .indices("log*") - .privileges("manage") // No, because "log*" includes indices that "logstash-*" does not - .build(), - RoleDescriptor.IndicesPrivileges.builder() - .indices("foo*", "foo?") - .privileges("read") // Yes, "foo?", but not "foo*", because "foo*" > "foo?" 
- .build(), - RoleDescriptor.IndicesPrivileges.builder() - .indices("abcd*") - .privileges("read", "write") // read = Yes, because (READ, "abc*"), write = No - .build(), - RoleDescriptor.IndicesPrivileges.builder() - .indices("abc*xyz") - .privileges("read", "write", "manage") // read = Yes ( READ "abc*"), write = Yes (WRITE, "*xyz"), manage = No - .build(), - RoleDescriptor.IndicesPrivileges.builder() - .indices("a*xyz") - .privileges("read", "write", "manage") // read = No, write = Yes (WRITE, "*xyz"), manage = No - .build() - ); - - request.applicationPrivileges( - RoleDescriptor.ApplicationResourcePrivileges.builder() - .resources("*") - .application("kibana") - .privileges(Sets.union(kibanaRead.name(), kibanaWrite.name())) // read = Yes, write = No - .build(), - RoleDescriptor.ApplicationResourcePrivileges.builder() - .resources("space/engineering/project-*", "space/*") // project-* = Yes, space/* = Not - .application("kibana") - .privileges("space:view/dashboard") - .build() - ); - - final PlainActionFuture future = new PlainActionFuture<>(); - action.doExecute(mock(Task.class), request, future); - - final HasPrivilegesResponse response = future.get(); - assertThat(response, notNullValue()); - assertThat(response.getUsername(), is(user.principal())); - assertThat(response.isCompleteMatch(), is(false)); - assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(8)); - assertThat(response.getIndexPrivileges(), - containsInAnyOrder( - ResourcePrivileges.builder("logstash-2016-*").addPrivileges(Collections.singletonMap("write", true)).build(), - ResourcePrivileges.builder("logstash-*").addPrivileges(Collections.singletonMap("read", true)).build(), - ResourcePrivileges.builder("log*").addPrivileges(Collections.singletonMap("manage", false)).build(), - ResourcePrivileges.builder("foo?").addPrivileges(Collections.singletonMap("read", true)).build(), - ResourcePrivileges.builder("foo*").addPrivileges(Collections.singletonMap("read", false)).build(), - ResourcePrivileges.builder("abcd*").addPrivileges(mapBuilder().put("read", true).put("write", false).map()).build(), - ResourcePrivileges.builder("abc*xyz") - .addPrivileges(mapBuilder().put("read", true).put("write", true).put("manage", false).map()).build(), - ResourcePrivileges.builder("a*xyz") - .addPrivileges(mapBuilder().put("read", false).put("write", true).put("manage", false).map()).build())); - assertThat(response.getApplicationPrivileges().entrySet(), Matchers.iterableWithSize(1)); - final Set kibanaPrivileges = response.getApplicationPrivileges().get("kibana"); - assertThat(kibanaPrivileges, Matchers.iterableWithSize(3)); - assertThat(Strings.collectionToCommaDelimitedString(kibanaPrivileges), kibanaPrivileges, containsInAnyOrder( - ResourcePrivileges.builder("*").addPrivileges(mapBuilder().put("read", true).put("write", false).map()).build(), - ResourcePrivileges.builder("space/engineering/project-*") - .addPrivileges(Collections.singletonMap("space:view/dashboard", true)).build(), - ResourcePrivileges.builder("space/*").addPrivileges(Collections.singletonMap("space:view/dashboard", false)).build())); - } - - private ApplicationPrivilege defineApplicationPrivilege(String app, String name, String ... 
actions) { - this.applicationPrivileges.add(new ApplicationPrivilegeDescriptor(app, name, newHashSet(actions), emptyMap())); - return new ApplicationPrivilege(app, name, actions); - } - - public void testCheckingIndexPermissionsDefinedOnDifferentPatterns() throws Exception { - role = Role.builder("test-write") - .add(IndexPrivilege.INDEX, "apache-*") - .add(IndexPrivilege.DELETE, "apache-2016-*") - .build(); - - final HasPrivilegesResponse response = hasPrivileges(RoleDescriptor.IndicesPrivileges.builder() - .indices("apache-2016-12", "apache-2017-01") - .privileges("index", "delete") - .build(), Strings.EMPTY_ARRAY); - assertThat(response.isCompleteMatch(), is(false)); - assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(2)); - assertThat(response.getIndexPrivileges(), containsInAnyOrder( - ResourcePrivileges.builder("apache-2016-12").addPrivileges( - MapBuilder.newMapBuilder(new LinkedHashMap()) - .put("index", true).put("delete", true).map()).build(), - ResourcePrivileges.builder("apache-2017-01").addPrivileges( - MapBuilder.newMapBuilder(new LinkedHashMap()) - .put("index", true).put("delete", false).map()).build() - )); - } - - public void testCheckingApplicationPrivilegesOnDifferentApplicationsAndResources() throws Exception { - final ApplicationPrivilege app1Read = defineApplicationPrivilege("app1", "read", "data:read/*"); - final ApplicationPrivilege app1Write = defineApplicationPrivilege("app1", "write", "data:write/*"); - final ApplicationPrivilege app1All = defineApplicationPrivilege("app1", "all", "*"); - final ApplicationPrivilege app2Read = defineApplicationPrivilege("app2", "read", "data:read/*"); - final ApplicationPrivilege app2Write = defineApplicationPrivilege("app2", "write", "data:write/*"); - final ApplicationPrivilege app2All = defineApplicationPrivilege("app2", "all", "*"); - - role = Role.builder("test-role") - .addApplicationPrivilege(app1Read, Collections.singleton("foo/*")) - .addApplicationPrivilege(app1All, Collections.singleton("foo/bar/baz")) - .addApplicationPrivilege(app2Read, Collections.singleton("foo/bar/*")) - .addApplicationPrivilege(app2Write, Collections.singleton("*/bar/*")) - .build(); - - final HasPrivilegesResponse response = hasPrivileges(new RoleDescriptor.IndicesPrivileges[0], - new RoleDescriptor.ApplicationResourcePrivileges[]{ - RoleDescriptor.ApplicationResourcePrivileges.builder() - .application("app1") - .resources("foo/1", "foo/bar/2", "foo/bar/baz", "baz/bar/foo") - .privileges("read", "write", "all") - .build(), - RoleDescriptor.ApplicationResourcePrivileges.builder() - .application("app2") - .resources("foo/1", "foo/bar/2", "foo/bar/baz", "baz/bar/foo") - .privileges("read", "write", "all") - .build() - }, Strings.EMPTY_ARRAY); - - assertThat(response.isCompleteMatch(), is(false)); - assertThat(response.getIndexPrivileges(), Matchers.emptyIterable()); - assertThat(response.getApplicationPrivileges().entrySet(), Matchers.iterableWithSize(2)); - final Set app1 = response.getApplicationPrivileges().get("app1"); - assertThat(app1, Matchers.iterableWithSize(4)); - assertThat(Strings.collectionToCommaDelimitedString(app1), app1, containsInAnyOrder( - ResourcePrivileges.builder("foo/1") - .addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()).put("read", true).put("write", false) - .put("all", false).map()) - .build(), - ResourcePrivileges.builder("foo/bar/2") - .addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()).put("read", true).put("write", false) - .put("all", false).map()) - .build(), - 
ResourcePrivileges.builder("foo/bar/baz") - .addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()).put("read", true).put("write", true) - .put("all", true).map()) - .build(), - ResourcePrivileges.builder("baz/bar/foo").addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()) - .put("read", false).put("write", false).put("all", false).map()).build())); - final Set app2 = response.getApplicationPrivileges().get("app2"); - assertThat(app2, Matchers.iterableWithSize(4)); - assertThat(Strings.collectionToCommaDelimitedString(app2), app2, containsInAnyOrder( - ResourcePrivileges.builder("foo/1") - .addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()).put("read", false).put("write", false) - .put("all", false).map()) - .build(), - ResourcePrivileges.builder("foo/bar/2") - .addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()).put("read", true).put("write", true) - .put("all", false).map()) - .build(), - ResourcePrivileges.builder("foo/bar/baz") - .addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()).put("read", true).put("write", true) - .put("all", false).map()) - .build(), - ResourcePrivileges.builder("baz/bar/foo").addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()) - .put("read", false).put("write", true).put("all", false).map()).build() - )); - } - - public void testCheckingApplicationPrivilegesWithComplexNames() throws Exception { - final String appName = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(3, 10); - final String action1 = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(2, 5); - final String action2 = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(6, 9); - - final ApplicationPrivilege priv1 = defineApplicationPrivilege(appName, action1, "DATA:read/*", "ACTION:" + action1); - - role = Role.builder("test-write") - .addApplicationPrivilege(priv1, Collections.singleton("user/*/name")) - .build(); - - final HasPrivilegesResponse response = hasPrivileges( - new RoleDescriptor.IndicesPrivileges[0], - new RoleDescriptor.ApplicationResourcePrivileges[]{ - RoleDescriptor.ApplicationResourcePrivileges.builder() - .application(appName) - .resources("user/hawkeye/name") - .privileges("DATA:read/user/*", "ACTION:" + action1, "ACTION:" + action2, action1, action2) - .build() - }, - "monitor"); - assertThat(response.isCompleteMatch(), is(false)); - assertThat(response.getApplicationPrivileges().keySet(), containsInAnyOrder(appName)); - assertThat(response.getApplicationPrivileges().get(appName), iterableWithSize(1)); - assertThat(response.getApplicationPrivileges().get(appName), containsInAnyOrder( - ResourcePrivileges.builder("user/hawkeye/name").addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()) - .put("DATA:read/user/*", true) - .put("ACTION:" + action1, true) - .put("ACTION:" + action2, false) - .put(action1, true) - .put(action2, false) - .map()).build() - )); - } - - public void testIsCompleteMatch() throws Exception { - final ApplicationPrivilege kibanaRead = defineApplicationPrivilege("kibana", "read", "data:read/*"); - final ApplicationPrivilege kibanaWrite = defineApplicationPrivilege("kibana", "write", "data:write/*"); - role = Role.builder("test-write") - .cluster(ClusterPrivilege.MONITOR) - .add(IndexPrivilege.READ, "read-*") - .add(IndexPrivilege.ALL, "all-*") - .addApplicationPrivilege(kibanaRead, Collections.singleton("*")) - .build(); - - assertThat(hasPrivileges(indexPrivileges("read", "read-123", "read-456", "all-999"), 
"monitor").isCompleteMatch(), is(true)); - assertThat(hasPrivileges(indexPrivileges("read", "read-123", "read-456", "all-999"), "manage").isCompleteMatch(), is(false)); - assertThat(hasPrivileges(indexPrivileges("write", "read-123", "read-456", "all-999"), "monitor").isCompleteMatch(), is(false)); - assertThat(hasPrivileges(indexPrivileges("write", "read-123", "read-456", "all-999"), "manage").isCompleteMatch(), is(false)); - assertThat(hasPrivileges( - new RoleDescriptor.IndicesPrivileges[]{ - RoleDescriptor.IndicesPrivileges.builder() - .indices("read-a") - .privileges("read") - .build(), - RoleDescriptor.IndicesPrivileges.builder() - .indices("all-b") - .privileges("read", "write") - .build() - }, - new RoleDescriptor.ApplicationResourcePrivileges[]{ - RoleDescriptor.ApplicationResourcePrivileges.builder() - .application("kibana") - .resources("*") - .privileges("read") - .build() - }, - "monitor").isCompleteMatch(), is(true)); - assertThat(hasPrivileges( - new RoleDescriptor.IndicesPrivileges[]{indexPrivileges("read", "read-123", "read-456", "all-999")}, - new RoleDescriptor.ApplicationResourcePrivileges[]{ - RoleDescriptor.ApplicationResourcePrivileges.builder() - .application("kibana").resources("*").privileges("read").build(), - RoleDescriptor.ApplicationResourcePrivileges.builder() - .application("kibana").resources("*").privileges("write").build() - }, - "monitor").isCompleteMatch(), is(false)); - } - - public void testLimitedRoleHasPrivilegesApi() throws Exception { - final ApplicationPrivilege kibanaRead = defineApplicationPrivilege("kibana", "read", "data:read/*"); - final ApplicationPrivilege kibanaWrite = defineApplicationPrivilege("kibana", "write", "data:write/*"); - Role baseRole = Role.builder("base-role").cluster(Sets.newHashSet("manage", "monitor"), Collections.emptySet()) - .add(IndexPrivilege.ALL, "all-*").addApplicationPrivilege(kibanaRead, Collections.singleton("*")) - .addApplicationPrivilege(kibanaWrite, Collections.singleton("*")).build(); - - Role limitedByRole = Role.builder("limited-by").cluster(ClusterPrivilege.MONITOR).add(IndexPrivilege.READ, "all-read-*") - .addApplicationPrivilege(kibanaRead, Collections.singleton("*")).build(); - - role = LimitedRole.createLimitedRole(baseRole, limitedByRole); - - assertThat(hasPrivileges(indexPrivileges("read", "all-read-1", "all-read-2", "all-read-*"), "monitor").isCompleteMatch(), is(true)); - assertThat(hasPrivileges(indexPrivileges("read", "all-1", "all-999"), "monitor").isCompleteMatch(), is(false)); - assertThat(hasPrivileges(indexPrivileges("read", "all-999"), "manage").isCompleteMatch(), is(false)); - assertThat(hasPrivileges(indexPrivileges("write", "all-999"), "monitor").isCompleteMatch(), is(false)); - assertThat(hasPrivileges(indexPrivileges("write", "all-*"), "manage").isCompleteMatch(), is(false)); - - HasPrivilegesResponse response = hasPrivileges(indexPrivileges("read", "all-read-999"), "manage", "monitor"); - assertThat(response.getClusterPrivileges().get("manage"), is(false)); - assertThat(response.getClusterPrivileges().get("monitor"), is(true)); - assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(1)); - assertThat(response.getIndexPrivileges(), containsInAnyOrder(ResourcePrivileges.builder("all-read-999") - .addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()).put("read", true).map()).build())); - - response = hasPrivileges(new RoleDescriptor.IndicesPrivileges[] { indexPrivileges("read", "all-read-*") }, - new RoleDescriptor.ApplicationResourcePrivileges[] { - 
RoleDescriptor.ApplicationResourcePrivileges.builder().application("kibana").resources("*").privileges("read") - .build(), - RoleDescriptor.ApplicationResourcePrivileges.builder().application("kibana").resources("*").privileges("write") - .build() }, - "monitor"); - assertThat(response.isCompleteMatch(), is(false)); - final Set kibanaPrivileges = response.getApplicationPrivileges().get("kibana"); - assertThat(kibanaPrivileges, containsInAnyOrder( - ResourcePrivileges.builder("*").addPrivileges(mapBuilder().put("write", false).put("read", true).map()).build())); - } - - private RoleDescriptor.IndicesPrivileges indexPrivileges(String priv, String... indices) { - return RoleDescriptor.IndicesPrivileges.builder() - .indices(indices) - .privileges(priv) - .build(); - } - - private HasPrivilegesResponse hasPrivileges(RoleDescriptor.IndicesPrivileges indicesPrivileges, String... clusterPrivileges) - throws Exception { - return hasPrivileges( - new RoleDescriptor.IndicesPrivileges[]{indicesPrivileges}, - new RoleDescriptor.ApplicationResourcePrivileges[0], - clusterPrivileges - ); - } - - private HasPrivilegesResponse hasPrivileges(RoleDescriptor.IndicesPrivileges[] indicesPrivileges, - RoleDescriptor.ApplicationResourcePrivileges[] appPrivileges, - String... clusterPrivileges) throws Exception { - final HasPrivilegesRequest request = new HasPrivilegesRequest(); - request.username(user.principal()); - request.clusterPrivileges(clusterPrivileges); - request.indexPrivileges(indicesPrivileges); - request.applicationPrivileges(appPrivileges); - final PlainActionFuture future = new PlainActionFuture<>(); - action.doExecute(mock(Task.class), request, future); - final HasPrivilegesResponse response = future.get(); - assertThat(response, notNullValue()); - return response; - } - - private static MapBuilder mapBuilder() { - return MapBuilder.newMapBuilder(); - } - -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/AuditTrailServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/AuditTrailServiceTests.java index d4289080a9b30..fb194ecefc671 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/AuditTrailServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/AuditTrailServiceTests.java @@ -13,15 +13,18 @@ import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; import org.elasticsearch.xpack.security.transport.filter.IPFilter; import org.elasticsearch.xpack.security.transport.filter.SecurityIpFilterRule; import org.junit.Before; import java.net.InetAddress; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import static java.util.Collections.unmodifiableList; +import static org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail.PRINCIPAL_ROLES_FIELD_NAME; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyZeroInteractions; @@ -147,13 +150,14 @@ public void testAnonymousAccess() throws Exception { public void testAccessGranted() throws Exception { Authentication authentication =new Authentication(new User("_username", "r1"), new RealmRef(null, null, null), new RealmRef(null, null, null)); - String[] roles 
= new String[] { randomAlphaOfLengthBetween(1, 6) }; + AuthorizationInfo authzInfo = + () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, new String[] { randomAlphaOfLengthBetween(1, 6) }); final String requestId = randomAlphaOfLengthBetween(6, 12); - service.accessGranted(requestId, authentication, "_action", message, roles); + service.accessGranted(requestId, authentication, "_action", message, authzInfo); verify(licenseState).isAuditingAllowed(); if (isAuditingAllowed) { for (AuditTrail auditTrail : auditTrails) { - verify(auditTrail).accessGranted(requestId, authentication, "_action", message, roles); + verify(auditTrail).accessGranted(requestId, authentication, "_action", message, authzInfo); } } else { verifyZeroInteractions(auditTrails.toArray((Object[]) new AuditTrail[auditTrails.size()])); @@ -163,13 +167,14 @@ public void testAccessGranted() throws Exception { public void testAccessDenied() throws Exception { Authentication authentication = new Authentication(new User("_username", "r1"), new RealmRef(null, null, null), new RealmRef(null, null, null)); - String[] roles = new String[] { randomAlphaOfLengthBetween(1, 6) }; + AuthorizationInfo authzInfo = + () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, new String[] { randomAlphaOfLengthBetween(1, 6) }); final String requestId = randomAlphaOfLengthBetween(6, 12); - service.accessDenied(requestId, authentication, "_action", message, roles); + service.accessDenied(requestId, authentication, "_action", message, authzInfo); verify(licenseState).isAuditingAllowed(); if (isAuditingAllowed) { for (AuditTrail auditTrail : auditTrails) { - verify(auditTrail).accessDenied(requestId, authentication, "_action", message, roles); + verify(auditTrail).accessDenied(requestId, authentication, "_action", message, authzInfo); } } else { verifyZeroInteractions(auditTrails.toArray((Object[]) new AuditTrail[auditTrails.size()])); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailFilterTests.java index 6d4bb155ba704..5cdc4400c3801 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailFilterTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailFilterTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail.AuditEventMetaInfo; import org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrailTests.MockMessage; import org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrailTests.RestContent; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; import org.elasticsearch.xpack.security.rest.RemoteHostHeader; import org.elasticsearch.xpack.security.transport.filter.SecurityIpFilterRule; import org.junit.Before; @@ -47,6 +48,7 @@ import java.util.Optional; import java.util.stream.Collectors; +import static org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail.PRINCIPAL_ROLES_FIELD_NAME; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -131,7 +133,8 @@ public void testPolicyDoesNotMatchNullValuesInEvent() throws Exception { // role field matches assertTrue("Matches the role filter predicate.", auditTrail.eventFilterPolicyRegistry.ignorePredicate() .test(new 
AuditEventMetaInfo(Optional.empty(), Optional.empty(), - Optional.of(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0])), + Optional.of(authzInfo( + randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0]))), Optional.empty()))); final List unfilteredRoles = new ArrayList<>(); unfilteredRoles.add(null); @@ -139,7 +142,7 @@ public void testPolicyDoesNotMatchNullValuesInEvent() throws Exception { // null role among roles field does NOT match assertFalse("Does not match the role filter predicate because of null role.", auditTrail.eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.empty(), Optional.empty(), - Optional.of(unfilteredRoles.toArray(new String[0])), Optional.empty()))); + Optional.of(authzInfo(unfilteredRoles.toArray(new String[0]))), Optional.empty()))); // indices field matches assertTrue("Matches the index filter predicate.", auditTrail.eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.empty(), Optional.empty(), @@ -185,7 +188,7 @@ public void testSingleCompletePolicyPredicate() throws Exception { assertTrue("Matches the filter predicate.", auditTrail.eventFilterPolicyRegistry.ignorePredicate() .test(new AuditEventMetaInfo( Optional.of(randomFrom(filteredUsers)), Optional.of(randomFrom(filteredRealms)), - Optional.of(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0])), + Optional.of(authzInfo(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0]))), Optional.of(randomSubsetOf(randomIntBetween(1, filteredIndices.size()), filteredIndices).toArray(new String[0]))))); final User unfilteredUser; if (randomBoolean()) { @@ -198,22 +201,26 @@ public void testSingleCompletePolicyPredicate() throws Exception { assertFalse("Does not match the filter predicate because of the user.", auditTrail.eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(unfilteredUser), Optional.of(randomFrom(filteredRealms)), - Optional.of(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0])), + Optional.of(authzInfo(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles) + .toArray(new String[0]))), Optional.of(randomSubsetOf(randomIntBetween(1, filteredIndices.size()), filteredIndices).toArray(new String[0]))))); assertFalse("Does not match the filter predicate because of the empty user.", auditTrail.eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.empty(), Optional.of(randomFrom(filteredRealms)), - Optional.of(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0])), + Optional.of(authzInfo(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles) + .toArray(new String[0]))), Optional.of(randomSubsetOf(randomIntBetween(1, filteredIndices.size()), filteredIndices).toArray(new String[0]))))); assertFalse("Does not match the filter predicate because of the realm.", auditTrail.eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(randomFrom(filteredUsers)), Optional.of(UNFILTER_MARKER + randomAlphaOfLengthBetween(1, 8)), - Optional.of(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0])), + Optional.of(authzInfo(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles) + .toArray(new String[0]))), 
Optional.of(randomSubsetOf(randomIntBetween(1, filteredIndices.size()), filteredIndices).toArray(new String[0]))))); assertFalse("Does not match the filter predicate because of the empty realm.", auditTrail.eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(randomFrom(filteredUsers)), Optional.empty(), - Optional.of(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0])), + Optional.of(authzInfo(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles) + .toArray(new String[0]))), Optional.of(randomSubsetOf(randomIntBetween(1, filteredIndices.size()), filteredIndices).toArray(new String[0]))))); final List someRolesDoNotMatch = new ArrayList<>(randomSubsetOf(randomIntBetween(0, filteredRoles.size()), filteredRoles)); for (int i = 0; i < randomIntBetween(1, 8); i++) { @@ -221,9 +228,9 @@ public void testSingleCompletePolicyPredicate() throws Exception { } assertFalse("Does not match the filter predicate because of some of the roles.", auditTrail.eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(randomFrom(filteredUsers)), - Optional.of(randomFrom(filteredRealms)), Optional.of(someRolesDoNotMatch.toArray(new String[0])), + Optional.of(randomFrom(filteredRealms)), Optional.of(authzInfo(someRolesDoNotMatch.toArray(new String[0]))), Optional.of(randomSubsetOf(randomIntBetween(1, filteredIndices.size()), filteredIndices).toArray(new String[0]))))); - final Optional emptyRoles = randomBoolean() ? Optional.empty() : Optional.of(new String[0]); + final Optional emptyRoles = randomBoolean() ? Optional.empty() : Optional.of(authzInfo(new String[0])); assertFalse("Does not match the filter predicate because of the empty roles.", auditTrail.eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(randomFrom(filteredUsers)), Optional.of(randomFrom(filteredRealms)), emptyRoles, @@ -236,13 +243,15 @@ public void testSingleCompletePolicyPredicate() throws Exception { assertFalse("Does not match the filter predicate because of some of the indices.", auditTrail.eventFilterPolicyRegistry.ignorePredicate() .test(new AuditEventMetaInfo(Optional.of(randomFrom(filteredUsers)), Optional.of(randomFrom(filteredRealms)), - Optional.of(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0])), + Optional.of(authzInfo(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles) + .toArray(new String[0]))), Optional.of(someIndicesDoNotMatch.toArray(new String[0]))))); final Optional emptyIndices = randomBoolean() ? 
Optional.empty() : Optional.of(new String[0]); assertFalse("Does not match the filter predicate because of the empty indices.", auditTrail.eventFilterPolicyRegistry.ignorePredicate() .test(new AuditEventMetaInfo(Optional.of(randomFrom(filteredUsers)), Optional.of(randomFrom(filteredRealms)), - Optional.of(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0])), + Optional.of(authzInfo(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles) + .toArray(new String[0]))), emptyIndices))); } @@ -284,7 +293,8 @@ public void testSingleCompleteWithEmptyFieldPolicyPredicate() throws Exception { assertTrue("Matches the filter predicate.", auditTrail.eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(randomFrom(filteredUsers)), Optional.of(randomFrom(filteredRealms)), - Optional.of(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0])), + Optional.of(authzInfo(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles) + .toArray(new String[0]))), Optional.of(randomSubsetOf(randomIntBetween(1, filteredIndices.size()), filteredIndices).toArray(new String[0]))))); final User unfilteredUser; if (randomBoolean()) { @@ -297,22 +307,26 @@ public void testSingleCompleteWithEmptyFieldPolicyPredicate() throws Exception { assertFalse("Does not match the filter predicate because of the user.", auditTrail.eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(unfilteredUser), Optional.of(randomFrom(filteredRealms)), - Optional.of(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0])), + Optional.of(authzInfo(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles) + .toArray(new String[0]))), Optional.of(randomSubsetOf(randomIntBetween(1, filteredIndices.size()), filteredIndices).toArray(new String[0]))))); assertTrue("Matches the filter predicate because of the empty user.", auditTrail.eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.empty(), Optional.of(randomFrom(filteredRealms)), - Optional.of(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0])), + Optional.of(authzInfo(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles) + .toArray(new String[0]))), Optional.of(randomSubsetOf(randomIntBetween(1, filteredIndices.size()), filteredIndices).toArray(new String[0]))))); assertFalse("Does not match the filter predicate because of the realm.", auditTrail.eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(randomFrom(filteredUsers)), Optional.of(UNFILTER_MARKER + randomAlphaOfLengthBetween(1, 8)), - Optional.of(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0])), + Optional.of(authzInfo(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles) + .toArray(new String[0]))), Optional.of(randomSubsetOf(randomIntBetween(1, filteredIndices.size()), filteredIndices).toArray(new String[0]))))); assertTrue("Matches the filter predicate because of the empty realm.", auditTrail.eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(randomFrom(filteredUsers)), Optional.empty(), - Optional.of(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0])), + Optional.of(authzInfo(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles) + 
.toArray(new String[0]))), Optional.of(randomSubsetOf(randomIntBetween(1, filteredIndices.size()), filteredIndices).toArray(new String[0]))))); final List someRolesDoNotMatch = new ArrayList<>(randomSubsetOf(randomIntBetween(0, filteredRoles.size()), filteredRoles)); for (int i = 0; i < randomIntBetween(1, 8); i++) { @@ -320,9 +334,9 @@ public void testSingleCompleteWithEmptyFieldPolicyPredicate() throws Exception { } assertFalse("Does not match the filter predicate because of some of the roles.", auditTrail.eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(randomFrom(filteredUsers)), - Optional.of(randomFrom(filteredRealms)), Optional.of(someRolesDoNotMatch.toArray(new String[0])), + Optional.of(randomFrom(filteredRealms)), Optional.of(authzInfo(someRolesDoNotMatch.toArray(new String[0]))), Optional.of(randomSubsetOf(randomIntBetween(1, filteredIndices.size()), filteredIndices).toArray(new String[0]))))); - final Optional emptyRoles = randomBoolean() ? Optional.empty() : Optional.of(new String[0]); + final Optional emptyRoles = randomBoolean() ? Optional.empty() : Optional.of(authzInfo(new String[0])); assertTrue("Matches the filter predicate because of the empty roles.", auditTrail.eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(randomFrom(filteredUsers)), Optional.of(randomFrom(filteredRealms)), emptyRoles, @@ -335,19 +349,23 @@ public void testSingleCompleteWithEmptyFieldPolicyPredicate() throws Exception { assertFalse("Does not match the filter predicate because of some of the indices.", auditTrail.eventFilterPolicyRegistry.ignorePredicate() .test(new AuditEventMetaInfo(Optional.of(randomFrom(filteredUsers)), Optional.of(randomFrom(filteredRealms)), - Optional.of(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0])), + Optional.of(authzInfo(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles) + .toArray(new String[0]))), Optional.of(someIndicesDoNotMatch.toArray(new String[0]))))); assertTrue("Matches the filter predicate because of the empty indices.", auditTrail.eventFilterPolicyRegistry.ignorePredicate() .test(new AuditEventMetaInfo(Optional.of(randomFrom(filteredUsers)), Optional.of(randomFrom(filteredRealms)), - Optional.of(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0])), + Optional.of(authzInfo( + randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0]))), Optional.empty()))); assertTrue("Matches the filter predicate because of the empty indices.", auditTrail.eventFilterPolicyRegistry.ignorePredicate() .test(new AuditEventMetaInfo(Optional.of(randomFrom(filteredUsers)), Optional.of(randomFrom(filteredRealms)), - Optional.of(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0])), + Optional.of(authzInfo( + randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0]))), Optional.of(new String[0])))); assertTrue("Matches the filter predicate because of the empty indices.", auditTrail.eventFilterPolicyRegistry.ignorePredicate() .test(new AuditEventMetaInfo(Optional.of(randomFrom(filteredUsers)), Optional.of(randomFrom(filteredRealms)), - Optional.of(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0])), + Optional.of(authzInfo( + randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0]))), Optional.of(new 
String[] { null })))); } @@ -396,26 +414,28 @@ public void testTwoPolicyPredicatesWithMissingFields() throws Exception { assertTrue("Matches both the first and the second filter predicates.", auditTrail.eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(randomFrom(filteredUsers)), Optional.of(randomFrom(filteredRealms)), - Optional.of(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0])), + Optional.of(authzInfo(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles) + .toArray(new String[0]))), Optional.of(randomSubsetOf(randomIntBetween(1, filteredIndices.size()), filteredIndices).toArray(new String[0]))))); // matches first policy but not the second assertTrue("Matches the first filter predicate but not the second.", auditTrail.eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(unfilteredUser), Optional.of(randomFrom(filteredRealms)), - Optional.of(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles).toArray(new String[0])), + Optional.of(authzInfo(randomSubsetOf(randomIntBetween(1, filteredRoles.size()), filteredRoles) + .toArray(new String[0]))), Optional.of(someIndicesDoNotMatch.toArray(new String[0]))))); // matches the second policy but not the first assertTrue("Matches the second filter predicate but not the first.", auditTrail.eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(randomFrom(filteredUsers)), Optional.of(UNFILTER_MARKER + randomAlphaOfLengthBetween(1, 8)), - Optional.of(someRolesDoNotMatch.toArray(new String[0])), + Optional.of(authzInfo(someRolesDoNotMatch.toArray(new String[0]))), Optional.of(randomSubsetOf(randomIntBetween(1, filteredIndices.size()), filteredIndices).toArray(new String[0]))))); // matches neither the first nor the second policies assertFalse("Matches neither the first nor the second filter predicates.", auditTrail.eventFilterPolicyRegistry.ignorePredicate() .test(new AuditEventMetaInfo(Optional.of(unfilteredUser), Optional.of(UNFILTER_MARKER + randomAlphaOfLengthBetween(1, 8)), - Optional.of(someRolesDoNotMatch.toArray(new String[0])), + Optional.of(authzInfo(someRolesDoNotMatch.toArray(new String[0]))), Optional.of(someIndicesDoNotMatch.toArray(new String[0]))))); } @@ -548,55 +568,61 @@ public void testUsersFilter() throws Exception { threadContext.stashContext(); // accessGranted - auditTrail.accessGranted(randomAlphaOfLength(8), unfilteredAuthentication, "_action", message, new String[] { "role1" }); + auditTrail.accessGranted(randomAlphaOfLength(8), unfilteredAuthentication, "_action", message, authzInfo(new String[] { "role1" })); assertThat("AccessGranted message: unfiltered user is filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); - auditTrail.accessGranted(randomAlphaOfLength(8), filteredAuthentication, "_action", message, new String[] { "role1" }); + auditTrail.accessGranted(randomAlphaOfLength(8), filteredAuthentication, "_action", message, authzInfo(new String[] { "role1" })); assertThat("AccessGranted message: filtered user is not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessGranted(randomAlphaOfLength(8), createAuthentication(SystemUser.INSTANCE, "effectiveRealmName"), - "internal:_action", message, new String[] { "role1" }); + "internal:_action", message, authzInfo(new String[] { "role1" })); assertThat("AccessGranted internal message: system user is 
filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); - auditTrail.accessGranted(randomAlphaOfLength(8), unfilteredAuthentication, "internal:_action", message, new String[] { "role1" }); + auditTrail.accessGranted(randomAlphaOfLength(8), unfilteredAuthentication, "internal:_action", message, + authzInfo(new String[] { "role1" })); assertThat("AccessGranted internal message: unfiltered user is filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); - auditTrail.accessGranted(randomAlphaOfLength(8), filteredAuthentication, "internal:_action", message, new String[] { "role1" }); + auditTrail.accessGranted(randomAlphaOfLength(8), filteredAuthentication, "internal:_action", message, + authzInfo(new String[] { "role1" })); assertThat("AccessGranted internal message: filtered user is not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); // accessDenied - auditTrail.accessDenied(randomAlphaOfLength(8), unfilteredAuthentication, "_action", message, new String[] { "role1" }); + auditTrail.accessDenied(randomAlphaOfLength(8), unfilteredAuthentication, "_action", message, + authzInfo(new String[] { "role1" })); assertThat("AccessDenied message: unfiltered user is filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); - auditTrail.accessDenied(randomAlphaOfLength(8), filteredAuthentication, "_action", message, new String[] { "role1" }); + auditTrail.accessDenied(randomAlphaOfLength(8), filteredAuthentication, "_action", message, + authzInfo(new String[] { "role1" })); assertThat("AccessDenied message: filtered user is not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessDenied(randomAlphaOfLength(8), createAuthentication(SystemUser.INSTANCE, "effectiveRealmName"), "internal:_action", - message, new String[] { "role1" }); + message, authzInfo(new String[] { "role1" })); assertThat("AccessDenied internal message: system user is filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); - auditTrail.accessDenied(randomAlphaOfLength(8), unfilteredAuthentication, "internal:_action", message, new String[] { "role1" }); + auditTrail.accessDenied(randomAlphaOfLength(8), unfilteredAuthentication, "internal:_action", message, + authzInfo(new String[] { "role1" })); assertThat("AccessDenied internal message: unfiltered user is filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); - auditTrail.accessDenied(randomAlphaOfLength(8), filteredAuthentication, "internal:_action", message, new String[] { "role1" }); + auditTrail.accessDenied(randomAlphaOfLength(8), filteredAuthentication, "internal:_action", message, + authzInfo(new String[] { "role1" })); assertThat("AccessDenied internal message: filtered user is not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); @@ -652,36 +678,36 @@ public void testUsersFilter() throws Exception { // runAsGranted auditTrail.runAsGranted(randomAlphaOfLength(8), unfilteredAuthentication, "_action", new MockMessage(threadContext), - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("RunAsGranted message: unfiltered user is filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); auditTrail.runAsGranted(randomAlphaOfLength(8), filteredAuthentication, "_action", new MockMessage(threadContext), - new String[] { "role1" }); 
+ authzInfo(new String[] { "role1" })); assertThat("RunAsGranted message: filtered user is not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); // runAsDenied auditTrail.runAsDenied(randomAlphaOfLength(8), unfilteredAuthentication, "_action", new MockMessage(threadContext), - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("RunAsDenied message: unfiltered user is filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); auditTrail.runAsDenied(randomAlphaOfLength(8), filteredAuthentication, "_action", new MockMessage(threadContext), - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("RunAsDenied message: filtered user is not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); - auditTrail.runAsDenied(randomAlphaOfLength(8), unfilteredAuthentication, getRestRequest(), new String[] { "role1" }); + auditTrail.runAsDenied(randomAlphaOfLength(8), unfilteredAuthentication, getRestRequest(), authzInfo(new String[] { "role1" })); assertThat("RunAsDenied rest request: unfiltered user is filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); - auditTrail.runAsDenied(randomAlphaOfLength(8), filteredAuthentication, getRestRequest(), new String[] { "role1" }); + auditTrail.runAsDenied(randomAlphaOfLength(8), filteredAuthentication, getRestRequest(), authzInfo(new String[] { "role1" })); assertThat("RunAsDenied rest request: filtered user is not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); @@ -826,74 +852,74 @@ public void testRealmsFilter() throws Exception { // accessGranted auditTrail.accessGranted(randomAlphaOfLength(8), createAuthentication(user, filteredRealm), "_action", message, - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("AccessGranted message: filtered realm is not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessGranted(randomAlphaOfLength(8), createAuthentication(user, unfilteredRealm), "_action", message, - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("AccessGranted message: unfiltered realm is filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessGranted(randomAlphaOfLength(8), createAuthentication(SystemUser.INSTANCE, filteredRealm), "internal:_action", - message, new String[] { "role1" }); + message, authzInfo(new String[] { "role1" })); assertThat("AccessGranted internal message system user: filtered realm is not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessGranted(randomAlphaOfLength(8), createAuthentication(SystemUser.INSTANCE, unfilteredRealm), "internal:_action", - message, new String[] { "role1" }); + message, authzInfo(new String[] { "role1" })); assertThat("AccessGranted internal message system user: unfiltered realm is filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessGranted(randomAlphaOfLength(8), createAuthentication(user, filteredRealm), "internal:_action", message, - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("AccessGranted internal message: filtered realm is not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessGranted(randomAlphaOfLength(8), 
createAuthentication(user, unfilteredRealm), "internal:_action", message, - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("AccessGranted internal message: unfiltered realm is filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); // accessDenied auditTrail.accessDenied(randomAlphaOfLength(8), createAuthentication(user, filteredRealm), "_action", message, - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("AccessDenied message: filtered realm is not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessDenied(randomAlphaOfLength(8), createAuthentication(user, unfilteredRealm), "_action", message, - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("AccessDenied message: unfiltered realm is filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessDenied(randomAlphaOfLength(8), createAuthentication(SystemUser.INSTANCE, filteredRealm), "internal:_action", - message, new String[] { "role1" }); + message, authzInfo(new String[] { "role1" })); assertThat("AccessDenied internal message system user: filtered realm is not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessDenied(randomAlphaOfLength(8), createAuthentication(SystemUser.INSTANCE, unfilteredRealm), "internal:_action", - message, new String[] { "role1" }); + message, authzInfo(new String[] { "role1" })); assertThat("AccessDenied internal message system user: unfiltered realm is filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessDenied(randomAlphaOfLength(8), createAuthentication(user, filteredRealm), "internal:_action", message, - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("AccessGranted internal message: filtered realm is not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessDenied(randomAlphaOfLength(8), createAuthentication(user, unfilteredRealm), "internal:_action", message, - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("AccessGranted internal message: unfiltered realm is filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); @@ -948,38 +974,38 @@ public void testRealmsFilter() throws Exception { // runAsGranted auditTrail.runAsGranted(randomAlphaOfLength(8), createAuthentication(user, filteredRealm), "_action", - new MockMessage(threadContext), new String[] { "role1" }); + new MockMessage(threadContext), authzInfo(new String[] { "role1" })); assertThat("RunAsGranted message: filtered realm is not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); auditTrail.runAsGranted(randomAlphaOfLength(8), createAuthentication(user, unfilteredRealm), "_action", - new MockMessage(threadContext), new String[] { "role1" }); + new MockMessage(threadContext), authzInfo(new String[] { "role1" })); assertThat("RunAsGranted message: unfiltered realm is filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); // runAsDenied auditTrail.runAsDenied(randomAlphaOfLength(8), createAuthentication(user, filteredRealm), "_action", new MockMessage(threadContext), - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("RunAsDenied message: filtered realm is not filtered out", 
logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); auditTrail.runAsDenied(randomAlphaOfLength(8), createAuthentication(user, unfilteredRealm), "_action", - new MockMessage(threadContext), new String[] { "role1" }); + new MockMessage(threadContext), authzInfo(new String[] { "role1" })); assertThat("RunAsDenied message: unfiltered realm is filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); auditTrail.runAsDenied(randomAlphaOfLength(8), createAuthentication(user, filteredRealm), getRestRequest(), - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("RunAsDenied rest request: filtered realm is not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); auditTrail.runAsDenied(randomAlphaOfLength(8), createAuthentication(user, unfilteredRealm), getRestRequest(), - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("RunAsDenied rest request: unfiltered realm is filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); @@ -1143,67 +1169,67 @@ public void testRolesFilter() throws Exception { threadContext.stashContext(); // accessGranted - auditTrail.accessGranted(randomAlphaOfLength(8), authentication, "_action", message, unfilteredRoles); + auditTrail.accessGranted(randomAlphaOfLength(8), authentication, "_action", message, authzInfo(unfilteredRoles)); assertThat("AccessGranted message: unfiltered roles filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); - auditTrail.accessGranted(randomAlphaOfLength(8), authentication, "_action", message, filteredRoles); + auditTrail.accessGranted(randomAlphaOfLength(8), authentication, "_action", message, authzInfo(filteredRoles)); assertThat("AccessGranted message: filtered roles not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessGranted(randomAlphaOfLength(8), createAuthentication(SystemUser.INSTANCE, "effectiveRealmName"), - "internal:_action", message, unfilteredRoles); + "internal:_action", message, authzInfo(unfilteredRoles)); assertThat("AccessGranted internal message system user: unfiltered roles filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessGranted(randomAlphaOfLength(8), createAuthentication(SystemUser.INSTANCE, "effectiveRealmName"), - "internal:_action", message, filteredRoles); + "internal:_action", message, authzInfo(filteredRoles)); assertThat("AccessGranted internal message system user: filtered roles not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); - auditTrail.accessGranted(randomAlphaOfLength(8), authentication, "internal:_action", message, unfilteredRoles); + auditTrail.accessGranted(randomAlphaOfLength(8), authentication, "internal:_action", message, authzInfo(unfilteredRoles)); assertThat("AccessGranted internal message: unfiltered roles filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); - auditTrail.accessGranted(randomAlphaOfLength(8), authentication, "internal:_action", message, filteredRoles); + auditTrail.accessGranted(randomAlphaOfLength(8), authentication, "internal:_action", message, authzInfo(filteredRoles)); assertThat("AccessGranted internal message: filtered roles not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); // accessDenied - 
auditTrail.accessDenied(randomAlphaOfLength(8), authentication, "_action", message, unfilteredRoles); + auditTrail.accessDenied(randomAlphaOfLength(8), authentication, "_action", message, authzInfo(unfilteredRoles)); assertThat("AccessDenied message: unfiltered roles filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); - auditTrail.accessDenied(randomAlphaOfLength(8), authentication, "_action", message, filteredRoles); + auditTrail.accessDenied(randomAlphaOfLength(8), authentication, "_action", message, authzInfo(filteredRoles)); assertThat("AccessDenied message: filtered roles not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessDenied(randomAlphaOfLength(8), createAuthentication(SystemUser.INSTANCE, "effectiveRealmName"), "internal:_action", - message, unfilteredRoles); + message, authzInfo(unfilteredRoles)); assertThat("AccessDenied internal message system user: unfiltered roles filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessDenied(randomAlphaOfLength(8), createAuthentication(SystemUser.INSTANCE, "effectiveRealmName"), "internal:_action", - message, filteredRoles); + message, authzInfo(filteredRoles)); assertThat("AccessDenied internal message system user: filtered roles not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); - auditTrail.accessDenied(randomAlphaOfLength(8), authentication, "internal:_action", message, unfilteredRoles); + auditTrail.accessDenied(randomAlphaOfLength(8), authentication, "internal:_action", message, authzInfo(unfilteredRoles)); assertThat("AccessDenied internal message: unfiltered roles filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); - auditTrail.accessDenied(randomAlphaOfLength(8), authentication, "internal:_action", message, filteredRoles); + auditTrail.accessDenied(randomAlphaOfLength(8), authentication, "internal:_action", message, authzInfo(filteredRoles)); assertThat("AccessDenied internal message: filtered roles not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); @@ -1229,33 +1255,36 @@ public void testRolesFilter() throws Exception { threadContext.stashContext(); // runAsGranted - auditTrail.runAsGranted(randomAlphaOfLength(8), authentication, "_action", new MockMessage(threadContext), unfilteredRoles); + auditTrail.runAsGranted(randomAlphaOfLength(8), authentication, "_action", new MockMessage(threadContext), + authzInfo(unfilteredRoles)); assertThat("RunAsGranted message: unfiltered roles filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); - auditTrail.runAsGranted(randomAlphaOfLength(8), authentication, "_action", new MockMessage(threadContext), filteredRoles); + auditTrail.runAsGranted(randomAlphaOfLength(8), authentication, "_action", new MockMessage(threadContext), + authzInfo(filteredRoles)); assertThat("RunAsGranted message: filtered roles not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); // runAsDenied - auditTrail.runAsDenied(randomAlphaOfLength(8), authentication, "_action", new MockMessage(threadContext), unfilteredRoles); + auditTrail.runAsDenied(randomAlphaOfLength(8), authentication, "_action", new MockMessage(threadContext), + authzInfo(unfilteredRoles)); assertThat("RunAsDenied message: unfiltered roles filtered out", logOutput.size(), is(1)); logOutput.clear(); 
threadContext.stashContext(); - auditTrail.runAsDenied(randomAlphaOfLength(8), authentication, "_action", new MockMessage(threadContext), filteredRoles); + auditTrail.runAsDenied(randomAlphaOfLength(8), authentication, "_action", new MockMessage(threadContext), authzInfo(filteredRoles)); assertThat("RunAsDenied message: filtered roles not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); - auditTrail.runAsDenied(randomAlphaOfLength(8), authentication, getRestRequest(), unfilteredRoles); + auditTrail.runAsDenied(randomAlphaOfLength(8), authentication, getRestRequest(), authzInfo(unfilteredRoles)); assertThat("RunAsDenied rest request: unfiltered roles filtered out", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); - auditTrail.runAsDenied(randomAlphaOfLength(8), authentication, getRestRequest(), filteredRoles); + auditTrail.runAsDenied(randomAlphaOfLength(8), authentication, getRestRequest(), authzInfo(filteredRoles)); assertThat("RunAsDenied rest request: filtered roles not filtered out", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); @@ -1464,7 +1493,7 @@ public void testIndicesFilter() throws Exception { threadContext.stashContext(); // accessGranted - auditTrail.accessGranted(randomAlphaOfLength(8), authentication, "_action", noIndexMessage, new String[] { "role1" }); + auditTrail.accessGranted(randomAlphaOfLength(8), authentication, "_action", noIndexMessage, authzInfo(new String[] { "role1" })); if (filterMissingIndices) { assertThat("AccessGranted message no index: not filtered out by the missing indices filter", logOutput.size(), is(0)); } else { @@ -1475,19 +1504,19 @@ public void testIndicesFilter() throws Exception { auditTrail.accessGranted(randomAlphaOfLength(8), authentication, "_action", new MockIndicesRequest(threadContext, unfilteredIndices), - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("AccessGranted message unfiltered indices: filtered out by indices filter", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessGranted(randomAlphaOfLength(8), authentication, "_action", new MockIndicesRequest(threadContext, filteredIndices), - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("AccessGranted message filtered indices: not filtered out by indices filter", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessGranted(randomAlphaOfLength(8), createAuthentication(SystemUser.INSTANCE, "effectiveRealmName"), - "internal:_action", noIndexMessage, new String[] { "role1" }); + "internal:_action", noIndexMessage, authzInfo(new String[] { "role1" })); if (filterMissingIndices) { assertThat("AccessGranted message system user no index: not filtered out by the missing indices filter", logOutput.size(), is(0)); @@ -1498,19 +1527,19 @@ public void testIndicesFilter() throws Exception { threadContext.stashContext(); auditTrail.accessGranted(randomAlphaOfLength(8), createAuthentication(SystemUser.INSTANCE, "effectiveRealmName"), - "internal:_action", new MockIndicesRequest(threadContext, unfilteredIndices), new String[] { "role1" }); + "internal:_action", new MockIndicesRequest(threadContext, unfilteredIndices), authzInfo(new String[] { "role1" })); assertThat("AccessGranted message system user unfiltered indices: filtered out by indices filter", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); 
auditTrail.accessGranted(randomAlphaOfLength(8), createAuthentication(SystemUser.INSTANCE, "effectiveRealmName"), - "internal:_action", new MockIndicesRequest(threadContext, filteredIndices), new String[] { "role1" }); + "internal:_action", new MockIndicesRequest(threadContext, filteredIndices), authzInfo(new String[] { "role1" })); assertThat("AccessGranted message system user filtered indices: not filtered out by indices filter", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); // accessDenied - auditTrail.accessDenied(randomAlphaOfLength(8), authentication, "_action", noIndexMessage, new String[] { "role1" }); + auditTrail.accessDenied(randomAlphaOfLength(8), authentication, "_action", noIndexMessage, authzInfo(new String[] { "role1" })); if (filterMissingIndices) { assertThat("AccessDenied message no index: not filtered out by the missing indices filter", logOutput.size(), is(0)); } else { @@ -1520,19 +1549,19 @@ public void testIndicesFilter() throws Exception { threadContext.stashContext(); auditTrail.accessDenied(randomAlphaOfLength(8), authentication, "_action", new MockIndicesRequest(threadContext, unfilteredIndices), - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("AccessDenied message unfiltered indices: filtered out by indices filter", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessDenied(randomAlphaOfLength(8), authentication, "_action", new MockIndicesRequest(threadContext, filteredIndices), - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("AccessDenied message filtered indices: not filtered out by indices filter", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessDenied(randomAlphaOfLength(8), createAuthentication(SystemUser.INSTANCE, "effectiveRealmName"), "internal:_action", - noIndexMessage, new String[] { "role1" }); + noIndexMessage, authzInfo(new String[] { "role1" })); if (filterMissingIndices) { assertThat("AccessDenied message system user no index: not filtered out by the missing indices filter", logOutput.size(), is(0)); @@ -1544,14 +1573,14 @@ public void testIndicesFilter() throws Exception { auditTrail.accessDenied(randomAlphaOfLength(8), createAuthentication(SystemUser.INSTANCE, "effectiveRealmName"), "internal:_action", new MockIndicesRequest(threadContext, unfilteredIndices), - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("AccessDenied message system user unfiltered indices: filtered out by indices filter", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); auditTrail.accessDenied(randomAlphaOfLength(8), createAuthentication(SystemUser.INSTANCE, "effectiveRealmName"), "internal:_action", new MockIndicesRequest(threadContext, filteredIndices), - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("AccessGranted message system user filtered indices: not filtered out by indices filter", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); @@ -1577,7 +1606,7 @@ public void testIndicesFilter() throws Exception { threadContext.stashContext(); // runAsGranted - auditTrail.runAsGranted(randomAlphaOfLength(8), authentication, "_action", noIndexMessage, new String[] { "role1" }); + auditTrail.runAsGranted(randomAlphaOfLength(8), authentication, "_action", noIndexMessage, authzInfo(new String[] { "role1" })); if (filterMissingIndices) { assertThat("RunAsGranted message no index: not filtered 
out by missing indices filter", logOutput.size(), is(0)); } else { @@ -1587,19 +1616,19 @@ public void testIndicesFilter() throws Exception { threadContext.stashContext(); auditTrail.runAsGranted(randomAlphaOfLength(8), authentication, "_action", new MockIndicesRequest(threadContext, unfilteredIndices), - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("RunAsGranted message unfiltered indices: filtered out by indices filter", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); auditTrail.runAsGranted(randomAlphaOfLength(8), authentication, "_action", new MockIndicesRequest(threadContext, filteredIndices), - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("RunAsGranted message filtered indices: not filtered out by indices filter", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); // runAsDenied - auditTrail.runAsDenied(randomAlphaOfLength(8), authentication, "_action", noIndexMessage, new String[] { "role1" }); + auditTrail.runAsDenied(randomAlphaOfLength(8), authentication, "_action", noIndexMessage, authzInfo(new String[] { "role1" })); if (filterMissingIndices) { assertThat("RunAsDenied message no index: not filtered out by missing indices filter", logOutput.size(), is(0)); } else { @@ -1609,18 +1638,18 @@ public void testIndicesFilter() throws Exception { threadContext.stashContext(); auditTrail.runAsDenied(randomAlphaOfLength(8), authentication, "_action", new MockIndicesRequest(threadContext, unfilteredIndices), - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("RunAsDenied message unfiltered indices: filtered out by indices filter", logOutput.size(), is(1)); logOutput.clear(); threadContext.stashContext(); auditTrail.runAsDenied(randomAlphaOfLength(8), authentication, "_action", new MockIndicesRequest(threadContext, filteredIndices), - new String[] { "role1" }); + authzInfo(new String[] { "role1" })); assertThat("RunAsDenied message filtered indices: not filtered out by indices filter", logOutput.size(), is(0)); logOutput.clear(); threadContext.stashContext(); - auditTrail.runAsDenied(randomAlphaOfLength(8), authentication, getRestRequest(), new String[] { "role1" }); + auditTrail.runAsDenied(randomAlphaOfLength(8), authentication, getRestRequest(), authzInfo(new String[] { "role1" })); if (filterMissingIndices) { assertThat("RunAsDenied rest request: not filtered out by missing indices filter", logOutput.size(), is(0)); } else { @@ -1756,5 +1785,7 @@ public String toString() { } } - + private static AuthorizationInfo authzInfo(String[] roles) { + return () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, roles); + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java index 817ed2a2358d0..55d5bd579c12d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java @@ -40,6 +40,7 @@ import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.audit.AuditTrail; import org.elasticsearch.xpack.security.audit.AuditUtil; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; import 
org.elasticsearch.xpack.security.rest.RemoteHostHeader; import org.elasticsearch.xpack.security.transport.filter.IPFilter; import org.elasticsearch.xpack.security.transport.filter.SecurityIpFilterRule; @@ -63,6 +64,8 @@ import java.util.Map; import java.util.Properties; import java.util.regex.Pattern; + +import static org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail.PRINCIPAL_ROLES_FIELD_NAME; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; @@ -484,11 +487,12 @@ public void testAuthenticationFailedRealmRest() throws Exception { public void testAccessGranted() throws Exception { final TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext); - final String[] roles = randomArray(0, 4, String[]::new, () -> randomBoolean() ? null : randomAlphaOfLengthBetween(1, 4)); + final String[] expectedRoles = randomArray(0, 4, String[]::new, () -> randomBoolean() ? null : randomAlphaOfLengthBetween(1, 4)); + final AuthorizationInfo authorizationInfo = () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, expectedRoles); final Authentication authentication = createAuthentication(); final String requestId = randomRequestId(); - auditTrail.accessGranted(requestId, authentication, "_action", message, roles); + auditTrail.accessGranted(requestId, authentication, "_action", message, authorizationInfo); final MapBuilder checkedFields = new MapBuilder<>(commonFields); final MapBuilder checkedArrayFields = new MapBuilder<>(); checkedFields.put(LoggingAuditTrail.EVENT_TYPE_FIELD_NAME, LoggingAuditTrail.TRANSPORT_ORIGIN_FIELD_VALUE) @@ -496,7 +500,7 @@ public void testAccessGranted() throws Exception { .put(LoggingAuditTrail.ACTION_FIELD_NAME, "_action") .put(LoggingAuditTrail.REQUEST_NAME_FIELD_NAME, message.getClass().getSimpleName()) .put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId); - checkedArrayFields.put(LoggingAuditTrail.PRINCIPAL_ROLES_FIELD_NAME, roles); + checkedArrayFields.put(PRINCIPAL_ROLES_FIELD_NAME, (String[]) authorizationInfo.asMap().get(PRINCIPAL_ROLES_FIELD_NAME)); subject(authentication, checkedFields); restOrTransportOrigin(message, threadContext, checkedFields); indicesRequest(message, checkedFields, checkedArrayFields); @@ -511,16 +515,17 @@ public void testAccessGranted() throws Exception { .put("xpack.security.audit.logfile.events.exclude", "access_granted") .build(); auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - auditTrail.accessGranted(requestId, authentication, "_action", message, roles); + auditTrail.accessGranted(requestId, authentication, "_action", message, authorizationInfo); assertEmptyLog(logger); } public void testAccessGrantedInternalSystemAction() throws Exception { final TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext); - final String[] roles = randomArray(0, 4, String[]::new, () -> randomBoolean() ? null : randomAlphaOfLengthBetween(1, 4)); + final String[] expectedRoles = randomArray(0, 4, String[]::new, () -> randomBoolean() ? 
null : randomAlphaOfLengthBetween(1, 4)); + final AuthorizationInfo authorizationInfo = () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, expectedRoles); final Authentication authentication = new Authentication(SystemUser.INSTANCE, new RealmRef("_reserved", "test", "foo"), null); final String requestId = randomRequestId(); - auditTrail.accessGranted(requestId, authentication, "internal:_action", message, roles); + auditTrail.accessGranted(requestId, authentication, "internal:_action", message, authorizationInfo); assertEmptyLog(logger); // test enabled @@ -529,7 +534,7 @@ public void testAccessGrantedInternalSystemAction() throws Exception { .put("xpack.security.audit.logfile.events.include", "system_access_granted") .build(); auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - auditTrail.accessGranted(requestId, authentication, "internal:_action", message, roles); + auditTrail.accessGranted(requestId, authentication, "internal:_action", message, authorizationInfo); final MapBuilder checkedFields = new MapBuilder<>(commonFields); final MapBuilder checkedArrayFields = new MapBuilder<>(); checkedFields.put(LoggingAuditTrail.EVENT_TYPE_FIELD_NAME, LoggingAuditTrail.TRANSPORT_ORIGIN_FIELD_VALUE) @@ -539,7 +544,7 @@ public void testAccessGrantedInternalSystemAction() throws Exception { .put(LoggingAuditTrail.ACTION_FIELD_NAME, "internal:_action") .put(LoggingAuditTrail.REQUEST_NAME_FIELD_NAME, message.getClass().getSimpleName()) .put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId); - checkedArrayFields.put(LoggingAuditTrail.PRINCIPAL_ROLES_FIELD_NAME, roles); + checkedArrayFields.put(PRINCIPAL_ROLES_FIELD_NAME, (String[]) authorizationInfo.asMap().get(PRINCIPAL_ROLES_FIELD_NAME)); restOrTransportOrigin(message, threadContext, checkedFields); indicesRequest(message, checkedFields, checkedArrayFields); opaqueId(threadContext, checkedFields); @@ -549,11 +554,12 @@ public void testAccessGrantedInternalSystemAction() throws Exception { public void testAccessGrantedInternalSystemActionNonSystemUser() throws Exception { final TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext); - final String[] roles = randomArray(0, 4, String[]::new, () -> randomBoolean() ? null : randomAlphaOfLengthBetween(1, 4)); + final String[] expectedRoles = randomArray(0, 4, String[]::new, () -> randomBoolean() ? 
null : randomAlphaOfLengthBetween(1, 4)); + final AuthorizationInfo authorizationInfo = () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, expectedRoles); final Authentication authentication = createAuthentication(); final String requestId = randomRequestId(); - auditTrail.accessGranted(requestId, authentication, "internal:_action", message, roles); + auditTrail.accessGranted(requestId, authentication, "internal:_action", message, authorizationInfo); final MapBuilder checkedFields = new MapBuilder<>(commonFields); final MapBuilder checkedArrayFields = new MapBuilder<>(); checkedFields.put(LoggingAuditTrail.EVENT_TYPE_FIELD_NAME, LoggingAuditTrail.TRANSPORT_ORIGIN_FIELD_VALUE) @@ -561,7 +567,7 @@ public void testAccessGrantedInternalSystemActionNonSystemUser() throws Exceptio .put(LoggingAuditTrail.ACTION_FIELD_NAME, "internal:_action") .put(LoggingAuditTrail.REQUEST_NAME_FIELD_NAME, message.getClass().getSimpleName()) .put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId); - checkedArrayFields.put(LoggingAuditTrail.PRINCIPAL_ROLES_FIELD_NAME, roles); + checkedArrayFields.put(PRINCIPAL_ROLES_FIELD_NAME, (String[]) authorizationInfo.asMap().get(PRINCIPAL_ROLES_FIELD_NAME)); subject(authentication, checkedFields); restOrTransportOrigin(message, threadContext, checkedFields); indicesRequest(message, checkedFields, checkedArrayFields); @@ -576,17 +582,18 @@ public void testAccessGrantedInternalSystemActionNonSystemUser() throws Exceptio .put("xpack.security.audit.logfile.events.exclude", "access_granted") .build(); auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - auditTrail.accessGranted(requestId, authentication, "internal:_action", message, roles); + auditTrail.accessGranted(requestId, authentication, "internal:_action", message, authorizationInfo); assertEmptyLog(logger); } public void testAccessDenied() throws Exception { final TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext); - final String[] roles = randomArray(0, 4, String[]::new, () -> randomBoolean() ? null : randomAlphaOfLengthBetween(1, 4)); + final String[] expectedRoles = randomArray(0, 4, String[]::new, () -> randomBoolean() ? 
null : randomAlphaOfLengthBetween(1, 4)); + final AuthorizationInfo authorizationInfo = () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, expectedRoles); final Authentication authentication = createAuthentication(); final String requestId = randomRequestId(); - auditTrail.accessDenied(requestId, authentication, "_action/bar", message, roles); + auditTrail.accessDenied(requestId, authentication, "_action/bar", message, authorizationInfo); final MapBuilder checkedFields = new MapBuilder<>(commonFields); final MapBuilder checkedArrayFields = new MapBuilder<>(); checkedFields.put(LoggingAuditTrail.EVENT_TYPE_FIELD_NAME, LoggingAuditTrail.TRANSPORT_ORIGIN_FIELD_VALUE) @@ -594,7 +601,7 @@ public void testAccessDenied() throws Exception { .put(LoggingAuditTrail.ACTION_FIELD_NAME, "_action/bar") .put(LoggingAuditTrail.REQUEST_NAME_FIELD_NAME, message.getClass().getSimpleName()) .put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId); - checkedArrayFields.put(LoggingAuditTrail.PRINCIPAL_ROLES_FIELD_NAME, roles); + checkedArrayFields.put(PRINCIPAL_ROLES_FIELD_NAME, (String[]) authorizationInfo.asMap().get(PRINCIPAL_ROLES_FIELD_NAME)); subject(authentication, checkedFields); restOrTransportOrigin(message, threadContext, checkedFields); indicesRequest(message, checkedFields, checkedArrayFields); @@ -610,7 +617,7 @@ public void testAccessDenied() throws Exception { .put("xpack.security.audit.logfile.events.exclude", "access_denied") .build(); auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - auditTrail.accessDenied(requestId, authentication, "_action", message, roles); + auditTrail.accessDenied(requestId, authentication, "_action", message, authorizationInfo); assertEmptyLog(logger); } @@ -784,14 +791,15 @@ public void testConnectionGranted() throws Exception { public void testRunAsGranted() throws Exception { final TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext); - final String[] roles = randomArray(0, 4, String[]::new, () -> randomBoolean() ? null : randomAlphaOfLengthBetween(1, 4)); + final String[] expectedRoles = randomArray(0, 4, String[]::new, () -> randomBoolean() ? 
null : randomAlphaOfLengthBetween(1, 4)); + final AuthorizationInfo authorizationInfo = () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, expectedRoles); final Authentication authentication = new Authentication( new User("running as", new String[] { "r2" }, new User("_username", new String[] { "r1" })), new RealmRef("authRealm", "test", "foo"), new RealmRef("lookRealm", "up", "by")); final String requestId = randomRequestId(); - auditTrail.runAsGranted(requestId, authentication, "_action", message, roles); + auditTrail.runAsGranted(requestId, authentication, "_action", message, authorizationInfo); final MapBuilder checkedFields = new MapBuilder<>(commonFields); final MapBuilder checkedArrayFields = new MapBuilder<>(); checkedFields.put(LoggingAuditTrail.EVENT_TYPE_FIELD_NAME, LoggingAuditTrail.TRANSPORT_ORIGIN_FIELD_VALUE) @@ -803,7 +811,7 @@ public void testRunAsGranted() throws Exception { .put(LoggingAuditTrail.ACTION_FIELD_NAME, "_action") .put(LoggingAuditTrail.REQUEST_NAME_FIELD_NAME, message.getClass().getSimpleName()) .put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId); - checkedArrayFields.put(LoggingAuditTrail.PRINCIPAL_ROLES_FIELD_NAME, roles); + checkedArrayFields.put(PRINCIPAL_ROLES_FIELD_NAME, (String[]) authorizationInfo.asMap().get(PRINCIPAL_ROLES_FIELD_NAME)); restOrTransportOrigin(message, threadContext, checkedFields); indicesRequest(message, checkedFields, checkedArrayFields); opaqueId(threadContext, checkedFields); @@ -817,20 +825,21 @@ public void testRunAsGranted() throws Exception { .put("xpack.security.audit.logfile.events.exclude", "run_as_granted") .build(); auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - auditTrail.runAsGranted(requestId, authentication, "_action", message, roles); + auditTrail.runAsGranted(requestId, authentication, "_action", message, authorizationInfo); assertEmptyLog(logger); } public void testRunAsDenied() throws Exception { final TransportMessage message = randomBoolean() ? new MockMessage(threadContext) : new MockIndicesRequest(threadContext); - final String[] roles = randomArray(0, 4, String[]::new, () -> randomAlphaOfLengthBetween(1, 4)); + final String[] expectedRoles = randomArray(0, 4, String[]::new, () -> randomBoolean() ? 
null : randomAlphaOfLengthBetween(1, 4)); + final AuthorizationInfo authorizationInfo = () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, expectedRoles); final Authentication authentication = new Authentication( new User("running as", new String[] { "r2" }, new User("_username", new String[] { "r1" })), new RealmRef("authRealm", "test", "foo"), new RealmRef("lookRealm", "up", "by")); final String requestId = randomRequestId(); - auditTrail.runAsDenied(requestId, authentication, "_action", message, roles); + auditTrail.runAsDenied(requestId, authentication, "_action", message, authorizationInfo); final MapBuilder checkedFields = new MapBuilder<>(commonFields); final MapBuilder checkedArrayFields = new MapBuilder<>(); checkedFields.put(LoggingAuditTrail.EVENT_TYPE_FIELD_NAME, LoggingAuditTrail.TRANSPORT_ORIGIN_FIELD_VALUE) @@ -842,7 +851,7 @@ public void testRunAsDenied() throws Exception { .put(LoggingAuditTrail.ACTION_FIELD_NAME, "_action") .put(LoggingAuditTrail.REQUEST_NAME_FIELD_NAME, message.getClass().getSimpleName()) .put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId); - checkedArrayFields.put(LoggingAuditTrail.PRINCIPAL_ROLES_FIELD_NAME, roles); + checkedArrayFields.put(PRINCIPAL_ROLES_FIELD_NAME, (String[]) authorizationInfo.asMap().get(PRINCIPAL_ROLES_FIELD_NAME)); restOrTransportOrigin(message, threadContext, checkedFields); indicesRequest(message, checkedFields, checkedArrayFields); opaqueId(threadContext, checkedFields); @@ -856,7 +865,7 @@ public void testRunAsDenied() throws Exception { .put("xpack.security.audit.logfile.events.exclude", "run_as_denied") .build(); auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); - auditTrail.runAsDenied(requestId, authentication, "_action", message, roles); + auditTrail.runAsDenied(requestId, authentication, "_action", message, authorizationInfo); assertEmptyLog(logger); } @@ -962,7 +971,8 @@ public void testRequestsWithoutIndices() throws Exception { .build(); auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); final User user = new User("_username", new String[] { "r1" }); - final String role = randomAlphaOfLengthBetween(1, 6); + final AuthorizationInfo authorizationInfo = + () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, new String[] { randomAlphaOfLengthBetween(1, 6) }); final String realm = randomAlphaOfLengthBetween(1, 6); // transport messages without indices final TransportMessage[] messages = new TransportMessage[] { new MockMessage(threadContext), @@ -983,10 +993,10 @@ public void testRequestsWithoutIndices() throws Exception { auditTrail.authenticationFailed("_req_id", realm, new MockToken(), "_action", message); assertThat(output.size(), is(logEntriesCount++)); assertThat(output.get(logEntriesCount - 2), not(containsString("indices="))); - auditTrail.accessGranted("_req_id", createAuthentication(), "_action", message, new String[]{role}); + auditTrail.accessGranted("_req_id", createAuthentication(), "_action", message, authorizationInfo); assertThat(output.size(), is(logEntriesCount++)); assertThat(output.get(logEntriesCount - 2), not(containsString("indices="))); - auditTrail.accessDenied("_req_id", createAuthentication(), "_action", message, new String[]{role}); + auditTrail.accessDenied("_req_id", createAuthentication(), "_action", message, authorizationInfo); assertThat(output.size(), is(logEntriesCount++)); assertThat(output.get(logEntriesCount - 2), not(containsString("indices="))); auditTrail.tamperedRequest("_req_id", "_action", 
message); @@ -995,10 +1005,10 @@ public void testRequestsWithoutIndices() throws Exception { auditTrail.tamperedRequest("_req_id", user, "_action", message); assertThat(output.size(), is(logEntriesCount++)); assertThat(output.get(logEntriesCount - 2), not(containsString("indices="))); - auditTrail.runAsGranted("_req_id", createAuthentication(), "_action", message, new String[]{role}); + auditTrail.runAsGranted("_req_id", createAuthentication(), "_action", message, authorizationInfo); assertThat(output.size(), is(logEntriesCount++)); assertThat(output.get(logEntriesCount - 2), not(containsString("indices="))); - auditTrail.runAsDenied("_req_id", createAuthentication(), "_action", message, new String[]{role}); + auditTrail.runAsDenied("_req_id", createAuthentication(), "_action", message, authorizationInfo); assertThat(output.size(), is(logEntriesCount++)); assertThat(output.get(logEntriesCount - 2), not(containsString("indices="))); auditTrail.authenticationSuccess("_req_id", realm, user, "_action", message); @@ -1047,7 +1057,7 @@ private void assertMsg(Logger logger, Map checkFields, Map x + "," + y) .orElse("") + "]"; final Pattern logEntryFieldPattern = Pattern.compile(Pattern.quote("\"" + checkArrayField.getKey() + "\":" + quotedValue)); - assertThat("Field " + checkArrayField.getKey() + " value mismatch. Expected " + quotedValue, + assertThat("Field " + checkArrayField.getKey() + " value mismatch. Expected " + quotedValue + ".\nLog line: " + logLine, logEntryFieldPattern.matcher(logLine).find(), is(true)); // remove checked field logLine = logEntryFieldPattern.matcher(logLine).replaceFirst(""); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index 9b40f7dc0e68d..f9eb3fbd75766 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -28,13 +28,10 @@ import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; -import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache; -import org.elasticsearch.xpack.core.security.authz.permission.Role; -import org.elasticsearch.xpack.core.security.authz.permission.LimitedRole; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.user.User; -import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; +import org.elasticsearch.xpack.security.authc.ApiKeyService.ApiKeyRoleDescriptors; import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore; import org.junit.After; import org.junit.Before; @@ -53,7 +50,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; import static org.mockito.Matchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -191,26 +187,14 @@ public void testGetRolesForApiKeyNotInContext() throws Exception { final Authentication authentication = new Authentication(new User("joe"), new 
RealmRef("apikey", "apikey", "node"), null, Version.CURRENT, AuthenticationType.API_KEY, authMetadata); - CompositeRolesStore rolesStore = mock(CompositeRolesStore.class); - doAnswer(invocationOnMock -> { - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; - Collection descriptors = (Collection) invocationOnMock.getArguments()[0]; - if (descriptors.size() != 1) { - listener.onFailure(new IllegalStateException("descriptors was empty!")); - } else if (descriptors.iterator().next().getName().equals("superuser")) { - listener.onResponse(ReservedRolesStore.SUPERUSER_ROLE); - } else { - listener.onFailure(new IllegalStateException("unexpected role name " + descriptors.iterator().next().getName())); - } - return Void.TYPE; - }).when(rolesStore).buildAndCacheRoleFromDescriptors(any(Collection.class), any(String.class), any(ActionListener.class)); ApiKeyService service = new ApiKeyService(Settings.EMPTY, Clock.systemUTC(), null, null, - ClusterServiceUtils.createClusterService(threadPool), rolesStore); + ClusterServiceUtils.createClusterService(threadPool)); - PlainActionFuture roleFuture = new PlainActionFuture<>(); - service.getRoleForApiKey(authentication, rolesStore, roleFuture); - Role role = roleFuture.get(); - assertThat(role.names(), arrayContaining("superuser")); + PlainActionFuture roleFuture = new PlainActionFuture<>(); + service.getRoleForApiKey(authentication, roleFuture); + ApiKeyRoleDescriptors result = roleFuture.get(); + assertThat(result.getRoleDescriptors().size(), is(1)); + assertThat(result.getRoleDescriptors().get(0).getName(), is("superuser")); } public void testGetRolesForApiKey() throws Exception { @@ -257,39 +241,21 @@ public void testGetRolesForApiKey() throws Exception { return null; } ).when(privilegesStore).getPrivileges(any(Collection.class), any(Collection.class), any(ActionListener.class)); - - CompositeRolesStore rolesStore = mock(CompositeRolesStore.class); - doAnswer(invocationOnMock -> { - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; - Collection descriptors = (Collection) invocationOnMock.getArguments()[0]; - if (descriptors.size() != 1) { - listener.onFailure(new IllegalStateException("descriptors was empty!")); - } else if (descriptors.iterator().next().getName().equals("a role")) { - CompositeRolesStore.buildRoleFromDescriptors(descriptors, new FieldPermissionsCache(Settings.EMPTY), - privilegesStore, ActionListener.wrap(r -> listener.onResponse(r), listener::onFailure)); - } else if (descriptors.iterator().next().getName().equals("limited role")) { - CompositeRolesStore.buildRoleFromDescriptors(descriptors, new FieldPermissionsCache(Settings.EMPTY), - privilegesStore, ActionListener.wrap(r -> listener.onResponse(r), listener::onFailure)); - } else { - listener.onFailure(new IllegalStateException("unexpected role name " + descriptors.iterator().next().getName())); - } - return Void.TYPE; - }).when(rolesStore).buildAndCacheRoleFromDescriptors(any(Collection.class), any(String.class), any(ActionListener.class)); ApiKeyService service = new ApiKeyService(Settings.EMPTY, Clock.systemUTC(), null, null, - ClusterServiceUtils.createClusterService(threadPool), rolesStore); + ClusterServiceUtils.createClusterService(threadPool)); - PlainActionFuture roleFuture = new PlainActionFuture<>(); - service.getRoleForApiKey(authentication, rolesStore, roleFuture); - Role role = roleFuture.get(); + PlainActionFuture roleFuture = new PlainActionFuture<>(); + service.getRoleForApiKey(authentication, roleFuture); + 
ApiKeyRoleDescriptors result = roleFuture.get(); if (emptyApiKeyRoleDescriptor) { - assertThat(role, instanceOf(Role.class)); - assertThat(role.names(), arrayContaining("limited role")); + assertNull(result.getLimitedByRoleDescriptors()); + assertThat(result.getRoleDescriptors().size(), is(1)); + assertThat(result.getRoleDescriptors().get(0).getName(), is("limited role")); } else { - assertThat(role, instanceOf(LimitedRole.class)); - LimitedRole limitedRole = (LimitedRole) role; - assertThat(limitedRole.names(), arrayContaining("a role")); - assertThat(limitedRole.limitedBy(), is(notNullValue())); - assertThat(limitedRole.limitedBy().names(), arrayContaining("limited role")); + assertThat(result.getRoleDescriptors().size(), is(1)); + assertThat(result.getLimitedByRoleDescriptors().size(), is(1)); + assertThat(result.getRoleDescriptors().get(0).getName(), is("a role")); + assertThat(result.getLimitedByRoleDescriptors().get(0).getName(), is("limited role")); } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index 4c1b0737254ad..7c6e9428f0759 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -66,7 +66,7 @@ import org.elasticsearch.xpack.core.security.authc.Realm.Factory; import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; -import org.elasticsearch.xpack.core.security.authz.permission.Role; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.EmptyAuthorizationInfo; import org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.xpack.core.security.user.User; @@ -74,7 +74,6 @@ import org.elasticsearch.xpack.security.audit.AuditUtil; import org.elasticsearch.xpack.security.authc.AuthenticationService.Authenticator; import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; -import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.junit.After; import org.junit.Before; @@ -211,8 +210,7 @@ licenseState, threadContext, mock(ReservedRealm.class), Arrays.asList(firstRealm return null; }).when(securityIndex).checkIndexVersionThenExecute(any(Consumer.class), any(Runnable.class)); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); - apiKeyService = new ApiKeyService(settings, Clock.systemUTC(), client, securityIndex, clusterService, - mock(CompositeRolesStore.class)); + apiKeyService = new ApiKeyService(settings, Clock.systemUTC(), client, securityIndex, clusterService); tokenService = new TokenService(settings, Clock.systemUTC(), client, securityIndex, clusterService); service = new AuthenticationService(settings, realms, auditTrail, new DefaultAuthenticationFailureHandler(Collections.emptyMap()), threadPool, new AnonymousUser(settings), tokenService, apiKeyService); @@ -1022,7 +1020,7 @@ public void testRunAsWithEmptyRunAsUsernameRest() throws Exception { fail("exception should be thrown"); } catch (ElasticsearchException e) { String reqId = expectAuditRequestId(); - 
verify(auditTrail).runAsDenied(eq(reqId), any(Authentication.class), eq(restRequest), eq(Role.EMPTY.names())); + verify(auditTrail).runAsDenied(eq(reqId), any(Authentication.class), eq(restRequest), eq(EmptyAuthorizationInfo.INSTANCE)); verifyNoMoreInteractions(auditTrail); } } @@ -1041,7 +1039,8 @@ public void testRunAsWithEmptyRunAsUsername() throws Exception { authenticateBlocking("_action", message, null); fail("exception should be thrown"); } catch (ElasticsearchException e) { - verify(auditTrail).runAsDenied(eq(reqId), any(Authentication.class), eq("_action"), eq(message), eq(Role.EMPTY.names())); + verify(auditTrail).runAsDenied(eq(reqId), any(Authentication.class), eq("_action"), eq(message), + eq(EmptyAuthorizationInfo.INSTANCE)); verifyNoMoreInteractions(auditTrail); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index b9181a4341185..7c4cd564e9993 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -10,11 +10,10 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.CompositeIndicesRequest; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.MockIndicesRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; -import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; -import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; @@ -71,10 +70,9 @@ import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.action.update.UpdateRequest; -import org.elasticsearch.client.Client; -import org.elasticsearch.client.security.user.privileges.Role.ClusterPrivilegeName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -88,7 +86,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext.StoredContext; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.license.GetLicenseAction; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportActionProxy; @@ -97,50 +95,48 @@ import org.elasticsearch.xpack.core.security.action.privilege.DeletePrivilegesRequest; import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction; import org.elasticsearch.xpack.core.security.action.user.AuthenticateRequest; -import org.elasticsearch.xpack.core.security.action.user.AuthenticateRequestBuilder; -import 
org.elasticsearch.xpack.core.security.action.user.ChangePasswordAction; -import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequest; -import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequestBuilder; -import org.elasticsearch.xpack.core.security.action.user.DeleteUserAction; -import org.elasticsearch.xpack.core.security.action.user.PutUserAction; -import org.elasticsearch.xpack.core.security.action.user.UserRequest; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; -import org.elasticsearch.xpack.core.security.authc.Authentication.AuthenticationType; import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.core.security.authc.DefaultAuthenticationFailureHandler; -import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; -import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; -import org.elasticsearch.xpack.core.security.authc.ldap.LdapRealmSettings; -import org.elasticsearch.xpack.core.security.authc.pki.PkiRealmSettings; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; import org.elasticsearch.xpack.core.security.authz.IndicesAndAliasesResolverField; +import org.elasticsearch.xpack.core.security.authz.ResolvedIndices; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.core.security.authz.permission.Role; -import org.elasticsearch.xpack.core.security.authz.permission.LimitedRole; import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.security.user.ElasticUser; +import org.elasticsearch.xpack.core.security.user.KibanaUser; import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.core.security.user.XPackSecurityUser; import org.elasticsearch.xpack.core.security.user.XPackUser; import org.elasticsearch.xpack.security.audit.AuditTrailService; import org.elasticsearch.xpack.security.audit.AuditUtil; -import org.elasticsearch.xpack.security.authc.ApiKeyService; -import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; import org.elasticsearch.xpack.security.authz.store.NativePrivilegeStore; import 
org.elasticsearch.xpack.sql.action.SqlQueryAction; import org.elasticsearch.xpack.sql.action.SqlQueryRequest; import org.junit.Before; +import org.mockito.ArgumentMatcher; +import org.mockito.Matchers; import org.mockito.Mockito; import java.io.IOException; +import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -151,19 +147,20 @@ import java.util.Map; import java.util.Set; import java.util.UUID; +import java.util.concurrent.CountDownLatch; import java.util.function.Predicate; import static java.util.Arrays.asList; import static org.elasticsearch.test.SecurityTestsUtils.assertAuthenticationException; import static org.elasticsearch.test.SecurityTestsUtils.assertThrowsAuthorizationException; import static org.elasticsearch.test.SecurityTestsUtils.assertThrowsAuthorizationExceptionRunAs; -import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; +import static org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail.PRINCIPAL_ROLES_FIELD_NAME; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.INTERNAL_SECURITY_INDEX; +import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -175,7 +172,6 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.verifyZeroInteractions; import static org.mockito.Mockito.when; public class AuthorizationServiceTests extends ESTestCase { @@ -186,7 +182,6 @@ public class AuthorizationServiceTests extends ESTestCase { private ThreadPool threadPool; private Map roleMap = new HashMap<>(); private CompositeRolesStore rolesStore; - private ApiKeyService apiKeyService; @SuppressWarnings("unchecked") @Before @@ -198,6 +193,7 @@ public void setup() { .build(); final ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + when(clusterService.state()).thenReturn(ClusterState.EMPTY_STATE); auditTrail = mock(AuditTrailService.class); threadContext = new ThreadContext(settings); threadPool = mock(ThreadPool.class); @@ -216,8 +212,9 @@ public void setup() { ).when(privilegesStore).getPrivileges(any(Collection.class), any(Collection.class), any(ActionListener.class)); doAnswer((i) -> { - ActionListener callback = (ActionListener) i.getArguments()[1]; - Set names = (Set) i.getArguments()[0]; + ActionListener callback = (ActionListener) i.getArguments()[2]; + User user = (User) i.getArguments()[0]; + Set names = new HashSet<>(Arrays.asList(user.roles())); assertNotNull(names); Set roleDescriptors = new HashSet<>(); for (String name : names) { @@ -235,21 +232,16 @@ public void setup() { ); } return Void.TYPE; - }).when(rolesStore).roles(any(Set.class), any(ActionListener.class)); - apiKeyService = mock(ApiKeyService.class); - authorizationService = new AuthorizationService(settings, rolesStore, clusterService, auditTrail, - new 
DefaultAuthenticationFailureHandler(Collections.emptyMap()), threadPool, new AnonymousUser(settings), - apiKeyService, new FieldPermissionsCache(Settings.EMPTY)); + }).when(rolesStore).getRoles(any(User.class), any(Authentication.class), any(ActionListener.class)); + roleMap.put(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName(), ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR); + authorizationService = new AuthorizationService(settings, rolesStore, clusterService, + auditTrail, new DefaultAuthenticationFailureHandler(Collections.emptyMap()), threadPool, new AnonymousUser(settings), null, + Collections.emptySet(), new XPackLicenseState(settings)); } private void authorize(Authentication authentication, String action, TransportRequest request) { PlainActionFuture future = new PlainActionFuture<>(); - AuthorizationUtils.AsyncAuthorizer authorizer = new AuthorizationUtils.AsyncAuthorizer(authentication, future, - (userRoles, runAsRoles) -> { - authorizationService.authorize(authentication, action, request, userRoles, runAsRoles); - future.onResponse(null); - }); - authorizer.authorize(authorizationService); + authorizationService.authorize(authentication, action, request, future); future.actionGet(); } @@ -272,7 +264,8 @@ public void testActionsForSystemUserIsAuthorized() throws IOException { "indices:admin/settings/update" }; for (String action : actions) { authorize(authentication, action, request); - verify(auditTrail).accessGranted(requestId, authentication, action, request, new String[] { SystemUser.ROLE_NAME }); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(action), eq(request), + authzInfoRoles(new String[] { SystemUser.ROLE_NAME })); } verifyNoMoreInteractions(auditTrail); @@ -285,7 +278,8 @@ public void testIndicesActionsForSystemUserWhichAreNotAuthorized() throws IOExce assertThrowsAuthorizationException( () -> authorize(authentication, "indices:", request), "indices:", SystemUser.INSTANCE.principal()); - verify(auditTrail).accessDenied(requestId, authentication, "indices:", request, new String[]{SystemUser.ROLE_NAME}); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq("indices:"), eq(request), + authzInfoRoles(new String[]{SystemUser.ROLE_NAME})); verifyNoMoreInteractions(auditTrail); } @@ -296,8 +290,8 @@ public void testClusterAdminActionsForSystemUserWhichAreNotAuthorized() throws I assertThrowsAuthorizationException( () -> authorize(authentication, "cluster:admin/whatever", request), "cluster:admin/whatever", SystemUser.INSTANCE.principal()); - verify(auditTrail).accessDenied(requestId, authentication, "cluster:admin/whatever", request, - new String[] { SystemUser.ROLE_NAME }); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq("cluster:admin/whatever"), eq(request), + authzInfoRoles(new String[] { SystemUser.ROLE_NAME })); verifyNoMoreInteractions(auditTrail); } @@ -308,8 +302,8 @@ public void testClusterAdminSnapshotStatusActionForSystemUserWhichIsNotAuthorize assertThrowsAuthorizationException( () -> authorize(authentication, "cluster:admin/snapshot/status", request), "cluster:admin/snapshot/status", SystemUser.INSTANCE.principal()); - verify(auditTrail).accessDenied(requestId, authentication, "cluster:admin/snapshot/status", request, - new String[] { SystemUser.ROLE_NAME }); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq("cluster:admin/snapshot/status"), eq(request), + authzInfoRoles(new String[] { SystemUser.ROLE_NAME })); verifyNoMoreInteractions(auditTrail); } @@ -329,7 +323,8 
@@ public void testAuthorizeUsingConditionalPrivileges() throws IOException { roleMap.put("role1", role); authorize(authentication, DeletePrivilegesAction.NAME, request); - verify(auditTrail).accessGranted(requestId, authentication, DeletePrivilegesAction.NAME, request, new String[]{role.getName()}); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(DeletePrivilegesAction.NAME), eq(request), + authzInfoRoles(new String[]{role.getName()})); verifyNoMoreInteractions(auditTrail); } @@ -351,7 +346,8 @@ public void testAuthorizationDeniedWhenConditionalPrivilegesDoNotMatch() throws assertThrowsAuthorizationException( () -> authorize(authentication, DeletePrivilegesAction.NAME, request), DeletePrivilegesAction.NAME, "user1"); - verify(auditTrail).accessDenied(requestId, authentication, DeletePrivilegesAction.NAME, request, new String[]{role.getName()}); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq(DeletePrivilegesAction.NAME), eq(request), + authzInfoRoles(new String[]{role.getName()})); verifyNoMoreInteractions(auditTrail); } @@ -363,7 +359,8 @@ public void testNoRolesCausesDenial() throws IOException { assertThrowsAuthorizationException( () -> authorize(authentication, "indices:a", request), "indices:a", "test user"); - verify(auditTrail).accessDenied(requestId, authentication, "indices:a", request, Role.EMPTY.names()); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq("indices:a"), eq(request), + authzInfoRoles(Role.EMPTY.names())); verifyNoMoreInteractions(auditTrail); } @@ -374,7 +371,8 @@ public void testUserWithNoRolesCanPerformRemoteSearch() throws IOException { mockEmptyMetaData(); final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); authorize(authentication, SearchAction.NAME, request); - verify(auditTrail).accessGranted(requestId, authentication, SearchAction.NAME, request, Role.EMPTY.names()); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(SearchAction.NAME), eq(request), + authzInfoRoles(Role.EMPTY.names())); verifyNoMoreInteractions(auditTrail); } @@ -392,7 +390,8 @@ public void testUserWithNoRolesCannotPerformLocalSearch() throws IOException { assertThrowsAuthorizationException( () -> authorize(authentication, SearchAction.NAME, request), SearchAction.NAME, "test user"); - verify(auditTrail).accessDenied(requestId, authentication, SearchAction.NAME, request, Role.EMPTY.names()); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq(SearchAction.NAME), eq(request), + authzInfoRoles(Role.EMPTY.names())); verifyNoMoreInteractions(auditTrail); } @@ -409,7 +408,8 @@ public void testUserWithNoRolesCanPerformMultiClusterSearch() throws IOException assertThrowsAuthorizationException( () -> authorize(authentication, SearchAction.NAME, request), SearchAction.NAME, "test user"); - verify(auditTrail).accessDenied(requestId, authentication, SearchAction.NAME, request, Role.EMPTY.names()); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq(SearchAction.NAME), eq(request), + authzInfoRoles(Role.EMPTY.names())); verifyNoMoreInteractions(auditTrail); } @@ -421,7 +421,8 @@ public void testUserWithNoRolesCannotSql() throws IOException { assertThrowsAuthorizationException( () -> authorize(authentication, SqlQueryAction.NAME, request), SqlQueryAction.NAME, "test user"); - verify(auditTrail).accessDenied(requestId, authentication, SqlQueryAction.NAME, request, Role.EMPTY.names()); + verify(auditTrail).accessDenied(eq(requestId), 
eq(authentication), eq(SqlQueryAction.NAME), eq(request), + authzInfoRoles(Role.EMPTY.names())); verifyNoMoreInteractions(auditTrail); } /** @@ -437,7 +438,8 @@ public void testRemoteIndicesOnlyWorkWithApplicableRequestTypes() throws IOExcep assertThrowsAuthorizationException( () -> authorize(authentication, DeleteIndexAction.NAME, request), DeleteIndexAction.NAME, "test user"); - verify(auditTrail).accessDenied(requestId, authentication, DeleteIndexAction.NAME, request, Role.EMPTY.names()); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq(DeleteIndexAction.NAME), eq(request), + authzInfoRoles(Role.EMPTY.names())); verifyNoMoreInteractions(auditTrail); } @@ -454,7 +456,7 @@ public void testUnknownRoleCausesDenial() throws IOException { assertThrowsAuthorizationException( () -> authorize(authentication, action, request), action, "test user"); - verify(auditTrail).accessDenied(requestId, authentication, action, request, Role.EMPTY.names()); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq(action), eq(request), authzInfoRoles(Role.EMPTY.names())); verifyNoMoreInteractions(auditTrail); } @@ -462,14 +464,15 @@ public void testThatNonIndicesAndNonClusterActionIsDenied() throws IOException { final TransportRequest request = mock(TransportRequest.class); final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); final Authentication authentication = createAuthentication(new User("test user", "a_all")); - final RoleDescriptor role = new RoleDescriptor("a_role", null, + final RoleDescriptor role = new RoleDescriptor("a_all", null, new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null); roleMap.put("a_all", role); assertThrowsAuthorizationException( () -> authorize(authentication, "whatever", request), "whatever", "test user"); - verify(auditTrail).accessDenied(requestId, authentication, "whatever", request, new String[]{role.getName()}); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq("whatever"), eq(request), + authzInfoRoles(new String[]{role.getName()})); verifyNoMoreInteractions(auditTrail); } @@ -483,14 +486,15 @@ public void testThatRoleWithNoIndicesIsDenied() throws IOException { TransportRequest request = tuple.v2(); final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); final Authentication authentication = createAuthentication(new User("test user", "no_indices")); - RoleDescriptor role = new RoleDescriptor("a_role", null, null, null); + RoleDescriptor role = new RoleDescriptor("no_indices", null, null, null); roleMap.put("no_indices", role); mockEmptyMetaData(); assertThrowsAuthorizationException( () -> authorize(authentication, action, request), action, "test user"); - verify(auditTrail).accessDenied(requestId, authentication, action, request, new String[]{role.getName()}); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq(action), eq(request), + authzInfoRoles(new String[]{role.getName()})); verifyNoMoreInteractions(auditTrail); } @@ -500,11 +504,12 @@ public void testElasticUserAuthorizedForNonChangePasswordRequestsWhenNotInSetupM final Tuple request = randomCompositeRequest(); authorize(authentication, request.v1(), request.v2()); - verify(auditTrail).accessGranted(requestId, authentication, request.v1(), request.v2(), new String[]{ElasticUser.ROLE_NAME}); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(request.v1()), eq(request.v2()), + authzInfoRoles(new String[]{ElasticUser.ROLE_NAME})); } - 
public void testSearchAgainstEmptyCluster() throws IOException { - RoleDescriptor role = new RoleDescriptor("a_role", null, + public void testSearchAgainstEmptyCluster() throws Exception { + RoleDescriptor role = new RoleDescriptor("a_all", null, new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null); final Authentication authentication = createAuthentication(new User("test user", "a_all")); final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); @@ -520,7 +525,8 @@ public void testSearchAgainstEmptyCluster() throws IOException { assertThrowsAuthorizationException( () -> authorize(authentication, SearchAction.NAME, searchRequest), SearchAction.NAME, "test user"); - verify(auditTrail).accessDenied(requestId, authentication, SearchAction.NAME, searchRequest, new String[]{role.getName()}); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq(SearchAction.NAME), eq(searchRequest), + authzInfoRoles(new String[]{role.getName()})); verifyNoMoreInteractions(auditTrail); } @@ -528,18 +534,27 @@ public void testSearchAgainstEmptyCluster() throws IOException { //ignore_unavailable and allow_no_indices both set to true, user is not authorized for this index nor does it exist SearchRequest searchRequest = new SearchRequest("does_not_exist") .indicesOptions(IndicesOptions.fromOptions(true, true, true, false)); - authorize(authentication, SearchAction.NAME, searchRequest); - verify(auditTrail).accessGranted(requestId, authentication, SearchAction.NAME, searchRequest, new String[]{role.getName()}); - final IndicesAccessControl indicesAccessControl = threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); - final IndicesAccessControl.IndexAccessControl indexAccessControl = - indicesAccessControl.getIndexPermissions(IndicesAndAliasesResolverField.NO_INDEX_PLACEHOLDER); - assertFalse(indexAccessControl.getFieldPermissions().hasFieldLevelSecurity()); - assertFalse(indexAccessControl.getDocumentPermissions().hasDocumentLevelPermissions()); + final ActionListener listener = ActionListener.wrap(ignore -> { + final IndicesAccessControl indicesAccessControl = + threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); + assertNotNull(indicesAccessControl); + final IndicesAccessControl.IndexAccessControl indexAccessControl = + indicesAccessControl.getIndexPermissions(IndicesAndAliasesResolverField.NO_INDEX_PLACEHOLDER); + assertFalse(indexAccessControl.getFieldPermissions().hasFieldLevelSecurity()); + assertFalse(indexAccessControl.getDocumentPermissions().hasDocumentLevelPermissions()); + }, e -> { + fail(e.getMessage()); + }); + final CountDownLatch latch = new CountDownLatch(1); + authorizationService.authorize(authentication, SearchAction.NAME, searchRequest, new LatchedActionListener<>(listener, latch)); + latch.await(); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(SearchAction.NAME), eq(searchRequest), + authzInfoRoles(new String[]{role.getName()})); } } - public void testScrollRelatedRequestsAllowed() throws IOException { - RoleDescriptor role = new RoleDescriptor("a_role", null, + public void testScrollRelatedRequestsAllowed() { + RoleDescriptor role = new RoleDescriptor("a_all", null, new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null); final Authentication authentication = createAuthentication(new User("test user", "a_all")); roleMap.put("a_all", role); @@ -548,42 +563,42 @@ public void 
testScrollRelatedRequestsAllowed() throws IOException { final ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); authorize(authentication, ClearScrollAction.NAME, clearScrollRequest); - verify(auditTrail).accessGranted(requestId, authentication, ClearScrollAction.NAME, clearScrollRequest, - new String[]{role.getName()}); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(ClearScrollAction.NAME), eq(clearScrollRequest), + authzInfoRoles(new String[]{role.getName()})); final SearchScrollRequest searchScrollRequest = new SearchScrollRequest(); authorize(authentication, SearchScrollAction.NAME, searchScrollRequest); - verify(auditTrail).accessGranted(requestId, authentication, SearchScrollAction.NAME, searchScrollRequest, - new String[]{role.getName()}); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(SearchScrollAction.NAME), eq(searchScrollRequest), + authzInfoRoles(new String[]{role.getName()})); // We have to use a mock request for other Scroll actions as the actual requests are package private to SearchTransportService final TransportRequest request = mock(TransportRequest.class); authorize(authentication, SearchTransportService.CLEAR_SCROLL_CONTEXTS_ACTION_NAME, request); - verify(auditTrail).accessGranted(requestId, authentication, SearchTransportService.CLEAR_SCROLL_CONTEXTS_ACTION_NAME, request, - new String[]{role.getName()}); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(SearchTransportService.CLEAR_SCROLL_CONTEXTS_ACTION_NAME), + eq(request), authzInfoRoles(new String[]{role.getName()})); authorize(authentication, SearchTransportService.FETCH_ID_SCROLL_ACTION_NAME, request); - verify(auditTrail).accessGranted(requestId, authentication, SearchTransportService.FETCH_ID_SCROLL_ACTION_NAME, request, - new String[]{role.getName()}); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(SearchTransportService.FETCH_ID_SCROLL_ACTION_NAME), + eq(request), authzInfoRoles(new String[]{role.getName()})); authorize(authentication, SearchTransportService.QUERY_FETCH_SCROLL_ACTION_NAME, request); - verify(auditTrail).accessGranted(requestId, authentication, SearchTransportService.QUERY_FETCH_SCROLL_ACTION_NAME, request, - new String[]{role.getName()}); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(SearchTransportService.QUERY_FETCH_SCROLL_ACTION_NAME), + eq(request), authzInfoRoles(new String[]{role.getName()})); authorize(authentication, SearchTransportService.QUERY_SCROLL_ACTION_NAME, request); - verify(auditTrail).accessGranted(requestId, authentication, SearchTransportService.QUERY_SCROLL_ACTION_NAME, request, - new String[]{role.getName()}); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(SearchTransportService.QUERY_SCROLL_ACTION_NAME), + eq(request), authzInfoRoles(new String[]{role.getName()})); authorize(authentication, SearchTransportService.FREE_CONTEXT_SCROLL_ACTION_NAME, request); - verify(auditTrail).accessGranted(requestId, authentication, SearchTransportService.FREE_CONTEXT_SCROLL_ACTION_NAME, request, - new String[]{role.getName()}); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(SearchTransportService.FREE_CONTEXT_SCROLL_ACTION_NAME), + eq(request), authzInfoRoles(new String[]{role.getName()})); verifyNoMoreInteractions(auditTrail); } public void testAuthorizeIndicesFailures() throws IOException { TransportRequest request = new GetIndexRequest().indices("b"); ClusterState state = 
mockEmptyMetaData(); - RoleDescriptor role = new RoleDescriptor("a_role", null, + RoleDescriptor role = new RoleDescriptor("a_all", null, new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null); final Authentication authentication = createAuthentication(new User("test user", "a_all")); roleMap.put("a_all", role); @@ -592,7 +607,8 @@ public void testAuthorizeIndicesFailures() throws IOException { assertThrowsAuthorizationException( () -> authorize(authentication, "indices:a", request), "indices:a", "test user"); - verify(auditTrail).accessDenied(requestId, authentication, "indices:a", request, new String[]{role.getName()}); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq("indices:a"), eq(request), + authzInfoRoles(new String[]{role.getName()})); verifyNoMoreInteractions(auditTrail); verify(clusterService, times(1)).state(); verify(state, times(1)).metaData(); @@ -602,7 +618,7 @@ public void testCreateIndexWithAliasWithoutPermissions() throws IOException { CreateIndexRequest request = new CreateIndexRequest("a"); request.alias(new Alias("a2")); ClusterState state = mockEmptyMetaData(); - RoleDescriptor role = new RoleDescriptor("a_role", null, + RoleDescriptor role = new RoleDescriptor("a_all", null, new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null); final Authentication authentication = createAuthentication(new User("test user", "a_all")); roleMap.put("a_all", role); @@ -611,7 +627,10 @@ public void testCreateIndexWithAliasWithoutPermissions() throws IOException { assertThrowsAuthorizationException( () -> authorize(authentication, CreateIndexAction.NAME, request), IndicesAliasesAction.NAME, "test user"); - verify(auditTrail).accessDenied(requestId, authentication, IndicesAliasesAction.NAME, request, new String[]{role.getName()}); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(CreateIndexAction.NAME), eq(request), + authzInfoRoles(new String[]{role.getName()})); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq(IndicesAliasesAction.NAME), eq(request), + authzInfoRoles(new String[]{role.getName()})); verifyNoMoreInteractions(auditTrail); verify(clusterService).state(); verify(state, times(1)).metaData(); @@ -629,7 +648,10 @@ public void testCreateIndexWithAlias() throws IOException { authorize(authentication, CreateIndexAction.NAME, request); - verify(auditTrail).accessGranted(requestId, authentication, CreateIndexAction.NAME, request, new String[]{role.getName()}); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(CreateIndexAction.NAME), eq(request), + authzInfoRoles(new String[]{role.getName()})); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq("indices:admin/aliases"), eq(request), + authzInfoRoles(new String[]{role.getName()})); verifyNoMoreInteractions(auditTrail); verify(clusterService).state(); verify(state, times(1)).metaData(); @@ -641,8 +663,8 @@ public void testDenialForAnonymousUser() throws IOException { Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "a_all").build(); final AnonymousUser anonymousUser = new AnonymousUser(settings); authorizationService = new AuthorizationService(settings, rolesStore, clusterService, auditTrail, - new DefaultAuthenticationFailureHandler(Collections.emptyMap()), threadPool, anonymousUser, apiKeyService, - new FieldPermissionsCache(settings)); + new 
DefaultAuthenticationFailureHandler(Collections.emptyMap()), threadPool, anonymousUser, null, Collections.emptySet(), + new XPackLicenseState(settings)); RoleDescriptor role = new RoleDescriptor("a_all", null, new IndicesPrivileges[] { IndicesPrivileges.builder().indices("a").privileges("all").build() }, null); @@ -653,7 +675,8 @@ public void testDenialForAnonymousUser() throws IOException { assertThrowsAuthorizationException( () -> authorize(authentication, "indices:a", request), "indices:a", anonymousUser.principal()); - verify(auditTrail).accessDenied(requestId, authentication, "indices:a", request, new String[]{role.getName()}); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq("indices:a"), eq(request), + authzInfoRoles(new String[]{role.getName()})); verifyNoMoreInteractions(auditTrail); verify(clusterService, times(1)).state(); verify(state, times(1)).metaData(); @@ -668,8 +691,8 @@ public void testDenialForAnonymousUserAuthorizationExceptionDisabled() throws IO .build(); final Authentication authentication = createAuthentication(new AnonymousUser(settings)); authorizationService = new AuthorizationService(settings, rolesStore, clusterService, auditTrail, - new DefaultAuthenticationFailureHandler(Collections.emptyMap()), threadPool, new AnonymousUser(settings), - apiKeyService, new FieldPermissionsCache(settings)); + new DefaultAuthenticationFailureHandler(Collections.emptyMap()), threadPool, new AnonymousUser(settings), null, + Collections.emptySet(), new XPackLicenseState(settings)); RoleDescriptor role = new RoleDescriptor("a_all", null, new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null); @@ -679,7 +702,8 @@ public void testDenialForAnonymousUserAuthorizationExceptionDisabled() throws IO final ElasticsearchSecurityException securityException = expectThrows(ElasticsearchSecurityException.class, () -> authorize(authentication, "indices:a", request)); assertAuthenticationException(securityException, containsString("action [indices:a] requires authentication")); - verify(auditTrail).accessDenied(requestId, authentication, "indices:a", request, new String[]{role.getName()}); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq("indices:a"), eq(request), + authzInfoRoles(new String[]{role.getName()})); verifyNoMoreInteractions(auditTrail); verify(clusterService, times(1)).state(); verify(state, times(1)).metaData(); @@ -700,7 +724,8 @@ public void testAuditTrailIsRecordedWhenIndexWildcardThrowsError() throws IOExce () -> authorize(authentication, GetIndexAction.NAME, request)); assertThat(nfe.getIndex(), is(notNullValue())); assertThat(nfe.getIndex().getName(), is("not-an-index-*")); - verify(auditTrail).accessDenied(requestId, authentication, GetIndexAction.NAME, request, new String[]{role.getName()}); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq(GetIndexAction.NAME), eq(request), + authzInfoRoles(new String[]{role.getName()})); verifyNoMoreInteractions(auditTrail); verify(clusterService).state(); verify(state, times(1)).metaData(); @@ -714,23 +739,24 @@ public void testRunAsRequestWithNoRolesUser() throws IOException { assertThrowsAuthorizationExceptionRunAs( () -> authorize(authentication, "indices:a", request), "indices:a", "test user", "run as me"); // run as [run as me] - verify(auditTrail).runAsDenied(requestId, authentication, "indices:a", request, Role.EMPTY.names()); + verify(auditTrail).runAsDenied(eq(requestId), eq(authentication), eq("indices:a"), 
eq(request), + authzInfoRoles(Role.EMPTY.names())); verifyNoMoreInteractions(auditTrail); } public void testRunAsRequestWithoutLookedUpBy() throws IOException { final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); AuthenticateRequest request = new AuthenticateRequest("run as me"); - roleMap.put("can run as", ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR); - User user = new User("run as me", Strings.EMPTY_ARRAY, new User("test user", new String[]{"can run as"})); + roleMap.put("superuser", ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR); + User user = new User("run as me", Strings.EMPTY_ARRAY, new User("test user", new String[]{"superuser"})); Authentication authentication = new Authentication(user, new RealmRef("foo", "bar", "baz"), null); authentication.writeToContext(threadContext); assertNotEquals(user.authenticatedUser(), user); assertThrowsAuthorizationExceptionRunAs( () -> authorize(authentication, AuthenticateAction.NAME, request), AuthenticateAction.NAME, "test user", "run as me"); // run as [run as me] - verify(auditTrail).runAsDenied(requestId, authentication, AuthenticateAction.NAME, request, - new String[] { ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName() }); + verify(auditTrail).runAsDenied(eq(requestId), eq(authentication), eq(AuthenticateAction.NAME), eq(request), + authzInfoRoles(new String[] { ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName() })); verifyNoMoreInteractions(auditTrail); } @@ -748,7 +774,8 @@ public void testRunAsRequestRunningAsUnAllowedUser() throws IOException { assertThrowsAuthorizationExceptionRunAs( () -> authorize(authentication, "indices:a", request), "indices:a", "test user", "run as me"); - verify(auditTrail).runAsDenied(requestId, authentication, "indices:a", request, new String[]{role.getName()}); + verify(auditTrail).runAsDenied(eq(requestId), eq(authentication), eq("indices:a"), eq(request), + authzInfoRoles(new String[]{role.getName()})); verifyNoMoreInteractions(auditTrail); } @@ -783,11 +810,14 @@ public void testRunAsRequestWithRunAsUserWithoutPermission() throws IOException assertThrowsAuthorizationExceptionRunAs( () -> authorize(authentication, "indices:a", request), "indices:a", "test user", "run as me"); - verify(auditTrail).runAsGranted(requestId, authentication, "indices:a", request, new String[]{runAsRole.getName()}); + verify(auditTrail).runAsGranted(eq(requestId), eq(authentication), eq("indices:a"), eq(request), + authzInfoRoles(new String[]{runAsRole.getName()})); if (indexExists) { - verify(auditTrail).accessDenied(requestId, authentication, "indices:a", request, new String[]{bRole.getName()}); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq("indices:a"), eq(request), + authzInfoRoles(new String[]{bRole.getName()})); } else { - verify(auditTrail).accessDenied(requestId, authentication, "indices:a", request, Role.EMPTY.names()); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq("indices:a"), eq(request), + authzInfoRoles(Role.EMPTY.names())); } verifyNoMoreInteractions(auditTrail); } @@ -815,8 +845,10 @@ public void testRunAsRequestWithValidPermissions() throws IOException { final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); authorize(authentication, "indices:a", request); - verify(auditTrail).runAsGranted(requestId, authentication, "indices:a", request, new String[]{runAsRole.getName()}); - verify(auditTrail).accessGranted(requestId, authentication, "indices:a", request, new String[]{bRole.getName()}); + 
verify(auditTrail).runAsGranted(eq(requestId), eq(authentication), eq("indices:a"), eq(request), + authzInfoRoles(new String[]{runAsRole.getName()})); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq("indices:a"), eq(request), + authzInfoRoles(new String[]{bRole.getName()})); verifyNoMoreInteractions(auditTrail); } @@ -870,19 +902,22 @@ public void testGrantAllRestrictedUserCannotExecuteOperationAgainstSecurityIndic assertThrowsAuthorizationException( () -> authorize(authentication, action, request), action, "all_access_user"); - verify(auditTrail).accessDenied(requestId, authentication, action, request, new String[]{role.getName()}); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq(action), eq(request), + authzInfoRoles(new String[]{role.getName()})); verifyNoMoreInteractions(auditTrail); } // we should allow waiting for the health of the index or any index if the user has this permission ClusterHealthRequest request = new ClusterHealthRequest(randomFrom(SECURITY_INDEX_NAME, INTERNAL_SECURITY_INDEX)); authorize(authentication, ClusterHealthAction.NAME, request); - verify(auditTrail).accessGranted(requestId, authentication, ClusterHealthAction.NAME, request, new String[]{role.getName()}); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(ClusterHealthAction.NAME), eq(request), + authzInfoRoles(new String[]{role.getName()})); // multiple indices request = new ClusterHealthRequest(SECURITY_INDEX_NAME, INTERNAL_SECURITY_INDEX, "foo", "bar"); authorize(authentication, ClusterHealthAction.NAME, request); - verify(auditTrail).accessGranted(requestId, authentication, ClusterHealthAction.NAME, request, new String[]{role.getName()}); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(ClusterHealthAction.NAME), eq(request), + authzInfoRoles(new String[]{role.getName()})); verifyNoMoreInteractions(auditTrail); final SearchRequest searchRequest = new SearchRequest("_all"); @@ -920,19 +955,20 @@ public void testMonitoringOperationsAgainstSecurityIndexRequireAllowRestricted() for (final Tuple requestTuple : requests) { final String action = requestTuple.v1(); final TransportRequest request = requestTuple.v2(); - try (StoredContext storedContext = threadContext.stashContext()) { + try (StoredContext ignore = threadContext.stashContext()) { final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); final Authentication restrictedUserAuthn = createAuthentication(new User("restricted_user", "restricted_monitor")); assertThrowsAuthorizationException(() -> authorize(restrictedUserAuthn, action, request), action, "restricted_user"); - verify(auditTrail).accessDenied(requestId, restrictedUserAuthn, action, request, new String[] { "restricted_monitor" }); + verify(auditTrail).accessDenied(eq(requestId), eq(restrictedUserAuthn), eq(action), eq(request), + authzInfoRoles(new String[] { "restricted_monitor" })); verifyNoMoreInteractions(auditTrail); } - try (StoredContext storedContext = threadContext.stashContext()) { + try (StoredContext ignore = threadContext.stashContext()) { final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); final Authentication unrestrictedUserAuthn = createAuthentication(new User("unrestricted_user", "unrestricted_monitor")); authorize(unrestrictedUserAuthn, action, request); - verify(auditTrail).accessGranted(requestId, unrestrictedUserAuthn, action, request, - new String[] { "unrestricted_monitor" }); + verify(auditTrail).accessGranted(eq(requestId), 
eq(unrestrictedUserAuthn), eq(action), eq(request), + authzInfoRoles(new String[] { "unrestricted_monitor" })); verifyNoMoreInteractions(auditTrail); } } @@ -980,7 +1016,8 @@ public void testSuperusersCanExecuteOperationAgainstSecurityIndex() throws IOExc try (ThreadContext.StoredContext ignore = threadContext.newStoredContext(false)) { final Authentication authentication = createAuthentication(superuser); authorize(authentication, action, request); - verify(auditTrail).accessGranted(requestId, authentication, action, request, superuser.roles()); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(action), eq(request), + authzInfoRoles(superuser.roles())); } } } @@ -1004,65 +1041,11 @@ public void testSuperusersCanExecuteOperationAgainstSecurityIndexWithWildcard() String action = SearchAction.NAME; SearchRequest request = new SearchRequest("_all"); authorize(authentication, action, request); - verify(auditTrail).accessGranted(requestId, authentication, action, request, superuser.roles()); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(action), eq(request), authzInfoRoles(superuser.roles())); assertThat(request.indices(), arrayContainingInAnyOrder(INTERNAL_SECURITY_INDEX, SECURITY_INDEX_NAME)); } - public void testAnonymousRolesAreAppliedToOtherUsers() throws IOException { - TransportRequest request = new ClusterHealthRequest(); - Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "anonymous_user_role").build(); - final AnonymousUser anonymousUser = new AnonymousUser(settings); - authorizationService = new AuthorizationService(settings, rolesStore, clusterService, auditTrail, - new DefaultAuthenticationFailureHandler(Collections.emptyMap()), threadPool, anonymousUser, apiKeyService, - new FieldPermissionsCache(settings)); - roleMap.put("anonymous_user_role", new RoleDescriptor("anonymous_user_role", new String[]{"all"}, - new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null)); - mockEmptyMetaData(); - AuditUtil.getOrGenerateRequestId(threadContext); - - // sanity check the anonymous user - try (ThreadContext.StoredContext ignore = threadContext.newStoredContext(false)) { - authorize(createAuthentication(anonymousUser), ClusterHealthAction.NAME, request); - } - try (ThreadContext.StoredContext ignore = threadContext.newStoredContext(false)) { - authorize(createAuthentication(anonymousUser), IndicesExistsAction.NAME, new IndicesExistsRequest("a")); - } - - // test the no role user - final User userWithNoRoles = new User("no role user"); - try (ThreadContext.StoredContext ignore = threadContext.newStoredContext(false)) { - authorize(createAuthentication(userWithNoRoles), ClusterHealthAction.NAME, request); - } - try (ThreadContext.StoredContext ignore = threadContext.newStoredContext(false)) { - authorize(createAuthentication(userWithNoRoles), IndicesExistsAction.NAME, new IndicesExistsRequest("a")); - } - } - - public void testDefaultRoleUserWithoutRoles() throws IOException { - PlainActionFuture rolesFuture = new PlainActionFuture<>(); - final User user = new User("no role user"); - authorizationService.roles(user, createAuthentication(user), rolesFuture); - final Role roles = rolesFuture.actionGet(); - assertEquals(Role.EMPTY, roles); - } - - public void testAnonymousUserEnabledRoleAdded() throws IOException { - Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "anonymous_user_role").build(); - final AnonymousUser anonymousUser = new 
AnonymousUser(settings); - authorizationService = new AuthorizationService(settings, rolesStore, clusterService, auditTrail, - new DefaultAuthenticationFailureHandler(Collections.emptyMap()), threadPool, anonymousUser, apiKeyService, - new FieldPermissionsCache(settings)); - roleMap.put("anonymous_user_role", new RoleDescriptor("anonymous_user_role", new String[]{"all"}, - new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null)); - mockEmptyMetaData(); - final User user = new User("no role user"); - PlainActionFuture rolesFuture = new PlainActionFuture<>(); - authorizationService.roles(user, createAuthentication(user), rolesFuture); - final Role roles = rolesFuture.actionGet(); - assertThat(Arrays.asList(roles.names()), hasItem("anonymous_user_role")); - } - - public void testCompositeActionsAreImmediatelyRejected() throws IOException { + public void testCompositeActionsAreImmediatelyRejected() { //if the user has no permission for composite actions against any index, the request fails straight-away in the main action final Tuple compositeRequest = randomCompositeRequest(); final String action = compositeRequest.v1(); @@ -1074,7 +1057,8 @@ public void testCompositeActionsAreImmediatelyRejected() throws IOException { assertThrowsAuthorizationException( () -> authorize(authentication, action, request), action, "test user"); - verify(auditTrail).accessDenied(requestId, authentication, action, request, new String[]{role.getName()}); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq(action), eq(request), + authzInfoRoles(new String[] { role.getName() })); verifyNoMoreInteractions(auditTrail); } @@ -1091,7 +1075,8 @@ public void testCompositeActionsIndicesAreNotChecked() throws IOException { final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); authorize(authentication, action, request); - verify(auditTrail).accessGranted(requestId, authentication, action, request, new String[]{role.getName()}); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(action), eq(request), + authzInfoRoles(new String[] { role.getName() })); verifyNoMoreInteractions(auditTrail); } @@ -1105,7 +1090,7 @@ public void testCompositeActionsMustImplementCompositeIndicesRequest() throws IO null)); IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, () -> authorize(createAuthentication(user), action, request)); - assertThat(illegalStateException.getMessage(), containsString("Composite actions must implement CompositeIndicesRequest")); + assertThat(illegalStateException.getMessage(), containsString("Composite and bulk actions must implement CompositeIndicesRequest")); } public void testCompositeActionsIndicesAreCheckedAtTheShardLevel() throws IOException { @@ -1179,12 +1164,16 @@ public void testAuthorizationOfIndividualBulkItems() throws IOException { final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); authorize(authentication, action, request); - verify(auditTrail).accessDenied(requestId, authentication, DeleteAction.NAME, request, - new String[] { role.getName() }); // alias-1 delete - verify(auditTrail).accessDenied(requestId, authentication, IndexAction.NAME, request, - new String[] { role.getName() }); // alias-2 index - verify(auditTrail).accessGranted(requestId, authentication, action, request, - new String[] { role.getName() }); // bulk request is allowed + verify(auditTrail, times(2)).accessGranted(eq(requestId), eq(authentication), eq(DeleteAction.NAME), 
eq(request), + authzInfoRoles(new String[] { role.getName() })); // concrete-index and alias-2 delete + verify(auditTrail, times(2)).accessGranted(eq(requestId), eq(authentication), eq(IndexAction.NAME), eq(request), + authzInfoRoles(new String[] { role.getName() })); // concrete-index and alias-1 index + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq(DeleteAction.NAME), eq(request), + authzInfoRoles(new String[] { role.getName() })); // alias-1 delete + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq(IndexAction.NAME), eq(request), + authzInfoRoles(new String[] { role.getName() })); // alias-2 index + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(action), eq(request), + authzInfoRoles(new String[] { role.getName() })); // bulk request is allowed verifyNoMoreInteractions(auditTrail); } @@ -1211,10 +1200,13 @@ public void testAuthorizationOfIndividualBulkItemsWithDateMath() throws IOExcept authorize(authentication, action, request); // both deletes should fail - verify(auditTrail, Mockito.times(2)).accessDenied(requestId, authentication, DeleteAction.NAME, request, - new String[]{role.getName()}); + verify(auditTrail, times(2)).accessDenied(eq(requestId), eq(authentication), eq(DeleteAction.NAME), eq(request), + authzInfoRoles(new String[] { role.getName() })); + verify(auditTrail, times(2)).accessGranted(eq(requestId), eq(authentication), eq(IndexAction.NAME), eq(request), + authzInfoRoles(new String[] { role.getName() })); // bulk request is allowed - verify(auditTrail).accessGranted(requestId, authentication, action, request, new String[]{role.getName()}); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(action), eq(request), + authzInfoRoles(new String[]{role.getName()})); verifyNoMoreInteractions(auditTrail); } @@ -1224,148 +1216,6 @@ private BulkShardRequest createBulkShardRequest(String indexName, TriFunction randomCompositeRequest() { switch (randomIntBetween(0, 7)) { case 0: @@ -1392,32 +1242,14 @@ private static Tuple randomCompositeRequest() { private static class MockCompositeIndicesRequest extends TransportRequest implements CompositeIndicesRequest { } - public void testDoesNotUseRolesStoreForXPackUser() { - PlainActionFuture rolesFuture = new PlainActionFuture<>(); - authorizationService.roles(XPackUser.INSTANCE, null, rolesFuture); - final Role roles = rolesFuture.actionGet(); - assertThat(roles, equalTo(XPackUser.ROLE)); - verifyZeroInteractions(rolesStore); - } - - public void testGetRolesForSystemUserThrowsException() { - IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> authorizationService.roles(SystemUser.INSTANCE, - null, null)); - assertEquals("the user [_system] is the system user and we should never try to get its roles", iae.getMessage()); - } - - private Authentication createAuthentication(User user) throws IOException { + private Authentication createAuthentication(User user) { RealmRef lookedUpBy = user.authenticatedUser() == user ? null : new RealmRef("looked", "up", "by"); Authentication authentication = new Authentication(user, new RealmRef("test", "test", "foo"), lookedUpBy); - authentication.writeToContext(threadContext); - return authentication; - } - - private Authentication createAuthentication(User user, AuthenticationType type) throws IOException { - RealmRef lookedUpBy = user.authenticatedUser() == user ? 
null : new RealmRef("looked", "up", "by"); - Authentication authentication = - new Authentication(user, new RealmRef("test", "test", "foo"), lookedUpBy, Version.CURRENT, type, Collections.emptyMap()); - authentication.writeToContext(threadContext); + try { + authentication.writeToContext(threadContext); + } catch (IOException e) { + throw new UncheckedIOException("caught unexpected IOException", e); + } return authentication; } @@ -1434,8 +1266,10 @@ public void testProxyRequestFailsOnNonProxyAction() { TransportRequest transportRequest = TransportActionProxy.wrapRequest(node, request); final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); User user = new User("test user", "role"); - IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, + ElasticsearchSecurityException ese = expectThrows(ElasticsearchSecurityException.class, () -> authorize(createAuthentication(user), "indices:some/action", transportRequest)); + assertThat(ese.getCause(), instanceOf(IllegalStateException.class)); + IllegalStateException illegalStateException = (IllegalStateException) ese.getCause(); assertThat(illegalStateException.getMessage(), startsWith("originalRequest is a proxy request for: [org.elasticsearch.transport.TransportRequest$")); assertThat(illegalStateException.getMessage(), endsWith("] but action: [indices:some/action] isn't")); @@ -1445,8 +1279,10 @@ public void testProxyRequestFailsOnNonProxyRequest() { TransportRequest request = TransportRequest.Empty.INSTANCE; User user = new User("test user", "role"); AuditUtil.getOrGenerateRequestId(threadContext); - IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, + ElasticsearchSecurityException ese = expectThrows(ElasticsearchSecurityException.class, () -> authorize(createAuthentication(user), TransportActionProxy.getProxyAction("indices:some/action"), request)); + assertThat(ese.getCause(), instanceOf(IllegalStateException.class)); + IllegalStateException illegalStateException = (IllegalStateException) ese.getCause(); assertThat(illegalStateException.getMessage(), startsWith("originalRequest is not a proxy request: [org.elasticsearch.transport.TransportRequest$")); assertThat(illegalStateException.getMessage(), @@ -1465,12 +1301,13 @@ public void testProxyRequestAuthenticationDenied() throws IOException { assertThrowsAuthorizationException( () -> authorize(authentication, action, transportRequest), action, "test user"); - verify(auditTrail).accessDenied(requestId, authentication, action, proxiedRequest, new String[]{role.getName()}); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq(action), eq(proxiedRequest), + authzInfoRoles(new String[]{role.getName()})); verifyNoMoreInteractions(auditTrail); } - public void testProxyRequestAuthenticationGrantedWithAllPrivileges() throws IOException { - RoleDescriptor role = new RoleDescriptor("a_role", null, + public void testProxyRequestAuthenticationGrantedWithAllPrivileges() { + RoleDescriptor role = new RoleDescriptor("a_all", null, new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("all").build()}, null); final Authentication authentication = createAuthentication(new User("test user", "a_all")); roleMap.put("a_all", role); @@ -1483,11 +1320,12 @@ public void testProxyRequestAuthenticationGrantedWithAllPrivileges() throws IOEx final TransportRequest transportRequest = TransportActionProxy.wrapRequest(node, clearScrollRequest); final String action = 
TransportActionProxy.getProxyAction(SearchTransportService.CLEAR_SCROLL_CONTEXTS_ACTION_NAME); authorize(authentication, action, transportRequest); - verify(auditTrail).accessGranted(requestId, authentication, action, clearScrollRequest, new String[]{role.getName()}); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(action), eq(clearScrollRequest), + authzInfoRoles(new String[]{role.getName()})); } - public void testProxyRequestAuthenticationGranted() throws IOException { - RoleDescriptor role = new RoleDescriptor("a_role", null, + public void testProxyRequestAuthenticationGranted() { + RoleDescriptor role = new RoleDescriptor("a_all", null, new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("read_cross_cluster").build()}, null); final Authentication authentication = createAuthentication(new User("test user", "a_all")); roleMap.put("a_all", role); @@ -1499,12 +1337,13 @@ public void testProxyRequestAuthenticationGranted() throws IOException { final TransportRequest transportRequest = TransportActionProxy.wrapRequest(node, clearScrollRequest); final String action = TransportActionProxy.getProxyAction(SearchTransportService.CLEAR_SCROLL_CONTEXTS_ACTION_NAME); authorize(authentication, action, transportRequest); - verify(auditTrail).accessGranted(requestId, authentication, action, clearScrollRequest, new String[]{role.getName()}); + verify(auditTrail).accessGranted(eq(requestId), eq(authentication), eq(action), eq(clearScrollRequest), + authzInfoRoles(new String[]{role.getName()})); } public void testProxyRequestAuthenticationDeniedWithReadPrivileges() throws IOException { final Authentication authentication = createAuthentication(new User("test user", "a_all")); - final RoleDescriptor role = new RoleDescriptor("a_role", null, + final RoleDescriptor role = new RoleDescriptor("a_all", null, new IndicesPrivileges[]{IndicesPrivileges.builder().indices("a").privileges("read").build()}, null); roleMap.put("a_all", role); final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); @@ -1515,41 +1354,153 @@ public void testProxyRequestAuthenticationDeniedWithReadPrivileges() throws IOEx String action = TransportActionProxy.getProxyAction(SearchTransportService.CLEAR_SCROLL_CONTEXTS_ACTION_NAME); assertThrowsAuthorizationException( () -> authorize(authentication, action, transportRequest), action, "test user"); - verify(auditTrail).accessDenied(requestId, authentication, action, clearScrollRequest, new String[]{role.getName()}); + verify(auditTrail).accessDenied(eq(requestId), eq(authentication), eq(action), eq(clearScrollRequest), + authzInfoRoles(new String[]{role.getName()})); } - public void testApiKeyAuthUsesApiKeyService() throws IOException { - AuditUtil.getOrGenerateRequestId(threadContext); - final Authentication authentication = createAuthentication(new User("test api key user", "api_key"), AuthenticationType.API_KEY); - doAnswer(invocationOnMock -> { - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; - listener.onResponse(ReservedRolesStore.SUPERUSER_ROLE); - return Void.TYPE; - }).when(apiKeyService).getRoleForApiKey(eq(authentication), eq(rolesStore), any(ActionListener.class)); + public void testAuthorizationEngineSelection() { + final AuthorizationEngine engine = new AuthorizationEngine() { + @Override + public void resolveAuthorizationInfo(RequestInfo requestInfo, ActionListener listener) { + throw new UnsupportedOperationException("not implemented"); + } - authorize(authentication, 
"cluster:admin/foo", new ClearScrollRequest()); - verify(apiKeyService).getRoleForApiKey(eq(authentication), eq(rolesStore), any(ActionListener.class)); - verifyZeroInteractions(rolesStore); + @Override + public void authorizeRunAs(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, + ActionListener listener) { + throw new UnsupportedOperationException("not implemented"); + } + + @Override + public void authorizeClusterAction(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, + ActionListener listener) { + throw new UnsupportedOperationException("not implemented"); + } + + @Override + public void authorizeIndexAction(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, + AsyncSupplier indicesAsyncSupplier, + Map aliasOrIndexLookup, + ActionListener listener) { + throw new UnsupportedOperationException("not implemented"); + } + + @Override + public void loadAuthorizedIndices(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, + Map aliasOrIndexLookup, ActionListener> listener) { + throw new UnsupportedOperationException("not implemented"); + } + + @Override + public void validateIndexPermissionsAreSubset(RequestInfo requestInfo, AuthorizationInfo authorizationInfo, + Map> indexNameToNewNames, + ActionListener listener) { + throw new UnsupportedOperationException("not implemented"); + } + + @Override + public void checkPrivileges(Authentication authentication, AuthorizationInfo authorizationInfo, + HasPrivilegesRequest hasPrivilegesRequest, + Collection applicationPrivilegeDescriptors, + ActionListener listener) { + throw new UnsupportedOperationException("not implemented"); + } + + @Override + public void getUserPrivileges(Authentication authentication, AuthorizationInfo authorizationInfo, + GetUserPrivilegesRequest request, ActionListener listener) { + throw new UnsupportedOperationException("not implemented"); + } + }; + + XPackLicenseState licenseState = mock(XPackLicenseState.class); + when(licenseState.isAuthorizationEngineAllowed()).thenReturn(true); + authorizationService = new AuthorizationService(Settings.EMPTY, rolesStore, clusterService, + auditTrail, new DefaultAuthenticationFailureHandler(Collections.emptyMap()), threadPool, new AnonymousUser(Settings.EMPTY), + engine, Collections.emptySet(), licenseState); + Authentication authentication; + try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { + authentication = createAuthentication(new User("test user", "a_all")); + assertEquals(engine, authorizationService.getAuthorizationEngine(authentication)); + when(licenseState.isAuthorizationEngineAllowed()).thenReturn(false); + assertThat(authorizationService.getAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); + } + + when(licenseState.isAuthorizationEngineAllowed()).thenReturn(true); + try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { + authentication = createAuthentication(new User("runas", new String[]{"runas_role"}, new User("runner", "runner_role"))); + assertEquals(engine, authorizationService.getAuthorizationEngine(authentication)); + assertEquals(engine, authorizationService.getRunAsAuthorizationEngine(authentication)); + when(licenseState.isAuthorizationEngineAllowed()).thenReturn(false); + assertThat(authorizationService.getAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); + assertThat(authorizationService.getRunAsAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); + } + + when(licenseState.isAuthorizationEngineAllowed()).thenReturn(true); 
+ try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { + authentication = createAuthentication(new User("runas", new String[]{"runas_role"}, new ElasticUser(true))); + assertEquals(engine, authorizationService.getAuthorizationEngine(authentication)); + assertNotEquals(engine, authorizationService.getRunAsAuthorizationEngine(authentication)); + assertThat(authorizationService.getRunAsAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); + when(licenseState.isAuthorizationEngineAllowed()).thenReturn(false); + assertThat(authorizationService.getAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); + assertThat(authorizationService.getRunAsAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); + } + + when(licenseState.isAuthorizationEngineAllowed()).thenReturn(true); + try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { + authentication = createAuthentication(new User("elastic", new String[]{"superuser"}, new User("runner", "runner_role"))); + assertNotEquals(engine, authorizationService.getAuthorizationEngine(authentication)); + assertThat(authorizationService.getAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); + assertEquals(engine, authorizationService.getRunAsAuthorizationEngine(authentication)); + when(licenseState.isAuthorizationEngineAllowed()).thenReturn(false); + assertThat(authorizationService.getAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); + assertThat(authorizationService.getRunAsAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); + } + + when(licenseState.isAuthorizationEngineAllowed()).thenReturn(true); + try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { + authentication = createAuthentication(new User("kibana", new String[]{"kibana_system"}, new ElasticUser(true))); + assertNotEquals(engine, authorizationService.getAuthorizationEngine(authentication)); + assertThat(authorizationService.getAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); + assertNotEquals(engine, authorizationService.getRunAsAuthorizationEngine(authentication)); + assertThat(authorizationService.getRunAsAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); + when(licenseState.isAuthorizationEngineAllowed()).thenReturn(false); + assertThat(authorizationService.getAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); + assertThat(authorizationService.getRunAsAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); + } + + when(licenseState.isAuthorizationEngineAllowed()).thenReturn(true); + try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { + authentication = createAuthentication(randomFrom(XPackUser.INSTANCE, XPackSecurityUser.INSTANCE, + new ElasticUser(true), new KibanaUser(true))); + assertNotEquals(engine, authorizationService.getRunAsAuthorizationEngine(authentication)); + assertThat(authorizationService.getRunAsAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); + when(licenseState.isAuthorizationEngineAllowed()).thenReturn(false); + assertThat(authorizationService.getAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); + assertThat(authorizationService.getRunAsAuthorizationEngine(authentication), instanceOf(RBACEngine.class)); + } } - public void testApiKeyAuthUsesApiKeyServiceWithScopedRole() throws IOException { - final Role fromRole = Role.builder("a-role").cluster(Collections.singleton(ClusterPrivilegeName.ALL), 
Collections.emptyList()) - .build(); - final Role scopedByRole = Role.builder("scoped-role") - .cluster(Collections.singleton(ClusterPrivilegeName.MANAGE_SECURITY), Collections.emptyList()).build(); - final Role role = LimitedRole.createLimitedRole(fromRole, scopedByRole); + static AuthorizationInfo authzInfoRoles(String[] expectedRoles) { + return Matchers.argThat(new RBACAuthorizationInfoRoleMatcher(expectedRoles)); + } - AuditUtil.getOrGenerateRequestId(threadContext); - final Authentication authentication = createAuthentication(new User("test api key user", "api_key"), AuthenticationType.API_KEY); - doAnswer(invocationOnMock -> { - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; - listener.onResponse(role); - return Void.TYPE; - }).when(apiKeyService).getRoleForApiKey(eq(authentication), eq(rolesStore), any(ActionListener.class)); + private static class RBACAuthorizationInfoRoleMatcher extends ArgumentMatcher { + + private final String[] wanted; + + RBACAuthorizationInfoRoleMatcher(String[] expectedRoles) { + this.wanted = expectedRoles; + } - assertThrowsAuthorizationException(() -> authorize(authentication, "cluster:admin/foo", new ClearScrollRequest()), - "cluster:admin/foo", "test api key user"); - verify(apiKeyService).getRoleForApiKey(eq(authentication), eq(rolesStore), any(ActionListener.class)); - verifyZeroInteractions(rolesStore); + @Override + public boolean matches(Object item) { + if (item instanceof AuthorizationInfo) { + final String[] found = (String[]) ((AuthorizationInfo) item).asMap().get(PRINCIPAL_ROLES_FIELD_NAME); + return Arrays.equals(wanted, found); + } + return false; + } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java index ffb20fbf9ac91..c0dc86315888b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizedIndicesTests.java @@ -36,9 +36,9 @@ public class AuthorizedIndicesTests extends ESTestCase { public void testAuthorizedIndicesUserWithoutRoles() { - AuthorizedIndices authorizedIndices = new AuthorizedIndices(Role.EMPTY, "", MetaData.EMPTY_META_DATA); - List list = authorizedIndices.get(); - assertTrue(list.isEmpty()); + List authorizedIndices = + RBACEngine.resolveAuthorizedIndicesFromRole(Role.EMPTY, "", MetaData.EMPTY_META_DATA.getAliasAndIndexLookup()); + assertTrue(authorizedIndices.isEmpty()); } public void testAuthorizedIndicesUserWithSomeRoles() { @@ -70,8 +70,8 @@ public void testAuthorizedIndicesUserWithSomeRoles() { final Set descriptors = Sets.newHashSet(aStarRole, bRole); CompositeRolesStore.buildRoleFromDescriptors(descriptors, new FieldPermissionsCache(Settings.EMPTY), null, future); Role roles = future.actionGet(); - AuthorizedIndices authorizedIndices = new AuthorizedIndices(roles, SearchAction.NAME, metaData); - List list = authorizedIndices.get(); + List list = + RBACEngine.resolveAuthorizedIndicesFromRole(roles, SearchAction.NAME, metaData.getAliasAndIndexLookup()); assertThat(list, containsInAnyOrder("a1", "a2", "aaaaaa", "b", "ab")); assertFalse(list.contains("bbbbb")); assertFalse(list.contains("ba")); @@ -81,9 +81,16 @@ public void testAuthorizedIndicesUserWithSomeRoles() { public void testAuthorizedIndicesUserWithSomeRolesEmptyMetaData() { Role role = 
Role.builder("role").add(IndexPrivilege.ALL, "*").build();
-        AuthorizedIndices authorizedIndices = new AuthorizedIndices(role, SearchAction.NAME, MetaData.EMPTY_META_DATA);
-        List<String> list = authorizedIndices.get();
-        assertTrue(list.isEmpty());
+        List<String> authorizedIndices =
+            RBACEngine.resolveAuthorizedIndicesFromRole(role, SearchAction.NAME, MetaData.EMPTY_META_DATA.getAliasAndIndexLookup());
+        assertTrue(authorizedIndices.isEmpty());
+    }
+
+    public void testSecurityIndicesAreRemovedFromRegularUser() {
+        Role role = Role.builder("user_role").add(IndexPrivilege.ALL, "*").cluster(ClusterPrivilege.ALL).build();
+        List<String> authorizedIndices =
+            RBACEngine.resolveAuthorizedIndicesFromRole(role, SearchAction.NAME, MetaData.EMPTY_META_DATA.getAliasAndIndexLookup());
+        assertTrue(authorizedIndices.isEmpty());
     }

     public void testSecurityIndicesAreRestrictedForDefaultRole() {
@@ -103,11 +110,11 @@ public void testSecurityIndicesAreRestrictedForDefaultRole() {
             .build(), true)
         .build();
-        AuthorizedIndices authorizedIndices = new AuthorizedIndices(role, SearchAction.NAME, metaData);
-        List<String> list = authorizedIndices.get();
-        assertThat(list, containsInAnyOrder("an-index", "another-index"));
-        assertThat(list, not(contains(RestrictedIndicesNames.INTERNAL_SECURITY_INDEX)));
-        assertThat(list, not(contains(RestrictedIndicesNames.SECURITY_INDEX_NAME)));
+        List<String> authorizedIndices =
+            RBACEngine.resolveAuthorizedIndicesFromRole(role, SearchAction.NAME, metaData.getAliasAndIndexLookup());
+        assertThat(authorizedIndices, containsInAnyOrder("an-index", "another-index"));
+        assertThat(authorizedIndices, not(contains(RestrictedIndicesNames.INTERNAL_SECURITY_INDEX)));
+        assertThat(authorizedIndices, not(contains(RestrictedIndicesNames.SECURITY_INDEX_NAME)));
     }

     public void testSecurityIndicesAreNotRemovedFromUnrestrictedRole() {
@@ -127,14 +134,14 @@ public void testSecurityIndicesAreNotRemovedFromUnrestrictedRole() {
             .build(), true)
         .build();
-        AuthorizedIndices authorizedIndices = new AuthorizedIndices(role, SearchAction.NAME, metaData);
-        List<String> list = authorizedIndices.get();
-        assertThat(list, containsInAnyOrder("an-index", "another-index", SecurityIndexManager.SECURITY_INDEX_NAME,
-            SecurityIndexManager.INTERNAL_SECURITY_INDEX));
+        List<String> authorizedIndices =
+            RBACEngine.resolveAuthorizedIndicesFromRole(role, SearchAction.NAME, metaData.getAliasAndIndexLookup());
+        assertThat(authorizedIndices, containsInAnyOrder(
+            "an-index", "another-index", SecurityIndexManager.SECURITY_INDEX_NAME, SecurityIndexManager.INTERNAL_SECURITY_INDEX));

-        AuthorizedIndices authorizedIndicesSuperUser = new AuthorizedIndices(ReservedRolesStore.SUPERUSER_ROLE, SearchAction.NAME,
-            metaData);
-        assertThat(authorizedIndicesSuperUser.get(), containsInAnyOrder("an-index", "another-index",
-            SecurityIndexManager.SECURITY_INDEX_NAME, SecurityIndexManager.INTERNAL_SECURITY_INDEX));
+        List<String> authorizedIndicesSuperUser =
+            RBACEngine.resolveAuthorizedIndicesFromRole(ReservedRolesStore.SUPERUSER_ROLE, SearchAction.NAME, metaData.getAliasAndIndexLookup());
+        assertThat(authorizedIndicesSuperUser, containsInAnyOrder(
+            "an-index", "another-index", SecurityIndexManager.SECURITY_INDEX_NAME, SecurityIndexManager.INTERNAL_SECURITY_INDEX));
     }
 }
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java
index da6ff09737160..2f09b74ac3d53 100644
---
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -49,25 +49,20 @@ import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; import org.elasticsearch.search.internal.ShardSearchTransportRequest; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.graph.action.GraphExploreAction; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; -import org.elasticsearch.xpack.core.security.authc.DefaultAuthenticationFailureHandler; import org.elasticsearch.xpack.core.security.authz.IndicesAndAliasesResolverField; +import org.elasticsearch.xpack.core.security.authz.ResolvedIndices; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache; import org.elasticsearch.xpack.core.security.authz.permission.Role; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; -import org.elasticsearch.xpack.core.security.user.AnonymousUser; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.security.user.XPackSecurityUser; import org.elasticsearch.xpack.core.security.user.XPackUser; -import org.elasticsearch.xpack.security.audit.AuditTrailService; -import org.elasticsearch.xpack.security.authc.ApiKeyService; -import org.elasticsearch.xpack.security.authz.IndicesAndAliasesResolver.ResolvedIndices; import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.elasticsearch.xpack.security.test.SecurityTestUtils; @@ -77,7 +72,6 @@ import org.junit.Before; import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -96,6 +90,7 @@ import static org.hamcrest.Matchers.not; import static org.mockito.Matchers.any; import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doCallRealMethod; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -106,10 +101,10 @@ public class IndicesAndAliasesResolverTests extends ESTestCase { private User userNoIndices; private CompositeRolesStore rolesStore; private MetaData metaData; - private AuthorizationService authzService; private IndicesAndAliasesResolver defaultIndicesResolver; private IndexNameExpressionResolver indexNameExpressionResolver; private Map roleMap; + private FieldPermissionsCache fieldPermissionsCache; @Before public void setup() { @@ -149,6 +144,7 @@ public void setup() { metaData = SecurityTestUtils.addAliasToMetaData(metaData, securityIndexName); } this.metaData = metaData; + this.fieldPermissionsCache = new FieldPermissionsCache(settings); user = new User("user", "role"); userDashIndices = new User("dash", "dash"); @@ -168,33 +164,31 @@ public void setup() { roleMap.put(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName(), ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR); final FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); doAnswer((i) -> { - ActionListener callback = - 
(ActionListener) i.getArguments()[1]; - Set names = (Set) i.getArguments()[0]; - assertNotNull(names); - Set roleDescriptors = new HashSet<>(); - for (String name : names) { - RoleDescriptor descriptor = roleMap.get(name); - if (descriptor != null) { - roleDescriptors.add(descriptor); - } + ActionListener callback = + (ActionListener) i.getArguments()[1]; + Set names = (Set) i.getArguments()[0]; + assertNotNull(names); + Set roleDescriptors = new HashSet<>(); + for (String name : names) { + RoleDescriptor descriptor = roleMap.get(name); + if (descriptor != null) { + roleDescriptors.add(descriptor); } + } - if (roleDescriptors.isEmpty()) { - callback.onResponse(Role.EMPTY); - } else { - CompositeRolesStore.buildRoleFromDescriptors(roleDescriptors, fieldPermissionsCache, null, - ActionListener.wrap(r -> callback.onResponse(r), callback::onFailure) - ); - } - return Void.TYPE; - }).when(rolesStore).roles(any(Set.class), any(ActionListener.class)); + if (roleDescriptors.isEmpty()) { + callback.onResponse(Role.EMPTY); + } else { + CompositeRolesStore.buildRoleFromDescriptors(roleDescriptors, fieldPermissionsCache, null, + ActionListener.wrap(r -> callback.onResponse(r), callback::onFailure) + ); + } + return Void.TYPE; + }).when(rolesStore).roles(any(Set.class), any(ActionListener.class)); + doCallRealMethod().when(rolesStore).getRoles(any(User.class), any(Authentication.class), any(ActionListener.class)); ClusterService clusterService = mock(ClusterService.class); when(clusterService.getClusterSettings()).thenReturn(new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); - authzService = new AuthorizationService(settings, rolesStore, clusterService, - mock(AuditTrailService.class), new DefaultAuthenticationFailureHandler(Collections.emptyMap()), mock(ThreadPool.class), - new AnonymousUser(settings), mock(ApiKeyService.class), new FieldPermissionsCache(settings)); defaultIndicesResolver = new IndicesAndAliasesResolver(settings, clusterService); } @@ -583,7 +577,7 @@ public void testSearchWithRemoteAndLocalIndices() { public void testSearchWithRemoteAndLocalWildcards() { SearchRequest request = new SearchRequest("*:foo", "r*:bar*", "remote:baz*", "bar*", "foofoo"); request.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), true, false)); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, SearchAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, SearchAction.NAME); final ResolvedIndices resolved = resolveIndices(request, authorizedIndices); assertThat(resolved.getRemote(), containsInAnyOrder("remote:foo", "other_remote:foo", "remote:bar*", "remote:baz*")); assertThat(resolved.getLocal(), containsInAnyOrder("bar", "foofoo")); @@ -701,7 +695,7 @@ public void testResolveIndicesAliasesRequestDeleteActions() { IndicesAliasesRequest request = new IndicesAliasesRequest(); request.addAliasAction(AliasActions.remove().index("foo").alias("foofoobar")); request.addAliasAction(AliasActions.remove().index("foofoo").alias("barbaz")); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, IndicesAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, IndicesAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //the union of all indices and aliases gets returned String[] expectedIndices = new String[]{"foo", "foofoobar", "foofoo", "barbaz"}; @@ -717,7 +711,7 @@ public void testResolveIndicesAliasesRequestDeleteActionsMissingIndex() { 
IndicesAliasesRequest request = new IndicesAliasesRequest(); request.addAliasAction(AliasActions.remove().index("foo").alias("foofoobar")); request.addAliasAction(AliasActions.remove().index("missing_index").alias("missing_alias")); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, IndicesAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, IndicesAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //the union of all indices and aliases gets returned, doesn't matter is some of them don't exist String[] expectedIndices = new String[]{"foo", "foofoobar", "missing_index", "missing_alias"}; @@ -733,7 +727,7 @@ public void testResolveWildcardsIndicesAliasesRequestDeleteActions() { IndicesAliasesRequest request = new IndicesAliasesRequest(); request.addAliasAction(AliasActions.remove().index("foo*").alias("foofoobar")); request.addAliasAction(AliasActions.remove().index("bar*").alias("barbaz")); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, IndicesAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, IndicesAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //union of all resolved indices and aliases gets returned, based on what user is authorized for String[] expectedIndices = new String[]{"foofoobar", "foofoo", "bar", "barbaz"}; @@ -750,7 +744,7 @@ public void testResolveAliasesWildcardsIndicesAliasesRequestDeleteActions() { IndicesAliasesRequest request = new IndicesAliasesRequest(); request.addAliasAction(AliasActions.remove().index("*").alias("foo*")); request.addAliasAction(AliasActions.remove().index("*bar").alias("foo*")); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, IndicesAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, IndicesAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //union of all resolved indices and aliases gets returned, based on what user is authorized for //note that the index side will end up containing matching aliases too, which is fine, as es core would do @@ -768,7 +762,7 @@ public void testResolveAllAliasesWildcardsIndicesAliasesRequestDeleteActions() { IndicesAliasesRequest request = new IndicesAliasesRequest(); request.addAliasAction(AliasActions.remove().index("*").alias("_all")); request.addAliasAction(AliasActions.remove().index("_all").aliases("_all", "explicit")); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, IndicesAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, IndicesAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //union of all resolved indices and aliases gets returned, based on what user is authorized for //note that the index side will end up containing matching aliases too, which is fine, as es core would do @@ -796,7 +790,7 @@ public void testResolveWildcardsIndicesAliasesRequestAddAndDeleteActions() { IndicesAliasesRequest request = new IndicesAliasesRequest(); request.addAliasAction(AliasActions.remove().index("foo*").alias("foofoobar")); request.addAliasAction(AliasActions.add().index("bar*").alias("foofoobar")); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, IndicesAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, IndicesAliasesAction.NAME); List indices = resolveIndices(request, 
authorizedIndices).getLocal(); //union of all resolved indices and aliases gets returned, based on what user is authorized for String[] expectedIndices = new String[]{"foofoobar", "foofoo", "bar"}; @@ -811,7 +805,7 @@ public void testResolveWildcardsIndicesAliasesRequestAddAndDeleteActions() { public void testResolveGetAliasesRequestStrict() { GetAliasesRequest request = new GetAliasesRequest("alias1").indices("foo", "foofoo"); request.indicesOptions(IndicesOptions.fromOptions(false, randomBoolean(), randomBoolean(), randomBoolean())); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //the union of all indices and aliases gets returned String[] expectedIndices = new String[]{"alias1", "foo", "foofoo"}; @@ -824,7 +818,7 @@ public void testResolveGetAliasesRequestStrict() { public void testResolveGetAliasesRequestIgnoreUnavailable() { GetAliasesRequest request = new GetAliasesRequest("alias1").indices("foo", "foofoo"); request.indicesOptions(IndicesOptions.fromOptions(true, randomBoolean(), randomBoolean(), randomBoolean())); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); String[] expectedIndices = new String[]{"alias1", "foofoo"}; assertThat(indices.size(), equalTo(expectedIndices.length)); @@ -838,7 +832,7 @@ public void testResolveGetAliasesRequestMissingIndexStrict() { request.indicesOptions(IndicesOptions.fromOptions(false, randomBoolean(), true, randomBoolean())); request.indices("missing"); request.aliases("alias2"); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //the union of all indices and aliases gets returned, missing is not an existing index/alias but that doesn't make any difference String[] expectedIndices = new String[]{"alias2", "missing"}; @@ -871,7 +865,7 @@ public void testGetAliasesRequestMissingIndexStrict() { request.indicesOptions(IndicesOptions.fromOptions(false, randomBoolean(), randomBoolean(), randomBoolean())); request.indices("missing"); request.aliases("alias2"); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); String[] expectedIndices = new String[]{"alias2", "missing"}; assertThat(indices.size(), equalTo(expectedIndices.length)); @@ -885,7 +879,7 @@ public void testResolveWildcardsGetAliasesRequestStrictExpand() { request.indicesOptions(IndicesOptions.fromOptions(false, randomBoolean(), true, true)); request.aliases("alias1"); request.indices("foo*"); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //the union of all resolved indices and aliases gets returned, based on indices and aliases that user is authorized for String[] expectedIndices = 
new String[]{"alias1", "foofoo", "foofoo-closed", "foofoobar", "foobarfoo"}; @@ -901,7 +895,7 @@ public void testResolveWildcardsGetAliasesRequestStrictExpandOpen() { request.indicesOptions(IndicesOptions.fromOptions(false, randomBoolean(), true, false)); request.aliases("alias1"); request.indices("foo*"); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //the union of all resolved indices and aliases gets returned, based on indices and aliases that user is authorized for String[] expectedIndices = new String[]{"alias1", "foofoo", "foofoobar", "foobarfoo"}; @@ -917,7 +911,7 @@ public void testResolveWildcardsGetAliasesRequestLenientExpandOpen() { request.indicesOptions(IndicesOptions.fromOptions(true, randomBoolean(), true, false)); request.aliases("alias1"); request.indices("foo*", "bar", "missing"); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //the union of all resolved indices and aliases gets returned, based on indices and aliases that user is authorized for String[] expectedIndices = new String[]{"alias1", "foofoo", "foofoobar", "foobarfoo", "bar"}; @@ -953,7 +947,7 @@ public void testResolveAllGetAliasesRequest() { request.indices("_all"); } request.aliases("alias1"); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //the union of all resolved indices and aliases gets returned String[] expectedIndices = new String[]{"bar", "bar-closed", "foofoobar", "foobarfoo", "foofoo", "foofoo-closed", "alias1"}; @@ -974,7 +968,7 @@ public void testResolveAllGetAliasesRequestExpandWildcardsOpenOnly() { request.indices("_all"); } request.aliases("alias1"); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //the union of all resolved indices and aliases gets returned String[] expectedIndices = new String[]{"bar", "foofoobar", "foobarfoo", "foofoo", "alias1"}; @@ -1033,7 +1027,7 @@ public void testResolveAllAliasesGetAliasesRequest() { if (randomBoolean()) { request.indices("_all"); } - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //the union of all resolved indices and aliases gets returned String[] expectedIndices = new String[]{"bar", "bar-closed", "foofoobar", "foobarfoo", "foofoo", "foofoo-closed"}; @@ -1048,7 +1042,7 @@ public void testResolveAllAndExplicitAliasesGetAliasesRequest() { if (randomBoolean()) { request.indices("_all"); } - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, 
authorizedIndices).getLocal(); //the union of all resolved indices and aliases gets returned String[] expectedIndices = new String[]{"bar", "bar-closed", "foofoobar", "foobarfoo", "foofoo", "foofoo-closed", "explicit"}; @@ -1063,7 +1057,7 @@ public void testResolveAllAndWildcardsAliasesGetAliasesRequest() { if (randomBoolean()) { request.indices("_all"); } - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //the union of all resolved indices and aliases gets returned String[] expectedIndices = new String[]{"bar", "bar-closed", "foofoobar", "foobarfoo", "foofoo", "foofoo-closed"}; @@ -1077,7 +1071,7 @@ public void testResolveAliasesWildcardsGetAliasesRequest() { GetAliasesRequest request = new GetAliasesRequest(); request.indices("*bar"); request.aliases("foo*"); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //union of all resolved indices and aliases gets returned, based on what user is authorized for //note that the index side will end up containing matching aliases too, which is fine, as es core would do @@ -1101,7 +1095,7 @@ public void testResolveAliasesWildcardsGetAliasesRequestNoAuthorizedIndices() { public void testResolveAliasesExclusionWildcardsGetAliasesRequest() { GetAliasesRequest request = new GetAliasesRequest(); request.aliases("foo*","-foobar*"); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, GetAliasesAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); //union of all resolved indices and aliases gets returned, based on what user is authorized for //note that the index side will end up containing matching aliases too, which is fine, as es core would do @@ -1180,7 +1174,7 @@ public void testCompositeIndicesRequestIsNotSupported() { } public void testResolveAdminAction() { - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, DeleteIndexAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(user, DeleteIndexAction.NAME); { RefreshRequest request = new RefreshRequest("*"); List indices = resolveIndices(request, authorizedIndices).getLocal(); @@ -1224,14 +1218,14 @@ public void testIndicesExists() { public void testXPackSecurityUserHasAccessToSecurityIndex() { SearchRequest request = new SearchRequest(); { - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(XPackSecurityUser.INSTANCE, SearchAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(XPackSecurityUser.INSTANCE, SearchAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); assertThat(indices, hasItem(SecurityIndexManager.SECURITY_INDEX_NAME)); } { IndicesAliasesRequest aliasesRequest = new IndicesAliasesRequest(); aliasesRequest.addAliasAction(AliasActions.add().alias("security_alias").index(SECURITY_INDEX_NAME)); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(XPackSecurityUser.INSTANCE, IndicesAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(XPackSecurityUser.INSTANCE, IndicesAliasesAction.NAME); List indices = 
resolveIndices(aliasesRequest, authorizedIndices).getLocal(); assertThat(indices, hasItem(SecurityIndexManager.SECURITY_INDEX_NAME)); } @@ -1239,7 +1233,7 @@ public void testXPackSecurityUserHasAccessToSecurityIndex() { public void testXPackUserDoesNotHaveAccessToSecurityIndex() { SearchRequest request = new SearchRequest(); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(XPackUser.INSTANCE, SearchAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(XPackUser.INSTANCE, SearchAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); assertThat(indices, not(hasItem(SecurityIndexManager.SECURITY_INDEX_NAME))); } @@ -1251,7 +1245,7 @@ public void testNonXPackUserAccessingSecurityIndex() { { SearchRequest request = new SearchRequest(); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(allAccessUser, SearchAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(allAccessUser, SearchAction.NAME); List indices = resolveIndices(request, authorizedIndices).getLocal(); assertThat(indices, not(hasItem(SecurityIndexManager.SECURITY_INDEX_NAME))); } @@ -1259,7 +1253,7 @@ public void testNonXPackUserAccessingSecurityIndex() { { IndicesAliasesRequest aliasesRequest = new IndicesAliasesRequest(); aliasesRequest.addAliasAction(AliasActions.add().alias("security_alias1").index("*")); - final AuthorizedIndices authorizedIndices = buildAuthorizedIndices(allAccessUser, IndicesAliasesAction.NAME); + final List authorizedIndices = buildAuthorizedIndices(allAccessUser, IndicesAliasesAction.NAME); List indices = resolveIndices(aliasesRequest, authorizedIndices).getLocal(); assertThat(indices, not(hasItem(SecurityIndexManager.SECURITY_INDEX_NAME))); } @@ -1351,7 +1345,7 @@ public void testAliasDateMathExpressionNotSupported() { public void testDynamicPutMappingRequestFromAlias() { PutMappingRequest request = new PutMappingRequest(Strings.EMPTY_ARRAY).setConcreteIndex(new Index("foofoo", UUIDs.base64UUID())); User user = new User("alias-writer", "alias_read_write"); - AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, PutMappingAction.NAME); + List authorizedIndices = buildAuthorizedIndices(user, PutMappingAction.NAME); String putMappingIndexOrAlias = IndicesAndAliasesResolver.getPutMappingIndexOrAlias(request, authorizedIndices, metaData); assertEquals("barbaz", putMappingIndexOrAlias); @@ -1366,12 +1360,12 @@ public void testDynamicPutMappingRequestFromAlias() { // TODO with the removal of DeleteByQuery is there another way to test resolving a write action? 
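The two hunks that follow capture the heart of this refactor for the resolver tests: the buildAuthorizedIndices helper stops constructing the removed AuthorizedIndices wrapper and instead asks RBACEngine to resolve the authorized names directly from the role, and resolveIndices now accepts that plain list. As a reading aid, here is a minimal sketch of the new call shape outside the diff; the generic parameters, the AliasOrIndex lookup type, and the sketch class itself are assumptions inferred from the surrounding hunks (angle brackets were stripped from this extracted patch), not verbatim signatures from the Elasticsearch source.

    // A minimal sketch, not production code: it only illustrates the call shape introduced by this patch.
    // Everything marked "assumed" is inferred from the diff, not copied from Elasticsearch.
    package org.elasticsearch.xpack.security.authz;           // assumed; the tests in this patch call the method from this package

    import java.util.List;
    import java.util.SortedMap;

    import org.elasticsearch.cluster.metadata.AliasOrIndex;   // assumed value type of the metadata lookup
    import org.elasticsearch.cluster.metadata.MetaData;
    import org.elasticsearch.xpack.core.security.authz.permission.Role;

    class AuthorizedIndicesResolutionSketch {

        // Before this patch (per the removed lines): new AuthorizedIndices(role, action, metaData).get()
        // After this patch (per the added lines): a static resolution that returns the names directly.
        static List<String> authorizedNames(Role role, String action, MetaData metaData) {
            SortedMap<String, AliasOrIndex> lookup = metaData.getAliasAndIndexLookup();   // assumed return type
            return RBACEngine.resolveAuthorizedIndicesFromRole(role, action, lookup);
        }
    }

Whatever the exact element types turn out to be, the shape is the same: a Role, an action name, and the cluster metadata's alias-and-index lookup go in; a flat list of authorized names comes out, with no intermediate wrapper object to manage.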
-    private AuthorizedIndices buildAuthorizedIndices(User user, String action) {
+    private List<String> buildAuthorizedIndices(User user, String action) {
         PlainActionFuture<Role> rolesListener = new PlainActionFuture<>();
         final Authentication authentication =
             new Authentication(user, new RealmRef("test", "indices-aliases-resolver-tests", "node"), null);
-        authzService.roles(user, authentication, rolesListener);
-        return new AuthorizedIndices(rolesListener.actionGet(), action, metaData);
+        rolesStore.getRoles(user, authentication, rolesListener);
+        return RBACEngine.resolveAuthorizedIndicesFromRole(rolesListener.actionGet(), action, metaData.getAliasAndIndexLookup());
     }

     public static IndexMetaData.Builder indexBuilder(String index) {
@@ -1380,7 +1374,7 @@ public static IndexMetaData.Builder indexBuilder(String index) {
             .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0));
     }

-    private ResolvedIndices resolveIndices(TransportRequest request, AuthorizedIndices authorizedIndices) {
+    private ResolvedIndices resolveIndices(TransportRequest request, List<String> authorizedIndices) {
         return defaultIndicesResolver.resolve(request, this.metaData, authorizedIndices);
     }

diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java
new file mode 100644
index 0000000000000..e43ca6bbc0b6f
--- /dev/null
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java
@@ -0,0 +1,750 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */ + +package org.elasticsearch.xpack.security.authz; + +import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; +import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; +import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; +import org.elasticsearch.action.delete.DeleteAction; +import org.elasticsearch.action.index.IndexAction; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.license.GetLicenseAction; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction; +import org.elasticsearch.xpack.core.security.action.user.AuthenticateRequest; +import org.elasticsearch.xpack.core.security.action.user.AuthenticateRequestBuilder; +import org.elasticsearch.xpack.core.security.action.user.ChangePasswordAction; +import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequest; +import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequestBuilder; +import org.elasticsearch.xpack.core.security.action.user.DeleteUserAction; +import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesResponse; +import org.elasticsearch.xpack.core.security.action.user.PutUserAction; +import org.elasticsearch.xpack.core.security.action.user.UserRequest; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; +import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; +import org.elasticsearch.xpack.core.security.authc.ldap.LdapRealmSettings; +import org.elasticsearch.xpack.core.security.authc.pki.PkiRealmSettings; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; +import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions; +import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition; +import org.elasticsearch.xpack.core.security.authz.permission.ResourcePrivileges; +import org.elasticsearch.xpack.core.security.authz.permission.Role; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor; +import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.ConditionalClusterPrivileges.ManageApplicationPrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; +import org.elasticsearch.xpack.core.security.authz.privilege.Privilege; +import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm; +import org.elasticsearch.xpack.security.authz.RBACEngine.RBACAuthorizationInfo; +import org.elasticsearch.xpack.security.authz.store.CompositeRolesStore; +import org.hamcrest.Matchers; +import 
org.junit.Before; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Locale; +import java.util.Set; + +import static java.util.Collections.emptyMap; +import static org.elasticsearch.common.util.set.Sets.newHashSet; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.emptyIterable; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.iterableWithSize; +import static org.hamcrest.Matchers.notNullValue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.Mockito.when; + +public class RBACEngineTests extends ESTestCase { + + private RBACEngine engine; + + @Before + public void createEngine() { + engine = new RBACEngine(Settings.EMPTY, mock(CompositeRolesStore.class)); + } + + public void testSameUserPermission() { + final User user = new User("joe"); + final boolean changePasswordRequest = randomBoolean(); + final TransportRequest request = changePasswordRequest ? + new ChangePasswordRequestBuilder(mock(Client.class)).username(user.principal()).request() : + new AuthenticateRequestBuilder(mock(Client.class)).username(user.principal()).request(); + final String action = changePasswordRequest ? ChangePasswordAction.NAME : AuthenticateAction.NAME; + final Authentication authentication = mock(Authentication.class); + final Authentication.RealmRef authenticatedBy = mock(Authentication.RealmRef.class); + when(authentication.getUser()).thenReturn(user); + when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); + when(authenticatedBy.getType()) + .thenReturn(changePasswordRequest ? randomFrom(ReservedRealm.TYPE, NativeRealmSettings.TYPE) : + randomAlphaOfLengthBetween(4, 12)); + + assertThat(request, instanceOf(UserRequest.class)); + assertTrue(engine.checkSameUserPermissions(action, request, authentication)); + } + + public void testSameUserPermissionDoesNotAllowNonMatchingUsername() { + final User authUser = new User("admin", new String[]{"bar"}); + final User user = new User("joe", null, authUser); + final boolean changePasswordRequest = randomBoolean(); + final String username = randomFrom("", "joe" + randomAlphaOfLengthBetween(1, 5), randomAlphaOfLengthBetween(3, 10)); + final TransportRequest request = changePasswordRequest ? + new ChangePasswordRequestBuilder(mock(Client.class)).username(username).request() : + new AuthenticateRequestBuilder(mock(Client.class)).username(username).request(); + final String action = changePasswordRequest ? ChangePasswordAction.NAME : AuthenticateAction.NAME; + final Authentication authentication = mock(Authentication.class); + final Authentication.RealmRef authenticatedBy = mock(Authentication.RealmRef.class); + when(authentication.getUser()).thenReturn(user); + when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); + when(authenticatedBy.getType()) + .thenReturn(changePasswordRequest ? 
randomFrom(ReservedRealm.TYPE, NativeRealmSettings.TYPE) : + randomAlphaOfLengthBetween(4, 12)); + + assertThat(request, instanceOf(UserRequest.class)); + assertFalse(engine.checkSameUserPermissions(action, request, authentication)); + + when(authentication.getUser()).thenReturn(user); + final Authentication.RealmRef lookedUpBy = mock(Authentication.RealmRef.class); + when(authentication.getLookedUpBy()).thenReturn(lookedUpBy); + when(lookedUpBy.getType()) + .thenReturn(changePasswordRequest ? randomFrom(ReservedRealm.TYPE, NativeRealmSettings.TYPE) : + randomAlphaOfLengthBetween(4, 12)); + // this should still fail since the username is still different + assertFalse(engine.checkSameUserPermissions(action, request, authentication)); + + if (request instanceof ChangePasswordRequest) { + ((ChangePasswordRequest) request).username("joe"); + } else { + ((AuthenticateRequest) request).username("joe"); + } + assertTrue(engine.checkSameUserPermissions(action, request, authentication)); + } + + public void testSameUserPermissionDoesNotAllowOtherActions() { + final User user = mock(User.class); + final TransportRequest request = mock(TransportRequest.class); + final String action = randomFrom(PutUserAction.NAME, DeleteUserAction.NAME, ClusterHealthAction.NAME, ClusterStateAction.NAME, + ClusterStatsAction.NAME, GetLicenseAction.NAME); + final Authentication authentication = mock(Authentication.class); + final Authentication.RealmRef authenticatedBy = mock(Authentication.RealmRef.class); + final boolean runAs = randomBoolean(); + when(authentication.getUser()).thenReturn(user); + when(user.authenticatedUser()).thenReturn(runAs ? new User("authUser") : user); + when(user.isRunAs()).thenReturn(runAs); + when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); + when(authenticatedBy.getType()) + .thenReturn(randomAlphaOfLengthBetween(4, 12)); + + assertFalse(engine.checkSameUserPermissions(action, request, authentication)); + verifyZeroInteractions(user, request, authentication); + } + + public void testSameUserPermissionRunAsChecksAuthenticatedBy() { + final User authUser = new User("admin", new String[]{"bar"}); + final String username = "joe"; + final User user = new User(username, null, authUser); + final boolean changePasswordRequest = randomBoolean(); + final TransportRequest request = changePasswordRequest ? + new ChangePasswordRequestBuilder(mock(Client.class)).username(username).request() : + new AuthenticateRequestBuilder(mock(Client.class)).username(username).request(); + final String action = changePasswordRequest ? ChangePasswordAction.NAME : AuthenticateAction.NAME; + final Authentication authentication = mock(Authentication.class); + final Authentication.RealmRef authenticatedBy = mock(Authentication.RealmRef.class); + final Authentication.RealmRef lookedUpBy = mock(Authentication.RealmRef.class); + when(authentication.getUser()).thenReturn(user); + when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); + when(authentication.getLookedUpBy()).thenReturn(lookedUpBy); + when(lookedUpBy.getType()) + .thenReturn(changePasswordRequest ? 
randomFrom(ReservedRealm.TYPE, NativeRealmSettings.TYPE) : + randomAlphaOfLengthBetween(4, 12)); + assertTrue(engine.checkSameUserPermissions(action, request, authentication)); + + when(authentication.getUser()).thenReturn(authUser); + assertFalse(engine.checkSameUserPermissions(action, request, authentication)); + } + + public void testSameUserPermissionDoesNotAllowChangePasswordForOtherRealms() { + final User user = new User("joe"); + final ChangePasswordRequest request = new ChangePasswordRequestBuilder(mock(Client.class)).username(user.principal()).request(); + final String action = ChangePasswordAction.NAME; + final Authentication authentication = mock(Authentication.class); + final Authentication.RealmRef authenticatedBy = mock(Authentication.RealmRef.class); + when(authentication.getUser()).thenReturn(user); + when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); + when(authenticatedBy.getType()).thenReturn(randomFrom(LdapRealmSettings.LDAP_TYPE, FileRealmSettings.TYPE, + LdapRealmSettings.AD_TYPE, PkiRealmSettings.TYPE, + randomAlphaOfLengthBetween(4, 12))); + + assertThat(request, instanceOf(UserRequest.class)); + assertFalse(engine.checkSameUserPermissions(action, request, authentication)); + verify(authenticatedBy).getType(); + verify(authentication).getAuthenticatedBy(); + verify(authentication, times(2)).getUser(); + verifyNoMoreInteractions(authenticatedBy, authentication); + } + + public void testSameUserPermissionDoesNotAllowChangePasswordForLookedUpByOtherRealms() { + final User authUser = new User("admin", new String[]{"bar"}); + final User user = new User("joe", null, authUser); + final ChangePasswordRequest request = new ChangePasswordRequestBuilder(mock(Client.class)).username(user.principal()).request(); + final String action = ChangePasswordAction.NAME; + final Authentication authentication = mock(Authentication.class); + final Authentication.RealmRef authenticatedBy = mock(Authentication.RealmRef.class); + final Authentication.RealmRef lookedUpBy = mock(Authentication.RealmRef.class); + when(authentication.getUser()).thenReturn(user); + when(authentication.getAuthenticatedBy()).thenReturn(authenticatedBy); + when(authentication.getLookedUpBy()).thenReturn(lookedUpBy); + when(lookedUpBy.getType()).thenReturn(randomFrom(LdapRealmSettings.LDAP_TYPE, FileRealmSettings.TYPE, + LdapRealmSettings.AD_TYPE, PkiRealmSettings.TYPE, + randomAlphaOfLengthBetween(4, 12))); + + assertThat(request, instanceOf(UserRequest.class)); + assertFalse(engine.checkSameUserPermissions(action, request, authentication)); + verify(authentication).getLookedUpBy(); + verify(authentication, times(2)).getUser(); + verify(lookedUpBy).getType(); + verifyNoMoreInteractions(authentication, lookedUpBy, authenticatedBy); + } + + /** + * This tests that action names in the request are considered "matched" by the relevant named privilege + * (in this case that {@link DeleteAction} and {@link IndexAction} are satisfied by {@link IndexPrivilege#WRITE}). 
+ */ + public void testNamedIndexPrivilegesMatchApplicableActions() throws Exception { + User user = new User(randomAlphaOfLengthBetween(4, 12)); + Authentication authentication = mock(Authentication.class); + when(authentication.getUser()).thenReturn(user); + Role role = Role.builder("test1") + .cluster(Collections.singleton("all"), Collections.emptyList()) + .add(IndexPrivilege.WRITE, "academy") + .build(); + RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null); + + final HasPrivilegesRequest request = new HasPrivilegesRequest(); + request.username(user.principal()); + request.clusterPrivileges(ClusterHealthAction.NAME); + request.indexPrivileges(RoleDescriptor.IndicesPrivileges.builder() + .indices("academy") + .privileges(DeleteAction.NAME, IndexAction.NAME) + .build()); + request.applicationPrivileges(new RoleDescriptor.ApplicationResourcePrivileges[0]); + + final PlainActionFuture future = new PlainActionFuture<>(); + engine.checkPrivileges(authentication, authzInfo, request, Collections.emptyList(), future); + + final HasPrivilegesResponse response = future.get(); + assertThat(response, notNullValue()); + assertThat(response.getUsername(), is(user.principal())); + assertThat(response.isCompleteMatch(), is(true)); + + assertThat(response.getClusterPrivileges().size(), equalTo(1)); + assertThat(response.getClusterPrivileges().get(ClusterHealthAction.NAME), equalTo(true)); + + assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(1)); + final ResourcePrivileges result = response.getIndexPrivileges().iterator().next(); + assertThat(result.getResource(), equalTo("academy")); + assertThat(result.getPrivileges().size(), equalTo(2)); + assertThat(result.getPrivileges().get(DeleteAction.NAME), equalTo(true)); + assertThat(result.getPrivileges().get(IndexAction.NAME), equalTo(true)); + } + + /** + * This tests that the action responds correctly when the user/role has some, but not all + * of the privileges being checked. 
+ */ + public void testMatchSubsetOfPrivileges() throws Exception { + User user = new User(randomAlphaOfLengthBetween(4, 12)); + Authentication authentication = mock(Authentication.class); + when(authentication.getUser()).thenReturn(user); + Role role = Role.builder("test2") + .cluster(ClusterPrivilege.MONITOR) + .add(IndexPrivilege.INDEX, "academy") + .add(IndexPrivilege.WRITE, "initiative") + .build(); + RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null); + + final HasPrivilegesRequest request = new HasPrivilegesRequest(); + request.username(user.principal()); + request.clusterPrivileges("monitor", "manage"); + request.indexPrivileges(RoleDescriptor.IndicesPrivileges.builder() + .indices("academy", "initiative", "school") + .privileges("delete", "index", "manage") + .build()); + request.applicationPrivileges(new RoleDescriptor.ApplicationResourcePrivileges[0]); + final PlainActionFuture future = new PlainActionFuture<>(); + engine.checkPrivileges(authentication, authzInfo, request, Collections.emptyList(), future); + + final HasPrivilegesResponse response = future.get(); + assertThat(response, notNullValue()); + assertThat(response.getUsername(), is(user.principal())); + assertThat(response.isCompleteMatch(), is(false)); + assertThat(response.getClusterPrivileges().size(), equalTo(2)); + assertThat(response.getClusterPrivileges().get("monitor"), equalTo(true)); + assertThat(response.getClusterPrivileges().get("manage"), equalTo(false)); + assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(3)); + + final Iterator indexPrivilegesIterator = response.getIndexPrivileges().iterator(); + final ResourcePrivileges academy = indexPrivilegesIterator.next(); + final ResourcePrivileges initiative = indexPrivilegesIterator.next(); + final ResourcePrivileges school = indexPrivilegesIterator.next(); + + assertThat(academy.getResource(), equalTo("academy")); + assertThat(academy.getPrivileges().size(), equalTo(3)); + assertThat(academy.getPrivileges().get("index"), equalTo(true)); // explicit + assertThat(academy.getPrivileges().get("delete"), equalTo(false)); + assertThat(academy.getPrivileges().get("manage"), equalTo(false)); + + assertThat(initiative.getResource(), equalTo("initiative")); + assertThat(initiative.getPrivileges().size(), equalTo(3)); + assertThat(initiative.getPrivileges().get("index"), equalTo(true)); // implied by write + assertThat(initiative.getPrivileges().get("delete"), equalTo(true)); // implied by write + assertThat(initiative.getPrivileges().get("manage"), equalTo(false)); + + assertThat(school.getResource(), equalTo("school")); + assertThat(school.getPrivileges().size(), equalTo(3)); + assertThat(school.getPrivileges().get("index"), equalTo(false)); + assertThat(school.getPrivileges().get("delete"), equalTo(false)); + assertThat(school.getPrivileges().get("manage"), equalTo(false)); + } + + /** + * This tests that the action responds correctly when the user/role has none + * of the privileges being checked. 
+ */ + public void testMatchNothing() throws Exception { + User user = new User(randomAlphaOfLengthBetween(4, 12)); + Authentication authentication = mock(Authentication.class); + when(authentication.getUser()).thenReturn(user); + Role role = Role.builder("test3") + .cluster(ClusterPrivilege.MONITOR) + .build(); + RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null); + + final HasPrivilegesResponse response = hasPrivileges(RoleDescriptor.IndicesPrivileges.builder() + .indices("academy") + .privileges("read", "write") + .build(), + authentication, authzInfo, Collections.emptyList(), Strings.EMPTY_ARRAY); + assertThat(response.getUsername(), is(user.principal())); + assertThat(response.isCompleteMatch(), is(false)); + assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(1)); + final ResourcePrivileges result = response.getIndexPrivileges().iterator().next(); + assertThat(result.getResource(), equalTo("academy")); + assertThat(result.getPrivileges().size(), equalTo(2)); + assertThat(result.getPrivileges().get("read"), equalTo(false)); + assertThat(result.getPrivileges().get("write"), equalTo(false)); + } + + /** + * Wildcards in the request are treated as + * does the user have ___ privilege on every possible index that matches this pattern? + * Or, expressed differently, + * does the user have ___ privilege on a wildcard that covers (is a superset of) this pattern? + */ + public void testWildcardHandling() throws Exception { + List privs = new ArrayList<>(); + final ApplicationPrivilege kibanaRead = defineApplicationPrivilege(privs, "kibana", "read", + "data:read/*", "action:login", "action:view/dashboard"); + final ApplicationPrivilege kibanaWrite = defineApplicationPrivilege(privs, "kibana", "write", + "data:write/*", "action:login", "action:view/dashboard"); + final ApplicationPrivilege kibanaAdmin = defineApplicationPrivilege(privs, "kibana", "admin", + "action:login", "action:manage/*"); + final ApplicationPrivilege kibanaViewSpace = defineApplicationPrivilege(privs, "kibana", "view-space", + "action:login", "space:view/*"); + User user = new User(randomAlphaOfLengthBetween(4, 12)); + Authentication authentication = mock(Authentication.class); + when(authentication.getUser()).thenReturn(user); + Role role = Role.builder("test3") + .add(IndexPrivilege.ALL, "logstash-*", "foo?") + .add(IndexPrivilege.READ, "abc*") + .add(IndexPrivilege.WRITE, "*xyz") + .addApplicationPrivilege(kibanaRead, Collections.singleton("*")) + .addApplicationPrivilege(kibanaViewSpace, newHashSet("space/engineering/*", "space/builds")) + .build(); + RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null); + + final HasPrivilegesRequest request = new HasPrivilegesRequest(); + request.username(user.principal()); + request.clusterPrivileges(Strings.EMPTY_ARRAY); + request.indexPrivileges( + RoleDescriptor.IndicesPrivileges.builder() + .indices("logstash-2016-*") + .privileges("write") // Yes, because (ALL,"logstash-*") + .build(), + RoleDescriptor.IndicesPrivileges.builder() + .indices("logstash-*") + .privileges("read") // Yes, because (ALL,"logstash-*") + .build(), + RoleDescriptor.IndicesPrivileges.builder() + .indices("log*") + .privileges("manage") // No, because "log*" includes indices that "logstash-*" does not + .build(), + RoleDescriptor.IndicesPrivileges.builder() + .indices("foo*", "foo?") + .privileges("read") // Yes, "foo?", but not "foo*", because "foo*" > "foo?" 
+ .build(), + RoleDescriptor.IndicesPrivileges.builder() + .indices("abcd*") + .privileges("read", "write") // read = Yes, because (READ, "abc*"), write = No + .build(), + RoleDescriptor.IndicesPrivileges.builder() + .indices("abc*xyz") + .privileges("read", "write", "manage") // read = Yes ( READ "abc*"), write = Yes (WRITE, "*xyz"), manage = No + .build(), + RoleDescriptor.IndicesPrivileges.builder() + .indices("a*xyz") + .privileges("read", "write", "manage") // read = No, write = Yes (WRITE, "*xyz"), manage = No + .build() + ); + + request.applicationPrivileges( + RoleDescriptor.ApplicationResourcePrivileges.builder() + .resources("*") + .application("kibana") + .privileges(Sets.union(kibanaRead.name(), kibanaWrite.name())) // read = Yes, write = No + .build(), + RoleDescriptor.ApplicationResourcePrivileges.builder() + .resources("space/engineering/project-*", "space/*") // project-* = Yes, space/* = Not + .application("kibana") + .privileges("space:view/dashboard") + .build() + ); + + final PlainActionFuture future = new PlainActionFuture<>(); + engine.checkPrivileges(authentication, authzInfo, request, privs, future); + + final HasPrivilegesResponse response = future.get(); + assertThat(response, notNullValue()); + assertThat(response.getUsername(), is(user.principal())); + assertThat(response.isCompleteMatch(), is(false)); + assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(8)); + assertThat(response.getIndexPrivileges(), containsInAnyOrder( + ResourcePrivileges.builder("logstash-2016-*").addPrivileges(Collections.singletonMap("write", true)).build(), + ResourcePrivileges.builder("logstash-*").addPrivileges(Collections.singletonMap("read", true)).build(), + ResourcePrivileges.builder("log*").addPrivileges(Collections.singletonMap("manage", false)).build(), + ResourcePrivileges.builder("foo?").addPrivileges(Collections.singletonMap("read", true)).build(), + ResourcePrivileges.builder("foo*").addPrivileges(Collections.singletonMap("read", false)).build(), + ResourcePrivileges.builder("abcd*").addPrivileges(mapBuilder().put("read", true).put("write", false).map()).build(), + ResourcePrivileges.builder("abc*xyz") + .addPrivileges(mapBuilder().put("read", true).put("write", true).put("manage", false).map()).build(), + ResourcePrivileges.builder("a*xyz") + .addPrivileges(mapBuilder().put("read", false).put("write", true).put("manage", false).map()).build() + )); + assertThat(response.getApplicationPrivileges().entrySet(), Matchers.iterableWithSize(1)); + final Set kibanaPrivileges = response.getApplicationPrivileges().get("kibana"); + assertThat(kibanaPrivileges, Matchers.iterableWithSize(3)); + assertThat(Strings.collectionToCommaDelimitedString(kibanaPrivileges), kibanaPrivileges, containsInAnyOrder( + ResourcePrivileges.builder("*").addPrivileges(mapBuilder().put("read", true).put("write", false).map()).build(), + ResourcePrivileges.builder("space/engineering/project-*") + .addPrivileges(Collections.singletonMap("space:view/dashboard", true)).build(), + ResourcePrivileges.builder("space/*").addPrivileges(Collections.singletonMap("space:view/dashboard", false)).build() + )); + } + + public void testCheckingIndexPermissionsDefinedOnDifferentPatterns() throws Exception { + User user = new User(randomAlphaOfLengthBetween(4, 12)); + Authentication authentication = mock(Authentication.class); + when(authentication.getUser()).thenReturn(user); + Role role = Role.builder("test-write") + .add(IndexPrivilege.INDEX, "apache-*") + .add(IndexPrivilege.DELETE, "apache-2016-*") + 
.build(); + RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null); + + final HasPrivilegesResponse response = hasPrivileges(RoleDescriptor.IndicesPrivileges.builder() + .indices("apache-2016-12", "apache-2017-01") + .privileges("index", "delete") + .build(), authentication, authzInfo, Collections.emptyList(), Strings.EMPTY_ARRAY); + assertThat(response.isCompleteMatch(), is(false)); + assertThat(response.getIndexPrivileges(), Matchers.iterableWithSize(2)); + assertThat(response.getIndexPrivileges(), containsInAnyOrder( + ResourcePrivileges.builder("apache-2016-12") + .addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()) + .put("index", true).put("delete", true).map()).build(), + ResourcePrivileges.builder("apache-2017-01") + .addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()) + .put("index", true).put("delete", false).map()).build() + )); + } + + public void testCheckingApplicationPrivilegesOnDifferentApplicationsAndResources() throws Exception { + List privs = new ArrayList<>(); + final ApplicationPrivilege app1Read = defineApplicationPrivilege(privs, "app1", "read", "data:read/*"); + final ApplicationPrivilege app1Write = defineApplicationPrivilege(privs, "app1", "write", "data:write/*"); + final ApplicationPrivilege app1All = defineApplicationPrivilege(privs, "app1", "all", "*"); + final ApplicationPrivilege app2Read = defineApplicationPrivilege(privs, "app2", "read", "data:read/*"); + final ApplicationPrivilege app2Write = defineApplicationPrivilege(privs, "app2", "write", "data:write/*"); + final ApplicationPrivilege app2All = defineApplicationPrivilege(privs, "app2", "all", "*"); + + User user = new User(randomAlphaOfLengthBetween(4, 12)); + Authentication authentication = mock(Authentication.class); + when(authentication.getUser()).thenReturn(user); + Role role = Role.builder("test-role") + .addApplicationPrivilege(app1Read, Collections.singleton("foo/*")) + .addApplicationPrivilege(app1All, Collections.singleton("foo/bar/baz")) + .addApplicationPrivilege(app2Read, Collections.singleton("foo/bar/*")) + .addApplicationPrivilege(app2Write, Collections.singleton("*/bar/*")) + .build(); + RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null); + + final HasPrivilegesResponse response = hasPrivileges(new RoleDescriptor.IndicesPrivileges[0], + new RoleDescriptor.ApplicationResourcePrivileges[]{ + RoleDescriptor.ApplicationResourcePrivileges.builder() + .application("app1") + .resources("foo/1", "foo/bar/2", "foo/bar/baz", "baz/bar/foo") + .privileges("read", "write", "all") + .build(), + RoleDescriptor.ApplicationResourcePrivileges.builder() + .application("app2") + .resources("foo/1", "foo/bar/2", "foo/bar/baz", "baz/bar/foo") + .privileges("read", "write", "all") + .build() + }, authentication, authzInfo, privs, Strings.EMPTY_ARRAY); + + assertThat(response.isCompleteMatch(), is(false)); + assertThat(response.getIndexPrivileges(), Matchers.emptyIterable()); + assertThat(response.getApplicationPrivileges().entrySet(), Matchers.iterableWithSize(2)); + final Set app1 = response.getApplicationPrivileges().get("app1"); + assertThat(app1, Matchers.iterableWithSize(4)); + assertThat(Strings.collectionToCommaDelimitedString(app1), app1, containsInAnyOrder( + ResourcePrivileges.builder("foo/1").addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()) + .put("read", true).put("write", false).put("all", false).map()).build(), + ResourcePrivileges.builder("foo/bar/2").addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()) + .put("read", 
true).put("write", false).put("all", false).map()).build(), + ResourcePrivileges.builder("foo/bar/baz").addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()) + .put("read", true).put("write", true).put("all", true).map()).build(), + ResourcePrivileges.builder("baz/bar/foo").addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()) + .put("read", false).put("write", false).put("all", false).map()).build() + )); + final Set app2 = response.getApplicationPrivileges().get("app2"); + assertThat(app2, Matchers.iterableWithSize(4)); + assertThat(Strings.collectionToCommaDelimitedString(app2), app2, containsInAnyOrder( + ResourcePrivileges.builder("foo/1").addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()) + .put("read", false).put("write", false).put("all", false).map()).build(), + ResourcePrivileges.builder("foo/bar/2").addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()) + .put("read", true).put("write", true).put("all", false).map()).build(), + ResourcePrivileges.builder("foo/bar/baz").addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()) + .put("read", true).put("write", true).put("all", false).map()).build(), + ResourcePrivileges.builder("baz/bar/foo").addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()) + .put("read", false).put("write", true).put("all", false).map()).build() + )); + } + + public void testCheckingApplicationPrivilegesWithComplexNames() throws Exception { + final String appName = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(3, 10); + final String action1 = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(2, 5); + final String action2 = randomAlphaOfLength(1).toLowerCase(Locale.ROOT) + randomAlphaOfLengthBetween(6, 9); + + final List privs = new ArrayList<>(); + final ApplicationPrivilege priv1 = defineApplicationPrivilege(privs, appName, action1, "DATA:read/*", "ACTION:" + action1); + final ApplicationPrivilege priv2 = defineApplicationPrivilege(privs, appName, action2, "DATA:read/*", "ACTION:" + action2); + + User user = new User(randomAlphaOfLengthBetween(4, 12)); + Authentication authentication = mock(Authentication.class); + when(authentication.getUser()).thenReturn(user); + Role role = Role.builder("test-write") + .addApplicationPrivilege(priv1, Collections.singleton("user/*/name")) + .build(); + RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null); + + final HasPrivilegesResponse response = hasPrivileges( + new RoleDescriptor.IndicesPrivileges[0], + new RoleDescriptor.ApplicationResourcePrivileges[]{ + RoleDescriptor.ApplicationResourcePrivileges.builder() + .application(appName) + .resources("user/hawkeye/name") + .privileges("DATA:read/user/*", "ACTION:" + action1, "ACTION:" + action2, action1, action2) + .build() + }, authentication, authzInfo, privs, "monitor"); + assertThat(response.isCompleteMatch(), is(false)); + assertThat(response.getApplicationPrivileges().keySet(), containsInAnyOrder(appName)); + assertThat(response.getApplicationPrivileges().get(appName), iterableWithSize(1)); + assertThat(response.getApplicationPrivileges().get(appName), containsInAnyOrder( + ResourcePrivileges.builder("user/hawkeye/name").addPrivileges(MapBuilder.newMapBuilder(new LinkedHashMap()) + .put("DATA:read/user/*", true) + .put("ACTION:" + action1, true) + .put("ACTION:" + action2, false) + .put(action1, true) + .put(action2, false) + .map()).build() + )); + } + + public void testIsCompleteMatch() throws Exception { + final List privs = new ArrayList<>(); + final 
ApplicationPrivilege kibanaRead = defineApplicationPrivilege(privs, "kibana", "read", "data:read/*"); + final ApplicationPrivilege kibanaWrite = defineApplicationPrivilege(privs, "kibana", "write", "data:write/*"); + User user = new User(randomAlphaOfLengthBetween(4, 12)); + Authentication authentication = mock(Authentication.class); + when(authentication.getUser()).thenReturn(user); + Role role = Role.builder("test-write") + .cluster(ClusterPrivilege.MONITOR) + .add(IndexPrivilege.READ, "read-*") + .add(IndexPrivilege.ALL, "all-*") + .addApplicationPrivilege(kibanaRead, Collections.singleton("*")) + .build(); + RBACAuthorizationInfo authzInfo = new RBACAuthorizationInfo(role, null); + + + assertThat(hasPrivileges( + indexPrivileges("read", "read-123", "read-456", "all-999"), authentication, authzInfo, privs, "monitor").isCompleteMatch(), + is(true)); + assertThat(hasPrivileges( + indexPrivileges("read", "read-123", "read-456", "all-999"), authentication, authzInfo, privs, "manage").isCompleteMatch(), + is(false)); + assertThat(hasPrivileges( + indexPrivileges("write", "read-123", "read-456", "all-999"), authentication, authzInfo, privs, "monitor").isCompleteMatch(), + is(false)); + assertThat(hasPrivileges( + indexPrivileges("write", "read-123", "read-456", "all-999"), authentication, authzInfo, privs, "manage").isCompleteMatch(), + is(false)); + assertThat(hasPrivileges( + new RoleDescriptor.IndicesPrivileges[]{ + RoleDescriptor.IndicesPrivileges.builder() + .indices("read-a") + .privileges("read") + .build(), + RoleDescriptor.IndicesPrivileges.builder() + .indices("all-b") + .privileges("read", "write") + .build() + }, + new RoleDescriptor.ApplicationResourcePrivileges[]{ + RoleDescriptor.ApplicationResourcePrivileges.builder() + .application("kibana") + .resources("*") + .privileges("read") + .build() + }, authentication, authzInfo, privs, "monitor").isCompleteMatch(), is(true)); + assertThat(hasPrivileges( + new RoleDescriptor.IndicesPrivileges[]{indexPrivileges("read", "read-123", "read-456", "all-999")}, + new RoleDescriptor.ApplicationResourcePrivileges[]{ + RoleDescriptor.ApplicationResourcePrivileges.builder() + .application("kibana").resources("*").privileges("read").build(), + RoleDescriptor.ApplicationResourcePrivileges.builder() + .application("kibana").resources("*").privileges("write").build() + }, authentication, authzInfo, privs, "monitor").isCompleteMatch(), is(false)); + } + + public void testBuildUserPrivilegeResponse() { + final ManageApplicationPrivileges manageApplicationPrivileges = new ManageApplicationPrivileges(Sets.newHashSet("app01", "app02")); + final BytesArray query = new BytesArray("{\"term\":{\"public\":true}}"); + final Role role = Role.builder("test", "role") + .cluster(Sets.newHashSet("monitor", "manage_watcher"), Collections.singleton(manageApplicationPrivileges)) + .add(IndexPrivilege.get(Sets.newHashSet("read", "write")), "index-1") + .add(IndexPrivilege.ALL, "index-2", "index-3") + .add( + new FieldPermissions(new FieldPermissionsDefinition(new String[]{ "public.*" }, new String[0])), + Collections.singleton(query), + IndexPrivilege.READ, randomBoolean(), "index-4", "index-5") + .addApplicationPrivilege(new ApplicationPrivilege("app01", "read", "data:read"), Collections.singleton("*")) + .runAs(new Privilege(Sets.newHashSet("user01", "user02"), "user01", "user02")) + .build(); + + final GetUserPrivilegesResponse response = engine.buildUserPrivilegesResponseObject(role); + + assertThat(response.getClusterPrivileges(), containsInAnyOrder("monitor", 
"manage_watcher")); + assertThat(response.getConditionalClusterPrivileges(), containsInAnyOrder(manageApplicationPrivileges)); + + assertThat(response.getIndexPrivileges(), iterableWithSize(3)); + final GetUserPrivilegesResponse.Indices index1 = findIndexPrivilege(response.getIndexPrivileges(), "index-1"); + assertThat(index1.getIndices(), containsInAnyOrder("index-1")); + assertThat(index1.getPrivileges(), containsInAnyOrder("read", "write")); + assertThat(index1.getFieldSecurity(), emptyIterable()); + assertThat(index1.getQueries(), emptyIterable()); + final GetUserPrivilegesResponse.Indices index2 = findIndexPrivilege(response.getIndexPrivileges(), "index-2"); + assertThat(index2.getIndices(), containsInAnyOrder("index-2", "index-3")); + assertThat(index2.getPrivileges(), containsInAnyOrder("all")); + assertThat(index2.getFieldSecurity(), emptyIterable()); + assertThat(index2.getQueries(), emptyIterable()); + final GetUserPrivilegesResponse.Indices index4 = findIndexPrivilege(response.getIndexPrivileges(), "index-4"); + assertThat(index4.getIndices(), containsInAnyOrder("index-4", "index-5")); + assertThat(index4.getPrivileges(), containsInAnyOrder("read")); + assertThat(index4.getFieldSecurity(), containsInAnyOrder( + new FieldPermissionsDefinition.FieldGrantExcludeGroup(new String[]{ "public.*" }, new String[0]))); + assertThat(index4.getQueries(), containsInAnyOrder(query)); + + assertThat(response.getApplicationPrivileges(), containsInAnyOrder( + RoleDescriptor.ApplicationResourcePrivileges.builder().application("app01").privileges("read").resources("*").build()) + ); + + assertThat(response.getRunAs(), containsInAnyOrder("user01", "user02")); + } + + private GetUserPrivilegesResponse.Indices findIndexPrivilege(Set indices, String name) { + return indices.stream().filter(i -> i.getIndices().contains(name)).findFirst().get(); + } + + private RoleDescriptor.IndicesPrivileges indexPrivileges(String priv, String... indices) { + return RoleDescriptor.IndicesPrivileges.builder() + .indices(indices) + .privileges(priv) + .build(); + } + + private ApplicationPrivilege defineApplicationPrivilege(List privs, String app, String name, + String ... actions) { + privs.add(new ApplicationPrivilegeDescriptor(app, name, newHashSet(actions), emptyMap())); + return new ApplicationPrivilege(app, name, actions); + } + + private HasPrivilegesResponse hasPrivileges(RoleDescriptor.IndicesPrivileges indicesPrivileges, Authentication authentication, + AuthorizationInfo authorizationInfo, + List applicationPrivilegeDescriptors, + String... clusterPrivileges) throws Exception { + return hasPrivileges( + new RoleDescriptor.IndicesPrivileges[]{indicesPrivileges}, + new RoleDescriptor.ApplicationResourcePrivileges[0], + authentication, authorizationInfo, applicationPrivilegeDescriptors, + clusterPrivileges + ); + } + + private HasPrivilegesResponse hasPrivileges(RoleDescriptor.IndicesPrivileges[] indicesPrivileges, + RoleDescriptor.ApplicationResourcePrivileges[] appPrivileges, + Authentication authentication, + AuthorizationInfo authorizationInfo, + List applicationPrivilegeDescriptors, + String... 
clusterPrivileges) throws Exception { + final HasPrivilegesRequest request = new HasPrivilegesRequest(); + request.username(authentication.getUser().principal()); + request.clusterPrivileges(clusterPrivileges); + request.indexPrivileges(indicesPrivileges); + request.applicationPrivileges(appPrivileges); + final PlainActionFuture future = new PlainActionFuture<>(); + engine.checkPrivileges(authentication, authorizationInfo, request, applicationPrivilegeDescriptors, future); + final HasPrivilegesResponse response = future.get(); + assertThat(response, notNullValue()); + return response; + } + + private static MapBuilder mapBuilder() { + return MapBuilder.newMapBuilder(); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListenerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListenerTests.java index b6fe4346e62a1..5b73d6d212fc5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListenerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListenerTests.java @@ -24,11 +24,17 @@ import org.elasticsearch.xpack.core.security.authc.AuthenticationField; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.audit.AuditTrailService; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationInfo; + +import java.util.Collections; import static org.elasticsearch.mock.orig.Mockito.verifyNoMoreInteractions; +import static org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail.PRINCIPAL_ROLES_FIELD_NAME; +import static org.elasticsearch.xpack.security.authz.AuthorizationService.AUTHORIZATION_INFO_KEY; import static org.elasticsearch.xpack.security.authz.AuthorizationService.ORIGINATING_ACTION_KEY; -import static org.elasticsearch.xpack.security.authz.AuthorizationService.ROLE_NAMES_KEY; +import static org.elasticsearch.xpack.security.authz.AuthorizationServiceTests.authzInfoRoles; import static org.elasticsearch.xpack.security.authz.SecuritySearchOperationListener.ensureAuthenticatedUserIsSame; +import static org.mockito.Matchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -112,13 +118,15 @@ public void testValidateSearchContext() throws Exception { Authentication authentication = new Authentication(new User("test", "role"), new RealmRef(realmName, type, nodeName), null); authentication.writeToContext(threadContext); threadContext.putTransient(ORIGINATING_ACTION_KEY, "action"); - threadContext.putTransient(ROLE_NAMES_KEY, authentication.getUser().roles()); + threadContext.putTransient(AUTHORIZATION_INFO_KEY, + (AuthorizationInfo) () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, authentication.getUser().roles())); final InternalScrollSearchRequest request = new InternalScrollSearchRequest(); SearchContextMissingException expected = expectThrows(SearchContextMissingException.class, () -> listener.validateSearchContext(testSearchContext, request)); assertEquals(testSearchContext.id(), expected.id()); verify(licenseState, times(3)).isAuthAllowed(); - verify(auditTrailService).accessDenied(null, authentication, "action", request, authentication.getUser().roles()); + verify(auditTrailService).accessDenied(eq(null), eq(authentication), eq("action"), eq(request), + 
authzInfoRoles(authentication.getUser().roles())); } // another user running as the original user @@ -146,13 +154,15 @@ public void testValidateSearchContext() throws Exception { new Authentication(new User("authenticated", "runas"), new RealmRef(realmName, type, nodeName), null); authentication.writeToContext(threadContext); threadContext.putTransient(ORIGINATING_ACTION_KEY, "action"); - threadContext.putTransient(ROLE_NAMES_KEY, authentication.getUser().roles()); + threadContext.putTransient(AUTHORIZATION_INFO_KEY, + (AuthorizationInfo) () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, authentication.getUser().roles())); final InternalScrollSearchRequest request = new InternalScrollSearchRequest(); SearchContextMissingException expected = expectThrows(SearchContextMissingException.class, () -> listener.validateSearchContext(testSearchContext, request)); assertEquals(testSearchContext.id(), expected.id()); verify(licenseState, times(5)).isAuthAllowed(); - verify(auditTrailService).accessDenied(null, authentication, "action", request, authentication.getUser().roles()); + verify(auditTrailService).accessDenied(eq(null), eq(authentication), eq("action"), eq(request), + authzInfoRoles(authentication.getUser().roles())); } } @@ -166,21 +176,24 @@ public void testEnsuredAuthenticatedUserIsSame() { AuditTrailService auditTrail = mock(AuditTrailService.class); final String auditId = randomAlphaOfLengthBetween(8, 20); - ensureAuthenticatedUserIsSame(original, current, auditTrail, id, action, request, auditId, original.getUser().roles()); + ensureAuthenticatedUserIsSame(original, current, auditTrail, id, action, request, auditId, + () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, original.getUser().roles())); verifyZeroInteractions(auditTrail); // original user being run as User user = new User(new User("test", "role"), new User("authenticated", "runas")); current = new Authentication(user, new RealmRef("realm", "file", "node"), new RealmRef(randomAlphaOfLengthBetween(1, 16), "file", "node")); - ensureAuthenticatedUserIsSame(original, current, auditTrail, id, action, request, auditId, original.getUser().roles()); + ensureAuthenticatedUserIsSame(original, current, auditTrail, id, action, request, auditId, + () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, original.getUser().roles())); verifyZeroInteractions(auditTrail); // both user are run as current = new Authentication(user, new RealmRef("realm", "file", "node"), new RealmRef(randomAlphaOfLengthBetween(1, 16), "file", "node")); Authentication runAs = current; - ensureAuthenticatedUserIsSame(runAs, current, auditTrail, id, action, request, auditId, original.getUser().roles()); + ensureAuthenticatedUserIsSame(runAs, current, auditTrail, id, action, request, auditId, + () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, original.getUser().roles())); verifyZeroInteractions(auditTrail); // different authenticated by type @@ -188,36 +201,39 @@ public void testEnsuredAuthenticatedUserIsSame() { new Authentication(new User("test", "role"), new RealmRef("realm", randomAlphaOfLength(5), "node"), null); SearchContextMissingException e = expectThrows(SearchContextMissingException.class, () -> ensureAuthenticatedUserIsSame(original, differentRealmType, auditTrail, id, action, request, auditId, - original.getUser().roles())); + () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, original.getUser().roles()))); assertEquals(id, e.id()); - verify(auditTrail).accessDenied(auditId, differentRealmType, action, request, 
original.getUser().roles()); + verify(auditTrail).accessDenied(eq(auditId), eq(differentRealmType), eq(action), eq(request), + authzInfoRoles(original.getUser().roles())); // wrong user Authentication differentUser = new Authentication(new User("test2", "role"), new RealmRef("realm", "realm", "node"), null); e = expectThrows(SearchContextMissingException.class, () -> ensureAuthenticatedUserIsSame(original, differentUser, auditTrail, id, action, request, auditId, - original.getUser().roles())); + () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, original.getUser().roles()))); assertEquals(id, e.id()); - verify(auditTrail).accessDenied(auditId, differentUser, action, request, original.getUser().roles()); + verify(auditTrail).accessDenied(eq(auditId), eq(differentUser), eq(action), eq(request), + authzInfoRoles(original.getUser().roles())); // run as different user Authentication diffRunAs = new Authentication(new User(new User("test2", "role"), new User("authenticated", "runas")), new RealmRef("realm", "file", "node1"), new RealmRef("realm", "file", "node1")); e = expectThrows(SearchContextMissingException.class, () -> ensureAuthenticatedUserIsSame(original, diffRunAs, auditTrail, id, action, request, auditId, - original.getUser().roles())); + () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, original.getUser().roles()))); assertEquals(id, e.id()); - verify(auditTrail).accessDenied(auditId, diffRunAs, action, request, original.getUser().roles()); + verify(auditTrail).accessDenied(eq(auditId), eq(diffRunAs), eq(action), eq(request), authzInfoRoles(original.getUser().roles())); // run as different looked up by type Authentication runAsDiffType = new Authentication(user, new RealmRef("realm", "file", "node"), new RealmRef(randomAlphaOfLengthBetween(1, 16), randomAlphaOfLengthBetween(5, 12), "node")); e = expectThrows(SearchContextMissingException.class, () -> ensureAuthenticatedUserIsSame(runAs, runAsDiffType, auditTrail, id, action, request, auditId, - original.getUser().roles())); + () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, original.getUser().roles()))); assertEquals(id, e.id()); - verify(auditTrail).accessDenied(auditId, runAsDiffType, action, request, original.getUser().roles()); + verify(auditTrail).accessDenied(eq(auditId), eq(runAsDiffType), eq(action), eq(request), + authzInfoRoles(original.getUser().roles())); } static class TestScrollSearchContext extends TestSearchContext { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java index f4b63942fbb9b..ed1eed302356a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.cluster.metadata.AliasMetaData; +import org.elasticsearch.cluster.metadata.AliasOrIndex; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Strings; @@ -35,6 +36,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.SortedMap; import static org.hamcrest.Matchers.containsString; import static 
org.hamcrest.Matchers.equalTo; @@ -55,14 +57,15 @@ public void testAuthorize() { .putAlias(AliasMetaData.builder("_alias")); MetaData md = MetaData.builder().put(imbBuilder).build(); FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); + SortedMap lookup = md.getAliasAndIndexLookup(); // basics: Set query = Collections.singleton(new BytesArray("{}")); String[] fields = new String[]{"_field"}; Role role = Role.builder("_role") - .add(new FieldPermissions(fieldPermissionDef(fields, null)), query, IndexPrivilege.ALL, randomBoolean(), "_index") - .build(); - IndicesAccessControl permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_index"), md, fieldPermissionsCache); + .add(new FieldPermissions(fieldPermissionDef(fields, null)), query, IndexPrivilege.ALL, randomBoolean(), "_index") + .build(); + IndicesAccessControl permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_index"), lookup, fieldPermissionsCache); assertThat(permissions.getIndexPermissions("_index"), notNullValue()); assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().grantsAccessTo("_field")); assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity()); @@ -72,9 +75,9 @@ public void testAuthorize() { // no document level security: role = Role.builder("_role") - .add(new FieldPermissions(fieldPermissionDef(fields, null)), null, IndexPrivilege.ALL, randomBoolean(), "_index") - .build(); - permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_index"), md, fieldPermissionsCache); + .add(new FieldPermissions(fieldPermissionDef(fields, null)), null, IndexPrivilege.ALL, randomBoolean(), "_index") + .build(); + permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_index"), lookup, fieldPermissionsCache); assertThat(permissions.getIndexPermissions("_index"), notNullValue()); assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().grantsAccessTo("_field")); assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity()); @@ -83,7 +86,7 @@ public void testAuthorize() { // no field level security: role = Role.builder("_role").add(new FieldPermissions(), query, IndexPrivilege.ALL, randomBoolean(), "_index").build(); - permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_index"), md, fieldPermissionsCache); + permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_index"), lookup, fieldPermissionsCache); assertThat(permissions.getIndexPermissions("_index"), notNullValue()); assertFalse(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity()); assertThat(permissions.getIndexPermissions("_index").getDocumentPermissions().hasDocumentLevelPermissions(), is(true)); @@ -94,7 +97,7 @@ public void testAuthorize() { role = Role.builder("_role") .add(new FieldPermissions(fieldPermissionDef(fields, null)), query, IndexPrivilege.ALL, randomBoolean(), "_alias") .build(); - permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_alias"), md, fieldPermissionsCache); + permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_alias"), lookup, fieldPermissionsCache); assertThat(permissions.getIndexPermissions("_index"), notNullValue()); assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().grantsAccessTo("_field")); assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity()); @@ -113,9 +116,9 @@ public void testAuthorize() { 
String[] allFields = randomFrom(new String[]{"*"}, new String[]{"foo", "*"}, new String[]{randomAlphaOfLengthBetween(1, 10), "*"}); role = Role.builder("_role") - .add(new FieldPermissions(fieldPermissionDef(allFields, null)), query, IndexPrivilege.ALL, randomBoolean(), "_alias") - .build(); - permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_alias"), md, fieldPermissionsCache); + .add(new FieldPermissions(fieldPermissionDef(allFields, null)), query, IndexPrivilege.ALL, randomBoolean(), "_alias") + .build(); + permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_alias"), lookup, fieldPermissionsCache); assertThat(permissions.getIndexPermissions("_index"), notNullValue()); assertFalse(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity()); assertThat(permissions.getIndexPermissions("_index").getDocumentPermissions().hasDocumentLevelPermissions(), is(true)); @@ -136,17 +139,17 @@ public void testAuthorize() { ) .putAlias(AliasMetaData.builder("_alias")); md = MetaData.builder(md).put(imbBuilder1).build(); - + lookup = md.getAliasAndIndexLookup(); // match all fields with more than one permission Set fooQuery = Collections.singleton(new BytesArray("{foo}")); allFields = randomFrom(new String[]{"*"}, new String[]{"foo", "*"}, new String[]{randomAlphaOfLengthBetween(1, 10), "*"}); role = Role.builder("_role") - .add(new FieldPermissions(fieldPermissionDef(allFields, null)), fooQuery, IndexPrivilege.ALL, randomBoolean(), "_alias") - .add(new FieldPermissions(fieldPermissionDef(allFields, null)), query, IndexPrivilege.ALL, randomBoolean(), "_alias") - .build(); - permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_alias"), md, fieldPermissionsCache); + .add(new FieldPermissions(fieldPermissionDef(allFields, null)), fooQuery, IndexPrivilege.ALL, randomBoolean(), "_alias") + .add(new FieldPermissions(fieldPermissionDef(allFields, null)), query, IndexPrivilege.ALL, randomBoolean(), "_alias") + .build(); + permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_alias"), lookup, fieldPermissionsCache); Set bothQueries = Sets.union(fooQuery, query); assertThat(permissions.getIndexPermissions("_index"), notNullValue()); assertFalse(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity()); @@ -178,6 +181,7 @@ public void testAuthorizeMultipleGroupsMixedDls() { .putAlias(AliasMetaData.builder("_alias")); MetaData md = MetaData.builder().put(imbBuilder).build(); FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); + SortedMap lookup = md.getAliasAndIndexLookup(); Set query = Collections.singleton(new BytesArray("{}")); String[] fields = new String[]{"_field"}; @@ -185,7 +189,7 @@ public void testAuthorizeMultipleGroupsMixedDls() { .add(new FieldPermissions(fieldPermissionDef(fields, null)), query, IndexPrivilege.ALL, randomBoolean(), "_index") .add(new FieldPermissions(fieldPermissionDef(null, null)), null, IndexPrivilege.ALL, randomBoolean(), "*") .build(); - IndicesAccessControl permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_index"), md, fieldPermissionsCache); + IndicesAccessControl permissions = role.authorize(SearchAction.NAME, Sets.newHashSet("_index"), lookup, fieldPermissionsCache); assertThat(permissions.getIndexPermissions("_index"), notNullValue()); assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().grantsAccessTo("_field")); 
assertFalse(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity()); @@ -232,6 +236,7 @@ public void testCorePermissionAuthorize() { .put(new IndexMetaData.Builder("a1").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) .put(new IndexMetaData.Builder("a2").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true) .build(); + SortedMap lookup = metaData.getAliasAndIndexLookup(); FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); IndicesPermission.Group group1 = new IndicesPermission.Group(IndexPrivilege.ALL, new FieldPermissions(), null, randomBoolean(), @@ -240,7 +245,7 @@ public void testCorePermissionAuthorize() { new FieldPermissions(fieldPermissionDef(null, new String[]{"denied_field"})), null, randomBoolean(), "a1"); IndicesPermission core = new IndicesPermission(group1, group2); Map authzMap = - core.authorize(SearchAction.NAME, Sets.newHashSet("a1", "ba"), metaData, fieldPermissionsCache); + core.authorize(SearchAction.NAME, Sets.newHashSet("a1", "ba"), lookup, fieldPermissionsCache); assertTrue(authzMap.get("a1").getFieldPermissions().grantsAccessTo("denied_field")); assertTrue(authzMap.get("a1").getFieldPermissions().grantsAccessTo(randomAlphaOfLength(5))); // did not define anything for ba so we allow all @@ -260,7 +265,7 @@ public void testCorePermissionAuthorize() { new FieldPermissions(fieldPermissionDef(new String[] { "*_field2" }, new String[] { "denied_field2" })), null, randomBoolean(), "a2"); core = new IndicesPermission(group1, group2, group3, group4); - authzMap = core.authorize(SearchAction.NAME, Sets.newHashSet("a1", "a2"), metaData, fieldPermissionsCache); + authzMap = core.authorize(SearchAction.NAME, Sets.newHashSet("a1", "a2"), lookup, fieldPermissionsCache); assertFalse(authzMap.get("a1").getFieldPermissions().hasFieldLevelSecurity()); assertFalse(authzMap.get("a2").getFieldPermissions().grantsAccessTo("denied_field2")); assertFalse(authzMap.get("a2").getFieldPermissions().grantsAccessTo("denied_field")); @@ -297,11 +302,12 @@ public void testSecurityIndicesPermissions() { .build(), true) .build(); FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); + SortedMap lookup = metaData.getAliasAndIndexLookup(); // allow_restricted_indices: false IndicesPermission.Group group = new IndicesPermission.Group(IndexPrivilege.ALL, new FieldPermissions(), null, false, "*"); Map authzMap = new IndicesPermission(group).authorize(SearchAction.NAME, - Sets.newHashSet(RestrictedIndicesNames.INTERNAL_SECURITY_INDEX, RestrictedIndicesNames.SECURITY_INDEX_NAME), metaData, + Sets.newHashSet(RestrictedIndicesNames.INTERNAL_SECURITY_INDEX, RestrictedIndicesNames.SECURITY_INDEX_NAME), lookup, fieldPermissionsCache); assertThat(authzMap.get(RestrictedIndicesNames.INTERNAL_SECURITY_INDEX).isGranted(), is(false)); assertThat(authzMap.get(RestrictedIndicesNames.SECURITY_INDEX_NAME).isGranted(), is(false)); @@ -309,7 +315,7 @@ public void testSecurityIndicesPermissions() { // allow_restricted_indices: true group = new IndicesPermission.Group(IndexPrivilege.ALL, new FieldPermissions(), null, true, "*"); authzMap = new IndicesPermission(group).authorize(SearchAction.NAME, - Sets.newHashSet(RestrictedIndicesNames.INTERNAL_SECURITY_INDEX, RestrictedIndicesNames.SECURITY_INDEX_NAME), metaData, + Sets.newHashSet(RestrictedIndicesNames.INTERNAL_SECURITY_INDEX, RestrictedIndicesNames.SECURITY_INDEX_NAME), lookup, fieldPermissionsCache); 
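The hunks in this test file all make the same change: the alias-and-index lookup is derived once from the cluster MetaData via getAliasAndIndexLookup() and passed to authorize(...) in place of the MetaData itself. Below is a minimal sketch of that calling pattern, assuming the classes this test already imports (MetaData, IndexMetaData, AliasOrIndex, FieldPermissionsCache, Role, IndexPrivilege); the index name "my-index" is a placeholder, and the generic parameters on the lookup are reconstructed from the imports rather than copied from the patch, since this rendering has dropped them.

    // Minimal sketch (not part of the original patch) of the updated authorize(...) shape:
    // resolve the alias/index lookup once from the MetaData, then hand the lookup to authorize.
    MetaData metaData = MetaData.builder()
        .put(new IndexMetaData.Builder("my-index")
            .settings(Settings.builder().put("index.version.created", Version.CURRENT).build())
            .numberOfShards(1).numberOfReplicas(0).build(), true)
        .build();
    SortedMap<String, AliasOrIndex> lookup = metaData.getAliasAndIndexLookup();   // generics reconstructed
    FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY);
    Role role = Role.builder("_role")
        .add(IndexPrivilege.READ, "my-index")
        .build();
    IndicesAccessControl permissions =
        role.authorize(SearchAction.NAME, Sets.newHashSet("my-index"), lookup, fieldPermissionsCache);
    assertThat(permissions.getIndexPermissions("my-index"), notNullValue());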
assertThat(authzMap.get(RestrictedIndicesNames.INTERNAL_SECURITY_INDEX).isGranted(), is(true)); assertThat(authzMap.get(RestrictedIndicesNames.SECURITY_INDEX_NAME).isGranted(), is(true)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/interceptor/IndicesAliasesRequestInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/IndicesAliasesRequestInterceptorTests.java similarity index 65% rename from x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/interceptor/IndicesAliasesRequestInterceptorTests.java rename to x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/IndicesAliasesRequestInterceptorTests.java index 5ca1112fa9b72..ccc710df4830a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/interceptor/IndicesAliasesRequestInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/IndicesAliasesRequestInterceptorTests.java @@ -3,11 +3,13 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.security.action.interceptor; +package org.elasticsearch.xpack.security.authz.interceptor; import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; @@ -16,19 +18,25 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationResult; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.EmptyAuthorizationInfo; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.RequestInfo; import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; import org.elasticsearch.xpack.core.security.authz.permission.DocumentPermissions; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions; import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsDefinition; -import org.elasticsearch.xpack.core.security.authz.permission.Role; -import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.audit.AuditTrailService; import java.util.Collections; +import java.util.Map; import java.util.Set; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -58,7 +66,6 @@ public void testInterceptorThrowsWhenFLSDLSEnabled() { } else { queries = null; } - Role role = Role.builder().add(fieldPermissions, queries, IndexPrivilege.ALL, 
randomBoolean(), "foo").build(); final String action = IndicesAliasesAction.NAME; IndicesAccessControl accessControl = new IndicesAccessControl(true, Collections.singletonMap("foo", new IndicesAccessControl.IndexAccessControl(true, fieldPermissions, @@ -76,8 +83,20 @@ public void testInterceptorThrowsWhenFLSDLSEnabled() { if (randomBoolean()) { indicesAliasesRequest.addAliasAction(IndicesAliasesRequest.AliasActions.removeIndex().index("foofoo")); } + PlainActionFuture plainActionFuture = new PlainActionFuture<>(); + RequestInfo requestInfo = new RequestInfo(authentication, indicesAliasesRequest, action); + AuthorizationEngine mockEngine = mock(AuthorizationEngine.class); + doAnswer(invocationOnMock -> { + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[3]; + listener.onResponse(AuthorizationResult.deny()); + return null; + }).when(mockEngine).validateIndexPermissionsAreSubset(eq(requestInfo), eq(EmptyAuthorizationInfo.INSTANCE), any(Map.class), + any(ActionListener.class)); ElasticsearchSecurityException securityException = expectThrows(ElasticsearchSecurityException.class, - () -> interceptor.intercept(indicesAliasesRequest, authentication, role, action)); + () -> { + interceptor.intercept(requestInfo, mockEngine, EmptyAuthorizationInfo.INSTANCE, plainActionFuture); + plainActionFuture.actionGet(); + }); assertEquals("Alias requests are not allowed for users who have field or document level security enabled on one of the indices", securityException.getMessage()); } @@ -87,15 +106,11 @@ public void testInterceptorThrowsWhenTargetHasGreaterPermissions() throws Except when(licenseState.copyCurrentLicenseState()).thenReturn(licenseState); when(licenseState.isAuthAllowed()).thenReturn(true); when(licenseState.isAuditingAllowed()).thenReturn(true); - when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true); + when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(randomBoolean()); ThreadContext threadContext = new ThreadContext(Settings.EMPTY); AuditTrailService auditTrailService = new AuditTrailService(Collections.emptyList(), licenseState); Authentication authentication = new Authentication(new User("john", "role"), new RealmRef(null, null, null), new RealmRef(null, null, null)); - Role role = Role.builder() - .add(IndexPrivilege.ALL, "alias") - .add(IndexPrivilege.READ, "index") - .build(); final String action = IndicesAliasesAction.NAME; IndicesAccessControl accessControl = new IndicesAccessControl(true, Collections.emptyMap()); threadContext.putTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY, accessControl); @@ -111,10 +126,24 @@ public void testInterceptorThrowsWhenTargetHasGreaterPermissions() throws Except indicesAliasesRequest.addAliasAction(IndicesAliasesRequest.AliasActions.removeIndex().index("foofoo")); } - ElasticsearchSecurityException securityException = expectThrows(ElasticsearchSecurityException.class, - () -> interceptor.intercept(indicesAliasesRequest, authentication, role, action)); - assertEquals("Adding an alias is not allowed when the alias has more permissions than any of the indices", + AuthorizationEngine mockEngine = mock(AuthorizationEngine.class); + { + PlainActionFuture plainActionFuture = new PlainActionFuture<>(); + RequestInfo requestInfo = new RequestInfo(authentication, indicesAliasesRequest, action); + doAnswer(invocationOnMock -> { + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[3]; + listener.onResponse(AuthorizationResult.deny()); + return null; + 
}).when(mockEngine).validateIndexPermissionsAreSubset(eq(requestInfo), eq(EmptyAuthorizationInfo.INSTANCE), any(Map.class), + any(ActionListener.class)); + ElasticsearchSecurityException securityException = expectThrows(ElasticsearchSecurityException.class, + () -> { + interceptor.intercept(requestInfo, mockEngine, EmptyAuthorizationInfo.INSTANCE, plainActionFuture); + plainActionFuture.actionGet(); + }); + assertEquals("Adding an alias is not allowed when the alias has more permissions than any of the indices", securityException.getMessage()); + } // swap target and source for success final IndicesAliasesRequest successRequest = new IndicesAliasesRequest(); @@ -125,6 +154,18 @@ public void testInterceptorThrowsWhenTargetHasGreaterPermissions() throws Except if (randomBoolean()) { successRequest.addAliasAction(IndicesAliasesRequest.AliasActions.removeIndex().index("foofoo")); } - interceptor.intercept(successRequest, authentication, role, action); + + { + PlainActionFuture plainActionFuture = new PlainActionFuture<>(); + RequestInfo requestInfo = new RequestInfo(authentication, successRequest, action); + doAnswer(invocationOnMock -> { + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[3]; + listener.onResponse(AuthorizationResult.granted()); + return null; + }).when(mockEngine).validateIndexPermissionsAreSubset(eq(requestInfo), eq(EmptyAuthorizationInfo.INSTANCE), any(Map.class), + any(ActionListener.class)); + interceptor.intercept(requestInfo, mockEngine, EmptyAuthorizationInfo.INSTANCE, plainActionFuture); + plainActionFuture.actionGet(); + } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/interceptor/ResizeRequestInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java similarity index 61% rename from x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/interceptor/ResizeRequestInterceptorTests.java rename to x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java index 9cf7c7ed14155..8737b912bc174 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/interceptor/ResizeRequestInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/interceptor/ResizeRequestInterceptorTests.java @@ -3,12 +3,14 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.security.action.interceptor; +package org.elasticsearch.xpack.security.authz.interceptor; import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.shrink.ResizeAction; import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; import org.elasticsearch.action.admin.indices.shrink.ShrinkAction; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; @@ -18,6 +20,10 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.AuthorizationResult; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.EmptyAuthorizationInfo; +import org.elasticsearch.xpack.core.security.authz.AuthorizationEngine.RequestInfo; import org.elasticsearch.xpack.core.security.authz.AuthorizationServiceField; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; import org.elasticsearch.xpack.core.security.authz.permission.DocumentPermissions; @@ -29,8 +35,12 @@ import org.elasticsearch.xpack.security.audit.AuditTrailService; import java.util.Collections; +import java.util.Map; import java.util.Set; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -61,7 +71,6 @@ public void testResizeRequestInterceptorThrowsWhenFLSDLSEnabled() { } else { queries = null; } - Role role = Role.builder().add(fieldPermissions, queries, IndexPrivilege.ALL, randomBoolean(), "foo").build(); final String action = randomFrom(ShrinkAction.NAME, ResizeAction.NAME); IndicesAccessControl accessControl = new IndicesAccessControl(true, Collections.singletonMap("foo", new IndicesAccessControl.IndexAccessControl(true, fieldPermissions, @@ -71,8 +80,20 @@ public void testResizeRequestInterceptorThrowsWhenFLSDLSEnabled() { ResizeRequestInterceptor resizeRequestInterceptor = new ResizeRequestInterceptor(threadPool, licenseState, auditTrailService); + PlainActionFuture plainActionFuture = new PlainActionFuture<>(); + RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("bar", "foo"), action); + AuthorizationEngine mockEngine = mock(AuthorizationEngine.class); + doAnswer(invocationOnMock -> { + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[3]; + listener.onResponse(AuthorizationResult.deny()); + return null; + }).when(mockEngine).validateIndexPermissionsAreSubset(eq(requestInfo), eq(EmptyAuthorizationInfo.INSTANCE), any(Map.class), + any(ActionListener.class)); ElasticsearchSecurityException securityException = expectThrows(ElasticsearchSecurityException.class, - () -> resizeRequestInterceptor.intercept(new ResizeRequest("bar", "foo"), authentication, role, action)); + () -> { + resizeRequestInterceptor.intercept(requestInfo, mockEngine, EmptyAuthorizationInfo.INSTANCE, plainActionFuture); + plainActionFuture.actionGet(); + }); assertEquals("Resize requests are not allowed for users when field or document level security is enabled on 
the source index", securityException.getMessage()); } @@ -97,12 +118,38 @@ public void testResizeRequestInterceptorThrowsWhenTargetHasGreaterPermissions() threadContext.putTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY, accessControl); ResizeRequestInterceptor resizeRequestInterceptor = new ResizeRequestInterceptor(threadPool, licenseState, auditTrailService); - ElasticsearchSecurityException securityException = expectThrows(ElasticsearchSecurityException.class, - () -> resizeRequestInterceptor.intercept(new ResizeRequest("target", "source"), authentication, role, action)); - assertEquals("Resizing an index is not allowed when the target index has more permissions than the source index", + + AuthorizationEngine mockEngine = mock(AuthorizationEngine.class); + { + PlainActionFuture plainActionFuture = new PlainActionFuture<>(); + RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("target", "source"), action); + doAnswer(invocationOnMock -> { + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[3]; + listener.onResponse(AuthorizationResult.deny()); + return null; + }).when(mockEngine).validateIndexPermissionsAreSubset(eq(requestInfo), eq(EmptyAuthorizationInfo.INSTANCE), any(Map.class), + any(ActionListener.class)); + ElasticsearchSecurityException securityException = expectThrows(ElasticsearchSecurityException.class, + () -> { + resizeRequestInterceptor.intercept(requestInfo, mockEngine, EmptyAuthorizationInfo.INSTANCE, plainActionFuture); + plainActionFuture.actionGet(); + }); + assertEquals("Resizing an index is not allowed when the target index has more permissions than the source index", securityException.getMessage()); + } // swap target and source for success - resizeRequestInterceptor.intercept(new ResizeRequest("source", "target"), authentication, role, action); + { + PlainActionFuture plainActionFuture = new PlainActionFuture<>(); + RequestInfo requestInfo = new RequestInfo(authentication, new ResizeRequest("source", "target"), action); + doAnswer(invocationOnMock -> { + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[3]; + listener.onResponse(AuthorizationResult.granted()); + return null; + }).when(mockEngine).validateIndexPermissionsAreSubset(eq(requestInfo), eq(EmptyAuthorizationInfo.INSTANCE), any(Map.class), + any(ActionListener.class)); + resizeRequestInterceptor.intercept(requestInfo, mockEngine, EmptyAuthorizationInfo.INSTANCE, plainActionFuture); + plainActionFuture.actionGet(); + } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index ecd4a37ae4275..5061d4c11edc1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.get.GetAction; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.security.user.privileges.Role.ClusterPrivilegeName; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; @@ -27,9 +28,13 @@ import org.elasticsearch.license.XPackLicenseState; 
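The two interceptor test files above (IndicesAliasesRequestInterceptorTests and ResizeRequestInterceptorTests) repeat a single stubbing idiom: a mocked AuthorizationEngine completes the listener passed to validateIndexPermissionsAreSubset (argument index 3) with a deny or grant result, and the interceptor is then driven through a PlainActionFuture. A condensed, hedged restatement of that idiom follows; interceptor, authentication, request and action stand in for whatever each individual test constructs, and the raw ActionListener cast mirrors the patch because the original generic parameters are not recoverable from this rendering.

    // Condensed restatement of the stubbing used in the interceptor tests above.
    AuthorizationEngine mockEngine = mock(AuthorizationEngine.class);
    RequestInfo requestInfo = new RequestInfo(authentication, request, action);
    doAnswer(invocationOnMock -> {
        // argument 3 is the listener the interceptor passes to the engine
        ActionListener listener = (ActionListener) invocationOnMock.getArguments()[3];
        listener.onResponse(AuthorizationResult.deny());   // or AuthorizationResult.granted()
        return null;
    }).when(mockEngine).validateIndexPermissionsAreSubset(eq(requestInfo), eq(EmptyAuthorizationInfo.INSTANCE),
        any(Map.class), any(ActionListener.class));
    PlainActionFuture plainActionFuture = new PlainActionFuture<>();
    interceptor.intercept(requestInfo, mockEngine, EmptyAuthorizationInfo.INSTANCE, plainActionFuture);
    plainActionFuture.actionGet();   // surfaces ElasticsearchSecurityException when the engine denied

When the stub answers with AuthorizationResult.granted() instead, actionGet() completes normally, which is how the success paths at the end of both tests are exercised.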
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequest.Empty; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.action.saml.SamlAuthenticateAction; import org.elasticsearch.xpack.core.security.action.user.PutUserAction; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.Authentication.AuthenticationType; +import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.authz.accesscontrol.IndicesAccessControl; @@ -42,6 +47,13 @@ import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.authz.store.RoleRetrievalResult; +import org.elasticsearch.xpack.core.security.user.AnonymousUser; +import org.elasticsearch.xpack.core.security.user.SystemUser; +import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.core.security.user.XPackUser; +import org.elasticsearch.xpack.security.audit.AuditUtil; +import org.elasticsearch.xpack.security.authc.ApiKeyService; +import org.elasticsearch.xpack.security.authc.ApiKeyService.ApiKeyRoleDescriptors; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.io.IOException; @@ -62,6 +74,8 @@ import static org.elasticsearch.mock.orig.Mockito.verifyNoMoreInteractions; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.is; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anySetOf; import static org.mockito.Matchers.eq; @@ -128,7 +142,7 @@ public void testRolesWhenDlsFlsUnlicensed() throws IOException { when(fileRolesStore.roleDescriptors(Collections.singleton("no_fls_dls"))).thenReturn(Collections.singleton(noFlsDlsRole)); CompositeRolesStore compositeRolesStore = new CompositeRolesStore(Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class), Collections.emptyList(), - new ThreadContext(Settings.EMPTY), licenseState, cache); + new ThreadContext(Settings.EMPTY), licenseState, cache, mock(ApiKeyService.class)); PlainActionFuture roleFuture = new PlainActionFuture<>(); compositeRolesStore.roles(Collections.singleton("fls"), roleFuture); @@ -193,7 +207,7 @@ public void testRolesWhenDlsFlsLicensed() throws IOException { when(fileRolesStore.roleDescriptors(Collections.singleton("no_fls_dls"))).thenReturn(Collections.singleton(noFlsDlsRole)); CompositeRolesStore compositeRolesStore = new CompositeRolesStore(Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class), Collections.emptyList(), - new ThreadContext(Settings.EMPTY), licenseState, cache); + new ThreadContext(Settings.EMPTY), licenseState, cache, mock(ApiKeyService.class)); PlainActionFuture roleFuture = new PlainActionFuture<>(); compositeRolesStore.roles(Collections.singleton("fls"), roleFuture); @@ -235,7 +249,7 @@ public void testNegativeLookupsAreCached() { final CompositeRolesStore compositeRolesStore = new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, fileRolesStore, 
nativeRolesStore, reservedRolesStore, nativePrivilegeStore, Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS), - new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache); + new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class)); verify(fileRolesStore).addListener(any(Consumer.class)); // adds a listener in ctor final String roleName = randomAlphaOfLengthBetween(1, 10); @@ -286,7 +300,7 @@ public void testNegativeLookupsCacheDisabled() { .build(); final CompositeRolesStore compositeRolesStore = new CompositeRolesStore(settings, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class), Collections.emptyList(), new ThreadContext(settings), - new XPackLicenseState(settings), cache); + new XPackLicenseState(settings), cache, mock(ApiKeyService.class)); verify(fileRolesStore).addListener(any(Consumer.class)); // adds a listener in ctor final String roleName = randomAlphaOfLengthBetween(1, 10); @@ -320,7 +334,7 @@ public void testNegativeLookupsAreNotCachedWithFailures() { final CompositeRolesStore compositeRolesStore = new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class), Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS), - new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache); + new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class)); verify(fileRolesStore).addListener(any(Consumer.class)); // adds a listener in ctor final String roleName = randomAlphaOfLengthBetween(1, 10); @@ -397,7 +411,8 @@ public void testCustomRolesProviders() { final CompositeRolesStore compositeRolesStore = new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class), Arrays.asList(inMemoryProvider1, inMemoryProvider2), - new ThreadContext(SECURITY_ENABLED_SETTINGS), new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache); + new ThreadContext(SECURITY_ENABLED_SETTINGS), new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, + mock(ApiKeyService.class)); final Set roleNames = Sets.newHashSet("roleA", "roleB", "unknown"); PlainActionFuture future = new PlainActionFuture<>(); @@ -458,8 +473,8 @@ public void testMergingRolesWithFls() { .settings(Settings.builder().put("index.version.created", Version.CURRENT).build()) .numberOfShards(1).numberOfReplicas(0).build(), true) .build(); - Map acls = - role.indices().authorize("indices:data/read/search", Collections.singleton("test"), metaData, cache); + Map acls = role.indices().authorize("indices:data/read/search", + Collections.singleton("test"), metaData.getAliasAndIndexLookup(), cache); assertFalse(acls.isEmpty()); assertTrue(acls.get("test").getFieldPermissions().grantsAccessTo("L1.foo")); assertFalse(acls.get("test").getFieldPermissions().grantsAccessTo("L2.foo")); @@ -600,8 +615,9 @@ public void testCustomRolesProviderFailures() throws Exception { final CompositeRolesStore compositeRolesStore = new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, fileRolesStore, nativeRolesStore, reservedRolesStore, - mock(NativePrivilegeStore.class), Arrays.asList(inMemoryProvider1, failingProvider), - new ThreadContext(SECURITY_ENABLED_SETTINGS), new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache); + mock(NativePrivilegeStore.class), Arrays.asList(inMemoryProvider1, failingProvider), + new ThreadContext(SECURITY_ENABLED_SETTINGS), new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, + 
mock(ApiKeyService.class)); final Set roleNames = Sets.newHashSet("roleA", "roleB", "unknown"); PlainActionFuture future = new PlainActionFuture<>(); @@ -643,7 +659,7 @@ public void testCustomRolesProvidersLicensing() { xPackLicenseState.update(randomFrom(OperationMode.BASIC, OperationMode.GOLD, OperationMode.STANDARD), true, null); CompositeRolesStore compositeRolesStore = new CompositeRolesStore( Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class), - Arrays.asList(inMemoryProvider), new ThreadContext(Settings.EMPTY), xPackLicenseState, cache); + Arrays.asList(inMemoryProvider), new ThreadContext(Settings.EMPTY), xPackLicenseState, cache, mock(ApiKeyService.class)); Set roleNames = Sets.newHashSet("roleA"); PlainActionFuture future = new PlainActionFuture<>(); @@ -655,7 +671,7 @@ Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(Nativ compositeRolesStore = new CompositeRolesStore( Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class), - Arrays.asList(inMemoryProvider), new ThreadContext(Settings.EMPTY), xPackLicenseState, cache); + Arrays.asList(inMemoryProvider), new ThreadContext(Settings.EMPTY), xPackLicenseState, cache, mock(ApiKeyService.class)); // these licenses allow custom role providers xPackLicenseState.update(randomFrom(OperationMode.PLATINUM, OperationMode.TRIAL), true, null); roleNames = Sets.newHashSet("roleA"); @@ -669,7 +685,7 @@ Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(Nativ // license expired, don't allow custom role providers compositeRolesStore = new CompositeRolesStore( Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class), - Arrays.asList(inMemoryProvider), new ThreadContext(Settings.EMPTY), xPackLicenseState, cache); + Arrays.asList(inMemoryProvider), new ThreadContext(Settings.EMPTY), xPackLicenseState, cache, mock(ApiKeyService.class)); xPackLicenseState.update(randomFrom(OperationMode.PLATINUM, OperationMode.TRIAL), false, null); roleNames = Sets.newHashSet("roleA"); future = new PlainActionFuture<>(); @@ -694,7 +710,7 @@ public void testCacheClearOnIndexHealthChange() { CompositeRolesStore compositeRolesStore = new CompositeRolesStore( Settings.EMPTY, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class), Collections.emptyList(), new ThreadContext(Settings.EMPTY), - new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache) { + new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class)) { @Override public void invalidateAll() { numInvalidation.incrementAndGet(); @@ -746,7 +762,7 @@ public void testCacheClearOnIndexOutOfDateChange() { CompositeRolesStore compositeRolesStore = new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class), Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS), - new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache) { + new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class)) { @Override public void invalidateAll() { numInvalidation.incrementAndGet(); @@ -764,6 +780,209 @@ public void invalidateAll() { assertEquals(2, numInvalidation.get()); } + public void testDefaultRoleUserWithoutRoles() { + final FileRolesStore fileRolesStore = mock(FileRolesStore.class); + doCallRealMethod().when(fileRolesStore).accept(any(Set.class), any(ActionListener.class)); + 
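The tests added from this point on share one setup recipe: both role stores are Mockito mocks whose accept(...) calls go through to the real implementation, the file store returns no descriptors, the native store fails its listener with RoleRetrievalResult.failure(...), and the CompositeRolesStore under test is built with the new trailing ApiKeyService argument. The fragment below restates only the construction and resolution steps they share; it assumes the mocked stores and the cache field defined elsewhere in this class, and the generic parameter on the future is reconstructed rather than copied from the patch.

    // Restated (not verbatim) construction and resolution steps shared by the new tests:
    // the store now takes a trailing ApiKeyService argument, and roles are resolved from a
    // User plus its Authentication via getRoles(...) instead of from a set of role names.
    CompositeRolesStore compositeRolesStore =
        new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, fileRolesStore, nativeRolesStore, reservedRolesStore,
            mock(NativePrivilegeStore.class), Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS),
            new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class));
    PlainActionFuture<Role> rolesFuture = new PlainActionFuture<>();   // generic parameter reconstructed
    User user = new User("no role user");
    Authentication auth = new Authentication(user, new RealmRef("name", "type", "node"), null);
    compositeRolesStore.getRoles(user, auth, rolesFuture);
    Role roles = rolesFuture.actionGet();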
final NativeRolesStore nativeRolesStore = mock(NativeRolesStore.class); + doCallRealMethod().when(nativeRolesStore).accept(any(Set.class), any(ActionListener.class)); + when(fileRolesStore.roleDescriptors(anySetOf(String.class))).thenReturn(Collections.emptySet()); + doAnswer((invocationOnMock) -> { + ActionListener callback = (ActionListener) invocationOnMock.getArguments()[1]; + callback.onResponse(RoleRetrievalResult.failure(new RuntimeException("intentionally failed!"))); + return null; + }).when(nativeRolesStore).getRoleDescriptors(isA(Set.class), any(ActionListener.class)); + final ReservedRolesStore reservedRolesStore = spy(new ReservedRolesStore()); + + final CompositeRolesStore compositeRolesStore = + new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, fileRolesStore, nativeRolesStore, reservedRolesStore, + mock(NativePrivilegeStore.class), Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS), + new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class)); + verify(fileRolesStore).addListener(any(Consumer.class)); // adds a listener in ctor + + + PlainActionFuture rolesFuture = new PlainActionFuture<>(); + final User user = new User("no role user"); + Authentication auth = new Authentication(user, new RealmRef("name", "type", "node"), null); + compositeRolesStore.getRoles(user, auth, rolesFuture); + final Role roles = rolesFuture.actionGet(); + assertEquals(Role.EMPTY, roles); + } + + public void testAnonymousUserEnabledRoleAdded() { + Settings settings = Settings.builder() + .put(SECURITY_ENABLED_SETTINGS) + .put(AnonymousUser.ROLES_SETTING.getKey(), "anonymous_user_role") + .build(); + final FileRolesStore fileRolesStore = mock(FileRolesStore.class); + doCallRealMethod().when(fileRolesStore).accept(any(Set.class), any(ActionListener.class)); + final NativeRolesStore nativeRolesStore = mock(NativeRolesStore.class); + doCallRealMethod().when(nativeRolesStore).accept(any(Set.class), any(ActionListener.class)); + doAnswer(invocationOnMock -> { + Set names = (Set) invocationOnMock.getArguments()[0]; + if (names.size() == 1 && names.contains("anonymous_user_role")) { + RoleDescriptor rd = new RoleDescriptor("anonymous_user_role", null, null, null); + return Collections.singleton(rd); + } + return Collections.emptySet(); + }). 
+ when(fileRolesStore).roleDescriptors(anySetOf(String.class)); + doAnswer((invocationOnMock) -> { + ActionListener callback = (ActionListener) invocationOnMock.getArguments()[1]; + callback.onResponse(RoleRetrievalResult.failure(new RuntimeException("intentionally failed!"))); + return null; + }).when(nativeRolesStore).getRoleDescriptors(isA(Set.class), any(ActionListener.class)); + final ReservedRolesStore reservedRolesStore = spy(new ReservedRolesStore()); + + final CompositeRolesStore compositeRolesStore = + new CompositeRolesStore(settings, fileRolesStore, nativeRolesStore, reservedRolesStore, + mock(NativePrivilegeStore.class), Collections.emptyList(), new ThreadContext(settings), + new XPackLicenseState(settings), cache, mock(ApiKeyService.class)); + verify(fileRolesStore).addListener(any(Consumer.class)); // adds a listener in ctor + + PlainActionFuture rolesFuture = new PlainActionFuture<>(); + final User user = new User("no role user"); + Authentication auth = new Authentication(user, new RealmRef("name", "type", "node"), null); + compositeRolesStore.getRoles(user, auth, rolesFuture); + final Role roles = rolesFuture.actionGet(); + assertThat(Arrays.asList(roles.names()), hasItem("anonymous_user_role")); + } + + public void testDoesNotUseRolesStoreForXPackUser() { + final FileRolesStore fileRolesStore = mock(FileRolesStore.class); + doCallRealMethod().when(fileRolesStore).accept(any(Set.class), any(ActionListener.class)); + final NativeRolesStore nativeRolesStore = mock(NativeRolesStore.class); + doCallRealMethod().when(nativeRolesStore).accept(any(Set.class), any(ActionListener.class)); + when(fileRolesStore.roleDescriptors(anySetOf(String.class))).thenReturn(Collections.emptySet()); + doAnswer((invocationOnMock) -> { + ActionListener callback = (ActionListener) invocationOnMock.getArguments()[1]; + callback.onResponse(RoleRetrievalResult.failure(new RuntimeException("intentionally failed!"))); + return null; + }).when(nativeRolesStore).getRoleDescriptors(isA(Set.class), any(ActionListener.class)); + final ReservedRolesStore reservedRolesStore = spy(new ReservedRolesStore()); + + final CompositeRolesStore compositeRolesStore = + new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, fileRolesStore, nativeRolesStore, reservedRolesStore, + mock(NativePrivilegeStore.class), Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS), + new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class)); + verify(fileRolesStore).addListener(any(Consumer.class)); // adds a listener in ctor + + PlainActionFuture rolesFuture = new PlainActionFuture<>(); + Authentication auth = new Authentication(XPackUser.INSTANCE, new RealmRef("name", "type", "node"), null); + compositeRolesStore.getRoles(XPackUser.INSTANCE, auth, rolesFuture); + final Role roles = rolesFuture.actionGet(); + assertThat(roles, equalTo(XPackUser.ROLE)); + verifyNoMoreInteractions(fileRolesStore, nativeRolesStore, reservedRolesStore); + } + + public void testGetRolesForSystemUserThrowsException() { + final FileRolesStore fileRolesStore = mock(FileRolesStore.class); + doCallRealMethod().when(fileRolesStore).accept(any(Set.class), any(ActionListener.class)); + final NativeRolesStore nativeRolesStore = mock(NativeRolesStore.class); + doCallRealMethod().when(nativeRolesStore).accept(any(Set.class), any(ActionListener.class)); + when(fileRolesStore.roleDescriptors(anySetOf(String.class))).thenReturn(Collections.emptySet()); + doAnswer((invocationOnMock) -> { + ActionListener callback = (ActionListener) 
invocationOnMock.getArguments()[1]; + callback.onResponse(RoleRetrievalResult.failure(new RuntimeException("intentionally failed!"))); + return null; + }).when(nativeRolesStore).getRoleDescriptors(isA(Set.class), any(ActionListener.class)); + final ReservedRolesStore reservedRolesStore = spy(new ReservedRolesStore()); + + final CompositeRolesStore compositeRolesStore = + new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, fileRolesStore, nativeRolesStore, reservedRolesStore, + mock(NativePrivilegeStore.class), Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS), + new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, mock(ApiKeyService.class)); + verify(fileRolesStore).addListener(any(Consumer.class)); // adds a listener in ctor + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, + () -> compositeRolesStore.getRoles(SystemUser.INSTANCE, null, null)); + assertEquals("the user [_system] is the system user and we should never try to get its roles", iae.getMessage()); + } + + public void testApiKeyAuthUsesApiKeyService() throws IOException { + final FileRolesStore fileRolesStore = mock(FileRolesStore.class); + doCallRealMethod().when(fileRolesStore).accept(any(Set.class), any(ActionListener.class)); + final NativeRolesStore nativeRolesStore = mock(NativeRolesStore.class); + doCallRealMethod().when(nativeRolesStore).accept(any(Set.class), any(ActionListener.class)); + when(fileRolesStore.roleDescriptors(anySetOf(String.class))).thenReturn(Collections.emptySet()); + doAnswer((invocationOnMock) -> { + ActionListener callback = (ActionListener) invocationOnMock.getArguments()[1]; + callback.onResponse(RoleRetrievalResult.failure(new RuntimeException("intentionally failed!"))); + return null; + }).when(nativeRolesStore).getRoleDescriptors(isA(Set.class), any(ActionListener.class)); + final ReservedRolesStore reservedRolesStore = spy(new ReservedRolesStore()); + ThreadContext threadContext = new ThreadContext(SECURITY_ENABLED_SETTINGS); + ApiKeyService apiKeyService = mock(ApiKeyService.class); + NativePrivilegeStore nativePrivStore = mock(NativePrivilegeStore.class); + doAnswer(invocationOnMock -> { + ActionListener> listener = + (ActionListener>) invocationOnMock.getArguments()[2]; + listener.onResponse(Collections.emptyList()); + return Void.TYPE; + }).when(nativePrivStore).getPrivileges(any(Collection.class), any(Collection.class), any(ActionListener.class)); + final CompositeRolesStore compositeRolesStore = + new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, fileRolesStore, nativeRolesStore, reservedRolesStore, + nativePrivStore, Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS), + new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, apiKeyService); + AuditUtil.getOrGenerateRequestId(threadContext); + final Authentication authentication = new Authentication(new User("test api key user", "superuser"), + new RealmRef("_es_api_key", "_es_api_key", "node"), null, Version.CURRENT, AuthenticationType.API_KEY, Collections.emptyMap()); + doAnswer(invocationOnMock -> { + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + listener.onResponse(new ApiKeyRoleDescriptors("keyId", + Collections.singletonList(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR), null)); + return Void.TYPE; + }).when(apiKeyService).getRoleForApiKey(eq(authentication), any(ActionListener.class)); + + PlainActionFuture roleFuture = new PlainActionFuture<>(); + compositeRolesStore.getRoles(authentication.getUser(), authentication, 
roleFuture); + roleFuture.actionGet(); + + verify(apiKeyService).getRoleForApiKey(eq(authentication), any(ActionListener.class)); + } + + public void testApiKeyAuthUsesApiKeyServiceWithScopedRole() throws IOException { + final FileRolesStore fileRolesStore = mock(FileRolesStore.class); + doCallRealMethod().when(fileRolesStore).accept(any(Set.class), any(ActionListener.class)); + final NativeRolesStore nativeRolesStore = mock(NativeRolesStore.class); + doCallRealMethod().when(nativeRolesStore).accept(any(Set.class), any(ActionListener.class)); + when(fileRolesStore.roleDescriptors(anySetOf(String.class))).thenReturn(Collections.emptySet()); + doAnswer((invocationOnMock) -> { + ActionListener callback = (ActionListener) invocationOnMock.getArguments()[1]; + callback.onResponse(RoleRetrievalResult.failure(new RuntimeException("intentionally failed!"))); + return null; + }).when(nativeRolesStore).getRoleDescriptors(isA(Set.class), any(ActionListener.class)); + final ReservedRolesStore reservedRolesStore = spy(new ReservedRolesStore()); + ThreadContext threadContext = new ThreadContext(SECURITY_ENABLED_SETTINGS); + ApiKeyService apiKeyService = mock(ApiKeyService.class); + NativePrivilegeStore nativePrivStore = mock(NativePrivilegeStore.class); + doAnswer(invocationOnMock -> { + ActionListener> listener = + (ActionListener>) invocationOnMock.getArguments()[2]; + listener.onResponse(Collections.emptyList()); + return Void.TYPE; + }).when(nativePrivStore).getPrivileges(any(Collection.class), any(Collection.class), any(ActionListener.class)); + final CompositeRolesStore compositeRolesStore = + new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, fileRolesStore, nativeRolesStore, reservedRolesStore, + nativePrivStore, Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS), + new XPackLicenseState(SECURITY_ENABLED_SETTINGS), cache, apiKeyService); + AuditUtil.getOrGenerateRequestId(threadContext); + final Authentication authentication = new Authentication(new User("test api key user", "api_key"), + new RealmRef("_es_api_key", "_es_api_key", "node"), null, Version.CURRENT, AuthenticationType.API_KEY, Collections.emptyMap()); + doAnswer(invocationOnMock -> { + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + listener.onResponse(new ApiKeyRoleDescriptors("keyId", + Collections.singletonList(new RoleDescriptor("a-role", new String[] { ClusterPrivilegeName.ALL }, null, null)), + Collections.singletonList( + new RoleDescriptor("scoped-role", new String[] { ClusterPrivilegeName.MANAGE_SECURITY }, null, null)))); + return Void.TYPE; + }).when(apiKeyService).getRoleForApiKey(eq(authentication), any(ActionListener.class)); + + PlainActionFuture roleFuture = new PlainActionFuture<>(); + compositeRolesStore.getRoles(authentication.getUser(), authentication, roleFuture); + Role role = roleFuture.actionGet(); + assertThat(role.checkClusterAction("cluster:admin/foo", Empty.INSTANCE), is(false)); + verify(apiKeyService).getRoleForApiKey(eq(authentication), any(ActionListener.class)); + } + private static class InMemoryRolesProvider implements BiConsumer, ActionListener> { private final Function, RoleRetrievalResult> roleDescriptorsFunc; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java index 9f35e996186f1..350c55a558cb6 100644 --- 
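// Illustrative aside from the editor, not part of the patch: the CompositeRolesStore tests above
// repeatedly stub listener-style async methods with Mockito's doAnswer, completing the callback
// inline so the code under test sees an immediate "async" response. A minimal self-contained
// sketch of that pattern, using a hypothetical AsyncRoleLookup interface in place of the real
// NativeRolesStore/ApiKeyService (names here are invented for the example):
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

import java.util.Set;
import java.util.function.Consumer;

interface AsyncRoleLookup {
    void getRoles(Set<String> names, Consumer<Set<String>> onResponse);
}

class ListenerStubExample {
    @SuppressWarnings("unchecked")
    static AsyncRoleLookup stubbedLookup() {
        AsyncRoleLookup lookup = mock(AsyncRoleLookup.class);
        doAnswer(invocation -> {
            // pull the callback argument out of the mocked call and complete it right away
            Consumer<Set<String>> callback = (Consumer<Set<String>>) invocation.getArguments()[1];
            callback.accept(Set.of("superuser"));
            return null;
        }).when(lookup).getRoles(any(), any());
        return lookup;
    }
}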
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java @@ -25,8 +25,6 @@ import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; -import org.elasticsearch.xpack.core.security.authz.permission.Role; -import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.security.user.XPackUser; @@ -37,7 +35,6 @@ import java.io.IOException; import java.util.Collections; -import static org.elasticsearch.mock.orig.Mockito.times; import static org.elasticsearch.xpack.core.security.support.Exceptions.authenticationError; import static org.elasticsearch.xpack.core.security.support.Exceptions.authorizationError; import static org.hamcrest.Matchers.equalTo; @@ -89,7 +86,7 @@ public void testInbound() throws Exception { PlainActionFuture future = new PlainActionFuture<>(); filter.inbound("_action", request, channel, future); //future.get(); // don't block it's not called really just mocked - verify(authzService).authorize(authentication, "_action", request, null, null); + verify(authzService).authorize(eq(authentication), eq("_action"), eq(request), any(ActionListener.class)); } public void testInboundDestructiveOperations() throws Exception { @@ -113,7 +110,7 @@ public void testInboundDestructiveOperations() throws Exception { verify(listener).onFailure(isA(IllegalArgumentException.class)); verifyNoMoreInteractions(authzService); } else { - verify(authzService).authorize(authentication, action, request, null, null); + verify(authzService).authorize(eq(authentication), eq(action), eq(request), any(ActionListener.class)); } } @@ -148,18 +145,11 @@ public void testInboundAuthorizationException() throws Exception { callback.onResponse(authentication); return Void.TYPE; }).when(authcService).authenticate(eq("_action"), eq(request), eq((User)null), any(ActionListener.class)); - final Role empty = Role.EMPTY; - doAnswer((i) -> { - ActionListener callback = - (ActionListener) i.getArguments()[2]; - callback.onResponse(empty); - return Void.TYPE; - }).when(authzService).roles(any(User.class), any(Authentication.class), any(ActionListener.class)); when(authentication.getVersion()).thenReturn(Version.CURRENT); when(authentication.getUser()).thenReturn(XPackUser.INSTANCE); PlainActionFuture future = new PlainActionFuture<>(); - doThrow(authorizationError("authz failed")).when(authzService).authorize(authentication, "_action", request, - empty, null); + doThrow(authorizationError("authz failed")) + .when(authzService).authorize(eq(authentication), eq("_action"), eq(request), any(ActionListener.class)); ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, () -> { filter.inbound("_action", request, channel, future); future.actionGet(); @@ -186,12 +176,6 @@ public void testNodeProfileAllowsNodeActions() throws Exception { ServerTransportFilter filter = getNodeFilter(true); TransportRequest request = mock(TransportRequest.class); Authentication authentication = new Authentication(new User("test", "superuser"), new RealmRef("test", "test", "node1"), null); - doAnswer((i) -> { - ActionListener callback = - 
(ActionListener) i.getArguments()[2]; - callback.onResponse(authentication.getUser().equals(i.getArguments()[0]) ? ReservedRolesStore.SUPERUSER_ROLE : null); - return Void.TYPE; - }).when(authzService).roles(any(User.class), any(Authentication.class), any(ActionListener.class)); doAnswer((i) -> { ActionListener callback = (ActionListener) i.getArguments()[3]; @@ -207,13 +191,11 @@ public void testNodeProfileAllowsNodeActions() throws Exception { filter.inbound(internalAction, request, channel, new PlainActionFuture<>()); verify(authcService).authenticate(eq(internalAction), eq(request), eq((User)null), any(ActionListener.class)); - verify(authzService).roles(eq(authentication.getUser()), any(Authentication.class), any(ActionListener.class)); - verify(authzService).authorize(authentication, internalAction, request, ReservedRolesStore.SUPERUSER_ROLE, null); + verify(authzService).authorize(eq(authentication), eq(internalAction), eq(request), any(ActionListener.class)); filter.inbound(nodeOrShardAction, request, channel, new PlainActionFuture<>()); verify(authcService).authenticate(eq(nodeOrShardAction), eq(request), eq((User)null), any(ActionListener.class)); - verify(authzService, times(2)).roles(eq(authentication.getUser()), any(Authentication.class), any(ActionListener.class)); - verify(authzService).authorize(authentication, nodeOrShardAction, request, ReservedRolesStore.SUPERUSER_ROLE, null); + verify(authzService).authorize(eq(authentication), eq(nodeOrShardAction), eq(request), any(ActionListener.class)); verifyNoMoreInteractions(authcService, authzService); } diff --git a/x-pack/qa/security-example-spi-extension/build.gradle b/x-pack/qa/security-example-spi-extension/build.gradle index 664e5f715bbb1..1ff65519c367d 100644 --- a/x-pack/qa/security-example-spi-extension/build.gradle +++ b/x-pack/qa/security-example-spi-extension/build.gradle @@ -2,7 +2,7 @@ apply plugin: 'elasticsearch.esplugin' esplugin { name 'spi-extension' - description 'An example spi extension pluing for xpack security' + description 'An example spi extension plugin for security' classname 'org.elasticsearch.example.SpiExtensionPlugin' extendedPlugins = ['x-pack-security'] } From a6ce671751fd219ecefd0d0c963a24489431c910 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Tue, 5 Feb 2019 13:42:43 -0700 Subject: [PATCH 04/19] Re-enable TasksClientDocumentationIT.testCancelTasks (#38234) This test has been disabled since November 2018, but I was not able to reproduce the failure. Re-enabling this so we can see the full log and get more context if it fails again. 
Relates to #35514 --- .../client/documentation/TasksClientDocumentationIT.java | 1 - 1 file changed, 1 deletion(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/TasksClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/TasksClientDocumentationIT.java index f05beff1b5f44..38c8986e1d9f0 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/TasksClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/TasksClientDocumentationIT.java @@ -150,7 +150,6 @@ public void onFailure(Exception e) { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/35514") @SuppressWarnings("unused") public void testCancelTasks() throws IOException { RestHighLevelClient client = highLevelClient(); From 20c66c5a057c06c56c446636dc684434ca6c08c1 Mon Sep 17 00:00:00 2001 From: Henning Andersen <33268011+henningandersen@users.noreply.github.com> Date: Tue, 5 Feb 2019 21:48:24 +0100 Subject: [PATCH 05/19] Bubble-up exceptions from scheduler (#38317) Instead of logging warnings we now rethrow exceptions thrown inside scheduled/submitted tasks. This will still log them as warnings in production but has the added benefit that if they are thrown during unit/integration test runs, the test will be flagged as an error. This is a continuation of #38014 Fixed NPE that caused CCR tests (IndexFollowingIT and likely others) to fail. schedule could bubble rejected exception to uncaught exception handler when not using SAME executor if thread pool is terminated. Now ignore rejected exception silently if executor is shutdown. --- .../threadpool/EvilThreadPoolTests.java | 68 ++++--------------- .../shard/GlobalCheckpointListeners.java | 2 +- .../elasticsearch/threadpool/Scheduler.java | 19 +++--- .../elasticsearch/threadpool/ThreadPool.java | 11 ++- 4 files changed, 33 insertions(+), 67 deletions(-) diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/threadpool/EvilThreadPoolTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/threadpool/EvilThreadPoolTests.java index 02ba33f19c4f8..99bde679f79e9 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/threadpool/EvilThreadPoolTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/threadpool/EvilThreadPoolTests.java @@ -19,11 +19,6 @@ package org.elasticsearch.threadpool; -import org.apache.logging.log4j.Level; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.core.LogEvent; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -31,7 +26,6 @@ import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.MockLogAppender; import org.junit.After; import org.junit.Before; @@ -44,7 +38,6 @@ import java.util.function.Consumer; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.instanceOf; @@ -367,63 +360,28 @@ private void runExecutionTest( uncaughtExceptionHandlerLatch.countDown(); }); - final CountDownLatch supplierLatch = new CountDownLatch(1); - 
Runnable job = () -> { - try { - runnable.run(); - } finally { - supplierLatch.countDown(); - } - }; - - // snoop on logging to also handle the cases where exceptions are simply logged in Scheduler. - final Logger schedulerLogger = LogManager.getLogger(Scheduler.SafeScheduledThreadPoolExecutor.class); - final MockLogAppender appender = new MockLogAppender(); - appender.addExpectation( - new MockLogAppender.LoggingExpectation() { - @Override - public void match(LogEvent event) { - if (event.getLevel() == Level.WARN) { - assertThat("no other warnings than those expected", - event.getMessage().getFormattedMessage(), - equalTo("uncaught exception in scheduled thread [" + Thread.currentThread().getName() + "]")); - assertTrue(expectThrowable); - assertNotNull(event.getThrown()); - assertTrue("only one message allowed", throwableReference.compareAndSet(null, event.getThrown())); - uncaughtExceptionHandlerLatch.countDown(); - } - } - - @Override - public void assertMatched() { + try { + runner.accept(() -> { + try { + runnable.run(); + } finally { + supplierLatch.countDown(); } }); + } catch (Throwable t) { + consumer.accept(Optional.of(t)); + return; + } - appender.start(); - Loggers.addAppender(schedulerLogger, appender); - try { - try { - runner.accept(job); - } catch (Throwable t) { - consumer.accept(Optional.of(t)); - return; - } - - supplierLatch.await(); + supplierLatch.await(); - if (expectThrowable) { - uncaughtExceptionHandlerLatch.await(); - } - } finally { - Loggers.removeAppender(schedulerLogger, appender); - appender.stop(); + if (expectThrowable) { + uncaughtExceptionHandlerLatch.await(); } consumer.accept(Optional.ofNullable(throwableReference.get())); - } catch (IllegalAccessException e) { - throw new RuntimeException(e); } finally { Thread.setDefaultUncaughtExceptionHandler(uncaughtExceptionHandler); } diff --git a/server/src/main/java/org/elasticsearch/index/shard/GlobalCheckpointListeners.java b/server/src/main/java/org/elasticsearch/index/shard/GlobalCheckpointListeners.java index eb9e36eeec0a8..d45a77ddb22ac 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/GlobalCheckpointListeners.java +++ b/server/src/main/java/org/elasticsearch/index/shard/GlobalCheckpointListeners.java @@ -135,7 +135,7 @@ synchronized void add(final long waitingForGlobalCheckpoint, final GlobalCheckpo * before we could be cancelled by the notification. In this case, our listener here would * not be in the map and we should not fire the timeout logic. */ - removed = listeners.remove(listener).v2() != null; + removed = listeners.remove(listener) != null; } if (removed) { final TimeoutException e = new TimeoutException(timeout.getStringRep()); diff --git a/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java b/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java index 4c1ad6a3715c6..588495dd27d32 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java +++ b/server/src/main/java/org/elasticsearch/threadpool/Scheduler.java @@ -21,7 +21,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -47,8 +47,8 @@ public interface Scheduler { /** * Create a scheduler that can be used client side. Server side, please use ThreadPool.schedule instead. 
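// Illustrative aside from the editor, not part of the patch: the Scheduler change in this commit
// relies on afterExecute to surface failures from scheduled tasks (via the uncaught exception
// handler) instead of merely logging them. A standalone sketch of that idea using only JDK
// classes; the actual code uses EsExecutors.rethrowErrors and ExceptionsHelper.reThrowIfNotNull:
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledThreadPoolExecutor;

class RethrowingScheduledExecutor extends ScheduledThreadPoolExecutor {
    RethrowingScheduledExecutor(int corePoolSize) {
        super(corePoolSize);
    }

    @Override
    protected void afterExecute(Runnable r, Throwable t) {
        super.afterExecute(r, t);
        // scheduled tasks are wrapped in a Future, so t is usually null; inspect the Future of
        // completed one-shot tasks to recover any exception the task threw
        if (t == null && r instanceof Future<?> && ((Future<?>) r).isDone()) {
            try {
                ((Future<?>) r).get();
            } catch (CancellationException ignored) {
                // cancelled tasks are not failures
            } catch (ExecutionException e) {
                // rethrow so the worker thread's uncaught exception handler (and tests) see it
                throw new RuntimeException(e.getCause());
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
    }
}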
* - * Notice that if any scheduled jobs fail with an exception, they will be logged as a warning. This includes jobs started - * using execute, submit and schedule. + * Notice that if any scheduled jobs fail with an exception, these will bubble up to the uncaught exception handler where they will + * be logged as a warning. This includes jobs started using execute, submit and schedule. * @param settings the settings to use * @return executor */ @@ -250,7 +250,8 @@ public void onAfter() { } /** - * This subclass ensures to properly bubble up Throwable instances of type Error and logs exceptions thrown in submitted/scheduled tasks + * This subclass ensures to properly bubble up Throwable instances of both type Error and Exception thrown in submitted/scheduled + * tasks to the uncaught exception handler */ class SafeScheduledThreadPoolExecutor extends ScheduledThreadPoolExecutor { private static final Logger logger = LogManager.getLogger(SafeScheduledThreadPoolExecutor.class); @@ -272,12 +273,10 @@ public SafeScheduledThreadPoolExecutor(int corePoolSize) { @Override protected void afterExecute(Runnable r, Throwable t) { - Throwable exception = EsExecutors.rethrowErrors(r); - if (exception != null) { - logger.warn(() -> - new ParameterizedMessage("uncaught exception in scheduled thread [{}]", Thread.currentThread().getName()), - exception); - } + if (t != null) return; + // Scheduler only allows Runnable's so we expect no checked exceptions here. If anyone uses submit directly on `this`, we + // accept the wrapped exception in the output. + ExceptionsHelper.reThrowIfNotNull(EsExecutors.rethrowErrors(r)); } } } diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index d42abf6b4e94b..5ca2b15d6ffe0 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -501,7 +501,16 @@ class ThreadedRunnable implements Runnable { @Override public void run() { - executor.execute(runnable); + try { + executor.execute(runnable); + } catch (EsRejectedExecutionException e) { + if (e.isExecutorShutdown()) { + logger.debug(new ParameterizedMessage("could not schedule execution of [{}] on [{}] as executor is shut down", + runnable, executor), e); + } else { + throw e; + } + } } @Override From 68cb7b98796847dacf479990a8de3f88544401ba Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 5 Feb 2019 22:04:18 +0100 Subject: [PATCH 06/19] Disable bwc tests for #38443 (#38456) --- build.gradle | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index d50801bd207f4..fcee85ff02754 100644 --- a/build.gradle +++ b/build.gradle @@ -159,8 +159,8 @@ task verifyVersions { * the enabled state of every bwc task. It should be set back to true * after the backport of the backcompat code is complete. 
*/ -final boolean bwc_tests_enabled = true -final String bwc_tests_disabled_issue = "" /* place a PR link here when committing bwc changes */ +final boolean bwc_tests_enabled = false +final String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/38443" /* place a PR link here when committing bwc changes */ if (bwc_tests_enabled == false) { if (bwc_tests_disabled_issue.isEmpty()) { throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false") From afcdbd2bc03a9df3b5a45b6f7dfcc2b5e52df404 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Tue, 5 Feb 2019 22:09:32 +0100 Subject: [PATCH 07/19] XPack: core/ccr/Security-cli migration to java-time (#38415) part of the migrating joda time work. refactoring x-pack plugins usages of joda to java-time refers #27330 --- .../AutoFollowStatsMonitoringDocTests.java | 10 ++++----- .../ccr/FollowStatsMonitoringDocTests.java | 10 ++++----- .../xpack/core/scheduler/Cron.java | 9 ++++---- .../xpack/core/ssl/cert/CertificateInfo.java | 15 +++++++------ .../ml/action/PostDataFlushResponseTests.java | 6 +++-- .../ml/calendars/ScheduledEventTests.java | 7 +++--- .../autodetect/state/DataCountsTests.java | 12 ++++++---- .../xpack/core/ssl/SSLServiceTests.java | 22 +++++++++---------- .../xpack/security/cli/CertGenUtils.java | 12 +++++----- 9 files changed, 55 insertions(+), 48 deletions(-) diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/AutoFollowStatsMonitoringDocTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/AutoFollowStatsMonitoringDocTests.java index a764086f889bb..90d5d0c6e9277 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/AutoFollowStatsMonitoringDocTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/AutoFollowStatsMonitoringDocTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; @@ -19,11 +20,10 @@ import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils; import org.elasticsearch.xpack.monitoring.exporter.BaseMonitoringDocTestCase; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; import org.junit.Before; import java.io.IOException; +import java.time.ZoneOffset; import java.util.Collections; import java.util.List; import java.util.Map; @@ -38,7 +38,7 @@ import static org.hamcrest.Matchers.nullValue; public class AutoFollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase { - + private static final DateFormatter DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_time").withZone(ZoneOffset.UTC); private AutoFollowStats autoFollowStats; @Before @@ -94,7 +94,7 @@ public void testToXContent() throws IOException { equalTo( "{" + "\"cluster_uuid\":\"_cluster\"," - + "\"timestamp\":\"" + new DateTime(timestamp, DateTimeZone.UTC).toString() + "\"," + + "\"timestamp\":\"" + DATE_TIME_FORMATTER.formatMillis(timestamp) + "\"," + "\"interval_ms\":" + intervalMillis + "," + "\"type\":\"ccr_auto_follow_stats\"," + "\"source_node\":{" @@ -103,7 +103,7 @@ public void 
testToXContent() throws IOException { + "\"transport_address\":\"_addr\"," + "\"ip\":\"_ip\"," + "\"name\":\"_name\"," - + "\"timestamp\":\"" + new DateTime(nodeTimestamp, DateTimeZone.UTC).toString() + "\"" + + "\"timestamp\":\"" + DATE_TIME_FORMATTER.formatMillis(nodeTimestamp) + "\"" + "}," + "\"ccr_auto_follow_stats\":{" + "\"number_of_failed_follow_indices\":" + autoFollowStats.getNumberOfFailedFollowIndices() + "," diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java index 33affe45fc46c..dc3c4793d973f 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/FollowStatsMonitoringDocTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; @@ -19,11 +20,10 @@ import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils; import org.elasticsearch.xpack.monitoring.exporter.BaseMonitoringDocTestCase; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; import org.junit.Before; import java.io.IOException; +import java.time.ZoneOffset; import java.util.Collections; import java.util.Map; import java.util.NavigableMap; @@ -40,7 +40,7 @@ import static org.mockito.Mockito.mock; public class FollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase { - + private static final DateFormatter DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_time").withZone(ZoneOffset.UTC); private ShardFollowNodeTaskStatus status; @Override @@ -146,7 +146,7 @@ public void testToXContent() throws IOException { equalTo( "{" + "\"cluster_uuid\":\"_cluster\"," - + "\"timestamp\":\"" + new DateTime(timestamp, DateTimeZone.UTC).toString() + "\"," + + "\"timestamp\":\"" + DATE_TIME_FORMATTER.formatMillis(timestamp) + "\"," + "\"interval_ms\":" + intervalMillis + "," + "\"type\":\"ccr_stats\"," + "\"source_node\":{" @@ -155,7 +155,7 @@ public void testToXContent() throws IOException { + "\"transport_address\":\"_addr\"," + "\"ip\":\"_ip\"," + "\"name\":\"_name\"," - + "\"timestamp\":\"" + new DateTime(nodeTimestamp, DateTimeZone.UTC).toString() + "\"" + + "\"timestamp\":\"" + DATE_TIME_FORMATTER.formatMillis(nodeTimestamp) + "\"" + "}," + "\"ccr_stats\":{" + "\"remote_cluster\":\"leader_cluster\"," diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/Cron.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/Cron.java index 607b49be1f453..6391251bbcb26 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/Cron.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/Cron.java @@ -6,13 +6,12 @@ package org.elasticsearch.xpack.core.scheduler; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.xcontent.ToXContentFragment; import 
org.elasticsearch.common.xcontent.XContentBuilder; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; -import org.joda.time.format.DateTimeFormatter; import java.io.IOException; +import java.time.ZoneOffset; import java.util.Calendar; import java.util.HashMap; import java.util.Iterator; @@ -199,8 +198,8 @@ * @author Refactoring from CronTrigger to CronExpression by Aaron Craven */ public class Cron implements ToXContentFragment { - protected static final TimeZone UTC = DateTimeZone.UTC.toTimeZone(); - protected static final DateTimeFormatter formatter = DateTimeFormat.forPattern("YYYY-MM-dd'T'HH:mm:ss"); + protected static final TimeZone UTC = TimeZone.getTimeZone(ZoneOffset.UTC); + protected static final DateFormatter formatter = DateFormatter.forPattern("yyyy-MM-dd'T'HH:mm:ss"); private static final int SECOND = 0; private static final int MINUTE = 1; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/cert/CertificateInfo.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/cert/CertificateInfo.java index cb05a67886393..d361ad8fbadce 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/cert/CertificateInfo.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/cert/CertificateInfo.java @@ -10,11 +10,12 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; import java.io.IOException; import java.security.cert.X509Certificate; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.Objects; /** @@ -27,7 +28,7 @@ public class CertificateInfo implements ToXContentObject, Writeable { private final String subjectDn; private final String serialNumber; private final boolean hasPrivateKey; - private final DateTime expiry; + private final ZonedDateTime expiry; public CertificateInfo(String path, String format, String alias, boolean hasPrivateKey, X509Certificate certificate) { Objects.requireNonNull(certificate, "Certificate cannot be null"); @@ -37,7 +38,7 @@ public CertificateInfo(String path, String format, String alias, boolean hasPriv this.subjectDn = Objects.requireNonNull(certificate.getSubjectDN().getName()); this.serialNumber = certificate.getSerialNumber().toString(16); this.hasPrivateKey = hasPrivateKey; - this.expiry = new DateTime(certificate.getNotAfter(), DateTimeZone.UTC); + this.expiry = certificate.getNotAfter().toInstant().atZone(ZoneOffset.UTC); } public CertificateInfo(StreamInput in) throws IOException { @@ -47,7 +48,7 @@ public CertificateInfo(StreamInput in) throws IOException { this.subjectDn = in.readString(); this.serialNumber = in.readString(); this.hasPrivateKey = in.readBoolean(); - this.expiry = new DateTime(in.readLong(), DateTimeZone.UTC); + this.expiry = Instant.ofEpochMilli(in.readLong()).atZone(ZoneOffset.UTC); } @Override @@ -58,7 +59,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(subjectDn); out.writeString(serialNumber); out.writeBoolean(hasPrivateKey); - out.writeLong(expiry.getMillis()); + out.writeLong(expiry.toInstant().toEpochMilli()); } public String path() { @@ -81,7 +82,7 @@ public String serialNumber() { return serialNumber; } - public DateTime expiry() { + public ZonedDateTime expiry() { return expiry; } diff --git 
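// Illustrative aside from the editor, not part of the patch: the joda-time to java.time
// conversions in this commit generally follow the substitutions sketched below. The class and
// method names here are invented for the example; only the java.time calls matter.
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.Date;

class JodaToJavaTimeExamples {
    // was: new DateTime(millis, DateTimeZone.UTC)
    static ZonedDateTime fromEpochMillis(long millis) {
        return Instant.ofEpochMilli(millis).atZone(ZoneOffset.UTC);
    }

    // was: new DateTime(date, DateTimeZone.UTC), e.g. for certificate.getNotAfter()
    static ZonedDateTime fromLegacyDate(Date date) {
        return date.toInstant().atZone(ZoneOffset.UTC);
    }

    // was: dateTime.getMillis()
    static long toEpochMillis(ZonedDateTime dateTime) {
        return dateTime.toInstant().toEpochMilli();
    }

    // was: new DateTime(randomDateTimeZone()).toDate() in tests
    static Date nowAsLegacyDate() {
        return Date.from(ZonedDateTime.now(ZoneOffset.UTC).toInstant());
    }
}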
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PostDataFlushResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PostDataFlushResponseTests.java index 88826fb096ae4..4c042db3885bb 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PostDataFlushResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PostDataFlushResponseTests.java @@ -8,13 +8,15 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.core.ml.action.FlushJobAction.Response; -import org.joda.time.DateTime; + +import java.time.ZonedDateTime; +import java.util.Date; public class PostDataFlushResponseTests extends AbstractWireSerializingTestCase { @Override protected Response createTestInstance() { - return new Response(randomBoolean(), new DateTime(randomDateTimeZone()).toDate()); + return new Response(randomBoolean(), Date.from(ZonedDateTime.now(randomZone()).toInstant())); } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEventTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEventTests.java index 3d6c317542279..05209628542fe 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEventTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/calendars/ScheduledEventTests.java @@ -15,10 +15,9 @@ import org.elasticsearch.xpack.core.ml.job.config.Operator; import org.elasticsearch.xpack.core.ml.job.config.RuleAction; import org.elasticsearch.xpack.core.ml.job.config.RuleCondition; -import org.joda.time.DateTime; import java.io.IOException; -import java.time.Instant; +import java.time.Clock; import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.EnumSet; @@ -29,7 +28,7 @@ public class ScheduledEventTests extends AbstractSerializingTestCase { public static ScheduledEvent createScheduledEvent(String calendarId) { - ZonedDateTime start = ZonedDateTime.ofInstant(Instant.ofEpochMilli(new DateTime(randomDateTimeZone()).getMillis()), ZoneOffset.UTC); + ZonedDateTime start = Clock.systemUTC().instant().atZone(ZoneOffset.UTC); return new ScheduledEvent(randomAlphaOfLength(10), start, start.plusSeconds(randomIntBetween(1, 10000)), calendarId, null); } @@ -120,4 +119,4 @@ public void testLenientParser() throws IOException { ScheduledEvent.LENIENT_PARSER.apply(parser, null); } } -} \ No newline at end of file +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCountsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCountsTests.java index 091e6887701d7..a4d75999378f2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCountsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/DataCountsTests.java @@ -8,8 +8,8 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; -import org.joda.time.DateTime; +import java.time.ZonedDateTime; import java.util.Date; import static org.hamcrest.Matchers.greaterThan; @@ -21,9 +21,13 @@ public static DataCounts createTestInstance(String 
jobId) { randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), randomIntBetween(1, 1_000_000), - new DateTime(randomDateTimeZone()).toDate(), new DateTime(randomDateTimeZone()).toDate(), - new DateTime(randomDateTimeZone()).toDate(), new DateTime(randomDateTimeZone()).toDate(), - new DateTime(randomDateTimeZone()).toDate()); + dateWithRandomTimeZone(), dateWithRandomTimeZone(), + dateWithRandomTimeZone(), dateWithRandomTimeZone(), + dateWithRandomTimeZone()); + } + + private static Date dateWithRandomTimeZone() { + return Date.from(ZonedDateTime.now(randomZone()).toInstant()); } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java index 9b697e9f08f01..ae48de463e8f3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java @@ -28,7 +28,6 @@ import org.elasticsearch.test.junit.annotations.Network; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.ssl.cert.CertificateInfo; -import org.joda.time.DateTime; import org.junit.Before; import org.mockito.ArgumentCaptor; @@ -49,6 +48,7 @@ import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.security.cert.Certificate; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -563,7 +563,7 @@ public void testReadCertificateInformation() throws Exception { assertThat(cert.format(), equalTo("PEM")); assertThat(cert.serialNumber(), equalTo("580db8ad52bb168a4080e1df122a3f56")); assertThat(cert.subjectDn(), equalTo("CN=ad-ELASTICSEARCHAD-CA, DC=ad, DC=test, DC=elasticsearch, DC=com")); - assertThat(cert.expiry(), equalTo(DateTime.parse("2029-08-27T16:32:42Z"))); + assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2029-08-27T16:32:42Z"))); assertThat(cert.hasPrivateKey(), equalTo(false)); cert = iterator.next(); @@ -572,7 +572,7 @@ public void testReadCertificateInformation() throws Exception { assertThat(cert.format(), equalTo("jks")); assertThat(cert.serialNumber(), equalTo("580db8ad52bb168a4080e1df122a3f56")); assertThat(cert.subjectDn(), equalTo("CN=ad-ELASTICSEARCHAD-CA, DC=ad, DC=test, DC=elasticsearch, DC=com")); - assertThat(cert.expiry(), equalTo(DateTime.parse("2029-08-27T16:32:42Z"))); + assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2029-08-27T16:32:42Z"))); assertThat(cert.hasPrivateKey(), equalTo(false)); cert = iterator.next(); @@ -581,7 +581,7 @@ public void testReadCertificateInformation() throws Exception { assertThat(cert.format(), equalTo("jks")); assertThat(cert.serialNumber(), equalTo("3151a81eec8d4e34c56a8466a8510bcfbe63cc31")); assertThat(cert.subjectDn(), equalTo("CN=samba4")); - assertThat(cert.expiry(), equalTo(DateTime.parse("2021-02-14T17:49:11.000Z"))); + assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2021-02-14T17:49:11.000Z"))); assertThat(cert.hasPrivateKey(), equalTo(false)); cert = iterator.next(); @@ -590,7 +590,7 @@ public void testReadCertificateInformation() throws Exception { assertThat(cert.format(), equalTo("jks")); assertThat(cert.serialNumber(), 
equalTo("d3850b2b1995ad5f")); assertThat(cert.subjectDn(), equalTo("CN=OpenLDAP, OU=Elasticsearch, O=Elastic, L=Mountain View, ST=CA, C=US")); - assertThat(cert.expiry(), equalTo(DateTime.parse("2027-07-23T16:41:14Z"))); + assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2027-07-23T16:41:14Z"))); assertThat(cert.hasPrivateKey(), equalTo(false)); cert = iterator.next(); @@ -599,7 +599,7 @@ public void testReadCertificateInformation() throws Exception { assertThat(cert.format(), equalTo("jks")); assertThat(cert.serialNumber(), equalTo("b9d497f2924bbe29")); assertThat(cert.subjectDn(), equalTo("CN=Elasticsearch Test Client, OU=elasticsearch, O=org")); - assertThat(cert.expiry(), equalTo(DateTime.parse("2019-09-22T18:52:55Z"))); + assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2019-09-22T18:52:55Z"))); assertThat(cert.hasPrivateKey(), equalTo(false)); cert = iterator.next(); @@ -608,7 +608,7 @@ public void testReadCertificateInformation() throws Exception { assertThat(cert.format(), equalTo("jks")); assertThat(cert.serialNumber(), equalTo("c0ea4216e8ff0fd8")); assertThat(cert.subjectDn(), equalTo("CN=testnode-client-profile")); - assertThat(cert.expiry(), equalTo(DateTime.parse("2019-09-22T18:52:56Z"))); + assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2019-09-22T18:52:56Z"))); assertThat(cert.hasPrivateKey(), equalTo(false)); cert = iterator.next(); @@ -617,7 +617,7 @@ public void testReadCertificateInformation() throws Exception { assertThat(cert.format(), equalTo("jks")); assertThat(cert.serialNumber(), equalTo("223c736a")); assertThat(cert.subjectDn(), equalTo("CN=Elasticsearch Test Node")); - assertThat(cert.expiry(), equalTo(DateTime.parse("2045-10-02T09:43:18.000Z"))); + assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2045-10-02T09:43:18.000Z"))); assertThat(cert.hasPrivateKey(), equalTo(true)); cert = iterator.next(); @@ -626,7 +626,7 @@ public void testReadCertificateInformation() throws Exception { assertThat(cert.format(), equalTo("jks")); assertThat(cert.serialNumber(), equalTo("7268203b")); assertThat(cert.subjectDn(), equalTo("CN=Elasticsearch Test Node")); - assertThat(cert.expiry(), equalTo(DateTime.parse("2045-10-02T09:36:10.000Z"))); + assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2045-10-02T09:36:10.000Z"))); assertThat(cert.hasPrivateKey(), equalTo(true)); cert = iterator.next(); @@ -635,7 +635,7 @@ public void testReadCertificateInformation() throws Exception { assertThat(cert.format(), equalTo("jks")); assertThat(cert.serialNumber(), equalTo("b8b96c37e332cccb")); assertThat(cert.subjectDn(), equalTo("CN=Elasticsearch Test Node, OU=elasticsearch, O=org")); - assertThat(cert.expiry(), equalTo(DateTime.parse("2019-09-22T18:52:57.000Z"))); + assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2019-09-22T18:52:57.000Z"))); assertThat(cert.hasPrivateKey(), equalTo(true)); cert = iterator.next(); @@ -644,7 +644,7 @@ public void testReadCertificateInformation() throws Exception { assertThat(cert.format(), equalTo("PKCS12")); assertThat(cert.serialNumber(), equalTo("b8b96c37e332cccb")); assertThat(cert.subjectDn(), equalTo("CN=Elasticsearch Test Node, OU=elasticsearch, O=org")); - assertThat(cert.expiry(), equalTo(DateTime.parse("2019-09-22T18:52:57Z"))); + assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2019-09-22T18:52:57Z"))); assertThat(cert.hasPrivateKey(), equalTo(true)); assertFalse(iterator.hasNext()); diff --git a/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertGenUtils.java 
b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertGenUtils.java index 0b88f3da40a14..af6a71310569a 100644 --- a/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertGenUtils.java +++ b/x-pack/plugin/security/cli/src/main/java/org/elasticsearch/xpack/security/cli/CertGenUtils.java @@ -34,8 +34,6 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.network.InetAddressHelper; import org.elasticsearch.common.network.NetworkAddress; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; import javax.net.ssl.X509ExtendedKeyManager; import javax.net.ssl.X509ExtendedTrustManager; @@ -54,6 +52,9 @@ import java.security.cert.Certificate; import java.security.cert.CertificateException; import java.security.cert.X509Certificate; +import java.sql.Date; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.HashSet; import java.util.Locale; import java.util.Objects; @@ -152,11 +153,11 @@ private static X509Certificate generateSignedCertificate(X500Principal principal int days, String signatureAlgorithm) throws NoSuchAlgorithmException, CertificateException, CertIOException, OperatorCreationException { Objects.requireNonNull(keyPair, "Key-Pair must not be null"); - final DateTime notBefore = new DateTime(DateTimeZone.UTC); + final ZonedDateTime notBefore = ZonedDateTime.now(ZoneOffset.UTC); if (days < 1) { throw new IllegalArgumentException("the certificate must be valid for at least one day"); } - final DateTime notAfter = notBefore.plusDays(days); + final ZonedDateTime notAfter = notBefore.plusDays(days); final BigInteger serial = CertGenUtils.getSerial(); JcaX509ExtensionUtils extUtils = new JcaX509ExtensionUtils(); @@ -176,7 +177,8 @@ private static X509Certificate generateSignedCertificate(X500Principal principal JcaX509v3CertificateBuilder builder = new JcaX509v3CertificateBuilder(issuer, serial, - new Time(notBefore.toDate(), Locale.ROOT), new Time(notAfter.toDate(), Locale.ROOT), subject, keyPair.getPublic()); + new Time(Date.from(notBefore.toInstant()), Locale.ROOT), new Time(Date.from(notAfter.toInstant()), Locale.ROOT), subject, + keyPair.getPublic()); builder.addExtension(Extension.subjectKeyIdentifier, false, extUtils.createSubjectKeyIdentifier(keyPair.getPublic())); builder.addExtension(Extension.authorityKeyIdentifier, false, authorityKeyIdentifier); From 1f4f6f35c8db0bb9a8fb94e0277b2ac69d2d3729 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Tue, 5 Feb 2019 23:11:21 +0200 Subject: [PATCH 08/19] Handle deprecation header-AbstractUpgradeTestCase (#38396) --- .../org/elasticsearch/upgrades/AbstractUpgradeTestCase.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java index 3b72674ed0751..d077ecedbeb22 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java @@ -86,8 +86,10 @@ public void setupForTests() throws Exception { boolean success = true; for (String template : templatesToWaitFor()) { try { + final Request headRequest = new Request("HEAD", "_template/" + template); + headRequest.setOptions(allowTypeRemovalWarnings()); final boolean exists = adminClient() - .performRequest(new 
Request("HEAD", "_template/" + template)) + .performRequest(headRequest) .getStatusLine().getStatusCode() == 200; success &= exists; logger.debug("template [{}] exists [{}]", template, exists); From 2c30501c74c195e6c289ad5dc9fd568557808a59 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Tue, 5 Feb 2019 23:12:52 +0200 Subject: [PATCH 09/19] SQL: Fix esType for DATETIME/DATE and INTERVALS (#38179) Since introduction of data types that don't have a corresponding type in ES the `esType` is error-prone when used for `unmappedType()` calls. Moreover since the renaming of `DATE` to `DATETIME` and the introduction of an actual date-only `DATE` the `esType` would return `datetime` which is not a valid type for ES mapping. Fixes: #38051 --- .../xpack/sql/analysis/analyzer/Analyzer.java | 4 +- .../xpack/sql/analysis/analyzer/Verifier.java | 4 +- .../sql/execution/search/SourceGenerator.java | 2 +- .../search/extractor/FieldHitExtractor.java | 2 +- .../xpack/sql/expression/Expressions.java | 2 +- .../function/aggregate/TopHits.java | 4 +- .../predicate/operator/arithmetic/Sub.java | 4 +- .../sql/plan/logical/command/ShowColumns.java | 2 +- .../plan/logical/command/sys/SysColumns.java | 5 +- .../plan/logical/command/sys/SysTypes.java | 7 +-- .../sql/plugin/TransportSqlQueryAction.java | 4 +- .../sql/querydsl/container/TopHitsAggRef.java | 2 +- .../xpack/sql/type/DataType.java | 59 +++++++++++-------- .../elasticsearch/xpack/sql/type/Schema.java | 4 +- .../analysis/index/IndexResolverTests.java | 4 +- .../xpack/sql/expression/ParameterTests.java | 16 ++--- .../sql/parser/EscapedFunctionsTests.java | 6 +- .../sql/parser/LikeEscapingParsingTests.java | 5 +- .../logical/command/sys/SysTablesTests.java | 2 +- .../sql/planner/QueryTranslatorTests.java | 20 +++---- .../sql/type/DataTypeConversionTests.java | 2 +- .../xpack/sql/type/TypesTests.java | 6 +- 22 files changed, 86 insertions(+), 80 deletions(-) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java index f6bd4c6dc9b50..ae34daf8a5c75 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java @@ -219,7 +219,7 @@ else if (DataTypes.isUnsupported(fa.dataType())) { // compound fields else if (allowCompound == false && fa.dataType().isPrimitive() == false) { named = u.withUnresolvedMessage( - "Cannot use field [" + fa.name() + "] type [" + fa.dataType().esType + "] only its subfields"); + "Cannot use field [" + fa.name() + "] type [" + fa.dataType().typeName + "] only its subfields"); } } return named; @@ -1228,4 +1228,4 @@ protected boolean skipResolved() { return true; } } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java index ed9bd1f106830..ac59b08dbb726 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java @@ -682,7 +682,7 @@ private static void validateInExpression(LogicalPlan p, Set localFailur for (Expression value : in.list()) { if (areTypesCompatible(dt, value.dataType()) == false) { localFailures.add(fail(value, "expected data 
type [{}], value provided is of type [{}]", - dt.esType, value.dataType().esType)); + dt.typeName, value.dataType().typeName)); return; } } @@ -703,7 +703,7 @@ private static void validateConditional(LogicalPlan p, Set localFailure } else { if (areTypesCompatible(dt, child.dataType()) == false) { localFailures.add(fail(child, "expected data type [{}], value provided is of type [{}]", - dt.esType, child.dataType().esType)); + dt.typeName, child.dataType().typeName)); return; } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java index 08bb737e65cc4..c22b1213d09dc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java @@ -175,4 +175,4 @@ private static void disableSource(SearchSourceBuilder builder) { builder.storedFields(NO_STORED_FIELD); } } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java index 503da62dc30ee..589481247ac39 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java @@ -87,7 +87,7 @@ public String getWriteableName() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(fieldName); - out.writeOptionalString(dataType == null ? null : dataType.esType); + out.writeOptionalString(dataType == null ? null : dataType.typeName); out.writeBoolean(useDocValue); out.writeOptionalString(hitName); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java index 4959e73c15ae5..04d660642c8b2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java @@ -191,7 +191,7 @@ public static TypeResolution typeMustBe(Expression e, paramOrd == null || paramOrd == ParamOrdinal.DEFAULT ? "" : " " + paramOrd.name().toLowerCase(Locale.ROOT), acceptedTypesForErrorMsg(acceptedTypes), Expressions.name(e), - e.dataType().esType)); + e.dataType().typeName)); } private static String acceptedTypesForErrorMsg(String... 
acceptedTypes) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/TopHits.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/TopHits.java index 9cc8cccaa9778..227ca9b8db3d1 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/TopHits.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/aggregate/TopHits.java @@ -46,7 +46,7 @@ protected TypeResolution resolveType() { ((FieldAttribute) field()).exactAttribute(); } catch (MappingException ex) { return new TypeResolution(format(null, "[{}] cannot operate on first argument field of data type [{}]", - functionName(), field().dataType().esType)); + functionName(), field().dataType().typeName)); } if (orderField() != null) { @@ -59,7 +59,7 @@ protected TypeResolution resolveType() { ((FieldAttribute) orderField()).exactAttribute(); } catch (MappingException ex) { return new TypeResolution(format(null, "[{}] cannot operate on second argument field of data type [{}]", - functionName(), orderField().dataType().esType)); + functionName(), orderField().dataType().typeName)); } } return TypeResolution.TYPE_RESOLVED; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java index e2454ffd26742..cad2d7ffa625a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/Sub.java @@ -7,8 +7,8 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; -import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.tree.Source; import org.elasticsearch.xpack.sql.type.DataTypes; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; @@ -36,7 +36,7 @@ protected Sub replaceChildren(Expression newLeft, Expression newRight) { protected TypeResolution resolveWithIntervals() { if (right().dataType().isDateBased() && DataTypes.isInterval(left().dataType())) { return new TypeResolution(format(null, "Cannot subtract a {}[{}] from an interval[{}]; do you mean the reverse?", - right().dataType().esType, right().source().text(), left().source().text())); + right().dataType().typeName, right().source().text(), left().source().text())); } return TypeResolution.TYPE_RESOLVED; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java index cf9eb0578f89c..ed21b52114064 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java @@ -81,7 +81,7 @@ private void fillInRows(Map mapping, String prefix, List liste List> rows = values // sort by SQL int type (that's what the JDBC/ODBC specs want) followed by name .sorted(Comparator.comparing((DataType t) -> t.sqlType.getVendorTypeNumber()).thenComparing(DataType::sqlName)) - .map(t -> 
asList(t.esType.toUpperCase(Locale.ROOT), + .map(t -> asList(t.toString(), t.sqlType.getVendorTypeNumber(), //https://docs.microsoft.com/en-us/sql/odbc/reference/appendixes/column-size?view=sql-server-2017 t.defaultPrecision, @@ -132,4 +131,4 @@ public boolean equals(Object obj) { return type.equals(((SysTypes) obj).type); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java index a4955b740b6c2..49c296a51055b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java @@ -87,9 +87,9 @@ static SqlQueryResponse createResponse(SqlQueryRequest request, SchemaRowSet row List columns = new ArrayList<>(rowSet.columnCount()); for (Schema.Entry entry : rowSet.schema()) { if (Mode.isDriver(request.mode())) { - columns.add(new ColumnInfo("", entry.name(), entry.type().esType, entry.type().displaySize)); + columns.add(new ColumnInfo("", entry.name(), entry.type().typeName, entry.type().displaySize)); } else { - columns.add(new ColumnInfo("", entry.name(), entry.type().esType)); + columns.add(new ColumnInfo("", entry.name(), entry.type().typeName)); } } columns = unmodifiableList(columns); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/TopHitsAggRef.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/TopHitsAggRef.java index 8e5eabec9b861..740c655c5069b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/TopHitsAggRef.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/TopHitsAggRef.java @@ -33,6 +33,6 @@ public DataType fieldDataType() { @Override public String toString() { - return ">" + name + "[" + fieldDataType.esType + "]"; + return ">" + name + "[" + fieldDataType.typeName + "]"; } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java index d2699692d746e..14062b4caaf01 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java @@ -23,41 +23,41 @@ public enum DataType { // @formatter:off - // jdbc type, size, defPrecision,dispSize, int, rat, docvals - NULL( JDBCType.NULL, 0, 0, 0, false, false, false), - UNSUPPORTED( JDBCType.OTHER, 0, 0, 0, false, false, false), - BOOLEAN( JDBCType.BOOLEAN, 1, 1, 1, false, false, false), - BYTE( JDBCType.TINYINT, Byte.BYTES, 3, 5, true, false, true), - SHORT( JDBCType.SMALLINT, Short.BYTES, 5, 6, true, false, true), - INTEGER( JDBCType.INTEGER, Integer.BYTES, 10, 11, true, false, true), - LONG( JDBCType.BIGINT, Long.BYTES, 19, 20, true, false, true), + // esType jdbc type, size, defPrecision,dispSize, int, rat, docvals + NULL( "null", JDBCType.NULL, 0, 0, 0, false, false, false), + UNSUPPORTED( JDBCType.OTHER, 0, 0, 0, false, false, false), + BOOLEAN( "boolean", JDBCType.BOOLEAN, 1, 1, 1, false, false, false), + BYTE( "byte", JDBCType.TINYINT, Byte.BYTES, 3, 5, true, false, true), + SHORT( "short", JDBCType.SMALLINT, Short.BYTES, 5, 6, true, false, true), + INTEGER( "integer", JDBCType.INTEGER, Integer.BYTES, 10, 11, true, false, true), + 
LONG( "long", JDBCType.BIGINT, Long.BYTES, 19, 20, true, false, true), // 53 bits defaultPrecision ~ 15(15.95) decimal digits (53log10(2)), - DOUBLE( JDBCType.DOUBLE, Double.BYTES, 15, 25, false, true, true), + DOUBLE( "double", JDBCType.DOUBLE, Double.BYTES, 15, 25, false, true, true), // 24 bits defaultPrecision - 24*log10(2) =~ 7 (7.22) - FLOAT( JDBCType.REAL, Float.BYTES, 7, 15, false, true, true), - HALF_FLOAT( JDBCType.FLOAT, Double.BYTES, 16, 25, false, true, true), + FLOAT( "float", JDBCType.REAL, Float.BYTES, 7, 15, false, true, true), + HALF_FLOAT( "half_float", JDBCType.FLOAT, Double.BYTES, 16, 25, false, true, true), // precision is based on long - SCALED_FLOAT( JDBCType.FLOAT, Double.BYTES, 19, 25, false, true, true), - KEYWORD( JDBCType.VARCHAR, Integer.MAX_VALUE, 256, 0, false, false, true), - TEXT( JDBCType.VARCHAR, Integer.MAX_VALUE, Integer.MAX_VALUE, 0, false, false, false), - OBJECT( JDBCType.STRUCT, -1, 0, 0, false, false, false), - NESTED( JDBCType.STRUCT, -1, 0, 0, false, false, false), - BINARY( JDBCType.VARBINARY, -1, Integer.MAX_VALUE, 0, false, false, false), - DATE( JDBCType.DATE, Long.BYTES, 10, 10, false, false, true), + SCALED_FLOAT( "scaled_float", JDBCType.FLOAT, Double.BYTES, 19, 25, false, true, true), + KEYWORD( "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE, 256, 0, false, false, true), + TEXT( "text", JDBCType.VARCHAR, Integer.MAX_VALUE, Integer.MAX_VALUE, 0, false, false, false), + OBJECT( "object", JDBCType.STRUCT, -1, 0, 0, false, false, false), + NESTED( "nested", JDBCType.STRUCT, -1, 0, 0, false, false, false), + BINARY( "binary", JDBCType.VARBINARY, -1, Integer.MAX_VALUE, 0, false, false, false), + DATE( JDBCType.DATE, Long.BYTES, 10, 10, false, false, true), // since ODBC and JDBC interpret precision for Date as display size // the precision is 23 (number of chars in ISO8601 with millis) + Z (the UTC timezone) // see https://github.com/elastic/elasticsearch/issues/30386#issuecomment-386807288 - DATETIME( JDBCType.TIMESTAMP, Long.BYTES, 24, 24, false, false, true), + DATETIME( "date", JDBCType.TIMESTAMP, Long.BYTES, 24, 24, false, false, true), // // specialized types // // IP can be v4 or v6. 
The latter has 2^128 addresses or 340,282,366,920,938,463,463,374,607,431,768,211,456 // aka 39 chars - IP( JDBCType.VARCHAR, 39, 39, 0, false, false, true), + IP( "ip", JDBCType.VARCHAR, 39, 39, 0, false, false, true), // // INTERVALS // the list is long as there are a lot of variations and that's what clients (ODBC) expect - // jdbc type, size, prec,disp, int, rat, docvals + // esType:null jdbc type, size, prec,disp, int, rat, docvals INTERVAL_YEAR( ExtTypes.INTERVAL_YEAR, Integer.BYTES, 7, 7, false, false, false), INTERVAL_MONTH( ExtTypes.INTERVAL_MONTH, Integer.BYTES, 7, 7, false, false, false), INTERVAL_DAY( ExtTypes.INTERVAL_DAY, Long.BYTES, 23, 23, false, false, false), @@ -126,7 +126,12 @@ public enum DataType { /** - * Elasticsearch type name + * Type's name used for error messages and column info for the clients + */ + public final String typeName; + + /** + * Elasticsearch data type that it maps to */ public final String esType; @@ -176,7 +181,13 @@ public enum DataType { DataType(SQLType sqlType, int size, int defaultPrecision, int displaySize, boolean isInteger, boolean isRational, boolean defaultDocValues) { - this.esType = name().toLowerCase(Locale.ROOT); + this(null, sqlType, size, defaultPrecision, displaySize, isInteger, isRational, defaultDocValues); + } + + DataType(String esType, SQLType sqlType, int size, int defaultPrecision, int displaySize, boolean isInteger, + boolean isRational, boolean defaultDocValues) { + this.typeName = name().toLowerCase(Locale.ROOT); + this.esType = esType; this.sqlType = sqlType; this.size = size; this.defaultPrecision = defaultPrecision; @@ -228,8 +239,6 @@ public static DataType fromOdbcType(String odbcType) { /** * Creates returns DataType enum corresponding to the specified es type - *

- * For any dataType DataType.fromTypeName(dataType.esType) == dataType */ public static DataType fromTypeName(String esType) { String uppercase = esType.toUpperCase(Locale.ROOT); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/Schema.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/Schema.java index 62a7881b6adc1..6faf2e5b224f4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/Schema.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/type/Schema.java @@ -120,9 +120,9 @@ public String toString() { } sb.append(names.get(i)); sb.append(":"); - sb.append(types.get(i).esType); + sb.append(types.get(i).typeName); } sb.append("]"); return sb.toString(); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java index bb328b2d8ffdc..6123bdf5d8fbb 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolverTests.java @@ -154,7 +154,7 @@ public static Map> fromMappings(EsIndex.. if (entry.getValue().size() > 1) { for (EsIndex index : indices) { EsField field = index.mapping().get(fieldName); - UpdateableFieldCapabilities fieldCaps = (UpdateableFieldCapabilities) caps.get(field.getDataType().esType); + UpdateableFieldCapabilities fieldCaps = (UpdateableFieldCapabilities) caps.get(field.getDataType().typeName); fieldCaps.indices.add(index.name()); } //TODO: what about nonAgg/SearchIndices? @@ -171,7 +171,7 @@ private static void addFieldCaps(String parent, EsField field, String indexName, map = new HashMap<>(); merged.put(fieldName, map); } - FieldCapabilities caps = map.computeIfAbsent(field.getDataType().esType, + FieldCapabilities caps = map.computeIfAbsent(field.getDataType().typeName, esType -> new UpdateableFieldCapabilities(fieldName, esType, isSearchable(field.getDataType()), isAggregatable(field.getDataType()))); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/ParameterTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/ParameterTests.java index 52f14550b1449..97b7415786341 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/ParameterTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/ParameterTests.java @@ -28,7 +28,7 @@ public class ParameterTests extends ESTestCase { public void testSingleParameter() { Expression expression = new SqlParser().createExpression("a = \n?", Collections.singletonList( - new SqlTypedParamValue(DataType.KEYWORD.esType, "foo") + new SqlTypedParamValue(DataType.KEYWORD.typeName, "foo") )); logger.info(expression); assertThat(expression, instanceOf(Equals.class)); @@ -42,10 +42,10 @@ public void testSingleParameter() { public void testMultipleParameters() { Expression expression = new SqlParser().createExpression("(? + ? * ?) 
- ?", Arrays.asList( - new SqlTypedParamValue(DataType.LONG.esType, 1L), - new SqlTypedParamValue(DataType.LONG.esType, 2L), - new SqlTypedParamValue(DataType.LONG.esType, 3L), - new SqlTypedParamValue(DataType.LONG.esType, 4L) + new SqlTypedParamValue(DataType.LONG.typeName, 1L), + new SqlTypedParamValue(DataType.LONG.typeName, 2L), + new SqlTypedParamValue(DataType.LONG.typeName, 3L), + new SqlTypedParamValue(DataType.LONG.typeName, 4L) )); assertThat(expression, instanceOf(Sub.class)); Sub sub = (Sub) expression; @@ -62,9 +62,9 @@ public void testMultipleParameters() { public void testNotEnoughParameters() { ParsingException ex = expectThrows(ParsingException.class, () -> new SqlParser().createExpression("(? + ? * ?) - ?", Arrays.asList( - new SqlTypedParamValue(DataType.LONG.esType, 1L), - new SqlTypedParamValue(DataType.LONG.esType, 2L), - new SqlTypedParamValue(DataType.LONG.esType, 3L) + new SqlTypedParamValue(DataType.LONG.typeName, 1L), + new SqlTypedParamValue(DataType.LONG.typeName, 2L), + new SqlTypedParamValue(DataType.LONG.typeName, 3L) ))); assertThat(ex.getMessage(), containsString("Not enough actual parameters")); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java index 01b1d0d077930..8cbb0b528e9a6 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java @@ -144,9 +144,9 @@ public void testFunctionWithFunctionWithArg() { public void testFunctionWithFunctionWithArgAndParams() { String e = "POWER(?, {fn POWER({fn ABS(?)}, {fN ABS(?)})})"; Function f = (Function) parser.createExpression(e, - asList(new SqlTypedParamValue(DataType.LONG.esType, 1), - new SqlTypedParamValue(DataType.LONG.esType, 1), - new SqlTypedParamValue(DataType.LONG.esType, 1))); + asList(new SqlTypedParamValue(DataType.LONG.typeName, 1), + new SqlTypedParamValue(DataType.LONG.typeName, 1), + new SqlTypedParamValue(DataType.LONG.typeName, 1))); assertEquals(e, f.sourceText()); assertEquals(2, f.arguments().size()); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/LikeEscapingParsingTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/LikeEscapingParsingTests.java index 5221f9695699d..a9b9723c8cc1f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/LikeEscapingParsingTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/LikeEscapingParsingTests.java @@ -13,11 +13,10 @@ import org.elasticsearch.xpack.sql.type.DataType; import static java.util.Collections.singletonList; +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; -import static org.elasticsearch.common.logging.LoggerMessageFormat.format; - public class LikeEscapingParsingTests extends ESTestCase { private final SqlParser parser = new SqlParser(); @@ -33,7 +32,7 @@ private LikePattern like(String pattern) { Expression exp = null; boolean parameterized = randomBoolean(); if (parameterized) { - exp = parser.createExpression("exp LIKE ?", singletonList(new SqlTypedParamValue(DataType.KEYWORD.esType, pattern))); + exp = parser.createExpression("exp LIKE ?", singletonList(new SqlTypedParamValue(DataType.KEYWORD.typeName, pattern))); } 
else { exp = parser.createExpression(format(null, "exp LIKE '{}'", pattern)); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java index 9487986a711e7..d319fdb2a8feb 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plan/logical/command/sys/SysTablesTests.java @@ -231,7 +231,7 @@ public void testSysTablesTypesEnumerationWoString() throws Exception { } private SqlTypedParamValue param(Object value) { - return new SqlTypedParamValue(DataTypes.fromJava(value).esType, value); + return new SqlTypedParamValue(DataTypes.fromJava(value).typeName, value); } private Tuple sql(String sql, List params) { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java index 2cad625889474..ef7cdf54b89ab 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java @@ -632,16 +632,16 @@ public void testTopHitsAggregationWithOneArg() { "\"sort\":[{\"keyword\":{\"order\":\"asc\",\"missing\":\"_last\",\"unmapped_type\":\"keyword\"}}]}}}}}")); } { - PhysicalPlan p = optimizeAndPlan("SELECT LAST(keyword) FROM test"); + PhysicalPlan p = optimizeAndPlan("SELECT LAST(date) FROM test"); assertEquals(EsQueryExec.class, p.getClass()); EsQueryExec eqe = (EsQueryExec) p; assertEquals(1, eqe.output().size()); - assertEquals("LAST(keyword)", eqe.output().get(0).qualifiedName()); - assertTrue(eqe.output().get(0).dataType() == DataType.KEYWORD); + assertEquals("LAST(date)", eqe.output().get(0).qualifiedName()); + assertTrue(eqe.output().get(0).dataType() == DataType.DATETIME); assertThat(eqe.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""), endsWith("\"top_hits\":{\"from\":0,\"size\":1,\"version\":false,\"seq_no_primary_term\":false," + - "\"explain\":false,\"docvalue_fields\":[{\"field\":\"keyword\"}]," + - "\"sort\":[{\"keyword\":{\"order\":\"desc\",\"missing\":\"_last\",\"unmapped_type\":\"keyword\"}}]}}}}}")); + "\"explain\":false,\"docvalue_fields\":[{\"field\":\"date\",\"format\":\"epoch_millis\"}]," + + "\"sort\":[{\"date\":{\"order\":\"desc\",\"missing\":\"_last\",\"unmapped_type\":\"date\"}}]}}}}}")); } } @@ -661,17 +661,17 @@ public void testTopHitsAggregationWithTwoArgs() { } { - PhysicalPlan p = optimizeAndPlan("SELECT LAST(keyword, int) FROM test"); + PhysicalPlan p = optimizeAndPlan("SELECT LAST(date, int) FROM test"); assertEquals(EsQueryExec.class, p.getClass()); EsQueryExec eqe = (EsQueryExec) p; assertEquals(1, eqe.output().size()); - assertEquals("LAST(keyword, int)", eqe.output().get(0).qualifiedName()); - assertTrue(eqe.output().get(0).dataType() == DataType.KEYWORD); + assertEquals("LAST(date, int)", eqe.output().get(0).qualifiedName()); + assertTrue(eqe.output().get(0).dataType() == DataType.DATETIME); assertThat(eqe.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""), endsWith("\"top_hits\":{\"from\":0,\"size\":1,\"version\":false,\"seq_no_primary_term\":false," + - "\"explain\":false,\"docvalue_fields\":[{\"field\":\"keyword\"}]," + + 
"\"explain\":false,\"docvalue_fields\":[{\"field\":\"date\",\"format\":\"epoch_millis\"}]," + "\"sort\":[{\"int\":{\"order\":\"desc\",\"missing\":\"_last\",\"unmapped_type\":\"integer\"}}," + - "{\"keyword\":{\"order\":\"desc\",\"missing\":\"_last\",\"unmapped_type\":\"keyword\"}}]}}}}}")); + "{\"date\":{\"order\":\"desc\",\"missing\":\"_last\",\"unmapped_type\":\"date\"}}]}}}}}")); } } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java index 546d276e4ceb5..73b4ea8fa8daa 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java @@ -463,7 +463,7 @@ public void testCommonType() { public void testEsDataTypes() { for (DataType type : values()) { if (type != DATE) { // Doesn't have a corresponding type in ES - assertEquals(type, fromTypeName(type.esType)); + assertEquals(type, fromTypeName(type.typeName)); } } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java index a09a28ced7d5a..2a2488dda722f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java @@ -171,20 +171,20 @@ public void testNestedDoc() { public void testGeoField() { Map mapping = loadMapping("mapping-geo.json"); EsField dt = mapping.get("location"); - assertThat(dt.getDataType().esType, is("unsupported")); + assertThat(dt.getDataType().typeName, is("unsupported")); } public void testIpField() { Map mapping = loadMapping("mapping-ip.json"); assertThat(mapping.size(), is(1)); EsField dt = mapping.get("ip_addr"); - assertThat(dt.getDataType().esType, is("ip")); + assertThat(dt.getDataType().typeName, is("ip")); } public void testUnsupportedTypes() { Map mapping = loadMapping("mapping-unsupported.json"); EsField dt = mapping.get("range"); - assertThat(dt.getDataType().esType, is("unsupported")); + assertThat(dt.getDataType().typeName, is("unsupported")); } public static Map loadMapping(String name) { From 2b6b85815b52009240f29116fd7684d764ba13ae Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Tue, 5 Feb 2019 14:42:31 -0700 Subject: [PATCH 10/19] Update ilm-api.asciidoc, point to REMOVE policy (#38235) (#38463) --- docs/reference/ilm/apis/ilm-api.asciidoc | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/reference/ilm/apis/ilm-api.asciidoc b/docs/reference/ilm/apis/ilm-api.asciidoc index edfc96d113fc7..aba8301a9a677 100644 --- a/docs/reference/ilm/apis/ilm-api.asciidoc +++ b/docs/reference/ilm/apis/ilm-api.asciidoc @@ -19,6 +19,7 @@ about Index Lifecycle Management. * <> * <> +* <> [float] [[ilm-api-management-endpoint]] From f939c3c5ef86278f735f9f5e812f1282606eba3f Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Tue, 5 Feb 2019 17:06:28 -0500 Subject: [PATCH 11/19] Assert job is not null in FullClusterRestartIT (#38218) `waitForRollUpJob` is an assertBusy that waits for the rollup job to appear in the tasks list, and waits for it to be a certain state. However, there was a null check around the state assertion, which meant if the job _was_ null, the assertion would be skipped, and the assertBusy would pass withouot an exception. 
This could then lead to downstream assertions to fail because the job was not actually ready, or in the wrong state. This changes the test to assert the job is not null, so the assertBusy operates as intended. --- .../xpack/restart/FullClusterRestartIT.java | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index d1aefb4000890..1e40b31a24c85 100644 --- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -659,9 +659,8 @@ private void assertRollUpJob(final String rollupJob) throws Exception { } Map getRollupJobResponse = entityAsMap(client().performRequest(getRollupJobRequest)); Map job = getJob(getRollupJobResponse, rollupJob); - if (job != null) { - assertThat(ObjectPath.eval("status.job_state", job), expectedStates); - } + assertNotNull(job); + assertThat(ObjectPath.eval("status.job_state", job), expectedStates); // check that the rollup job is started using the Tasks API final Request taskRequest = new Request("GET", "_tasks"); @@ -712,9 +711,8 @@ private void waitForRollUpJob(final String rollupJob, final Matcher expectedS assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); Map job = getJob(getRollupJobResponse, rollupJob); - if (job != null) { - assertThat(ObjectPath.eval("status.job_state", job), expectedStates); - } + assertNotNull(job); + assertThat(ObjectPath.eval("status.job_state", job), expectedStates); }, 30L, TimeUnit.SECONDS); } From fb0ec26fd4d5f0e53bc79ae06ba3f68c6454bd0d Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Tue, 5 Feb 2019 16:22:11 -0600 Subject: [PATCH 12/19] Set update mappings mater node timeout to 30 min (#38439) This is related to #35975. We do not want a slow master to fail a recovery from remote process due to a slow put mappings call. This commit increases the master node timeout on this call to 30 mins. 
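To illustrate the change before the diff (this mirrors the hunk below rather than introducing anything new), the follower now gives the master an explicit 30 minute budget for the mapping update it performs while restoring from the leader:

    // Sketch of the updated call site in CcrRepository#updateMappings: a slow master gets up to
    // 30 minutes to apply the mapping instead of the default master node timeout, so it cannot
    // by itself fail the recovery-from-remote process.
    final PutMappingRequest putMappingRequest = CcrRequests.putMappingRequest(followerIndex.getName(), mappingMetaData)
        .masterNodeTimeout(TimeValue.timeValueMinutes(30));
    followerClient.admin().indices().putMapping(putMappingRequest).actionGet(ccrSettings.getRecoveryActionTimeout());
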
--- .../org/elasticsearch/xpack/ccr/repository/CcrRepository.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java index 7ceaeb903ec0e..833cd474450ff 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java @@ -313,7 +313,8 @@ private void updateMappings(Client leaderClient, Index leaderIndex, long leaderM final IndexMetaData leaderIndexMetadata = indexMetadataFuture.actionGet(ccrSettings.getRecoveryActionTimeout()); final MappingMetaData mappingMetaData = leaderIndexMetadata.mapping(); if (mappingMetaData != null) { - final PutMappingRequest putMappingRequest = CcrRequests.putMappingRequest(followerIndex.getName(), mappingMetaData); + final PutMappingRequest putMappingRequest = CcrRequests.putMappingRequest(followerIndex.getName(), mappingMetaData) + .masterNodeTimeout(TimeValue.timeValueMinutes(30)); followerClient.admin().indices().putMapping(putMappingRequest).actionGet(ccrSettings.getRecoveryActionTimeout()); } } From 79a45b47da618bd4529ef67051d6f74e2da721ce Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 5 Feb 2019 17:43:41 -0500 Subject: [PATCH 13/19] Recover retention leases during peer recovery (#38435) This commit integrates retention leases with recovery. With this change, we copy the current retention leases on primary to the replica during phase two of recovery. At this point, the replica will have been added to the replication group and so is already receiving retention lease sync requests from the primary. This means that if any retention lease syncs are triggered on the primary after we sample the retention leases here during phase two, that sync request will also arrive on the replica ensuring that the replica is from this point on up to date with the retention leases on the primary. We have to copy these during phase two since we will be applying indexing operations, potentially triggering merges, and therefore must ensure the correct retention leases are in place beforehand. 
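As a condensed sketch of the flow described above (names follow the diff below; batching, error handling, and wire serialization are elided), the primary samples its retention leases when it starts phase two, and the replica installs them before replaying any shipped operations:

    // Primary side (RecoverySourceHandler): capture the current leases and hand them to
    // phase two, which ships them to the target alongside the translog batches.
    final RetentionLeases retentionLeases = shard.getRetentionLeases();
    phase2(startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, phase2Snapshot,
        maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes, retentionLeases, sendSnapshotStep);

    // Replica side (RecoveryTarget#indexTranslogOperations): apply the leases first so the
    // retention policy is in place before any operations are indexed and merges can trigger.
    indexShard().updateRetentionLeasesOnReplica(retentionLeases);
    for (Translog.Operation operation : operations) {
        indexShard().applyTranslogOperation(operation, Engine.Operation.Origin.PEER_RECOVERY);
    }
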
--- .../elasticsearch/index/shard/IndexShard.java | 3 +- .../recovery/PeerRecoveryTargetService.java | 26 ++++--- .../recovery/RecoverySourceHandler.java | 75 ++++++++++++++----- .../indices/recovery/RecoveryTarget.java | 15 +++- .../recovery/RecoveryTargetHandler.java | 20 +++-- .../RecoveryTranslogOperationsRequest.java | 25 ++++++- .../recovery/RemoteRecoveryTargetHandler.java | 18 ++++- .../RecoveryDuringReplicationTests.java | 44 ++++++++--- .../index/seqno/RetentionLeaseSyncIT.java | 65 +++++++++++++++- .../index/shard/IndexShardTests.java | 73 +++++++++++++----- .../recovery/RecoverySourceHandlerTests.java | 24 +++--- 11 files changed, 305 insertions(+), 83 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index dfecdc173951e..22976af581be6 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -132,6 +132,7 @@ import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; import org.elasticsearch.indices.recovery.RecoveryFailedException; import org.elasticsearch.indices.recovery.RecoveryState; +import org.elasticsearch.indices.recovery.RecoveryTarget; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; import org.elasticsearch.rest.RestStatus; @@ -3082,7 +3083,7 @@ public long getMaxSeqNoOfUpdatesOrDeletes() { * which is at least the value of the max_seq_no_of_updates marker on the primary after that operation was executed on the primary. * * @see #acquireReplicaOperationPermit(long, long, long, ActionListener, String, Object) - * @see org.elasticsearch.indices.recovery.RecoveryTarget#indexTranslogOperations(List, int, long, long, ActionListener) + * @see RecoveryTarget#indexTranslogOperations(List, int, long, long, RetentionLeases, ActionListener) */ public void advanceMaxSeqNoOfUpdatesOrDeletes(long seqNo) { assert seqNo != UNASSIGNED_SEQ_NO diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java index 74585dcc261c2..068b92991db09 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java @@ -518,17 +518,21 @@ public void onTimeout(TimeValue timeout) { } }); }; - recoveryTarget.indexTranslogOperations(request.operations(), request.totalTranslogOps(), - request.maxSeenAutoIdTimestampOnPrimary(), request.maxSeqNoOfUpdatesOrDeletesOnPrimary(), - ActionListener.wrap( - checkpoint -> listener.onResponse(new RecoveryTranslogOperationsResponse(checkpoint)), - e -> { - if (e instanceof MapperException) { - retryOnMappingException.accept(e); - } else { - listener.onFailure(e); - } - }) + recoveryTarget.indexTranslogOperations( + request.operations(), + request.totalTranslogOps(), + request.maxSeenAutoIdTimestampOnPrimary(), + request.maxSeqNoOfUpdatesOrDeletesOnPrimary(), + request.retentionLeases(), + ActionListener.wrap( + checkpoint -> listener.onResponse(new RecoveryTranslogOperationsResponse(checkpoint)), + e -> { + if (e instanceof MapperException) { + retryOnMappingException.accept(e); + } else { + listener.onFailure(e); + } + }) ); } } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java 
b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index f360a68b7a83c..a6973a46926a7 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -50,6 +50,7 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.RecoveryEngineException; import org.elasticsearch.index.seqno.LocalCheckpointTracker; +import org.elasticsearch.index.seqno.RetentionLeases; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardClosedException; @@ -231,8 +232,16 @@ public void recoverToTarget(ActionListener listener) { // are at least as high as the corresponding values on the primary when any of these operations were executed on it. final long maxSeenAutoIdTimestamp = shard.getMaxSeenAutoIdTimestamp(); final long maxSeqNoOfUpdatesOrDeletes = shard.getMaxSeqNoOfUpdatesOrDeletes(); - phase2(startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, phase2Snapshot, maxSeenAutoIdTimestamp, - maxSeqNoOfUpdatesOrDeletes, sendSnapshotStep); + final RetentionLeases retentionLeases = shard.getRetentionLeases(); + phase2( + startingSeqNo, + requiredSeqNoRangeStart, + endingSeqNo, + phase2Snapshot, + maxSeenAutoIdTimestamp, + maxSeqNoOfUpdatesOrDeletes, + retentionLeases, + sendSnapshotStep); sendSnapshotStep.whenComplete( r -> IOUtils.close(phase2Snapshot), e -> { @@ -517,8 +526,15 @@ void prepareTargetForTranslog(boolean fileBasedRecovery, int totalTranslogOps, A * @param maxSeqNoOfUpdatesOrDeletes the max seq_no of updates or deletes on the primary after these operations were executed on it. * @param listener a listener which will be notified with the local checkpoint on the target. 
*/ - void phase2(long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, Translog.Snapshot snapshot, long maxSeenAutoIdTimestamp, - long maxSeqNoOfUpdatesOrDeletes, ActionListener listener) throws IOException { + void phase2( + final long startingSeqNo, + final long requiredSeqNoRangeStart, + final long endingSeqNo, + final Translog.Snapshot snapshot, + final long maxSeenAutoIdTimestamp, + final long maxSeqNoOfUpdatesOrDeletes, + final RetentionLeases retentionLeases, + final ActionListener listener) throws IOException { assert requiredSeqNoRangeStart <= endingSeqNo + 1: "requiredSeqNoRangeStart " + requiredSeqNoRangeStart + " is larger than endingSeqNo " + endingSeqNo; assert startingSeqNo <= requiredSeqNoRangeStart : @@ -584,25 +600,50 @@ void phase2(long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, listener::onFailure ); - sendBatch(readNextBatch, true, SequenceNumbers.UNASSIGNED_SEQ_NO, snapshot.totalOperations(), - maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes, batchedListener); + sendBatch( + readNextBatch, + true, + SequenceNumbers.UNASSIGNED_SEQ_NO, + snapshot.totalOperations(), + maxSeenAutoIdTimestamp, + maxSeqNoOfUpdatesOrDeletes, + retentionLeases, + batchedListener); } - private void sendBatch(CheckedSupplier, IOException> nextBatch, boolean firstBatch, - long targetLocalCheckpoint, int totalTranslogOps, long maxSeenAutoIdTimestamp, - long maxSeqNoOfUpdatesOrDeletes, ActionListener listener) throws IOException { + private void sendBatch( + final CheckedSupplier, IOException> nextBatch, + final boolean firstBatch, + final long targetLocalCheckpoint, + final int totalTranslogOps, + final long maxSeenAutoIdTimestamp, + final long maxSeqNoOfUpdatesOrDeletes, + final RetentionLeases retentionLeases, + final ActionListener listener) throws IOException { final List operations = nextBatch.get(); // send the leftover operations or if no operations were sent, request the target to respond with its local checkpoint if (operations.isEmpty() == false || firstBatch) { cancellableThreads.execute(() -> { - recoveryTarget.indexTranslogOperations(operations, totalTranslogOps, maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes, - ActionListener.wrap( - newCheckpoint -> { - sendBatch(nextBatch, false, SequenceNumbers.max(targetLocalCheckpoint, newCheckpoint), - totalTranslogOps, maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes, listener); - }, - listener::onFailure - )); + recoveryTarget.indexTranslogOperations( + operations, + totalTranslogOps, + maxSeenAutoIdTimestamp, + maxSeqNoOfUpdatesOrDeletes, + retentionLeases, + ActionListener.wrap( + newCheckpoint -> { + sendBatch( + nextBatch, + false, + SequenceNumbers.max(targetLocalCheckpoint, newCheckpoint), + totalTranslogOps, + maxSeenAutoIdTimestamp, + maxSeqNoOfUpdatesOrDeletes, + retentionLeases, + listener); + }, + listener::onFailure + )); }); } else { listener.onResponse(targetLocalCheckpoint); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java index b300490542968..e63b9ba8fd5ea 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java @@ -43,6 +43,7 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.mapper.MapperException; import org.elasticsearch.index.seqno.ReplicationTracker; +import org.elasticsearch.index.seqno.RetentionLeases; import 
org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardNotRecoveringException; @@ -397,8 +398,13 @@ public void handoffPrimaryContext(final ReplicationTracker.PrimaryContext primar } @Override - public void indexTranslogOperations(List operations, int totalTranslogOps, long maxSeenAutoIdTimestampOnPrimary, - long maxSeqNoOfDeletesOrUpdatesOnPrimary, ActionListener listener) { + public void indexTranslogOperations( + final List operations, + final int totalTranslogOps, + final long maxSeenAutoIdTimestampOnPrimary, + final long maxSeqNoOfDeletesOrUpdatesOnPrimary, + final RetentionLeases retentionLeases, + final ActionListener listener) { ActionListener.completeWith(listener, () -> { final RecoveryState.Translog translog = state().getTranslog(); translog.totalOperations(totalTranslogOps); @@ -418,6 +424,11 @@ public void indexTranslogOperations(List operations, int tot * replaying any of these operations will be at least the max_seq_no_of_updates on the primary when that op was executed on. */ indexShard().advanceMaxSeqNoOfUpdatesOrDeletes(maxSeqNoOfDeletesOrUpdatesOnPrimary); + /* + * We have to update the retention leases before we start applying translog operations to ensure we are retaining according to + * the policy. + */ + indexShard().updateRetentionLeasesOnReplica(retentionLeases); for (Translog.Operation operation : operations) { Engine.Result result = indexShard().applyTranslogOperation(operation, Engine.Operation.Origin.PEER_RECOVERY); if (result.getResultType() == Engine.Result.Type.MAPPING_UPDATE_REQUIRED) { diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java index 0fa5e94c63cd2..be7c00d52c94d 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.index.seqno.ReplicationTracker; +import org.elasticsearch.index.seqno.RetentionLeases; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreFileMetaData; import org.elasticsearch.index.translog.Translog; @@ -32,14 +33,15 @@ public interface RecoveryTargetHandler { /** * Prepares the target to receive translog operations, after all file have been copied - * @param fileBasedRecovery whether or not this call is part of an file based recovery - * @param totalTranslogOps total translog operations expected to be sent + * + * @param fileBasedRecovery whether or not this call is part of an file based recovery + * @param totalTranslogOps total translog operations expected to be sent */ void prepareForTranslogOperations(boolean fileBasedRecovery, int totalTranslogOps, ActionListener listener); /** - * The finalize request refreshes the engine now that new segments are available, enables garbage collection of tombstone files, and - * updates the global checkpoint. + * The finalize request refreshes the engine now that new segments are available, enables garbage collection of tombstone files, updates + * the global checkpoint. 
* * @param globalCheckpoint the global checkpoint on the recovery source * @param listener the listener which will be notified when this method is completed @@ -67,11 +69,17 @@ public interface RecoveryTargetHandler { * @param maxSeqNoOfUpdatesOrDeletesOnPrimary the max seq_no of update operations (index operations overwrite Lucene) or delete ops on * the primary shard when capturing these operations. This value is at least as high as the * max_seq_no_of_updates on the primary was when any of these ops were processed on it. + * @param retentionLeases the retention leases on the primary * @param listener a listener which will be notified with the local checkpoint on the target * after these operations are successfully indexed on the target. */ - void indexTranslogOperations(List operations, int totalTranslogOps, long maxSeenAutoIdTimestampOnPrimary, - long maxSeqNoOfUpdatesOrDeletesOnPrimary, ActionListener listener); + void indexTranslogOperations( + List operations, + int totalTranslogOps, + long maxSeenAutoIdTimestampOnPrimary, + long maxSeqNoOfUpdatesOrDeletesOnPrimary, + RetentionLeases retentionLeases, + ActionListener listener); /** * Notifies the target of the files it is going to receive diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java index 0ae5d507eb357..239e423b3aaf9 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.seqno.RetentionLeases; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog; @@ -39,18 +40,26 @@ public class RecoveryTranslogOperationsRequest extends TransportRequest { private int totalTranslogOps = RecoveryState.Translog.UNKNOWN; private long maxSeenAutoIdTimestampOnPrimary; private long maxSeqNoOfUpdatesOrDeletesOnPrimary; + private RetentionLeases retentionLeases; public RecoveryTranslogOperationsRequest() { } - RecoveryTranslogOperationsRequest(long recoveryId, ShardId shardId, List operations, int totalTranslogOps, - long maxSeenAutoIdTimestampOnPrimary, long maxSeqNoOfUpdatesOrDeletesOnPrimary) { + RecoveryTranslogOperationsRequest( + final long recoveryId, + final ShardId shardId, + final List operations, + final int totalTranslogOps, + final long maxSeenAutoIdTimestampOnPrimary, + final long maxSeqNoOfUpdatesOrDeletesOnPrimary, + final RetentionLeases retentionLeases) { this.recoveryId = recoveryId; this.shardId = shardId; this.operations = operations; this.totalTranslogOps = totalTranslogOps; this.maxSeenAutoIdTimestampOnPrimary = maxSeenAutoIdTimestampOnPrimary; this.maxSeqNoOfUpdatesOrDeletesOnPrimary = maxSeqNoOfUpdatesOrDeletesOnPrimary; + this.retentionLeases = retentionLeases; } public long recoveryId() { @@ -77,6 +86,10 @@ public long maxSeqNoOfUpdatesOrDeletesOnPrimary() { return maxSeqNoOfUpdatesOrDeletesOnPrimary; } + public RetentionLeases retentionLeases() { + return retentionLeases; + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); @@ -95,6 +108,11 @@ public void readFrom(StreamInput in) 
throws IOException { // UNASSIGNED_SEQ_NO means uninitialized and replica won't enable optimization using seq_no maxSeqNoOfUpdatesOrDeletesOnPrimary = SequenceNumbers.UNASSIGNED_SEQ_NO; } + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + retentionLeases = new RetentionLeases(in); + } else { + retentionLeases = RetentionLeases.EMPTY; + } } @Override @@ -110,5 +128,8 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getVersion().onOrAfter(Version.V_6_5_0)) { out.writeZLong(maxSeqNoOfUpdatesOrDeletesOnPrimary); } + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + retentionLeases.writeTo(out); + } } } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java index 0799cc6595189..436d59dba85ad 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.index.seqno.ReplicationTracker; +import org.elasticsearch.index.seqno.RetentionLeases; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreFileMetaData; @@ -113,10 +114,21 @@ public void handoffPrimaryContext(final ReplicationTracker.PrimaryContext primar } @Override - public void indexTranslogOperations(List operations, int totalTranslogOps, long maxSeenAutoIdTimestampOnPrimary, - long maxSeqNoOfDeletesOrUpdatesOnPrimary, ActionListener listener) { + public void indexTranslogOperations( + final List operations, + final int totalTranslogOps, + final long maxSeenAutoIdTimestampOnPrimary, + final long maxSeqNoOfDeletesOrUpdatesOnPrimary, + final RetentionLeases retentionLeases, + final ActionListener listener) { final RecoveryTranslogOperationsRequest request = new RecoveryTranslogOperationsRequest( - recoveryId, shardId, operations, totalTranslogOps, maxSeenAutoIdTimestampOnPrimary, maxSeqNoOfDeletesOrUpdatesOnPrimary); + recoveryId, + shardId, + operations, + totalTranslogOps, + maxSeenAutoIdTimestampOnPrimary, + maxSeqNoOfDeletesOrUpdatesOnPrimary, + retentionLeases); transportService.submitRequest(targetNode, PeerRecoveryTargetService.Actions.TRANSLOG_OPS, request, translogOpsRequestOptions, new ActionListenerResponseHandler<>(ActionListener.wrap(r -> listener.onResponse(r.localCheckpoint), listener::onFailure), RecoveryTranslogOperationsResponse::new, ThreadPool.Names.GENERIC)); diff --git a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java index 53db2b7dd8a9e..725225773a682 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java @@ -48,6 +48,7 @@ import org.elasticsearch.index.engine.InternalEngineFactory; import org.elasticsearch.index.engine.InternalEngineTests; import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.seqno.RetentionLeases; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardTestCase; @@ -502,13 
+503,17 @@ protected EngineFactory getEngineFactory(ShardRouting routing) { AtomicBoolean opsSent = new AtomicBoolean(false); final Future recoveryFuture = shards.asyncRecoverReplica(newReplica, (indexShard, node) -> { recoveryStart.countDown(); - return new RecoveryTarget(indexShard, node, recoveryListener, l -> { - }) { + return new RecoveryTarget(indexShard, node, recoveryListener, l -> {}) { @Override - public void indexTranslogOperations(List operations, int totalTranslogOps, - long maxSeenAutoIdTimestamp, long msu, ActionListener listener) { + public void indexTranslogOperations( + final List operations, + final int totalTranslogOps, + final long maxSeenAutoIdTimestamp, + final long msu, + final RetentionLeases retentionLeases, + final ActionListener listener) { opsSent.set(true); - super.indexTranslogOperations(operations, totalTranslogOps, maxSeenAutoIdTimestamp, msu, listener); + super.indexTranslogOperations(operations, totalTranslogOps, maxSeenAutoIdTimestamp, msu, retentionLeases, listener); } }; }); @@ -575,9 +580,13 @@ protected EngineFactory getEngineFactory(final ShardRouting routing) { replica, (indexShard, node) -> new RecoveryTarget(indexShard, node, recoveryListener, l -> {}) { @Override - public void indexTranslogOperations(final List operations, final int totalTranslogOps, - final long maxAutoIdTimestamp, long maxSeqNoOfUpdates, - ActionListener listener) { + public void indexTranslogOperations( + final List operations, + final int totalTranslogOps, + final long maxAutoIdTimestamp, + final long maxSeqNoOfUpdates, + final RetentionLeases retentionLeases, + final ActionListener listener) { // index a doc which is not part of the snapshot, but also does not complete on replica replicaEngineFactory.latchIndexers(1); threadPool.generic().submit(() -> { @@ -604,7 +613,13 @@ public void indexTranslogOperations(final List operations, f } catch (InterruptedException e) { throw new AssertionError(e); } - super.indexTranslogOperations(operations, totalTranslogOps, maxAutoIdTimestamp, maxSeqNoOfUpdates, listener); + super.indexTranslogOperations( + operations, + totalTranslogOps, + maxAutoIdTimestamp, + maxSeqNoOfUpdates, + retentionLeases, + listener); } }); pendingDocActiveWithExtraDocIndexed.await(); @@ -846,12 +861,17 @@ private void blockIfNeeded(RecoveryState.Stage currentStage) { } @Override - public void indexTranslogOperations(List operations, int totalTranslogOps, - long maxAutoIdTimestamp, long maxSeqNoOfUpdates, ActionListener listener) { + public void indexTranslogOperations( + final List operations, + final int totalTranslogOps, + final long maxAutoIdTimestamp, + final long maxSeqNoOfUpdates, + final RetentionLeases retentionLeases, + final ActionListener listener) { if (hasBlocked() == false) { blockIfNeeded(RecoveryState.Stage.TRANSLOG); } - super.indexTranslogOperations(operations, totalTranslogOps, maxAutoIdTimestamp, maxSeqNoOfUpdates, listener); + super.indexTranslogOperations(operations, totalTranslogOps, maxAutoIdTimestamp, maxSeqNoOfUpdates, retentionLeases, listener); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeaseSyncIT.java b/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeaseSyncIT.java index 2eb0b54f36127..9162c5fab8752 100644 --- a/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeaseSyncIT.java +++ b/server/src/test/java/org/elasticsearch/index/seqno/RetentionLeaseSyncIT.java @@ -95,7 +95,7 @@ public void testRetentionLeasesSyncedOnAdd() throws Exception { final String source = 
randomAlphaOfLength(8); final CountDownLatch latch = new CountDownLatch(1); final ActionListener listener = ActionListener.wrap(r -> latch.countDown(), e -> fail(e.toString())); - // simulate a peer-recovery which locks the soft-deletes policy on the primary. + // simulate a peer recovery which locks the soft deletes policy on the primary final Closeable retentionLock = randomBoolean() ? primary.acquireRetentionLockForPeerRecovery() : () -> {}; currentRetentionLeases.put(id, primary.addRetentionLease(id, retainingSequenceNumber, source, listener)); latch.await(); @@ -205,4 +205,67 @@ public void testRetentionLeasesSyncOnExpiration() throws Exception { } } + public void testRetentionLeasesSyncOnRecovery() throws Exception { + final int numberOfReplicas = 1; + /* + * We effectively disable the background sync to ensure that the retention leases are not synced in the background so that the only + * source of retention leases on the replicas would be from the commit point and recovery. + */ + final Settings settings = Settings.builder() + .put("index.number_of_shards", 1) + .put("index.number_of_replicas", 0) + .put(IndexService.RETENTION_LEASE_SYNC_INTERVAL_SETTING.getKey(), TimeValue.timeValueHours(24)) + .build(); + createIndex("index", settings); + ensureYellow("index"); + // exclude the replicas from being allocated + allowNodes("index", 1); + final AcknowledgedResponse response = client().admin() + .indices() + .prepareUpdateSettings("index").setSettings(Settings.builder().put("index.number_of_replicas", numberOfReplicas).build()) + .get(); + assertTrue(response.isAcknowledged()); + final String primaryShardNodeId = clusterService().state().routingTable().index("index").shard(0).primaryShard().currentNodeId(); + final String primaryShardNodeName = clusterService().state().nodes().get(primaryShardNodeId).getName(); + final IndexShard primary = internalCluster() + .getInstance(IndicesService.class, primaryShardNodeName) + .getShardOrNull(new ShardId(resolveIndex("index"), 0)); + final int length = randomIntBetween(1, 8); + final Map currentRetentionLeases = new HashMap<>(); + for (int i = 0; i < length; i++) { + final String id = randomValueOtherThanMany(currentRetentionLeases.keySet()::contains, () -> randomAlphaOfLength(8)); + final long retainingSequenceNumber = randomLongBetween(0, Long.MAX_VALUE); + final String source = randomAlphaOfLength(8); + final CountDownLatch latch = new CountDownLatch(1); + final ActionListener listener = ActionListener.wrap(r -> latch.countDown(), e -> fail(e.toString())); + currentRetentionLeases.put(id, primary.addRetentionLease(id, retainingSequenceNumber, source, listener)); + latch.await(); + /* + * Now renew the leases; since we do not flush immediately on renewal, this means that the latest retention leases will not be + * in the latest commit point and therefore not transferred during the file-copy phase of recovery. 
+ */ + currentRetentionLeases.put(id, primary.renewRetentionLease(id, retainingSequenceNumber, source)); + } + + // now allow the replicas to be allocated and wait for recovery to finalize + allowNodes("index", 1 + numberOfReplicas); + ensureGreen("index"); + + // check current retention leases have been synced to all replicas + for (final ShardRouting replicaShard : clusterService().state().routingTable().index("index").shard(0).replicaShards()) { + final String replicaShardNodeId = replicaShard.currentNodeId(); + final String replicaShardNodeName = clusterService().state().nodes().get(replicaShardNodeId).getName(); + final IndexShard replica = internalCluster() + .getInstance(IndicesService.class, replicaShardNodeName) + .getShardOrNull(new ShardId(resolveIndex("index"), 0)); + final Map retentionLeasesOnReplica = RetentionLeases.toMap(replica.getRetentionLeases()); + assertThat(retentionLeasesOnReplica, equalTo(currentRetentionLeases)); + + // check retention leases have been committed on the replica + final RetentionLeases replicaCommittedRetentionLeases = RetentionLeases.decodeRetentionLeases( + replica.acquireLastIndexCommit(false).getIndexCommit().getUserData().get(Engine.RETENTION_LEASES)); + assertThat(currentRetentionLeases, equalTo(RetentionLeases.toMap(replicaCommittedRetentionLeases))); + } + } + } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 2851f43b1b990..316ed39574c0c 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -101,6 +101,7 @@ import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.VersionFieldMapper; +import org.elasticsearch.index.seqno.RetentionLeases; import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.store.Store; @@ -2426,10 +2427,20 @@ public void testTranslogRecoverySyncsTranslog() throws IOException { new RecoveryTarget(shard, discoveryNode, recoveryListener, aLong -> { }) { @Override - public void indexTranslogOperations(List operations, int totalTranslogOps, long maxSeenAutoIdTimestamp, - long maxSeqNoOfUpdatesOrDeletes, ActionListener listener){ - super.indexTranslogOperations(operations, totalTranslogOps, maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes, - ActionListener.runAfter(listener, () -> assertFalse(replica.isSyncNeeded()))); + public void indexTranslogOperations( + final List operations, + final int totalTranslogOps, + final long maxSeenAutoIdTimestamp, + final long maxSeqNoOfUpdatesOrDeletes, + final RetentionLeases retentionLeases, + final ActionListener listener){ + super.indexTranslogOperations( + operations, + totalTranslogOps, + maxSeenAutoIdTimestamp, + maxSeqNoOfUpdatesOrDeletes, + retentionLeases, + ActionListener.runAfter(listener, () -> assertFalse(replica.isSyncNeeded()))); } }, true, true); @@ -2533,14 +2544,26 @@ public void testShardActiveDuringPeerRecovery() throws IOException { new RecoveryTarget(shard, discoveryNode, recoveryListener, aLong -> { }) { @Override - public void indexTranslogOperations(List operations, int totalTranslogOps, long maxAutoIdTimestamp, - long maxSeqNoOfUpdatesOrDeletes, ActionListener listener){ - super.indexTranslogOperations(operations, totalTranslogOps, maxAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes, - 
ActionListener.wrap(checkpoint -> { - listener.onResponse(checkpoint); - // Shard should now be active since we did recover: - assertTrue(replica.isActive()); - }, listener::onFailure)); + public void indexTranslogOperations( + final List operations, + final int totalTranslogOps, + final long maxAutoIdTimestamp, + final long maxSeqNoOfUpdatesOrDeletes, + final RetentionLeases retentionLeases, + final ActionListener listener){ + super.indexTranslogOperations( + operations, + totalTranslogOps, + maxAutoIdTimestamp, + maxSeqNoOfUpdatesOrDeletes, + retentionLeases, + ActionListener.wrap( + checkpoint -> { + listener.onResponse(checkpoint); + // Shard should now be active since we did recover: + assertTrue(replica.isActive()); + }, + listener::onFailure)); } }, false, true); @@ -2583,13 +2606,25 @@ public void prepareForTranslogOperations(boolean fileBasedRecovery, int totalTra } @Override - public void indexTranslogOperations(List operations, int totalTranslogOps, long maxAutoIdTimestamp, - long maxSeqNoOfUpdatesOrDeletes, ActionListener listener) { - super.indexTranslogOperations(operations, totalTranslogOps, maxAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes, - ActionListener.wrap(checkpoint -> { - assertListenerCalled.accept(replica); - listener.onResponse(checkpoint); - }, listener::onFailure)); + public void indexTranslogOperations( + final List operations, + final int totalTranslogOps, + final long maxAutoIdTimestamp, + final long maxSeqNoOfUpdatesOrDeletes, + final RetentionLeases retentionLeases, + final ActionListener listener) { + super.indexTranslogOperations( + operations, + totalTranslogOps, + maxAutoIdTimestamp, + maxSeqNoOfUpdatesOrDeletes, + retentionLeases, + ActionListener.wrap( + checkpoint -> { + assertListenerCalled.accept(replica); + listener.onResponse(checkpoint); + }, + listener::onFailure)); } @Override diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index cd495b7e17bee..8391827b2f83c 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -63,6 +63,7 @@ import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.seqno.ReplicationTracker; +import org.elasticsearch.index.seqno.RetentionLeases; import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexShard; @@ -238,7 +239,7 @@ public void testSendSnapshotSendsOps() throws IOException { RecoveryTargetHandler recoveryTarget = new TestRecoveryTargetHandler() { @Override public void indexTranslogOperations(List operations, int totalTranslogOps, long timestamp, long msu, - ActionListener listener) { + RetentionLeases retentionLeases, ActionListener listener) { shippedOps.addAll(operations); checkpointOnTarget.set(randomLongBetween(checkpointOnTarget.get(), Long.MAX_VALUE)); maybeExecuteAsync(() -> listener.onResponse(checkpointOnTarget.get())); @@ -247,7 +248,7 @@ public void indexTranslogOperations(List operations, int tot RecoverySourceHandler handler = new RecoverySourceHandler(shard, recoveryTarget, request, fileChunkSizeInBytes, between(1, 10)); PlainActionFuture future = new PlainActionFuture<>(); handler.phase2(startingSeqNo, requiredStartingSeqNo, endingSeqNo, 
newTranslogSnapshot(operations, Collections.emptyList()), - randomNonNegativeLong(), randomNonNegativeLong(), future); + randomNonNegativeLong(), randomNonNegativeLong(), RetentionLeases.EMPTY, future); final int expectedOps = (int) (endingSeqNo - startingSeqNo + 1); RecoverySourceHandler.SendSnapshotResult result = future.actionGet(); assertThat(result.totalOperations, equalTo(expectedOps)); @@ -265,7 +266,7 @@ public void indexTranslogOperations(List operations, int tot PlainActionFuture failedFuture = new PlainActionFuture<>(); expectThrows(IllegalStateException.class, () -> { handler.phase2(startingSeqNo, requiredStartingSeqNo, endingSeqNo, newTranslogSnapshot(operations, opsToSkip), - randomNonNegativeLong(), randomNonNegativeLong(), failedFuture); + randomNonNegativeLong(), randomNonNegativeLong(), RetentionLeases.EMPTY, failedFuture); failedFuture.actionGet(); }); } @@ -285,7 +286,7 @@ public void testSendSnapshotStopOnError() throws Exception { RecoveryTargetHandler recoveryTarget = new TestRecoveryTargetHandler() { @Override public void indexTranslogOperations(List operations, int totalTranslogOps, long timestamp, - long msu, ActionListener listener) { + long msu, RetentionLeases retentionLeases, ActionListener listener) { if (randomBoolean()) { maybeExecuteAsync(() -> listener.onResponse(SequenceNumbers.NO_OPS_PERFORMED)); } else { @@ -299,7 +300,7 @@ public void indexTranslogOperations(List operations, int tot final long startingSeqNo = randomLongBetween(0, ops.size() - 1L); final long endingSeqNo = randomLongBetween(startingSeqNo, ops.size() - 1L); handler.phase2(startingSeqNo, startingSeqNo, endingSeqNo, newTranslogSnapshot(ops, Collections.emptyList()), - randomNonNegativeLong(), randomNonNegativeLong(), future); + randomNonNegativeLong(), randomNonNegativeLong(), RetentionLeases.EMPTY, future); if (wasFailed.get()) { assertThat(expectThrows(RuntimeException.class, () -> future.actionGet()).getMessage(), equalTo("test - failed to index")); } @@ -498,11 +499,11 @@ void prepareTargetForTranslog(boolean fileBasedRecovery, int totalTranslogOps, A @Override void phase2(long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, Translog.Snapshot snapshot, - long maxSeenAutoIdTimestamp, long maxSeqNoOfUpdatesOrDeletes, + long maxSeenAutoIdTimestamp, long maxSeqNoOfUpdatesOrDeletes, RetentionLeases retentionLeases, ActionListener listener) throws IOException { phase2Called.set(true); super.phase2(startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, snapshot, - maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes, listener); + maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes, retentionLeases, listener); } }; @@ -716,8 +717,13 @@ public void handoffPrimaryContext(ReplicationTracker.PrimaryContext primaryConte } @Override - public void indexTranslogOperations(List operations, int totalTranslogOps, long timestamp, long msu, - ActionListener listener) { + public void indexTranslogOperations( + final List operations, + final int totalTranslogOps, + final long timestamp, + final long msu, + final RetentionLeases retentionLeases, + final ActionListener listener) { } @Override From 34f2cc78f661170ee3b3b2e703e6e06632189ef7 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 5 Feb 2019 23:56:59 +0100 Subject: [PATCH 14/19] Fix Master Failover and DataNode Leave Blocking Snapshot (#38460) * Closes #38447 --- .../java/org/elasticsearch/snapshots/SnapshotsService.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git 
a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 17306e1585ff8..e2628fda991bd 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -687,8 +687,9 @@ public void applyClusterState(ClusterChangedEvent event) { if (event.localNodeMaster()) { // We don't remove old master when master flips anymore. So, we need to check for change in master final SnapshotsInProgress snapshotsInProgress = event.state().custom(SnapshotsInProgress.TYPE); + final boolean newMaster = event.previousState().nodes().isLocalNodeElectedMaster() == false; if (snapshotsInProgress != null) { - if (removedNodesCleanupNeeded(snapshotsInProgress, event.nodesDelta().removedNodes())) { + if (newMaster || removedNodesCleanupNeeded(snapshotsInProgress, event.nodesDelta().removedNodes())) { processSnapshotsOnRemovedNodes(); } if (event.routingTableChanged() && waitingShardsStartedOrUnassigned(snapshotsInProgress, event)) { @@ -704,7 +705,7 @@ public void applyClusterState(ClusterChangedEvent event) { || entry.state() != State.INIT && completed(entry.shards().values()) ).forEach(this::endSnapshot); } - if (event.previousState().nodes().isLocalNodeElectedMaster() == false) { + if (newMaster) { finalizeSnapshotDeletionFromPreviousMaster(event); } } From 8972ebabddc3c69fcaf40887f6066961c391b70f Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 6 Feb 2019 00:04:43 +0100 Subject: [PATCH 15/19] Enable bwc tests now that #38443 is backported. (#38462) --- build.gradle | 4 ++-- .../xpack/core/ccr/action/PutAutoFollowPatternAction.java | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/build.gradle b/build.gradle index fcee85ff02754..d50801bd207f4 100644 --- a/build.gradle +++ b/build.gradle @@ -159,8 +159,8 @@ task verifyVersions { * the enabled state of every bwc task. It should be set back to true * after the backport of the backcompat code is complete. 
*/ -final boolean bwc_tests_enabled = false -final String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/38443" /* place a PR link here when committing bwc changes */ +final boolean bwc_tests_enabled = true +final String bwc_tests_disabled_issue = "" /* place a PR link here when committing bwc changes */ if (bwc_tests_enabled == false) { if (bwc_tests_disabled_issue.isEmpty()) { throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false") diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java index 1ae9801916bce..ec946ce51e821 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java @@ -153,7 +153,7 @@ public Request(StreamInput in) throws IOException { remoteCluster = in.readString(); leaderIndexPatterns = in.readStringList(); followIndexNamePattern = in.readOptionalString(); - if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + if (in.getVersion().onOrAfter(Version.V_6_7_0)) { parameters = new FollowParameters(in); } else { parameters = new FollowParameters(); @@ -177,7 +177,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(remoteCluster); out.writeStringCollection(leaderIndexPatterns); out.writeOptionalString(followIndexNamePattern); - if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + if (out.getVersion().onOrAfter(Version.V_6_7_0)) { parameters.writeTo(out); } else { out.writeOptionalVInt(parameters.maxReadRequestOperationCount); From 1aa32cac8fc2f50458d2c07b557befdac88be2de Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 5 Feb 2019 19:16:25 -0500 Subject: [PATCH 16/19] Disable BWC to backport recovering retention leases (#38477) This commit disables the BWC tests in preparation for backporting recovery of retention leases during peer recovery. --- build.gradle | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index d50801bd207f4..ac898482dbd3d 100644 --- a/build.gradle +++ b/build.gradle @@ -159,8 +159,8 @@ task verifyVersions { * the enabled state of every bwc task. It should be set back to true * after the backport of the backcompat code is complete. */ -final boolean bwc_tests_enabled = true -final String bwc_tests_disabled_issue = "" /* place a PR link here when committing bwc changes */ +final boolean bwc_tests_enabled = false +final String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/38435" /* place a PR link here when committing bwc changes */ if (bwc_tests_enabled == false) { if (bwc_tests_disabled_issue.isEmpty()) { throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false") From 57600c5acb876129158b97885820833da054cdc6 Mon Sep 17 00:00:00 2001 From: Yogesh Gaikwad <902768+bizybot@users.noreply.github.com> Date: Wed, 6 Feb 2019 11:21:54 +1100 Subject: [PATCH 17/19] Enable logs for intermittent test failure (#38426) I have not been able to reproduce the failing test scenario locally for #38408 and there are other similar tests which are running fine in the same test class. I am re-enabling the test with additional logs so that we can debug further on what's happening.
I will keep the issue open for now and look out for the builds to see if there are any related failures. --- .../security/authc/ApiKeyIntegTests.java | 21 ++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java index 499578e9cd3ae..69e008f60c696 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.security.authc; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.search.SearchResponse; @@ -21,6 +22,7 @@ import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.test.SecuritySettingsSourceField; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.action.CreateApiKeyResponse; import org.elasticsearch.xpack.core.security.action.GetApiKeyRequest; @@ -54,8 +56,11 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.isIn; import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +@TestLogging("org.elasticsearch.xpack.security.authc.ApiKeyService:TRACE") public class ApiKeyIntegTests extends SecurityIntegTestCase { @Override @@ -232,7 +237,6 @@ public void testInvalidateApiKeysForApiKeyName() throws InterruptedException, Ex verifyInvalidateResponse(1, responses, invalidateResponse); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/38408") public void testGetAndInvalidateApiKeysWithExpiredAndInvalidatedApiKey() throws Exception { List responses = createApiKeys(1, null); Instant created = Instant.now(); @@ -249,6 +253,9 @@ public void testGetAndInvalidateApiKeysWithExpiredAndInvalidatedApiKey() throws assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); docId.set(searchResponse.getHits().getAt(0).getId()); }); + logger.info("searched and found API key with doc id = " + docId.get()); + assertThat(docId.get(), is(notNullValue())); + assertThat(docId.get(), is(responses.get(0).getId())); // hack doc to modify the expiration time to the week before Instant weekBefore = created.minus(8L, ChronoUnit.DAYS); @@ -259,6 +266,11 @@ public void testGetAndInvalidateApiKeysWithExpiredAndInvalidatedApiKey() throws PlainActionFuture listener = new PlainActionFuture<>(); securityClient.invalidateApiKey(InvalidateApiKeyRequest.usingApiKeyId(responses.get(0).getId()), listener); InvalidateApiKeyResponse invalidateResponse = listener.get(); + if (invalidateResponse.getErrors().isEmpty() == false) { + logger.error("error occurred while invalidating API key by id : " + invalidateResponse.getErrors().stream() + .map(ElasticsearchException::getMessage) + .collect(Collectors.joining(", "))); + } verifyInvalidateResponse(1, responses, invalidateResponse); // try again @@ -303,6 +315,9 @@ public void testInvalidatedApiKeysDeletedByRemover() throws Exception { 
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); docId.set(searchResponse.getHits().getAt(0).getId()); }); + logger.info("searched and found API key with doc id = " + docId.get()); + assertThat(docId.get(), is(notNullValue())); + assertThat(docId.get(), isIn(responses.stream().map(CreateApiKeyResponse::getId).collect(Collectors.toList()))); AtomicBoolean deleteTriggered = new AtomicBoolean(false); assertBusy(() -> { @@ -333,6 +348,9 @@ public void testExpiredApiKeysDeletedAfter1Week() throws Exception { assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); docId.set(searchResponse.getHits().getAt(0).getId()); }); + logger.info("searched and found API key with doc id = " + docId.get()); + assertThat(docId.get(), is(notNullValue())); + assertThat(docId.get(), isIn(responses.stream().map(CreateApiKeyResponse::getId).collect(Collectors.toList()))); // hack doc to modify the expiration time to the week before Instant weekBefore = created.minus(8L, ChronoUnit.DAYS); @@ -490,6 +508,7 @@ private List createApiKeys(int noOfApiKeys, TimeValue expi assertNotNull(response.getKey()); responses.add(response); } + assertThat(responses.size(), is(noOfApiKeys)); return responses; } } From 517aa959845f0f1b52406f6d13c2f84af8fff49e Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Wed, 6 Feb 2019 12:05:44 +1100 Subject: [PATCH 18/19] Fix exit code in certutil packaging test (#38393) The exit code is different on Windows, and we don't really care which code it is; we just need to check that it's not 0 (success). --- .../java/org/elasticsearch/packaging/test/ArchiveTestCase.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java index 6261cd62a2b12..a6aad032baa5d 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java @@ -50,6 +50,7 @@ import static org.elasticsearch.packaging.util.ServerUtils.makeRequest; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.Matchers.isEmptyString; import static org.junit.Assume.assumeThat; @@ -293,7 +294,7 @@ public void test90SecurityCliPackaging() { // Ensure that the exit code from the java command is passed back up through the shell script result = sh.runIgnoreExitCode(bin.elasticsearchCertutil + " invalid-command"); - assertThat(result.exitCode, is(64)); + assertThat(result.exitCode, is(not(0))); assertThat(result.stdout, containsString("Unknown command [invalid-command]")); }; Platforms.onLinux(action); From e73c9c90ee8a77c8b97e6bbcfc540bb7a00b2dae Mon Sep 17 00:00:00 2001 From: Jay Modi Date: Tue, 5 Feb 2019 18:16:26 -0700 Subject: [PATCH 19/19] Add an authentication cache for API keys (#38469) This commit adds an authentication cache for API keys that caches the hash of an API key with a faster hash. This will enable better performance when API keys are used for bulk or heavy searching.
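In the diff below, ApiKeyService keys an in-memory cache by API key id and stores a ListenableFuture of a CachedApiKeyHashResult, so concurrent requests for the same key share a single verification against the strong (PBKDF2/bcrypt) hash and later requests only pay for a fast salted hash. The following is a rough standalone sketch of that idea only, built on plain JDK types; the class, field, and method names (FastHashApiKeyCacheSketch, slowVerify, authenticate) are invented for the example and do not come from this patch.

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BiPredicate;

/**
 * Sketch of the caching idea: pay for the strong hash (PBKDF2/bcrypt) once per
 * key id, then remember the outcome under a cheap salted SHA-256 digest of the
 * presented key so later requests skip the expensive check.
 */
final class FastHashApiKeyCacheSketch {

    /** Outcome of a previous verification, guarded by a fast salted digest of the key. */
    private static final class CachedResult {
        private final byte[] salt = new byte[16];
        private final byte[] digest;
        private final boolean verified;

        CachedResult(char[] apiKey, boolean verified) {
            new SecureRandom().nextBytes(salt);
            this.digest = fastDigest(salt, apiKey);
            this.verified = verified;
        }

        boolean sameKey(char[] apiKey) {
            return MessageDigest.isEqual(digest, fastDigest(salt, apiKey));
        }

        private static byte[] fastDigest(byte[] salt, char[] apiKey) {
            try {
                MessageDigest md = MessageDigest.getInstance("SHA-256");
                md.update(salt);
                md.update(new String(apiKey).getBytes(StandardCharsets.UTF_8));
                return md.digest();
            } catch (NoSuchAlgorithmException e) {
                throw new AssertionError(e); // SHA-256 is always available
            }
        }
    }

    private final ConcurrentHashMap<String, CachedResult> cache = new ConcurrentHashMap<>();
    private final BiPredicate<String, char[]> slowVerify; // stands in for the PBKDF2/bcrypt check

    FastHashApiKeyCacheSketch(BiPredicate<String, char[]> slowVerify) {
        this.slowVerify = slowVerify;
    }

    boolean authenticate(String apiKeyId, char[] apiKey) {
        CachedResult cached = cache.get(apiKeyId);
        if (cached != null && cached.sameKey(apiKey)) {
            return cached.verified;                          // cache hit: one SHA-256 round only
        }
        boolean verified = slowVerify.test(apiKeyId, apiKey); // miss or different key: slow path
        cache.put(apiKeyId, new CachedResult(apiKey, verified));
        return verified;
    }
}

The point of the salted fast digest is that a cached entry does not hold the key itself and cannot be replayed against the stored strong hash, while a matching entry lets repeated requests for the same key skip the deliberately slow strong-hash work.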
--- .../xpack/security/Security.java | 6 +- .../xpack/security/authc/ApiKeyService.java | 144 +++++++++++++++--- .../security/authc/ApiKeyServiceTests.java | 113 +++++++++++++- .../authc/AuthenticationServiceTests.java | 2 +- 4 files changed, 230 insertions(+), 35 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index bf4a7080e6d71..b5b10b06622fe 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -438,7 +438,8 @@ Collection createComponents(Client client, ThreadPool threadPool, Cluste rolesProviders.addAll(extension.getRolesProviders(settings, resourceWatcherService)); } - final ApiKeyService apiKeyService = new ApiKeyService(settings, Clock.systemUTC(), client, securityIndex.get(), clusterService); + final ApiKeyService apiKeyService = new ApiKeyService(settings, Clock.systemUTC(), client, securityIndex.get(), clusterService, + threadPool); components.add(apiKeyService); final CompositeRolesStore allRolesStore = new CompositeRolesStore(settings, fileRolesStore, nativeRolesStore, reservedRolesStore, privilegeStore, rolesProviders, threadPool.getThreadContext(), getLicenseState(), fieldPermissionsCache, apiKeyService); @@ -631,6 +632,9 @@ public static List> getSettings(boolean transportClientMode, List PASSWORD_HASHING_ALGORITHM = new Setting<>( "xpack.security.authc.api_key_hashing.algorithm", "pbkdf2", Function.identity(), v -> { @@ -117,6 +124,12 @@ public class ApiKeyService { TimeValue.MINUS_ONE, Property.NodeScope); public static final Setting DELETE_INTERVAL = Setting.timeSetting("xpack.security.authc.api_key.delete.interval", TimeValue.timeValueHours(24L), Property.NodeScope); + public static final Setting CACHE_HASH_ALGO_SETTING = Setting.simpleString("xpack.security.authc.api_key.cache.hash_algo", + "ssha256", Setting.Property.NodeScope); + public static final Setting CACHE_TTL_SETTING = Setting.timeSetting("xpack.security.authc.api_key.cache.ttl", + TimeValue.timeValueHours(24L), Property.NodeScope); + public static final Setting CACHE_MAX_KEYS_SETTING = Setting.intSetting("xpack.security.authc.api_key.cache.max_keys", + 10000, Property.NodeScope); private final Clock clock; private final Client client; @@ -127,11 +140,14 @@ public class ApiKeyService { private final Settings settings; private final ExpiredApiKeysRemover expiredApiKeysRemover; private final TimeValue deleteInterval; + private final Cache> apiKeyAuthCache; + private final Hasher cacheHasher; + private final ThreadPool threadPool; private volatile long lastExpirationRunMs; - public ApiKeyService(Settings settings, Clock clock, Client client, SecurityIndexManager securityIndex, - ClusterService clusterService) { + public ApiKeyService(Settings settings, Clock clock, Client client, SecurityIndexManager securityIndex, ClusterService clusterService, + ThreadPool threadPool) { this.clock = clock; this.client = client; this.securityIndex = securityIndex; @@ -141,6 +157,17 @@ public ApiKeyService(Settings settings, Clock clock, Client client, SecurityInde this.settings = settings; this.deleteInterval = DELETE_INTERVAL.get(settings); this.expiredApiKeysRemover = new ExpiredApiKeysRemover(settings, client); + this.threadPool = threadPool; + this.cacheHasher = Hasher.resolve(CACHE_HASH_ALGO_SETTING.get(settings)); + final TimeValue ttl = 
CACHE_TTL_SETTING.get(settings); + if (ttl.getNanos() > 0) { + this.apiKeyAuthCache = CacheBuilder.>builder() + .setExpireAfterWrite(ttl) + .setMaximumWeight(CACHE_MAX_KEYS_SETTING.get(settings)) + .build(); + } else { + this.apiKeyAuthCache = null; + } } /** @@ -363,8 +390,8 @@ private List parseRoleDescriptors(final String apiKeyId, final M * @param credentials the credentials provided by the user * @param listener the listener to notify after verification */ - static void validateApiKeyCredentials(Map source, ApiKeyCredentials credentials, Clock clock, - ActionListener listener) { + void validateApiKeyCredentials(Map source, ApiKeyCredentials credentials, Clock clock, + ActionListener listener) { final Boolean invalidated = (Boolean) source.get("api_key_invalidated"); if (invalidated == null) { listener.onResponse(AuthenticationResult.terminate("api key document is missing invalidated field", null)); @@ -375,33 +402,87 @@ static void validateApiKeyCredentials(Map source, ApiKeyCredenti if (apiKeyHash == null) { throw new IllegalStateException("api key hash is missing"); } - final boolean verified = verifyKeyAgainstHash(apiKeyHash, credentials); - - if (verified) { - final Long expirationEpochMilli = (Long) source.get("expiration_time"); - if (expirationEpochMilli == null || Instant.ofEpochMilli(expirationEpochMilli).isAfter(clock.instant())) { - final Map creator = Objects.requireNonNull((Map) source.get("creator")); - final String principal = Objects.requireNonNull((String) creator.get("principal")); - final Map metadata = (Map) creator.get("metadata"); - final Map roleDescriptors = (Map) source.get("role_descriptors"); - final Map limitedByRoleDescriptors = (Map) source.get("limited_by_role_descriptors"); - final String[] roleNames = (roleDescriptors != null) ? 
roleDescriptors.keySet().toArray(Strings.EMPTY_ARRAY) - : limitedByRoleDescriptors.keySet().toArray(Strings.EMPTY_ARRAY); - final User apiKeyUser = new User(principal, roleNames, null, null, metadata, true); - final Map authResultMetadata = new HashMap<>(); - authResultMetadata.put(API_KEY_ROLE_DESCRIPTORS_KEY, roleDescriptors); - authResultMetadata.put(API_KEY_LIMITED_ROLE_DESCRIPTORS_KEY, limitedByRoleDescriptors); - authResultMetadata.put(API_KEY_ID_KEY, credentials.getId()); - listener.onResponse(AuthenticationResult.success(apiKeyUser, authResultMetadata)); + + if (apiKeyAuthCache != null) { + final AtomicBoolean valueAlreadyInCache = new AtomicBoolean(true); + final ListenableFuture listenableCacheEntry; + try { + listenableCacheEntry = apiKeyAuthCache.computeIfAbsent(credentials.getId(), + k -> { + valueAlreadyInCache.set(false); + return new ListenableFuture<>(); + }); + } catch (ExecutionException e) { + listener.onFailure(e); + return; + } + + if (valueAlreadyInCache.get()) { + listenableCacheEntry.addListener(ActionListener.wrap(result -> { + if (result.success) { + if (result.verify(credentials.getKey())) { + // move on + validateApiKeyExpiration(source, credentials, clock, listener); + } else { + listener.onResponse(AuthenticationResult.unsuccessful("invalid credentials", null)); + } + } else if (result.verify(credentials.getKey())) { // same key, pass the same result + listener.onResponse(AuthenticationResult.unsuccessful("invalid credentials", null)); + } else { + apiKeyAuthCache.invalidate(credentials.getId(), listenableCacheEntry); + validateApiKeyCredentials(source, credentials, clock, listener); + } + }, listener::onFailure), + threadPool.generic(), threadPool.getThreadContext()); } else { - listener.onResponse(AuthenticationResult.terminate("api key is expired", null)); + final boolean verified = verifyKeyAgainstHash(apiKeyHash, credentials); + listenableCacheEntry.onResponse(new CachedApiKeyHashResult(verified, credentials.getKey())); + if (verified) { + // move on + validateApiKeyExpiration(source, credentials, clock, listener); + } else { + listener.onResponse(AuthenticationResult.unsuccessful("invalid credentials", null)); + } } } else { - listener.onResponse(AuthenticationResult.unsuccessful("invalid credentials", null)); + final boolean verified = verifyKeyAgainstHash(apiKeyHash, credentials); + if (verified) { + // move on + validateApiKeyExpiration(source, credentials, clock, listener); + } else { + listener.onResponse(AuthenticationResult.unsuccessful("invalid credentials", null)); + } } } } + // pkg private for testing + CachedApiKeyHashResult getFromCache(String id) { + return apiKeyAuthCache == null ? null : FutureUtils.get(apiKeyAuthCache.get(id), 0L, TimeUnit.MILLISECONDS); + } + + private void validateApiKeyExpiration(Map source, ApiKeyCredentials credentials, Clock clock, + ActionListener listener) { + final Long expirationEpochMilli = (Long) source.get("expiration_time"); + if (expirationEpochMilli == null || Instant.ofEpochMilli(expirationEpochMilli).isAfter(clock.instant())) { + final Map creator = Objects.requireNonNull((Map) source.get("creator")); + final String principal = Objects.requireNonNull((String) creator.get("principal")); + final Map metadata = (Map) creator.get("metadata"); + final Map roleDescriptors = (Map) source.get("role_descriptors"); + final Map limitedByRoleDescriptors = (Map) source.get("limited_by_role_descriptors"); + final String[] roleNames = (roleDescriptors != null) ? 
roleDescriptors.keySet().toArray(Strings.EMPTY_ARRAY) + : limitedByRoleDescriptors.keySet().toArray(Strings.EMPTY_ARRAY); + final User apiKeyUser = new User(principal, roleNames, null, null, metadata, true); + final Map authResultMetadata = new HashMap<>(); + authResultMetadata.put(API_KEY_ROLE_DESCRIPTORS_KEY, roleDescriptors); + authResultMetadata.put(API_KEY_LIMITED_ROLE_DESCRIPTORS_KEY, limitedByRoleDescriptors); + authResultMetadata.put(API_KEY_ID_KEY, credentials.getId()); + listener.onResponse(AuthenticationResult.success(apiKeyUser, authResultMetadata)); + } else { + listener.onResponse(AuthenticationResult.terminate("api key is expired", null)); + } + } + /** * Gets the API Key from the Authorization header if the header begins with * ApiKey @@ -851,4 +932,17 @@ public void getApiKeyForApiKeyName(String apiKeyName, ActionListener future = new PlainActionFuture<>(); - ApiKeyService.validateApiKeyCredentials(sourceMap, creds, Clock.systemUTC(), future); + service.validateApiKeyCredentials(sourceMap, creds, Clock.systemUTC(), future); AuthenticationResult result = future.get(); assertNotNull(result); assertTrue(result.isAuthenticated()); @@ -134,7 +140,7 @@ public void testValidateApiKey() throws Exception { sourceMap.put("expiration_time", Clock.systemUTC().instant().plus(1L, ChronoUnit.HOURS).toEpochMilli()); future = new PlainActionFuture<>(); - ApiKeyService.validateApiKeyCredentials(sourceMap, creds, Clock.systemUTC(), future); + service.validateApiKeyCredentials(sourceMap, creds, Clock.systemUTC(), future); result = future.get(); assertNotNull(result); assertTrue(result.isAuthenticated()); @@ -147,7 +153,7 @@ public void testValidateApiKey() throws Exception { sourceMap.put("expiration_time", Clock.systemUTC().instant().minus(1L, ChronoUnit.HOURS).toEpochMilli()); future = new PlainActionFuture<>(); - ApiKeyService.validateApiKeyCredentials(sourceMap, creds, Clock.systemUTC(), future); + service.validateApiKeyCredentials(sourceMap, creds, Clock.systemUTC(), future); result = future.get(); assertNotNull(result); assertFalse(result.isAuthenticated()); @@ -155,7 +161,7 @@ public void testValidateApiKey() throws Exception { sourceMap.remove("expiration_time"); creds = new ApiKeyService.ApiKeyCredentials(randomAlphaOfLength(12), new SecureString(randomAlphaOfLength(15).toCharArray())); future = new PlainActionFuture<>(); - ApiKeyService.validateApiKeyCredentials(sourceMap, creds, Clock.systemUTC(), future); + service.validateApiKeyCredentials(sourceMap, creds, Clock.systemUTC(), future); result = future.get(); assertNotNull(result); assertFalse(result.isAuthenticated()); @@ -163,7 +169,7 @@ public void testValidateApiKey() throws Exception { sourceMap.put("api_key_invalidated", true); creds = new ApiKeyService.ApiKeyCredentials(randomAlphaOfLength(12), new SecureString(randomAlphaOfLength(15).toCharArray())); future = new PlainActionFuture<>(); - ApiKeyService.validateApiKeyCredentials(sourceMap, creds, Clock.systemUTC(), future); + service.validateApiKeyCredentials(sourceMap, creds, Clock.systemUTC(), future); result = future.get(); assertNotNull(result); assertFalse(result.isAuthenticated()); @@ -188,7 +194,7 @@ public void testGetRolesForApiKeyNotInContext() throws Exception { final Authentication authentication = new Authentication(new User("joe"), new RealmRef("apikey", "apikey", "node"), null, Version.CURRENT, AuthenticationType.API_KEY, authMetadata); ApiKeyService service = new ApiKeyService(Settings.EMPTY, Clock.systemUTC(), null, null, - 
ClusterServiceUtils.createClusterService(threadPool)); + ClusterServiceUtils.createClusterService(threadPool), threadPool); PlainActionFuture roleFuture = new PlainActionFuture<>(); service.getRoleForApiKey(authentication, roleFuture); @@ -242,7 +248,7 @@ public void testGetRolesForApiKey() throws Exception { } ).when(privilegesStore).getPrivileges(any(Collection.class), any(Collection.class), any(ActionListener.class)); ApiKeyService service = new ApiKeyService(Settings.EMPTY, Clock.systemUTC(), null, null, - ClusterServiceUtils.createClusterService(threadPool)); + ClusterServiceUtils.createClusterService(threadPool), threadPool); PlainActionFuture roleFuture = new PlainActionFuture<>(); service.getRoleForApiKey(authentication, roleFuture); @@ -258,4 +264,95 @@ public void testGetRolesForApiKey() throws Exception { assertThat(result.getLimitedByRoleDescriptors().get(0).getName(), is("limited role")); } } + + public void testApiKeyCache() { + final String apiKey = randomAlphaOfLength(16); + Hasher hasher = randomFrom(Hasher.PBKDF2, Hasher.BCRYPT4, Hasher.BCRYPT); + final char[] hash = hasher.hash(new SecureString(apiKey.toCharArray())); + + Map sourceMap = new HashMap<>(); + sourceMap.put("api_key_hash", new String(hash)); + sourceMap.put("role_descriptors", Collections.singletonMap("a role", Collections.singletonMap("cluster", "all"))); + sourceMap.put("limited_by_role_descriptors", Collections.singletonMap("limited role", Collections.singletonMap("cluster", "all"))); + Map creatorMap = new HashMap<>(); + creatorMap.put("principal", "test_user"); + creatorMap.put("metadata", Collections.emptyMap()); + sourceMap.put("creator", creatorMap); + sourceMap.put("api_key_invalidated", false); + + ApiKeyService service = new ApiKeyService(Settings.EMPTY, Clock.systemUTC(), null, null, + ClusterServiceUtils.createClusterService(threadPool), threadPool); + ApiKeyCredentials creds = new ApiKeyCredentials(randomAlphaOfLength(12), new SecureString(apiKey.toCharArray())); + PlainActionFuture future = new PlainActionFuture<>(); + service.validateApiKeyCredentials(sourceMap, creds, Clock.systemUTC(), future); + AuthenticationResult result = future.actionGet(); + assertThat(result.isAuthenticated(), is(true)); + CachedApiKeyHashResult cachedApiKeyHashResult = service.getFromCache(creds.getId()); + assertNotNull(cachedApiKeyHashResult); + assertThat(cachedApiKeyHashResult.success, is(true)); + + creds = new ApiKeyCredentials(creds.getId(), new SecureString("foobar".toCharArray())); + future = new PlainActionFuture<>(); + service.validateApiKeyCredentials(sourceMap, creds, Clock.systemUTC(), future); + result = future.actionGet(); + assertThat(result.isAuthenticated(), is(false)); + final CachedApiKeyHashResult shouldBeSame = service.getFromCache(creds.getId()); + assertNotNull(shouldBeSame); + assertThat(shouldBeSame, sameInstance(cachedApiKeyHashResult)); + + sourceMap.put("api_key_hash", new String(hasher.hash(new SecureString("foobar".toCharArray())))); + creds = new ApiKeyCredentials(randomAlphaOfLength(12), new SecureString("foobar1".toCharArray())); + future = new PlainActionFuture<>(); + service.validateApiKeyCredentials(sourceMap, creds, Clock.systemUTC(), future); + result = future.actionGet(); + assertThat(result.isAuthenticated(), is(false)); + cachedApiKeyHashResult = service.getFromCache(creds.getId()); + assertNotNull(cachedApiKeyHashResult); + assertThat(cachedApiKeyHashResult.success, is(false)); + + creds = new ApiKeyCredentials(creds.getId(), new SecureString("foobar2".toCharArray())); + 
future = new PlainActionFuture<>(); + service.validateApiKeyCredentials(sourceMap, creds, Clock.systemUTC(), future); + result = future.actionGet(); + assertThat(result.isAuthenticated(), is(false)); + assertThat(service.getFromCache(creds.getId()), not(sameInstance(cachedApiKeyHashResult))); + assertThat(service.getFromCache(creds.getId()).success, is(false)); + + creds = new ApiKeyCredentials(creds.getId(), new SecureString("foobar".toCharArray())); + future = new PlainActionFuture<>(); + service.validateApiKeyCredentials(sourceMap, creds, Clock.systemUTC(), future); + result = future.actionGet(); + assertThat(result.isAuthenticated(), is(true)); + assertThat(service.getFromCache(creds.getId()), not(sameInstance(cachedApiKeyHashResult))); + assertThat(service.getFromCache(creds.getId()).success, is(true)); + } + + public void testApiKeyCacheDisabled() { + final String apiKey = randomAlphaOfLength(16); + Hasher hasher = randomFrom(Hasher.PBKDF2, Hasher.BCRYPT4, Hasher.BCRYPT); + final char[] hash = hasher.hash(new SecureString(apiKey.toCharArray())); + final Settings settings = Settings.builder() + .put(ApiKeyService.CACHE_TTL_SETTING.getKey(), "0s") + .build(); + + Map sourceMap = new HashMap<>(); + sourceMap.put("api_key_hash", new String(hash)); + sourceMap.put("role_descriptors", Collections.singletonMap("a role", Collections.singletonMap("cluster", "all"))); + sourceMap.put("limited_by_role_descriptors", Collections.singletonMap("limited role", Collections.singletonMap("cluster", "all"))); + Map creatorMap = new HashMap<>(); + creatorMap.put("principal", "test_user"); + creatorMap.put("metadata", Collections.emptyMap()); + sourceMap.put("creator", creatorMap); + sourceMap.put("api_key_invalidated", false); + + ApiKeyService service = new ApiKeyService(settings, Clock.systemUTC(), null, null, + ClusterServiceUtils.createClusterService(threadPool), threadPool); + ApiKeyCredentials creds = new ApiKeyCredentials(randomAlphaOfLength(12), new SecureString(apiKey.toCharArray())); + PlainActionFuture future = new PlainActionFuture<>(); + service.validateApiKeyCredentials(sourceMap, creds, Clock.systemUTC(), future); + AuthenticationResult result = future.actionGet(); + assertThat(result.isAuthenticated(), is(true)); + CachedApiKeyHashResult cachedApiKeyHashResult = service.getFromCache(creds.getId()); + assertNull(cachedApiKeyHashResult); + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index 7c6e9428f0759..40d9a71d023d4 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -210,7 +210,7 @@ licenseState, threadContext, mock(ReservedRealm.class), Arrays.asList(firstRealm return null; }).when(securityIndex).checkIndexVersionThenExecute(any(Consumer.class), any(Runnable.class)); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); - apiKeyService = new ApiKeyService(settings, Clock.systemUTC(), client, securityIndex, clusterService); + apiKeyService = new ApiKeyService(settings, Clock.systemUTC(), client, securityIndex, clusterService, threadPool); tokenService = new TokenService(settings, Clock.systemUTC(), client, securityIndex, clusterService); service = new AuthenticationService(settings, 
realms, auditTrail, new DefaultAuthenticationFailureHandler(Collections.emptyMap()), threadPool, new AnonymousUser(settings), tokenService, apiKeyService);
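For reference when reading the hunks above, the cache is governed by three new node settings declared in ApiKeyService: xpack.security.authc.api_key.cache.hash_algo (default ssha256), xpack.security.authc.api_key.cache.ttl (default 24h, where a TTL of 0 disables the cache), and xpack.security.authc.api_key.cache.max_keys (default 10000). Below is a minimal, illustrative sketch of tuning or disabling them, mirroring the Settings.builder() usage in the new tests; the concrete values are arbitrary examples, not recommendations.

import org.elasticsearch.common.settings.Settings;

// Illustrative values only; the setting keys and defaults come from ApiKeyService above.
final class ApiKeyCacheSettingsExample {

    // Shrink the cache and its TTL below the defaults (24h, 10000 keys).
    static Settings tunedCache() {
        return Settings.builder()
            .put("xpack.security.authc.api_key.cache.hash_algo", "ssha256")
            .put("xpack.security.authc.api_key.cache.ttl", "12h")
            .put("xpack.security.authc.api_key.cache.max_keys", 5000)
            .build();
    }

    // A TTL of 0 causes ApiKeyService to skip building the cache entirely,
    // as exercised by testApiKeyCacheDisabled.
    static Settings cacheDisabled() {
        return Settings.builder()
            .put("xpack.security.authc.api_key.cache.ttl", "0s")
            .build();
    }
}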