From c9adf63b92fbf13395f60453aaa503ef19d1b565 Mon Sep 17 00:00:00 2001 From: jon-wei Date: Mon, 29 Apr 2019 12:40:16 -0700 Subject: [PATCH 1/7] Adjust required permissions for system schema --- integration-tests/docker/sample-data.sql | 2 + .../ITBasicAuthConfigurationTest.java | 421 ++++++++++++++++-- .../auth_test_sys_schema_segments.json | 17 + .../auth_test_sys_schema_server_segments.json | 6 + .../results/auth_test_sys_schema_servers.json | 12 + .../results/auth_test_sys_schema_tasks.json | 17 + .../sql/calcite/schema/SystemSchema.java | 29 +- 7 files changed, 457 insertions(+), 47 deletions(-) create mode 100644 integration-tests/src/test/resources/results/auth_test_sys_schema_segments.json create mode 100644 integration-tests/src/test/resources/results/auth_test_sys_schema_server_segments.json create mode 100644 integration-tests/src/test/resources/results/auth_test_sys_schema_servers.json create mode 100644 integration-tests/src/test/resources/results/auth_test_sys_schema_tasks.json diff --git a/integration-tests/docker/sample-data.sql b/integration-tests/docker/sample-data.sql index 18ab48ad556b..b2a10fe9d38e 100644 --- a/integration-tests/docker/sample-data.sql +++ b/integration-tests/docker/sample-data.sql @@ -18,3 +18,5 @@ INSERT INTO druid_segments (id,dataSource,created_date,start,end,partitioned,ver INSERT INTO druid_segments (id,dataSource,created_date,start,end,partitioned,version,used,payload) VALUES ('twitterstream_2013-01-03T00:00:00.000Z_2013-01-04T00:00:00.000Z_2013-01-04T04:09:13.590Z_v9','twitterstream','2013-05-13T00:03:48.807Z','2013-01-03T00:00:00.000Z','2013-01-04T00:00:00.000Z',0,'2013-01-04T04:09:13.590Z_v9',1,'{\"dataSource\":\"twitterstream\",\"interval\":\"2013-01-03T00:00:00.000Z/2013-01-04T00:00:00.000Z\",\"version\":\"2013-01-04T04:09:13.590Z_v9\",\"loadSpec\":{\"type\":\"s3_zip\",\"bucket\":\"static.druid.io\",\"key\":\"data/segments/twitterstream/2013-01-03T00:00:00.000Z_2013-01-04T00:00:00.000Z/2013-01-04T04:09:13.590Z_v9/0/index.zip\"},\"dimensions\":\"has_links,first_hashtag,user_time_zone,user_location,has_mention,user_lang,rt_name,user_name,is_retweet,is_viral,has_geo,url_domain,user_mention_name,reply_to_name\",\"metrics\":\"count,tweet_length,num_followers,num_links,num_mentions,num_hashtags,num_favorites,user_total_tweets\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":411651320,\"identifier\":\"twitterstream_2013-01-03T00:00:00.000Z_2013-01-04T00:00:00.000Z_2013-01-04T04:09:13.590Z_v9\"}'); INSERT INTO druid_segments (id,dataSource,created_date,start,end,partitioned,version,used,payload) VALUES 
('wikipedia_editstream_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9','wikipedia_editstream','2013-03-15T20:49:52.348Z','2012-12-29T00:00:00.000Z','2013-01-10T08:00:00.000Z',0,'2013-01-10T08:13:47.830Z_v9',1,'{\"dataSource\":\"wikipedia_editstream\",\"interval\":\"2012-12-29T00:00:00.000Z/2013-01-10T08:00:00.000Z\",\"version\":\"2013-01-10T08:13:47.830Z_v9\",\"loadSpec\":{\"type\":\"s3_zip\",\"bucket\":\"static.druid.io\",\"key\":\"data/segments/wikipedia_editstream/2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z/2013-01-10T08:13:47.830Z_v9/0/index.zip\"},\"dimensions\":\"anonymous,area_code,city,continent_code,country_name,dma_code,geo,language,namespace,network,newpage,page,postal_code,region_lookup,robot,unpatrolled,user\",\"metrics\":\"added,count,deleted,delta,delta_hist,unique_users,variation\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":446027801,\"identifier\":\"wikipedia_editstream_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\"}'); INSERT INTO druid_segments (id, dataSource, created_date, start, end, partitioned, version, used, payload) VALUES ('wikipedia_2013-08-01T00:00:00.000Z_2013-08-02T00:00:00.000Z_2013-08-08T21:22:48.989Z', 'wikipedia', '2013-08-08T21:26:23.799Z', '2013-08-01T00:00:00.000Z', '2013-08-02T00:00:00.000Z', '0', '2013-08-08T21:22:48.989Z', '1', '{\"dataSource\":\"wikipedia\",\"interval\":\"2013-08-01T00:00:00.000Z/2013-08-02T00:00:00.000Z\",\"version\":\"2013-08-08T21:22:48.989Z\",\"loadSpec\":{\"type\":\"s3_zip\",\"bucket\":\"static.druid.io\",\"key\":\"data/segments/wikipedia/20130801T000000.000Z_20130802T000000.000Z/2013-08-08T21_22_48.989Z/0/index.zip\"},\"dimensions\":\"dma_code,continent_code,geo,area_code,robot,country_name,network,city,namespace,anonymous,unpatrolled,page,postal_code,language,newpage,user,region_lookup\",\"metrics\":\"count,delta,variation,added,deleted\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":24664730,\"identifier\":\"wikipedia_2013-08-01T00:00:00.000Z_2013-08-02T00:00:00.000Z_2013-08-08T21:22:48.989Z\"}'); +INSERT INTO druid_tasks (id, created_date, datasource, payload, status_payload, active) VALUES ('index_auth_test_2019-04-30T01:13:31.893Z', '2019-04-30T01:13:31.893Z', 'auth_test', '{\"id\":\"index_auth_test_2019-04-30T01:13:31.893Z\",\"created_date\":\"2019-04-30T01:13:31.893Z\",\"datasource\":\"auth_test\",\"active\":0}', '{\"id\":\"index_auth_test_2019-04-30T01:13:31.893Z\",\"status\":\"SUCCESS\",\"duration\":1}', 0); +INSERT INTO druid_segments (id,dataSource,created_date,start,end,partitioned,version,used,payload) VALUES 
('auth_test_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9','auth_test','2013-03-15T20:49:52.348Z','2012-12-29T00:00:00.000Z','2013-01-10T08:00:00.000Z',0,'2013-01-10T08:13:47.830Z_v9',1,'{\"dataSource\":\"auth_test\",\"interval\":\"2012-12-29T00:00:00.000Z/2013-01-10T08:00:00.000Z\",\"version\":\"2013-01-10T08:13:47.830Z_v9\",\"loadSpec\":{\"type\":\"s3_zip\",\"bucket\":\"static.druid.io\",\"key\":\"data/segments/wikipedia_editstream/2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z/2013-01-10T08:13:47.830Z_v9/0/index.zip\"},\"dimensions\":\"anonymous,area_code,city,continent_code,country_name,dma_code,geo,language,namespace,network,newpage,page,postal_code,region_lookup,robot,unpatrolled,user\",\"metrics\":\"added,count,deleted,delta,delta_hist,unique_users,variation\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":446027801,\"identifier\":\"auth_test_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\"}'); diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java index 7272e5e632f0..12e495670ec4 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java @@ -21,6 +21,8 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import com.google.inject.Inject; import org.apache.calcite.avatica.AvaticaSqlException; import org.apache.druid.guice.annotations.Client; @@ -40,10 +42,14 @@ import org.apache.druid.server.security.ResourceType; import org.apache.druid.sql.avatica.DruidAvaticaHandler; import org.apache.druid.testing.IntegrationTestingConfig; +import org.apache.druid.testing.clients.CoordinatorResourceTestClient; import org.apache.druid.testing.guice.DruidTestModuleFactory; +import org.apache.druid.testing.utils.RetryUtil; +import org.apache.druid.testing.utils.TestQueryHelper; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.testng.Assert; +import org.testng.annotations.BeforeMethod; import org.testng.annotations.Guice; import org.testng.annotations.Test; @@ -58,6 +64,7 @@ import java.util.List; import java.util.Map; import java.util.Properties; +import java.util.stream.Collectors; @Guice(moduleFactory = DruidTestModuleFactory.class) public class ITBasicAuthConfigurationTest @@ -69,6 +76,32 @@ public class ITBasicAuthConfigurationTest { }; + private static final TypeReference SYS_SCHEMA_RESULTS_TYPE_REFERENCE = + new TypeReference>>() + { + }; + + private static final String SYSTEM_SCHEMA_SEGMENTS_RESULTS_RESOURCE = + "/results/auth_test_sys_schema_segments.json"; + private static final String SYSTEM_SCHEMA_SERVER_SEGMENTS_RESULTS_RESOURCE = + "/results/auth_test_sys_schema_server_segments.json"; + private static final String SYSTEM_SCHEMA_SERVERS_RESULTS_RESOURCE = + "/results/auth_test_sys_schema_servers.json"; + private static final String SYSTEM_SCHEMA_TASKS_RESULTS_RESOURCE = + "/results/auth_test_sys_schema_tasks.json"; + + private static final String SYS_SCHEMA_SEGMENTS_QUERY = + "SELECT * FROM sys.segments WHERE datasource IN ('auth_test')"; + + private static final String 
SYS_SCHEMA_SERVERS_QUERY = + "SELECT * FROM sys.servers"; + + private static final String SYS_SCHEMA_SERVER_SEGMENTS_QUERY = + "SELECT * FROM sys.server_segments WHERE segment_id LIKE 'auth_test%'"; + + private static final String SYS_SCHEMA_TASKS_QUERY = + "SELECT * FROM sys.tasks WHERE datasource IN ('auth_test')"; + @Inject IntegrationTestingConfig config; @@ -81,83 +114,256 @@ public class ITBasicAuthConfigurationTest StatusResponseHandler responseHandler = new StatusResponseHandler(StandardCharsets.UTF_8); + @Inject + private CoordinatorResourceTestClient coordinatorClient; + + @BeforeMethod + public void before() throws Exception + { + // ensure that auth_test segments are loaded completely, we use them for testing system schema tables + RetryUtil.retryUntilTrue( + () -> coordinatorClient.areSegmentsLoaded("auth_test"), "auth_test segment load" + ); + } + @Test - public void testAuthConfiguration() throws Exception + public void testSystemSchemaAccess() throws Exception { HttpClient adminClient = new CredentialedHttpClient( new BasicCredentials("admin", "priest"), httpClient ); - HttpClient internalSystemClient = new CredentialedHttpClient( - new BasicCredentials("druid_system", "warlock"), + // check that admin access works on all nodes + checkNodeAccess(adminClient); + + // create a new user+role that can only read 'auth_test' + List readDatasourceOnlyPermissions = Collections.singletonList( + new ResourceAction( + new Resource("auth_test", ResourceType.DATASOURCE), + Action.READ + ) + ); + createUserAndRoleWithPermissions( + adminClient, + "datasourceOnlyUser", + "helloworld", + "datasourceOnlyRole", + readDatasourceOnlyPermissions + ); + HttpClient datasourceOnlyUserClient = new CredentialedHttpClient( + new BasicCredentials("datasourceOnlyUser", "helloworld"), httpClient ); - HttpClient newUserClient = new CredentialedHttpClient( - new BasicCredentials("druid", "helloworld"), + // create a new user+role that can only read 'auth_test' + STATE read access + List readDatasourceWithStatePermissions = ImmutableList.of( + new ResourceAction( + new Resource("auth_test", ResourceType.DATASOURCE), + Action.READ + ), + new ResourceAction( + new Resource(".*", ResourceType.STATE), + Action.READ + ) + ); + createUserAndRoleWithPermissions( + adminClient, + "datasourceWithStateUser", + "helloworld", + "datasourceWithStateRole", + readDatasourceWithStatePermissions + ); + HttpClient datasourceWithStateUserClient = new CredentialedHttpClient( + new BasicCredentials("datasourceWithStateUser", "helloworld"), httpClient ); - final HttpClient unsecuredClient = httpClient; + // check that we can access a datasource-permission restricted resource on the broker + makeRequest( + datasourceOnlyUserClient, + HttpMethod.GET, + config.getBrokerUrl() + "/druid/v2/datasources/auth_test", + null + ); - // check that we are allowed to access unsecured path without credentials. 
- checkUnsecuredCoordinatorLoadQueuePath(unsecuredClient); + // check that we can access a state-permission restricted resource on the broker + makeRequest(datasourceWithStateUserClient, HttpMethod.GET, config.getBrokerUrl() + "/status", null); - // check that admin works - checkNodeAccess(adminClient); - // check that internal user works - checkNodeAccess(internalSystemClient); + // initial setup is done now, run the system schema response content tests + final List> adminSegments = jsonMapper.readValue( + TestQueryHelper.class.getResourceAsStream(SYSTEM_SCHEMA_SEGMENTS_RESULTS_RESOURCE), + SYS_SCHEMA_RESULTS_TYPE_REFERENCE + ); - // create a new user that can read /status - makeRequest( - adminClient, - HttpMethod.POST, - config.getCoordinatorUrl() + "/druid-ext/basic-security/authentication/db/basic/users/druid", - null + final List> adminServerSegments = jsonMapper.readValue( + TestQueryHelper.class.getResourceAsStream(SYSTEM_SCHEMA_SERVER_SEGMENTS_RESULTS_RESOURCE), + SYS_SCHEMA_RESULTS_TYPE_REFERENCE ); - makeRequest( + final List> adminServers = jsonMapper.readValue( + TestQueryHelper.class.getResourceAsStream(SYSTEM_SCHEMA_SERVERS_RESULTS_RESOURCE), + SYS_SCHEMA_RESULTS_TYPE_REFERENCE + ); + + final List> adminTasks = jsonMapper.readValue( + TestQueryHelper.class.getResourceAsStream(SYSTEM_SCHEMA_TASKS_RESULTS_RESOURCE), + SYS_SCHEMA_RESULTS_TYPE_REFERENCE + ); + + // as admin + LOG.info("Checking sys.segments query as admin..."); + verifySystemSchemaQuery( adminClient, - HttpMethod.POST, - config.getCoordinatorUrl() + "/druid-ext/basic-security/authentication/db/basic/users/druid/credentials", - jsonMapper.writeValueAsBytes(new BasicAuthenticatorCredentialUpdate("helloworld", 5000)) + SYS_SCHEMA_SEGMENTS_QUERY, + adminSegments ); - makeRequest( + LOG.info("Checking sys.servers query as admin..."); + verifySystemSchemaQuery( adminClient, - HttpMethod.POST, - config.getCoordinatorUrl() + "/druid-ext/basic-security/authorization/db/basic/users/druid", - null + SYS_SCHEMA_SERVERS_QUERY, + adminServers ); - makeRequest( + LOG.info("Checking sys.server_segments query as admin..."); + verifySystemSchemaQuery( adminClient, - HttpMethod.POST, - config.getCoordinatorUrl() + "/druid-ext/basic-security/authorization/db/basic/roles/druidrole", - null + SYS_SCHEMA_SERVER_SEGMENTS_QUERY, + adminServerSegments ); - makeRequest( + LOG.info("Checking sys.tasks query as admin..."); + verifySystemSchemaQuery( adminClient, - HttpMethod.POST, - config.getCoordinatorUrl() + "/druid-ext/basic-security/authorization/db/basic/users/druid/roles/druidrole", - null + SYS_SCHEMA_TASKS_QUERY, + adminTasks + ); + + // as user that can only read auth_test + LOG.info("Checking sys.segments query as datasourceOnlyUser..."); + verifySystemSchemaQuery( + datasourceOnlyUserClient, + SYS_SCHEMA_SEGMENTS_QUERY, + adminSegments.stream() + .filter((segmentEntry) -> { + return "auth_test".equals(segmentEntry.get("datasource")); + }) + .collect(Collectors.toList()) + ); + + LOG.info("Checking sys.servers query as datasourceOnlyUser..."); + verifySystemSchemaQueryFailure( + datasourceOnlyUserClient, + SYS_SCHEMA_SERVERS_QUERY, + HttpResponseStatus.FORBIDDEN, + "{\"Access-Check-Result\":\"Insufficient permission to view servers :Allowed:false, Message:\"}" + ); + + LOG.info("Checking sys.server_segments query as datasourceOnlyUser..."); + verifySystemSchemaQueryFailure( + datasourceOnlyUserClient, + SYS_SCHEMA_SERVER_SEGMENTS_QUERY, + HttpResponseStatus.FORBIDDEN, + "{\"Access-Check-Result\":\"Insufficient permission to view 
servers :Allowed:false, Message:\"}" + ); + + LOG.info("Checking sys.tasks query as datasourceOnlyUser..."); + verifySystemSchemaQuery( + datasourceOnlyUserClient, + SYS_SCHEMA_TASKS_QUERY, + adminTasks.stream() + .filter((taskEntry) -> { + return "auth_test".equals(taskEntry.get("datasource")); + }) + .collect(Collectors.toList()) ); + // as user that can read auth_test and STATE + LOG.info("Checking sys.segments query as datasourceWithStateUser..."); + verifySystemSchemaQuery( + datasourceWithStateUserClient, + SYS_SCHEMA_SEGMENTS_QUERY, + adminSegments.stream() + .filter((segmentEntry) -> { + return "auth_test".equals(segmentEntry.get("datasource")); + }) + .collect(Collectors.toList()) + ); + + LOG.info("Checking sys.servers query as datasourceWithStateUser..."); + verifySystemSchemaQuery( + datasourceWithStateUserClient, + SYS_SCHEMA_SERVERS_QUERY, + adminServers + ); + + LOG.info("Checking sys.server_segments query as datasourceWithStateUser..."); + verifySystemSchemaQuery( + datasourceWithStateUserClient, + SYS_SCHEMA_SERVER_SEGMENTS_QUERY, + adminServerSegments.stream() + .filter((serverSegmentEntry) -> { + return ((String) serverSegmentEntry.get("segment_id")).contains("auth_test"); + }) + .collect(Collectors.toList()) + ); + + LOG.info("Checking sys.tasks query as datasourceWithStateUser..."); + verifySystemSchemaQuery( + datasourceWithStateUserClient, + SYS_SCHEMA_TASKS_QUERY, + adminTasks.stream() + .filter((taskEntry) -> { + return "auth_test".equals(taskEntry.get("datasource")); + }) + .collect(Collectors.toList()) + ); + } + + @Test + public void testAuthConfiguration() throws Exception + { + HttpClient adminClient = new CredentialedHttpClient( + new BasicCredentials("admin", "priest"), + httpClient + ); + + HttpClient internalSystemClient = new CredentialedHttpClient( + new BasicCredentials("druid_system", "warlock"), + httpClient + ); + + HttpClient newUserClient = new CredentialedHttpClient( + new BasicCredentials("druid", "helloworld"), + httpClient + ); + + final HttpClient unsecuredClient = httpClient; + + // check that we are allowed to access unsecured path without credentials. 
+ checkUnsecuredCoordinatorLoadQueuePath(unsecuredClient); + + // check that admin works + checkNodeAccess(adminClient); + + // check that internal user works + checkNodeAccess(internalSystemClient); + + // create a new user+role that can read /status List permissions = Collections.singletonList( new ResourceAction( new Resource(".*", ResourceType.STATE), Action.READ ) ); - byte[] permissionsBytes = jsonMapper.writeValueAsBytes(permissions); - makeRequest( + createUserAndRoleWithPermissions( adminClient, - HttpMethod.POST, - config.getCoordinatorUrl() + "/druid-ext/basic-security/authorization/db/basic/roles/druidrole/permissions", - permissionsBytes + "druid", + "helloworld", + "druidrole", + permissions ); // check that the new user works @@ -166,7 +372,6 @@ public void testAuthConfiguration() throws Exception // check loadStatus checkLoadStatus(adminClient); - // create 100 users for (int i = 0; i < 100; i++) { makeRequest( @@ -333,6 +538,23 @@ private void checkLoadStatusSingle(HttpClient httpClient, String baseUrl) throws } private StatusResponseHolder makeRequest(HttpClient httpClient, HttpMethod method, String url, byte[] content) + { + return makeRequestWithExpectedStatus( + httpClient, + method, + url, + content, + HttpResponseStatus.OK + ); + } + + private StatusResponseHolder makeRequestWithExpectedStatus( + HttpClient httpClient, + HttpMethod method, + String url, + byte[] content, + HttpResponseStatus expectedStatus + ) { try { Request request = new Request(method, new URL(url)); @@ -349,7 +571,7 @@ private StatusResponseHolder makeRequest(HttpClient httpClient, HttpMethod metho responseHandler ).get(); - if (!response.getStatus().equals(HttpResponseStatus.OK)) { + if (!response.getStatus().equals(expectedStatus)) { String errMsg = StringUtils.format( "Error while making request to url[%s] status[%s] content[%s]", url, @@ -357,7 +579,7 @@ private StatusResponseHolder makeRequest(HttpClient httpClient, HttpMethod metho response.getContent() ); // it can take time for the auth config to propagate, so we retry - if (retryCount > 4) { + if (retryCount > 10) { throw new ISE(errMsg); } else { LOG.error(errMsg); @@ -375,4 +597,119 @@ private StatusResponseHolder makeRequest(HttpClient httpClient, HttpMethod metho throw new RuntimeException(e); } } + + + private void createUserAndRoleWithPermissions( + HttpClient adminClient, + String user, + String password, + String role, + List permissions + ) throws Exception + { + makeRequest( + adminClient, + HttpMethod.POST, + StringUtils.format( + "%s/druid-ext/basic-security/authentication/db/basic/users/%s", + config.getCoordinatorUrl(), + user + ), + null + ); + makeRequest( + adminClient, + HttpMethod.POST, + StringUtils.format( + "%s/druid-ext/basic-security/authentication/db/basic/users/%s/credentials", + config.getCoordinatorUrl(), + user + ), + jsonMapper.writeValueAsBytes(new BasicAuthenticatorCredentialUpdate(password, 5000)) + ); + makeRequest( + adminClient, + HttpMethod.POST, + StringUtils.format( + "%s/druid-ext/basic-security/authorization/db/basic/users/%s", + config.getCoordinatorUrl(), + user + ), + null + ); + makeRequest( + adminClient, + HttpMethod.POST, + StringUtils.format( + "%s/druid-ext/basic-security/authorization/db/basic/roles/%s", + config.getCoordinatorUrl(), + role + ), + null + ); + makeRequest( + adminClient, + HttpMethod.POST, + StringUtils.format( + "%s/druid-ext/basic-security/authorization/db/basic/users/%s/roles/%s", + config.getCoordinatorUrl(), + user, + role + ), + null + ); + byte[] permissionsBytes = 
jsonMapper.writeValueAsBytes(permissions); + makeRequest( + adminClient, + HttpMethod.POST, + StringUtils.format( + "%s/druid-ext/basic-security/authorization/db/basic/roles/%s/permissions", + config.getCoordinatorUrl(), + role + ), + permissionsBytes + ); + } + + private StatusResponseHolder makeSQLQueryRequest( + HttpClient httpClient, + String query, + HttpResponseStatus expectedStatus + ) throws Exception + { + Map queryMap = ImmutableMap.of( + "query", query + ); + return makeRequestWithExpectedStatus( + httpClient, + HttpMethod.POST, + config.getBrokerUrl() + "/druid/v2/sql", + jsonMapper.writeValueAsBytes(queryMap), + expectedStatus + ); + } + + private void verifySystemSchemaQuery( + HttpClient client, + String query, + List> expectedResults + ) throws Exception + { + StatusResponseHolder responseHolder = makeSQLQueryRequest(client, query, HttpResponseStatus.OK); + String content = responseHolder.getContent(); + List> responseMap = jsonMapper.readValue(content, SYS_SCHEMA_RESULTS_TYPE_REFERENCE); + Assert.assertEquals(responseMap, expectedResults); + } + + private void verifySystemSchemaQueryFailure( + HttpClient client, + String query, + HttpResponseStatus expectedErrorStatus, + String expectedErrorMessage + ) throws Exception + { + StatusResponseHolder responseHolder = makeSQLQueryRequest(client, query, expectedErrorStatus); + Assert.assertEquals(responseHolder.getStatus(), expectedErrorStatus); + Assert.assertEquals(responseHolder.getContent(), expectedErrorMessage); + } } diff --git a/integration-tests/src/test/resources/results/auth_test_sys_schema_segments.json b/integration-tests/src/test/resources/results/auth_test_sys_schema_segments.json new file mode 100644 index 000000000000..a6d5e59622c0 --- /dev/null +++ b/integration-tests/src/test/resources/results/auth_test_sys_schema_segments.json @@ -0,0 +1,17 @@ +[ + { + "segment_id": "auth_test_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9", + "datasource": "auth_test", + "start": "2012-12-29T00:00:00.000Z", + "end": "2013-01-10T08:00:00.000Z", + "size": 446027801, + "version": "2013-01-10T08:13:47.830Z_v9", + "partition_num": 0, + "num_replicas": 1, + "num_rows": 4462111, + "is_published": 1, + "is_available": 1, + "is_realtime": 0, + "payload": "{\"dataSource\":\"auth_test\",\"interval\":\"2012-12-29T00:00:00.000Z/2013-01-10T08:00:00.000Z\",\"version\":\"2013-01-10T08:13:47.830Z_v9\",\"loadSpec\":{\"load spec is pruned, because it's not needed on Brokers, but eats a lot of heap space\":\"\"},\"dimensions\":\"anonymous,area_code,city,continent_code,country_name,dma_code,geo,language,namespace,network,newpage,page,postal_code,region_lookup,robot,unpatrolled,user\",\"metrics\":\"added,count,deleted,delta,delta_hist,unique_users,variation\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":446027801,\"identifier\":\"auth_test_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\"}" + } +] diff --git a/integration-tests/src/test/resources/results/auth_test_sys_schema_server_segments.json b/integration-tests/src/test/resources/results/auth_test_sys_schema_server_segments.json new file mode 100644 index 000000000000..f644018f99bc --- /dev/null +++ b/integration-tests/src/test/resources/results/auth_test_sys_schema_server_segments.json @@ -0,0 +1,6 @@ +[ + { + "server": "172.172.172.6:8283", + "segment_id": "auth_test_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9" + } +] \ No newline at end of file diff --git 
a/integration-tests/src/test/resources/results/auth_test_sys_schema_servers.json b/integration-tests/src/test/resources/results/auth_test_sys_schema_servers.json new file mode 100644 index 000000000000..bf7c681af6e6 --- /dev/null +++ b/integration-tests/src/test/resources/results/auth_test_sys_schema_servers.json @@ -0,0 +1,12 @@ +[ + { + "server": "172.172.172.6:8283", + "host": "172.172.172.6", + "plaintext_port": 8083, + "tls_port": 8283, + "server_type": "historical", + "tier": "_default_tier", + "curr_size": 2208932412, + "max_size": 5000000000 + } +] diff --git a/integration-tests/src/test/resources/results/auth_test_sys_schema_tasks.json b/integration-tests/src/test/resources/results/auth_test_sys_schema_tasks.json new file mode 100644 index 000000000000..53b3f28353c6 --- /dev/null +++ b/integration-tests/src/test/resources/results/auth_test_sys_schema_tasks.json @@ -0,0 +1,17 @@ +[ + { + "task_id": "index_auth_test_2019-04-30T01:13:31.893Z", + "type": null, + "datasource": "auth_test", + "created_time": "2019-04-30T01:13:31.893Z", + "queue_insertion_time": "1970-01-01T00:00:00.000Z", + "status": "SUCCESS", + "runner_status": "NONE", + "duration": 1, + "location": null, + "host": null, + "plaintext_port": -1, + "tls_port": -1, + "error_msg": null + } +] diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java b/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java index 29f981619660..51442693e6d1 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java @@ -92,6 +92,9 @@ public class SystemSchema extends AbstractSchema private static final String SERVER_SEGMENTS_TABLE = "server_segments"; private static final String TASKS_TABLE = "tasks"; + private static Function> SEGMENT_RA_GENERATOR = segment -> + Collections.singletonList(AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR.apply(segment.getDataSource())); + static final RowSignature SEGMENTS_SIGNATURE = RowSignature .builder() .add("segment_id", ValueType.STRING) @@ -330,13 +333,10 @@ private Iterator getAuthorizedPublishedSegments( final AuthenticationResult authenticationResult = (AuthenticationResult) root.get(PlannerContext.DATA_CTX_AUTHENTICATION_RESULT); - Function> raGenerator = segment -> Collections.singletonList( - AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR.apply(segment.getDataSource())); - final Iterable authorizedSegments = AuthorizationUtils.filterAuthorizedResources( authenticationResult, () -> it, - raGenerator, + SEGMENT_RA_GENERATOR, authorizerMapper ); return authorizedSegments.iterator(); @@ -493,11 +493,30 @@ public TableType getJdbcTableType() @Override public Enumerable scan(DataContext root) { + final AuthenticationResult authenticationResult = + (AuthenticationResult) root.get(PlannerContext.DATA_CTX_AUTHENTICATION_RESULT); + + final Access stateAccess = AuthorizationUtils.authorizeAllResourceActions( + authenticationResult, + Collections.singletonList(new ResourceAction(new Resource("STATE", ResourceType.STATE), Action.READ)), + authorizerMapper + ); + if (!stateAccess.isAllowed()) { + throw new ForbiddenException("Insufficient permission to view servers :" + stateAccess); + } + final List rows = new ArrayList<>(); final List druidServers = serverView.getDruidServers(); final int serverSegmentsTableSize = SERVER_SEGMENTS_SIGNATURE.getRowOrder().size(); for (ImmutableDruidServer druidServer : druidServers) { - for (DataSegment segment : 
druidServer.getLazyAllSegments()) { + final Iterable authorizedServerSegments = AuthorizationUtils.filterAuthorizedResources( + authenticationResult, + druidServer.getLazyAllSegments(), + SEGMENT_RA_GENERATOR, + authorizerMapper + ); + + for (DataSegment segment : authorizedServerSegments) { Object[] row = new Object[serverSegmentsTableSize]; row[0] = druidServer.getHost(); row[1] = segment.getId(); From 2b278e444ecc9d54bd9137fa83a00868c9c7fbcc Mon Sep 17 00:00:00 2001 From: jon-wei Date: Tue, 30 Apr 2019 12:31:19 -0700 Subject: [PATCH 2/7] PR comments, fix current_size handling --- .../extensions-core/druid-basic-security.md | 12 ++ docs/content/querying/sql.md | 4 + .../ITBasicAuthConfigurationTest.java | 114 ++++++++++++++++-- .../EventReceiverFirehoseFactory.java | 4 +- .../http/security/StateResourceFilter.java | 2 +- .../druid/server/security/Resource.java | 2 + .../sql/calcite/schema/SystemSchema.java | 38 +++--- 7 files changed, 145 insertions(+), 31 deletions(-) diff --git a/docs/content/development/extensions-core/druid-basic-security.md b/docs/content/development/extensions-core/druid-basic-security.md index adba32bb4688..e9aa6915b60c 100644 --- a/docs/content/development/extensions-core/druid-basic-security.md +++ b/docs/content/development/extensions-core/druid-basic-security.md @@ -310,6 +310,18 @@ For information on what HTTP methods are supported on a particular request endpo GET requires READ permission, while POST and DELETE require WRITE permission. +### SQL Permissions + +Queries on Druid datasources require DATASOURCE READ permissions for the specified datasource. + +Queries on the [information schema tables](../../querying/sql.html#information-schema) require DATASOURCE READ access for the specified datasource. + +Queries on the [system schema tables](../../querying/sql.html#system-schema) require the following permissions: +- `segments`: Segments will be filtered based on DATASOURCE READ permissions. +- `servers`: The user requires STATE READ permissions. +- `server_segments`: The user requires STATE READ permissions and segments will be filtered based on DATASOURCE READ permissions. +- `tasks`: Tasks will be filtered based on DATASOURCE READ permissions. + ## Configuration Propagation To prevent excessive load on the Coordinator, the Authenticator and Authorizer user/role database state is cached on each Druid process. diff --git a/docs/content/querying/sql.md b/docs/content/querying/sql.md index 9e97138dd200..a519236c797d 100644 --- a/docs/content/querying/sql.md +++ b/docs/content/querying/sql.md @@ -738,3 +738,7 @@ Broker will emit the following metrics for SQL. |`sqlQuery/time`|Milliseconds taken to complete a SQL.|id, nativeQueryIds, dataSource, remoteAddress, success.|< 1s| |`sqlQuery/bytes`|number of bytes returned in SQL response.|id, nativeQueryIds, dataSource, remoteAddress, success.| | + +## Authorization Permissions + +Please see [Defining SQL permissions](../../development/extensions-core/druid-basic-security.html#sql-permissions) for information on what permissions are needed for making SQL queries in a secured cluster. 
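As a concrete illustration of the permission rules described in the documentation changes above, here is a minimal sketch (not part of the patch itself) of querying a system schema table through the Broker's SQL endpoint. It is written as it would appear inside the `ITBasicAuthConfigurationTest` added in the first commit, reusing that test's `makeSQLQueryRequest` helper, `datasourceOnlyUserClient`, `jsonMapper`, and `SYS_SCHEMA_RESULTS_TYPE_REFERENCE`:

```java
// Sketch only: POST {"query": ...} to the Broker's /druid/v2/sql endpoint as the
// user that holds DATASOURCE READ on 'auth_test', then parse the JSON result rows.
StatusResponseHolder holder = makeSQLQueryRequest(
    datasourceOnlyUserClient,
    "SELECT * FROM sys.segments WHERE datasource IN ('auth_test')",
    HttpResponseStatus.OK
);
List<Map<String, Object>> rows = jsonMapper.readValue(
    holder.getContent(),
    SYS_SCHEMA_RESULTS_TYPE_REFERENCE
);
// Only segments of datasources the user can READ (here: auth_test) come back.
```

The same client issuing `SELECT * FROM sys.servers` would instead receive a 403 FORBIDDEN response, since that table additionally requires STATE READ, as the test asserts below.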
\ No newline at end of file diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java index 12e495670ec4..012b30ebe46e 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; import com.google.inject.Inject; import org.apache.calcite.avatica.AvaticaSqlException; import org.apache.druid.guice.annotations.Client; @@ -61,6 +62,7 @@ import java.sql.ResultSet; import java.sql.Statement; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; @@ -179,6 +181,25 @@ public void testSystemSchemaAccess() throws Exception httpClient ); + // create a new user+role with only STATE read access + List stateOnlyPermissions = ImmutableList.of( + new ResourceAction( + new Resource(".*", ResourceType.STATE), + Action.READ + ) + ); + createUserAndRoleWithPermissions( + adminClient, + "stateOnlyUser", + "helloworld", + "stateOnlyRole", + stateOnlyPermissions + ); + HttpClient stateOnlyUserClient = new CredentialedHttpClient( + new BasicCredentials("stateOnlyUser", "helloworld"), + httpClient + ); + // check that we can access a datasource-permission restricted resource on the broker makeRequest( datasourceOnlyUserClient, @@ -189,7 +210,7 @@ public void testSystemSchemaAccess() throws Exception // check that we can access a state-permission restricted resource on the broker makeRequest(datasourceWithStateUserClient, HttpMethod.GET, config.getBrokerUrl() + "/status", null); - + makeRequest(stateOnlyUserClient, HttpMethod.GET, config.getBrokerUrl() + "/status", null); // initial setup is done now, run the system schema response content tests final List> adminSegments = jsonMapper.readValue( @@ -202,9 +223,11 @@ public void testSystemSchemaAccess() throws Exception SYS_SCHEMA_RESULTS_TYPE_REFERENCE ); - final List> adminServers = jsonMapper.readValue( - TestQueryHelper.class.getResourceAsStream(SYSTEM_SCHEMA_SERVERS_RESULTS_RESOURCE), - SYS_SCHEMA_RESULTS_TYPE_REFERENCE + final List> adminServers = getServersWithoutCurrentSize( + jsonMapper.readValue( + TestQueryHelper.class.getResourceAsStream(SYSTEM_SCHEMA_SERVERS_RESULTS_RESOURCE), + SYS_SCHEMA_RESULTS_TYPE_REFERENCE + ) ); final List> adminTasks = jsonMapper.readValue( @@ -221,10 +244,10 @@ public void testSystemSchemaAccess() throws Exception ); LOG.info("Checking sys.servers query as admin..."); - verifySystemSchemaQuery( + verifySystemSchemaServerQuery( adminClient, SYS_SCHEMA_SERVERS_QUERY, - adminServers + getServersWithoutCurrentSize(adminServers) ); LOG.info("Checking sys.server_segments query as admin..."); @@ -258,7 +281,7 @@ public void testSystemSchemaAccess() throws Exception datasourceOnlyUserClient, SYS_SCHEMA_SERVERS_QUERY, HttpResponseStatus.FORBIDDEN, - "{\"Access-Check-Result\":\"Insufficient permission to view servers :Allowed:false, Message:\"}" + "{\"Access-Check-Result\":\"Insufficient permission to view servers : Allowed:false, Message:\"}" ); LOG.info("Checking sys.server_segments query as datasourceOnlyUser..."); @@ -266,7 +289,7 @@ public void 
testSystemSchemaAccess() throws Exception datasourceOnlyUserClient, SYS_SCHEMA_SERVER_SEGMENTS_QUERY, HttpResponseStatus.FORBIDDEN, - "{\"Access-Check-Result\":\"Insufficient permission to view servers :Allowed:false, Message:\"}" + "{\"Access-Check-Result\":\"Insufficient permission to view servers : Allowed:false, Message:\"}" ); LOG.info("Checking sys.tasks query as datasourceOnlyUser..."); @@ -293,7 +316,7 @@ public void testSystemSchemaAccess() throws Exception ); LOG.info("Checking sys.servers query as datasourceWithStateUser..."); - verifySystemSchemaQuery( + verifySystemSchemaServerQuery( datasourceWithStateUserClient, SYS_SCHEMA_SERVERS_QUERY, adminServers @@ -320,6 +343,35 @@ public void testSystemSchemaAccess() throws Exception }) .collect(Collectors.toList()) ); + + // as user that can only read STATE + LOG.info("Checking sys.segments query as stateOnlyUser..."); + verifySystemSchemaQuery( + stateOnlyUserClient, + SYS_SCHEMA_SEGMENTS_QUERY, + Collections.emptyList() + ); + + LOG.info("Checking sys.servers query as stateOnlyUser..."); + verifySystemSchemaServerQuery( + stateOnlyUserClient, + SYS_SCHEMA_SERVERS_QUERY, + adminServers + ); + + LOG.info("Checking sys.server_segments query as stateOnlyUser..."); + verifySystemSchemaQuery( + stateOnlyUserClient, + SYS_SCHEMA_SERVER_SEGMENTS_QUERY, + Collections.emptyList() + ); + + LOG.info("Checking sys.tasks query as stateOnlyUser..."); + verifySystemSchemaQuery( + stateOnlyUserClient, + SYS_SCHEMA_TASKS_QUERY, + Collections.emptyList() + ); } @Test @@ -598,7 +650,6 @@ private StatusResponseHolder makeRequestWithExpectedStatus( } } - private void createUserAndRoleWithPermissions( HttpClient adminClient, String user, @@ -689,18 +740,40 @@ private StatusResponseHolder makeSQLQueryRequest( ); } - private void verifySystemSchemaQuery( + private void verifySystemSchemaQueryBase( HttpClient client, String query, - List> expectedResults + List> expectedResults, + boolean isServerQuery ) throws Exception { StatusResponseHolder responseHolder = makeSQLQueryRequest(client, query, HttpResponseStatus.OK); String content = responseHolder.getContent(); List> responseMap = jsonMapper.readValue(content, SYS_SCHEMA_RESULTS_TYPE_REFERENCE); + if (isServerQuery) { + responseMap = getServersWithoutCurrentSize(responseMap); + } Assert.assertEquals(responseMap, expectedResults); } + private void verifySystemSchemaQuery( + HttpClient client, + String query, + List> expectedResults + ) throws Exception + { + verifySystemSchemaQueryBase(client, query, expectedResults, false); + } + + private void verifySystemSchemaServerQuery( + HttpClient client, + String query, + List> expectedResults + ) throws Exception + { + verifySystemSchemaQueryBase(client, query, expectedResults, true); + } + private void verifySystemSchemaQueryFailure( HttpClient client, String query, @@ -712,4 +785,21 @@ private void verifySystemSchemaQueryFailure( Assert.assertEquals(responseHolder.getStatus(), expectedErrorStatus); Assert.assertEquals(responseHolder.getContent(), expectedErrorMessage); } + + /** + * current_size on historicals changes because cluster state is not isolated across different + * integration tests, zero it out for consistent test results + */ + private static List> getServersWithoutCurrentSize(List> servers) + { + return Lists.transform( + servers, + (server) -> { + Map newServer = new HashMap<>(); + newServer.putAll(server); + newServer.put("current_size", 0); + return newServer; + } + ); + } } diff --git 
a/server/src/main/java/org/apache/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java b/server/src/main/java/org/apache/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java index b8ed0ad4077c..c5a8c47ea142 100644 --- a/server/src/main/java/org/apache/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java +++ b/server/src/main/java/org/apache/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java @@ -349,7 +349,7 @@ public Response addAll(InputStream in, @Context final HttpServletRequest req) th Access accessResult = AuthorizationUtils.authorizeResourceAction( req, new ResourceAction( - new Resource("STATE", ResourceType.STATE), + Resource.STATE_RESOURCE, Action.WRITE ), authorizerMapper @@ -538,7 +538,7 @@ public Response shutdown( Access accessResult = AuthorizationUtils.authorizeResourceAction( req, new ResourceAction( - new Resource("STATE", ResourceType.STATE), + Resource.STATE_RESOURCE, Action.WRITE ), authorizerMapper diff --git a/server/src/main/java/org/apache/druid/server/http/security/StateResourceFilter.java b/server/src/main/java/org/apache/druid/server/http/security/StateResourceFilter.java index b3231dc2b373..90c5320af95e 100644 --- a/server/src/main/java/org/apache/druid/server/http/security/StateResourceFilter.java +++ b/server/src/main/java/org/apache/druid/server/http/security/StateResourceFilter.java @@ -58,7 +58,7 @@ public StateResourceFilter( public ContainerRequest filter(ContainerRequest request) { final ResourceAction resourceAction = new ResourceAction( - new Resource("STATE", ResourceType.STATE), + Resource.STATE_RESOURCE, getAction(request) ); diff --git a/server/src/main/java/org/apache/druid/server/security/Resource.java b/server/src/main/java/org/apache/druid/server/security/Resource.java index 02b6539090cf..6770bdaf5cbc 100644 --- a/server/src/main/java/org/apache/druid/server/security/Resource.java +++ b/server/src/main/java/org/apache/druid/server/security/Resource.java @@ -24,6 +24,8 @@ public class Resource { + public static final Resource STATE_RESOURCE = new Resource("STATE", ResourceType.STATE); + private final String name; private final ResourceType type; diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java b/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java index 51442693e6d1..f2b89d8a3d00 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java @@ -440,14 +440,9 @@ public Enumerable scan(DataContext root) final List druidServers = serverView.getDruidServers(); final AuthenticationResult authenticationResult = (AuthenticationResult) root.get(PlannerContext.DATA_CTX_AUTHENTICATION_RESULT); - final Access access = AuthorizationUtils.authorizeAllResourceActions( - authenticationResult, - Collections.singletonList(new ResourceAction(new Resource("STATE", ResourceType.STATE), Action.READ)), - authorizerMapper - ); - if (!access.isAllowed()) { - throw new ForbiddenException("Insufficient permission to view servers :" + access); - } + + checkStateReadAccessForServers(authenticationResult, authorizerMapper); + final FluentIterable results = FluentIterable .from(druidServers) .transform(val -> new Object[]{ @@ -496,14 +491,7 @@ public Enumerable scan(DataContext root) final AuthenticationResult authenticationResult = (AuthenticationResult) root.get(PlannerContext.DATA_CTX_AUTHENTICATION_RESULT); - final Access stateAccess = 
AuthorizationUtils.authorizeAllResourceActions( - authenticationResult, - Collections.singletonList(new ResourceAction(new Resource("STATE", ResourceType.STATE), Action.READ)), - authorizerMapper - ); - if (!stateAccess.isAllowed()) { - throw new ForbiddenException("Insufficient permission to view servers :" + stateAccess); - } + checkStateReadAccessForServers(authenticationResult, authorizerMapper); final List rows = new ArrayList<>(); final List druidServers = serverView.getDruidServers(); @@ -770,4 +758,22 @@ private static String toStringOrNull(@Nullable final Object object) return object.toString(); } + + /** + * Checks if an authenticated user has the STATE READ permissions needed to view server information. + */ + private static void checkStateReadAccessForServers( + AuthenticationResult authenticationResult, + AuthorizerMapper authorizerMapper + ) + { + final Access stateAccess = AuthorizationUtils.authorizeAllResourceActions( + authenticationResult, + Collections.singletonList(new ResourceAction(Resource.STATE_RESOURCE, Action.READ)), + authorizerMapper + ); + if (!stateAccess.isAllowed()) { + throw new ForbiddenException("Insufficient permission to view servers : " + stateAccess); + } + } } From 78a4e7a3eec4ed9e2811f52c3a305924a4cdce6a Mon Sep 17 00:00:00 2001 From: jon-wei Date: Tue, 30 Apr 2019 13:13:08 -0700 Subject: [PATCH 3/7] Checkstyle --- .../segment/realtime/firehose/EventReceiverFirehoseFactory.java | 1 - .../apache/druid/server/http/security/StateResourceFilter.java | 1 - .../java/org/apache/druid/sql/calcite/schema/SystemSchema.java | 1 - 3 files changed, 3 deletions(-) diff --git a/server/src/main/java/org/apache/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java b/server/src/main/java/org/apache/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java index c5a8c47ea142..d8c1ef9f02f6 100644 --- a/server/src/main/java/org/apache/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java +++ b/server/src/main/java/org/apache/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java @@ -49,7 +49,6 @@ import org.apache.druid.server.security.AuthorizerMapper; import org.apache.druid.server.security.Resource; import org.apache.druid.server.security.ResourceAction; -import org.apache.druid.server.security.ResourceType; import org.apache.druid.utils.Runnables; import org.joda.time.DateTime; diff --git a/server/src/main/java/org/apache/druid/server/http/security/StateResourceFilter.java b/server/src/main/java/org/apache/druid/server/http/security/StateResourceFilter.java index 90c5320af95e..275ea350086d 100644 --- a/server/src/main/java/org/apache/druid/server/http/security/StateResourceFilter.java +++ b/server/src/main/java/org/apache/druid/server/http/security/StateResourceFilter.java @@ -27,7 +27,6 @@ import org.apache.druid.server.security.ForbiddenException; import org.apache.druid.server.security.Resource; import org.apache.druid.server.security.ResourceAction; -import org.apache.druid.server.security.ResourceType; /** * Use this ResourceFilter at end points where Druid Cluster State is read or written diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java b/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java index f2b89d8a3d00..d5831d1470c4 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java @@ -64,7 +64,6 @@ import org.apache.druid.server.security.ForbiddenException; 
import org.apache.druid.server.security.Resource; import org.apache.druid.server.security.ResourceAction; -import org.apache.druid.server.security.ResourceType; import org.apache.druid.sql.calcite.planner.PlannerContext; import org.apache.druid.sql.calcite.table.RowSignature; import org.apache.druid.timeline.DataSegment; From 6f2553822a3cb8a264f93ede0a24d97204fb17a3 Mon Sep 17 00:00:00 2001 From: jon-wei Date: Tue, 30 Apr 2019 13:57:21 -0700 Subject: [PATCH 4/7] Set curr_size instead of current_size --- .../druid/tests/security/ITBasicAuthConfigurationTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java index 012b30ebe46e..dfa3791b2fa2 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java @@ -787,7 +787,7 @@ private void verifySystemSchemaQueryFailure( } /** - * current_size on historicals changes because cluster state is not isolated across different + * curr_size on historicals changes because cluster state is not isolated across different * integration tests, zero it out for consistent test results */ private static List> getServersWithoutCurrentSize(List> servers) @@ -797,7 +797,7 @@ private static List> getServersWithoutCurrentSize(List { Map newServer = new HashMap<>(); newServer.putAll(server); - newServer.put("current_size", 0); + newServer.put("curr_size", 0); return newServer; } ); From 669e04af62ecbe81a1ea905c35c39e3a60527cb2 Mon Sep 17 00:00:00 2001 From: jon-wei Date: Wed, 1 May 2019 12:11:58 -0700 Subject: [PATCH 5/7] Adjust information schema docs --- .../development/extensions-core/druid-basic-security.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/content/development/extensions-core/druid-basic-security.md b/docs/content/development/extensions-core/druid-basic-security.md index e9aa6915b60c..28eff1fca9f3 100644 --- a/docs/content/development/extensions-core/druid-basic-security.md +++ b/docs/content/development/extensions-core/druid-basic-security.md @@ -314,7 +314,9 @@ GET requires READ permission, while POST and DELETE require WRITE permission. Queries on Druid datasources require DATASOURCE READ permissions for the specified datasource. -Queries on the [information schema tables](../../querying/sql.html#information-schema) require DATASOURCE READ access for the specified datasource. +Queries on the [INFORMATION_SCHEMA tables](../../querying/sql.html#information-schema) will +return information about datasources that the caller has DATASOURCE READ access to. Other +datasources will be omitted. Queries on the [system schema tables](../../querying/sql.html#system-schema) require the following permissions: - `segments`: Segments will be filtered based on DATASOURCE READ permissions. 
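To make the documented permission requirements concrete, here is a minimal sketch (hypothetical user and role names, not part of the patch) of provisioning a role that can query all four sys tables for the `auth_test` datasource. It is written as it would appear inside `ITBasicAuthConfigurationTest`, which already imports the referenced security classes and defines the `createUserAndRoleWithPermissions` helper used here:

```java
// DATASOURCE READ on 'auth_test' is what sys.segments and sys.tasks rows are filtered on;
// STATE READ is additionally required for sys.servers and sys.server_segments.
List<ResourceAction> sysSchemaPermissions = ImmutableList.of(
    new ResourceAction(new Resource("auth_test", ResourceType.DATASOURCE), Action.READ),
    new ResourceAction(new Resource(".*", ResourceType.STATE), Action.READ)
);
createUserAndRoleWithPermissions(
    adminClient,
    "sysSchemaUser",   // hypothetical user name
    "helloworld",
    "sysSchemaRole",   // hypothetical role name
    sysSchemaPermissions
);
```

This is exactly the combination held by `datasourceWithStateUser` in the test, which can read all four system schema tables, with segment, server_segment, and task rows still filtered to the datasources it can READ.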
From 62b87962a431816f033ee7bd32cdf8e57d00d74c Mon Sep 17 00:00:00 2001 From: jon-wei Date: Wed, 1 May 2019 12:20:18 -0700 Subject: [PATCH 6/7] Fix merge conflict --- .../sql/calcite/schema/SystemSchema.java | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java b/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java index dc4888a1d9fe..e5cfa911c452 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/schema/SystemSchema.java @@ -92,8 +92,16 @@ public class SystemSchema extends AbstractSchema private static final String SERVER_SEGMENTS_TABLE = "server_segments"; private static final String TASKS_TABLE = "tasks"; - private static final Function> SEGMENT_RA_GENERATOR = segment -> - Collections.singletonList(AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR.apply(segment.getDataSource())); + private static final Function> + SEGMENT_WITH_OVERSHADOWED_STATUS_RA_GENERATOR = segment -> + Collections.singletonList(AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR.apply( + segment.getDataSegment().getDataSource()) + ); + + private static final Function> SEGMENT_RA_GENERATOR = + segment -> Collections.singletonList(AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR.apply( + segment.getDataSource()) + ); /** * Booleans constants represented as long type, @@ -340,17 +348,10 @@ private Iterator getAuthorizedPublishedSegments( final AuthenticationResult authenticationResult = (AuthenticationResult) root.get(PlannerContext.DATA_CTX_AUTHENTICATION_RESULT); -<<<<<<< HEAD - final Iterable authorizedSegments = AuthorizationUtils.filterAuthorizedResources( -======= - Function> raGenerator = segment -> Collections.singletonList( - AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR.apply(segment.getDataSegment().getDataSource())); - final Iterable authorizedSegments = AuthorizationUtils.filterAuthorizedResources( ->>>>>>> upstream/master authenticationResult, () -> it, - SEGMENT_RA_GENERATOR, + SEGMENT_WITH_OVERSHADOWED_STATUS_RA_GENERATOR, authorizerMapper ); return authorizedSegments.iterator(); From c5a702a9e22fcfec428939773f996c45b54d94ae Mon Sep 17 00:00:00 2001 From: jon-wei Date: Wed, 1 May 2019 14:09:25 -0700 Subject: [PATCH 7/7] Update tests --- integration-tests/docker/sample-data.sql | 2 +- .../test/resources/results/auth_test_sys_schema_segments.json | 3 ++- .../test/resources/results/auth_test_sys_schema_tasks.json | 4 ++-- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/integration-tests/docker/sample-data.sql b/integration-tests/docker/sample-data.sql index b2a10fe9d38e..69bf6ea012bc 100644 --- a/integration-tests/docker/sample-data.sql +++ b/integration-tests/docker/sample-data.sql @@ -18,5 +18,5 @@ INSERT INTO druid_segments (id,dataSource,created_date,start,end,partitioned,ver INSERT INTO druid_segments (id,dataSource,created_date,start,end,partitioned,version,used,payload) VALUES 
('twitterstream_2013-01-03T00:00:00.000Z_2013-01-04T00:00:00.000Z_2013-01-04T04:09:13.590Z_v9','twitterstream','2013-05-13T00:03:48.807Z','2013-01-03T00:00:00.000Z','2013-01-04T00:00:00.000Z',0,'2013-01-04T04:09:13.590Z_v9',1,'{\"dataSource\":\"twitterstream\",\"interval\":\"2013-01-03T00:00:00.000Z/2013-01-04T00:00:00.000Z\",\"version\":\"2013-01-04T04:09:13.590Z_v9\",\"loadSpec\":{\"type\":\"s3_zip\",\"bucket\":\"static.druid.io\",\"key\":\"data/segments/twitterstream/2013-01-03T00:00:00.000Z_2013-01-04T00:00:00.000Z/2013-01-04T04:09:13.590Z_v9/0/index.zip\"},\"dimensions\":\"has_links,first_hashtag,user_time_zone,user_location,has_mention,user_lang,rt_name,user_name,is_retweet,is_viral,has_geo,url_domain,user_mention_name,reply_to_name\",\"metrics\":\"count,tweet_length,num_followers,num_links,num_mentions,num_hashtags,num_favorites,user_total_tweets\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":411651320,\"identifier\":\"twitterstream_2013-01-03T00:00:00.000Z_2013-01-04T00:00:00.000Z_2013-01-04T04:09:13.590Z_v9\"}'); INSERT INTO druid_segments (id,dataSource,created_date,start,end,partitioned,version,used,payload) VALUES ('wikipedia_editstream_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9','wikipedia_editstream','2013-03-15T20:49:52.348Z','2012-12-29T00:00:00.000Z','2013-01-10T08:00:00.000Z',0,'2013-01-10T08:13:47.830Z_v9',1,'{\"dataSource\":\"wikipedia_editstream\",\"interval\":\"2012-12-29T00:00:00.000Z/2013-01-10T08:00:00.000Z\",\"version\":\"2013-01-10T08:13:47.830Z_v9\",\"loadSpec\":{\"type\":\"s3_zip\",\"bucket\":\"static.druid.io\",\"key\":\"data/segments/wikipedia_editstream/2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z/2013-01-10T08:13:47.830Z_v9/0/index.zip\"},\"dimensions\":\"anonymous,area_code,city,continent_code,country_name,dma_code,geo,language,namespace,network,newpage,page,postal_code,region_lookup,robot,unpatrolled,user\",\"metrics\":\"added,count,deleted,delta,delta_hist,unique_users,variation\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":446027801,\"identifier\":\"wikipedia_editstream_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\"}'); INSERT INTO druid_segments (id, dataSource, created_date, start, end, partitioned, version, used, payload) VALUES ('wikipedia_2013-08-01T00:00:00.000Z_2013-08-02T00:00:00.000Z_2013-08-08T21:22:48.989Z', 'wikipedia', '2013-08-08T21:26:23.799Z', '2013-08-01T00:00:00.000Z', '2013-08-02T00:00:00.000Z', '0', '2013-08-08T21:22:48.989Z', '1', '{\"dataSource\":\"wikipedia\",\"interval\":\"2013-08-01T00:00:00.000Z/2013-08-02T00:00:00.000Z\",\"version\":\"2013-08-08T21:22:48.989Z\",\"loadSpec\":{\"type\":\"s3_zip\",\"bucket\":\"static.druid.io\",\"key\":\"data/segments/wikipedia/20130801T000000.000Z_20130802T000000.000Z/2013-08-08T21_22_48.989Z/0/index.zip\"},\"dimensions\":\"dma_code,continent_code,geo,area_code,robot,country_name,network,city,namespace,anonymous,unpatrolled,page,postal_code,language,newpage,user,region_lookup\",\"metrics\":\"count,delta,variation,added,deleted\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":24664730,\"identifier\":\"wikipedia_2013-08-01T00:00:00.000Z_2013-08-02T00:00:00.000Z_2013-08-08T21:22:48.989Z\"}'); -INSERT INTO druid_tasks (id, created_date, datasource, payload, status_payload, active) VALUES ('index_auth_test_2019-04-30T01:13:31.893Z', '2019-04-30T01:13:31.893Z', 'auth_test', 
'{\"id\":\"index_auth_test_2019-04-30T01:13:31.893Z\",\"created_date\":\"2019-04-30T01:13:31.893Z\",\"datasource\":\"auth_test\",\"active\":0}', '{\"id\":\"index_auth_test_2019-04-30T01:13:31.893Z\",\"status\":\"SUCCESS\",\"duration\":1}', 0); +INSERT INTO druid_tasks (id, created_date, datasource, payload, status_payload, active) VALUES ('index_auth_test_2030-04-30T01:13:31.893Z', '2030-04-30T01:13:31.893Z', 'auth_test', '{\"id\":\"index_auth_test_2030-04-30T01:13:31.893Z\",\"created_date\":\"2030-04-30T01:13:31.893Z\",\"datasource\":\"auth_test\",\"active\":0}', '{\"id\":\"index_auth_test_2030-04-30T01:13:31.893Z\",\"status\":\"SUCCESS\",\"duration\":1}', 0); INSERT INTO druid_segments (id,dataSource,created_date,start,end,partitioned,version,used,payload) VALUES ('auth_test_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9','auth_test','2013-03-15T20:49:52.348Z','2012-12-29T00:00:00.000Z','2013-01-10T08:00:00.000Z',0,'2013-01-10T08:13:47.830Z_v9',1,'{\"dataSource\":\"auth_test\",\"interval\":\"2012-12-29T00:00:00.000Z/2013-01-10T08:00:00.000Z\",\"version\":\"2013-01-10T08:13:47.830Z_v9\",\"loadSpec\":{\"type\":\"s3_zip\",\"bucket\":\"static.druid.io\",\"key\":\"data/segments/wikipedia_editstream/2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z/2013-01-10T08:13:47.830Z_v9/0/index.zip\"},\"dimensions\":\"anonymous,area_code,city,continent_code,country_name,dma_code,geo,language,namespace,network,newpage,page,postal_code,region_lookup,robot,unpatrolled,user\",\"metrics\":\"added,count,deleted,delta,delta_hist,unique_users,variation\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":446027801,\"identifier\":\"auth_test_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\"}'); diff --git a/integration-tests/src/test/resources/results/auth_test_sys_schema_segments.json b/integration-tests/src/test/resources/results/auth_test_sys_schema_segments.json index a6d5e59622c0..f2046dedf3a6 100644 --- a/integration-tests/src/test/resources/results/auth_test_sys_schema_segments.json +++ b/integration-tests/src/test/resources/results/auth_test_sys_schema_segments.json @@ -12,6 +12,7 @@ "is_published": 1, "is_available": 1, "is_realtime": 0, - "payload": "{\"dataSource\":\"auth_test\",\"interval\":\"2012-12-29T00:00:00.000Z/2013-01-10T08:00:00.000Z\",\"version\":\"2013-01-10T08:13:47.830Z_v9\",\"loadSpec\":{\"load spec is pruned, because it's not needed on Brokers, but eats a lot of heap space\":\"\"},\"dimensions\":\"anonymous,area_code,city,continent_code,country_name,dma_code,geo,language,namespace,network,newpage,page,postal_code,region_lookup,robot,unpatrolled,user\",\"metrics\":\"added,count,deleted,delta,delta_hist,unique_users,variation\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":446027801,\"identifier\":\"auth_test_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\"}" + "is_overshadowed": 0, + "payload": "{\"dataSegment\":{\"dataSource\":\"auth_test\",\"interval\":\"2012-12-29T00:00:00.000Z/2013-01-10T08:00:00.000Z\",\"version\":\"2013-01-10T08:13:47.830Z_v9\",\"loadSpec\":{\"load spec is pruned, because it's not needed on Brokers, but eats a lot of heap 
space\":\"\"},\"dimensions\":\"anonymous,area_code,city,continent_code,country_name,dma_code,geo,language,namespace,network,newpage,page,postal_code,region_lookup,robot,unpatrolled,user\",\"metrics\":\"added,count,deleted,delta,delta_hist,unique_users,variation\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":446027801,\"identifier\":\"auth_test_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\"},\"overshadowed\":false}" } ] diff --git a/integration-tests/src/test/resources/results/auth_test_sys_schema_tasks.json b/integration-tests/src/test/resources/results/auth_test_sys_schema_tasks.json index 53b3f28353c6..d27d7661eb52 100644 --- a/integration-tests/src/test/resources/results/auth_test_sys_schema_tasks.json +++ b/integration-tests/src/test/resources/results/auth_test_sys_schema_tasks.json @@ -1,9 +1,9 @@ [ { - "task_id": "index_auth_test_2019-04-30T01:13:31.893Z", + "task_id": "index_auth_test_2030-04-30T01:13:31.893Z", "type": null, "datasource": "auth_test", - "created_time": "2019-04-30T01:13:31.893Z", + "created_time": "2030-04-30T01:13:31.893Z", "queue_insertion_time": "1970-01-01T00:00:00.000Z", "status": "SUCCESS", "runner_status": "NONE",