From 5d2175e84098d48afb26dc2a74ad789bc8545c7b Mon Sep 17 00:00:00 2001 From: Josh Elser Date: Mon, 25 Nov 2019 14:44:44 -0500 Subject: [PATCH 01/10] HBASE-17115 Define UI admins via an ACL The Hadoop AccessControlList allows us to specify admins of the webUI via a list of users and/or groups. Admins of the WebUI can mutate the system, potentially seeing sensitive data or modifying the system. hbase.security.authentication.spnego.admin.users is a comma-separated list of users who are admins. hbase.security.authentication.spnego.admin.groups is a comma-separated list of groups whose membership are admins. Either of these configuration properties may also contain an asterisk (*) which denotes "anything" (any user or group). To maintain previous semantics, the UI defaults to accepting any user as an admin. Previously, when a user was denied from some endpoint that was designated for admins, they received an HTTP/401. In this case, it is more correct to return HTTP/403 as they were correctly authenticated, but they were disallowed from fetching the given resource. The test is based off of work by Nihal Jain in HBASE-20472. 
Co-authored-by: Nihal Jain --- .../apache/hadoop/hbase/http/HttpServer.java | 65 ++-- .../apache/hadoop/hbase/http/InfoServer.java | 44 ++- .../hadoop/hbase/http/log/LogLevel.java | 8 + .../hadoop/hbase/http/TestHttpServer.java | 10 +- .../hadoop/hbase/http/TestSSLHttpServer.java | 2 +- .../hbase/http/TestSpnegoHttpServer.java | 4 +- .../hbase-webapps/master/snapshot.jsp | 3 +- .../resources/hbase-webapps/master/table.jsp | 3 +- .../hadoop/hbase/TestInfoServersACL.java | 298 ++++++++++++++++++ 9 files changed, 391 insertions(+), 46 deletions(-) create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServersACL.java diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java index 661af4a49b47..85d5f9af22e2 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java @@ -131,6 +131,10 @@ public class HttpServer implements FilterContainer { "signature.secret.file"; public static final String HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_KEY = HTTP_AUTHENTICATION_PREFIX + HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_SUFFIX; + public static final String HTTP_SPNEGO_AUTHENTICATION_ADMIN_USERS_KEY = + HTTP_SPNEGO_AUTHENTICATION_PREFIX + "admin.users"; + public static final String HTTP_SPNEGO_AUTHENTICATION_ADMIN_GROUPS_KEY = + HTTP_SPNEGO_AUTHENTICATION_PREFIX + "admin.groups"; // The ServletContext attribute where the daemon Configuration // gets stored. @@ -220,7 +224,7 @@ public static class Builder { private String bindAddress; /** * @see #addEndpoint(URI) - * @deprecated Since 0.99.0. Use builder pattern vai {@link #addEndpoint(URI)} instead. + * @deprecated Since 0.99.0. Use builder pattern via {@link #addEndpoint(URI)} instead. 
*/ @Deprecated private int port = -1; @@ -593,6 +597,8 @@ private void initializeWebServer(String name, String hostName, webServer.setHandler(handlerCollection); + webAppContext.setAttribute(ADMINS_ACL, adminsAcl); + addDefaultApps(contexts, appDir, conf); addGlobalFilter("safety", QuotingInputFilter.class.getName(), null); @@ -712,23 +718,24 @@ private void setContextAttributes(ServletContextHandler context, Configuration c * Add default servlets. */ protected void addDefaultServlets(ContextHandlerCollection contexts) throws IOException { + // set up default servlets - addServlet("stacks", "/stacks", StackServlet.class); - addServlet("logLevel", "/logLevel", LogLevel.Servlet.class); + addPrivilegedServlet("stacks", "/stacks", StackServlet.class); + addPrivilegedServlet("logLevel", "/logLevel", LogLevel.Servlet.class); // Hadoop3 has moved completely to metrics2, and dropped support for Metrics v1's // MetricsServlet (see HADOOP-12504). We'll using reflection to load if against hadoop2. // Remove when we drop support for hbase on hadoop2.x. 
try { - Class clz = Class.forName("org.apache.hadoop.metrics.MetricsServlet"); - addServlet("metrics", "/metrics", clz); + Class clz = Class.forName("org.apache.hadoop.metrics.MetricsServlet"); + addUnprivilegedServlet("metrics", "/metrics", clz.asSubclass(HttpServlet.class)); } catch (Exception e) { // do nothing } - addServlet("jmx", "/jmx", JMXJsonServlet.class); - addServlet("conf", "/conf", ConfServlet.class); + addUnprivilegedServlet("jmx", "/jmx", JMXJsonServlet.class); + addUnprivilegedServlet("conf", "/conf", ConfServlet.class); final String asyncProfilerHome = ProfileServlet.getAsyncProfilerHome(); if (asyncProfilerHome != null && !asyncProfilerHome.trim().isEmpty()) { - addServlet("prof", "/prof", ProfileServlet.class); + addPrivilegedServlet("prof", "/prof", ProfileServlet.class); Path tmpDir = Paths.get(ProfileServlet.OUTPUT_DIR); if (Files.notExists(tmpDir)) { Files.createDirectories(tmpDir); @@ -738,7 +745,7 @@ protected void addDefaultServlets(ContextHandlerCollection contexts) throws IOEx genCtx.setResourceBase(tmpDir.toAbsolutePath().toString()); genCtx.setDisplayName("prof-output"); } else { - addServlet("prof", "/prof", ProfileServlet.DisabledServlet.class); + addUnprivilegedServlet("prof", "/prof", ProfileServlet.DisabledServlet.class); LOG.info("ASYNC_PROFILER_HOME environment variable and async.profiler.home system property " + "not specified. Disabling /prof endpoint."); } @@ -770,30 +777,28 @@ public void addJerseyResourcePackage(final String packageName, } /** - * Add a servlet in the server. + * Adds a servlet in the server that any user can access. 
* @param name The name of the servlet (can be passed as null) * @param pathSpec The path spec for the servlet * @param clazz The servlet class */ - public void addServlet(String name, String pathSpec, + public void addUnprivilegedServlet(String name, String pathSpec, Class clazz) { - addInternalServlet(name, pathSpec, clazz, false); - addFilterPathMapping(pathSpec, webAppContext); + addServletWithAuth(name, pathSpec, clazz, false); } /** - * Add an internal servlet in the server. - * Note: This method is to be used for adding servlets that facilitate - * internal communication and not for user facing functionality. For - * servlets added using this method, filters are not enabled. - * - * @param name The name of the servlet (can be passed as null) - * @param pathSpec The path spec for the servlet - * @param clazz The servlet class + * Adds a servlet in the server that only administrators can access. */ - public void addInternalServlet(String name, String pathSpec, + public void addPrivilegedServlet(String name, String pathSpec, Class clazz) { - addInternalServlet(name, pathSpec, clazz, false); + addServletWithAuth(name, pathSpec, clazz, true); + } + + void addServletWithAuth(String name, String pathSpec, + Class clazz, boolean requireAuth) { + addInternalServlet(name, pathSpec, clazz, requireAuth); + addFilterPathMapping(pathSpec, webAppContext); } /** @@ -809,23 +814,13 @@ public void addInternalServlet(String name, String pathSpec, * @param clazz The servlet class * @param requireAuth Require Kerberos authenticate to access servlet */ - public void addInternalServlet(String name, String pathSpec, + void addInternalServlet(String name, String pathSpec, Class clazz, boolean requireAuth) { ServletHolder holder = new ServletHolder(clazz); if (name != null) { holder.setName(name); } webAppContext.addServlet(holder, pathSpec); - - if(requireAuth && UserGroupInformation.isSecurityEnabled()) { - LOG.info("Adding Kerberos (SPNEGO) filter to " + name); - ServletHandler 
handler = webAppContext.getServletHandler(); - FilterMapping fmap = new FilterMapping(); - fmap.setPathSpec(pathSpec); - fmap.setFilterName(SPNEGO_FILTER); - fmap.setDispatches(FilterMapping.ALL); - handler.addFilterMapping(fmap); - } } @Override @@ -1255,7 +1250,7 @@ public static boolean hasAdministratorAccess( if (servletContext.getAttribute(ADMINS_ACL) != null && !userHasAdministratorAccess(servletContext, remoteUser)) { - response.sendError(HttpServletResponse.SC_UNAUTHORIZED, "User " + response.sendError(HttpServletResponse.SC_FORBIDDEN, "User " + remoteUser + " is unauthorized to access this page."); return false; } diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java index 695fcd727167..fd2fb94983d6 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java @@ -20,10 +20,14 @@ import java.io.IOException; import java.net.URI; +import javax.servlet.ServletContext; import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.yetus.audience.InterfaceAudience; /** @@ -81,13 +85,31 @@ public InfoServer(String name, String bindAddress, int port, boolean findPort, .setSignatureSecretFileKey( HttpServer.HTTP_AUTHENTICATION_SIGNATURE_SECRET_FILE_KEY) .setSecurityEnabled(true); + + // Set an admin ACL on sensitive webUI endpoints + AccessControlList acl = buildAdminAcl(c); + builder.setACL(acl); } this.httpServer = builder.build(); } + /** + * Builds an ACL that will restrict the users who can issue commands to endpoints on the UI + * which are meant only for administrators. 
+ */ + AccessControlList buildAdminAcl(Configuration conf) { + final String userGroups = conf.get(HttpServer.HTTP_SPNEGO_AUTHENTICATION_ADMIN_USERS_KEY, null); + final String adminGroups = conf.get(HttpServer.HTTP_SPNEGO_AUTHENTICATION_ADMIN_GROUPS_KEY, null); + if (userGroups == null && adminGroups == null) { + // Backwards compatibility - if the user doesn't have anything set, allow all users in. + return new AccessControlList("*", null); + } + return new AccessControlList(userGroups, adminGroups); + } + public void addServlet(String name, String pathSpec, Class clazz) { - this.httpServer.addServlet(name, pathSpec, clazz); + this.httpServer.addUnprivilegedServlet(name, pathSpec, clazz); } public void setAttribute(String name, Object value) { @@ -110,4 +132,24 @@ public int getPort() { public void stop() throws Exception { this.httpServer.stop(); } + + + /** + * Returns true if and only if UI authentication (spnego) is enabled, UI authorization is enabled, + * and the requesting user is defined as an administrator. If the UI is set to readonly, this + * method always returns false. 
+ */ + public static boolean canUserModifyUI( + HttpServletRequest req, ServletContext ctx, Configuration conf) { + if (conf.getBoolean("hbase.master.ui.readonly", false)) { + return false; + } + String remoteUser = req.getRemoteUser(); + if ("kerberos".equals(conf.get(HttpServer.HTTP_UI_AUTHENTICATION)) && + conf.getBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false) && + remoteUser != null) { + return HttpServer.userHasAdministratorAccess(ctx, remoteUser); + } + return false; + } } diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java index 003fa0d772a9..8135cbbd5b00 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java @@ -324,6 +324,14 @@ public void doGet(HttpServletRequest request, HttpServletResponse response) response)) { return; } + // Disallow modification of the LogLevel if explicitly set to readonly + Configuration conf = (Configuration) getServletContext().getAttribute( + HttpServer.CONF_CONTEXT_ATTRIBUTE); + if (conf.getBoolean("hbase.master.ui.readonly", false)) { + response.sendError(HttpServletResponse.SC_FORBIDDEN, "Modification of HBase via" + + " the UI is disallowed in configuration."); + return; + } response.setContentType("text/html"); PrintWriter out; try { diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java index 1ab1140a74b8..c2532e801e3c 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java @@ -150,10 +150,10 @@ public void doGet(HttpServletRequest request, HttpServletResponse response) thro Configuration conf = new Configuration(); conf.setInt(HttpServer.HTTP_MAX_THREADS, MAX_THREADS); 
server = createTestServer(conf); - server.addServlet("echo", "/echo", EchoServlet.class); - server.addServlet("echomap", "/echomap", EchoMapServlet.class); - server.addServlet("htmlcontent", "/htmlcontent", HtmlContentServlet.class); - server.addServlet("longheader", "/longheader", LongHeaderServlet.class); + server.addUnprivilegedServlet("echo", "/echo", EchoServlet.class); + server.addUnprivilegedServlet("echomap", "/echomap", EchoMapServlet.class); + server.addUnprivilegedServlet("htmlcontent", "/htmlcontent", HtmlContentServlet.class); + server.addUnprivilegedServlet("longheader", "/longheader", LongHeaderServlet.class); server.addJerseyResourcePackage( JerseyResource.class.getPackage().getName(), "/jersey/*"); server.start(); @@ -582,7 +582,7 @@ public void testXFrameHeaderSameOrigin() throws Exception { .addEndpoint(new URI("http://localhost:0")) .setFindPort(true).setConf(conf).build(); myServer.setAttribute(HttpServer.CONF_CONTEXT_ATTRIBUTE, conf); - myServer.addServlet("echo", "/echo", EchoServlet.class); + myServer.addUnprivilegedServlet("echo", "/echo", EchoServlet.class); myServer.start(); String serverURL = "http://" diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java index c62ca6561cf3..364ff3d5edd4 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSSLHttpServer.java @@ -95,7 +95,7 @@ public static void setup() throws Exception { .trustStore(sslConf.get("ssl.server.truststore.location"), HBaseConfiguration.getPassword(sslConf, "ssl.server.truststore.password", null), sslConf.get("ssl.server.truststore.type", "jks")).build(); - server.addServlet("echo", "/echo", TestHttpServer.EchoServlet.class); + server.addUnprivilegedServlet("echo", "/echo", TestHttpServer.EchoServlet.class); server.start(); baseUrl = new URL("https://" + 
NetUtils.getHostPortString(server.getConnectorAddress(0))); diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java index 8d3cd9568a1d..c39ca9f5e49c 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestSpnegoHttpServer.java @@ -108,7 +108,7 @@ public static void setupServer() throws Exception { Configuration conf = buildSpnegoConfiguration(serverPrincipal, infoServerKeytab); server = createTestServerWithSecurity(conf); - server.addServlet("echo", "/echo", EchoServlet.class); + server.addUnprivilegedServlet("echo", "/echo", EchoServlet.class); server.addJerseyResourcePackage(JerseyResource.class.getPackage().getName(), "/jersey/*"); server.start(); baseUrl = getServerURL(server); @@ -252,7 +252,7 @@ public void testMissingConfigurationThrowsException() throws Exception { // Intentionally skip keytab and principal HttpServer customServer = createTestServerWithSecurity(conf); - customServer.addServlet("echo", "/echo", EchoServlet.class); + customServer.addUnprivilegedServlet("echo", "/echo", EchoServlet.class); customServer.addJerseyResourcePackage(JerseyResource.class.getPackage().getName(), "/jersey/*"); customServer.start(); } diff --git a/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp b/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp index fc75ca773d88..9b1328a3a32b 100644 --- a/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp +++ b/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp @@ -22,6 +22,7 @@ import="org.apache.hadoop.conf.Configuration" import="org.apache.hadoop.hbase.client.Admin" import="org.apache.hadoop.hbase.client.SnapshotDescription" + import="org.apache.hadoop.hbase.http.InfoServer" import="org.apache.hadoop.hbase.master.HMaster" 
import="org.apache.hadoop.hbase.snapshot.SnapshotInfo" import="org.apache.hadoop.util.StringUtils" @@ -30,7 +31,7 @@ <% HMaster master = (HMaster)getServletContext().getAttribute(HMaster.MASTER); Configuration conf = master.getConfiguration(); - boolean readOnly = conf.getBoolean("hbase.master.ui.readonly", false); + boolean readOnly = !InfoServer.canUserModifyUI(request, getServletContext(), conf); String snapshotName = request.getParameter("name"); SnapshotDescription snapshot = null; SnapshotInfo.SnapshotStats stats = null; diff --git a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp index b5c677a9a53d..33717b094c9a 100644 --- a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp +++ b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp @@ -50,6 +50,7 @@ import="org.apache.hadoop.hbase.client.RegionLocator" import="org.apache.hadoop.hbase.client.RegionReplicaUtil" import="org.apache.hadoop.hbase.client.Table" + import="org.apache.hadoop.hbase.http.InfoServer" import="org.apache.hadoop.hbase.master.HMaster" import="org.apache.hadoop.hbase.master.RegionState" import="org.apache.hadoop.hbase.master.assignment.RegionStates" @@ -109,7 +110,7 @@ Table table; boolean withReplica = false; boolean showFragmentation = conf.getBoolean("hbase.master.ui.fragmentation.enabled", false); - boolean readOnly = conf.getBoolean("hbase.master.ui.readonly", false); + boolean readOnly = !InfoServer.canUserModifyUI(request, getServletContext(), conf); int numMetaReplicas = conf.getInt(HConstants.META_REPLICAS_NUM, HConstants.DEFAULT_META_REPLICA_NUM); Map frags = null; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServersACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServersACL.java new file mode 100644 index 000000000000..4678e874d3ae --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServersACL.java @@ -0,0 
+1,298 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.net.HttpURLConnection; +import java.net.URL; +import java.security.PrivilegedExceptionAction; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.CommonConfigurationKeys; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; +import org.apache.hadoop.hbase.http.HttpServer; +import org.apache.hadoop.hbase.security.HBaseKerberosUtils; +import org.apache.hadoop.hbase.security.token.TokenProvider; +import org.apache.hadoop.hbase.testclassification.MiscTests; +import org.apache.hadoop.hbase.testclassification.SmallTests; +import org.apache.hadoop.hbase.util.FSUtils; +import org.apache.hadoop.hbase.util.Pair; +import org.apache.hadoop.minikdc.MiniKdc; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.http.auth.AuthSchemeProvider; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.KerberosCredentials; +import 
org.apache.http.client.config.AuthSchemes; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.config.Lookup; +import org.apache.http.config.RegistryBuilder; +import org.apache.http.impl.auth.SPNegoSchemeFactory; +import org.apache.http.impl.client.BasicCredentialsProvider; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.util.EntityUtils; +import org.ietf.jgss.GSSCredential; +import org.ietf.jgss.GSSManager; +import org.ietf.jgss.GSSName; +import org.ietf.jgss.Oid; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import org.junit.experimental.categories.Category; +import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Testing info servers for admin acl. + */ +@Category({ MiscTests.class, SmallTests.class }) +public class TestInfoServersACL { + + @ClassRule + public static final HBaseClassTestRule CLASS_RULE = + HBaseClassTestRule.forClass(TestInfoServersACL.class); + + private static final Logger LOG = LoggerFactory.getLogger(TestInfoServersACL.class); + private final static HBaseTestingUtility UTIL = new HBaseTestingUtility(); + private static Configuration conf; + + protected static String USERNAME; + private static LocalHBaseCluster CLUSTER; + private static final File KEYTAB_FILE = new File(UTIL.getDataTestDir("keytab").toUri().getPath()); + private static MiniKdc KDC; + private static String HOST = "localhost"; + private static String PRINCIPAL; + private static String HTTP_PRINCIPAL; + + @Rule + public TestName name = new TestName(); + + // user/group present in hbase.admin.acl + private static final String USER_ADMIN_STR = "admin"; + + // user with no permissions + private static final String USER_NONE_STR = "none"; + + @BeforeClass + public static void 
beforeClass() throws Exception { + conf = UTIL.getConfiguration(); + KDC = UTIL.setupMiniKdc(KEYTAB_FILE); + USERNAME = UserGroupInformation.getLoginUser().getShortUserName(); + PRINCIPAL = USERNAME + "/" + HOST; + HTTP_PRINCIPAL = "HTTP/" + HOST; + // Create principals for services and the test users + KDC.createPrincipal(KEYTAB_FILE, PRINCIPAL, HTTP_PRINCIPAL, USER_ADMIN_STR, USER_NONE_STR); + UTIL.startMiniZKCluster(); + + HBaseKerberosUtils.setSecuredConfiguration(conf, + PRINCIPAL + "@" + KDC.getRealm(), HTTP_PRINCIPAL + "@" + KDC.getRealm()); + HBaseKerberosUtils.setSSLConfiguration(UTIL, TestInfoServersACL.class); + + conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, + TokenProvider.class.getName()); + UTIL.startMiniDFSCluster(1); + Path rootdir = UTIL.getDataTestDirOnTestFS("TestInfoServersACL"); + FSUtils.setRootDir(conf, rootdir); + + // The info servers do not run in tests by default. + // Set them to ephemeral ports so they will start + // setup configuration + conf.setInt(HConstants.MASTER_INFO_PORT, 0); + conf.setInt(HConstants.REGIONSERVER_INFO_PORT, 0); + + conf.set(HttpServer.HTTP_UI_AUTHENTICATION, "kerberos"); + conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_PRINCIPAL_KEY, HTTP_PRINCIPAL); + conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_KEYTAB_KEY, KEYTAB_FILE.getAbsolutePath()); + + // ACL lists work only when "hadoop.security.authorization" is set to true + conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true); + // only user admin will have acl access + conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_ADMIN_USERS_KEY, USER_ADMIN_STR); + + CLUSTER = new LocalHBaseCluster(conf, 1); + CLUSTER.startup(); + } + + /** + * Helper method to shut down the cluster (if running) + */ + @AfterClass + public static void shutDownMiniCluster() throws Exception { + if (CLUSTER != null) { + CLUSTER.shutdown(); + CLUSTER.join(); + } + if (KDC != null) { + KDC.stop(); + } + UTIL.shutdownMiniCluster(); + } + + @Test + public void 
testAuthorizedUser() throws Exception { + UserGroupInformation admin = UserGroupInformation.loginUserFromKeytabAndReturnUGI( + USER_ADMIN_STR, KEYTAB_FILE.getAbsolutePath()); + admin.doAs(new PrivilegedExceptionAction() { + @Override public Void run() throws Exception { + // Check the expected content is present in the http response + String expectedContent = "Get Log Level"; + Pair pair = getLogLevelPage(); + assertEquals(HttpURLConnection.HTTP_OK, pair.getFirst().intValue()); + assertTrue("expected=" + expectedContent + ", content=" + pair.getSecond(), + pair.getSecond().contains(expectedContent)); + return null; + } + }); + } + + @Test + public void testUnauthorizedUser() throws Exception { + UserGroupInformation nonAdmin = UserGroupInformation.loginUserFromKeytabAndReturnUGI( + USER_NONE_STR, KEYTAB_FILE.getAbsolutePath()); + nonAdmin.doAs(new PrivilegedExceptionAction() { + @Override public Void run() throws Exception { + Pair pair = getLogLevelPage(); + assertEquals(HttpURLConnection.HTTP_FORBIDDEN, pair.getFirst().intValue()); + return null; + } + }); + } + + @Test + public void testTableActionsAvailableForAdmins() throws Exception { + final String expectedAuthorizedContent = "Actions:"; + UserGroupInformation admin = UserGroupInformation.loginUserFromKeytabAndReturnUGI( + USER_ADMIN_STR, KEYTAB_FILE.getAbsolutePath()); + admin.doAs(new PrivilegedExceptionAction() { + @Override public Void run() throws Exception { + // Check the expected content is present in the http response + Pair pair = getTablePage(TableName.META_TABLE_NAME); + assertEquals(HttpURLConnection.HTTP_OK, pair.getFirst().intValue()); + assertTrue("expected=" + expectedAuthorizedContent + ", content=" + pair.getSecond(), + pair.getSecond().contains(expectedAuthorizedContent)); + return null; + } + }); + + UserGroupInformation nonAdmin = UserGroupInformation.loginUserFromKeytabAndReturnUGI( + USER_NONE_STR, KEYTAB_FILE.getAbsolutePath()); + nonAdmin.doAs(new PrivilegedExceptionAction() { + 
@Override public Void run() throws Exception { + Pair pair = getTablePage(TableName.META_TABLE_NAME); + assertEquals(HttpURLConnection.HTTP_OK, pair.getFirst().intValue()); + assertFalse("should not find=" + expectedAuthorizedContent + ", content=" + + pair.getSecond(), pair.getSecond().contains(expectedAuthorizedContent)); + return null; + } + }); + } + + @Test + public void testLogsAvailableForAdmins() throws Exception { + final String expectedAuthorizedContent = "Directory: /logs/"; + UserGroupInformation admin = UserGroupInformation.loginUserFromKeytabAndReturnUGI( + USER_ADMIN_STR, KEYTAB_FILE.getAbsolutePath()); + admin.doAs(new PrivilegedExceptionAction() { + @Override public Void run() throws Exception { + // Check the expected content is present in the http response + Pair pair = getLogsPage(); + assertEquals(HttpURLConnection.HTTP_OK, pair.getFirst().intValue()); + assertTrue("expected=" + expectedAuthorizedContent + ", content=" + pair.getSecond(), + pair.getSecond().contains(expectedAuthorizedContent)); + return null; + } + }); + + UserGroupInformation nonAdmin = UserGroupInformation.loginUserFromKeytabAndReturnUGI( + USER_NONE_STR, KEYTAB_FILE.getAbsolutePath()); + nonAdmin.doAs(new PrivilegedExceptionAction() { + @Override public Void run() throws Exception { + Pair pair = getLogsPage(); + assertEquals(HttpURLConnection.HTTP_FORBIDDEN, pair.getFirst().intValue()); + return null; + } + }); + } + + private String getInfoServerHostAndPort() { + return "http://localhost:" + CLUSTER.getActiveMaster().getInfoServer().getPort(); + } + + private Pair getLogLevelPage() throws Exception { + // Build the url which we want to connect to + URL url = new URL(getInfoServerHostAndPort() + "/logLevel"); + return getUrlContent(url); + } + + private Pair getTablePage(TableName tn) throws Exception { + URL url = new URL(getInfoServerHostAndPort() + "/table.jsp?name=" + tn.getNameAsString()); + return getUrlContent(url); + } + + private Pair getLogsPage() throws Exception 
{ + URL url = new URL(getInfoServerHostAndPort() + "/logs/"); + return getUrlContent(url); + } + + /** + * Retrieves the content of the specified URL. The content will only be returned if the status + * code for the operation was HTTP 200/OK. + */ + private Pair getUrlContent(URL url) throws Exception { + try (CloseableHttpClient client = createHttpClient( + UserGroupInformation.getCurrentUser().getUserName())) { + CloseableHttpResponse resp = client.execute(new HttpGet(url.toURI())); + int code = resp.getStatusLine().getStatusCode(); + if (code == HttpURLConnection.HTTP_OK) { + return new Pair<>(code, EntityUtils.toString(resp.getEntity())); + } + return new Pair<>(code, null); + } + } + + private CloseableHttpClient createHttpClient(String clientPrincipal) throws Exception { + // Logs in with Kerberos via GSS + GSSManager gssManager = GSSManager.getInstance(); + // jGSS Kerberos login constant + Oid oid = new Oid("1.2.840.113554.1.2.2"); + GSSName gssClient = gssManager.createName(clientPrincipal, GSSName.NT_USER_NAME); + GSSCredential credential = gssManager.createCredential(gssClient, + GSSCredential.DEFAULT_LIFETIME, oid, GSSCredential.INITIATE_ONLY); + + Lookup authRegistry = RegistryBuilder.create() + .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true, true)) + .build(); + + BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(AuthScope.ANY, new KerberosCredentials(credential)); + + return HttpClients.custom() + .setDefaultAuthSchemeRegistry(authRegistry) + .setDefaultCredentialsProvider(credentialsProvider) + .build(); + } +} From 581c02a45ce2108752ebdf77ec3ab48c570e39ba Mon Sep 17 00:00:00 2001 From: Josh Elser Date: Fri, 17 Jan 2020 13:39:42 -0500 Subject: [PATCH 02/10] Addressing feedback from busbey * Expand on javadoc for add[Un]PrivilegedServlet method(s) * Expand on the securing the webUI section of the book --- .../apache/hadoop/hbase/http/HttpServer.java | 13 +++- 
src/main/asciidoc/_chapters/security.adoc | 65 ++++++++++++++++++- 2 files changed, 74 insertions(+), 4 deletions(-) diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java index 85d5f9af22e2..19a373f335b0 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java @@ -777,7 +777,9 @@ public void addJerseyResourcePackage(final String packageName, } /** - * Adds a servlet in the server that any user can access. + * Adds a servlet in the server that any user can access. This method differs from + * {@link #addPrivilegedServlet(String, String, Class)} in that any authenticated user + * can interact with the servlet added by this method. * @param name The name of the servlet (can be passed as null) * @param pathSpec The path spec for the servlet * @param clazz The servlet class @@ -788,13 +790,20 @@ public void addUnprivilegedServlet(String name, String pathSpec, } /** - * Adds a servlet in the server that only administrators can access. + * Adds a servlet in the server that only administrators can access. This method differs from + * {@link #addUnprivilegedServlet(String, String, Class)} in that only those authenticated users + * who are identified as administrators can interact with the servlet added by this method. */ public void addPrivilegedServlet(String name, String pathSpec, Class clazz) { addServletWithAuth(name, pathSpec, clazz, true); } + /** + * Internal method to add a servlet to the HTTP server. Developers should not call this method + * directly, but invoke it via {@link #addUnprivilegedServlet(String, String, Class)} or + * {@link #addPrivilegedServlet(String, String, Class)}. 
+ */ void addServletWithAuth(String name, String pathSpec, Class clazz, boolean requireAuth) { addInternalServlet(name, pathSpec, clazz, requireAuth); diff --git a/src/main/asciidoc/_chapters/security.adoc b/src/main/asciidoc/_chapters/security.adoc index ef5a32144482..df7a3af6f075 100644 --- a/src/main/asciidoc/_chapters/security.adoc +++ b/src/main/asciidoc/_chapters/security.adoc @@ -37,9 +37,11 @@ HBase adheres to the Apache Software Foundation's policy on reported vulnerabili If you wish to send an encrypted report, you can use the GPG details provided for the general ASF security list. This will likely increase the response time to your report. ==== +== Web UI Security + HBase provides mechanisms to secure various components and aspects of HBase and how it relates to the rest of the Hadoop infrastructure, as well as clients and resources outside Hadoop. -== Using Secure HTTP (HTTPS) for the Web UI +=== Using Secure HTTP (HTTPS) for the Web UI A default HBase install uses insecure HTTP connections for Web UIs for the master and region servers. To enable secure HTTP (HTTPS) connections instead, set `hbase.ssl.enabled` to `true` in _hbase-site.xml_. @@ -70,7 +72,7 @@ If you know how to fix this without opening a second port for HTTPS, patches are ==== [[hbase.secure.spnego.ui]] -== Using SPNEGO for Kerberos authentication with Web UIs +=== Using SPNEGO for Kerberos authentication with Web UIs Kerberos-authentication to HBase Web UIs can be enabled via configuring SPNEGO with the `hbase.security.authentication.ui` property in _hbase-site.xml_. Enabling this authentication requires that HBase is also configured to use Kerberos authentication @@ -122,6 +124,65 @@ A number of properties exist to configure SPNEGO authentication for the web serv ---- +=== Defining administrators of the Web UI + +In the previous section, we cover how to enable authentication for the Web UI via SPNEGO. 
+However, some portions of the Web UI could be used to impact the availability and performance +of an HBase cluster. As such, it is desirable to ensure that only those with proper authority +can interact with these sensitive endpoints. + +HBase allows the administrators to be defined via a list of usernames or groups in hbase-site.xml. + +[source,xml] +---- + + hbase.security.authentication.spnego.admin.users + + + + hbase.security.authentication.spnego.admin.groups + + +---- + +The usernames are those which the Kerberos identity maps to, given the Hadoop `auth_to_local` rules +in core-site.xml. The groups here are the Unix groups associated with the mapped usernames. + +Consider the following scenario to describe how the configuration properties operate. Consider +three users which are defined in the Kerberos KDC: + +* `alice@COMPANY.COM` +* `bob@COMPANY.COM` +* `charlie@COMPANY.COM` + +The default Hadoop `auth_to_local` rules map these principals to the "shortname": + +* `alice` +* `bob` +* `charlie` + +Unix groups membership define that `alice` is a member of the group `admins`. +`bob` and `charlie` are not members of the `admins` group. + +[source,xml] +---- + + hbase.security.authentication.spnego.admin.users + charlie + + + hbase.security.authentication.spnego.admin.groups + admins + +---- + +Given the above configuration, `alice` is allowed to access sensitive endpoints in the Web UI +as she is a member of the `admins` group. `charlie` is also allowed to access sensitive endpoints +because he is explicitly listed as an admin in the configuration. `bob` is not allowed to access +sensitive endpoints, but can still use any non-sensitive endpoints in the Web UI. + +If it doesn't go without saying: non-authenticated users cannot access any part of the Web UI. 
+ [[hbase.secure.configuration]] == Secure Client Access to Apache HBase From 0a0c629485410944365d2c0434417debadc1c57e Mon Sep 17 00:00:00 2001 From: Josh Elser Date: Tue, 21 Jan 2020 13:27:35 -0500 Subject: [PATCH 03/10] Cleanup -- make [un]privileged method calls more obvious via InfoServer Make sure the DumpServlet is protected. --- .../apache/hadoop/hbase/http/InfoServer.java | 22 +++++++++++++++++++ .../apache/hadoop/hbase/master/HMaster.java | 2 +- .../hbase/regionserver/HRegionServer.java | 4 ++-- 3 files changed, 25 insertions(+), 3 deletions(-) diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java index fd2fb94983d6..e6c4a0942505 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java @@ -107,11 +107,33 @@ AccessControlList buildAdminAcl(Configuration conf) { return new AccessControlList(userGroups, adminGroups); } + /** + * Explicitly invoke {@link #addPrivilegedServlet(String, String, Class)} or + * {@link #addUnprivilegedServlet(String, String, Class)} instead of this method. + * This method will add a servlet which any authenticated user can access. 
+ */ + @Deprecated public void addServlet(String name, String pathSpec, Class clazz) { + addUnprivilegedServlet(name, pathSpec, clazz); + } + + /** + * @see HttpServer#addUnprivilegedServlet(String, String, Class) + */ + public void addUnprivilegedServlet(String name, String pathSpec, + Class clazz) { this.httpServer.addUnprivilegedServlet(name, pathSpec, clazz); } + /** + * @see HttpServer#addPrivilegedServlet(String, String, Class) + */ + public void addPrivilegedServlet(String name, String pathSpec, + Class clazz) { + this.httpServer.addPrivilegedServlet(name, pathSpec, clazz); + } + public void setAttribute(String name, Object value) { this.httpServer.setAttribute(name, value); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java index b78a83571bce..6864ce699203 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java @@ -742,7 +742,7 @@ protected RSRpcServices createRpcServices() throws IOException { @Override protected void configureInfoServer() { - infoServer.addServlet("master-status", "/master-status", MasterStatusServlet.class); + infoServer.addUnprivilegedServlet("master-status", "/master-status", MasterStatusServlet.class); infoServer.setAttribute(MASTER, this); if (LoadBalancer.isTablesOnMaster(conf)) { super.configureInfoServer(); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index 524fcd9bca6b..1fbd5483b95f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -760,7 +760,7 @@ protected RSRpcServices createRpcServices() throws IOException { } protected void configureInfoServer() { 
- infoServer.addServlet("rs-status", "/rs-status", RSStatusServlet.class); + infoServer.addUnprivilegedServlet("rs-status", "/rs-status", RSStatusServlet.class); infoServer.setAttribute(REGIONSERVER, this); } @@ -2124,7 +2124,7 @@ private void putUpWebUI() throws IOException { while (true) { try { this.infoServer = new InfoServer(getProcessName(), addr, port, false, this.conf); - infoServer.addServlet("dump", "/dump", getDumpServlet()); + infoServer.addPrivilegedServlet("dump", "/dump", getDumpServlet()); configureInfoServer(); this.infoServer.start(); break; From 10de5b8df6137e3486b03451c1c932076bf0d296 Mon Sep 17 00:00:00 2001 From: Josh Elser Date: Tue, 21 Jan 2020 15:30:07 -0500 Subject: [PATCH 04/10] Fix QA issues --- .../apache/hadoop/hbase/http/HttpServer.java | 1 - .../apache/hadoop/hbase/http/InfoServer.java | 3 +- .../hadoop/hbase/http/TestHttpServer.java | 2 +- .../hadoop/hbase/TestInfoServersACL.java | 35 +++++++++---------- 4 files changed, 19 insertions(+), 22 deletions(-) diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java index 19a373f335b0..16e48609ce19 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java @@ -79,7 +79,6 @@ import org.eclipse.jetty.servlet.FilterHolder; import org.eclipse.jetty.servlet.FilterMapping; import org.eclipse.jetty.servlet.ServletContextHandler; -import org.eclipse.jetty.servlet.ServletHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.eclipse.jetty.util.MultiException; import org.eclipse.jetty.util.ssl.SslContextFactory; diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java index e6c4a0942505..1d610f08ba4e 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java +++ 
b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java @@ -99,7 +99,8 @@ public InfoServer(String name, String bindAddress, int port, boolean findPort, */ AccessControlList buildAdminAcl(Configuration conf) { final String userGroups = conf.get(HttpServer.HTTP_SPNEGO_AUTHENTICATION_ADMIN_USERS_KEY, null); - final String adminGroups = conf.get(HttpServer.HTTP_SPNEGO_AUTHENTICATION_ADMIN_GROUPS_KEY, null); + final String adminGroups = conf.get( + HttpServer.HTTP_SPNEGO_AUTHENTICATION_ADMIN_GROUPS_KEY, null); if (userGroups == null && adminGroups == null) { // Backwards compatibility - if the user doesn't have anything set, allow all users in. return new AccessControlList("*", null); diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java index c2532e801e3c..881c66ac2087 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java @@ -490,7 +490,7 @@ public void testHasAdministratorAccess() throws Exception { Mockito.when(acls.isUserAllowed(Mockito.any())).thenReturn(false); Mockito.when(context.getAttribute(HttpServer.ADMINS_ACL)).thenReturn(acls); Assert.assertFalse(HttpServer.hasAdministratorAccess(context, request, response)); - Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_UNAUTHORIZED), + Mockito.verify(response).sendError(Mockito.eq(HttpServletResponse.SC_FORBIDDEN), Mockito.anyString()); //authorization ON & user NOT NULL & ACLs NOT NULL & user in in ACLs diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServersACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServersACL.java index 4678e874d3ae..7e2226e85ca2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServersACL.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServersACL.java @@ -275,24 +275,21 @@ private Pair getUrlContent(URL url) throws Exception { } private CloseableHttpClient createHttpClient(String clientPrincipal) throws Exception { - // Logs in with Kerberos via GSS - GSSManager gssManager = GSSManager.getInstance(); - // jGSS Kerberos login constant - Oid oid = new Oid("1.2.840.113554.1.2.2"); - GSSName gssClient = gssManager.createName(clientPrincipal, GSSName.NT_USER_NAME); - GSSCredential credential = gssManager.createCredential(gssClient, - GSSCredential.DEFAULT_LIFETIME, oid, GSSCredential.INITIATE_ONLY); - - Lookup authRegistry = RegistryBuilder.create() - .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true, true)) - .build(); - - BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider(); - credentialsProvider.setCredentials(AuthScope.ANY, new KerberosCredentials(credential)); - - return HttpClients.custom() - .setDefaultAuthSchemeRegistry(authRegistry) - .setDefaultCredentialsProvider(credentialsProvider) - .build(); + // Logs in with Kerberos via GSS + GSSManager gssManager = GSSManager.getInstance(); + // jGSS Kerberos login constant + Oid oid = new Oid("1.2.840.113554.1.2.2"); + GSSName gssClient = gssManager.createName(clientPrincipal, GSSName.NT_USER_NAME); + GSSCredential credential = gssManager.createCredential( + gssClient, GSSCredential.DEFAULT_LIFETIME, oid, GSSCredential.INITIATE_ONLY); + + Lookup authRegistry = RegistryBuilder.create() + .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true, true)).build(); + + BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(AuthScope.ANY, new KerberosCredentials(credential)); + + return HttpClients.custom().setDefaultAuthSchemeRegistry(authRegistry) + .setDefaultCredentialsProvider(credentialsProvider).build(); } } From 3279b85b06094d5970bc0ee6453024e8d51e69ef Mon Sep 17 00:00:00 2001 From: Josh Elser 
Date: Wed, 22 Jan 2020 13:13:44 -0500 Subject: [PATCH 05/10] Fix checkstyle and be more explicit in book additions --- .../main/java/org/apache/hadoop/hbase/http/InfoServer.java | 5 +++++ src/main/asciidoc/_chapters/security.adoc | 4 +++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java index 1d610f08ba4e..8c948adde8ef 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java @@ -112,6 +112,11 @@ AccessControlList buildAdminAcl(Configuration conf) { * Explicitly invoke {@link #addPrivilegedServlet(String, String, Class)} or * {@link #addUnprivilegedServlet(String, String, Class)} instead of this method. * This method will add a servlet which any authenticated user can access. + * + * @deprecated Use {@link #addUnprivilegedServlet(String, String, Class)} or + * {@link #addPrivilegedServlet(String, String, Class)} instead of this + * method which does not state outwardly what kind of authz rules will + * be applied to this servlet. */ @Deprecated public void addServlet(String name, String pathSpec, diff --git a/src/main/asciidoc/_chapters/security.adoc b/src/main/asciidoc/_chapters/security.adoc index df7a3af6f075..2635630cccb6 100644 --- a/src/main/asciidoc/_chapters/security.adoc +++ b/src/main/asciidoc/_chapters/security.adoc @@ -179,7 +179,9 @@ Unix groups membership define that `alice` is a member of the group `admins`. Given the above configuration, `alice` is allowed to access sensitive endpoints in the Web UI as she is a member of the `admins` group. `charlie` is also allowed to access sensitive endpoints because he is explicitly listed as an admin in the configuration. `bob` is not allowed to access -sensitive endpoints, but can still use any non-sensitive endpoints in the Web UI. 
+sensitive endpoints because he is not a member of the `admins` group nor is listed as an explicit +admin user via `hbase.security.authentication.spnego.admin.users`, but can still use any +non-sensitive endpoints in the Web UI. If it doesn't go without saying: non-authenticated users cannot access any part of the Web UI. From 85ffa84120fbaa73785cea1a75fbd2b8442cadfe Mon Sep 17 00:00:00 2001 From: Josh Elser Date: Thu, 23 Jan 2020 20:37:22 -0500 Subject: [PATCH 06/10] Use a single filter for all authz --- .../hbase/http/AdminAuthorizedFilter.java | 65 +++++++++++++++++ .../apache/hadoop/hbase/http/HttpServer.java | 47 +++++++++--- .../hbase/{ => http}/TestInfoServersACL.java | 73 ++++++++++++++++++- 3 files changed, 171 insertions(+), 14 deletions(-) create mode 100644 hbase-http/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedFilter.java rename hbase-server/src/test/java/org/apache/hadoop/hbase/{ => http}/TestInfoServersACL.java (79%) diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedFilter.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedFilter.java new file mode 100644 index 000000000000..215ff37e3bf5 --- /dev/null +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/AdminAuthorizedFilter.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.http; + +import java.io.IOException; + +import javax.servlet.Filter; +import javax.servlet.FilterChain; +import javax.servlet.FilterConfig; +import javax.servlet.ServletException; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.authorize.AccessControlList; +import org.apache.yetus.audience.InterfaceAudience; + +@InterfaceAudience.Private +public class AdminAuthorizedFilter implements Filter { + + private Configuration conf; + private AccessControlList adminsAcl; + + @Override public void init(FilterConfig filterConfig) throws ServletException { + adminsAcl = (AccessControlList) filterConfig.getServletContext().getAttribute( + HttpServer.ADMINS_ACL); + conf = (Configuration) filterConfig.getServletContext().getAttribute( + HttpServer.CONF_CONTEXT_ATTRIBUTE); + } + + @Override + public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) + throws IOException, ServletException { + if (!(request instanceof HttpServletRequest) || !(response instanceof HttpServletResponse)) { + throw new UnsupportedOperationException("Only accepts HTTP"); + } + HttpServletRequest httpReq = (HttpServletRequest) request; + HttpServletResponse httpResp = (HttpServletResponse) response; + + if (!HttpServer.hasAdministratorAccess(conf, adminsAcl, httpReq, httpResp)) { + return; + } + + chain.doFilter(request, 
response); + } + + @Override public void destroy() {} +} diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java index 16e48609ce19..6d4bcc4dfe12 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java @@ -36,6 +36,7 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; + import javax.servlet.Filter; import javax.servlet.FilterChain; import javax.servlet.FilterConfig; @@ -396,11 +397,6 @@ public HttpServer build() throws IOException { HttpServer server = new HttpServer(this); - if (this.securityEnabled) { - server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey, kerberosNameRulesKey, - signatureSecretFileKey); - } - for (URI ep : endpoints) { ServerConnector listener = null; String scheme = ep.getScheme(); @@ -569,11 +565,11 @@ private HttpServer(final Builder b) throws IOException { this.adminsAcl = b.adminsAcl; this.webAppContext = createWebAppContext(b.name, b.conf, adminsAcl, appDir); this.findPort = b.findPort; - initializeWebServer(b.name, b.hostName, b.conf, b.pathSpecs); + initializeWebServer(b.name, b.hostName, b.conf, b.pathSpecs, b); } private void initializeWebServer(String name, String hostName, - Configuration conf, String[] pathSpecs) + Configuration conf, String[] pathSpecs, HttpServer.Builder b) throws FileNotFoundException, IOException { Preconditions.checkNotNull(webAppContext); @@ -598,6 +594,9 @@ private void initializeWebServer(String name, String hostName, webAppContext.setAttribute(ADMINS_ACL, adminsAcl); + // Default apps need to be set first, so that all filters are applied to them. 
+ // Because they're added to defaultContexts, we need them there before we start + // adding filters addDefaultApps(contexts, appDir, conf); addGlobalFilter("safety", QuotingInputFilter.class.getName(), null); @@ -610,6 +609,12 @@ private void initializeWebServer(String name, String hostName, SecurityHeadersFilter.class.getName(), SecurityHeadersFilter.getDefaultParameters(conf)); + // But security needs to be enabled prior to adding the other servlets + if (b.securityEnabled) { + initSpnego(conf, hostName, b.usernameConfKey, b.keytabConfKey, b.kerberosNameRulesKey, + b.signatureSecretFileKey); + } + final FilterInitializer[] initializers = getFilterInitializers(conf); if (initializers != null) { conf = new Configuration(conf); @@ -696,7 +701,6 @@ protected void addDefaultApps(ContextHandlerCollection parent, } logContext.setDisplayName("logs"); setContextAttributes(logContext, conf); - addNoCacheFilter(webAppContext); defaultContexts.put(logContext, true); } // set up the context for "/static/*" @@ -814,7 +818,7 @@ void addServletWithAuth(String name, String pathSpec, * protect with Kerberos authentication. * Note: This method is to be used for adding servlets that facilitate * internal communication and not for user facing functionality. For - + * servlets added using this method, filters (except internal Kerberos + * servlets added using this method, filters (except internal Kerberos * filters) are not enabled. 
* * @param name The name of the servlet (can be passed as null) @@ -828,6 +832,15 @@ void addInternalServlet(String name, String pathSpec, if (name != null) { holder.setName(name); } + if (requireAuth) { + FilterHolder filter = new FilterHolder(AdminAuthorizedFilter.class); + filter.setName(AdminAuthorizedFilter.class.getSimpleName()); + FilterMapping fmap = new FilterMapping(); + fmap.setPathSpec(pathSpec); + fmap.setDispatches(FilterMapping.ALL); + fmap.setFilterName(AdminAuthorizedFilter.class.getSimpleName()); + webAppContext.getServletHandler().addFilter(filter, fmap); + } webAppContext.addServlet(holder, pathSpec); } @@ -1242,6 +1255,13 @@ public static boolean hasAdministratorAccess( HttpServletResponse response) throws IOException { Configuration conf = (Configuration) servletContext.getAttribute(CONF_CONTEXT_ATTRIBUTE); + AccessControlList acl = (AccessControlList) servletContext.getAttribute(ADMINS_ACL); + + return hasAdministratorAccess(conf, acl, request, response); + } + + public static boolean hasAdministratorAccess(Configuration conf, AccessControlList acl, + HttpServletRequest request, HttpServletResponse response) throws IOException { // If there is no authorization, anybody has administrator access. 
if (!conf.getBoolean( CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) { @@ -1256,8 +1276,7 @@ public static boolean hasAdministratorAccess( return false; } - if (servletContext.getAttribute(ADMINS_ACL) != null && - !userHasAdministratorAccess(servletContext, remoteUser)) { + if (acl != null && !userHasAdministratorAccess(acl, remoteUser)) { response.sendError(HttpServletResponse.SC_FORBIDDEN, "User " + remoteUser + " is unauthorized to access this page."); return false; @@ -1279,9 +1298,13 @@ public static boolean userHasAdministratorAccess(ServletContext servletContext, String remoteUser) { AccessControlList adminsAcl = (AccessControlList) servletContext .getAttribute(ADMINS_ACL); + return userHasAdministratorAccess(adminsAcl, remoteUser); + } + + public static boolean userHasAdministratorAccess(AccessControlList acl, String remoteUser) { UserGroupInformation remoteUserUGI = UserGroupInformation.createRemoteUser(remoteUser); - return adminsAcl != null && adminsAcl.isUserAllowed(remoteUserUGI); + return acl != null && acl.isUserAllowed(remoteUserUGI); } /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServersACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestInfoServersACL.java similarity index 79% rename from hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServersACL.java rename to hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestInfoServersACL.java index 7e2226e85ca2..612d39406cd4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestInfoServersACL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestInfoServersACL.java @@ -15,7 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.hadoop.hbase; +package org.apache.hadoop.hbase.http; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -29,8 +29,12 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.LocalHBaseCluster; +import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; -import org.apache.hadoop.hbase.http.HttpServer; import org.apache.hadoop.hbase.security.HBaseKerberosUtils; import org.apache.hadoop.hbase.security.token.TokenProvider; import org.apache.hadoop.hbase.testclassification.MiscTests; @@ -132,6 +136,7 @@ public static void beforeClass() throws Exception { conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, true); // only user admin will have acl access conf.set(HttpServer.HTTP_SPNEGO_AUTHENTICATION_ADMIN_USERS_KEY, USER_ADMIN_STR); + //conf.set(HttpServer.FILTER_INITIALIZERS_PROPERTY, ""); CLUSTER = new LocalHBaseCluster(conf, 1); CLUSTER.startup(); @@ -238,6 +243,60 @@ public void testLogsAvailableForAdmins() throws Exception { }); } + @Test + public void testDumpActionsAvailableForAdmins() throws Exception { + final String expectedAuthorizedContent = "Master status for"; + UserGroupInformation admin = UserGroupInformation.loginUserFromKeytabAndReturnUGI( + USER_ADMIN_STR, KEYTAB_FILE.getAbsolutePath()); + admin.doAs(new PrivilegedExceptionAction() { + @Override public Void run() throws Exception { + // Check the expected content is present in the http response + Pair pair = getMasterDumpPage(); + assertEquals(HttpURLConnection.HTTP_OK, pair.getFirst().intValue()); + assertTrue("expected=" + expectedAuthorizedContent + ", content=" + pair.getSecond(), + 
pair.getSecond().contains(expectedAuthorizedContent)); + return null; + } + }); + + UserGroupInformation nonAdmin = UserGroupInformation.loginUserFromKeytabAndReturnUGI( + USER_NONE_STR, KEYTAB_FILE.getAbsolutePath()); + nonAdmin.doAs(new PrivilegedExceptionAction() { + @Override public Void run() throws Exception { + Pair pair = getMasterDumpPage(); + assertEquals(HttpURLConnection.HTTP_FORBIDDEN, pair.getFirst().intValue()); + return null; + } + }); + } + + @Test + public void testStackActionsAvailableForAdmins() throws Exception { + final String expectedAuthorizedContent = "Process Thread Dump"; + UserGroupInformation admin = UserGroupInformation.loginUserFromKeytabAndReturnUGI( + USER_ADMIN_STR, KEYTAB_FILE.getAbsolutePath()); + admin.doAs(new PrivilegedExceptionAction() { + @Override public Void run() throws Exception { + // Check the expected content is present in the http response + Pair pair = getStacksPage(); + assertEquals(HttpURLConnection.HTTP_OK, pair.getFirst().intValue()); + assertTrue("expected=" + expectedAuthorizedContent + ", content=" + pair.getSecond(), + pair.getSecond().contains(expectedAuthorizedContent)); + return null; + } + }); + + UserGroupInformation nonAdmin = UserGroupInformation.loginUserFromKeytabAndReturnUGI( + USER_NONE_STR, KEYTAB_FILE.getAbsolutePath()); + nonAdmin.doAs(new PrivilegedExceptionAction() { + @Override public Void run() throws Exception { + Pair pair = getStacksPage(); + assertEquals(HttpURLConnection.HTTP_FORBIDDEN, pair.getFirst().intValue()); + return null; + } + }); + } + private String getInfoServerHostAndPort() { return "http://localhost:" + CLUSTER.getActiveMaster().getInfoServer().getPort(); } @@ -258,6 +317,16 @@ private Pair getLogsPage() throws Exception { return getUrlContent(url); } + private Pair getMasterDumpPage() throws Exception { + URL url = new URL (getInfoServerHostAndPort() + "/dump"); + return getUrlContent(url); + } + + private Pair getStacksPage() throws Exception { + URL url = new URL 
(getInfoServerHostAndPort() + "/stacks"); + return getUrlContent(url); + } + /** * Retrieves the content of the specified URL. The content will only be returned if the status * code for the operation was HTTP 200/OK. From 278ff07b4bef28d7e7c0abeda8ba3683fe3841a8 Mon Sep 17 00:00:00 2001 From: Josh Elser Date: Fri, 24 Jan 2020 12:23:48 -0500 Subject: [PATCH 07/10] Only add admin authz filter when security is enabled on UI --- .../org/apache/hadoop/hbase/http/HttpServer.java | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java index 6d4bcc4dfe12..564592733aa9 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java @@ -175,6 +175,7 @@ public List getServerConnectors() { protected final boolean findPort; protected final Map defaultContexts = new HashMap<>(); protected final List filterNames = new ArrayList<>(); + protected final boolean authenticationEnabled; static final String STATE_DESCRIPTION_ALIVE = " - alive"; static final String STATE_DESCRIPTION_NOT_LIVE = " - not live"; @@ -565,6 +566,7 @@ private HttpServer(final Builder b) throws IOException { this.adminsAcl = b.adminsAcl; this.webAppContext = createWebAppContext(b.name, b.conf, adminsAcl, appDir); this.findPort = b.findPort; + this.authenticationEnabled = b.securityEnabled; initializeWebServer(b.name, b.hostName, b.conf, b.pathSpecs, b); } @@ -610,7 +612,7 @@ private void initializeWebServer(String name, String hostName, SecurityHeadersFilter.getDefaultParameters(conf)); // But security needs to be enabled prior to adding the other servlets - if (b.securityEnabled) { + if (authenticationEnabled) { initSpnego(conf, hostName, b.usernameConfKey, b.keytabConfKey, b.kerberosNameRulesKey, b.signatureSecretFileKey); } @@ -808,8 +810,8 @@ public 
void addPrivilegedServlet(String name, String pathSpec, * {@link #addPrivilegedServlet(String, String, Class)}. */ void addServletWithAuth(String name, String pathSpec, - Class clazz, boolean requireAuth) { - addInternalServlet(name, pathSpec, clazz, requireAuth); + Class clazz, boolean requireAuthz) { + addInternalServlet(name, pathSpec, clazz, requireAuthz); addFilterPathMapping(pathSpec, webAppContext); } @@ -827,12 +829,12 @@ void addServletWithAuth(String name, String pathSpec, - * @param requireAuth Require Kerberos authenticate to access servlet + * @param requireAuthz Require admin authorization to access servlet */ void addInternalServlet(String name, String pathSpec, - Class clazz, boolean requireAuth) { + Class clazz, boolean requireAuthz) { ServletHolder holder = new ServletHolder(clazz); if (name != null) { holder.setName(name); } - if (requireAuth) { + if (authenticationEnabled && requireAuthz) { FilterHolder filter = new FilterHolder(AdminAuthorizedFilter.class); filter.setName(AdminAuthorizedFilter.class.getSimpleName()); FilterMapping fmap = new FilterMapping(); From 78c554105c3328aa14013d39b0461fe494680e30 Mon Sep 17 00:00:00 2001 From: Josh Elser Date: Fri, 24 Jan 2020 12:26:57 -0500 Subject: [PATCH 08/10] Fix QA nits --- .../main/java/org/apache/hadoop/hbase/http/InfoServer.java | 2 +- .../java/org/apache/hadoop/hbase/http/TestInfoServersACL.java | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java index 8c948adde8ef..6ee37cb95de4 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/InfoServer.java @@ -113,7 +113,7 @@ AccessControlList buildAdminAcl(Configuration conf) { * {@link #addUnprivilegedServlet(String, String, Class)} instead of this method. * This method will add a servlet which any authenticated user can access.
* - * @deprecated Use {@link #addUnprivilegedServet(String, String, Class)} or + * @deprecated Use {@link #addUnprivilegedServlet(String, String, Class)} or * {@link #addPrivilegedServlet(String, String, Class)} instead of this * method which does not state outwardly what kind of authz rules will * be applied to this servlet. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestInfoServersACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestInfoServersACL.java index 612d39406cd4..f99efb7f527f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestInfoServersACL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestInfoServersACL.java @@ -318,12 +318,12 @@ private Pair getLogsPage() throws Exception { } private Pair getMasterDumpPage() throws Exception { - URL url = new URL (getInfoServerHostAndPort() + "/dump"); + URL url = new URL(getInfoServerHostAndPort() + "/dump"); return getUrlContent(url); } private Pair getStacksPage() throws Exception { - URL url = new URL (getInfoServerHostAndPort() + "/stacks"); + URL url = new URL(getInfoServerHostAndPort() + "/stacks"); return getUrlContent(url); } From 1733ec10c3f002692962cb41995c8e6337d5c29a Mon Sep 17 00:00:00 2001 From: Josh Elser Date: Fri, 24 Jan 2020 16:10:14 -0500 Subject: [PATCH 09/10] Mark /metrics and /jmx as privileged --- .../apache/hadoop/hbase/http/HttpServer.java | 4 +- .../hadoop/hbase/http/TestInfoServersACL.java | 74 +++++++++++++++++++ 2 files changed, 76 insertions(+), 2 deletions(-) diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java index 564592733aa9..e316b73b2c3b 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java @@ -732,11 +732,11 @@ protected void addDefaultServlets(ContextHandlerCollection contexts) throws IOEx 
// Remove when we drop support for hbase on hadoop2.x. try { Class clz = Class.forName("org.apache.hadoop.metrics.MetricsServlet"); - addUnprivilegedServlet("metrics", "/metrics", clz.asSubclass(HttpServlet.class)); + addPrivilegedServlet("metrics", "/metrics", clz.asSubclass(HttpServlet.class)); } catch (Exception e) { // do nothing } - addUnprivilegedServlet("jmx", "/jmx", JMXJsonServlet.class); + addPrivilegedServlet("jmx", "/jmx", JMXJsonServlet.class); addUnprivilegedServlet("conf", "/conf", ConfServlet.class); final String asyncProfilerHome = ProfileServlet.getAsyncProfilerHome(); if (asyncProfilerHome != null && !asyncProfilerHome.trim().isEmpty()) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestInfoServersACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestInfoServersACL.java index f99efb7f527f..fa5dea651eb4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestInfoServersACL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestInfoServersACL.java @@ -297,6 +297,70 @@ public void testStackActionsAvailableForAdmins() throws Exception { }); } + @Test + public void testJmxAvailableForAdmins() throws Exception { + final String expectedAuthorizedContent = "Hadoop:service=HBase"; + UserGroupInformation admin = UserGroupInformation.loginUserFromKeytabAndReturnUGI( + USER_ADMIN_STR, KEYTAB_FILE.getAbsolutePath()); + admin.doAs(new PrivilegedExceptionAction() { + @Override public Void run() throws Exception { + // Check the expected content is present in the http response + Pair pair = getJmxPage(); + assertEquals(HttpURLConnection.HTTP_OK, pair.getFirst().intValue()); + assertTrue("expected=" + expectedAuthorizedContent + ", content=" + pair.getSecond(), + pair.getSecond().contains(expectedAuthorizedContent)); + return null; + } + }); + + UserGroupInformation nonAdmin = UserGroupInformation.loginUserFromKeytabAndReturnUGI( + USER_NONE_STR, KEYTAB_FILE.getAbsolutePath()); + 
nonAdmin.doAs(new PrivilegedExceptionAction() { + @Override public Void run() throws Exception { + Pair pair = getJmxPage(); + assertEquals(HttpURLConnection.HTTP_FORBIDDEN, pair.getFirst().intValue()); + return null; + } + }); + } + + @Test + public void testMetricsAvailableForAdmins() throws Exception { + // Looks like there's nothing exported to this, but leave it since + // it's Hadoop2 only and will eventually be removed due to that. + final String expectedAuthorizedContent = ""; + UserGroupInformation admin = UserGroupInformation.loginUserFromKeytabAndReturnUGI( + USER_ADMIN_STR, KEYTAB_FILE.getAbsolutePath()); + admin.doAs(new PrivilegedExceptionAction() { + @Override public Void run() throws Exception { + // Check the expected content is present in the http response + Pair pair = getMetricsPage(); + if (HttpURLConnection.HTTP_NOT_FOUND == pair.getFirst()) { + // Not on hadoop 2 + return null; + } + assertEquals(HttpURLConnection.HTTP_OK, pair.getFirst().intValue()); + assertTrue("expected=" + expectedAuthorizedContent + ", content=" + pair.getSecond(), + pair.getSecond().contains(expectedAuthorizedContent)); + return null; + } + }); + + UserGroupInformation nonAdmin = UserGroupInformation.loginUserFromKeytabAndReturnUGI( + USER_NONE_STR, KEYTAB_FILE.getAbsolutePath()); + nonAdmin.doAs(new PrivilegedExceptionAction() { + @Override public Void run() throws Exception { + Pair pair = getMetricsPage(); + if (HttpURLConnection.HTTP_NOT_FOUND == pair.getFirst()) { + // Not on hadoop 2 + return null; + } + assertEquals(HttpURLConnection.HTTP_FORBIDDEN, pair.getFirst().intValue()); + return null; + } + }); + } + private String getInfoServerHostAndPort() { return "http://localhost:" + CLUSTER.getActiveMaster().getInfoServer().getPort(); } @@ -327,6 +391,16 @@ private Pair getStacksPage() throws Exception { return getUrlContent(url); } + private Pair getJmxPage() throws Exception { + URL url = new URL(getInfoServerHostAndPort() + "/jmx"); + return getUrlContent(url); 
+ } + + private Pair getMetricsPage() throws Exception { + URL url = new URL(getInfoServerHostAndPort() + "/metrics"); + return getUrlContent(url); + } + /** * Retrieves the content of the specified URL. The content will only be returned if the status * code for the operation was HTTP 200/OK. From f91bc20da0042118ac3b3d45cf8d385a0c6925b9 Mon Sep 17 00:00:00 2001 From: Josh Elser Date: Fri, 24 Jan 2020 19:07:20 -0500 Subject: [PATCH 10/10] Add config to disable conf/ serving via UI --- .../apache/hadoop/hbase/http/HttpServer.java | 17 +++++++++++++---- src/main/asciidoc/_chapters/security.adoc | 18 ++++++++++++++++++ 2 files changed, 31 insertions(+), 4 deletions(-) diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java index e316b73b2c3b..23f59560c7ad 100644 --- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java +++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/HttpServer.java @@ -135,6 +135,9 @@ public class HttpServer implements FilterContainer { HTTP_SPNEGO_AUTHENTICATION_PREFIX + "admin.users"; public static final String HTTP_SPNEGO_AUTHENTICATION_ADMIN_GROUPS_KEY = HTTP_SPNEGO_AUTHENTICATION_PREFIX + "admin.groups"; + public static final String HTTP_PRIVILEGED_CONF_KEY = + "hbase.security.authentication.ui.config.protected"; + public static final boolean HTTP_PRIVILEGED_CONF_DEFAULT = false; // The ServletContext attribute where the daemon Configuration // gets stored. @@ -626,7 +629,7 @@ private void initializeWebServer(String name, String hostName, } } - addDefaultServlets(contexts); + addDefaultServlets(contexts, conf); if (pathSpecs != null) { for (String path : pathSpecs) { @@ -722,8 +725,8 @@ private void setContextAttributes(ServletContextHandler context, Configuration c /** * Add default servlets. 
*/ - protected void addDefaultServlets(ContextHandlerCollection contexts) throws IOException { - + protected void addDefaultServlets( + ContextHandlerCollection contexts, Configuration conf) throws IOException { // set up default servlets addPrivilegedServlet("stacks", "/stacks", StackServlet.class); addPrivilegedServlet("logLevel", "/logLevel", LogLevel.Servlet.class); @@ -737,7 +740,13 @@ protected void addDefaultServlets(ContextHandlerCollection contexts) throws IOEx // do nothing } addPrivilegedServlet("jmx", "/jmx", JMXJsonServlet.class); - addUnprivilegedServlet("conf", "/conf", ConfServlet.class); + // While we don't expect users to have sensitive information in their configuration, they + // might. Give them an option to not expose the service configuration to all users. + if (conf.getBoolean(HTTP_PRIVILEGED_CONF_KEY, HTTP_PRIVILEGED_CONF_DEFAULT)) { + addPrivilegedServlet("conf", "/conf", ConfServlet.class); + } else { + addUnprivilegedServlet("conf", "/conf", ConfServlet.class); + } final String asyncProfilerHome = ProfileServlet.getAsyncProfilerHome(); if (asyncProfilerHome != null && !asyncProfilerHome.trim().isEmpty()) { addPrivilegedServlet("prof", "/prof", ProfileServlet.class); diff --git a/src/main/asciidoc/_chapters/security.adoc b/src/main/asciidoc/_chapters/security.adoc index 2635630cccb6..107b2fff0e64 100644 --- a/src/main/asciidoc/_chapters/security.adoc +++ b/src/main/asciidoc/_chapters/security.adoc @@ -185,6 +185,24 @@ non-sensitive endpoints in the Web UI. If it doesn't go without saying: non-authenticated users cannot access any part of the Web UI. +=== Other UI security-related configuration + +While it is a clear anti-pattern for HBase developers, the developers acknowledge that the HBase +configuration (including Hadoop configuration files) may contain sensitive information. As such, +a user may find that they do not want to expose the HBase service-level configuration to all +authenticated users. 
They may configure HBase to require that a user be an admin to access +the service-level configuration via the HBase UI. This configuration is *false* by default +(any authenticated user may access the configuration). + +Users who wish to change this should set the following in their hbase-site.xml: +[source,xml] +---- + + hbase.security.authentication.ui.config.protected + true + +---- + [[hbase.secure.configuration]] == Secure Client Access to Apache HBase