HIVE-26670: Track every single HTTP request between beeline and hs2
abstractdog committed Oct 27, 2022
1 parent 16ce755 commit dd28dea
Showing 6 changed files with 119 additions and 4 deletions.
2 changes: 2 additions & 0 deletions common/src/java/org/apache/hadoop/hive/conf/Constants.java
@@ -103,4 +103,6 @@ public class Constants {
public static final Pattern COMPACTION_POOLS_PATTERN = Pattern.compile("hive\\.compactor\\.worker\\.(.*)\\.threads");
public static final String HIVE_COMPACTOR_WORKER_POOL = "hive.compactor.worker.pool";

public static final String HTTP_HEADER_REQUEST_TRACK = "Request-Track";
public static final String TIME_POSTFIX_REQUEST_TRACK = "_TIME";
}
28 changes: 25 additions & 3 deletions jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -70,6 +70,7 @@
import java.util.concurrent.Executor;
import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Stream;
import java.util.function.Supplier;

import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
@@ -144,6 +145,7 @@
import org.apache.http.protocol.HttpContext;
import org.apache.http.ssl.SSLContexts;
import org.apache.http.util.Args;
import org.apache.thrift.TBaseHelper;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.THttpClient;
@@ -182,6 +184,7 @@ public class HiveConnection implements java.sql.Connection {
private Subject loggedInSubject;
private int maxRetries = 1;
private IJdbcBrowserClient browserClient;
private Map<String, String> additionalHttpHeaders = new HashMap<String, String>();

/**
* Get all direct HiveServer2 URLs from a ZooKeeper based HiveServer2 URL
@@ -559,8 +562,7 @@ private CloseableHttpClient getHttpClient(Boolean useSsl) throws SQLException {
CookieStore cookieStore = isCookieEnabled ? new BasicCookieStore() : null;
HttpClientBuilder httpClientBuilder = null;
// Request interceptor for any request pre-processing logic
HttpRequestInterceptor requestInterceptor;
Map<String, String> additionalHttpHeaders = new HashMap<String, String>();
HttpRequestInterceptorBase requestInterceptor;
Map<String, String> customCookies = new HashMap<String, String>();

// Retrieve the additional HttpHeaders
@@ -752,8 +754,12 @@ protected boolean requestIsAborted(final HttpRequest request) {
httpClientBuilder
.setRedirectStrategy(new HiveJdbcSamlRedirectStrategy(browserClient));
}

requestInterceptor.setRequestTrackingEnabled(isRequestTrackingEnabled());

// Add the request interceptor to the client builder
httpClientBuilder.addInterceptorFirst(requestInterceptor);
httpClientBuilder.addInterceptorFirst(requestInterceptor.sessionId(getSessionId()));
httpClientBuilder.addInterceptorLast(new HttpResponseInterceptorBase());

// Add an interceptor to add in an XSRF header
httpClientBuilder.addInterceptorLast(new XsrfHttpRequestInterceptor());
@@ -813,6 +819,22 @@ RegistryBuilder.<ConnectionSocketFactory> create().register("https", socketFacto
return httpClientBuilder.build();
}

private boolean isRequestTrackingEnabled() {
return Boolean.valueOf(sessConfMap.get(JdbcConnectionParams.JDBC_PARAM_REQUEST_TRACK));
}

private Supplier<String> getSessionId() {
Supplier<String> sessionId = () -> {
if (sessHandle == null) {
return "NO_SESSION";
}
StringBuilder b = new StringBuilder();
TBaseHelper.toString(sessHandle.getSessionId().bufferForGuid(), b);
return b.toString().replaceAll("\\s", "");
};
return sessionId;
}

private String getJWT() {
String jwtCredential = getJWTStringFromSession();
if (jwtCredential == null || jwtCredential.isEmpty()) {
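The session id is handed to the interceptor as a Supplier rather than a String because the HTTP client and its interceptors are built before OpenSession has run; the GUID is therefore resolved lazily on each request and falls back to NO_SESSION until a session handle exists. A rough sketch of the id the interceptor derives from such a supplier (the GUID below is made up; the "HIVE_%s_%020d" format comes from getNewTrackHeader() in the next file):

import java.util.function.Supplier;

public class TrackIdSketch {
  public static void main(String[] args) {
    // Hypothetical session GUID; the real one comes from sessHandle.getSessionId().bufferForGuid().
    Supplier<String> sessionId = () -> "b870460169c14ff6857d8f0a82209e44";
    String track = String.format("HIVE_%s_%020d", sessionId.get(), 42L);
    System.out.println(track); // HIVE_b870460169c14ff6857d8f0a82209e44_00000000000000000042
  }
}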
36 changes: 36 additions & 0 deletions jdbc/src/java/org/apache/hive/jdbc/HttpRequestInterceptorBase.java
@@ -19,21 +19,33 @@
package org.apache.hive.jdbc;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Supplier;
import org.apache.hadoop.hive.conf.Constants;
import org.apache.http.Header;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.HttpRequestInterceptor;
import org.apache.http.client.CookieStore;
import org.apache.http.protocol.HttpContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public abstract class HttpRequestInterceptorBase implements HttpRequestInterceptor {
public static final Logger LOG = LoggerFactory.getLogger(HiveConnection.class.getName());

CookieStore cookieStore;
boolean isCookieEnabled;
String cookieName;
boolean isSSL;
Map<String, String> additionalHeaders;
Map<String, String> customCookies;
final AtomicLong trackCounter = new AtomicLong();
private Supplier<String> sessionId = null;

private boolean requestTrackingEnabled;

// Abstract function to add HttpAuth Header
protected abstract void addHttpAuthHeader(HttpRequest httpRequest, HttpContext httpContext)
@@ -77,6 +89,17 @@ public void process(HttpRequest httpRequest, HttpContext httpContext)
if (isCookieEnabled) {
httpContext.setAttribute(Utils.HIVE_SERVER2_RETRY_KEY, Utils.HIVE_SERVER2_CONST_FALSE);
}

if (requestTrackingEnabled) {
if (additionalHeaders == null) {
additionalHeaders = new HashMap<>();
}
String trackHeader = getNewTrackHeader();
LOG.info("{}:{}", Constants.HTTP_HEADER_REQUEST_TRACK, trackHeader);
additionalHeaders.put(Constants.HTTP_HEADER_REQUEST_TRACK, trackHeader);
httpContext.setAttribute(Constants.HTTP_HEADER_REQUEST_TRACK, trackHeader);
httpContext.setAttribute(trackHeader + Constants.TIME_POSTFIX_REQUEST_TRACK, System.currentTimeMillis());
}
// Insert the additional http headers
if (additionalHeaders != null) {
for (Map.Entry<String, String> entry : additionalHeaders.entrySet()) {
@@ -102,4 +125,17 @@ public void process(HttpRequest httpRequest, HttpContext httpContext)
throw new HttpException(e.getMessage(), e);
}
}

protected String getNewTrackHeader() {
return String.format("HIVE_%s_%020d", sessionId.get(), trackCounter.incrementAndGet());
}

public HttpRequestInterceptor sessionId(Supplier<String> sessionId) {
this.sessionId = sessionId;
return this;
}

public void setRequestTrackingEnabled(boolean requestTrackingEnabled) {
this.requestTrackingEnabled = requestTrackingEnabled;
}
}
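With tracking enabled, process() stamps every outgoing call: it builds a fresh id from the session-id supplier and the incrementing counter, logs it, sends it as the Request-Track header, and stores both the id and the request's start time in the HttpContext so the response interceptor can time the round trip. An illustrative client-side log line for such a request (id made up, logger prefix omitted):

Request-Track:HIVE_b870460169c14ff6857d8f0a82209e44_00000000000000000042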
44 changes: 44 additions & 0 deletions jdbc/src/java/org/apache/hive/jdbc/HttpResponseInterceptorBase.java
@@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.jdbc;

import java.io.IOException;

import org.apache.hadoop.hive.conf.Constants;
import org.apache.http.HttpException;
import org.apache.http.HttpResponse;
import org.apache.http.HttpResponseInterceptor;
import org.apache.http.protocol.HttpContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class HttpResponseInterceptorBase implements HttpResponseInterceptor {
public static final Logger LOG = LoggerFactory.getLogger(HiveConnection.class.getName());

@Override
public void process(HttpResponse response, HttpContext context) throws HttpException, IOException {
String trackHeader = (String) context.getAttribute(Constants.HTTP_HEADER_REQUEST_TRACK);
if (trackHeader == null) {
return;
}
long elapsed = System.currentTimeMillis() - (long) context.getAttribute(trackHeader + Constants.TIME_POSTFIX_REQUEST_TRACK);
LOG.info("Response to {} in {} ms", trackHeader, elapsed);
context.removeAttribute(Constants.HTTP_HEADER_REQUEST_TRACK);
context.removeAttribute(trackHeader + Constants.TIME_POSTFIX_REQUEST_TRACK);
}
}
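When the response arrives, the interceptor looks up the start timestamp stored under the track id plus the _TIME postfix, logs the elapsed time, and removes both attributes so they cannot leak into the next request on the same context. Continuing the made-up id above (the elapsed time is also made up), the matching client-side line would be:

Response to HIVE_b870460169c14ff6857d8f0a82209e44_00000000000000000042 in 13 ms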
2 changes: 2 additions & 0 deletions jdbc/src/java/org/apache/hive/jdbc/Utils.java
@@ -156,6 +156,8 @@ public static class JdbcConnectionParams {
static final String DEFAULT_COOKIE_NAMES_HS2 = "hive.server2.auth";
// The http header prefix for additional headers which have to be appended to the request
static final String HTTP_HEADER_PREFIX = "http.header.";
// Request tracking
static final String JDBC_PARAM_REQUEST_TRACK = "requestTrack";
// Set the fetchSize
static final String FETCH_SIZE = "fetchSize";
static final String INIT_FILE = "initFile";
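The flag is read from the connection's session variables, so it can be switched on per connection directly in the JDBC URL (and hence in beeline's -u argument). A minimal usage sketch, assuming an HS2 instance running in HTTP transport mode; host, port, user and httpPath are placeholders, and only requestTrack comes from this patch:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class RequestTrackUrlExample {
  public static void main(String[] args) throws Exception {
    String url = "jdbc:hive2://hs2host:10001/default;transportMode=http;"
        + "httpPath=cliservice;requestTrack=true";
    try (Connection conn = DriverManager.getConnection(url, "hive", "");
         Statement stmt = conn.createStatement()) {
      // Every HTTP request issued underneath this statement now carries a Request-Track header.
      stmt.execute("select 1");
    }
  }
}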
11 changes: 10 additions & 1 deletion service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
@@ -24,7 +24,6 @@
import java.nio.charset.StandardCharsets;
import java.security.PrivilegedExceptionAction;
import java.security.SecureRandom;
import java.text.ParseException;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;
@@ -45,6 +44,7 @@
import com.google.common.base.Preconditions;
import com.google.common.io.ByteStreams;

import org.apache.hadoop.hive.conf.Constants;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.shims.HadoopShims.KerberosNameShim;
@@ -153,6 +153,8 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)
String clientIpAddress;
boolean requireNewCookie = false;

logTrackingHeaderIfAny(request);

try {
if (hiveConf.getBoolean(ConfVars.HIVE_SERVER2_XSRF_FILTER_ENABLED.varname,false)){
boolean continueProcessing = Utils.doXsrfFilter(request,response,null,null);
@@ -310,6 +312,13 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)
}
}

private void logTrackingHeaderIfAny(HttpServletRequest request) {
if (request.getHeader(Constants.HTTP_HEADER_REQUEST_TRACK) != null) {
String requestTrackHeader = request.getHeader(Constants.HTTP_HEADER_REQUEST_TRACK);
LOG.info("{}:{}", Constants.HTTP_HEADER_REQUEST_TRACK, requestTrackHeader);
}
}

private String validateJWT(HttpServletRequest request, HttpServletResponse response)
throws HttpAuthenticationException {
Preconditions.checkState(jwtValidator != null, "JWT validator should have been set");
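On the server side, logTrackingHeaderIfAny() echoes the received header into the HS2 log, so the very same id can be grepped in both the beeline/JDBC log and the HiveServer2 log to match a client action to the exact HTTP request the server handled. Continuing the made-up id from the client-side examples, HS2 would log a matching line such as:

Request-Track:HIVE_b870460169c14ff6857d8f0a82209e44_00000000000000000042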
