HBASE-17566 Jetty upgrade fixes
Fix issues caused by the HBASE-12894 (Upgrade Jetty to 9.2.6) commit:

- removed Jetty 6.x dependencies (org.mortbay.jetty.*)
- corrected @Ignore-d unit tests

Signed-off-by: Michael Stack <stack@apache.org>
meszibalu authored and saintstack committed Jan 30, 2017
1 parent ffe7dac commit e68ab09
Showing 12 changed files with 95 additions and 125 deletions.
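
Across the changed files, the recurring change is the move off Jetty 6's org.mortbay.* namespace onto the Jetty 9 org.eclipse.jetty.* one. The replacements visible in this commit:

- org.mortbay.jetty.servlet.DefaultServlet → org.eclipse.jetty.servlet.DefaultServlet
- org.mortbay.jetty.Server → org.eclipse.jetty.server.Server
- org.mortbay.jetty.Connector → org.eclipse.jetty.server.Connector
- org.mortbay.jetty.nio.SelectChannelConnector → org.eclipse.jetty.server.ServerConnector
- org.mortbay.jetty.servlet.Context → org.eclipse.jetty.webapp.WebAppContext
- org.mortbay.log.Log → org.apache.commons.logging.Log via LogFactory.getLog(...)
- org.mortbay.util.IO → dropped (unused)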
@@ -22,18 +22,21 @@
 import java.io.IOException;
 import java.io.InputStream;
 
-import org.apache.http.Header;
-import org.apache.http.HttpResponse;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.mortbay.log.Log;
+import org.apache.http.Header;
+import org.apache.http.HttpResponse;
 
 /**
  * The HTTP result code, response headers, and body of a HTTP response.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class Response {
+  private static final Log LOG = LogFactory.getLog(Response.class);
+
   private int code;
   private Header[] headers;
   private byte[] body;
@@ -139,7 +142,7 @@ public byte[] getBody() {
       try {
         body = Client.getResponseBody(resp);
       } catch (IOException ioe) {
-        Log.debug("encountered ioe when obtaining body", ioe);
+        LOG.debug("encountered ioe when obtaining body", ioe);
       }
     }
     return body;
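
Jetty 9 drops the org.mortbay.log.Log static facade that this class (and TestEntityLocks further down) had been calling, so both switch to a class-scoped commons-logging logger. The replacement pattern in isolation (a minimal sketch; the class name is illustrative, not from the patch):

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class Example {
      // One logger per class, replacing static calls to org.mortbay.log.Log.
      private static final Log LOG = LogFactory.getLog(Example.class);

      void readBody() {
        try {
          // ... obtain the response body ...
        } catch (Exception ioe) {
          LOG.debug("encountered ioe when obtaining body", ioe);
        }
      }
    }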
@@ -25,7 +25,7 @@
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.mortbay.jetty.servlet.DefaultServlet;
+import org.eclipse.jetty.servlet.DefaultServlet;
 
 /**
  * General servlet which is admin-authorized.
@@ -534,22 +534,23 @@ private void initializeWebServer(String name, String hostName,
 
     Preconditions.checkNotNull(webAppContext);
 
+    HandlerCollection handlerCollection = new HandlerCollection();
+
     ContextHandlerCollection contexts = new ContextHandlerCollection();
     RequestLog requestLog = HttpRequestLog.getRequestLog(name);
 
     if (requestLog != null) {
       RequestLogHandler requestLogHandler = new RequestLogHandler();
       requestLogHandler.setRequestLog(requestLog);
-      HandlerCollection handlers = new HandlerCollection();
-      handlers.setHandlers(new Handler[] { requestLogHandler, contexts });
-      webServer.setHandler(handlers);
-    } else {
-      webServer.setHandler(contexts);
+      handlerCollection.addHandler(requestLogHandler);
     }
 
     final String appDir = getWebAppsPath(name);
 
-    webServer.setHandler(webAppContext);
+    handlerCollection.addHandler(contexts);
+    handlerCollection.addHandler(webAppContext);
+
+    webServer.setHandler(handlerCollection);
 
     addDefaultApps(contexts, appDir, conf);
 
@@ -629,14 +630,13 @@ protected void addDefaultApps(ContextHandlerCollection parent,
       logDir = System.getProperty("hadoop.log.dir");
     }
     if (logDir != null) {
-      ServletContextHandler logContext = new ServletContextHandler(parent, "/*");
+      ServletContextHandler logContext = new ServletContextHandler(parent, "/logs");
       logContext.addServlet(AdminAuthorizedServlet.class, "/*");
       logContext.setResourceBase(logDir);
 
       if (conf.getBoolean(
           ServerConfigurationKeys.HBASE_JETTY_LOGS_SERVE_ALIASES,
           ServerConfigurationKeys.DEFAULT_HBASE_JETTY_LOGS_SERVE_ALIASES)) {
-        @SuppressWarnings("unchecked")
         Map<String, String> params = logContext.getInitParams();
         params.put(
             "org.mortbay.jetty.servlet.Default.aliases", "true");
@@ -1260,7 +1260,6 @@ public RequestQuoter(HttpServletRequest rawRequest) {
     /**
      * Return the set of parameter names, quoting each name.
      */
-    @SuppressWarnings("unchecked")
     @Override
     public Enumeration<String> getParameterNames() {
       return new Enumeration<String>() {
@@ -1301,7 +1300,6 @@ public String[] getParameterValues(String name) {
       return result;
     }
 
-    @SuppressWarnings("unchecked")
     @Override
     public Map<String, String[]> getParameterMap() {
       Map<String, String[]> result = new HashMap<String,String[]>();
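
For readers less familiar with Jetty 9: the initializeWebServer() rework above replaces the old either/or wiring (a request-log handler wrapping the contexts, or the contexts alone) with one top-level HandlerCollection that always carries the contexts and the webapp and, when configured, a request-log handler. A minimal self-contained sketch of that wiring, assuming Jetty 9.2.x on the classpath (class name, log file name, and port are illustrative, not from the patch):

    import org.eclipse.jetty.server.NCSARequestLog;
    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.server.handler.ContextHandlerCollection;
    import org.eclipse.jetty.server.handler.HandlerCollection;
    import org.eclipse.jetty.server.handler.RequestLogHandler;

    public class HandlerWiringSketch {
      public static void main(String[] args) throws Exception {
        Server server = new Server(8080);

        // A HandlerCollection passes every request to each of its handlers
        // in order, so logging and serving can sit side by side.
        HandlerCollection handlerCollection = new HandlerCollection();

        // Optional request logging, added first when configured.
        RequestLogHandler requestLogHandler = new RequestLogHandler();
        requestLogHandler.setRequestLog(new NCSARequestLog("access_yyyy_mm_dd.log"));
        handlerCollection.addHandler(requestLogHandler);

        // All servlet contexts (the webapp, /logs, /static, ...) hang off this.
        ContextHandlerCollection contexts = new ContextHandlerCollection();
        handlerCollection.addHandler(contexts);

        server.setHandler(handlerCollection);
        server.start();
        server.join();
      }
    }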

This file was deleted.

@@ -175,9 +175,10 @@
 import org.apache.hadoop.hbase.zookeeper.ZKUtil;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.zookeeper.KeeperException;
-import org.mortbay.jetty.Connector;
-import org.mortbay.jetty.nio.SelectChannelConnector;
-import org.mortbay.jetty.servlet.Context;
+import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.webapp.WebAppContext;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.Lists;
@@ -385,7 +386,7 @@ public void run() {
   private FavoredNodesManager favoredNodesManager;
 
   /** jetty server for master to redirect requests to regionserver infoServer */
-  private org.mortbay.jetty.Server masterJettyServer;
+  private Server masterJettyServer;
 
   public static class RedirectServlet extends HttpServlet {
     private static final long serialVersionUID = 2894774810058302472L;
@@ -517,14 +518,17 @@ private int putUpJettyServer() throws IOException {
     if(RedirectServlet.regionServerInfoPort == infoPort) {
       return infoPort;
     }
-    masterJettyServer = new org.mortbay.jetty.Server();
-    Connector connector = new SelectChannelConnector();
+    masterJettyServer = new Server();
+    ServerConnector connector = new ServerConnector(masterJettyServer);
     connector.setHost(addr);
     connector.setPort(infoPort);
     masterJettyServer.addConnector(connector);
     masterJettyServer.setStopAtShutdown(true);
-    Context context = new Context(masterJettyServer, "/", Context.NO_SESSIONS);
+
+    WebAppContext context = new WebAppContext(null, "/", null, null, null, null, WebAppContext.NO_SESSIONS);
     context.addServlet(RedirectServlet.class, "/*");
+    context.setServer(masterJettyServer);
+
     try {
       masterJettyServer.start();
     } catch (Exception e) {
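
Two Jetty 9 idioms show up in putUpJettyServer(): a connector is constructed against its Server rather than standalone (ServerConnector replaces Jetty 6's SelectChannelConnector), and the Jetty 6 servlet Context is gone, with a session-less WebAppContext taking its place. A minimal sketch of the same pattern, assuming Jetty 9.2.x (the servlet, host, and port here are illustrative stand-ins, not from the patch):

    import javax.servlet.http.HttpServlet;
    import org.eclipse.jetty.server.Server;
    import org.eclipse.jetty.server.ServerConnector;
    import org.eclipse.jetty.webapp.WebAppContext;

    public class RedirectServerSketch {
      // Trivial servlet standing in for HMaster's RedirectServlet.
      public static class NoopServlet extends HttpServlet {
        private static final long serialVersionUID = 1L;
      }

      public static void main(String[] args) throws Exception {
        Server server = new Server();

        // Jetty 9 binds the connector to its server at construction time,
        // replacing Jetty 6's standalone SelectChannelConnector.
        ServerConnector connector = new ServerConnector(server);
        connector.setHost("localhost"); // illustrative bind address
        connector.setPort(16010);       // illustrative port
        server.addConnector(connector);
        server.setStopAtShutdown(true);

        // Session-less root context; WebAppContext replaces the old
        // org.mortbay.jetty.servlet.Context.
        WebAppContext context =
            new WebAppContext(null, "/", null, null, null, null, WebAppContext.NO_SESSIONS);
        context.addServlet(NoopServlet.class, "/*");
        context.setServer(server);
        server.setHandler(context);

        server.start();
        server.join();
      }
    }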
@@ -17,35 +17,49 @@
  */
 package org.apache.hadoop.hbase.replication.regionserver;
 
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Queue;
+import java.util.Set;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.Abortable;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.Stoppable;
 import org.apache.hadoop.hbase.client.ClusterConnection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.replication.ReplicationAdmin;
 import org.apache.hadoop.hbase.io.WALLink;
 import org.apache.hadoop.hbase.procedure2.util.StringUtils;
-import org.apache.hadoop.hbase.replication.*;
-import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.hbase.replication.ReplicationException;
+import org.apache.hadoop.hbase.replication.ReplicationFactory;
+import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
+import org.apache.hadoop.hbase.replication.ReplicationPeers;
+import org.apache.hadoop.hbase.replication.ReplicationQueueInfo;
+import org.apache.hadoop.hbase.replication.ReplicationQueues;
+import org.apache.hadoop.hbase.replication.ReplicationQueuesClient;
+import org.apache.hadoop.hbase.replication.ReplicationQueuesClientArguments;
+import org.apache.hadoop.hbase.replication.ReplicationTracker;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.zookeeper.KeeperException;
-import org.mortbay.util.IO;
 
 import com.google.common.util.concurrent.AtomicLongMap;
 
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.*;
-
 /**
  * Provides information about the existing states of replication, replication peers and queues.
  *
@@ -19,13 +19,34 @@
 
 package org.apache.hadoop.hbase.client.locking;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Matchers.isA;
+import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.PerClientRandomNonceGenerator;
 import org.apache.hadoop.hbase.HBaseIOException;
+import org.apache.hadoop.hbase.client.PerClientRandomNonceGenerator;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.*;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockHeartbeatResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockService;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.LockServiceProtos.LockType;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Threads;
@@ -34,16 +55,11 @@
 import org.junit.experimental.categories.Category;
 import org.mockito.ArgumentCaptor;
 import org.mockito.Mockito;
-import org.mortbay.log.Log;
-
-import static org.mockito.Mockito.*;
-import static org.junit.Assert.*;
-
-import java.util.Random;
-import java.util.concurrent.TimeUnit;
 
 @Category({ClientTests.class, SmallTests.class})
 public class TestEntityLocks {
+  private static final Log LOG = LogFactory.getLog(TestEntityLocks.class);
+
   private final Configuration conf = HBaseConfiguration.create();
 
   private final LockService.BlockingInterface master =
@@ -80,13 +96,13 @@ public void setUp() throws Exception {
   private boolean waitLockTimeOut(EntityLock lock, long maxWaitTimeMillis) {
     long startMillis = System.currentTimeMillis();
     while (lock.isLocked()) {
-      Log.info("Sleeping...");
+      LOG.info("Sleeping...");
       Threads.sleepWithoutInterrupt(100);
       if (!lock.isLocked()) {
        return true;
      }
      if (System.currentTimeMillis() - startMillis > maxWaitTimeMillis) {
-        Log.info("Timedout...");
+        LOG.info("Timedout...");
        return false;
      }
    }
@@ -40,7 +40,7 @@ public class HttpServerFunctionalTest extends Assert {
   /** JVM property for the webapp test dir : {@value} */
   public static final String TEST_BUILD_WEBAPPS = "test.build.webapps";
   /** expected location of the test.build.webapps dir: {@value} */
-  private static final String BUILD_WEBAPPS_DIR = "build/test/webapps";
+  private static final String BUILD_WEBAPPS_DIR = "src/main/resources/hbase-webapps";
 
   /** name of the test webapp: {@value} */
   private static final String TEST = "test";
