Skip to content

Commit

Permalink
HBASE-28312 The bad auth exception can not be passed to client rpc ca…
Browse files Browse the repository at this point in the history
…lls properly (#5629)

Signed-off-by: Bryan Beaudreault <bbeaudreault@apache.org>
(cherry picked from commit 6017937)
  • Loading branch information
Apache9 committed Jan 17, 2024
1 parent 83bb03a commit 0772b46
Show file tree
Hide file tree
Showing 14 changed files with 251 additions and 50 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,7 @@
package org.apache.hadoop.hbase.ipc;

import static org.apache.hadoop.hbase.ipc.IPCUtil.buildRequestHeader;
import static org.apache.hadoop.hbase.ipc.IPCUtil.createRemoteException;
import static org.apache.hadoop.hbase.ipc.IPCUtil.getTotalSizeWhenWrittenDelimited;
import static org.apache.hadoop.hbase.ipc.IPCUtil.isFatalConnectionException;
import static org.apache.hadoop.hbase.ipc.IPCUtil.setCancelled;
import static org.apache.hadoop.hbase.ipc.IPCUtil.write;

Expand Down Expand Up @@ -69,6 +67,7 @@

import org.apache.hbase.thirdparty.com.google.protobuf.Message;
import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;
import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;
import org.apache.hbase.thirdparty.io.netty.buffer.PooledByteBufAllocator;

Expand Down Expand Up @@ -704,6 +703,25 @@ private void readResponse() {
// Read the header
ResponseHeader responseHeader = ResponseHeader.parseDelimitedFrom(in);
int id = responseHeader.getCallId();
if (LOG.isTraceEnabled()) {
LOG.trace("got response header " + TextFormat.shortDebugString(responseHeader)
+ ", totalSize: " + totalSize + " bytes");
}
RemoteException remoteExc;
if (responseHeader.hasException()) {
ExceptionResponse exceptionResponse = responseHeader.getException();
remoteExc = IPCUtil.createRemoteException(exceptionResponse);
if (IPCUtil.isFatalConnectionException(exceptionResponse)) {
// Here we will cleanup all calls so do not need to fall back, just return.
synchronized (this) {
closeConn(remoteExc);
}
return;
}
} else {
remoteExc = null;
}

call = calls.remove(id); // call.done have to be set before leaving this method
expectedCall = (call != null && !call.isDone());
if (!expectedCall) {
Expand All @@ -714,46 +732,34 @@ private void readResponse() {
// this connection.
int readSoFar = getTotalSizeWhenWrittenDelimited(responseHeader);
int whatIsLeftToRead = totalSize - readSoFar;
LOG.debug("Unknown callId: " + id + ", skipping over this response of " + whatIsLeftToRead
+ " bytes");
IOUtils.skipFully(in, whatIsLeftToRead);
if (call != null) {
call.callStats.setResponseSizeBytes(totalSize);
call.callStats
.setCallTimeMs(EnvironmentEdgeManager.currentTime() - call.callStats.getStartTime());
}
return;
}
if (responseHeader.hasException()) {
ExceptionResponse exceptionResponse = responseHeader.getException();
RemoteException re = createRemoteException(exceptionResponse);
call.setException(re);
call.callStats.setResponseSizeBytes(totalSize);
call.callStats
.setCallTimeMs(EnvironmentEdgeManager.currentTime() - call.callStats.getStartTime());
if (isFatalConnectionException(exceptionResponse)) {
synchronized (this) {
closeConn(re);
}
}
} else {
Message value = null;
if (call.responseDefaultType != null) {
Message.Builder builder = call.responseDefaultType.newBuilderForType();
ProtobufUtil.mergeDelimitedFrom(builder, in);
value = builder.build();
}
CellScanner cellBlockScanner = null;
if (responseHeader.hasCellBlockMeta()) {
int size = responseHeader.getCellBlockMeta().getLength();
byte[] cellBlock = new byte[size];
IOUtils.readFully(this.in, cellBlock, 0, cellBlock.length);
cellBlockScanner = this.rpcClient.cellBlockBuilder.createCellScanner(this.codec,
this.compressor, cellBlock);
}
call.setResponse(value, cellBlockScanner);
call.callStats.setResponseSizeBytes(totalSize);
call.callStats
.setCallTimeMs(EnvironmentEdgeManager.currentTime() - call.callStats.getStartTime());
call.callStats.setResponseSizeBytes(totalSize);
if (remoteExc != null) {
call.setException(remoteExc);
return;
}
Message value = null;
if (call.responseDefaultType != null) {
Message.Builder builder = call.responseDefaultType.newBuilderForType();
ProtobufUtil.mergeDelimitedFrom(builder, in);
value = builder.build();
}
CellScanner cellBlockScanner = null;
if (responseHeader.hasCellBlockMeta()) {
int size = responseHeader.getCellBlockMeta().getLength();
byte[] cellBlock = new byte[size];
IOUtils.readFully(this.in, cellBlock, 0, cellBlock.length);
cellBlockScanner =
this.rpcClient.cellBlockBuilder.createCellScanner(this.codec, this.compressor, cellBlock);
}
call.setResponse(value, cellBlockScanner);
} catch (IOException e) {
if (expectedCall) {
call.setException(e);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,8 @@
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
import org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream;
Expand All @@ -62,6 +64,8 @@
@InterfaceAudience.Private
class IPCUtil {

private static final Logger LOG = LoggerFactory.getLogger(IPCUtil.class);

/**
* Write out header, param, and cell block if there is one.
* @param dos Stream to write into
Expand Down Expand Up @@ -159,8 +163,19 @@ static RemoteException createRemoteException(final ExceptionResponse e) {
}

/** Returns True if the exception is a fatal connection exception. */
static boolean isFatalConnectionException(final ExceptionResponse e) {
return e.getExceptionClassName().equals(FatalConnectionException.class.getName());
/**
 * Returns true if the remote exception represents a {@link FatalConnectionException} or one of
 * its subclasses, meaning the connection itself is broken and must be torn down.
 * <p>
 * The class-name string comparison is kept as a fast path that avoids class loading; for
 * subclasses we fall back to resolving the class reflectively.
 * @param e the exception response received from the server
 * @return true if the reported exception class is FatalConnectionException or a subclass of it
 */
static boolean isFatalConnectionException(ExceptionResponse e) {
  String className = e.getExceptionClassName();
  // Guard before dereferencing: the original checked for null only after calling equals on the
  // name. Protobuf string getters normally return "" rather than null, but be defensive.
  if (className == null || className.isEmpty()) {
    return false;
  }
  if (className.equals(FatalConnectionException.class.getName())) {
    return true;
  }
  // Try our best to check for sub classes of FatalConnectionException. Pass initialize=false so
  // resolving the class does not run its static initializers.
  try {
    return FatalConnectionException.class
      .isAssignableFrom(Class.forName(className, false, IPCUtil.class.getClassLoader()));
    // Class.forName may throw ExceptionInInitializerError so we have to catch Throwable here
  } catch (Throwable t) {
    LOG.debug("Can not get class object for {}", className, t);
    return false;
  }
}

static IOException toIOE(Throwable t) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,8 @@ public void run(Timeout timeout) throws Exception {
}
}

protected final byte[] getConnectionHeaderPreamble() {
// will be overridden in tests
protected byte[] getConnectionHeaderPreamble() {
// Assemble the preamble up in a buffer first and then send it. Writing individual elements,
// they are getting sent across piecemeal according to wireshark and then server is messing
// up the reading on occasion (the passed in stream is not buffered yet).
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.ipc;

/**
* Just a dummy exception for testing IPCUtil.isFatalConnectionException.
*/
public class DummyException extends Exception {

private static final long serialVersionUID = 215191975455115118L;

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.ipc;

/**
* Just a dummy exception for testing IPCUtil.isFatalConnectionException.
*/
/**
 * Marker subclass of FatalConnectionException for the IPCUtil tests: it verifies that
 * isFatalConnectionException also recognizes subclasses, not only the exact class.
 */
public class DummyFatalConnectionException extends FatalConnectionException {

  // Fixed id so serialized form stays stable across recompiles.
  private static final long serialVersionUID = -1966815615846798490L;

}
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@

import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
Expand All @@ -44,6 +45,8 @@
import org.apache.hbase.thirdparty.io.netty.channel.DefaultEventLoop;
import org.apache.hbase.thirdparty.io.netty.channel.EventLoop;

import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ExceptionResponse;

@Category({ ClientTests.class, SmallTests.class })
public class TestIPCUtil {

Expand Down Expand Up @@ -159,4 +162,23 @@ public void run() {
eventLoop.shutdownGracefully().get();
}
}

@Test
public void testIsFatalConnectionException() {
  // Reference the dummy classes only by their string names (never by class literal) so this
  // test method does not trigger class loading itself; that exercises the initialize=false
  // Class.forName path inside IPCUtil.isFatalConnectionException.
  ExceptionResponse subclassResponse = ExceptionResponse.newBuilder()
    .setExceptionClassName("org.apache.hadoop.hbase.ipc.DummyFatalConnectionException").build();
  assertTrue(IPCUtil.isFatalConnectionException(subclassResponse));

  // An exception type unrelated to FatalConnectionException must not be treated as fatal.
  ExceptionResponse unrelatedResponse = ExceptionResponse.newBuilder()
    .setExceptionClassName("org.apache.hadoop.hbase.ipc.DummyException").build();
  assertFalse(IPCUtil.isFatalConnectionException(unrelatedResponse));

  // A class name that cannot be resolved at all must also be treated as non-fatal.
  ExceptionResponse missingResponse = ExceptionResponse.newBuilder()
    .setExceptionClassName("org.apache.hadoop.hbase.ipc.WhatEver").build();
  assertFalse(IPCUtil.isFatalConnectionException(missingResponse));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ class NettyRpcServerPreambleHandler extends SimpleChannelInboundHandler<ByteBuf>

private final NettyRpcServer rpcServer;
private final NettyServerRpcConnection conn;
private boolean processPreambleError;

public NettyRpcServerPreambleHandler(NettyRpcServer rpcServer, NettyServerRpcConnection conn) {
this.rpcServer = rpcServer;
Expand All @@ -46,10 +47,18 @@ public NettyRpcServerPreambleHandler(NettyRpcServer rpcServer, NettyServerRpcCon

@Override
protected void channelRead0(ChannelHandlerContext ctx, ByteBuf msg) throws Exception {
if (processPreambleError) {
// if we failed to process preamble, we will close the connection immediately, but it is
// possible that we have already received some bytes after the 'preamble' so when closing, the
// netty framework will still pass them here. So we set a flag here to just skip processing
// these broken messages.
return;
}
ByteBuffer buf = ByteBuffer.allocate(msg.readableBytes());
msg.readBytes(buf);
buf.flip();
if (!conn.processPreamble(buf)) {
processPreambleError = true;
conn.close();
return;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,9 +29,11 @@
import static org.apache.hadoop.hbase.ipc.TestProtobufRpcServiceImpl.newStub;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.everyItem;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.instanceOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
Expand All @@ -53,6 +55,7 @@
import java.net.InetSocketAddress;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
Expand Down Expand Up @@ -545,4 +548,24 @@ public void testTracingErrorIpc() throws IOException {
hasTraceId(traceRule.getSpans().iterator().next().getTraceId()))));
}
}

protected abstract AbstractRpcClient<?> createBadAuthRpcClient(Configuration conf);

@Test
public void testBadPreambleHeader() throws IOException, ServiceException {
  // Start a normal server, then connect with a client whose connection preamble carries a
  // corrupted auth byte; the server's BadAuthException must reach the caller instead of the
  // call hanging or failing with an opaque connection error.
  Configuration clientConf = new Configuration(CONF);
  RpcServer server = createRpcServer("testRpcServer", Collections.emptyList(),
    new InetSocketAddress("localhost", 0), CONF, new FifoRpcScheduler(CONF, 1));
  try (AbstractRpcClient<?> client = createBadAuthRpcClient(clientConf)) {
    server.start();
    BlockingInterface stub = newBlockingStub(client, server.getListenerAddress());
    ServiceException wrapped = assertThrows(ServiceException.class,
      () -> stub.echo(null, EchoRequestProto.newBuilder().setMessage("hello").build()));
    // Unwrap the ServiceException to get at the remote exception the server sent back.
    IOException unwrapped = ProtobufUtil.handleRemoteException(wrapped);
    assertThat(unwrapped, instanceOf(BadAuthException.class));
    assertThat(unwrapped.getMessage(), containsString("authName=unknown"));
  } finally {
    server.stop();
  }
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.ipc;

import java.io.IOException;

/**
 * Test-only {@link NettyRpcConnection} that corrupts the trailing auth-method byte of the
 * connection preamble, so the server rejects the handshake with a bad-auth error.
 */
public class BadAuthNettyRpcConnection extends NettyRpcConnection {

  public BadAuthNettyRpcConnection(NettyRpcClient rpcClient, ConnectionId remoteId)
    throws IOException {
    super(rpcClient, remoteId);
  }

  @Override
  protected byte[] getConnectionHeaderPreamble() {
    byte[] preamble = super.getConnectionHeaderPreamble();
    // The last byte of the preamble encodes the auth method; overwrite it with a value that
    // maps to no known AuthMethod so the server fails authentication.
    int authByteIndex = preamble.length - 1;
    preamble[authByteIndex] = -10;
    return preamble;
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -107,4 +107,24 @@ protected RpcServer createTestFailingRpcServer(String name,
Configuration conf, RpcScheduler scheduler) throws IOException {
return new TestFailingRpcServer(null, name, services, bindAddress, conf, scheduler);
}

@Override
protected AbstractRpcClient<?> createBadAuthRpcClient(Configuration conf) {
  // Blocking client whose every connection sends a corrupted preamble: the trailing
  // auth-method byte is overwritten with a value the server cannot map to any AuthMethod.
  return new BlockingRpcClient(conf) {

    @Override
    protected BlockingRpcConnection createConnection(ConnectionId remoteId) throws IOException {
      return new BlockingRpcConnection(this, remoteId) {
        @Override
        protected byte[] getConnectionHeaderPreamble() {
          byte[] preamble = super.getConnectionHeaderPreamble();
          // Corrupt the final byte, which encodes the auth method.
          preamble[preamble.length - 1] = -10;
          return preamble;
        }
      };
    }

  };
}
}

0 comments on commit 0772b46

Please sign in to comment.