Skip to content

Commit

Permalink
HDFS-14509. DN throws InvalidToken due to inequality of password when…
Browse files Browse the repository at this point in the history
… upgrading NN from 2.x to 3.x. Contributed by Yuxuan Wang and Konstantin Shvachko.
  • Loading branch information
Chen Liang committed Oct 8, 2019
1 parent 5462d21 commit 72ae371
Show file tree
Hide file tree
Showing 2 changed files with 60 additions and 0 deletions.
Expand Up @@ -33,6 +33,7 @@
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AccessModeProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenSecretProto;
import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.security.UserGroupInformation;
Expand Down Expand Up @@ -142,6 +143,7 @@ public byte[] getHandshakeMsg() {
}

/**
 * Replaces the handshake message carried by this identifier.
 * Also clears the cached serialized form so a subsequent
 * {@code getBytes()} re-serializes with the new message instead of
 * returning stale bytes.
 *
 * @param bytes the new handshake message payload
 */
public void setHandshakeMsg(byte[] bytes) {
  // The cached raw bytes no longer match once a field changes.
  this.cache = null;
  this.handshakeMsg = bytes;
}

Expand Down Expand Up @@ -214,6 +216,15 @@ public void readFields(DataInput in) throws IOException {
if (!dis.markSupported()) {
throw new IOException("Could not peek first byte.");
}

// this.cache should be assigned the raw bytes from the input data for
// upgrade compatibility. If we don't mutate any fields and later call
// getBytes() (e.g. to retrieve the password), we should return the raw
// bytes instead of re-serializing this instance's fields, because that
// would drop newly added fields this version does not recognize.
this.cache = IOUtils.readFullyToByteArray(dis);
dis.reset();

dis.mark(1);
final byte firstByte = dis.readByte();
dis.reset();
Expand Down
Expand Up @@ -32,13 +32,15 @@
import java.io.DataInputStream;
import java.io.File;
import java.io.IOException;
import java.io.DataOutput;
import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.Calendar;
import java.util.EnumSet;
import java.util.GregorianCalendar;
import java.util.Set;

import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
Expand Down Expand Up @@ -842,4 +844,51 @@ public void testBlockTokenRanges() throws IOException {
}
}
}

@Test
public void testRetrievePasswordWithUnknownFields() throws IOException {
  BlockTokenIdentifier identifier = new BlockTokenIdentifier();
  BlockTokenIdentifier spiedIdentifier = Mockito.spy(identifier);
  // Emulate a token written by a newer NN: append a trailing byte that
  // readFields() will not understand but which is still covered by the
  // password computation.
  Mockito.doAnswer(invocation -> {
    DataOutput out = (DataOutput) invocation.getArguments()[0];
    invocation.callRealMethod();
    out.write(7);
    return null;
  }).when(spiedIdentifier).write(Mockito.any());

  BlockTokenSecretManager secretManager =
      new BlockTokenSecretManager(blockKeyUpdateInterval, blockTokenLifetime,
          0, 1, "fake-pool", null, false);
  // The master computes the password over the full serialized bytes,
  // including the unrecognized tail.
  byte[] masterPassword = secretManager.createPassword(spiedIdentifier);

  // A downlevel reader deserializes the token; it cannot parse the extra
  // byte but must keep the raw bytes cached.
  BlockTokenIdentifier downlevelIdentifier = new BlockTokenIdentifier();
  downlevelIdentifier.readFields(new DataInputStream(
      new ByteArrayInputStream(spiedIdentifier.getBytes())));

  // Password retrieval must still agree with the master's password.
  assertArrayEquals(masterPassword,
      secretManager.retrievePassword(downlevelIdentifier));
}

@Test
public void testRetrievePasswordWithRecognizableFieldsOnly()
    throws IOException {
  BlockTokenSecretManager secretManager =
      new BlockTokenSecretManager(blockKeyUpdateInterval, blockTokenLifetime,
          0, 1, "fake-pool", null, false);
  // Master side: build a token and compute its password.
  BlockTokenIdentifier masterIdentifier = new BlockTokenIdentifier();
  byte[] expectedPassword = secretManager.createPassword(masterIdentifier);
  // Invoking a setter clears the cached raw bytes, so the getBytes() call
  // below serializes only the fields this version recognizes.
  masterIdentifier.setExpiryDate(masterIdentifier.getExpiryDate());

  // Slave side: rebuild the identifier from those recognizable bytes.
  BlockTokenIdentifier rebuiltIdentifier = new BlockTokenIdentifier();
  rebuiltIdentifier.readFields(new DataInputStream(
      new ByteArrayInputStream(masterIdentifier.getBytes())));

  // Password retrieval must still succeed without any unknown fields.
  assertArrayEquals(expectedPassword,
      secretManager.retrievePassword(rebuiltIdentifier));
}
}

0 comments on commit 72ae371

Please sign in to comment.