diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index 52425211dc688..6c9ab586b0e25 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -3241,8 +3241,32 @@ public AclStatus getAclStatus(Path path) throws IOException {
*/
public void setXAttr(Path path, String name, byte[] value)
throws IOException {
- setXAttr(path, name, value, EnumSet.of(XAttrSetFlag.CREATE,
- XAttrSetFlag.REPLACE));
+ setXAttr(path, name, value, false);
+ }
+
+ /**
+ * Set an xattr of a file or directory.
+ * The name must be prefixed with the namespace followed by ".". For example,
+ * "user.attr".
+ *
+ * Refer to the HDFS extended attributes user documentation for details.
+ *
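+ * <p>For example (illustrative attribute name and value):
+ * <pre>
+ * fs.setXAttr(path, "user.tier", "hot".getBytes(StandardCharsets.UTF_8), true);
+ * </pre>
+ *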
+ * @param path Path to modify
+ * @param name xattr name.
+ * @param value xattr value.
+ * @param enumValue whether the value is enumerable; if true, HDFS may store it more efficiently.
+ * @throws IOException IO failure
+ * @throws UnsupportedOperationException if the operation is unsupported
+ * (default outcome).
+ */
+ public void setXAttr(Path path, String name, byte[] value, boolean enumValue)
+ throws IOException {
+ EnumSet<XAttrSetFlag> flags = EnumSet.of(XAttrSetFlag.CREATE,
+ XAttrSetFlag.REPLACE);
+ if (enumValue) {
+ flags.add(XAttrSetFlag.ENUM_VALUE);
+ }
+ setXAttr(path, name, value, flags);
}
/**
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
index cdbe51e330701..14c5cdae713f6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
@@ -626,6 +626,12 @@ public void setXAttr(Path path, String name, byte[] value)
fs.setXAttr(path, name, value);
}
+ @Override
+ public void setXAttr(Path path, String name, byte[] value, boolean numerable)
+ throws IOException {
+ fs.setXAttr(path, name, value, numerable);
+ }
+
@Override
public void setXAttr(Path path, String name, byte[] value,
EnumSet<XAttrSetFlag> flag) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrSetFlag.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrSetFlag.java
index 345a3d81e2c29..55e3dd4b954fd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrSetFlag.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrSetFlag.java
@@ -38,7 +38,13 @@ public enum XAttrSetFlag {
* Replace a existing xattr.
* If the xattr does not exist, exception will be thrown.
*/
- REPLACE((short) 0x02);
+ REPLACE((short) 0x02),
+
+ /**
+ * The value is enumerable, i.e. one of a limited set of possible values.
+ * HDFS will try to store such values efficiently.
+ */
+ ENUM_VALUE((short) 0x04);
private final short flag;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
index 1d6eb28c9b1b5..b37b80b56080a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
@@ -133,30 +133,38 @@ private void printXAttr(String name, byte[] value) throws IOException{
*/
public static class SetfattrCommand extends FsCommand {
public static final String NAME = SET_FATTR;
- public static final String USAGE = "{-n name [-v value] | -x name} <path>";
+ public static final String USAGE = "{-n name [-v value [-e]] | -x name} <path>";
public static final String DESCRIPTION =
- "Sets an extended attribute name and value for a file or directory.\n" +
- "-n name: The extended attribute name.\n" +
- "-v value: The extended attribute value. There are three different " +
- "encoding methods for the value. If the argument is enclosed in double " +
- "quotes, then the value is the string inside the quotes. If the " +
- "argument is prefixed with 0x or 0X, then it is taken as a hexadecimal " +
- "number. If the argument begins with 0s or 0S, then it is taken as a " +
- "base64 encoding.\n" +
- "-x name: Remove the extended attribute.\n" +
- ": The file or directory.\n";
+ "Sets an extended attribute name and value for a file or directory.\n" +
+ "-n name: The extended attribute name.\n" +
+ "-v value: The extended attribute value. There are three different " +
+ "encoding methods for the value. If the argument is enclosed in double " +
+ "quotes, then the value is the string inside the quotes. If the " +
+ "argument is prefixed with 0x or 0X, then it is taken as a hexadecimal " +
+ "number. If the argument begins with 0s or 0S, then it is taken as a " +
+ "base64 encoding.\n" +
+ "-e: if set, means the value is enumerable, HDFS will try to store it " +
+ "efficiently.\n" +
+ "-x name: Remove the extended attribute.\n" +
+ ": The file or directory.\n";
private String name = null;
private byte[] value = null;
private String xname = null;
+ private boolean enumValue = false;
@Override
protected void processOptions(LinkedList<String> args) throws IOException {
name = StringUtils.popOptionWithArgument("-n", args);
String v = StringUtils.popOptionWithArgument("-v", args);
+ enumValue = StringUtils.popOption("-e", args);
if (v != null) {
value = XAttrCodec.decodeValue(v);
}
+ if (enumValue && v == null) {
+ throw new HadoopIllegalArgumentException(
+ "Can not specify '-e' when '-v' is empty.");
+ }
xname = StringUtils.popOptionWithArgument("-x", args);
if (name != null && xname != null) {
@@ -179,7 +187,7 @@ protected void processOptions(LinkedList args) throws IOException {
@Override
protected void processPath(PathData item) throws IOException {
if (name != null) {
- item.fs.setXAttr(item.path, name, value);
+ item.fs.setXAttr(item.path, name, value, enumValue);
} else if (xname != null) {
item.fs.removeXAttr(item.path, xname);
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
index b227e16908828..6442dfae16aa9 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java
@@ -198,6 +198,9 @@ public void removeAclEntries(Path path, List aclSpec)
public void setXAttr(Path path, String name, byte[] value)
throws IOException;
+ void setXAttr(Path path, String name, byte[] value, boolean numerable)
+ throws IOException;
+
public void setXAttr(Path path, String name, byte[] value,
EnumSet<XAttrSetFlag> flag) throws IOException;
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/fs/XAttr.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/fs/XAttr.java
index a06ff0a508201..428935b004c04 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/fs/XAttr.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/fs/XAttr.java
@@ -68,11 +68,13 @@ public enum NameSpace {
private final NameSpace ns;
private final String name;
private final byte[] value;
+ private final boolean enumerable;
public static class Builder {
private NameSpace ns = NameSpace.USER;
private String name;
private byte[] value;
+ private boolean numerable;
public Builder setNameSpace(NameSpace ns) {
this.ns = ns;
@@ -89,15 +91,21 @@ public Builder setValue(byte[] value) {
return this;
}
+ public Builder setEnumerable(boolean isNumerable) {
+ this.numerable = isNumerable;
+ return this;
+ }
+
public XAttr build() {
- return new XAttr(ns, name, value);
+ return new XAttr(ns, name, value, numerable);
}
}
- private XAttr(NameSpace ns, String name, byte[] value) {
+ private XAttr(NameSpace ns, String name, byte[] value, boolean enumerable) {
this.ns = ns;
this.name = name;
this.value = value;
+ this.enumerable = enumerable;
}
public NameSpace getNameSpace() {
@@ -112,12 +120,17 @@ public byte[] getValue() {
return value;
}
+ public boolean isEnumerable() {
+ return enumerable;
+ }
+
@Override
public int hashCode() {
return new HashCodeBuilder(811, 67)
.append(name)
.append(ns)
.append(value)
+ .append(enumerable)
.toHashCode();
}
@@ -133,6 +146,7 @@ public boolean equals(Object obj) {
.append(ns, rhs.ns)
.append(name, rhs.name)
.append(value, rhs.value)
+ .append(enumerable, rhs.enumerable)
.isEquals();
}
@@ -152,12 +166,13 @@ public boolean equalsIgnoreValue(Object obj) {
return new EqualsBuilder()
.append(ns, rhs.ns)
.append(name, rhs.name)
+ .append(isEnumerable(), rhs.isEnumerable())
.isEquals();
}
@Override
public String toString() {
- return "XAttr [ns=" + ns + ", name=" + name + ", value="
+ return "XAttr [ns=" + ns + ", name=" + name + ", enumerable-" + enumerable + ", value="
+ Arrays.toString(value) + "]";
}
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
index 116ed41703858..559b44165dedc 100755
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
@@ -2906,7 +2906,7 @@ public void setXAttr(String src, String name, byte[] value,
EnumSet<XAttrSetFlag> flag) throws IOException {
checkOpen();
try (TraceScope ignored = newPathTraceScope("setXAttr", src)) {
- namenode.setXAttr(src, XAttrHelper.buildXAttr(name, value), flag);
+ namenode.setXAttr(src, XAttrHelper.buildXAttr(name, value, flag), flag);
} catch (RemoteException re) {
throw re.unwrapRemoteException(AccessControlException.class,
FileNotFoundException.class,
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java
index 7e8812fc13f9b..8ad30ca290f39 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hdfs;
+import java.util.EnumSet;
import java.util.List;
import java.util.Map;
@@ -24,6 +25,7 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.XAttr;
import org.apache.hadoop.fs.XAttr.NameSpace;
+import org.apache.hadoop.fs.XAttrSetFlag;
import org.apache.hadoop.util.Lists;
import org.apache.hadoop.util.StringUtils;
@@ -47,6 +49,10 @@ public static XAttr buildXAttr(String name) {
* Both name and namespace are case sensitive.
*/
public static XAttr buildXAttr(String name, byte[] value) {
+ return buildXAttr(name, value, null);
+ }
+
+ public static XAttr buildXAttr(String name, byte[] value, final EnumSet<XAttrSetFlag> flag) {
Preconditions.checkNotNull(name, "XAttr name cannot be null.");
final int prefixIndex = name.indexOf(".");
@@ -78,8 +84,13 @@ public static XAttr buildXAttr(String name, byte[] value) {
"prefixed with user/trusted/security/system/raw, followed by a '.'");
}
+ final boolean isNumerable =
+ flag != null && flag.contains(XAttrSetFlag.ENUM_VALUE);
+
return (new XAttr.Builder()).setNameSpace(ns).setName(name.
- substring(prefixIndex + 1)).setValue(value).build();
+ substring(prefixIndex + 1)).setValue(value).setEnumerable(isNumerable).build();
}
/**
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java
index 496a5cf46146d..320459301d4c5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java
@@ -1106,6 +1106,9 @@ public static XAttrProto convertXAttrProto(XAttr a) {
if (a.getValue() != null) {
builder.setValue(getByteString(a.getValue()));
}
+ if (a.isEnumerable()) {
+ builder.setEnumerable(true);
+ }
return builder.build();
}
@@ -1353,6 +1356,9 @@ public static int convert(EnumSet flag) {
if (flag.contains(XAttrSetFlag.REPLACE)) {
value |= XAttrSetFlagProto.XATTR_REPLACE.getNumber();
}
+ if (flag.contains(XAttrSetFlag.ENUM_VALUE)) {
+ value |= XAttrSetFlagProto.XATTR_ENUMERABLE.getNumber();
+ }
return value;
}
@@ -2929,6 +2935,10 @@ public static EnumSet convert(int flag) {
XAttrSetFlagProto.XATTR_REPLACE_VALUE) {
result.add(XAttrSetFlag.REPLACE);
}
+ if ((flag & XAttrSetFlagProto.XATTR_ENUMERABLE.getNumber()) ==
+ XAttrSetFlagProto.XATTR_ENUMERABLE.getNumber()) {
+ result.add(XAttrSetFlag.ENUM_VALUE);
+ }
return result;
}
@@ -2941,6 +2951,9 @@ public static XAttr convertXAttr(XAttrProto a) {
if (a.hasValue()) {
builder.setValue(a.getValue().toByteArray());
}
+ if (a.hasEnumerable()) {
+ builder.setEnumerable(a.getEnumerable());
+ }
return builder.build();
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/xattr.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/xattr.proto
index a53aa86c16dc8..74698900f48f5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/xattr.proto
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/xattr.proto
@@ -33,11 +33,13 @@ message XAttrProto {
required XAttrNamespaceProto namespace = 1;
required string name = 2;
optional bytes value = 3;
+ optional bool enumerable = 4;
}
enum XAttrSetFlagProto {
XATTR_CREATE = 0x01;
XATTR_REPLACE = 0x02;
+ XATTR_ENUMERABLE = 0x04;
}
message SetXAttrRequestProto {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
index c8e0cd224e1af..3c877e990d7d9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
@@ -5587,6 +5587,10 @@ private static void appendXAttrsToXml(ContentHandler contentHandler,
XMLUtils.addSaxString(contentHandler, "NAMESPACE",
xAttr.getNameSpace().toString());
XMLUtils.addSaxString(contentHandler, "NAME", xAttr.getName());
+ if (xAttr.isEnumerable()) {
+ XMLUtils.addSaxString(contentHandler, "NUMERABLE",
+ String.valueOf(xAttr.isEnumerable()));
+ }
if (xAttr.getValue() != null) {
try {
XMLUtils.addSaxString(contentHandler, "VALUE",
@@ -5610,7 +5614,8 @@ private static List readXAttrsFromXml(Stanza st)
for (Stanza a: stanzas) {
XAttr.Builder builder = new XAttr.Builder();
builder.setNameSpace(XAttr.NameSpace.valueOf(a.getValue("NAMESPACE"))).
- setName(a.getValue("NAME"));
+ setName(a.getValue("NAME")).
+ setEnumerable(Boolean.parseBoolean(a.getValueOrNull("NUMERABLE")));
String v = a.getValueOrNull("VALUE");
if (v != null) {
try {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatPBINode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatPBINode.java
index 1f21871ac7b02..6cdbeeb3e9ac0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatPBINode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatPBINode.java
@@ -92,6 +92,9 @@ public final class FSImageFormatPBINode {
/* See the comments in fsimage.proto for an explanation of the following. */
public static final int XATTR_NAMESPACE_EXT_OFFSET = 5;
public static final int XATTR_NAMESPACE_EXT_MASK = 1;
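+ // XATTR_NUMERABLE_* locate the "value is enumerable" bit inside the encoded
+ // xattr name; XATTR_VALUE_MASK extracts the 24-bit string table id carried
+ // in XAttrCompactProto.valueInt for enumerable values.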
+ public static final int XATTR_NUMERABLE_OFFSET = 4;
+ public static final int XATTR_NUMERABLE_MASK = 1;
+ public static final int XATTR_VALUE_MASK = (1 << 24) - 1;
private static final Logger LOG =
LoggerFactory.getLogger(FSImageFormatPBINode.class);
@@ -121,10 +124,16 @@ public static List loadXAttrs(
for (XAttrCompactProto xAttrCompactProto : proto.getXAttrsList()) {
int v = xAttrCompactProto.getName();
byte[] value = null;
- if (xAttrCompactProto.getValue() != null) {
- value = xAttrCompactProto.getValue().toByteArray();
+ if (XAttrFormat.isNumerable(v)) {
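+ // The value of an enumerable xattr was stored as valueInt, an id into
+ // the string table, rather than as raw bytes.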
+ assert xAttrCompactProto.hasValueInt();
+ int valueInt = xAttrCompactProto.getValueInt();
+ b.add(XAttrFormat.toXAttr(v, valueInt, stringTable));
+ } else {
+ if (xAttrCompactProto.getValue() != null) {
+ value = xAttrCompactProto.getValue().toByteArray();
+ }
+ b.add(XAttrFormat.toXAttr(v, value, stringTable));
}
- b.add(XAttrFormat.toXAttr(v, value, stringTable));
}
return b;
@@ -648,8 +657,12 @@ private static XAttrFeatureProto.Builder buildXAttrs(XAttrFeature f) {
newBuilder();
int v = XAttrFormat.toInt(a);
xAttrCompactBuilder.setName(v);
- if (a.getValue() != null) {
- xAttrCompactBuilder.setValue(PBHelperClient.getByteString(a.getValue()));
+ if (a.isEnumerable()) {
+ xAttrCompactBuilder.setValueInt(XAttrValueFormat.toInt(a.getValue()));
+ } else {
+ if (a.getValue() != null) {
+ xAttrCompactBuilder.setValue(PBHelperClient.getByteString(a.getValue()));
+ }
}
b.addXAttrs(xAttrCompactBuilder.build());
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SerialNumberManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SerialNumberManager.java
index 61fdd7d5b3e18..78c5a18213146 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SerialNumberManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SerialNumberManager.java
@@ -30,7 +30,7 @@ public enum SerialNumberManager {
GLOBAL(),
USER(PermissionStatusFormat.USER, AclEntryStatusFormat.NAME),
GROUP(PermissionStatusFormat.GROUP, AclEntryStatusFormat.NAME),
- XATTR(XAttrFormat.NAME);
+ XATTR(XAttrFormat.NAME, XAttrValueFormat.VALUE);
private static final SerialNumberManager[] values = values();
private static final int maxEntryBits;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/XAttrFeature.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/XAttrFeature.java
index 11263bb9a01e2..ddd8e2f7092cc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/XAttrFeature.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/XAttrFeature.java
@@ -50,7 +50,8 @@ public XAttrFeature(List xAttrs) {
ImmutableList.Builder b = null;
for (XAttr attr : xAttrs) {
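+ // Enumerable values serialize to a fixed 4-byte reference, so they are
+ // always small enough to pack inline.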
if (attr.getValue() == null ||
- attr.getValue().length <= PACK_THRESHOLD) {
+ attr.getValue().length <= PACK_THRESHOLD ||
+ attr.isEnumerable()) {
toPack.add(attr);
} else {
if (b == null) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/XAttrFormat.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/XAttrFormat.java
index 4d46e691df217..f53035ae270b7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/XAttrFormat.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/XAttrFormat.java
@@ -19,6 +19,7 @@
import java.io.ByteArrayOutputStream;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
@@ -36,10 +37,11 @@
* incompatible.
*
*/
-
+@SuppressWarnings("checkstyle:MemberName")
public enum XAttrFormat implements LongBitFormat.Enum {
- RESERVED(null, 5),
- NS_EXT(RESERVED.BITS, 1),
+ RESERVED(null, 4),
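+ // One bit taken from the former RESERVED field records whether the
+ // value is enumerable.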
+ NUMERABLE(RESERVED.BITS, 1),
+ NS_EXT(NUMERABLE.BITS, 1),
NAME(NS_EXT.BITS, 24),
NS(NAME.BITS, 2);
@@ -72,17 +74,23 @@ public static String getName(int record) {
return SerialNumberManager.XATTR.getString(nid);
}
+ public static boolean isNumerable(int record) {
+ return (int)NUMERABLE.BITS.retrieve(record) == 1;
+ }
+
static int toInt(XAttr a) {
int nid = SerialNumberManager.XATTR.getSerialNumber(a.getName());
int nsOrd = a.getNameSpace().ordinal();
long value = NS.BITS.combine(nsOrd & NS_MASK, 0L);
value = NS_EXT.BITS.combine(nsOrd >>> NS_EXT_SHIFT, value);
value = NAME.BITS.combine(nid, value);
+ value = NUMERABLE.BITS.combine(a.isEnumerable() ? 1 : 0, value);
return (int)value;
}
static XAttr toXAttr(int record, byte[] value,
SerialNumberManager.StringTable stringTable) {
+ assert NUMERABLE.BITS.retrieve(record) == 0;
int nid = (int)NAME.BITS.retrieve(record);
String name = SerialNumberManager.XATTR.getString(nid, stringTable);
return new XAttr.Builder()
@@ -92,6 +100,18 @@ static XAttr toXAttr(int record, byte[] value,
.build();
}
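+
+ /**
+ * Decode an XAttr whose value was stored as an interned id
+ * (see {@link XAttrValueFormat}) instead of raw bytes.
+ */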
+ static XAttr toXAttr(int record, int valueInt,
+ SerialNumberManager.StringTable stringTable) {
+ assert NUMERABLE.BITS.retrieve(record) == 1;
+ int nid = (int)NAME.BITS.retrieve(record);
+ String name = SerialNumberManager.XATTR.getString(nid, stringTable);
+ return new XAttr.Builder()
+ .setNameSpace(getNamespace(record))
+ .setName(name)
+ .setValue(XAttrValueFormat.getValue(valueInt))
+ .setEnumerable(true)
+ .build();
+ }
+
/**
* Unpack byte[] to XAttrs.
*
@@ -111,13 +131,23 @@ static List toXAttrs(byte[] attrs) {
i += 4;
builder.setNameSpace(XAttrFormat.getNamespace(v));
builder.setName(XAttrFormat.getName(v));
- int vlen = ((0xff & attrs[i]) << 8) | (0xff & attrs[i + 1]);
- i += 2;
- if (vlen > 0) {
- byte[] value = new byte[vlen];
- System.arraycopy(attrs, i, value, 0, vlen);
- builder.setValue(value);
- i += vlen;
+ boolean isEnumerable = XAttrFormat.isNumerable(v);
+ builder.setEnumerable(isEnumerable);
+ if (!isEnumerable) {
+ int vlen = ((0xff & attrs[i]) << 8) | (0xff & attrs[i + 1]);
+ i += 2;
+ if (vlen > 0) {
+ byte[] value = new byte[vlen];
+ System.arraycopy(attrs, i, value, 0, vlen);
+ builder.setValue(value);
+ i += vlen;
+ }
+ } else {
+ // 4-byte, big-endian id of the interned value
+ v = Ints.fromBytes(attrs[i], attrs[i + 1],
+ attrs[i + 2], attrs[i + 3]);
+ i += 4;
+ builder.setValue(XAttrValueFormat.getValue(v));
}
xAttrs.add(builder.build());
}
@@ -145,19 +175,40 @@ static XAttr getXAttr(byte[] attrs, String prefixedName) {
i += 4;
XAttr.NameSpace namespace = XAttrFormat.getNamespace(v);
String name = XAttrFormat.getName(v);
- int vlen = ((0xff & attrs[i]) << 8) | (0xff & attrs[i + 1]);
- i += 2;
+ boolean isEnumerable = XAttrFormat.isNumerable(v);
+ // If enumerable, tmpInt is the interned value id (see XAttrValueFormat);
+ // if not enumerable, tmpInt is the length of the value bytes (vlen).
+ int tmpInt;
+ if (isEnumerable) {
+ // big-endian
+ tmpInt = Ints.fromBytes(attrs[i], attrs[i + 1],
+ attrs[i + 2], attrs[i + 3]);
+ i += 4;
+ } else {
+ tmpInt = ((0xff & attrs[i]) << 8) | (0xff & attrs[i + 1]);
+ i += 2;
+ }
if (xAttr.getNameSpace() == namespace &&
- xAttr.getName().equals(name)) {
- if (vlen > 0) {
- byte[] value = new byte[vlen];
- System.arraycopy(attrs, i, value, 0, vlen);
+ xAttr.getName().equals(name)) {
+ if (isEnumerable) {
return new XAttr.Builder().setNameSpace(namespace).
- setName(name).setValue(value).build();
+ setName(name).
+ setValue(XAttrValueFormat.getValue(tmpInt)).
+ setEnumerable(true).
+ build();
+ } else {
+ if (tmpInt > 0) {
+ byte[] value = new byte[tmpInt];
+ System.arraycopy(attrs, i, value, 0, tmpInt);
+ return new XAttr.Builder().setNameSpace(namespace).
+ setName(name).setValue(value).build();
+ }
+ return xAttr;
+ }
+ } else {
+ if (!isEnumerable) {
+ i += tmpInt;
}
- return xAttr;
}
- i += vlen;
}
return null;
}
@@ -178,13 +229,19 @@ static byte[] toBytes(List xAttrs) {
// big-endian
int v = XAttrFormat.toInt(a);
out.write(Ints.toByteArray(v));
- int vlen = a.getValue() == null ? 0 : a.getValue().length;
- Preconditions.checkArgument(vlen < XATTR_VALUE_LEN_MAX,
- "The length of xAttr values is too long.");
- out.write((byte)(vlen >> 8));
- out.write((byte)(vlen));
- if (vlen > 0) {
- out.write(a.getValue());
+ if (!a.isEnumerable()) {
+ int vlen = a.getValue() == null ? 0 : a.getValue().length;
+ Preconditions.checkArgument(vlen < XATTR_VALUE_LEN_MAX,
+ "The length of xAttr values is too long.");
+ out.write((byte) (vlen >> 8));
+ out.write((byte) (vlen));
+ if (vlen > 0) {
+ out.write(a.getValue());
+ }
+ } else {
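+ // Enumerable values: write a fixed 4-byte, big-endian id pointing into
+ // the shared XATTR string table instead of a length-prefixed byte array.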
+ out.write(Ints.toByteArray(
+ SerialNumberManager.XATTR.getSerialNumber(
+ new String(a.getValue(), StandardCharsets.UTF_8))));
}
}
} catch (IOException e) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/XAttrValueFormat.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/XAttrValueFormat.java
new file mode 100644
index 0000000000000..3fb61322fee89
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/XAttrValueFormat.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode;
+
+import org.apache.hadoop.hdfs.util.LongBitFormat;
+
+import java.nio.charset.StandardCharsets;
+
+/**
+ * Format for packing enumerable XAttr values as string table ids.
+ *
+ * Note: this format is used both in-memory and on-disk. Changes will be
+ * incompatible.
+ *
+ */
+
+public enum XAttrValueFormat implements LongBitFormat.Enum {
+ VALUE(null, 24);
+
+ private final LongBitFormat BITS;
+
+ XAttrValueFormat(LongBitFormat previous, int length) {
+ BITS = new LongBitFormat(name(), previous, length, 0);
+ }
+
+ @Override
+ public int getLength() {
+ return BITS.getLength();
+ }
+
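+ /** Resolve an interned value id back to the value's UTF-8 bytes. */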
+ public static byte[] getValue(int record) {
+ int nid = (int)VALUE.BITS.retrieve(record);
+ return SerialNumberManager.XATTR.getString(nid).getBytes(StandardCharsets.UTF_8);
+ }
+
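+ /** Intern the value in the XATTR string table and return its 24-bit id. */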
+ static int toInt(byte[] value) {
+ int vid = SerialNumberManager.XATTR.getSerialNumber(new String(value, StandardCharsets.UTF_8));
+ long res = 0L;
+ res = VALUE.BITS.combine(vid, res);
+ return (int)res;
+ }
+
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java
index 6a2049acb4b54..e79058874ad19 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java
@@ -18,15 +18,8 @@
package org.apache.hadoop.hdfs.tools.offlineImageViewer;
import org.apache.hadoop.util.Preconditions;
-import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.ACL_ENTRY_NAME_MASK;
-import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.ACL_ENTRY_NAME_OFFSET;
-import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.ACL_ENTRY_SCOPE_OFFSET;
-import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.ACL_ENTRY_TYPE_OFFSET;
-import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAMESPACE_MASK;
-import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAME_OFFSET;
-import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAMESPACE_OFFSET;
-import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAMESPACE_EXT_OFFSET;
-import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAMESPACE_EXT_MASK;
+
+import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.*;
import static org.apache.hadoop.hdfs.tools.offlineImageViewer.PBImageXmlWriter.*;
import java.io.BufferedOutputStream;
@@ -891,18 +884,25 @@ private INodeSection.XAttrFeatureProto.Builder xattrsXmlToProto(Node xattrs)
int nsIdx = XAttrProtos.XAttrProto.
XAttrNamespaceProto.valueOf(ns).ordinal();
String name = xattr.removeChildStr(SECTION_NAME);
- String valStr = xattr.removeChildStr(INODE_SECTION_VAL);
- byte[] val = null;
- if (valStr == null) {
- String valHex = xattr.removeChildStr(INODE_SECTION_VAL_HEX);
- if (valHex == null) {
- throw new IOException(" had no or entry.");
- }
- val = new HexBinaryAdapter().unmarshal(valHex);
+ boolean numerable = xattr.removeChildBool(INODE_SECTION_NUMERABLE);
+ if (numerable) {
+ String valStr = xattr.removeChildStr(INODE_SECTION_VAL);
+ int valueId = registerStringId(valStr);
+ b.setValueInt(valueId);
} else {
- val = valStr.getBytes("UTF8");
+ String valStr = xattr.removeChildStr(INODE_SECTION_VAL);
+ byte[] val = null;
+ if (valStr == null) {
+ String valHex = xattr.removeChildStr(INODE_SECTION_VAL_HEX);
+ if (valHex == null) {
+ throw new IOException(" had no or entry.");
+ }
+ val = new HexBinaryAdapter().unmarshal(valHex);
+ } else {
+ val = valStr.getBytes("UTF8");
+ }
+ b.setValue(ByteString.copyFrom(val));
}
- b.setValue(ByteString.copyFrom(val));
// The XAttrCompactProto name field uses a fairly complex format
// to encode both the string table ID of the xattr name and the
@@ -911,7 +911,8 @@ private INodeSection.XAttrFeatureProto.Builder xattrsXmlToProto(Node xattrs)
int encodedName = (nameId << XATTR_NAME_OFFSET) |
((nsIdx & XATTR_NAMESPACE_MASK) << XATTR_NAMESPACE_OFFSET) |
(((nsIdx >> 2) & XATTR_NAMESPACE_EXT_MASK)
- << XATTR_NAMESPACE_EXT_OFFSET);
+ << XATTR_NAMESPACE_EXT_OFFSET) |
+ (numerable ? 1 : 0) << XATTR_NUMERABLE_OFFSET;
b.setName(encodedName);
xattr.verifyNoRemainingKeys("xattr");
bld.addXAttrs(b);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
index 8030ead8b5212..93bc52fcc4557 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
@@ -73,12 +73,7 @@
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
import org.apache.hadoop.thirdparty.protobuf.ByteString;
-import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAMESPACE_MASK;
-import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAMESPACE_OFFSET;
-import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAMESPACE_EXT_MASK;
-import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAMESPACE_EXT_OFFSET;
-import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAME_OFFSET;
-import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAME_MASK;
+import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.*;
/**
* PBImageXmlWriter walks over an fsimage structure and writes out
@@ -177,6 +172,7 @@ public final class PBImageXmlWriter {
public static final String INODE_SECTION_QUOTA = "quota";
public static final String INODE_SECTION_TARGET = "target";
public static final String INODE_SECTION_NS = "ns";
+ public static final String INODE_SECTION_NUMERABLE = "numerable";
public static final String INODE_SECTION_VAL = "val";
public static final String INODE_SECTION_VAL_HEX = "valHex";
public static final String INODE_SECTION_INODE = "inode";
@@ -438,11 +434,19 @@ private void dumpXattrs(INodeSection.XAttrFeatureProto xattrs) {
o(SECTION_NAME, SerialNumberManager.XATTR.getString(
XATTR_NAME_MASK & (encodedName >> XATTR_NAME_OFFSET),
stringTable));
- ByteString val = xattr.getValue();
- if (val.isValidUtf8()) {
- o(INODE_SECTION_VAL, val.toStringUtf8());
+ boolean numerable = (XATTR_NUMERABLE_MASK & (encodedName >> XATTR_NUMERABLE_OFFSET)) == 1;
+ o(INODE_SECTION_NUMERABLE, numerable);
+ if (numerable) {
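+ // The value was stored as an interned id; resolve it back to its string.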
+ int valueInt = xattr.getValueInt();
+ o(INODE_SECTION_VAL, SerialNumberManager.XATTR.getString(XATTR_VALUE_MASK & valueInt,
+ stringTable));
} else {
- o(INODE_SECTION_VAL_HEX, Hex.encodeHexString(val.toByteArray()));
+ ByteString val = xattr.getValue();
+ if (val.isValidUtf8()) {
+ o(INODE_SECTION_VAL, val.toStringUtf8());
+ } else {
+ o(INODE_SECTION_VAL_HEX, Hex.encodeHexString(val.toByteArray()));
+ }
}
out.print("" + INODE_SECTION_XATTR + ">");
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto
index 2bdd302a1e4ef..d9b56513fc506 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto
@@ -119,10 +119,12 @@ message INodeSection {
* so only 2 bits were needed. At that time, this bit was reserved. When a
* 5th namespace was created (raw) this bit became used as a 3rd namespace
* bit.
- * [27:32) -- reserved for future uses.
+ * [27:28) -- whether the value is enumerable (see valueInt below).
+ * [28:32) -- reserved for future uses.
*/
required fixed32 name = 1;
optional bytes value = 2;
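+ // Set only for enumerable values: the string table id of the interned value.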
+ optional fixed32 valueInt = 3;
}
message XAttrFeatureProto {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
index b4ba775732921..1d1116c384e7b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
@@ -321,6 +321,8 @@ public static void createOriginalFSImage() throws IOException {
// OIV should be able to handle XAttr values that can't be expressed
// as UTF8
hdfs.setXAttr(xattr, "user.a4", new byte[]{ -0x3d, 0x28 });
+ // Set enumerable attribute
+ hdfs.setXAttr(xattr, "user.a5", new byte[]{0x37, 0x38, 0x39}, true);
writtenFiles.put(xattr.toString(), hdfs.getFileStatus(xattr));
// Set ACLs
hdfs.setAcl(