From 8aeaa9c418862fc1d4fb4e166da72a2fac2493dc Mon Sep 17 00:00:00 2001
From: Simon Spero
Date: Sun, 25 Jun 2017 18:28:07 -0400
Subject: [PATCH] COMPRESS-400 : Squash commit of COMPRESS-400-REDUX.
Add support for extra PAX headers (local and global).
Signed-off-by: Simon Spero
---
.../archivers/tar/TarArchiveEntry.java | 150 ++++++++++++-
.../archivers/tar/TarArchiveInputStream.java | 62 +-----
.../archivers/tar/TarArchiveOutputStream.java | 200 ++++++++++--------
.../archivers/tar/TarArchiveEntryTest.java | 55 ++++-
.../tar/TarArchiveInputStreamTest.java | 24 +++
.../tar/TarArchiveOutputStreamTest.java | 50 ++++-
6 files changed, 395 insertions(+), 146 deletions(-)
diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java
index 849532cd9b3..8595252db15 100644
--- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java
+++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java
@@ -20,10 +20,11 @@
import java.io.File;
import java.io.IOException;
+import java.util.Collections;
import java.util.Date;
+import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
-
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipEncoding;
import org.apache.commons.compress.utils.ArchiveUtils;
@@ -133,7 +134,7 @@
* char prefix[131]; // offset 345
* char atime[12]; // offset 476
* char ctime[12]; // offset 488
- * char mfill[8]; // offset 500
+ * char mfill[8]; // offset 500
* char xmagic[4]; // offset 508 "tar"
* };
*
@@ -207,6 +208,9 @@ public class TarArchiveEntry implements ArchiveEntry, TarConstants {
/** The entry's file reference */
private final File file;
+ /** Extra, user supplied pax headers */
+ private final Map<String, String> extraPaxHeaders = new HashMap<>();
+
/** Maximum length of a user's name in the tar file */
public static final int MAX_NAMELEN = 31;
@@ -219,6 +223,7 @@ public class TarArchiveEntry implements ArchiveEntry, TarConstants {
/** Convert millis to seconds */
public static final int MILLIS_PER_SECOND = 1000;
+
/**
* Construct an empty entry and prepares the header values.
*/
@@ -942,6 +947,147 @@ public boolean isSparse() {
return isGNUSparse() || isStarSparse();
}
+ /**
+ * get extra PAX Headers
+ * @return read-only map containing any extra PAX Headers
+ * @since 1.15
+ */
+ public Map<String, String> getExtraPaxHeaders() {
+ return Collections.unmodifiableMap(extraPaxHeaders);
+ }
+
+ /**
+ * clear all extra PAX headers.
+ * @since 1.15
+ */
+ public void clearExtraPaxHeaders() {
+ extraPaxHeaders.clear();
+ }
+
+ /**
+ * add a PAX header to this entry. If the header corresponds to an existing field in the entry,
+ * that field will be set; otherwise the header will be added to the extraPaxHeaders Map
+ * @param name The full name of the header to set.
+ * @param value value of header.
+ * @since 1.15
+ */
+ public void addPaxHeader(String name,String value) {
+ processPaxHeader(name,value);
+ }
+
+ /**
+ * get named extra PAX header
+ * @param name The full name of an extended PAX header to retrieve
+ * @return The value of the header, if any.
+ * @since 1.15
+ */
+ public String getExtraPaxHeader(String name) {
+ return extraPaxHeaders.get(name);
+ }
+
+ /**
+ * Update the entry using a map of pax headers.
+ * @param headers map of pax headers to apply to this entry
+ * @since 1.15
+ */
+ void updateEntryFromPaxHeaders(Map<String, String> headers) {
+ for (final Map.Entry<String, String> ent : headers.entrySet()) {
+ final String key = ent.getKey();
+ final String val = ent.getValue();
+ processPaxHeader(key, val, headers);
+ }
+ }
+
+ /**
+ * process one pax header, using the entry's extraPaxHeaders map as source for extra headers
+ * used when handling entries for sparse files.
+ * @param key the header name.
+ * @param val the header value.
+ * @since 1.15
+ */
+ private void processPaxHeader(String key, String val) {
+ processPaxHeader(key,val,extraPaxHeaders);
+ }
+
+ /**
+ * Process one pax header, using the supplied map as source for extra headers to be used when handling
+ * entries for sparse files
+ *
+ * @param key the header name.
+ * @param val the header value.
+ * @param headers map of headers used for dealing with sparse file.
+ * @since 1.15
+ */
+ private void processPaxHeader(String key, String val, Map<String, String> headers) {
+ /*
+ * The following headers are defined for Pax.
+ * atime, ctime, charset: cannot use these without changing TarArchiveEntry fields
+ * mtime
+ * comment
+ * gid, gname
+ * linkpath
+ * size
+ * uid,uname
+ * SCHILY.devminor, SCHILY.devmajor: don't have setters/getters for those
+ *
+ * GNU sparse files use additional members, we use
+ * GNU.sparse.size to detect the 0.0 and 0.1 versions and
+ * GNU.sparse.realsize for 1.0.
+ *
+ * star files use additional members of which we use
+ * SCHILY.filetype in order to detect star sparse files.
+ *
+ * If called from addPaxHeader, these additional headers must already be present.
+ */
+ switch (key) {
+ case "path":
+ setName(val);
+ break;
+ case "linkpath":
+ setLinkName(val);
+ break;
+ case "gid":
+ setGroupId(Long.parseLong(val));
+ break;
+ case "gname":
+ setGroupName(val);
+ break;
+ case "uid":
+ setUserId(Long.parseLong(val));
+ break;
+ case "uname":
+ setUserName(val);
+ break;
+ case "size":
+ setSize(Long.parseLong(val));
+ break;
+ case "mtime":
+ setModTime((long) (Double.parseDouble(val) * 1000));
+ break;
+ case "SCHILY.devminor":
+ setDevMinor(Integer.parseInt(val));
+ break;
+ case "SCHILY.devmajor":
+ setDevMajor(Integer.parseInt(val));
+ break;
+ case "GNU.sparse.size":
+ fillGNUSparse0xData(headers);
+ break;
+ case "GNU.sparse.realsize":
+ fillGNUSparse1xData(headers);
+ break;
+ case "SCHILY.filetype":
+ if ("sparse".equals(val)) {
+ fillStarSparseData(headers);
+ }
+ break;
+ default:
+ extraPaxHeaders.put(key,val);
+ }
+ }
+
+
+
/**
* If this entry represents a file, and the file is a directory, return
* an array of TarEntries for this entry's children.
diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java
index 281ad5b931e..4f090ecb64d 100644
--- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java
@@ -28,7 +28,6 @@
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
-import java.util.Map.Entry;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream;
@@ -260,7 +259,7 @@ public synchronized void reset() {
* @throws IOException on error
*/
public TarArchiveEntry getNextTarEntry() throws IOException {
- if (hasHitEOF) {
+ if (isAtEOF()) {
return null;
}
@@ -396,8 +395,8 @@ protected byte[] getLongNameData() throws IOException {
*/
private byte[] getRecord() throws IOException {
byte[] headerBuf = readRecord();
- hasHitEOF = isEOFRecord(headerBuf);
- if (hasHitEOF && headerBuf != null) {
+ setAtEOF(isEOFRecord(headerBuf));
+ if (isAtEOF() && headerBuf != null) {
tryToConsumeSecondEOFRecord();
consumeRemainderOfLastBlock();
headerBuf = null;
@@ -504,55 +503,8 @@ Map<String, String> parsePaxHeaders(final InputStream i)
}
private void applyPaxHeadersToCurrentEntry(final Map<String, String> headers) {
- /*
- * The following headers are defined for Pax.
- * atime, ctime, charset: cannot use these without changing TarArchiveEntry fields
- * mtime
- * comment
- * gid, gname
- * linkpath
- * size
- * uid,uname
- * SCHILY.devminor, SCHILY.devmajor: don't have setters/getters for those
- *
- * GNU sparse files use additional members, we use
- * GNU.sparse.size to detect the 0.0 and 0.1 versions and
- * GNU.sparse.realsize for 1.0.
- *
- * star files use additional members of which we use
- * SCHILY.filetype in order to detect star sparse files.
- */
- for (final Entry<String, String> ent : headers.entrySet()){
- final String key = ent.getKey();
- final String val = ent.getValue();
- if ("path".equals(key)){
- currEntry.setName(val);
- } else if ("linkpath".equals(key)){
- currEntry.setLinkName(val);
- } else if ("gid".equals(key)){
- currEntry.setGroupId(Long.parseLong(val));
- } else if ("gname".equals(key)){
- currEntry.setGroupName(val);
- } else if ("uid".equals(key)){
- currEntry.setUserId(Long.parseLong(val));
- } else if ("uname".equals(key)){
- currEntry.setUserName(val);
- } else if ("size".equals(key)){
- currEntry.setSize(Long.parseLong(val));
- } else if ("mtime".equals(key)){
- currEntry.setModTime((long) (Double.parseDouble(val) * 1000));
- } else if ("SCHILY.devminor".equals(key)){
- currEntry.setDevMinor(Integer.parseInt(val));
- } else if ("SCHILY.devmajor".equals(key)){
- currEntry.setDevMajor(Integer.parseInt(val));
- } else if ("GNU.sparse.size".equals(key)) {
- currEntry.fillGNUSparse0xData(headers);
- } else if ("GNU.sparse.realsize".equals(key)) {
- currEntry.fillGNUSparse1xData(headers);
- } else if ("SCHILY.filetype".equals(key) && "sparse".equals(val)) {
- currEntry.fillStarSparseData(headers);
- }
- }
+ currEntry.updateEntryFromPaxHeaders(headers);
+
}
/**
@@ -643,7 +595,7 @@ private void tryToConsumeSecondEOFRecord() throws IOException {
public int read(final byte[] buf, final int offset, int numToRead) throws IOException {
int totalRead = 0;
- if (hasHitEOF || isDirectory() || entryOffset >= entrySize) {
+ if (isAtEOF() || isDirectory() || entryOffset >= entrySize) {
return -1;
}
@@ -659,7 +611,7 @@ public int read(final byte[] buf, final int offset, int numToRead) throws IOExce
if (numToRead > 0) {
throw new IOException("Truncated TAR archive");
}
- hasHitEOF = true;
+ setAtEOF(true);
} else {
count(totalRead);
entryOffset += totalRead;
diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java
index 6b317057d13..340e35c1e29 100644
--- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java
@@ -22,6 +22,7 @@
import java.io.IOException;
import java.io.OutputStream;
import java.io.StringWriter;
+import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Date;
@@ -35,31 +36,46 @@
import org.apache.commons.compress.utils.CountingOutputStream;
/**
- * The TarOutputStream writes a UNIX tar archive as an OutputStream.
- * Methods are provided to put entries, and then write their contents
- * by writing to this stream using write().
+ * The TarOutputStream writes a UNIX tar archive as an OutputStream. Methods are provided to put
+ * entries, and then write their contents by writing to this stream using write().
+ *
* @NotThreadSafe
*/
public class TarArchiveOutputStream extends ArchiveOutputStream {
- /** Fail if a long file name is required in the archive. */
+
+ /**
+ * Fail if a long file name is required in the archive.
+ */
public static final int LONGFILE_ERROR = 0;
- /** Long paths will be truncated in the archive. */
+ /**
+ * Long paths will be truncated in the archive.
+ */
public static final int LONGFILE_TRUNCATE = 1;
- /** GNU tar extensions are used to store long file names in the archive. */
+ /**
+ * GNU tar extensions are used to store long file names in the archive.
+ */
public static final int LONGFILE_GNU = 2;
- /** POSIX/PAX extensions are used to store long file names in the archive. */
+ /**
+ * POSIX/PAX extensions are used to store long file names in the archive.
+ */
public static final int LONGFILE_POSIX = 3;
- /** Fail if a big number (e.g. size > 8GiB) is required in the archive. */
+ /**
+ * Fail if a big number (e.g. size > 8GiB) is required in the archive.
+ */
public static final int BIGNUMBER_ERROR = 0;
- /** star/GNU tar/BSD tar extensions are used to store big number in the archive. */
+ /**
+ * star/GNU tar/BSD tar extensions are used to store big number in the archive.
+ */
public static final int BIGNUMBER_STAR = 1;
- /** POSIX/PAX extensions are used to store big numbers in the archive. */
+ /**
+ * POSIX/PAX extensions are used to store big numbers in the archive.
+ */
public static final int BIGNUMBER_POSIX = 2;
private static final int RECORD_SIZE = 512;
@@ -76,10 +92,14 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
private boolean closed = false;
- /** Indicates if putArchiveEntry has been called without closeArchiveEntry */
+ /**
+ * Indicates if putArchiveEntry has been called without closeArchiveEntry
+ */
private boolean haveUnclosedEntry = false;
- /** indicates if this archive is finished */
+ /**
+ * indicates if this archive is finished
+ */
private boolean finished = false;
private final OutputStream out;
@@ -97,6 +117,7 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
/**
* Constructor for TarInputStream.
+ *
* @param os the output stream to use
*/
public TarArchiveOutputStream(final OutputStream os) {
@@ -105,6 +126,7 @@ public TarArchiveOutputStream(final OutputStream os) {
/**
* Constructor for TarInputStream.
+ *
* @param os the output stream to use
* @param encoding name of the encoding to use for file names
* @since 1.4
@@ -115,6 +137,7 @@ public TarArchiveOutputStream(final OutputStream os, final String encoding) {
/**
* Constructor for TarInputStream.
+ *
* @param os the output stream to use
* @param blockSize the block size to use. Must be a multiple of 512 bytes.
*/
@@ -125,6 +148,7 @@ public TarArchiveOutputStream(final OutputStream os, final int blockSize) {
/**
* Constructor for TarInputStream.
+ *
* @param os the output stream to use
* @param blockSize the block size to use
* @param recordSize the record size to use. Must be 512 bytes.
@@ -139,6 +163,7 @@ public TarArchiveOutputStream(final OutputStream os, final int blockSize,
/**
* Constructor for TarInputStream.
+ *
* @param os the output stream to use
* @param blockSize the block size to use . Must be a multiple of 512 bytes.
* @param recordSize the record size to use. Must be 512 bytes.
@@ -189,10 +214,10 @@ public TarArchiveOutputStream(final OutputStream os, final int blockSize,
}
/**
- * Set the long file mode.
- * This can be LONGFILE_ERROR(0), LONGFILE_TRUNCATE(1) or LONGFILE_GNU(2).
- * This specifies the treatment of long file names (names >= TarConstants.NAMELEN).
- * Default is LONGFILE_ERROR.
+ * Set the long file mode. This can be LONGFILE_ERROR(0), LONGFILE_TRUNCATE(1) or
+ * LONGFILE_GNU(2). This specifies the treatment of long file names (names >=
+ * TarConstants.NAMELEN). Default is LONGFILE_ERROR.
+ *
* @param longFileMode the mode to use
*/
public void setLongFileMode(final int longFileMode) {
@@ -200,10 +225,11 @@ public void setLongFileMode(final int longFileMode) {
}
/**
- * Set the big number mode.
- * This can be BIGNUMBER_ERROR(0), BIGNUMBER_POSIX(1) or BIGNUMBER_STAR(2).
- * This specifies the treatment of big files (sizes > TarConstants.MAXSIZE) and other numeric values to big to fit into a traditional tar header.
+ * Set the big number mode. This can be BIGNUMBER_ERROR(0), BIGNUMBER_POSIX(1) or
+ * BIGNUMBER_STAR(2). This specifies the treatment of big files (sizes >
+ * TarConstants.MAXSIZE) and other numeric values too big to fit into a traditional tar header.
* Default is BIGNUMBER_ERROR.
+ *
* @param bigNumberMode the mode to use
* @since 1.4
*/
@@ -213,8 +239,9 @@ public void setBigNumberMode(final int bigNumberMode) {
/**
* Whether to add a PAX extension header for non-ASCII file names.
- * @since 1.4
+ *
* @param b whether to add a PAX extension header for non-ASCII file names.
+ * @since 1.4
*/
public void setAddPaxHeadersForNonAsciiNames(final boolean b) {
addPaxHeadersForNonAsciiNames = b;
@@ -258,6 +285,7 @@ public void finish() throws IOException {
/**
* Closes the underlying OutputStream.
+ *
* @throws IOException on error
*/
@Override
@@ -284,13 +312,11 @@ public int getRecordSize() {
}
/**
- * Put an entry on the output stream. This writes the entry's
- * header record and positions the output stream for writing
- * the contents of the entry. Once this method is called, the
- * stream is ready for calls to write() to write the entry's
- * contents. Once the contents are written, closeArchiveEntry()
- * MUST be called to ensure that all buffered data
- * is completely written to the output stream.
+ * Put an entry on the output stream. This writes the entry's header record and positions the
+ * output stream for writing the contents of the entry. Once this method is called, the stream
+ * is ready for calls to write() to write the entry's contents. Once the contents are written,
+ * closeArchiveEntry() MUST be called to ensure that all buffered data is completely
+ * written to the output stream.
*
* @param archiveEntry The TarEntry to be written to the archive.
* @throws IOException on error
@@ -302,7 +328,17 @@ public void putArchiveEntry(final ArchiveEntry archiveEntry) throws IOException
throw new IOException("Stream has already been finished");
}
final TarArchiveEntry entry = (TarArchiveEntry) archiveEntry;
- final Map<String, String> paxHeaders = new HashMap<>();
+ if (entry.isGlobalPaxHeader()) {
+ final byte[] data = encodeExtendedPaxHeadersContents(entry.getExtraPaxHeaders());
+ entry.setSize(data.length);
+ entry.writeEntryHeader(recordBuf, zipEncoding, bigNumberMode == BIGNUMBER_STAR);
+ writeRecord(recordBuf);
+ currSize= entry.getSize();
+ currBytes = 0;
+ this.haveUnclosedEntry = true;
+ write(data);
+ closeArchiveEntry();
+ } else {final Map<String, String> paxHeaders = new HashMap<>();
final String entryName = entry.getName();
final boolean paxHeaderContainsPath = handleLongName(entry, entryName, paxHeaders, "path",
TarConstants.LF_GNUTYPE_LONGNAME, "file name");
@@ -312,50 +348,50 @@ public void putArchiveEntry(final ArchiveEntry archiveEntry) throws IOException
&& handleLongName(entry, linkName, paxHeaders, "linkpath",
TarConstants.LF_GNUTYPE_LONGLINK, "link name");
- if (bigNumberMode == BIGNUMBER_POSIX) {
- addPaxHeadersForBigNumbers(paxHeaders, entry);
- } else if (bigNumberMode != BIGNUMBER_STAR) {
- failForBigNumbers(entry);
- }
+ if (bigNumberMode == BIGNUMBER_POSIX) {
+ addPaxHeadersForBigNumbers(paxHeaders, entry);
+ } else if (bigNumberMode != BIGNUMBER_STAR) {
+ failForBigNumbers(entry);
+ }
- if (addPaxHeadersForNonAsciiNames && !paxHeaderContainsPath
- && !ASCII.canEncode(entryName)) {
- paxHeaders.put("path", entryName);
- }
+ if (addPaxHeadersForNonAsciiNames && !paxHeaderContainsPath
+ && !ASCII.canEncode(entryName)) {
+ paxHeaders.put("path", entryName);
+ }
- if (addPaxHeadersForNonAsciiNames && !paxHeaderContainsLinkPath
- && (entry.isLink() || entry.isSymbolicLink())
- && !ASCII.canEncode(linkName)) {
- paxHeaders.put("linkpath", linkName);
- }
+ if (addPaxHeadersForNonAsciiNames && !paxHeaderContainsLinkPath
+ && (entry.isLink() || entry.isSymbolicLink())
+ && !ASCII.canEncode(linkName)) {
+ paxHeaders.put("linkpath", linkName);
+ }
+ paxHeaders.putAll(entry.getExtraPaxHeaders());
- if (paxHeaders.size() > 0) {
- writePaxHeaders(entry, entryName, paxHeaders);
- }
+ if (paxHeaders.size() > 0) {
+ writePaxHeaders(entry, entryName, paxHeaders);
+ }
entry.writeEntryHeader(recordBuf, zipEncoding,
bigNumberMode == BIGNUMBER_STAR);
writeRecord(recordBuf);
- currBytes = 0;
+ currBytes = 0;
- if (entry.isDirectory()) {
- currSize = 0;
- } else {
- currSize = entry.getSize();
+ if (entry.isDirectory()) {
+ currSize = 0;
+ } else {
+ currSize = entry.getSize();
+ }
+ currName = entryName;
+ haveUnclosedEntry = true;
}
- currName = entryName;
- haveUnclosedEntry = true;
}
/**
- * Close an entry. This method MUST be called for all file
- * entries that contain data. The reason is that we must
- * buffer data written to the stream in order to satisfy
- * the buffer's record based writes. Thus, there may be
- * data fragments still being assembled that must be written
- * to the output stream before this entry is closed and the
- * next entry written.
+ * Close an entry. This method MUST be called for all file entries that contain data. The reason
+ * is that we must buffer data written to the stream in order to satisfy the buffer's record
+ * based writes. Thus, there may be data fragments still being assembled that must be written to
+ * the output stream before this entry is closed and the next entry written.
+ *
* @throws IOException on error
*/
@Override
@@ -387,12 +423,10 @@ public void closeArchiveEntry() throws IOException {
}
/**
- * Writes bytes to the current tar archive entry. This method
- * is aware of the current entry and will throw an exception if
- * you attempt to write bytes past the length specified for the
- * current entry. The method is also (painfully) aware of the
- * record buffering required by TarBuffer, and manages buffers
- * that are not a multiple of recordsize in length, including
+ * Writes bytes to the current tar archive entry. This method is aware of the current entry and
+ * will throw an exception if you attempt to write bytes past the length specified for the
+ * current entry. The method is also (painfully) aware of the record buffering required by
+ * TarBuffer, and manages buffers that are not a multiple of recordsize in length, including
* assembling records from small buffers.
*
* @param wBuf The buffer to write to the archive.
@@ -471,6 +505,7 @@ public void write(final byte[] wBuf, int wOffset, int numToWrite) throws IOExcep
/**
* Writes a PAX extended header with the given map as contents.
+ *
* @since 1.4
*/
void writePaxHeaders(final TarArchiveEntry entry,
@@ -484,6 +519,15 @@ void writePaxHeaders(final TarArchiveEntry entry,
TarConstants.LF_PAX_EXTENDED_HEADER_LC);
transferModTime(entry, pex);
+ final byte[] data = encodeExtendedPaxHeadersContents(headers);
+ pex.setSize(data.length);
+ putArchiveEntry(pex);
+ write(data);
+ closeArchiveEntry();
+ }
+
+ private byte[] encodeExtendedPaxHeadersContents(Map<String, String> headers)
+ throws UnsupportedEncodingException {
final StringWriter w = new StringWriter();
for (final Map.Entry<String, String> h : headers.entrySet()) {
final String key = h.getKey();
@@ -505,11 +549,7 @@ void writePaxHeaders(final TarArchiveEntry entry,
}
w.write(line);
}
- final byte[] data = w.toString().getBytes(CharsetNames.UTF_8);
- pex.setSize(data.length);
- putArchiveEntry(pex);
- write(data);
- closeArchiveEntry();
+ return w.toString().getBytes(CharsetNames.UTF_8);
}
private String stripTo7Bits(final String name) {
@@ -578,9 +618,8 @@ private void writeRecord(final byte[] record) throws IOException {
}
/**
- * Write an archive record to the archive, where the record may be
- * inside of a larger array buffer. The buffer must be "offset plus
- * record size" long.
+ * Write an archive record to the archive, where the record may be inside of a larger array
+ * buffer. The buffer must be "offset plus record size" long.
*
* @param buf The buffer containing the record data to write.
* @param offset The offset of the record data within buf.
@@ -672,16 +711,11 @@ private void failForBigNumber(final String field, final long value, final long m
/**
* Handles long file or link names according to the longFileMode setting.
*
- * I.e. if the given name is too long to be written to a plain
- * tar header then
- *
- * - it creates a pax header who's name is given by the
- * paxHeaderName parameter if longFileMode is POSIX
- * - it creates a GNU longlink entry who's type is given by
- * the linkType parameter if longFileMode is GNU
- * - it throws an exception if longFileMode is ERROR
- * - it truncates the name if longFileMode is TRUNCATE
- *
+ * I.e. if the given name is too long to be written to a plain tar header then <br>- it
+ * creates a pax header who's name is given by the paxHeaderName parameter if longFileMode is
+ * POSIX <br>- it creates a GNU longlink entry who's type is given by the linkType parameter
+ * if longFileMode is GNU <br>- it throws an exception if longFileMode is ERROR <br>- it
+ * truncates the name if longFileMode is TRUNCATE
*
* @param entry entry the name belongs to
* @param name the name to write
diff --git a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveEntryTest.java b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveEntryTest.java
index ca0b4d91dac..5e35d33c63c 100644
--- a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveEntryTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveEntryTest.java
@@ -18,16 +18,24 @@
package org.apache.commons.compress.archivers.tar;
-import static org.junit.Assert.*;
-import org.junit.Test;
-
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
import java.util.Locale;
-
import org.apache.commons.compress.AbstractTestCase;
+import org.junit.Test;
public class TarArchiveEntryTest implements TarConstants {
@@ -121,6 +129,45 @@ public void testMaxFileSize(){
t.setSize(0100000000000L);
}
+ @Test public void testExtraPaxHeaders() throws IOException {
+ ByteArrayOutputStream bos = new ByteArrayOutputStream();
+ TarArchiveOutputStream tos = new TarArchiveOutputStream(bos);
+
+ TarArchiveEntry entry = new TarArchiveEntry("./weasels");
+ entry.addPaxHeader("APACHE.mustelida","true");
+ entry.addPaxHeader("SCHILY.xattr.user.org.apache.weasels","maximum weasels");
+ entry.addPaxHeader("size","1");
+ assertEquals("extra header count",2,entry.getExtraPaxHeaders().size());
+ assertEquals("APACHE.mustelida","true",
+ entry.getExtraPaxHeader("APACHE.mustelida"));
+ assertEquals("SCHILY.xattr.user.org.apache.weasels","maximum weasels",
+ entry.getExtraPaxHeader("SCHILY.xattr.user.org.apache.weasels"));
+ assertEquals("size",entry.getSize(),1);
+
+ tos.putArchiveEntry(entry);
+ tos.write('W');
+ tos.closeArchiveEntry();
+ tos.close();
+ assertNotEquals("should have extra headers before clear",0,entry.getExtraPaxHeaders().size());
+ entry.clearExtraPaxHeaders();
+ assertEquals("extra headers should be empty after clear",0,entry.getExtraPaxHeaders().size());
+ TarArchiveInputStream tis = new TarArchiveInputStream(new ByteArrayInputStream(bos.toByteArray()));
+ entry = tis.getNextTarEntry();
+ assertNotNull("couldn't get entry",entry);
+
+ assertEquals("extra header count",2,entry.getExtraPaxHeaders().size());
+ assertEquals("APACHE.mustelida","true",
+ entry.getExtraPaxHeader("APACHE.mustelida"));
+ assertEquals("user.org.apache.weasels","maximum weasels",
+ entry.getExtraPaxHeader("SCHILY.xattr.user.org.apache.weasels"));
+
+ assertEquals('W',tis.read());
+ assertTrue("should be at end of entry",tis.read() <0);
+
+ assertNull("should be at end of file",tis.getNextTarEntry());
+ tis.close();
+ }
+
@Test
public void testLinkFlagConstructor() {
final TarArchiveEntry t = new TarArchiveEntry("/foo", LF_GNUTYPE_LONGNAME);
diff --git a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java
index 8e0c7a568e3..e73982d18f7 100644
--- a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java
@@ -23,6 +23,7 @@
import static org.apache.commons.compress.AbstractTestCase.rmdir;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@@ -312,6 +313,29 @@ public void survivesPaxHeaderWithNameEndingInSlash() throws Exception {
assertNull(is.getNextTarEntry());
}
}
+ @Test
+ public void testGetAndSetOfPaxEntry() throws Exception {
+ try (TarArchiveInputStream is = getTestStream("/COMPRESS-356.tar")) {
+ final TarArchiveEntry entry = is.getNextTarEntry();
+ assertEquals("package/package.json", entry.getName());
+ assertEquals(is.getCurrentEntry(),entry);
+ TarArchiveEntry weaselEntry = new TarArchiveEntry(entry.getName());
+ weaselEntry.setSize(entry.getSize());
+ is.setCurrentEntry(weaselEntry);
+ assertEquals(entry,is.getCurrentEntry());
+ assertFalse(entry == is.getCurrentEntry());
+ assertTrue(weaselEntry == is.getCurrentEntry());
+ try {
+ is.setCurrentEntry(null);
+ is.read();
+ fail("should abort because current entry is nulled");
+ } catch(IllegalStateException e) {
+ // expected
+ }
+ is.setCurrentEntry(entry);
+ is.read();
+ }
+ }
private TarArchiveInputStream getTestStream(final String name) {
return new TarArchiveInputStream(
diff --git a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java
index 497a7640ea0..b8d213b278b 100644
--- a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java
@@ -18,7 +18,12 @@
package org.apache.commons.compress.archivers.tar;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
@@ -27,13 +32,14 @@
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
import java.security.MessageDigest;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;
-
import org.apache.commons.compress.AbstractTestCase;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveOutputStream;
@@ -689,6 +695,46 @@ private byte[] getResourceContents(String name) throws IOException {
}
return bos.toByteArray();
}
+ @Test public void testPutGlobalPaxHeaderEntry() throws IOException {
+ ByteArrayOutputStream bos = new ByteArrayOutputStream();
+ TarArchiveOutputStream tos = new TarArchiveOutputStream(bos);
+ int pid = 73;
+ int globCount = 1;
+ byte lfPaxGlobalExtendedHeader = TarConstants.LF_PAX_GLOBAL_EXTENDED_HEADER;
+ TarArchiveEntry globalHeader = new TarArchiveEntry("/tmp/GlobalHead." + pid + "." + globCount,
+ lfPaxGlobalExtendedHeader);
+ globalHeader.addPaxHeader("SCHILLY.xattr.user.org.apache.weasels","global-weasels");
+ tos.putArchiveEntry(globalHeader);
+ TarArchiveEntry entry = new TarArchiveEntry("message");
+ String x = "If at first you don't succeed, give up";
+ entry.setSize(x.length());
+ tos.putArchiveEntry(entry);
+ tos.write(x.getBytes());
+ tos.closeArchiveEntry();
+ entry = new TarArchiveEntry("counter-message");
+ String y = "Nothing succeeds like excess";
+ entry.setSize(y.length());
+ entry.addPaxHeader("SCHILLY.xattr.user.org.apache.weasels.species","unknown");
+ tos.putArchiveEntry(entry);
+ tos.write(y.getBytes());
+ tos.closeArchiveEntry();
+ tos.close();
+ TarArchiveInputStream in = new TarArchiveInputStream(new ByteArrayInputStream(bos.toByteArray()));
+ TarArchiveEntry entryIn = in.getNextTarEntry();
+ assertNotNull(entryIn);
+ assertEquals("message",entryIn.getName());
+ assertEquals("global-weasels",entryIn.getExtraPaxHeader("SCHILLY.xattr.user.org.apache.weasels"));
+ Reader reader = new InputStreamReader(in);
+ for(int i=0;i