From dcb5b30c554024455c604a66ed26b62af30d8188 Mon Sep 17 00:00:00 2001 From: Torsten Curdt Date: Mon, 6 Oct 2008 21:49:04 +0000 Subject: [PATCH 001/189] switching trunk to redesign branch git-svn-id: https://svn.apache.org/repos/asf/commons/sandbox/compress/branches/olddesign@702285 13f79535-47bb-0310-9956-ffa450edef68 From ee34100e58f8206a7dcd42515b017e74bc665d9c Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 20 Mar 2009 12:50:18 +0000 Subject: [PATCH 002/189] revert part of the move, need to figure out why only the top level dir was copied first git-svn-id: https://svn.apache.org/repos/asf/commons/sandbox/compress/branches/olddesign@756437 13f79535-47bb-0310-9956-ffa450edef68 From 1501850680e03a385be8dffa3631c9178d7c12dc Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 20 Mar 2009 13:14:08 +0000 Subject: [PATCH 003/189] copy compress from sandbox to proper git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/olddesign@756443 13f79535-47bb-0310-9956-ffa450edef68 From b6e0764c3ece41a29de0b4d5779f90ac65af30fd Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Mon, 1 Aug 2011 03:53:02 +0000 Subject: [PATCH 004/189] create a branching point from trunk before merging the ZIP64 branch so that we have a starting point if we want to do a Java 1.4 compatible 1.2.x release git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/commons-compress-1.2.x@1152684 13f79535-47bb-0310-9956-ffa450edef68 From dd10670b414582d71d55559c8f2bcf02d5a7d543 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sun, 29 Dec 2013 06:47:08 +0000 Subject: [PATCH 005/189] Branch for Compress 2.0 experiments git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1553997 13f79535-47bb-0310-9956-ffa450edef68 From 7288ebea6e387d2be4f1d8712e9bcb7756abbf7b Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Mon, 30 Dec 2013 09:55:11 +0000 Subject: [PATCH 006/189] WIP for defining the ArchiveEntry API - 
need to think about permissions and non-File POSIX stuff git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1554174 13f79535-47bb-0310-9956-ffa450edef68 --- findbugs-exclude-filter.xml | 27 ++ pmd-ruleset.xml | 34 ++ pom.xml | 340 ++++++++++++++++++ .../compress2/archivers/ArchiveEntry.java | 71 ++++ .../archivers/ArchiveEntryParameters.java | 186 ++++++++++ .../compress2/archivers/OwnerInformation.java | 87 +++++ .../commons/compress2/archivers/package.html | 24 ++ .../archivers/ArchiveEntryParametersTest.java | 132 +++++++ 8 files changed, 901 insertions(+) create mode 100644 findbugs-exclude-filter.xml create mode 100644 pmd-ruleset.xml create mode 100644 pom.xml create mode 100644 src/main/java/org/apache/commons/compress2/archivers/ArchiveEntry.java create mode 100644 src/main/java/org/apache/commons/compress2/archivers/ArchiveEntryParameters.java create mode 100644 src/main/java/org/apache/commons/compress2/archivers/OwnerInformation.java create mode 100644 src/main/java/org/apache/commons/compress2/archivers/package.html create mode 100644 src/test/java/org/apache/commons/compress2/archivers/ArchiveEntryParametersTest.java diff --git a/findbugs-exclude-filter.xml b/findbugs-exclude-filter.xml new file mode 100644 index 00000000000..99b2e8ad150 --- /dev/null +++ b/findbugs-exclude-filter.xml @@ -0,0 +1,27 @@ + + + + + + + + diff --git a/pmd-ruleset.xml b/pmd-ruleset.xml new file mode 100644 index 00000000000..1135cf76f9f --- /dev/null +++ b/pmd-ruleset.xml @@ -0,0 +1,34 @@ + + + + Standard Ruleset but excluding the "no octal + constants" rule + + + + + + + + diff --git a/pom.xml b/pom.xml new file mode 100644 index 00000000000..09f4cfdc3d5 --- /dev/null +++ b/pom.xml @@ -0,0 +1,340 @@ + + + + 4.0.0 + + org.apache.commons + commons-parent + 32 + + + org.apache.commons + commons-compress2 + 2.0.0-SNAPSHOT + Apache Commons Compress + http://commons.apache.org/proper/commons-compress/ + + +Apache Commons Compress software 
defines an API for working with compression and archive formats. +These include: bzip2, gzip, pack200, lzma, xz and ar, cpio, jar, tar, zip, dump, 7z, arj. + + + + 1.5 + 1.5 + compress + COMPRESS + 12310904 + + 2.0.0 + RC1 + + + + jira + http://issues.apache.org/jira/browse/COMPRESS + + + + + junit + junit + 4.11 + test + + + org.tukaani + xz + 1.4 + + + + + + Torsten Curdt + tcurdt + tcurdt at apache.org + + + Stefan Bodewig + bodewig + bodewig at apache.org + + + Sebastian Bazley + sebb + sebb at apache.org + + + Christian Grobmeier + grobmeier + grobmeier at apache.org + + + Julius Davies + julius + julius at apache.org + + + Damjan Jovanovic + damjan + damjan at apache.org + + + Emmanuel Bourg + ebourg + ebourg at apache.org + + + + + + Wolfgang Glas + wolfgang.glas at ev-i.at + + + Christian Kohlschütte + ck@newsclub.de + + + Bear Giles + bgiles@coyotesong.com + + + Michael Kuss + mail at michael minus kuss.de + + + Lasse Collin + lasse.collin@tukaani.org + + + John Kodis + + + BELUGA BEHR + + + + + scm:svn:http://svn.apache.org/repos/asf/commons/proper/compress/trunk + scm:svn:https://svn.apache.org/repos/asf/commons/proper/compress/trunk + http://svn.apache.org/repos/asf/commons/proper/compress/trunk + + + + + + + maven-assembly-plugin + + + src/main/assembly/bin.xml + src/main/assembly/src.xml + + gnu + + + + maven-jar-plugin + + + + + org.apache.commons.compress2 + + + + + + org.apache.felix + maven-bundle-plugin + + + org.tukaani.xz;resolution:=optional + + + + + org.apache.maven.plugins + maven-scm-publish-plugin + + + javadocs + + + + + + + + + + + org.apache.maven.plugins + maven-changes-plugin + ${commons.changes.version} + + + %URL%/%ISSUE% + + + + + + changes-report + jira-report + + + + + + + org.codehaus.mojo + cobertura-maven-plugin + 2.6 + + + + org.apache.maven.plugins + maven-pmd-plugin + 2.5 + + 200 + ${maven.compiler.source} + + ${basedir}/pmd-ruleset.xml + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + + true + 
${maven.compiler.source} + ${commons.encoding} + ${commons.docEncoding} + true + + ${commons.javadoc.java.link} + ${commons.javadoc.javaee.link} + + + + Immutable + a + This class is immutable + + + NotThreadSafe + a + This class is not thread-safe + + + ThreadSafe + a + This class is thread-safe + + + + + + org.codehaus.mojo + findbugs-maven-plugin + 2.5.3 + + Normal + Default + ${basedir}/findbugs-exclude-filter.xml + + + + org.apache.rat + apache-rat-plugin + ${commons.rat.version} + + + + src/test/resources/** + + PROPOSAL.txt + .pmd + .gitignore + .gitattributes + + + + + + + + + + run-zipit + + + + org.apache.maven.plugins + maven-antrun-plugin + + + process-test-resources + + + + + + + run + + + + + + maven-surefire-plugin + + + **/zip/*IT.java + + + + + + + + run-tarit + + + + maven-surefire-plugin + + + **/tar/*IT.java + + + + + + + + + diff --git a/src/main/java/org/apache/commons/compress2/archivers/ArchiveEntry.java b/src/main/java/org/apache/commons/compress2/archivers/ArchiveEntry.java new file mode 100644 index 00000000000..0ee4df8dc37 --- /dev/null +++ b/src/main/java/org/apache/commons/compress2/archivers/ArchiveEntry.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.commons.compress2.archivers; + +import java.util.Date; +import java.util.Map; +import java.util.Set; + +/** + * Represents an entry of an archive. + * @Immutable + */ +public interface ArchiveEntry { + + /** Special value indicating that the size is unknown */ + static final long SIZE_UNKNOWN = -1; + + /** + * Gets the name of the entry in this archive. May refer to a file or directory or other item. + * + *

The name will use '/' as directory separator and end with a '/' if and only if the entry represents a + * directory.

+ * + * @return The name of this entry in the archive. + */ + String getName(); + + /** + * Gets the uncompressed size of this entry. May be -1 ({@link #SIZE_UNKNOWN}) if the size is unknown. + * + * @return the uncompressed size of this entry. + */ + long getSize(); + + /** + * Returns true if this entry refers to a directory. + * + * @return true if this entry refers to a directory. + */ + boolean isDirectory(); + + /** + * Gets the last modified date of this entry. + * + * @return the last modified date of this entry. + */ + Date getLastModifiedDate(); + + /** + * Provides information about the owner. + * + * @return information about the entry's owner or null if the format doesn't support owner information + */ + OwnerInformation getOwnerInformation(); +} diff --git a/src/main/java/org/apache/commons/compress2/archivers/ArchiveEntryParameters.java b/src/main/java/org/apache/commons/compress2/archivers/ArchiveEntryParameters.java new file mode 100644 index 00000000000..c892b5a2cf4 --- /dev/null +++ b/src/main/java/org/apache/commons/compress2/archivers/ArchiveEntryParameters.java @@ -0,0 +1,186 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.commons.compress2.archivers; + +import java.io.File; +import java.util.Date; + +/** + * A parameter object useful for creating new ArchiveEntries. + * @NotThreadSafe + */ +public class ArchiveEntryParameters { + + private static final char SLASH = '/'; + + private String name; + private long size = ArchiveEntry.SIZE_UNKNOWN; + private boolean dirFlag = false; + private Date lastModified; + private OwnerInformation owner; + + /** + * Creates parameters as a copy of an existing entry. + * @param otherEntry the other entry. + * @return parameters copied from the other entry + */ + public static ArchiveEntryParameters copyOf(ArchiveEntry otherEntry) { + return new ArchiveEntryParameters() + .withName(otherEntry.getName()) + .asDirectory(otherEntry.isDirectory()) + .withSize(otherEntry.getSize()) + .withLastModifiedDate(otherEntry.getLastModifiedDate()) + .withOwnerInformation(otherEntry.getOwnerInformation()); + } + + /** + * Populates parameters from a File instance. + * @param file the File to read information from + * @return parameters populated from the file instance + */ + public static ArchiveEntryParameters fromFile(File file) { + return new ArchiveEntryParameters() + .withName(file.getName()) + .asDirectory(file.isDirectory()) + .withSize(file.exists() ? file.length() : ArchiveEntry.SIZE_UNKNOWN) + .withLastModifiedDate(new Date(file.lastModified())); + } + + /** + * Sets the name. + * + *

The name will be normalized to only contain '/' separators and end with a '/' if and only if the entry + * represents a directory.

+ * + * @param name the name of the entry to build + * @return the parameters object + */ + public ArchiveEntryParameters withName(String name) { + this.name = name; + return this; + } + + /** + * Sets the size of the entry. + * @param size the size of the entry to build + * @return the parameters object + */ + public ArchiveEntryParameters withSize(long size) { + this.size = size; + return this; + } + + /** + * Marks the entry to build as a directory. + * @param b whether the entry is supposed to represent a directory + * @return the parameters object + */ + public ArchiveEntryParameters asDirectory(boolean b) { + this.dirFlag = b; + return this; + } + + /** + * Sets the last modified date of the entry. + * @param lastModified the last modified date of the entry to build + * @return the parameters object + */ + public ArchiveEntryParameters withLastModifiedDate(Date lastModified) { + this.lastModified = clone(lastModified); + return this; + } + + /** + * Sets the owner information of the entry. + * @param owner the owner information for the entry to build + * @return the parameters object + */ + public ArchiveEntryParameters withOwnerInformation(OwnerInformation owner) { + this.owner = owner; + return this; + } + + /** + * Gets the configured name. + * + *

The name will use '/' as directory separator and end with a '/' if and only if the entry represents a + * directory.

+ * + * @return the normalized name + */ + public String getName() { + return normalize(name, dirFlag); + } + + /** + * Gets the configured size or {@link #SIZE_UNKNOWN}) if the size is not configured. + * + * @return the configured size + */ + public long getSize() { + return dirFlag ? 0 : size; + } + + /** + * Returns true if parameters are configured to represent a directory. + * + * @return true if this parameters refer to a directory. + */ + public boolean isDirectory() { + return dirFlag; + } + + /** + * Gets the configured last modified date. + * + * @return the configured last modified date or null if no date was configured. + */ + public Date getLastModifiedDate() { + return clone(lastModified); + } + + /** + * Gets the configured information about the owner. + * + * @return information about the entry's owner or null if no information was configured + */ + public OwnerInformation getOwnerInformation() { + return owner; + } + + private static String normalize(String name, boolean dirFlag) { + if (name != null) { + name = name.replace('\\', SLASH); + int nameLength = name.length(); + boolean endsWithSlash = nameLength > 0 && name.charAt(nameLength - 1) == SLASH; + if (endsWithSlash != dirFlag) { + if (dirFlag) { + name += SLASH; + } else { + name = name.substring(0, nameLength - 1); + } + } + } + return name; + } + + private static Date clone(Date d) { + return d == null ? null : (Date) d.clone(); + } +} diff --git a/src/main/java/org/apache/commons/compress2/archivers/OwnerInformation.java b/src/main/java/org/apache/commons/compress2/archivers/OwnerInformation.java new file mode 100644 index 00000000000..ff6cec4c547 --- /dev/null +++ b/src/main/java/org/apache/commons/compress2/archivers/OwnerInformation.java @@ -0,0 +1,87 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress2.archivers; + +/** + * Encapsulates owner information of an archive entry. + * + *

Fields that are not supported by the archive format may be null.

+ * @Immutable + */ +public class OwnerInformation { + + private final String userName, groupName; + private final int userId, groupId; + + /** + * Creates an OwnerInformation without names. + * @param userId numerical id of the owner + * @param groupId numerical id of the group owning the entry + */ + public OwnerInformation(int userId, int groupId) { + this(null, null, userId, groupId); + } + + /** + * Creates an OwnerInformation. + * @param userName the name of the owner + * @param groupName the name of the group owning the entry + * @param userId numerical id of the owner + * @param groupId numerical id of the group owning the entry + */ + public OwnerInformation(String userName, String groupName, int userId, int groupId) { + this.userName = userName; + this.groupName = groupName; + this.userId = userId; + this.groupId = groupId; + } + + /** + * Gets the name of the owner. + * @return the name of the owner, may be null + */ + public String getUserName() { + return userName; + } + + /** + * Gets the name of the group owning the entry. + * @return the name of the group owning the entry, may be null + */ + public String getGroupName() { + return groupName; + } + + /** + * Gets numerical id of the owner. + * @return numerical id of the owner + */ + public int getUserId() { + return userId; + } + + /** + * Gets numerical id of the group owning the entry. + * @return numerical id of the group owning the entry + */ + public int getGroupId() { + return groupId; + } + +} diff --git a/src/main/java/org/apache/commons/compress2/archivers/package.html b/src/main/java/org/apache/commons/compress2/archivers/package.html new file mode 100644 index 00000000000..df1922b4a10 --- /dev/null +++ b/src/main/java/org/apache/commons/compress2/archivers/package.html @@ -0,0 +1,24 @@ + + + +

Provides a unified API and factories for dealing with archives + in different formats.

+ + diff --git a/src/test/java/org/apache/commons/compress2/archivers/ArchiveEntryParametersTest.java b/src/test/java/org/apache/commons/compress2/archivers/ArchiveEntryParametersTest.java new file mode 100644 index 00000000000..6da9a68f1c3 --- /dev/null +++ b/src/test/java/org/apache/commons/compress2/archivers/ArchiveEntryParametersTest.java @@ -0,0 +1,132 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.commons.compress2.archivers; + +import static org.junit.Assert.assertEquals; + +import java.io.File; +import java.io.IOException; +import java.util.Date; +import org.junit.Test; + +public class ArchiveEntryParametersTest { + + @Test + public void defaultValues() { + ArchiveEntryParameters p = new ArchiveEntryParameters(); + assertEquals(null, p.getName()); + assertEquals(-1, p.getSize()); + assertEquals(false, p.isDirectory()); + assertEquals(null, p.getLastModifiedDate()); + assertEquals(null, p.getOwnerInformation()); + } + + @Test + public void shouldAddTrailingSlashForDirectories() { + ArchiveEntryParameters p = new ArchiveEntryParameters() + .withName("foo").asDirectory(true); + assertEquals("foo/", p.getName()); + p.withName("foo/"); + assertEquals("foo/", p.getName()); + p.withName(""); + assertEquals("/", p.getName()); + } + + @Test + public void shouldStripTrailingSlashForNonDirectories() { + ArchiveEntryParameters p = new ArchiveEntryParameters() + .withName("foo").asDirectory(false); + assertEquals("foo", p.getName()); + p.withName("foo/"); + assertEquals("foo", p.getName()); + p.withName(""); + assertEquals("", p.getName()); + } + + @Test + public void sizeShouldBe0ForDirectories() { + ArchiveEntryParameters p = new ArchiveEntryParameters() + .asDirectory(true); + assertEquals(0, p.getSize()); + p.withSize(42); + assertEquals(0, p.getSize()); + } + + @Test + public void copyActuallyCopies() { + final Date d = new Date(); + final OwnerInformation o = new OwnerInformation(17, 4); + ArchiveEntryParameters p = ArchiveEntryParameters.copyOf(new ArchiveEntry() { + public String getName() {return "baz";} + public long getSize() {return 42;} + public boolean isDirectory() {return false;} + public Date getLastModifiedDate() {return d;} + public OwnerInformation getOwnerInformation() {return o;} + }); + assertEquals("baz", p.getName()); + assertEquals(42, p.getSize()); + assertEquals(false, p.isDirectory()); + assertEquals(d, 
p.getLastModifiedDate()); + assertEquals(o, p.getOwnerInformation()); + } + + @Test + public void fromExistingFileHasExpectedValues() throws IOException { + final Date d = new Date(); + File f = File.createTempFile("pre", "suf"); + f.deleteOnExit(); + f.setLastModified(d.getTime()); + ArchiveEntryParameters p = ArchiveEntryParameters.fromFile(f); + assert p.getName().endsWith("suf"); + assert p.getName().startsWith("pre"); + assertEquals(0, p.getSize()); + assertEquals(false, p.isDirectory()); + assertWithinTwoSecondsOf(d, p.getLastModifiedDate()); + assertEquals(null, p.getOwnerInformation()); + } + + @Test + public void fromExistingDirectoryHasExpectedValues() throws IOException { + final Date d = new Date(); + File f = File.createTempFile("pre", "suf"); + assert f.delete(); + f.mkdirs(); + f.deleteOnExit(); + f.setLastModified(d.getTime()); + ArchiveEntryParameters p = ArchiveEntryParameters.fromFile(f); + assert p.getName().endsWith("suf/"); + assert p.getName().startsWith("pre"); + assertEquals(0, p.getSize()); + assertEquals(true, p.isDirectory()); + assertWithinTwoSecondsOf(d, p.getLastModifiedDate()); + assertEquals(null, p.getOwnerInformation()); + } + + @Test + public void fromNonExistingFileHasNoSize() throws IOException { + File f = File.createTempFile("pre", "suf"); + assert f.delete(); + ArchiveEntryParameters p = ArchiveEntryParameters.fromFile(f); + assertEquals(-1, p.getSize()); + } + + private static void assertWithinTwoSecondsOf(Date expected, Date actual) { + assert Math.abs(expected.getTime() - actual.getTime()) < 2000; + } +} From 9c8e266b55558421cb69f1f979459ccb951bf92e Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 7 Jan 2014 11:52:20 +0000 Subject: [PATCH 007/189] add N&L - the LZMA notice is currently invalid but will need to come back later anyway git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1556190 13f79535-47bb-0310-9956-ffa450edef68 --- LICENSE.txt | 201 
++++++++++++++++++++++++++++++++++++++++++++++++++++ NOTICE.txt | 11 +++ 2 files changed, 212 insertions(+) create mode 100644 LICENSE.txt create mode 100644 NOTICE.txt diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 00000000000..261eeb9e9f8 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/NOTICE.txt b/NOTICE.txt new file mode 100644 index 00000000000..db0a7592a61 --- /dev/null +++ b/NOTICE.txt @@ -0,0 +1,11 @@ +Apache Commons Compress +Copyright 2002-2014 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + +The files in the package org.apache.commons.compress.archivers.sevenz +were derived from the LZMA SDK, version 9.20 (C/ and CPP/7zip/), +which has been placed in the public domain: + +"LZMA SDK is placed in the public domain." (http://www.7-zip.org/sdk.html) From 21af9e8a25cea4090d04c7dd34fe98529d75bfef Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 7 Jan 2014 12:40:12 +0000 Subject: [PATCH 008/189] Initial design idea for IO of archives, absolutely a WIP. 
I'll try to port over the AR archiver to see how much effort the switch from a stream based API to a channel based one would take. git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1556196 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress2/archivers/ArchiveInput.java | 45 +++++++++++ .../compress2/archivers/ArchiveOutput.java | 79 +++++++++++++++++++ .../archivers/spi/AbstractArchiveInput.java | 68 ++++++++++++++++ .../archivers/spi/AbstractArchiveOutput.java | 56 +++++++++++++ .../archivers/spi/SimpleArchiveEntry.java | 79 +++++++++++++++++++ 5 files changed, 327 insertions(+) create mode 100644 src/main/java/org/apache/commons/compress2/archivers/ArchiveInput.java create mode 100644 src/main/java/org/apache/commons/compress2/archivers/ArchiveOutput.java create mode 100644 src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveInput.java create mode 100644 src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveOutput.java create mode 100644 src/main/java/org/apache/commons/compress2/archivers/spi/SimpleArchiveEntry.java diff --git a/src/main/java/org/apache/commons/compress2/archivers/ArchiveInput.java b/src/main/java/org/apache/commons/compress2/archivers/ArchiveInput.java new file mode 100644 index 00000000000..ea4e9c4391d --- /dev/null +++ b/src/main/java/org/apache/commons/compress2/archivers/ArchiveInput.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress2.archivers; + +import java.nio.channels.ReadableByteChannel; + +/** + * A channel that reads {@link ArchiveEntry}s. + * @NotThreadSafe + */ +public interface ArchiveInput extends ReadableByteChannel, Iterable { + + /** + * Whether this channel is able to read the contents of the given entry. + * + *

Some archive formats support variants or details that are not supported (yet).

+ * + * @param entry + * the entry to test + * @return whether the entry's content can be read + */ + boolean canReadEntryData(A entry); + + /** + * Returns the current number of bytes read from this channel. + * @return the number of read bytes + */ + long getBytesRead(); +} diff --git a/src/main/java/org/apache/commons/compress2/archivers/ArchiveOutput.java b/src/main/java/org/apache/commons/compress2/archivers/ArchiveOutput.java new file mode 100644 index 00000000000..be0229cf769 --- /dev/null +++ b/src/main/java/org/apache/commons/compress2/archivers/ArchiveOutput.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress2.archivers; + +import java.io.IOException; +import java.nio.channels.WritableByteChannel; + +/** + * A channel that writes {@link ArchiveEntry}s. + * @NotThreadSafe + */ +public interface ArchiveOutput
extends WritableByteChannel { + + /** + * Creates an ArchiveEntry for the given parameters. + * @param params the parameters describing the archive entry. + * @return a new archive entry. + */ + A createEntry(ArchiveEntryParameters params); + + /** + * Whether this channel is able to write the contents of the given entry. + * + *

Some archive formats support variants or details that are not supported (yet).

+ * + * @param archiveEntry + * the entry to test + * @return whether the entry's content can be read + */ + boolean canWriteEntryData(A archiveEntry); + + /** + * Initializes the channel for writing a new {@link ArchiveEntry}. + * + *

The caller must then write the content to the channel and call {@link #closeEntry()} to complete the + * process.

+ * + * @param entry describes the entry + * @throws IOException + */ + void putEntry(A entry) throws IOException; + + /** + * Closes the archive entry, writing any trailer information that may be required. + * @throws IOException + */ + void closeEntry() throws IOException; + + /** + * Finishes the addition of entries to this stream, without closing it. + * + *

Additional data can be written, if the format supports it.

+ * + * @throws IOException if the user forgets to close the last entry. + */ + void finish() throws IOException; + + /** + * Returns the current number of bytes written to this channel. + * @return the number of written bytes + */ + long getBytesWritten(); +} diff --git a/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveInput.java b/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveInput.java new file mode 100644 index 00000000000..7feeae8b401 --- /dev/null +++ b/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveInput.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress2.archivers.spi; + +import org.apache.commons.compress2.archivers.ArchiveInput; +import org.apache.commons.compress2.archivers.ArchiveEntry; + +/** + * Base class implementations may use. + * @NotThreadSafe + */ +public abstract class AbstractArchiveInput implements ArchiveInput { + + /** holds the number of bytes read from this channel */ + private long bytesRead = 0; + + @Override + public long getBytesRead() { + return bytesRead; + } + + /** + * {@inheritDoc} + *

This implementation always returns true.

+ */ + @Override + public boolean canReadEntryData(A archiveEntry) { + return true; + } + + /** + * Increments the counter of already read bytes. + * Doesn't increment if the EOF has been hit (read == -1) + * + * @param read the number of bytes read + */ + protected void count(long read) { + if (read != -1) { + bytesRead = bytesRead + read; + } + } + + /** + * Decrements the counter of already read bytes. + * + * @param pushedBack the number of bytes pushed back. + */ + protected void pushedBackBytes(long pushedBack) { + bytesRead -= pushedBack; + } + +} diff --git a/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveOutput.java b/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveOutput.java new file mode 100644 index 00000000000..3eff5132f79 --- /dev/null +++ b/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveOutput.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress2.archivers.spi; + +import org.apache.commons.compress2.archivers.ArchiveOutput; +import org.apache.commons.compress2.archivers.ArchiveEntry; + +/** + * Base class implementations may use. 
+ * @NotThreadSafe + */ +public abstract class AbstractArchiveOutput implements ArchiveOutput { + + /** holds the number of bytes written to this channel */ + private long bytesWritten = 0; + + @Override + public long getBytesWritten() { + return bytesWritten; + } + + /** + * {@inheritDoc} + *

This implementation always returns true.

+ */ + @Override + public boolean canWriteEntryData(A archiveEntry) { + return true; + } + + /** + * Increments the counter of written bytes. + * + * @param written the number of bytes written + */ + protected void count(long written) { + bytesWritten += written; + } + +} diff --git a/src/main/java/org/apache/commons/compress2/archivers/spi/SimpleArchiveEntry.java b/src/main/java/org/apache/commons/compress2/archivers/spi/SimpleArchiveEntry.java new file mode 100644 index 00000000000..6e63a429827 --- /dev/null +++ b/src/main/java/org/apache/commons/compress2/archivers/spi/SimpleArchiveEntry.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress2.archivers.spi; + +import java.util.Date; + +import org.apache.commons.compress2.archivers.ArchiveEntry; +import org.apache.commons.compress2.archivers.ArchiveEntryParameters; +import org.apache.commons.compress2.archivers.OwnerInformation; + +/** + * Container for the basic information of an {@link ArchiveEntry}. 
+ * @Immutable + */ +public class SimpleArchiveEntry implements ArchiveEntry { + private final String name; + private final long size; + private final boolean dirFlag; + private final Date lastModified; + private final OwnerInformation owner; + + /** + * Creates a SimpleArchiveEntry from a parameter object. + * @param params the parameters describing the archive entry. + */ + public SimpleArchiveEntry(ArchiveEntryParameters params) { + this.name = params.getName(); + this.size = params.getSize(); + this.dirFlag = params.isDirectory(); + this.lastModified = params.getLastModifiedDate(); + this.owner = params.getOwnerInformation(); + } + + @Override + public String getName() { + return name; + } + + @Override + public long getSize() { + return size; + } + + @Override + public boolean isDirectory() { + return dirFlag; + } + + @Override + public Date getLastModifiedDate() { + return clone(lastModified); + } + + @Override + public OwnerInformation getOwnerInformation() { + return owner; + } + + // TODO second instance (after ArchiveEntryParameters) + private static Date clone(Date d) { + return d == null ? 
null : (Date) d.clone(); + } +} From a54476642f03d18715150d55f518bbc424e5aa0e Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 7 Jan 2014 13:01:58 +0000 Subject: [PATCH 009/189] implement equals git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1556201 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress2/archivers/OwnerInformation.java | 23 +++++++++++++++ .../archivers/spi/SimpleArchiveEntry.java | 29 +++++++++++++++++++ 2 files changed, 52 insertions(+) diff --git a/src/main/java/org/apache/commons/compress2/archivers/OwnerInformation.java b/src/main/java/org/apache/commons/compress2/archivers/OwnerInformation.java index ff6cec4c547..e9ec7ca5a14 100644 --- a/src/main/java/org/apache/commons/compress2/archivers/OwnerInformation.java +++ b/src/main/java/org/apache/commons/compress2/archivers/OwnerInformation.java @@ -84,4 +84,27 @@ public int getGroupId() { return groupId; } + @Override + public int hashCode() { + return 17 * groupId + userId; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + OwnerInformation other = (OwnerInformation) obj; + return userId == other.userId + && groupId == other.groupId + && equals(userName, other.userName) + && equals(groupName, other.groupName); + } + + private static boolean equals(Object o1, Object o2) { + return o1 == null ? 
o2 == null : o1.equals(o2); + } } diff --git a/src/main/java/org/apache/commons/compress2/archivers/spi/SimpleArchiveEntry.java b/src/main/java/org/apache/commons/compress2/archivers/spi/SimpleArchiveEntry.java index 6e63a429827..81f089d191e 100644 --- a/src/main/java/org/apache/commons/compress2/archivers/spi/SimpleArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress2/archivers/spi/SimpleArchiveEntry.java @@ -72,8 +72,37 @@ public OwnerInformation getOwnerInformation() { return owner; } + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + (name == null ? 0 : name.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + SimpleArchiveEntry other = (SimpleArchiveEntry) obj; + return equals(name, other.name) + && size == other.size + && dirFlag == other.dirFlag + && equals(lastModified, other.lastModified) + && equals(owner, other.owner); + } + // TODO second instance (after ArchiveEntryParameters) private static Date clone(Date d) { return d == null ? null : (Date) d.clone(); } + + // TODO second instance (after OwnerInformation) + private static boolean equals(Object o1, Object o2) { + return o1 == null ? 
o2 == null : o1.equals(o2); + } } From 963935d934b874c5482c623ffb87b4ed28d0b47d Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 7 Jan 2014 13:02:31 +0000 Subject: [PATCH 010/189] porting the ArArchiveEntry is easy - as expected git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1556202 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress2/formats/ar/ArArchiveEntry.java | 93 +++++++++++++++++++ 1 file changed, 93 insertions(+) create mode 100644 src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveEntry.java diff --git a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveEntry.java b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveEntry.java new file mode 100644 index 00000000000..9233bc4593f --- /dev/null +++ b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveEntry.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress2.formats.ar; + +import org.apache.commons.compress2.archivers.ArchiveEntryParameters; +import org.apache.commons.compress2.archivers.spi.SimpleArchiveEntry; + +/** + * Represents an archive entry in the "ar" format. 
+ * + * Each AR archive starts with "!<arch>" followed by a LF. After these 8 bytes + * the archive entries are listed. The format of an entry header is as it follows: + * + *
+ * START BYTE   END BYTE    NAME                    FORMAT      LENGTH
+ * 0            15          File name               ASCII       16
+ * 16           27          Modification timestamp  Decimal     12
+ * 28           33          Owner ID                Decimal     6
+ * 34           39          Group ID                Decimal     6
+ * 40           47          File mode               Octal       8
+ * 48           57          File size (bytes)       Decimal     10
+ * 58           59          File magic              \140\012    2
+ * 
+ * + * This specifies that an ar archive entry header contains 60 bytes. + * + * Due to the limitation of the file name length to 16 bytes GNU and + * BSD has their own variants of this format. Currently Commons + * Compress can read but not write the GNU variant. It fully supports + * the BSD variant. + * + * @see
ar man page + * + * @Immutable + */ +public class ArArchiveEntry extends SimpleArchiveEntry { + + /** The header for each entry */ + public static final String HEADER = "!\n"; + + /** The trailer for each entry */ + public static final String TRAILER = "`\012"; + + // TODO revisit once the permissions stuff is sorted out + private final int mode; + private static final int DEFAULT_MODE = 33188; // = (octal) 0100644 + + /** + * Creates an ArArchiveEntry from a parameter object. + * @param params the parameters describing the archive entry. + */ + public ArArchiveEntry(ArchiveEntryParameters params) { + this(params, DEFAULT_MODE); + } + + /** + * Creates an ArArchiveEntry from a parameter object and an octal mode. + * @param params the parameters describing the archive entry. + * @param mode the file/dir mode of the entry + */ + public ArArchiveEntry(ArchiveEntryParameters params, int mode) { + super(params); + this.mode = mode; + } + + // TODO revisit once the permissions stuff is sorted out + public int getMode() { + return mode; + } + + // TODO revisit once the permissions stuff is sorted out + @Override + public boolean equals(Object obj) { + return super.equals(obj) && mode == ((ArArchiveEntry) obj).mode; + } + +} From 7af33ebd3a8519bb749023b64f707f0753086cbe Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 7 Jan 2014 13:37:45 +0000 Subject: [PATCH 011/189] Output seems to relatively straight forward to port. No tests, though. 
git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1556213 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress2/formats/ar/ArArchiveOutput.java | 241 ++++++++++++++++++ 1 file changed, 241 insertions(+) create mode 100644 src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java diff --git a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java new file mode 100644 index 00000000000..c99c21db1d9 --- /dev/null +++ b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java @@ -0,0 +1,241 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress2.formats.ar; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.WritableByteChannel; +import java.nio.charset.StandardCharsets; + +import org.apache.commons.compress2.archivers.ArchiveEntryParameters; +import org.apache.commons.compress2.archivers.spi.AbstractArchiveOutput; + +/** + * Implements the "ar" archive format as an output stream. 
+ * + * @NotThreadSafe + */ +public class ArArchiveOutput extends AbstractArchiveOutput { + /** Fail if a long file name is required in the archive. */ + public static final int LONGFILE_ERROR = 0; + + /** BSD ar extensions are used to store long file names in the archive. */ + public static final int LONGFILE_BSD = 1; + + private final WritableByteChannel out; + private long entryOffset = 0; + private ArArchiveEntry prevEntry; + private boolean haveUnclosedEntry = false; + private int longFileMode = LONGFILE_ERROR; + + /** indicates if this archive is finished */ + private boolean finished = false; + + public ArArchiveOutput(WritableByteChannel pOut) { + this.out = pOut; + } + + /** + * Set the long file mode. + * This can be LONGFILE_ERROR(0) or LONGFILE_BSD(1). + * This specifies the treatment of long file names (names >= 16). + * Default is LONGFILE_ERROR. + * @param longFileMode the mode to use + */ + public void setLongFileMode(int longFileMode) { + this.longFileMode = longFileMode; + } + + private long writeArchiveHeader() throws IOException { + ByteBuffer header = StandardCharsets.US_ASCII.encode(ArArchiveEntry.HEADER); + int len = header.remaining(); + out.write(header); + count(len); + return len; + } + + @Override + public void closeEntry() throws IOException { + if (finished) { + throw new IOException("Stream has already been finished"); + } + if (prevEntry == null || !haveUnclosedEntry){ + throw new IOException("No current entry to close"); + } + if (entryOffset % 2 != 0) { + out.write(ByteBuffer.wrap(new byte[] { '\n' })); // Pad byte + count(1); + } + haveUnclosedEntry = false; + } + + @Override + public void putEntry(final ArArchiveEntry entry) throws IOException { + if (finished) { + throw new IOException("Stream has already been finished"); + } + + if (prevEntry == null) { + writeArchiveHeader(); + } else { + if (prevEntry.getSize() != entryOffset) { + throw new IOException("length does not match entry (" + prevEntry.getSize() + " != " + 
entryOffset); + } + + if (haveUnclosedEntry) { + closeEntry(); + } + } + + prevEntry = entry; + + writeEntryHeader(entry); + + entryOffset = 0; + haveUnclosedEntry = true; + } + + private long fill( final long pOffset, final long pNewOffset, byte pFill ) throws IOException { + final long diff = pNewOffset - pOffset; + if (diff > Integer.MAX_VALUE) { + throw new IOException("filling too much"); + } + + if (diff > 0) { + ByteBuffer b = ByteBuffer.allocate((int) diff); + for (int i = 0; i < diff; i++) { + b.put(pFill); + } + b.flip(); + write(b); + } + + return pNewOffset; + } + + private long write( final String data ) throws IOException { + return write(StandardCharsets.US_ASCII.encode(data)); + } + + private long writeEntryHeader( final ArArchiveEntry pEntry ) throws IOException { + + long offset = 0; + boolean mustAppendName = false; + + final String n = pEntry.getName(); + if (LONGFILE_ERROR == longFileMode && n.length() > 16) { + throw new IOException("filename too long, > 16 chars: "+n); + } + if (LONGFILE_BSD == longFileMode && + (n.length() > 16 || n.indexOf(" ") > -1)) { + mustAppendName = true; + // TODO re-introduce constant + offset += write("#1/" + String.valueOf(n.length())); + } else { + offset += write(n); + } + + offset = fill(offset, 16, (byte) ' '); + final String m = "" + pEntry.getLastModifiedDate(); + if (m.length() > 12) { + throw new IOException("modified too long"); + } + offset += write(m); + + offset = fill(offset, 28, (byte) ' '); + final String u = "" + pEntry.getOwnerInformation().getUserId(); + if (u.length() > 6) { + throw new IOException("userid too long"); + } + offset += write(u); + + offset = fill(offset, 34, (byte) ' '); + final String g = "" + pEntry.getOwnerInformation().getGroupId(); + if (g.length() > 6) { + throw new IOException("groupid too long"); + } + offset += write(g); + + offset = fill(offset, 40, (byte) ' '); + final String fm = "" + Integer.toString(pEntry.getMode(), 8); + if (fm.length() > 8) { + throw new 
IOException("filemode too long"); + } + offset += write(fm); + + offset = fill(offset, 48, (byte) ' '); + final String s = + String.valueOf(pEntry.getSize() + + (mustAppendName ? n.length() : 0)); + if (s.length() > 10) { + throw new IOException("size too long"); + } + offset += write(s); + + offset = fill(offset, 58, (byte) ' '); + + offset += write(ArArchiveEntry.TRAILER); + + if (mustAppendName) { + offset += write(n); + } + + return offset; + } + + @Override + public int write(ByteBuffer b) throws IOException { + int len = out.write(b); + count(len); + entryOffset += len; + return len; + } + + /** + * Calls finish if necessary, and then closes the nested Channel + */ + @Override + public void close() throws IOException { + if (!finished) { + finish(); + } + out.close(); + prevEntry = null; + } + + @Override + public ArArchiveEntry createEntry(ArchiveEntryParameters params) { + return new ArArchiveEntry(params); + } + + @Override + public void finish() throws IOException { + if (haveUnclosedEntry) { + throw new IOException("This archive contains unclosed entries."); + } else if(finished) { + throw new IOException("This archive has already been finished"); + } + finished = true; + } + + @Override + public boolean isOpen() { + return out.isOpen(); + } +} From 4b16f5e32fbfe6cdfbecd9792c07d7645f1683c4 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 7 Jan 2014 17:31:27 +0000 Subject: [PATCH 012/189] implement input, this compiles but I'm not sure it works - tests are up next git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1556286 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress2/archivers/ArchiveInput.java | 9 +- .../compress2/formats/ar/ArArchiveInput.java | 434 ++++++++++++++++++ .../compress2/formats/ar/ArArchiveOutput.java | 2 +- .../commons/compress2/formats/ar/IOUtils.java | 184 ++++++++ 4 files changed, 627 insertions(+), 2 deletions(-) create mode 100644 
src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveInput.java create mode 100644 src/main/java/org/apache/commons/compress2/formats/ar/IOUtils.java diff --git a/src/main/java/org/apache/commons/compress2/archivers/ArchiveInput.java b/src/main/java/org/apache/commons/compress2/archivers/ArchiveInput.java index ea4e9c4391d..d956587af8d 100644 --- a/src/main/java/org/apache/commons/compress2/archivers/ArchiveInput.java +++ b/src/main/java/org/apache/commons/compress2/archivers/ArchiveInput.java @@ -18,13 +18,20 @@ */ package org.apache.commons.compress2.archivers; +import java.io.IOException; import java.nio.channels.ReadableByteChannel; /** * A channel that reads {@link ArchiveEntry}s. * @NotThreadSafe */ -public interface ArchiveInput extends ReadableByteChannel, Iterable { +public interface ArchiveInput extends ReadableByteChannel { + + /** + * Obtains the next entry. + * @return the next entry or null if the end of the channel has been reached. + */ + A next() throws IOException; /** * Whether this channel is able to read the contents of the given entry. diff --git a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveInput.java b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveInput.java new file mode 100644 index 00000000000..c5f464008ce --- /dev/null +++ b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveInput.java @@ -0,0 +1,434 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress2.formats.ar; + +import java.io.EOFException; +import java.io.FilterInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; +import java.nio.channels.Channels; +import java.nio.channels.ReadableByteChannel; +import java.nio.charset.StandardCharsets; +import java.util.Date; + +import org.apache.commons.compress2.archivers.ArchiveEntryParameters; +import org.apache.commons.compress2.archivers.OwnerInformation; +import org.apache.commons.compress2.archivers.spi.AbstractArchiveInput; + +/** + * Implements the "ar" archive format. + * + * @NotThreadSafe + * + */ +public class ArArchiveInput extends AbstractArchiveInput { + + private final WrappedStream wrappedStream; + private long offset = 0; + private boolean closed; + + /* + * If next has been called, the entry metadata is stored in + * currentEntry. + */ + private ArArchiveEntry currentEntry = null; + + // Storage area for extra long names (GNU ar) + private byte[] namebuffer = null; + + /* + * The offset where the current entry started. 
-1 if no entry has been + * called + */ + private long entryOffset = -1; + + // cached buffers - must only be used locally in the class (COMPRESS-172 - reduce garbage collection) + private final byte[] NAME_BUF = new byte[16]; + private final byte[] LAST_MODIFIED_BUF = new byte[12]; + private final byte[] ID_BUF = new byte[6]; + private final byte[] FILE_MODE_BUF = new byte[8]; + private final byte[] LENGTH_BUF = new byte[10]; + + /** + * Constructs an Ar input with the referenced channel + * + * @param pInput + * the ar input + */ + public ArArchiveInput(final ReadableByteChannel pInput) { + wrappedStream = new WrappedStream(Channels.newInputStream(pInput)); + closed = false; + } + + /** + * Returns the next AR entry in this stream. + * + * @return the next AR entry. + * @throws IOException + * if the entry could not be read + */ + @Override + public ArArchiveEntry next() throws IOException { + if (currentEntry != null) { + final long entryEnd = entryOffset + currentEntry.getSize(); + IOUtils.skip(wrappedStream, entryEnd - offset); + currentEntry = null; + } + + if (offset == 0) { + final byte[] expected = StandardCharsets.US_ASCII.encode(ArArchiveEntry.HEADER).array(); + final byte[] realized = new byte[expected.length]; + final int read = IOUtils.readFully(wrappedStream, realized); + if (read != expected.length) { + throw new IOException("failed to read header. 
Occured at byte: " + getBytesRead()); + } + for (int i = 0; i < expected.length; i++) { + if (expected[i] != realized[i]) { + throw new IOException("invalid header " + toAsciiString(realized)); + } + } + } + + if (offset % 2 != 0 && wrappedStream.read() < 0) { + // hit eof + return null; + } + + if (wrappedStream.available() == 0) { + return null; + } + + IOUtils.readFully(wrappedStream, NAME_BUF); + IOUtils.readFully(wrappedStream, LAST_MODIFIED_BUF); + IOUtils.readFully(wrappedStream, ID_BUF); + int userId = asInt(ID_BUF, true); + IOUtils.readFully(wrappedStream, ID_BUF); + IOUtils.readFully(wrappedStream, FILE_MODE_BUF); + IOUtils.readFully(wrappedStream, LENGTH_BUF); + + { + final byte[] expected = StandardCharsets.US_ASCII.encode(ArArchiveEntry.TRAILER).array(); + final byte[] realized = new byte[expected.length]; + final int read = IOUtils.readFully(wrappedStream, realized); + if (read != expected.length) { + throw new IOException("failed to read entry trailer. Occured at byte: " + getBytesRead()); + } + for (int i = 0; i < expected.length; i++) { + if (expected[i] != realized[i]) { + throw new IOException("invalid entry trailer. not read the content? Occured at byte: " + getBytesRead()); + } + } + } + + entryOffset = offset; + +// GNU ar uses a '/' to mark the end of the filename; this allows for the use of spaces without the use of an extended filename. 
+ + // entry name is stored as ASCII string + String temp = toAsciiString(NAME_BUF).trim(); + if (isGNUStringTable(temp)) { // GNU extended filenames entry + currentEntry = readGNUStringTable(LENGTH_BUF); + return next(); + } + + long len = asLong(LENGTH_BUF); + if (temp.endsWith("/")) { // GNU terminator + temp = temp.substring(0, temp.length() - 1); + } else if (isGNULongName(temp)) { + int off = Integer.parseInt(temp.substring(1));// get the offset + temp = getExtendedName(off); // convert to the long name + } else if (isBSDLongName(temp)) { + temp = getBSDLongName(temp); + // entry length contained the length of the file name in + // addition to the real length of the entry. + // assume file name was ASCII, there is no "standard" otherwise + int nameLen = temp.length(); + len -= nameLen; + entryOffset += nameLen; + } + + currentEntry = new ArArchiveEntry(new ArchiveEntryParameters().withName(temp).withSize(len) + .withOwnerInformation(new OwnerInformation(userId, asInt(ID_BUF, true))) + .withLastModifiedDate(new Date(asLong(LAST_MODIFIED_BUF))), + asInt(FILE_MODE_BUF, 8)); + return currentEntry; + } + + /** + * Get an extended name from the GNU extended name buffer. + * + * @param offset pointer to entry within the buffer + * @return the extended file name; without trailing "/" if present. 
+ * @throws IOException if name not found or buffer not set up + */ + private String getExtendedName(int offset) throws IOException{ + if (namebuffer == null) { + throw new IOException("Cannot process GNU long filename as no // record was found"); + } + for(int i=offset; i < namebuffer.length; i++){ + if (namebuffer[i]=='\012'){ + if (namebuffer[i-1]=='/') { + i--; // drop trailing / + } + return toAsciiString(namebuffer, offset, i-offset); + } + } + throw new IOException("Failed to read entry: "+offset); + } + private long asLong(byte[] input) { + return Long.parseLong(toAsciiString(input).trim()); + } + + private int asInt(byte[] input) { + return asInt(input, 10, false); + } + + private int asInt(byte[] input, boolean treatBlankAsZero) { + return asInt(input, 10, treatBlankAsZero); + } + + private int asInt(byte[] input, int base) { + return asInt(input, base, false); + } + + private int asInt(byte[] input, int base, boolean treatBlankAsZero) { + String string = toAsciiString(input).trim(); + if (string.length() == 0 && treatBlankAsZero) { + return 0; + } + return Integer.parseInt(string, base); + } + + @Override + public void close() throws IOException { + if (!closed) { + closed = true; + wrappedStream.close(); + } + currentEntry = null; + } + + @Override + public boolean isOpen() { + return !closed; + } + + @Override + public int read(ByteBuffer b) throws IOException { + byte[] tmp = new byte[b.remaining()]; + int read = wrappedStream.read(tmp); + if (read > 0) { + b.put(tmp, 0, read); + } + return read; + } + + private class WrappedStream extends FilterInputStream { + private WrappedStream(InputStream i) { + super(i); + } + + private InputStream getIn() { + return in; + } + + /* + * (non-Javadoc) + * + * @see java.io.InputStream#read(byte[], int, int) + */ + @Override + public int read(byte[] b, final int off, final int len) throws IOException { + int toRead = len; + if (currentEntry != null) { + final long entryEnd = entryOffset + currentEntry.getSize(); + 
if (len > 0 && entryEnd > offset) { + toRead = (int) Math.min(len, entryEnd - offset); + } else { + return -1; + } + } + final int ret = in.read(b, off, toRead); + count(ret); + offset += ret > 0 ? ret : 0; + return ret; + } + + private final byte[] SINGLE = new byte[1]; + private static final int BYTE_MASK = 0xFF; + + @Override + public int read() throws IOException { + int num = read(SINGLE, 0, 1); + return num == -1 ? -1 : SINGLE[0] & BYTE_MASK; + } + } + + /** + * Checks if the signature matches ASCII "!<arch>" followed by a single LF + * control character + * + * @param signature + * the bytes to check + * @param length + * the number of bytes to check + * @return true, if this stream is an Ar archive stream, false otherwise + */ + public static boolean matches(byte[] signature, int length) { + // 3c21 7261 6863 0a3e + + if (length < 8) { + return false; + } + if (signature[0] != 0x21) { + return false; + } + if (signature[1] != 0x3c) { + return false; + } + if (signature[2] != 0x61) { + return false; + } + if (signature[3] != 0x72) { + return false; + } + if (signature[4] != 0x63) { + return false; + } + if (signature[5] != 0x68) { + return false; + } + if (signature[6] != 0x3e) { + return false; + } + if (signature[7] != 0x0a) { + return false; + } + + return true; + } + + static final String BSD_LONGNAME_PREFIX = "#1/"; + private static final int BSD_LONGNAME_PREFIX_LEN = + BSD_LONGNAME_PREFIX.length(); + private static final String BSD_LONGNAME_PATTERN = + "^" + BSD_LONGNAME_PREFIX + "\\d+"; + + /** + * Does the name look like it is a long name (or a name containing + * spaces) as encoded by BSD ar? + * + *

From the FreeBSD ar(5) man page:

+ *
+     * BSD   In the BSD variant, names that are shorter than 16
+     *       characters and without embedded spaces are stored
+     *       directly in this field.  If a name has an embedded
+     *       space, or if it is longer than 16 characters, then
+     *       the string "#1/" followed by the decimal represen-
+     *       tation of the length of the file name is placed in
+     *       this field. The actual file name is stored immedi-
+     *       ately after the archive header.  The content of the
+     *       archive member follows the file name.  The ar_size
+     *       field of the header (see below) will then hold the
+     *       sum of the size of the file name and the size of
+     *       the member.
+     * 
+ */ + private static boolean isBSDLongName(String name) { + return name != null && name.matches(BSD_LONGNAME_PATTERN); + } + + /** + * Reads the real name from the current stream assuming the very + * first bytes to be read are the real file name. + * + * @see #isBSDLongName + */ + private String getBSDLongName(String bsdLongName) throws IOException { + int nameLen = + Integer.parseInt(bsdLongName.substring(BSD_LONGNAME_PREFIX_LEN)); + byte[] name = new byte[nameLen]; + int read = IOUtils.readFully(wrappedStream.getIn(), name); + count(read); + if (read != nameLen) { + throw new EOFException(); + } + return toAsciiString(name); + } + + private static final String GNU_STRING_TABLE_NAME = "//"; + + /** + * Is this the name of the "Archive String Table" as used by + * SVR4/GNU to store long file names? + * + *

GNU ar stores multiple extended filenames in the data section + * of a file with the name "//", this record is referred to by + * future headers.

+ * + *

A header references an extended filename by storing a "/" + * followed by a decimal offset to the start of the filename in + * the extended filename data section.

+ * + *

The format of the "//" file itself is simply a list of the + * long filenames, each separated by one or more LF + * characters. Note that the decimal offsets are number of + * characters, not line or string number within the "//" file.

+ */ + private static boolean isGNUStringTable(String name) { + return GNU_STRING_TABLE_NAME.equals(name); + } + + /** + * Reads the GNU archive String Table. + * + * @see #isGNUStringTable + */ + private ArArchiveEntry readGNUStringTable(byte[] length) throws IOException { + int bufflen = asInt(length); // Assume length will fit in an int + namebuffer = new byte[bufflen]; + int read = wrappedStream.read(namebuffer, 0, bufflen); + if (read != bufflen){ + throw new IOException("Failed to read complete // record: expected=" + + bufflen + " read=" + read); + } + return new ArArchiveEntry(new ArchiveEntryParameters().withName(GNU_STRING_TABLE_NAME).withSize(bufflen)); + } + + private static final String GNU_LONGNAME_PATTERN = "^/\\d+"; + + /** + * Does the name look like it is a long name (or a name containing + * spaces) as encoded by SVR4/GNU ar? + * + * @see #isGNUStringTable + */ + private boolean isGNULongName(String name) { + return name != null && name.matches(GNU_LONGNAME_PATTERN); + } + + private static String toAsciiString(byte[] b) { + return toAsciiString(b, 0, b.length); + } + + private static String toAsciiString(byte[] b, int offset, int length) { + return StandardCharsets.US_ASCII.decode(ByteBuffer.wrap(b, offset, length)).toString(); + } +} diff --git a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java index c99c21db1d9..6166d093aae 100644 --- a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java +++ b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java @@ -27,7 +27,7 @@ import org.apache.commons.compress2.archivers.spi.AbstractArchiveOutput; /** - * Implements the "ar" archive format as an output stream. + * Implements the "ar" archive format. 
* * @NotThreadSafe */ diff --git a/src/main/java/org/apache/commons/compress2/formats/ar/IOUtils.java b/src/main/java/org/apache/commons/compress2/formats/ar/IOUtils.java new file mode 100644 index 00000000000..8cd577c6d96 --- /dev/null +++ b/src/main/java/org/apache/commons/compress2/formats/ar/IOUtils.java @@ -0,0 +1,184 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress2.formats.ar; + +import java.io.ByteArrayOutputStream; +import java.io.Closeable; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +/** + * THIS CLASS WILL CERTAINLY NOT STAY HERE. + * @Immutable + */ +final class IOUtils { + + /** Private constructor to prevent instantiation of this utility class. */ + private IOUtils(){ + } + + /** + * Copies the content of a InputStream into an OutputStream. + * Uses a default buffer size of 8024 bytes. 
+ * + * @param input + * the InputStream to copy + * @param output + * the target Stream + * @throws IOException + * if an error occurs + */ + public static long copy(final InputStream input, final OutputStream output) throws IOException { + return copy(input, output, 8024); + } + + /** + * Copies the content of a InputStream into an OutputStream + * + * @param input + * the InputStream to copy + * @param output + * the target Stream + * @param buffersize + * the buffer size to use + * @throws IOException + * if an error occurs + */ + public static long copy(final InputStream input, final OutputStream output, int buffersize) throws IOException { + final byte[] buffer = new byte[buffersize]; + int n = 0; + long count=0; + while (-1 != (n = input.read(buffer))) { + output.write(buffer, 0, n); + count += n; + } + return count; + } + + /** + * Skips the given number of bytes by repeatedly invoking skip on + * the given input stream if necessary. + * + *

This method will only skip less than the requested number of + * bytes if the end of the input stream has been reached.

+ * + * @param input stream to skip bytes in + * @param numToSkip the number of bytes to skip + * @return the number of bytes actually skipped + * @throws IOException + */ + public static long skip(InputStream input, long numToSkip) throws IOException { + long available = numToSkip; + while (numToSkip > 0) { + long skipped = input.skip(numToSkip); + if (skipped == 0) { + break; + } + numToSkip -= skipped; + } + return available - numToSkip; + } + + /** + * Reads as much from input as possible to fill the given array. + * + *

This method may invoke read repeatedly to fill the array and + * only read less bytes than the length of the array if the end of + * the stream has been reached.

+ * + * @param input stream to read from + * @param b buffer to fill + * @return the number of bytes actually read + * @throws IOException + */ + public static int readFully(InputStream input, byte[] b) throws IOException { + return readFully(input, b, 0, b.length); + } + + /** + * Reads as much from input as possible to fill the given array + * with the given amount of bytes. + * + *

This method may invoke read repeatedly to read the bytes and + * only read less bytes than the requested length if the end of + * the stream has been reached.

+ * + * @param input stream to read from + * @param b buffer to fill + * @param offset offset into the buffer to start filling at + * @param len of bytes to read + * @return the number of bytes actually read + * @throws IOException + * if an I/O error has occurred + */ + public static int readFully(InputStream input, byte[] b, int offset, int len) + throws IOException { + if (len < 0 || offset < 0 || len + offset > b.length) { + throw new IndexOutOfBoundsException(); + } + int count = 0, x = 0; + while (count != len) { + x = input.read(b, offset + count, len - count); + if (x == -1) { + break; + } + count += x; + } + return count; + } + + // toByteArray(InputStream) copied from: + // commons/proper/io/trunk/src/main/java/org/apache/commons/io/IOUtils.java?revision=1428941 + // January 8th, 2013 + // + // Assuming our copy() works just as well as theirs! :-) + + /** + * Gets the contents of an InputStream as a byte[]. + *

+ * This method buffers the input internally, so there is no need to use a + * BufferedInputStream. + * + * @param input the InputStream to read from + * @return the requested byte array + * @throws NullPointerException if the input is null + * @throws IOException if an I/O error occurs + * @since 1.5 + */ + public static byte[] toByteArray(final InputStream input) throws IOException { + final ByteArrayOutputStream output = new ByteArrayOutputStream(); + copy(input, output); + return output.toByteArray(); + } + + /** + * Closes the given Closeable and swallows any IOException that may occur. + * @param c Closeable to close, can be null + * @since 1.7 + */ + public static void closeQuietly(Closeable c) { + if (c != null) { + try { + c.close(); + } catch (IOException ignored) { // NOPMD + } + } + } +} From 999fb5f2fe5f974cd76a18b4e0c873022f51e700 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 7 Jan 2014 17:59:15 +0000 Subject: [PATCH 013/189] AR stores date as seconds since epoch git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1556293 13f79535-47bb-0310-9956-ffa450edef68 --- .../org/apache/commons/compress2/formats/ar/ArArchiveInput.java | 2 +- .../apache/commons/compress2/formats/ar/ArArchiveOutput.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveInput.java b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveInput.java index c5f464008ce..c795add7587 100644 --- a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveInput.java +++ b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveInput.java @@ -166,7 +166,7 @@ public ArArchiveEntry next() throws IOException { currentEntry = new ArArchiveEntry(new ArchiveEntryParameters().withName(temp).withSize(len) .withOwnerInformation(new OwnerInformation(userId, asInt(ID_BUF, true))) - .withLastModifiedDate(new Date(asLong(LAST_MODIFIED_BUF))), + 
.withLastModifiedDate(new Date(asLong(LAST_MODIFIED_BUF) * 1000)), asInt(FILE_MODE_BUF, 8)); return currentEntry; } diff --git a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java index 6166d093aae..45481ab3caa 100644 --- a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java +++ b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java @@ -152,7 +152,7 @@ private long writeEntryHeader( final ArArchiveEntry pEntry ) throws IOException } offset = fill(offset, 16, (byte) ' '); - final String m = "" + pEntry.getLastModifiedDate(); + final String m = "" + (pEntry.getLastModifiedDate().getTime() / 1000); if (m.length() > 12) { throw new IOException("modified too long"); } From ed7ec468f5e7a74cf3b9f7c42bf0c62eaa2fd3e2 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 7 Jan 2014 18:48:56 +0000 Subject: [PATCH 014/189] deal with missing owner information git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1556311 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress2/formats/ar/ArArchiveOutput.java | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java index 45481ab3caa..8a694768a4c 100644 --- a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java +++ b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java @@ -24,6 +24,7 @@ import java.nio.charset.StandardCharsets; import org.apache.commons.compress2.archivers.ArchiveEntryParameters; +import org.apache.commons.compress2.archivers.OwnerInformation; import org.apache.commons.compress2.archivers.spi.AbstractArchiveOutput; /** @@ -159,14 +160,14 @@ private long writeEntryHeader( final ArArchiveEntry pEntry ) throws 
IOException offset += write(m); offset = fill(offset, 28, (byte) ' '); - final String u = "" + pEntry.getOwnerInformation().getUserId(); + final String u = "" + getUserId(pEntry.getOwnerInformation()); if (u.length() > 6) { throw new IOException("userid too long"); } offset += write(u); offset = fill(offset, 34, (byte) ' '); - final String g = "" + pEntry.getOwnerInformation().getGroupId(); + final String g = "" + getGroupId(pEntry.getOwnerInformation()); if (g.length() > 6) { throw new IOException("groupid too long"); } @@ -238,4 +239,12 @@ public void finish() throws IOException { public boolean isOpen() { return out.isOpen(); } + + private int getUserId(OwnerInformation info) { + return info == null ? 0 : info.getUserId(); + } + + private int getGroupId(OwnerInformation info) { + return info == null ? 0 : info.getGroupId(); + } } From 3700530dd90fde8ed39299d5f715d3dcf043ff33 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 7 Jan 2014 18:50:04 +0000 Subject: [PATCH 015/189] add an incomplete testcase - it isn't testing much but I've manually verified the archive is valid and the extracted files are correct git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1556312 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress2/formats/ar/IOUtils.java | 20 +++ .../compress2/formats/ar/RoundTripTest.java | 158 ++++++++++++++++++ src/test/resources/test1.xml | 4 + src/test/resources/test2.xml | 5 + src/test/resources/test3.xml | 10 ++ src/test/resources/test4.xml | 6 + 6 files changed, 203 insertions(+) create mode 100644 src/test/java/org/apache/commons/compress2/formats/ar/RoundTripTest.java create mode 100644 src/test/resources/test1.xml create mode 100644 src/test/resources/test2.xml create mode 100644 src/test/resources/test3.xml create mode 100644 src/test/resources/test4.xml diff --git a/src/main/java/org/apache/commons/compress2/formats/ar/IOUtils.java 
b/src/main/java/org/apache/commons/compress2/formats/ar/IOUtils.java index 8cd577c6d96..a7a429e0aeb 100644 --- a/src/main/java/org/apache/commons/compress2/formats/ar/IOUtils.java +++ b/src/main/java/org/apache/commons/compress2/formats/ar/IOUtils.java @@ -23,6 +23,9 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.nio.channels.ReadableByteChannel; +import java.nio.channels.WritableByteChannel; /** * THIS CLASS WILL CERTAINLY NOT STAY HERE. @@ -72,6 +75,23 @@ public static long copy(final InputStream input, final OutputStream output, int return count; } + public static long copy(final ReadableByteChannel input, final WritableByteChannel output) throws IOException { + return copy(input, output, 4096); + } + + public static long copy(final ReadableByteChannel input, final WritableByteChannel output, int buffersize) throws IOException { + ByteBuffer buffer = ByteBuffer.allocate(buffersize); + int n = 0; + long count=0; + while (-1 != (n = input.read(buffer))) { + buffer.flip(); + output.write(buffer); + buffer.clear(); + count += n; + } + return count; + } + /** * Skips the given number of bytes by repeatedly invoking skip on * the given input stream if necessary. diff --git a/src/test/java/org/apache/commons/compress2/formats/ar/RoundTripTest.java b/src/test/java/org/apache/commons/compress2/formats/ar/RoundTripTest.java new file mode 100644 index 00000000000..d3369ce75e3 --- /dev/null +++ b/src/test/java/org/apache/commons/compress2/formats/ar/RoundTripTest.java @@ -0,0 +1,158 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress2.formats.ar; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.net.URI; +import java.net.URL; +import java.nio.channels.ReadableByteChannel; +import java.nio.channels.WritableByteChannel; +import java.util.Locale; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import org.apache.commons.compress2.archivers.ArchiveEntryParameters; + +public class RoundTripTest { + + private File dir; + + @Before + public void createTempDir() throws Exception { + dir = mkdir("dir"); + } + + @After + public void removeTempDir() throws Exception { + rmdir(dir); + } + + @Test + public void testArUnarchive() throws Exception { + final File output = new File(dir, "bla.ar"); + { + final File file1 = getFile("test1.xml"); + final File file2 = getFile("test2.xml"); + + final WritableByteChannel out = new FileOutputStream(output).getChannel(); + final ArArchiveOutput os = new ArArchiveOutput(out); + os.putEntry(os.createEntry(ArchiveEntryParameters.fromFile(file1))); + IOUtils.copy(new FileInputStream(file1).getChannel(), os); + os.closeEntry(); + + os.putEntry(os.createEntry(ArchiveEntryParameters.fromFile(file2))); + IOUtils.copy(new FileInputStream(file2).getChannel(), os); + os.closeEntry(); + os.close(); + out.close(); + } + + // UnArArchive Operation + final File input = output; + final ReadableByteChannel is = new FileInputStream(input).getChannel(); + final ArArchiveInput in 
= new ArArchiveInput(is); + final ArArchiveEntry entry = in.next(); + + File target = new File(dir, entry.getName()); + final WritableByteChannel out = new FileOutputStream(target).getChannel(); + + IOUtils.copy(in, out); + + out.close(); + in.close(); + is.close(); + } + + public static File mkdir(String name) throws IOException { + File f = File.createTempFile(name, ""); + f.delete(); + f.mkdir(); + return f; + } + + public static File getFile(String path) throws IOException { + URL url = RoundTripTest.class.getClassLoader().getResource(path); + if (url == null) { + throw new FileNotFoundException("couldn't find " + path); + } + URI uri = null; + try { + uri = url.toURI(); + } catch (java.net.URISyntaxException ex) { +// throw new IOException(ex); // JDK 1.6+ + IOException ioe = new IOException(); + ioe.initCause(ex); + throw ioe; + } + return new File(uri); + } + + public static void rmdir(File f) { + String[] s = f.list(); + if (s != null) { + for (String element : s) { + final File file = new File(f, element); + if (file.isDirectory()){ + rmdir(file); + } + boolean ok = tryHardToDelete(file); + if (!ok && file.exists()){ + System.out.println("Failed to delete "+element+" in "+f.getPath()); + } + } + } + tryHardToDelete(f); // safer to delete and check + if (f.exists()){ + throw new Error("Failed to delete "+f.getPath()); + } + } + + private static final boolean ON_WINDOWS = + System.getProperty("os.name").toLowerCase(Locale.ENGLISH) + .indexOf("windows") > -1; + + /** + * Accommodate Windows bug encountered in both Sun and IBM JDKs. + * Others possible. If the delete does not work, call System.gc(), + * wait a little and try again. 
+ * + * @return whether deletion was successful + * @since Stolen from FileUtils in Ant 1.8.0 + */ + public static boolean tryHardToDelete(File f) { + if (f != null && f.exists() && !f.delete()) { + if (ON_WINDOWS) { + System.gc(); + } + try { + Thread.sleep(10); + } catch (InterruptedException ex) { + // Ignore Exception + } + return f.delete(); + } + return true; + } +} diff --git a/src/test/resources/test1.xml b/src/test/resources/test1.xml new file mode 100644 index 00000000000..3690c19f1dd --- /dev/null +++ b/src/test/resources/test1.xml @@ -0,0 +1,4 @@ + + + + diff --git a/src/test/resources/test2.xml b/src/test/resources/test2.xml new file mode 100644 index 00000000000..dc9ddd02113 --- /dev/null +++ b/src/test/resources/test2.xml @@ -0,0 +1,5 @@ + + + + + diff --git a/src/test/resources/test3.xml b/src/test/resources/test3.xml new file mode 100644 index 00000000000..234c5b73f70 --- /dev/null +++ b/src/test/resources/test3.xml @@ -0,0 +1,10 @@ + + + +Lorem ipsum dolor sit amet, consetetur sadipscing elitr, +sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, +sed diam voluptua. +At vero eos et accusam et justo duo dolores et ea rebum. +Stet clita kasd gubergren, no sea takimata sanctus est +Lorem ipsum dolor sit amet. 
+ diff --git a/src/test/resources/test4.xml b/src/test/resources/test4.xml new file mode 100644 index 00000000000..132c9d1c4f7 --- /dev/null +++ b/src/test/resources/test4.xml @@ -0,0 +1,6 @@ + + + +German Umlauts: ÜÄÖß +Stored as UTF-8 (Mac OSX 10.4.x) + From 1f9eff23dd3ee806f42d33fd3a8ad69142b3ce2e Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Wed, 8 Jan 2014 12:38:57 +0000 Subject: [PATCH 016/189] Random access archive input similar to what ZipFile provides git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1556516 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/RandomAccessArchiveInput.java | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 src/main/java/org/apache/commons/compress2/archivers/RandomAccessArchiveInput.java diff --git a/src/main/java/org/apache/commons/compress2/archivers/RandomAccessArchiveInput.java b/src/main/java/org/apache/commons/compress2/archivers/RandomAccessArchiveInput.java new file mode 100644 index 00000000000..a970e93bb8c --- /dev/null +++ b/src/main/java/org/apache/commons/compress2/archivers/RandomAccessArchiveInput.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.commons.compress2.archivers; + +import java.nio.channels.ReadableByteChannel; + +/** + * ArchiveInput that provides random access to all entries. + * @NotThreadSafe + */ +public interface RandomAccessArchiveInput extends ArchiveInput, Iterable { + + /** + * Obtains all entries of a given name. + * @param name the name of the entries to look for + * @return all entries matching that name, will never be null. + */ + Iterable getEntries(String name); + + /** + * Obtains a channel the contents of given entry can be read from. + * @param entry the entry to read the contents of + * @return a channel to read the entry's contents from or null if the entry is not part of this archive. + */ + ReadableByteChannel getChannel(A entry); +} From 992ee5685977a7675f97053914c8e1861ab2427b Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Wed, 8 Jan 2014 12:55:56 +0000 Subject: [PATCH 017/189] note to self git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1556518 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress2/archivers/RandomAccessArchiveInput.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/org/apache/commons/compress2/archivers/RandomAccessArchiveInput.java b/src/main/java/org/apache/commons/compress2/archivers/RandomAccessArchiveInput.java index a970e93bb8c..9d994616541 100644 --- a/src/main/java/org/apache/commons/compress2/archivers/RandomAccessArchiveInput.java +++ b/src/main/java/org/apache/commons/compress2/archivers/RandomAccessArchiveInput.java @@ -38,5 +38,6 @@ public interface RandomAccessArchiveInput extends Archiv * @param entry the entry to read the contents of * @return a channel to read the entry's contents from or null if the entry is not part of this archive. */ + // TODO SeekableByteChannel if using Java 7? 
ReadableByteChannel getChannel(A entry); } From f51552902223e2b7ba2517e51e7cd0883c1ef47b Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Wed, 8 Jan 2014 12:56:37 +0000 Subject: [PATCH 018/189] Archive(In|Out)put isn't really a Channel itself git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1556519 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress2/archivers/ArchiveInput.java | 10 ++++- .../compress2/archivers/ArchiveOutput.java | 6 ++- .../compress2/formats/ar/ArArchiveInput.java | 14 +------ .../compress2/formats/ar/ArArchiveOutput.java | 40 +++++++++++-------- .../compress2/formats/ar/RoundTripTest.java | 10 ++--- 5 files changed, 44 insertions(+), 36 deletions(-) diff --git a/src/main/java/org/apache/commons/compress2/archivers/ArchiveInput.java b/src/main/java/org/apache/commons/compress2/archivers/ArchiveInput.java index d956587af8d..3d26756d575 100644 --- a/src/main/java/org/apache/commons/compress2/archivers/ArchiveInput.java +++ b/src/main/java/org/apache/commons/compress2/archivers/ArchiveInput.java @@ -18,6 +18,7 @@ */ package org.apache.commons.compress2.archivers; +import java.io.Closeable; import java.io.IOException; import java.nio.channels.ReadableByteChannel; @@ -25,7 +26,7 @@ * A channel that reads {@link ArchiveEntry}s. * @NotThreadSafe */ -public interface ArchiveInput extends ReadableByteChannel { +public interface ArchiveInput extends Closeable { /** * Obtains the next entry. @@ -33,6 +34,13 @@ public interface ArchiveInput extends ReadableByteChanne */ A next() throws IOException; + /** + * Obtains a channel the contents of the current entry can be read from. + * @return a channel to read the entry's contents from + */ + // TODO use some sort of Tuple or Map.Entry and combine with next() ? + ReadableByteChannel getChannel(); + /** * Whether this channel is able to read the contents of the given entry. 
* diff --git a/src/main/java/org/apache/commons/compress2/archivers/ArchiveOutput.java b/src/main/java/org/apache/commons/compress2/archivers/ArchiveOutput.java index be0229cf769..386bf134b48 100644 --- a/src/main/java/org/apache/commons/compress2/archivers/ArchiveOutput.java +++ b/src/main/java/org/apache/commons/compress2/archivers/ArchiveOutput.java @@ -18,6 +18,7 @@ */ package org.apache.commons.compress2.archivers; +import java.io.Closeable; import java.io.IOException; import java.nio.channels.WritableByteChannel; @@ -25,7 +26,7 @@ * A channel that writes {@link ArchiveEntry}s. * @NotThreadSafe */ -public interface ArchiveOutput extends WritableByteChannel { +public interface ArchiveOutput extends Closeable { /** * Creates an ArchiveEntry for the given parameters. @@ -52,9 +53,10 @@ public interface ArchiveOutput extends WritableByteChann * process.

* * @param entry describes the entry + * @return a channel to write the entry's contents to * @throws IOException */ - void putEntry(A entry) throws IOException; + WritableByteChannel putEntry(A entry) throws IOException; /** * Closes the archive entry, writing any trailer information that may be required. diff --git a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveInput.java b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveInput.java index c795add7587..c80e5326244 100644 --- a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveInput.java +++ b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveInput.java @@ -226,18 +226,8 @@ public void close() throws IOException { } @Override - public boolean isOpen() { - return !closed; - } - - @Override - public int read(ByteBuffer b) throws IOException { - byte[] tmp = new byte[b.remaining()]; - int read = wrappedStream.read(tmp); - if (read > 0) { - b.put(tmp, 0, read); - } - return read; + public ReadableByteChannel getChannel() { + return Channels.newChannel(wrappedStream); } private class WrappedStream extends FilterInputStream { diff --git a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java index 8a694768a4c..01ff6de04f9 100644 --- a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java +++ b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java @@ -87,7 +87,7 @@ public void closeEntry() throws IOException { } @Override - public void putEntry(final ArArchiveEntry entry) throws IOException { + public WritableByteChannel putEntry(final ArArchiveEntry entry) throws IOException { if (finished) { throw new IOException("Stream has already been finished"); } @@ -110,6 +110,7 @@ public void putEntry(final ArArchiveEntry entry) throws IOException { entryOffset = 0; haveUnclosedEntry = true; + return new CurrentChannel(); } private 
long fill( final long pOffset, final long pNewOffset, byte pFill ) throws IOException { @@ -124,14 +125,14 @@ private long fill( final long pOffset, final long pNewOffset, byte pFill ) throw b.put(pFill); } b.flip(); - write(b); + new CurrentChannel().write(b); } return pNewOffset; } private long write( final String data ) throws IOException { - return write(StandardCharsets.US_ASCII.encode(data)); + return new CurrentChannel().write(StandardCharsets.US_ASCII.encode(data)); } private long writeEntryHeader( final ArArchiveEntry pEntry ) throws IOException { @@ -200,14 +201,6 @@ private long writeEntryHeader( final ArArchiveEntry pEntry ) throws IOException return offset; } - @Override - public int write(ByteBuffer b) throws IOException { - int len = out.write(b); - count(len); - entryOffset += len; - return len; - } - /** * Calls finish if necessary, and then closes the nested Channel */ @@ -235,11 +228,6 @@ public void finish() throws IOException { finished = true; } - @Override - public boolean isOpen() { - return out.isOpen(); - } - private int getUserId(OwnerInformation info) { return info == null ? 0 : info.getUserId(); } @@ -247,4 +235,24 @@ private int getUserId(OwnerInformation info) { private int getGroupId(OwnerInformation info) { return info == null ? 
0 : info.getGroupId(); } + + private class CurrentChannel implements WritableByteChannel { + @Override + public int write(ByteBuffer b) throws IOException { + int len = out.write(b); + count(len); + entryOffset += len; + return len; + } + + @Override + public boolean isOpen() { + return out.isOpen(); + } + + @Override + public void close() { + // NO-OP + } + } } diff --git a/src/test/java/org/apache/commons/compress2/formats/ar/RoundTripTest.java b/src/test/java/org/apache/commons/compress2/formats/ar/RoundTripTest.java index d3369ce75e3..74c7f369dfb 100644 --- a/src/test/java/org/apache/commons/compress2/formats/ar/RoundTripTest.java +++ b/src/test/java/org/apache/commons/compress2/formats/ar/RoundTripTest.java @@ -58,12 +58,12 @@ public void testArUnarchive() throws Exception { final WritableByteChannel out = new FileOutputStream(output).getChannel(); final ArArchiveOutput os = new ArArchiveOutput(out); - os.putEntry(os.createEntry(ArchiveEntryParameters.fromFile(file1))); - IOUtils.copy(new FileInputStream(file1).getChannel(), os); + IOUtils.copy(new FileInputStream(file1).getChannel(), + os.putEntry(os.createEntry(ArchiveEntryParameters.fromFile(file1)))); os.closeEntry(); - os.putEntry(os.createEntry(ArchiveEntryParameters.fromFile(file2))); - IOUtils.copy(new FileInputStream(file2).getChannel(), os); + IOUtils.copy(new FileInputStream(file2).getChannel(), + os.putEntry(os.createEntry(ArchiveEntryParameters.fromFile(file2)))); os.closeEntry(); os.close(); out.close(); @@ -78,7 +78,7 @@ public void testArUnarchive() throws Exception { File target = new File(dir, entry.getName()); final WritableByteChannel out = new FileOutputStream(target).getChannel(); - IOUtils.copy(in, out); + IOUtils.copy(in.getChannel(), out); out.close(); in.close(); From 2f0a1ad1e165b3bf410538fd647dd508b1897ea0 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Wed, 8 Jan 2014 12:58:34 +0000 Subject: [PATCH 019/189] putEntry closes the previous entry, why shouldn't finish do so as well? 
git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1556521 13f79535-47bb-0310-9956-ffa450edef68 --- .../org/apache/commons/compress2/archivers/ArchiveOutput.java | 2 +- .../apache/commons/compress2/formats/ar/ArArchiveOutput.java | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/apache/commons/compress2/archivers/ArchiveOutput.java b/src/main/java/org/apache/commons/compress2/archivers/ArchiveOutput.java index 386bf134b48..112b6baaf2a 100644 --- a/src/main/java/org/apache/commons/compress2/archivers/ArchiveOutput.java +++ b/src/main/java/org/apache/commons/compress2/archivers/ArchiveOutput.java @@ -69,7 +69,7 @@ public interface ArchiveOutput extends Closeable { * *

Additional data can be written, if the format supports it.

* - * @throws IOException if the user forgets to close the last entry. + * @throws IOException */ void finish() throws IOException; diff --git a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java index 01ff6de04f9..4525fc5d6bd 100644 --- a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java +++ b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveOutput.java @@ -221,8 +221,8 @@ public ArArchiveEntry createEntry(ArchiveEntryParameters params) { @Override public void finish() throws IOException { if (haveUnclosedEntry) { - throw new IOException("This archive contains unclosed entries."); - } else if(finished) { + closeEntry(); + } else if (finished) { throw new IOException("This archive has already been finished"); } finished = true; From 5de3b4f79f9f03b5aba8f912637139bcdcc10456 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Wed, 8 Jan 2014 15:40:32 +0000 Subject: [PATCH 020/189] they are no channels (anymore) git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1556559 13f79535-47bb-0310-9956-ffa450edef68 --- .../org/apache/commons/compress2/archivers/ArchiveInput.java | 2 +- .../org/apache/commons/compress2/archivers/ArchiveOutput.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/apache/commons/compress2/archivers/ArchiveInput.java b/src/main/java/org/apache/commons/compress2/archivers/ArchiveInput.java index 3d26756d575..cdd0ed905cc 100644 --- a/src/main/java/org/apache/commons/compress2/archivers/ArchiveInput.java +++ b/src/main/java/org/apache/commons/compress2/archivers/ArchiveInput.java @@ -23,7 +23,7 @@ import java.nio.channels.ReadableByteChannel; /** - * A channel that reads {@link ArchiveEntry}s. + * Reads {@link ArchiveEntry}s. 
* @NotThreadSafe */ public interface ArchiveInput extends Closeable { diff --git a/src/main/java/org/apache/commons/compress2/archivers/ArchiveOutput.java b/src/main/java/org/apache/commons/compress2/archivers/ArchiveOutput.java index 112b6baaf2a..c7d44c1b73b 100644 --- a/src/main/java/org/apache/commons/compress2/archivers/ArchiveOutput.java +++ b/src/main/java/org/apache/commons/compress2/archivers/ArchiveOutput.java @@ -23,7 +23,7 @@ import java.nio.channels.WritableByteChannel; /** - * A channel that writes {@link ArchiveEntry}s. + * Writes {@link ArchiveEntry}s. * @NotThreadSafe */ public interface ArchiveOutput extends Closeable { From 7bcfee5a0caa4c09af604eb583c3bf54e8153674 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 17 Jan 2014 12:54:41 +0000 Subject: [PATCH 021/189] first cut a self-describing archive formats git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1559094 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress2/archivers/ArchiveFormat.java | 135 ++++++++++++++++ .../archivers/spi/AbstractArchiveFormat.java | 146 ++++++++++++++++++ 2 files changed, 281 insertions(+) create mode 100644 src/main/java/org/apache/commons/compress2/archivers/ArchiveFormat.java create mode 100644 src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java diff --git a/src/main/java/org/apache/commons/compress2/archivers/ArchiveFormat.java b/src/main/java/org/apache/commons/compress2/archivers/ArchiveFormat.java new file mode 100644 index 00000000000..e841b328031 --- /dev/null +++ b/src/main/java/org/apache/commons/compress2/archivers/ArchiveFormat.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress2.archivers; + +import java.nio.ByteBuffer; +import java.nio.channels.Channel; +import java.nio.charset.Charset; +import java.io.File; +import java.io.IOException; + +/** + * Describes a given archive format and works as factory and content-probe at the same time. + * @Immutable + */ +public interface ArchiveFormat { + /** + * The name by which this format is known. + * @return the name by which this format is known + */ + String getName(); + + /** + * Does the format support writing? + * @return whether writing is supported + */ + boolean supportsWriting(); + /** + * Does the format support random access reading? + * @return whether random access reading is supported + */ + boolean supportsRandomAccessInput(); + /** + * Does the format support writing to arbitrary non-seekable channels? + * @return whether writing to arbitrary non-seekable channels is supported + */ + boolean supportsWritingToChannels(); + /** + * Does the format support reading from arbitrary non-seekable channels? + * @return whether writing to arbitrary non-seekable channels is supported + */ + boolean supportsReadingFromChannels(); + + /** + * Does the format support content-based detection? + * @return whether the format supports content-based detection. 
+ */ + boolean supportsAutoDetection(); + /** + * If this format supports content-based detection, how many bytes does it need to read to know a channel is + * readable by this format? + * @return the minimal number of bytes needed + * @throws UnsupportedOperationException if this format doesn't support content based detection. + */ + int getNumberOfBytesRequiredForAutodetection() throws UnsupportedOperationException; + /** + * Lists formats that must not be consulted before this format during content-based detection. + * + *

For example JAR would return ZIP here so it first has a chance to claim the archive for itself.

+ * + * @return the names of the formats (as returned by {@link #getName}) that must not be consulted before this format during content-based detection. + */ + Iterable formatsToConsultLater(); + /** + * Verifies the given input is readable by this format. + * @param probe a buffer holding at least {@link #getNumberOfBytesRequiredForAutodetection} bytes + * @return whether the input is readable by this format + * @throws UnsupportedOperationException if this format doesn't support content based detection. + */ + boolean matches(ByteBuffer probe) throws UnsupportedOperationException; + + /** + * Reads an archive assuming the given charset for entry names. + * @param channel the channel to read from + * @param charset the charset used for encoding the entry names. + * @throws IOException + * @throws UnsupportedOperationException if this format cannot read from non-seekable channels. + */ + ArchiveInput readFrom(Channel channel, Charset charset) throws IOException, UnsupportedOperationException; + /** + * Reads an archive assuming the given charset for entry names. + * @param file the file to read from + * @param charset the charset used for encoding the entry names. + * @throws IOException + */ + // TODO go for SeekableByteChannel rather than File when embracing Java7? + // TODO use Path rather than File? + ArchiveInput readFrom(File file, Charset charset) throws IOException; + /** + * Provides random access to an archive assuming the given charset for entry names. + * @param file the file to read from + * @param charset the charset used for encoding the entry names. + * @throws IOException + * @throws UnsupportedOperationException if this format doesn't support random access + */ + // TODO go for SeekableByteChannel rather than File when embracing Java7? + // TODO use Path rather than File? 
+ RandomAccessArchiveInput readWithRandomAccessFrom(File file, Charset charset) + throws IOException, UnsupportedOperationException; + + /** + * Writes an archive using the given charset for entry names. + * @param channel the channel to write to + * @param charset the charset to use for encoding the entry names. + * @throws IOException + * @throws UnsupportedOperationException if this format cannot write to non-seekable channels or doesn't support + * writing at all. + */ + ArchiveOutput writeTo(Channel channel, Charset charset) throws IOException, UnsupportedOperationException; + /** + * Writes an archive using the given charset for entry names. + * @param file the file to write to + * @param charset the charset to use for encoding the entry names. + * @throws IOException + * @throws UnsupportedOperationException if this format doesn't support writing + */ + // TODO go for SeekableByteChannel rather than File when embracing Java7? + // TODO use Path rather than File? + ArchiveOutput writeTo(File file, Charset charset) throws IOException, UnsupportedOperationException; +} diff --git a/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java b/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java new file mode 100644 index 00000000000..7db383961a1 --- /dev/null +++ b/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java @@ -0,0 +1,146 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress2.archivers.spi; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.Channel; +//import java.nio.channels.FileChannel; +import java.nio.charset.Charset; +import java.util.Collections; +import org.apache.commons.compress2.archivers.ArchiveFormat; +import org.apache.commons.compress2.archivers.ArchiveInput; +import org.apache.commons.compress2.archivers.ArchiveOutput; +import org.apache.commons.compress2.archivers.RandomAccessArchiveInput; + +/** + * Base class implementations may use. + * @Immutable + */ +public abstract class AbstractArchiveFormat implements ArchiveFormat { + + /** + * {@inheritDoc} + *

This implementation always returns false.

+ */ + @Override + public boolean supportsWriting() { return false; } + /** + * {@inheritDoc} + *

This implementation always returns false.

+ */ + @Override + public boolean supportsRandomAccessInput() { return false; } + /** + * {@inheritDoc} + *

This implementation always returns false.

+ */ + @Override + public boolean supportsWritingToChannels() { return false; } + /** + * {@inheritDoc} + *

This implementation always returns false.

+ */ + @Override + public boolean supportsReadingFromChannels() { return false; } + + /** + * {@inheritDoc} + *

This implementation always returns false.

+ */ + @Override + public boolean supportsAutoDetection() { return false; } + /** + * {@inheritDoc} + *

This implementation always throws an UnsupportedOperationException.

+ */ + @Override + public int getNumberOfBytesRequiredForAutodetection() throws UnsupportedOperationException { + throw new UnsupportedOperationException("this format doesn't support content-based detection"); + } + /** + * {@inheritDoc} + *

This implementation always returns an empty collection.

+ */ + @Override + public Iterable formatsToConsultLater() { + return Collections.emptyList(); + } + /** + * {@inheritDoc} + *

This implementation always throws an UnsupportedOperationException.

+ */ + @Override + public boolean matches(ByteBuffer probe) throws UnsupportedOperationException { + throw new UnsupportedOperationException("this format doesn't support content-based detection"); + } + + /** + * {@inheritDoc} + *

This implementation always throws an UnsupportedOperationException.

+ */ + @Override + public ArchiveInput readFrom(Channel channel, Charset charset) throws IOException, UnsupportedOperationException { + throw new UnsupportedOperationException("this format cannot read from non-seekable channels"); + } + /** + * {@inheritDoc} + *

This implementation delegates to {@link #readWithRandomAccessFrom} if random access is supported or {@link + * #readFrom(File, Charset)} otherwise.

+ */ + @Override + public ArchiveInput readFrom(File file, Charset charset) throws IOException { + if (supportsRandomAccessInput()) { + return readWithRandomAccessFrom(file, charset); + } + + // TODO use FileChannel.open in Java7 + return readFrom(new FileInputStream(file).getChannel(), charset); + } + /** + * {@inheritDoc} + *

This implementation always throws an UnsupportedOperationException.

+ */ + @Override + public RandomAccessArchiveInput readWithRandomAccessFrom(File file, Charset charset) + throws IOException, UnsupportedOperationException { + throw new UnsupportedOperationException("this format cannot doesn't support random access"); + } + + /** + * {@inheritDoc} + *

This implementation always throws an UnsupportedOperationException.

+ */ + @Override + public ArchiveOutput writeTo(Channel channel, Charset charset) throws IOException, UnsupportedOperationException { + throw new UnsupportedOperationException("this format is read-only"); + } + /** + * {@inheritDoc} + *

This implementation always delegates to {@link #writeTo(Channel, Charset)}.

+ */ + @Override + public ArchiveOutput writeTo(File file, Charset charset) throws IOException, UnsupportedOperationException { + // TODO use FileChannel.open in Java7 + return writeTo(new FileOutputStream(file).getChannel(), charset); + } +} From 9ef58d2ae275c5b1d51e286dc5acf4f065e5af37 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sun, 19 Jan 2014 06:30:31 +0000 Subject: [PATCH 022/189] ArchiveInput/Output is parameterized, so must be ArchiveFormat git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1559476 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress2/archivers/ArchiveFormat.java | 12 ++++++------ .../archivers/spi/AbstractArchiveFormat.java | 13 +++++++------ 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/src/main/java/org/apache/commons/compress2/archivers/ArchiveFormat.java b/src/main/java/org/apache/commons/compress2/archivers/ArchiveFormat.java index e841b328031..29c45157200 100644 --- a/src/main/java/org/apache/commons/compress2/archivers/ArchiveFormat.java +++ b/src/main/java/org/apache/commons/compress2/archivers/ArchiveFormat.java @@ -28,7 +28,7 @@ * Describes a given archive format and works as factory and content-probe at the same time. * @Immutable */ -public interface ArchiveFormat { +public interface ArchiveFormat
{ /** * The name by which this format is known. * @return the name by which this format is known @@ -91,7 +91,7 @@ public interface ArchiveFormat { * @throws IOException * @throws UnsupportedOperationException if this format cannot read from non-seekable channels. */ - ArchiveInput readFrom(Channel channel, Charset charset) throws IOException, UnsupportedOperationException; + ArchiveInput readFrom(Channel channel, Charset charset) throws IOException, UnsupportedOperationException; /** * Reads an archive assuming the given charset for entry names. * @param file the file to read from @@ -100,7 +100,7 @@ public interface ArchiveFormat { */ // TODO go for SeekableByteChannel rather than File when embracing Java7? // TODO use Path rather than File? - ArchiveInput readFrom(File file, Charset charset) throws IOException; + ArchiveInput readFrom(File file, Charset charset) throws IOException; /** * Provides random access to an archive assuming the given charset for entry names. * @param file the file to read from @@ -110,7 +110,7 @@ public interface ArchiveFormat { */ // TODO go for SeekableByteChannel rather than File when embracing Java7? // TODO use Path rather than File? - RandomAccessArchiveInput readWithRandomAccessFrom(File file, Charset charset) + RandomAccessArchiveInput readWithRandomAccessFrom(File file, Charset charset) throws IOException, UnsupportedOperationException; /** @@ -121,7 +121,7 @@ RandomAccessArchiveInput readWithRandomAccessFrom(File file, Charset charset) * @throws UnsupportedOperationException if this format cannot write to non-seekable channels or doesn't support * writing at all. */ - ArchiveOutput writeTo(Channel channel, Charset charset) throws IOException, UnsupportedOperationException; + ArchiveOutput writeTo(Channel channel, Charset charset) throws IOException, UnsupportedOperationException; /** * Writes an archive using the given charset for entry names. 
* @param file the file to write to @@ -131,5 +131,5 @@ RandomAccessArchiveInput readWithRandomAccessFrom(File file, Charset charset) */ // TODO go for SeekableByteChannel rather than File when embracing Java7? // TODO use Path rather than File? - ArchiveOutput writeTo(File file, Charset charset) throws IOException, UnsupportedOperationException; + ArchiveOutput writeTo(File file, Charset charset) throws IOException, UnsupportedOperationException; } diff --git a/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java b/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java index 7db383961a1..12dc6c2f88c 100644 --- a/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java +++ b/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java @@ -27,6 +27,7 @@ //import java.nio.channels.FileChannel; import java.nio.charset.Charset; import java.util.Collections; +import org.apache.commons.compress2.archivers.ArchiveEntry; import org.apache.commons.compress2.archivers.ArchiveFormat; import org.apache.commons.compress2.archivers.ArchiveInput; import org.apache.commons.compress2.archivers.ArchiveOutput; @@ -36,7 +37,7 @@ * Base class implementations may use. * @Immutable */ -public abstract class AbstractArchiveFormat implements ArchiveFormat { +public abstract class AbstractArchiveFormat implements ArchiveFormat { /** * {@inheritDoc} @@ -99,7 +100,7 @@ public boolean matches(ByteBuffer probe) throws UnsupportedOperationException { *

This implementation always throws an UnsupportedOperationException.

*/ @Override - public ArchiveInput readFrom(Channel channel, Charset charset) throws IOException, UnsupportedOperationException { + public ArchiveInput
readFrom(Channel channel, Charset charset) throws IOException, UnsupportedOperationException { throw new UnsupportedOperationException("this format cannot read from non-seekable channels"); } /** @@ -108,7 +109,7 @@ public ArchiveInput readFrom(Channel channel, Charset charset) throws IOExceptio * #readFrom(File, Charset)} otherwise.

*/ @Override - public ArchiveInput readFrom(File file, Charset charset) throws IOException { + public ArchiveInput
readFrom(File file, Charset charset) throws IOException { if (supportsRandomAccessInput()) { return readWithRandomAccessFrom(file, charset); } @@ -121,7 +122,7 @@ public ArchiveInput readFrom(File file, Charset charset) throws IOException { *

This implementation always throws an UnsupportedOperationException.

*/ @Override - public RandomAccessArchiveInput readWithRandomAccessFrom(File file, Charset charset) + public RandomAccessArchiveInput
readWithRandomAccessFrom(File file, Charset charset) throws IOException, UnsupportedOperationException { throw new UnsupportedOperationException("this format cannot doesn't support random access"); } @@ -131,7 +132,7 @@ public RandomAccessArchiveInput readWithRandomAccessFrom(File file, Charset char *

This implementation always throws an UnsupportedOperationException.

*/ @Override - public ArchiveOutput writeTo(Channel channel, Charset charset) throws IOException, UnsupportedOperationException { + public ArchiveOutput
writeTo(Channel channel, Charset charset) throws IOException, UnsupportedOperationException { throw new UnsupportedOperationException("this format is read-only"); } /** @@ -139,7 +140,7 @@ public ArchiveOutput writeTo(Channel channel, Charset charset) throws IOExceptio *

This implementation always delegates to {@link #writeTo(Channel, Charset)}.

*/ @Override - public ArchiveOutput writeTo(File file, Charset charset) throws IOException, UnsupportedOperationException { + public ArchiveOutput
writeTo(File file, Charset charset) throws IOException, UnsupportedOperationException { // TODO use FileChannel.open in Java7 return writeTo(new FileOutputStream(file).getChannel(), charset); } From ac5b137b67c15e3814b2b1a6e18ac89a1a14872c Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sun, 19 Jan 2014 06:49:22 +0000 Subject: [PATCH 023/189] must be more specific about the direction of channels git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1559477 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress2/archivers/ArchiveFormat.java | 9 ++++++--- .../compress2/archivers/spi/AbstractArchiveFormat.java | 9 ++++++--- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/src/main/java/org/apache/commons/compress2/archivers/ArchiveFormat.java b/src/main/java/org/apache/commons/compress2/archivers/ArchiveFormat.java index 29c45157200..55e0794480b 100644 --- a/src/main/java/org/apache/commons/compress2/archivers/ArchiveFormat.java +++ b/src/main/java/org/apache/commons/compress2/archivers/ArchiveFormat.java @@ -19,7 +19,8 @@ package org.apache.commons.compress2.archivers; import java.nio.ByteBuffer; -import java.nio.channels.Channel; +import java.nio.channels.ReadableByteChannel; +import java.nio.channels.WritableByteChannel; import java.nio.charset.Charset; import java.io.File; import java.io.IOException; @@ -91,7 +92,8 @@ public interface ArchiveFormat { * @throws IOException * @throws UnsupportedOperationException if this format cannot read from non-seekable channels. */ - ArchiveInput readFrom(Channel channel, Charset charset) throws IOException, UnsupportedOperationException; + ArchiveInput readFrom(ReadableByteChannel channel, Charset charset) + throws IOException, UnsupportedOperationException; /** * Reads an archive assuming the given charset for entry names. 
* @param file the file to read from @@ -121,7 +123,8 @@ RandomAccessArchiveInput readWithRandomAccessFrom(File file, Charset charset) * @throws UnsupportedOperationException if this format cannot write to non-seekable channels or doesn't support * writing at all. */ - ArchiveOutput writeTo(Channel channel, Charset charset) throws IOException, UnsupportedOperationException; + ArchiveOutput writeTo(WritableByteChannel channel, Charset charset) + throws IOException, UnsupportedOperationException; /** * Writes an archive using the given charset for entry names. * @param file the file to write to diff --git a/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java b/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java index 12dc6c2f88c..642040dcb9d 100644 --- a/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java +++ b/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java @@ -23,7 +23,8 @@ import java.io.FileOutputStream; import java.io.IOException; import java.nio.ByteBuffer; -import java.nio.channels.Channel; +import java.nio.channels.ReadableByteChannel; +import java.nio.channels.WritableByteChannel; //import java.nio.channels.FileChannel; import java.nio.charset.Charset; import java.util.Collections; @@ -100,7 +101,8 @@ public boolean matches(ByteBuffer probe) throws UnsupportedOperationException { *

This implementation always throws an UnsupportedOperationException.

*/ @Override - public ArchiveInput
readFrom(Channel channel, Charset charset) throws IOException, UnsupportedOperationException { + public ArchiveInput readFrom(ReadableByteChannel channel, Charset charset) + throws IOException, UnsupportedOperationException { throw new UnsupportedOperationException("this format cannot read from non-seekable channels"); } /** @@ -132,7 +134,8 @@ public RandomAccessArchiveInput readWithRandomAccessFrom(File file, Charset c *

This implementation always throws an UnsupportedOperationException.

*/ @Override - public ArchiveOutput
writeTo(Channel channel, Charset charset) throws IOException, UnsupportedOperationException { + public ArchiveOutput writeTo(WritableByteChannel channel, Charset charset) + throws IOException, UnsupportedOperationException { throw new UnsupportedOperationException("this format is read-only"); } /** From 0bf6a88e5039cfda0fccbf0d86df3424e5141af9 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sun, 19 Jan 2014 07:03:40 +0000 Subject: [PATCH 024/189] implement ArchiveFormat for AR git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1559479 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress2/formats/ar/ArArchiveFormat.java | 106 ++++++++++++++++++ .../compress2/formats/ar/ArArchiveInput.java | 44 -------- .../formats/ar/ArArchiveFormatTest.java | 53 +++++++++ src/test/resources/test-archives/default.ar | 27 +++++ 4 files changed, 186 insertions(+), 44 deletions(-) create mode 100644 src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveFormat.java create mode 100644 src/test/java/org/apache/commons/compress2/formats/ar/ArArchiveFormatTest.java create mode 100644 src/test/resources/test-archives/default.ar diff --git a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveFormat.java b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveFormat.java new file mode 100644 index 00000000000..6521b5c0298 --- /dev/null +++ b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveFormat.java @@ -0,0 +1,106 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress2.formats.ar; + +import java.nio.ByteBuffer; +import java.nio.channels.ReadableByteChannel; +import java.nio.channels.WritableByteChannel; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import org.apache.commons.compress2.archivers.spi.AbstractArchiveFormat; + +/** + * Format descriptor for the AR format. + */ +public class ArArchiveFormat extends AbstractArchiveFormat { + + private static final byte[] SIG = StandardCharsets.US_ASCII.encode(ArArchiveEntry.HEADER).array(); + + /** + * "AR" + */ + public static final String AR_FORMAT_NAME = "AR"; + + /** + * @return {@link #AR_FORMAT_NAME} + */ + @Override + public String getName() { + return AR_FORMAT_NAME; + } + + /** + * Yes. + */ + @Override + public boolean supportsWriting() { return true; } + /** + * Yes. + */ + @Override + public boolean supportsWritingToChannels() { return true; } + /** + * Yes. + */ + @Override + public boolean supportsReadingFromChannels() { return true; } + + /** + * Yes. + */ + @Override + public boolean supportsAutoDetection() { return true; } + + /** + * Each AR archive starts with "!<arch>" followed by a LF. + * @return 8 + */ + @Override + public int getNumberOfBytesRequiredForAutodetection() throws UnsupportedOperationException { + return SIG.length; + } + + /** + * Each AR archive starts with "!<arch>" followed by a LF. 
+ */ + @Override + public boolean matches(ByteBuffer probe) throws UnsupportedOperationException { + byte[] sig = new byte[SIG.length]; + probe.get(sig); + return Arrays.equals(SIG, sig); + } + + /** + * This implementation ignores the charset as AR archives only support US-ASCII file names. + */ + @Override + public ArArchiveInput readFrom(ReadableByteChannel channel, Charset charset) { + return new ArArchiveInput(channel); + } + + /** + * This implementation ignores the charset as AR archives only support US-ASCII file names. + */ + @Override + public ArArchiveOutput writeTo(WritableByteChannel channel, Charset charset) { + return new ArArchiveOutput(channel); + } + +} diff --git a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveInput.java b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveInput.java index c80e5326244..01fa89bcb3a 100644 --- a/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveInput.java +++ b/src/main/java/org/apache/commons/compress2/formats/ar/ArArchiveInput.java @@ -271,50 +271,6 @@ public int read() throws IOException { } } - /** - * Checks if the signature matches ASCII "!<arch>" followed by a single LF - * control character - * - * @param signature - * the bytes to check - * @param length - * the number of bytes to check - * @return true, if this stream is an Ar archive stream, false otherwise - */ - public static boolean matches(byte[] signature, int length) { - // 3c21 7261 6863 0a3e - - if (length < 8) { - return false; - } - if (signature[0] != 0x21) { - return false; - } - if (signature[1] != 0x3c) { - return false; - } - if (signature[2] != 0x61) { - return false; - } - if (signature[3] != 0x72) { - return false; - } - if (signature[4] != 0x63) { - return false; - } - if (signature[5] != 0x68) { - return false; - } - if (signature[6] != 0x3e) { - return false; - } - if (signature[7] != 0x0a) { - return false; - } - - return true; - } - static final String BSD_LONGNAME_PREFIX = "#1/"; private 
static final int BSD_LONGNAME_PREFIX_LEN = BSD_LONGNAME_PREFIX.length(); diff --git a/src/test/java/org/apache/commons/compress2/formats/ar/ArArchiveFormatTest.java b/src/test/java/org/apache/commons/compress2/formats/ar/ArArchiveFormatTest.java new file mode 100644 index 00000000000..051c0d40763 --- /dev/null +++ b/src/test/java/org/apache/commons/compress2/formats/ar/ArArchiveFormatTest.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.commons.compress2.formats.ar; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.nio.ByteBuffer; + +import org.junit.Assert; +import org.junit.Test; + +public class ArArchiveFormatTest { + + @Test + public void shouldDetectFormat() throws IOException { + Assert.assertTrue(isAr("test-archives/default.ar")); + } + + @Test + public void shouldRejectXMLFile() throws IOException { + Assert.assertFalse(isAr("test1.xml")); + } + + + private boolean isAr(String file) throws IOException { + File f = RoundTripTest.getFile(file); + FileInputStream c = new FileInputStream(f); + try { + byte[] b = new byte[10]; + IOUtils.readFully(c, b); + return new ArArchiveFormat().matches(ByteBuffer.wrap(b)); + } finally { + c.close(); + } + } +} diff --git a/src/test/resources/test-archives/default.ar b/src/test/resources/test-archives/default.ar new file mode 100644 index 00000000000..c98e9b78936 --- /dev/null +++ b/src/test/resources/test-archives/default.ar @@ -0,0 +1,27 @@ +! 
+test1.xml 1201445869 501 501 100644 610 ` + + + +<<<<<<< HEAD:testdata/test.xml +======= + as +>>>>>>> 75cb63ff7005344589b57d17338b64783f8f430c:testdata/test.xml + + 1521 + 10.248.40.111 + JDBC + false + appsrv + Dev-DB + O10gIN1 + oracle.jdbc.driver.OracleDriver + thin + + +test2.xml 1201445869 501 501 100644 82 ` + + + + + From cd8337ca5cd6c4b833a79a010d38b66ddddc3aab Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sun, 19 Jan 2014 13:42:39 +0000 Subject: [PATCH 025/189] some more tests to provide more coverage of the abstract ArchiveFormat - tests are in need of some DRYing up, I know git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1559497 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress2/formats/ar/RoundTripTest.java | 91 ++++++++++++++++++- 1 file changed, 88 insertions(+), 3 deletions(-) diff --git a/src/test/java/org/apache/commons/compress2/formats/ar/RoundTripTest.java b/src/test/java/org/apache/commons/compress2/formats/ar/RoundTripTest.java index 74c7f369dfb..250c50579b8 100644 --- a/src/test/java/org/apache/commons/compress2/formats/ar/RoundTripTest.java +++ b/src/test/java/org/apache/commons/compress2/formats/ar/RoundTripTest.java @@ -30,10 +30,13 @@ import java.util.Locale; import org.junit.After; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.apache.commons.compress2.archivers.ArchiveEntryParameters; +import org.apache.commons.compress2.archivers.ArchiveInput; +import org.apache.commons.compress2.archivers.ArchiveOutput; public class RoundTripTest { @@ -50,8 +53,8 @@ public void removeTempDir() throws Exception { } @Test - public void testArUnarchive() throws Exception { - final File output = new File(dir, "bla.ar"); + public void testRoundtripUsingConstructors() throws Exception { + final File output = new File(dir, "constructors.ar"); { final File file1 = getFile("test1.xml"); final File file2 = getFile("test2.xml"); @@ -73,18 +76,100 @@ public void 
testArUnarchive() throws Exception { final File input = output; final ReadableByteChannel is = new FileInputStream(input).getChannel(); final ArArchiveInput in = new ArArchiveInput(is); - final ArArchiveEntry entry = in.next(); + ArArchiveEntry entry = in.next(); + Assert.assertEquals("test1.xml", entry.getName()); File target = new File(dir, entry.getName()); final WritableByteChannel out = new FileOutputStream(target).getChannel(); IOUtils.copy(in.getChannel(), out); + out.close(); + + entry = in.next(); + Assert.assertEquals("test2.xml", entry.getName()); + + in.close(); + is.close(); + } + + @Test + public void testRoundtripUsingFormatInstanceAndChannels() throws Exception { + ArArchiveFormat format = new ArArchiveFormat(); + final File output = new File(dir, "format-channels.ar"); + { + final File file1 = getFile("test1.xml"); + final File file2 = getFile("test2.xml"); + + final WritableByteChannel out = new FileOutputStream(output).getChannel(); + final ArchiveOutput os = format.writeTo(out, null); + IOUtils.copy(new FileInputStream(file1).getChannel(), + os.putEntry(os.createEntry(ArchiveEntryParameters.fromFile(file1)))); + os.closeEntry(); + IOUtils.copy(new FileInputStream(file2).getChannel(), + os.putEntry(os.createEntry(ArchiveEntryParameters.fromFile(file2)))); + os.closeEntry(); + os.close(); + out.close(); + } + + // UnArArchive Operation + final File input = output; + final ReadableByteChannel is = new FileInputStream(input).getChannel(); + final ArchiveInput in = format.readFrom(is, null); + ArArchiveEntry entry = in.next(); + Assert.assertEquals("test1.xml", entry.getName()); + + File target = new File(dir, entry.getName()); + final WritableByteChannel out = new FileOutputStream(target).getChannel(); + + IOUtils.copy(in.getChannel(), out); out.close(); + + entry = in.next(); + Assert.assertEquals("test2.xml", entry.getName()); + in.close(); is.close(); } + @Test + public void testRoundtripUsingFormatInstanceAndFiles() throws Exception { + 
ArArchiveFormat format = new ArArchiveFormat(); + final File output = new File(dir, "format-files.ar"); + { + final File file1 = getFile("test1.xml"); + final File file2 = getFile("test2.xml"); + + final ArchiveOutput os = format.writeTo(output, null); + IOUtils.copy(new FileInputStream(file1).getChannel(), + os.putEntry(os.createEntry(ArchiveEntryParameters.fromFile(file1)))); + os.closeEntry(); + + IOUtils.copy(new FileInputStream(file2).getChannel(), + os.putEntry(os.createEntry(ArchiveEntryParameters.fromFile(file2)))); + os.closeEntry(); + os.close(); + } + + // UnArArchive Operation + final File input = output; + final ArchiveInput in = format.readFrom(input, null); + ArArchiveEntry entry = in.next(); + Assert.assertEquals("test1.xml", entry.getName()); + + File target = new File(dir, entry.getName()); + final WritableByteChannel out = new FileOutputStream(target).getChannel(); + + IOUtils.copy(in.getChannel(), out); + out.close(); + + entry = in.next(); + Assert.assertEquals("test2.xml", entry.getName()); + + in.close(); + } + public static File mkdir(String name) throws IOException { File f = File.createTempFile(name, ""); f.delete(); From a439f83449df9ba413efd6ed46263a09de60f270 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 24 Jan 2014 17:44:35 +0000 Subject: [PATCH 026/189] discover archive formats via ServiceLoader git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1561086 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress2/archivers/Archivers.java | 200 ++++++++++++++++++ ....commons.compress2.archivers.ArchiveFormat | 1 + .../compress2/archivers/ArchiversTest.java | 78 +++++++ 3 files changed, 279 insertions(+) create mode 100644 src/main/java/org/apache/commons/compress2/archivers/Archivers.java create mode 100644 src/main/resources/META-INF/services/org.apache.commons.compress2.archivers.ArchiveFormat create mode 100644 src/test/java/org/apache/commons/compress2/archivers/ArchiversTest.java diff 
--git a/src/main/java/org/apache/commons/compress2/archivers/Archivers.java b/src/main/java/org/apache/commons/compress2/archivers/Archivers.java new file mode 100644 index 00000000000..ff5cf424b33 --- /dev/null +++ b/src/main/java/org/apache/commons/compress2/archivers/Archivers.java @@ -0,0 +1,200 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress2.archivers; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.NoSuchElementException; +import java.util.ServiceConfigurationError; +import java.util.ServiceLoader; + +/** + * Loads ArchiveFormats defined as "services" from {@code + * META-INF/services/org.apache.commons.compress2.archivers.ArchiveFormat} and provides access to them. + * + *

Uses {@link java.util.ServiceLoader} under the covers but iterates over all formats found eagerly inside the + * constructor so errors are reported early.

+ */ +public class Archivers implements Iterable> { + private final ServiceLoader formatLoader; + private Map> archivers; + + /** + * Loads services using the current thread's context class loader. + * @throws ServiceConfigurationError if an error occurs reading a service file or instantiating a format + */ + public Archivers() throws ServiceConfigurationError { + this(Thread.currentThread().getContextClassLoader()); + } + + /** + * Loads services using the given class loader. + * @throws ServiceConfigurationError if an error occurs reading a service file or instantiating a format + */ + public Archivers(ClassLoader cl) throws ServiceConfigurationError { + this(ServiceLoader.load(ArchiveFormat.class, cl)); + } + + private Archivers(ServiceLoader loader) { + formatLoader = loader; + fillMap(); + } + + /** + * Clears the cached formats and rebuilds it. + * + * @see ServiceLoader#reload + */ + public void reload() { + formatLoader.reload(); + fillMap(); + } + + /** + * Iterator over all known formats. + */ + public Iterator> iterator() { + return archivers.values().iterator(); + } + + /** + * Iterates over all known formats that can write archives. + */ + public Iterable> getFormatsWithWriteSupport() { + return filter(WRITE_PREDICATE); + } + + /** + * Iterates over all known formats that can write archives to channels. + */ + public Iterable> getFormatsWithWriteSupportForChannels() { + return filter(WRITE_TO_CHANNEL_PREDICATE); + } + + /** + * Iterates over all known formats that can read archives from channels. + */ + public Iterable> getFormatsWithReadSupportForChannels() { + return filter(READ_FROM_CHANNEL_PREDICATE); + } + + /** + * Iterates over all known formats that provide random access input. + */ + public Iterable> getFormatsWithRandomAccessInput() { + return filter(RANDOM_ACCESS_PREDICATE); + } + + /** + * Gets a format by its name. + * @param name the {@link ArchiveFormat#getName name} of the format. 
+ * @return the ArchiveFormat instance or null if not format is known by that name + */ + public ArchiveFormat getArchiveFormatByName(String name) { + return archivers.get(name); + } + + private void fillMap() throws ServiceConfigurationError { + // TODO make that a TreeMap sorted for auto-detection order + Map> a = + new HashMap>(); + for (ArchiveFormat f : formatLoader) { + a.put(f.getName(), f); + } + archivers = Collections.unmodifiableMap(a); + } + + private interface Predicate { boolean matches(T t); } + + private static final Predicate> WRITE_PREDICATE = + new Predicate>() { + public boolean matches(ArchiveFormat a) { + return a.supportsWriting(); + } + }; + + private static final Predicate> WRITE_TO_CHANNEL_PREDICATE = + new Predicate>() { + public boolean matches(ArchiveFormat a) { + return a.supportsWritingToChannels(); + } + }; + + private static final Predicate> READ_FROM_CHANNEL_PREDICATE = + new Predicate>() { + public boolean matches(ArchiveFormat a) { + return a.supportsReadingFromChannels(); + } + }; + + private static final Predicate> RANDOM_ACCESS_PREDICATE = + new Predicate>() { + public boolean matches(ArchiveFormat a) { + return a.supportsRandomAccessInput(); + } + }; + + private static final Predicate> AUTO_DETECTION_PREDICATE = + new Predicate>() { + public boolean matches(ArchiveFormat a) { + return a.supportsAutoDetection(); + } + }; + + private Iterable> + filter(final Predicate> p) { + return new Iterable>() { + public Iterator> iterator() { + return new FilteringIterator(Archivers.this.iterator(), p); + } + }; + } + + private static class FilteringIterator implements Iterator { + private final Iterator i; + private final Predicate filter; + private T lookAhead = null; + private FilteringIterator(Iterator i, Predicate filter) { + this.i = i; + this.filter = filter; + } + public void remove() { + i.remove(); + } + public T next() { + if (lookAhead == null) { + throw new NoSuchElementException(); + } + T next = lookAhead; + lookAhead = 
null; + return next; + } + public boolean hasNext() { + while (lookAhead == null && i.hasNext()) { + T next = i.next(); + if (filter.matches(next)) { + lookAhead = next; + } + } + return lookAhead != null; + } + } +} diff --git a/src/main/resources/META-INF/services/org.apache.commons.compress2.archivers.ArchiveFormat b/src/main/resources/META-INF/services/org.apache.commons.compress2.archivers.ArchiveFormat new file mode 100644 index 00000000000..0cf3bfc8938 --- /dev/null +++ b/src/main/resources/META-INF/services/org.apache.commons.compress2.archivers.ArchiveFormat @@ -0,0 +1 @@ +org.apache.commons.compress2.formats.ar.ArArchiveFormat diff --git a/src/test/java/org/apache/commons/compress2/archivers/ArchiversTest.java b/src/test/java/org/apache/commons/compress2/archivers/ArchiversTest.java new file mode 100644 index 00000000000..a32781e2c36 --- /dev/null +++ b/src/test/java/org/apache/commons/compress2/archivers/ArchiversTest.java @@ -0,0 +1,78 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.commons.compress2.archivers; + + +import org.apache.commons.compress2.formats.ar.ArArchiveFormat; + +import org.junit.Assert; +import org.junit.Test; + +public class ArchiversTest { + + @Test + public void shouldFindArArchiveFormatByName() { + ArchiveFormat arFormat = + new Archivers().getArchiveFormatByName(ArArchiveFormat.AR_FORMAT_NAME); + Assert.assertNotNull(arFormat); + Assert.assertEquals(ArArchiveFormat.class, arFormat.getClass()); + } + + @Test + public void shouldFindArArchiveFormatWhenIterating() { + shouldFind(ArArchiveFormat.class, new Archivers()); + } + + @Test + public void shouldFindArArchiveFormatAsWritableFormat() { + shouldFind(ArArchiveFormat.class, new Archivers().getFormatsWithWriteSupport()); + } + + @Test + public void shouldFindArArchiveFormatAsChannelWritableFormat() { + shouldFind(ArArchiveFormat.class, new Archivers().getFormatsWithWriteSupportForChannels()); + } + + @Test + public void shouldFindArArchiveFormatAsChannelReadableFormat() { + shouldFind(ArArchiveFormat.class, new Archivers().getFormatsWithReadSupportForChannels()); + } + + @Test + public void shouldNotFindArArchiveFormatAsRandomAccessFormat() { + shouldNotFind(ArArchiveFormat.class, new Archivers().getFormatsWithRandomAccessInput()); + } + + private void shouldFind(Class archiveFormat, Iterable> i) { + for (ArchiveFormat a : i) { + if (archiveFormat.equals(a.getClass())) { + return; + } + } + Assert.fail("Expected to find " + archiveFormat); + } + + private void shouldNotFind(Class archiveFormat, Iterable> i) { + for (ArchiveFormat a : i) { + if (archiveFormat.equals(a.getClass())) { + Assert.fail("Didn't expect to find " + archiveFormat); + } + } + } +} From 04673b6a801ed5c66465996cf77dc32487846c50 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sat, 25 Jan 2014 06:22:39 +0000 Subject: [PATCH 027/189] the list of formats to consult first may be too much - let's keep the implementations independent of each other git-svn-id: 
https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1561264 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress2/archivers/ArchiveFormat.java | 8 -------- .../compress2/archivers/spi/AbstractArchiveFormat.java | 9 --------- 2 files changed, 17 deletions(-) diff --git a/src/main/java/org/apache/commons/compress2/archivers/ArchiveFormat.java b/src/main/java/org/apache/commons/compress2/archivers/ArchiveFormat.java index 55e0794480b..6a37e1fdb8d 100644 --- a/src/main/java/org/apache/commons/compress2/archivers/ArchiveFormat.java +++ b/src/main/java/org/apache/commons/compress2/archivers/ArchiveFormat.java @@ -69,14 +69,6 @@ public interface ArchiveFormat
{ * @throws UnsupportedOperationException if this format doesn't support content based detection. */ int getNumberOfBytesRequiredForAutodetection() throws UnsupportedOperationException; - /** - * Lists formats that must not be consulted before this format during content-based detection. - * - *

For example JAR would return ZIP here so it first has a chance to claim the archive for itself.

- * - * @return the names of the formats (as returned by {@link #getName}) that must not be consulted before this format during content-based detection. - */ - Iterable formatsToConsultLater(); /** * Verifies the given input is readable by this format. * @param probe a buffer holding at least {@link #getNumberOfBytesRequiredForAutodetection} bytes diff --git a/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java b/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java index 642040dcb9d..b108641a559 100644 --- a/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java +++ b/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java @@ -27,7 +27,6 @@ import java.nio.channels.WritableByteChannel; //import java.nio.channels.FileChannel; import java.nio.charset.Charset; -import java.util.Collections; import org.apache.commons.compress2.archivers.ArchiveEntry; import org.apache.commons.compress2.archivers.ArchiveFormat; import org.apache.commons.compress2.archivers.ArchiveInput; @@ -79,14 +78,6 @@ public abstract class AbstractArchiveFormat
implements A public int getNumberOfBytesRequiredForAutodetection() throws UnsupportedOperationException { throw new UnsupportedOperationException("this format doesn't support content-based detection"); } - /** - * {@inheritDoc} - *

This implementation always returns an empty collection.

- */ - @Override - public Iterable formatsToConsultLater() { - return Collections.emptyList(); - } /** * {@inheritDoc} *

This implementation always throws an UnsupportedOperationException.

From 1336cebfd8d644cc534c525644a27f2e2cb560be Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sat, 25 Jan 2014 06:46:10 +0000 Subject: [PATCH 028/189] internally sort archives found in a way that during auto-detection the formats requiring less bytes come first git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1561268 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress2/archivers/Archivers.java | 37 +++++++++++++++++-- 1 file changed, 33 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/apache/commons/compress2/archivers/Archivers.java b/src/main/java/org/apache/commons/compress2/archivers/Archivers.java index ff5cf424b33..195f190e5b3 100644 --- a/src/main/java/org/apache/commons/compress2/archivers/Archivers.java +++ b/src/main/java/org/apache/commons/compress2/archivers/Archivers.java @@ -18,13 +18,18 @@ */ package org.apache.commons.compress2.archivers; +import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; +import java.util.Comparator; import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.ServiceConfigurationError; import java.util.ServiceLoader; +import java.util.Set; +import java.util.TreeSet; /** * Loads ArchiveFormats defined as "services" from {@code @@ -113,10 +118,11 @@ public ArchiveFormat getArchiveFormatByName(String name) { } private void fillMap() throws ServiceConfigurationError { - // TODO make that a TreeMap sorted for auto-detection order + Set ts = new TreeSet(SORT_FOR_AUTO_DETECTION); + ts.addAll(asList(formatLoader)); Map> a = - new HashMap>(); - for (ArchiveFormat f : formatLoader) { + new LinkedHashMap>(); + for (ArchiveFormat f : ts) { a.put(f.getName(), f); } archivers = Collections.unmodifiableMap(a); @@ -168,6 +174,29 @@ public Iterator> iterator() { }; } + private static List asList(Iterable i) { + List l = new ArrayList(); + for (T t : i) 
{ + l.add(t); + } + return l; + } + + private Comparator SORT_FOR_AUTO_DETECTION = new Comparator() { + public int compare(ArchiveFormat a1, ArchiveFormat a2) { + if (a1.supportsAutoDetection() && a2.supportsAutoDetection()) { + return a1.getNumberOfBytesRequiredForAutodetection() - a2.getNumberOfBytesRequiredForAutodetection(); + } + if (!a1.supportsAutoDetection() && !a2.supportsAutoDetection()) { + return 0; + } + if (a1.supportsAutoDetection()) { + return -1; + } + return 1; + } + }; + private static class FilteringIterator implements Iterator { private final Iterator i; private final Predicate filter; From 84d92916b9fead62bbe7c3120d480c3a82f40f31 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sun, 27 Apr 2014 16:10:08 +0000 Subject: [PATCH 029/189] Compress2 will require Java7 git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/branches/compress-2.0@1590412 13f79535-47bb-0310-9956-ffa450edef68 --- pom.xml | 6 +++--- .../archivers/spi/AbstractArchiveFormat.java | 13 ++++++------- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/pom.xml b/pom.xml index 09f4cfdc3d5..653ae57af75 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ org.apache.commons commons-parent - 32 + 34 org.apache.commons @@ -35,8 +35,8 @@ These include: bzip2, gzip, pack200, lzma, xz and ar, cpio, jar, tar, zip, dump, - 1.5 - 1.5 + 1.7 + 1.7 compress COMPRESS 12310904 diff --git a/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java b/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java index b108641a559..2e02555cac6 100644 --- a/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java +++ b/src/main/java/org/apache/commons/compress2/archivers/spi/AbstractArchiveFormat.java @@ -19,14 +19,13 @@ package org.apache.commons.compress2.archivers.spi; import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; import java.io.IOException; import 
java.nio.ByteBuffer; import java.nio.channels.ReadableByteChannel; import java.nio.channels.WritableByteChannel; -//import java.nio.channels.FileChannel; +import java.nio.channels.FileChannel; import java.nio.charset.Charset; +import java.nio.file.StandardOpenOption; import org.apache.commons.compress2.archivers.ArchiveEntry; import org.apache.commons.compress2.archivers.ArchiveFormat; import org.apache.commons.compress2.archivers.ArchiveInput; @@ -107,8 +106,7 @@ public ArchiveInput
readFrom(File file, Charset charset) throws IOException { return readWithRandomAccessFrom(file, charset); } - // TODO use FileChannel.open in Java7 - return readFrom(new FileInputStream(file).getChannel(), charset); + return readFrom(FileChannel.open(file.toPath(), StandardOpenOption.READ), charset); } /** * {@inheritDoc} @@ -135,7 +133,8 @@ public ArchiveOutput writeTo(WritableByteChannel channel, Charset charset) */ @Override public ArchiveOutput writeTo(File file, Charset charset) throws IOException, UnsupportedOperationException { - // TODO use FileChannel.open in Java7 - return writeTo(new FileOutputStream(file).getChannel(), charset); + return writeTo(FileChannel.open(file.toPath(), StandardOpenOption.WRITE, StandardOpenOption.CREATE, + StandardOpenOption.TRUNCATE_EXISTING), + charset); } } From 9067b02c65bf4316e0541579bdb883c6d3d07852 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Thu, 18 Dec 2014 20:51:57 +0000 Subject: [PATCH 030/189] COMPRESS-295 Add support for transferring a zip entry from one zip file to another git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1646531 13f79535-47bb-0310-9956-ffa450edef68 --- .gitignore | 2 + .../zip/ZipArchiveEntryPredicate.java | 32 +++++++ .../archivers/zip/ZipArchiveOutputStream.java | 87 +++++++++++++++--- .../compress/archivers/zip/ZipFile.java | 38 ++++++++ .../commons/compress/AbstractTestCase.java | 9 +- .../compress/archivers/ZipTestCase.java | 90 ++++++++++++++++--- 6 files changed, 232 insertions(+), 26 deletions(-) create mode 100644 src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryPredicate.java diff --git a/.gitignore b/.gitignore index 1ed1b0e6eb5..7b7dd970c98 100644 --- a/.gitignore +++ b/.gitignore @@ -2,4 +2,6 @@ target .project .classpath .settings +.idea +*.iml *~ diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryPredicate.java 
b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryPredicate.java new file mode 100644 index 00000000000..8808248a158 --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryPredicate.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.apache.commons.compress.archivers.zip; + +/** + * A predicate to test if a #ZipArchiveEntry matches a criteria. 
+ * Some day this can extend java.util.function.Predicate + */ +public interface ZipArchiveEntryPredicate { + /** + * Indicate if the given entry should be included in the operation + * @param zipArchiveEntry the entry to test + * @return true if the entry should be included + */ + boolean test(ZipArchiveEntry zipArchiveEntry); +} diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index 20985faa6b5..b2432169cdc 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -20,9 +20,11 @@ import java.io.File; import java.io.FileOutputStream; import java.io.IOException; +import java.io.InputStream; import java.io.OutputStream; import java.io.RandomAccessFile; import java.nio.ByteBuffer; +import java.util.Enumeration; import java.util.HashMap; import java.util.LinkedList; import java.util.List; @@ -432,27 +434,36 @@ public void finish() throws IOException { */ @Override public void closeArchiveEntry() throws IOException { - if (finished) { - throw new IOException("Stream has already been finished"); - } - - if (entry == null) { - throw new IOException("No current entry to close"); - } - - if (!entry.hasWritten) { - write(EMPTY, 0, 0); - } + preClose(); flushDeflater(); - final Zip64Mode effectiveMode = getEffectiveZip64Mode(entry.entry); long bytesWritten = written - entry.dataStart; long realCrc = crc.getValue(); crc.reset(); + doCloseEntry(realCrc, bytesWritten); + } + + /** + * Writes all necessary data for this entry. + * + * @throws IOException on error + * @throws Zip64RequiredException if the entry's uncompressed or + * compressed size exceeds 4 GByte and {@link #setUseZip64} + * is {@link Zip64Mode#Never}. 
+ */ + private void closeCopiedEntry() throws IOException { + preClose(); + long realCrc = entry.entry.getCrc(); + entry.bytesRead = entry.entry.getSize(); + doCloseEntry(realCrc, entry.entry.getCompressedSize()); + } + + private void doCloseEntry(long realCrc, long bytesWritten) throws IOException { + final Zip64Mode effectiveMode = getEffectiveZip64Mode(entry.entry); final boolean actuallyNeedsZip64 = - handleSizesAndCrc(bytesWritten, realCrc, effectiveMode); + handleSizesAndCrc(bytesWritten, realCrc, effectiveMode); if (raf != null) { rewriteSizesAndCrc(actuallyNeedsZip64); @@ -462,6 +473,37 @@ public void closeArchiveEntry() throws IOException { entry = null; } + private void preClose() throws IOException { + if (finished) { + throw new IOException("Stream has already been finished"); + } + + if (entry == null) { + throw new IOException("No current entry to close"); + } + + if (!entry.hasWritten) { + write(EMPTY, 0, 0); + } + } + + /** + * Adds an archive entry with a raw input stream. + * + * The entry is put and closed immediately. + * + * @param entry The archive entry to add + * @param rawStream The raw input stream of a different entry. May be compressed/encrypted. + * @throws IOException If copying fails + */ + public void addRawArchiveEntry(ZipArchiveEntry entry, InputStream rawStream) + throws IOException { + ZipArchiveEntry ae = new ZipArchiveEntry((java.util.zip.ZipEntry)entry); + putArchiveEntry(ae); + copyFromZipInputStream(rawStream); + closeCopiedEntry(); + } + /** * Ensures all bytes sent to the deflater are written to the stream. 
*/ @@ -768,6 +810,25 @@ public void write(byte[] b, int offset, int length) throws IOException { count(length); } + private void copyFromZipInputStream(InputStream src) throws IOException { + if (entry == null) { + throw new IllegalStateException("No current entry"); + } + ZipUtil.checkRequestedFeatures(entry.entry); + entry.hasWritten = true; + byte[] tmpBuf = new byte[4096]; + int length = src.read( tmpBuf ); + while ( length >= 0 ) + { + writeOut( tmpBuf, 0, length ); + written += length; + crc.update( tmpBuf, 0, length ); + + count( length ); + length = src.read( tmpBuf ); + } + } + /** * write implementation for DEFLATED entries. */ diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java index bcde9c84181..4436ea9a6dc 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java @@ -345,6 +345,44 @@ public boolean canReadEntryData(ZipArchiveEntry ze) { return ZipUtil.canHandleEntryData(ze); } + /** + * Expose the raw stream of the archive entry (compressed form) + *

+ * This method does not relate to how/if we understand the payload in the + * stream, since we really only intend to move it on to somewhere else. + * + * @param ze The entry to get the stream for + * @return The raw input stream containing (possibly) compressed data. + */ + private InputStream getRawInputStream(ZipArchiveEntry ze) { + if (!(ze instanceof Entry)) { + return null; + } + OffsetEntry offsetEntry = ((Entry) ze).getOffsetEntry(); + long start = offsetEntry.dataOffset; + return new BoundedInputStream(start, ze.getCompressedSize()); + } + + + /** + * Transfer selected entries from this zipfile to a given #ZipArchiveOutputStream. + * Compression and all other attributes will be as in this file. + * This method transfers entries based on the central directory of the zip file. + * + * @param target The zipArchiveOutputStream to write the entries to + * @param predicate A predicate that selects which entries to write + */ + public void copyRawEntries(ZipArchiveOutputStream target, ZipArchiveEntryPredicate predicate) + throws IOException { + Enumeration src = getEntriesInPhysicalOrder(); + while (src.hasMoreElements()) { + ZipArchiveEntry entry = src.nextElement(); + if (predicate.test( entry)) { + target.addRawArchiveEntry(entry, getRawInputStream(entry)); + } + } + } + /** * Returns an InputStream for reading the contents of the given entry. * diff --git a/src/test/java/org/apache/commons/compress/AbstractTestCase.java b/src/test/java/org/apache/commons/compress/AbstractTestCase.java index f004303a66f..f4bdcdf4d4a 100644 --- a/src/test/java/org/apache/commons/compress/AbstractTestCase.java +++ b/src/test/java/org/apache/commons/compress/AbstractTestCase.java @@ -392,8 +392,7 @@ protected String getExpectedString(ArchiveEntry entry) { * element of the two element array). 
*/ protected File[] createTempDirAndFile() throws IOException { - File tmpDir = mkdir("testdir"); - tmpDir.deleteOnExit(); + File tmpDir = createTempDir(); File tmpFile = File.createTempFile("testfile", "", tmpDir); tmpFile.deleteOnExit(); FileOutputStream fos = new FileOutputStream(tmpFile); @@ -405,6 +404,12 @@ protected File[] createTempDirAndFile() throws IOException { } } + protected File createTempDir() throws IOException { + File tmpDir = mkdir("testdir"); + tmpDir.deleteOnExit(); + return tmpDir; + } + protected void closeQuietly(Closeable closeable){ if (closeable != null) { try { diff --git a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java index 3a8757be6c7..a0f19aab06f 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java +++ b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java @@ -18,22 +18,14 @@ */ package org.apache.commons.compress.archivers; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; +import java.io.*; import java.util.ArrayList; import java.util.List; import org.apache.commons.compress.AbstractTestCase; -import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; -import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream; -import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream; -import org.apache.commons.compress.archivers.zip.ZipFile; -import org.apache.commons.compress.archivers.zip.ZipMethod; +import org.apache.commons.compress.archivers.zip.*; import org.apache.commons.compress.utils.IOUtils; +import org.junit.Assert; public final class ZipTestCase extends AbstractTestCase { /** @@ -293,6 +285,82 @@ public void testExplicitDirectoryEntry() throws Exception { rmdir(tmp[0]); } } + String first_payload = "ABBA"; + String second_payload = 
"AAAAAAAAAAAA"; + ZipArchiveEntryPredicate allFilesPredicate = new ZipArchiveEntryPredicate() { + public boolean test(ZipArchiveEntry zipArchiveEntry) { + return true; + } + }; + + + public void testCopyRawEntriesFromFile + () + throws IOException { + + File[] tmp = createTempDirAndFile(); + File reference = createReferenceFile(tmp[0]); + + File a1 = File.createTempFile("src1.", ".zip", tmp[0]); + createFirstEntry(new ZipArchiveOutputStream(a1)).close(); + + File a2 = File.createTempFile("src2.", ".zip", tmp[0]); + createSecondEntry(new ZipArchiveOutputStream(a2)).close(); + + ZipFile zf1 = new ZipFile(a1); + ZipFile zf2 = new ZipFile(a2); + File fileResult = File.createTempFile("file-actual.", ".zip", tmp[0]); + ZipArchiveOutputStream zos2 = new ZipArchiveOutputStream(fileResult); + zf1.copyRawEntries(zos2, allFilesPredicate); + zf2.copyRawEntries(zos2, allFilesPredicate); + zos2.close(); + assertSameFileContents(reference, fileResult); + zf1.close(); + zf2.close(); + } + + private File createReferenceFile(File directory) throws IOException { + File reference = File.createTempFile("expected.", ".zip", directory); + ZipArchiveOutputStream zos = new ZipArchiveOutputStream(reference); + createFirstEntry(zos); + createSecondEntry(zos); + zos.close(); + return reference; + } + + private ZipArchiveOutputStream createFirstEntry(ZipArchiveOutputStream zos) throws IOException { + createArchiveEntry(first_payload, zos, "file1.txt"); + return zos; + } + + private ZipArchiveOutputStream createSecondEntry(ZipArchiveOutputStream zos) throws IOException { + createArchiveEntry(second_payload, zos, "file2.txt"); + return zos; + } + + + private void assertSameFileContents(File expectedFile, File actualFile) throws IOException { + int size = (int) Math.max(expectedFile.length(), actualFile.length()); + byte[] expected = new byte[size]; + byte[] actual = new byte[size]; + final FileInputStream expectedIs = new FileInputStream(expectedFile); + final FileInputStream actualIs = new 
FileInputStream(actualFile); + IOUtils.readFully(expectedIs, expected); + IOUtils.readFully(actualIs, actual); + expectedIs.close(); + actualIs.close(); + Assert.assertArrayEquals(expected, actual); + } + + + private void createArchiveEntry(String payload, ZipArchiveOutputStream zos, String name) + throws IOException { + ZipArchiveEntry in = new ZipArchiveEntry(name); + zos.putArchiveEntry(in); + + zos.write(payload.getBytes()); + zos.closeArchiveEntry(); + } public void testFileEntryFromFile() throws Exception { File[] tmp = createTempDirAndFile(); From 8a4ea62fe7fab06f6b3bb311b43c5fed3cfce3d8 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Thu, 18 Dec 2014 20:58:00 +0000 Subject: [PATCH 031/189] COMPRESS-296 Parallel compression. Add support for cloning ZipArchiveOutputStream With these changes it is possible to realize parallel compression. There is one more class coming that coordinates it all git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1646532 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/ZipArchiveOutputStream.java | 18 ++++++++++++++++++ .../compress/archivers/ZipTestCase.java | 12 ++++++++++++ 2 files changed, 30 insertions(+) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index b2432169cdc..60429b88add 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -504,6 +504,24 @@ public void addRawArchiveEntry(ZipArchiveEntry entry, InputStream rawStream) closeCopiedEntry(); } + /** + * Make a copy of this stream with all its settings, but point to a new file. + * Used for scatter/gather operations to make several streams from a user-supplied master. 
+ * + * @param newFile The file to use for the copy of this stream + * @return A copy of this stream + */ + public ZipArchiveOutputStream cloneWith(File newFile) throws IOException { + ZipArchiveOutputStream zos = new ZipArchiveOutputStream(newFile); + zos.setCreateUnicodeExtraFields(createUnicodeExtraFields); + zos.setMethod(method); + zos.setEncoding(encoding); + zos.setFallbackToUTF8(fallbackToUTF8); + zos.setUseLanguageEncodingFlag(useUTF8Flag); + zos.setUseZip64(zip64Mode); + return zos; + } + /** * Ensures all bytes sent to the deflater are written to the stream. */ diff --git a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java index a0f19aab06f..73316a43c09 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java +++ b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java @@ -294,6 +294,18 @@ public boolean test(ZipArchiveEntry zipArchiveEntry) { }; + public void testCloneZipOutputStream( ) throws IOException { + File tempDir = createTempDir(); + File fred = new File(tempDir, "fred"); + ZipArchiveOutputStream zipArchiveOutputStream = new ZipArchiveOutputStream(fred); + File frank = new File(tempDir, "frank"); + ZipArchiveOutputStream actual = zipArchiveOutputStream.cloneWith(frank); + zipArchiveOutputStream.close(); + actual.close(); + assertTrue( fred.exists()); + assertTrue( frank.exists()); + } + public void testCopyRawEntriesFromFile () throws IOException { From e214adb4b9164910d62614dbd7b3043cb0cc3467 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Mon, 22 Dec 2014 15:24:02 +0000 Subject: [PATCH 032/189] COMPRESS-296 Parallel compression. Added StreamCompressor and ScatterZipOutputStream. StreamCompressor is an extract of the deflation algorithm from ZipArchiveOutputStream, which unfortunately was too conflated with writing a file in a particular structure. 
Using the actual zip file format as an intermediate format for scatter-streams turned out to be fairly inefficient. ScatterZipOuputStream is 2-3x faster than using a zip file as intermediate format. It would be possibly to refactor ZipArchiveOutputStream to use StreamCompressor, but there would be a slight break in backward compatibility regarding the protected writeOut method, which is moved to the streamCompressor class. git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1647329 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/ScatterZipOutputStream.java | 174 ++++++++++++++ .../archivers/zip/StreamCompressor.java | 226 ++++++++++++++++++ .../archivers/zip/ZipArchiveOutputStream.java | 18 -- .../compress/archivers/ZipTestCase.java | 13 - .../zip/ScatterZipOutputStreamTest.java | 58 +++++ .../archivers/zip/StreamCompressorTest.java | 58 +++++ 6 files changed, 516 insertions(+), 31 deletions(-) create mode 100644 src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java create mode 100644 src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java create mode 100644 src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java create mode 100644 src/test/java/org/apache/commons/compress/archivers/zip/StreamCompressorTest.java diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java new file mode 100644 index 00000000000..6dd6d021de7 --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java @@ -0,0 +1,174 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.zip; + + +import org.apache.commons.compress.utils.BoundedInputStream; + +import java.io.*; +import java.util.*; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.zip.Deflater; + +/** + * A zip output stream that is optimized for multi-threaded scatter/gather construction of zip files. + *

+ * The internal data format of the entries used by this class are entirely private to this class + * and are not part of any public api whatsoever. + *

+ * It is possible to extend this class to support different kinds of backing storage, the default + * implementation only supports file-based backing. + *

+ * Thread safety: This class supports multiple threads. But the "writeTo" method must be called + * by the thread that originally created the ZipArchiveEntry. + * + * @since 1.10 + */ +public abstract class ScatterZipOutputStream { + private final Queue items = new ConcurrentLinkedQueue(); + + private static class CompressedEntry { + final ZipArchiveEntry entry; + final long crc; + final long compressedSize; + final int method; + final long size; + + public CompressedEntry(ZipArchiveEntry entry, long crc, long compressedSize, int method, long size) { + this.entry = entry; + this.crc = crc; + this.compressedSize = compressedSize; + this.method = method; + this.size = size; + } + + public ZipArchiveEntry transferToArchiveEntry(){ + entry.setCompressedSize(compressedSize); + entry.setSize(size); + entry.setCrc(crc); + entry.setMethod(method); + return entry; + } + } + + /** + * Add an archive entry to this scatter stream. + * + * @param zipArchiveEntry The entry to write + * @param payload The content to write for the entry + * @param method The compression method + * @throws IOException If writing fails + */ + public void addArchiveEntry(ZipArchiveEntry zipArchiveEntry, InputStream payload, int method) throws IOException { + StreamCompressor sc = getStreamCompressor(); + sc.deflate(payload, method); + payload.close(); + items.add(new CompressedEntry(zipArchiveEntry, sc.getCrc32(), sc.getBytesWritten(), method, sc.getBytesRead())); + } + + /** + * Write the contents of this scatter stream to a target archive. 
+ * + * @param target The archive to receive the contents of this #ScatterZipOutputStream + * @throws IOException If writing fails + */ + public void writeTo(ZipArchiveOutputStream target) throws IOException { + closeBackingStorage(); + InputStream data = getInputStream(); + for (CompressedEntry compressedEntry : items) { + final BoundedInputStream rawStream = new BoundedInputStream(data, compressedEntry.compressedSize); + target.addRawArchiveEntry(compressedEntry.transferToArchiveEntry(), rawStream); + rawStream.close(); + } + data.close(); + } + + /** + * Returns a stream compressor that can be used to compress the data. + *

+ * This method is expected to return the same instance every time. + * + * @return The stream compressor + * @throws FileNotFoundException + */ + protected abstract StreamCompressor getStreamCompressor() throws FileNotFoundException; + + /** + * An input stream that contains the scattered payload + * + * @return An InputStream, should be closed by the caller of this method. + * @throws IOException when something fails + */ + protected abstract InputStream getInputStream() throws IOException; + + + /** + * Closes whatever storage is backing this scatter stream + */ + protected abstract void closeBackingStorage() throws IOException; + + /** + * Create a ScatterZipOutputStream with default compression level that is backed by a file + * + * @param file The file to offload compressed data into. + * @return A ScatterZipOutputStream that is ready for use. + * @throws FileNotFoundException + */ + public static ScatterZipOutputStream fileBased(File file) throws FileNotFoundException { + return fileBased(file, Deflater.DEFAULT_COMPRESSION); + } + + /** + * Create a ScatterZipOutputStream that is backed by a file + * + * @param file The file to offload compressed data into. + * @param compressionLevel The compression level to use, @see #Deflater + * @return A ScatterZipOutputStream that is ready for use. 
+ * @throws FileNotFoundException + */ + public static ScatterZipOutputStream fileBased(File file, int compressionLevel) throws FileNotFoundException { + return new FileScatterOutputStream(file, compressionLevel); + } + + private static class FileScatterOutputStream extends ScatterZipOutputStream { + final File target; + private StreamCompressor streamDeflater; + final FileOutputStream os; + + FileScatterOutputStream(File target, int compressionLevel) throws FileNotFoundException { + this.target = target; + os = new FileOutputStream(target); + streamDeflater = StreamCompressor.create(compressionLevel, os); + } + + @Override + protected StreamCompressor getStreamCompressor() throws FileNotFoundException { + return streamDeflater; + } + + @Override + protected InputStream getInputStream() throws IOException { + return new FileInputStream(target); + } + + @SuppressWarnings("ResultOfMethodCallIgnored") + public void closeBackingStorage() throws IOException { + os.close(); + } + } +} diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java new file mode 100644 index 00000000000..5856a7dfa3b --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java @@ -0,0 +1,226 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.zip; + +import java.io.DataOutput; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.zip.CRC32; +import java.util.zip.Deflater; + +/** + * Encapsulates a Deflater and crc calculator, handling multiple types of output streams. + * Currently #ZipEntry.DEFLATED and #ZipEntry.STORED are the only supported compression methods. + * + * @since 1.10 + */ +public abstract class StreamCompressor { + + /* + * Apparently Deflater.setInput gets slowed down a lot on Sun JVMs + * when it gets handed a really big buffer. See + * https://issues.apache.org/bugzilla/show_bug.cgi?id=45396 + * + * Using a buffer size of 8 kB proved to be a good compromise + */ + private static final int DEFLATER_BLOCK_SIZE = 8192; + + private final Deflater def; + + private final CRC32 crc = new CRC32(); + + int writtenToOutputStream = 0; + int sourcePayloadLength = 0; + long actualCrc; + + private final int bufferSize = 4096; + private final byte[] outputBuffer = new byte[bufferSize]; + private final byte[] readerBuf = new byte[bufferSize]; + + protected StreamCompressor(Deflater deflater) { + this.def = deflater; + } + + /** + * Create a stream compressor with the given compression level. 
+ * + * @param compressionLevel The #Deflater compression level + * @param os The #OutputStream stream to receive output + * @return A stream compressor + */ + public static StreamCompressor create(int compressionLevel, OutputStream os) { + final Deflater deflater = new Deflater(compressionLevel, true); + return new OutputStreamCompressor(deflater, os); + } + + /** + * Create a stream compressor with the default compression level. + * + * @param os The #OutputStream stream to receive output + * @return A stream compressor + */ + public static StreamCompressor create( OutputStream os) { + return create(Deflater.DEFAULT_COMPRESSION, os); + } + + /** + * Create a stream compressor with the given compression level. + * + * @param compressionLevel The #Deflater compression level + * @param os The #DataOutput to receive output + * @return A stream compressor + */ + public static StreamCompressor create(int compressionLevel, DataOutput os) { + final Deflater deflater = new Deflater(compressionLevel, true); + return new DataOutputCompressor(deflater, os); + } + + /** + * The crc32 of the last deflated file + * @return the crc32 + */ + + public long getCrc32() { + return actualCrc; + } + + /** + * Return the number of bytes read from the source stream + * @return The number of bytes read, never negative + */ + public int getBytesRead() { + return sourcePayloadLength; + } + + /** + * The number of bytes written to the output + * @return The number of bytes, never negative + */ + public int getBytesWritten() { + return writtenToOutputStream; + } + + /** + * Deflate the given source using the supplied compression method + * @param source The source to compress + * @param method The #ZipArchiveEntry compression method + * @throws IOException When failures happen + */ + + public void deflate(InputStream source, int method) throws IOException { + reset(); + int length; + + while(( length = source.read(readerBuf, 0, readerBuf.length)) >= 0){ + crc.update(readerBuf, 0, length); + 
if (method == ZipArchiveEntry.DEFLATED) { + writeDeflated(readerBuf, 0, length); + } else { + writeOut(readerBuf, 0, length); + writtenToOutputStream += length; + } + sourcePayloadLength += length; + } + if (method == ZipArchiveEntry.DEFLATED) { + flushDeflater(); + } + actualCrc = crc.getValue(); + + + } + + private void reset(){ + crc.reset(); + def.reset(); + sourcePayloadLength = 0; + writtenToOutputStream = 0; + } + + private void flushDeflater() throws IOException { + def.finish(); + while (!def.finished()) { + deflate(); + } + } + + private void writeDeflated(byte[]b, int offset, int length) + throws IOException { + if (length > 0 && !def.finished()) { + if (length <= DEFLATER_BLOCK_SIZE) { + def.setInput(b, offset, length); + deflateUntilInputIsNeeded(); + } else { + final int fullblocks = length / DEFLATER_BLOCK_SIZE; + for (int i = 0; i < fullblocks; i++) { + def.setInput(b, offset + i * DEFLATER_BLOCK_SIZE, + DEFLATER_BLOCK_SIZE); + deflateUntilInputIsNeeded(); + } + final int done = fullblocks * DEFLATER_BLOCK_SIZE; + if (done < length) { + def.setInput(b, offset + done, length - done); + deflateUntilInputIsNeeded(); + } + } + } + } + + private void deflateUntilInputIsNeeded() throws IOException { + while (!def.needsInput()) { + deflate(); + } + } + + private void deflate() throws IOException { + int len = def.deflate(outputBuffer, 0, outputBuffer.length); + if (len > 0) { + writeOut(outputBuffer, 0, len); + writtenToOutputStream += len; + } + } + + protected abstract void writeOut(byte[] data, int offset, int length) throws IOException ; + + private static final class OutputStreamCompressor extends StreamCompressor { + private final OutputStream os; + + public OutputStreamCompressor(Deflater deflater, OutputStream os) { + super(deflater); + this.os = os; + } + + protected final void writeOut(byte[] data, int offset, int length) + throws IOException { + os.write(data, offset, length); + } + } + + private static final class DataOutputCompressor extends 
StreamCompressor { + private final DataOutput raf; + public DataOutputCompressor(Deflater deflater, DataOutput raf) { + super(deflater); + this.raf = raf; + } + + protected final void writeOut(byte[] data, int offset, int length) + throws IOException { + raf.write(data, offset, length); + } + } +} diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index 60429b88add..b2432169cdc 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -504,24 +504,6 @@ public void addRawArchiveEntry(ZipArchiveEntry entry, InputStream rawStream) closeCopiedEntry(); } - /** - * Make a copy of this stream with all its settings, but point to a new file. - * Used for scatter/gather operations to make several streams from a user-supplied master. - * - * @param newFile The file to use for the copy of this stream - * @return A copy of this stream - */ - public ZipArchiveOutputStream cloneWith(File newFile) throws IOException { - ZipArchiveOutputStream zos = new ZipArchiveOutputStream(newFile); - zos.setCreateUnicodeExtraFields(createUnicodeExtraFields); - zos.setMethod(method); - zos.setEncoding(encoding); - zos.setFallbackToUTF8(fallbackToUTF8); - zos.setUseLanguageEncodingFlag(useUTF8Flag); - zos.setUseZip64(zip64Mode); - return zos; - } - /** * Ensures all bytes sent to the deflater are written to the stream. 
*/ diff --git a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java index 73316a43c09..fa8bf9e2bad 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java +++ b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java @@ -293,19 +293,6 @@ public boolean test(ZipArchiveEntry zipArchiveEntry) { } }; - - public void testCloneZipOutputStream( ) throws IOException { - File tempDir = createTempDir(); - File fred = new File(tempDir, "fred"); - ZipArchiveOutputStream zipArchiveOutputStream = new ZipArchiveOutputStream(fred); - File frank = new File(tempDir, "frank"); - ZipArchiveOutputStream actual = zipArchiveOutputStream.cloneWith(frank); - zipArchiveOutputStream.close(); - actual.close(); - assertTrue( fred.exists()); - assertTrue( frank.exists()); - } - public void testCopyRawEntriesFromFile () throws IOException { diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java new file mode 100644 index 00000000000..431f6b0c9da --- /dev/null +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.zip; + +import org.apache.commons.compress.utils.IOUtils; +import org.junit.Test; + +import java.io.ByteArrayInputStream; +import java.io.File; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +public class ScatterZipOutputStreamTest { + + @Test + public void putArchiveEntry() throws Exception { + File scatteFile = File.createTempFile("scattertest", ".notzip"); + ScatterZipOutputStream scatterZipOutputStream = ScatterZipOutputStream.fileBased(scatteFile); + final byte[] B_PAYLOAD = "RBBBBBBS".getBytes(); + final byte[] A_PAYLOAD = "XAAY".getBytes(); + + ZipArchiveEntry zab = new ZipArchiveEntry("b.txt"); + scatterZipOutputStream.addArchiveEntry(zab, new ByteArrayInputStream(B_PAYLOAD), ZipArchiveEntry.DEFLATED); + + ZipArchiveEntry zae = new ZipArchiveEntry("a.txt"); + scatterZipOutputStream.addArchiveEntry(zae, new ByteArrayInputStream(A_PAYLOAD), ZipArchiveEntry.DEFLATED); + + File target = File.createTempFile("scattertest", ".zip"); + ZipArchiveOutputStream outputStream = new ZipArchiveOutputStream(target); + scatterZipOutputStream.writeTo( outputStream); + outputStream.close(); + + ZipFile zf = new ZipFile(target); + final ZipArchiveEntry b_entry = zf.getEntries("b.txt").iterator().next(); + assertEquals(8, b_entry.getSize()); + assertArrayEquals(B_PAYLOAD, IOUtils.toByteArray(zf.getInputStream(b_entry))); + + final ZipArchiveEntry a_entry = zf.getEntries("a.txt").iterator().next(); + assertEquals(4, a_entry.getSize()); + 
assertArrayEquals(A_PAYLOAD, IOUtils.toByteArray(zf.getInputStream(a_entry))); + } +} \ No newline at end of file diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/StreamCompressorTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/StreamCompressorTest.java new file mode 100644 index 00000000000..539c3e87bdb --- /dev/null +++ b/src/test/java/org/apache/commons/compress/archivers/zip/StreamCompressorTest.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +package org.apache.commons.compress.archivers.zip; + +import org.junit.Test; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.util.zip.ZipEntry; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +public class StreamCompressorTest { + + @Test + public void storedEntries() throws Exception { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + StreamCompressor sc = StreamCompressor.create( baos); + sc.deflate(new ByteArrayInputStream("A".getBytes()), ZipEntry.STORED); + sc.deflate(new ByteArrayInputStream("BAD".getBytes()), ZipEntry.STORED); + assertEquals(3, sc.getBytesRead()); + assertEquals(3, sc.getBytesWritten()); + assertEquals(344750961, sc.getCrc32()); + sc.deflate(new ByteArrayInputStream("CAFE".getBytes()), ZipEntry.STORED); + assertEquals("ABADCAFE", baos.toString()); + } + + @Test + public void deflatedEntries() throws Exception { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + StreamCompressor sc = StreamCompressor.create( baos); + sc.deflate(new ByteArrayInputStream("AAAAAABBBBBB".getBytes()), ZipEntry.DEFLATED); + assertEquals(12, sc.getBytesRead()); + assertEquals(8, sc.getBytesWritten()); + assertEquals(3299542, sc.getCrc32()); + + final byte[] actuals = baos.toByteArray(); + byte[] expected = new byte[]{115,116,4,1,39,48,0,0}; + // Note that this test really asserts stuff about the java Deflater, which might be a little bit brittle + assertArrayEquals(expected, actuals); + } +} \ No newline at end of file From 56c83a5dd4284f9150e4959a5e266215a292ba28 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Mon, 22 Dec 2014 16:36:19 +0000 Subject: [PATCH 033/189] COMPRESS-297 only mark file as non-closed when it can be read git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1647348 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 5 +++++ 
.../org/apache/commons/compress/archivers/zip/ZipFile.java | 4 ++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 3ddc3e37855..f6b253d15da 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -52,6 +52,11 @@ The type attribute can be add,update,fix,remove. of the API that is officially supported. This will break existing code that uses the old package. + + ZipFile logs a warning in its finalizer when its constructor + has thrown an exception reading the file - for example if the + file doesn't exist. + Date: Tue, 23 Dec 2014 14:31:02 +0000 Subject: [PATCH 034/189] Removed wildcard imports git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1647582 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/archivers/zip/ScatterZipOutputStream.java | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java index 6dd6d021de7..2f673013d68 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java @@ -20,8 +20,13 @@ import org.apache.commons.compress.utils.BoundedInputStream; -import java.io.*; -import java.util.*; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.zip.Deflater; From 1baac88bb027c33ffdcdbf44015c2764597bcd56 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 23 Dec 2014 17:39:21 +0000 Subject: [PATCH 035/189] zap wildcard imports git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1647623 
13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress/archivers/ZipTestCase.java | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java index fa8bf9e2bad..db090201396 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java +++ b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java @@ -18,12 +18,22 @@ */ package org.apache.commons.compress.archivers; -import java.io.*; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; import java.util.ArrayList; import java.util.List; import org.apache.commons.compress.AbstractTestCase; -import org.apache.commons.compress.archivers.zip.*; +import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; +import org.apache.commons.compress.archivers.zip.ZipArchiveEntryPredicate; +import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream; +import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream; +import org.apache.commons.compress.archivers.zip.ZipFile; +import org.apache.commons.compress.archivers.zip.ZipMethod; import org.apache.commons.compress.utils.IOUtils; import org.junit.Assert; From bc685791be4207184ca5823dc0fb7615c129e415 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 23 Dec 2014 17:48:03 +0000 Subject: [PATCH 036/189] record @krosenvold's changes for COMPRESS-295 git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1647626 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index f6b253d15da..2579de7650d 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -52,6 +52,12 @@ The type attribute 
can be add,update,fix,remove. of the API that is officially supported. This will break existing code that uses the old package. + + New methods in ZipArchiveOutputStream and ZipFile allows + entries to be copied from one archive to another without + having to re-compress them. + ZipFile logs a warning in its finalizer when its constructor has thrown an exception reading the file - for example if the From 1418705648202405208e3f4d900dfcc81457cbac Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Wed, 24 Dec 2014 14:00:44 +0000 Subject: [PATCH 037/189] Cut overall object instantiation in half by changing file header generation algorithm, for a 10-15 percent performance improvement when archive contains lots of small files. Also extracted two private methods createLocalFileHeader and createCentralFileHeader in ZipArchiveOutputStream. These may have some interesting additional usages in the near future. This is basically a "tactical refactoring"; scatter/gather had a wonderful performance in the "scatter" algorithm, but spent an *equal* amount of time in the "gather" phase git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1647787 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 12 + .../archivers/zip/GeneralPurposeBit.java | 32 +- .../archivers/zip/ZipArchiveOutputStream.java | 284 ++++++++++-------- .../compress/archivers/zip/ZipLong.java | 25 +- .../compress/archivers/zip/ZipShort.java | 16 +- .../compress/archivers/zip/ZipUtil.java | 37 ++- .../compress/archivers/zip/ZipLongTest.java | 12 + .../compress/archivers/zip/ZipShortTest.java | 12 + .../compress/archivers/zip/ZipUtilTest.java | 16 + 9 files changed, 302 insertions(+), 144 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 2579de7650d..f7eefce6a3a 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -44,6 +44,18 @@ The type attribute can be add,update,fix,remove. 
+ + Cut overall object instantiation in half by changing file + header generation algorithm, for a 10-15 percent performance + improvement. + + Also extracted two private methods createLocalFileHeader + and createCentralFileHeader in ZipArchiveOutputStream. + These may have some interesting additional usages in the + near future. + + Moved the package diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java b/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java index d4b4c3dd392..2325cd75309 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java @@ -158,15 +158,29 @@ int getNumberOfShannonFanoTrees() { * Encodes the set bits in a form suitable for ZIP archives. */ public byte[] encode() { - return - ZipShort.getBytes((dataDescriptorFlag ? DATA_DESCRIPTOR_FLAG : 0) - | - (languageEncodingFlag ? UFT8_NAMES_FLAG : 0) - | - (encryptionFlag ? ENCRYPTION_FLAG : 0) - | - (strongEncryptionFlag ? STRONG_ENCRYPTION_FLAG : 0) - ); + byte[] result = new byte[2]; + encode(result, 0); + return result; + } + + + /** + * Encodes the set bits in a form suitable for ZIP archives. + * + * @param buf the output buffer + * @param offset + * The offset within the output buffer of the first byte to be written. + * must be non-negative and no larger than buf.length-2 + */ + public void encode(byte[] buf, int offset) { + ZipShort.putShort((dataDescriptorFlag ? DATA_DESCRIPTOR_FLAG : 0) + | + (languageEncodingFlag ? UFT8_NAMES_FLAG : 0) + | + (encryptionFlag ? ENCRYPTION_FLAG : 0) + | + (strongEncryptionFlag ? 
STRONG_ENCRYPTION_FLAG : 0) + , buf, offset); } /** diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index b2432169cdc..77de11de556 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -24,7 +24,6 @@ import java.io.OutputStream; import java.io.RandomAccessFile; import java.nio.ByteBuffer; -import java.util.Enumeration; import java.util.HashMap; import java.util.LinkedList; import java.util.List; @@ -45,6 +44,8 @@ import static org.apache.commons.compress.archivers.zip.ZipConstants.ZIP64_MAGIC; import static org.apache.commons.compress.archivers.zip.ZipConstants.ZIP64_MAGIC_SHORT; import static org.apache.commons.compress.archivers.zip.ZipConstants.ZIP64_MIN_VERSION; +import static org.apache.commons.compress.archivers.zip.ZipLong.putLong; +import static org.apache.commons.compress.archivers.zip.ZipShort.putShort; /** * Reimplementation of {@link java.util.zip.ZipOutputStream @@ -77,6 +78,34 @@ public class ZipArchiveOutputStream extends ArchiveOutputStream { static final int BUFFER_SIZE = 512; + private static final int LFH_SIG_OFFSET = 0; + private static final int LFH_VERSION_NEEDED_OFFSET = 4; + private static final int LFH_GPB_OFFSET = 6; + private static final int LFH_METHOD_OFFSET = 8; + private static final int LFH_TIME_OFFSET = 10; + private static final int LFH_CRC_OFFSET = 14; + private static final int LFH_COMPRESSED_SIZE_OFFSET = 18; + private static final int LFH_ORIGINAL_SIZE_OFFSET = 22; + private static final int LFH_FILENAME_LENGTH_OFFSET = 26; + private static final int LFH_EXTRA_LENGTH_OFFSET = 28; + private static final int LFH_FILENAME_OFFSET = 30; + private static final int CFH_SIG_OFFSET = 0; + private static final int CFH_VERSION_MADE_BY_OFFSET = 4; + private static final int 
CFH_VERSION_NEEDED_OFFSET = 6; + private static final int CFH_GPB_OFFSET = 8; + private static final int CFH_METHOD_OFFSET = 10; + private static final int CFH_TIME_OFFSET = 12; + private static final int CFH_CRC_OFFSET = 16; + private static final int CFH_COMPRESSED_SIZE_OFFSET = 20; + private static final int CFH_ORIGINAL_SIZE_OFFSET = 24; + private static final int CFH_FILENAME_LENGTH_OFFSET = 28; + private static final int CFH_EXTRA_LENGTH_OFFSET = 30; + private static final int CFH_COMMENT_LENGTH_OFFSET = 32; + private static final int CFH_DISK_NUMBER_OFFSET = 34; + private static final int CFH_INTERNAL_ATTRIBUTES_OFFSET = 36; + private static final int CFH_EXTERNAL_ATTRIBUTES_OFFSET = 38; + private static final int CFH_LFH_OFFSET = 42; + private static final int CFH_FILENAME_OFFSET = 46; /** indicates if this archive is finished. protected for use in Jar implementation */ protected boolean finished = false; @@ -663,7 +692,7 @@ public void putArchiveEntry(ArchiveEntry archiveEntry) throws IOException { def.setLevel(level); hasCompressionLevelChanged = false; } - writeLocalFileHeader(entry.entry); + writeLocalFileHeader((ZipArchiveEntry)archiveEntry); } /** @@ -931,7 +960,6 @@ protected final void deflate() throws IOException { * @throws IOException on error */ protected void writeLocalFileHeader(ZipArchiveEntry ze) throws IOException { - boolean encodable = zipEncoding.canEncode(ze.getName()); ByteBuffer name = getName(ze); @@ -939,78 +967,79 @@ protected void writeLocalFileHeader(ZipArchiveEntry ze) throws IOException { addUnicodeExtraFields(ze, encodable, name); } - offsets.put(ze, Long.valueOf(written)); + final byte[] localHeader = createLocalFileHeader(ze, name, encodable); + offsets.put(ze, written); + entry.localDataStart = written + 14; // Right before crc + writeOut( localHeader); + written += localHeader.length; + entry.dataStart = written; + } + + + private byte[] createLocalFileHeader(ZipArchiveEntry ze, ByteBuffer name, boolean encodable) { - 
writeOut(LFH_SIG); - written += WORD; + byte[] extra = ze.getLocalFileDataExtra(); + int len= LFH_FILENAME_OFFSET + name.limit() + extra.length; + byte[] buf = new byte[len]; + + System.arraycopy(LFH_SIG, 0, buf, LFH_SIG_OFFSET, WORD); //store method in local variable to prevent multiple method calls final int zipMethod = ze.getMethod(); - writeVersionNeededToExtractAndGeneralPurposeBits(zipMethod, - !encodable - && fallbackToUTF8, - hasZip64Extra(ze)); - written += WORD; + putShort(versionNeededToExtract(zipMethod, hasZip64Extra(ze)), buf, LFH_VERSION_NEEDED_OFFSET); + + GeneralPurposeBit generalPurposeBit = getGeneralPurposeBits(zipMethod, + !encodable + && fallbackToUTF8 + ); + generalPurposeBit.encode(buf, LFH_GPB_OFFSET); // compression method - writeOut(ZipShort.getBytes(zipMethod)); - written += SHORT; + putShort(zipMethod, buf, LFH_METHOD_OFFSET); - // last mod. time and date - writeOut(ZipUtil.toDosTime(ze.getTime())); - written += WORD; + ZipUtil.toDosTime(ze.getTime(), buf, LFH_TIME_OFFSET); // CRC // compressed length // uncompressed length - entry.localDataStart = written; if (zipMethod == DEFLATED || raf != null) { - writeOut(LZERO); + System.arraycopy(LZERO, 0, buf, LFH_CRC_OFFSET, WORD); if (hasZip64Extra(entry.entry)) { // point to ZIP64 extended information extra field for // sizes, may get rewritten once sizes are known if // stream is seekable - writeOut(ZipLong.ZIP64_MAGIC.getBytes()); - writeOut(ZipLong.ZIP64_MAGIC.getBytes()); + ZipLong.ZIP64_MAGIC.putLong(buf, LFH_COMPRESSED_SIZE_OFFSET); + ZipLong.ZIP64_MAGIC.putLong(buf, LFH_ORIGINAL_SIZE_OFFSET); } else { - writeOut(LZERO); - writeOut(LZERO); + System.arraycopy(LZERO, 0, buf, LFH_COMPRESSED_SIZE_OFFSET, WORD); + System.arraycopy(LZERO, 0, buf, LFH_ORIGINAL_SIZE_OFFSET, WORD); } } else { - writeOut(ZipLong.getBytes(ze.getCrc())); - byte[] size = ZipLong.ZIP64_MAGIC.getBytes(); + putLong(ze.getCrc(), buf, LFH_CRC_OFFSET); if (!hasZip64Extra(ze)) { - size = ZipLong.getBytes(ze.getSize()); + 
putLong(ze.getSize(), buf, LFH_COMPRESSED_SIZE_OFFSET); + putLong(ze.getSize(), buf, LFH_ORIGINAL_SIZE_OFFSET); + } else { + ZipLong.ZIP64_MAGIC.putLong(buf, LFH_COMPRESSED_SIZE_OFFSET); + ZipLong.ZIP64_MAGIC.putLong(buf, LFH_ORIGINAL_SIZE_OFFSET); } - writeOut(size); - writeOut(size); } - // CheckStyle:MagicNumber OFF - written += 12; - // CheckStyle:MagicNumber ON - // file name length - writeOut(ZipShort.getBytes(name.limit())); - written += SHORT; + putShort(name.limit(), buf, LFH_FILENAME_LENGTH_OFFSET); // extra field length - byte[] extra = ze.getLocalFileDataExtra(); - writeOut(ZipShort.getBytes(extra.length)); - written += SHORT; + putShort(extra.length, buf, LFH_EXTRA_LENGTH_OFFSET); // file name - writeOut(name.array(), name.arrayOffset(), - name.limit() - name.position()); - written += name.limit(); + final int nameLen = name.limit() - name.position(); + System.arraycopy( name.array(), name.arrayOffset(), buf, LFH_FILENAME_OFFSET, nameLen); - // extra field - writeOut(extra); - written += extra.length; - - entry.dataStart = written; + System.arraycopy(extra, 0, buf, LFH_FILENAME_OFFSET + nameLen, extra.length); + return buf; } + /** * Adds UnicodeExtra fields for name and file comment if mode is * ALWAYS or the data cannot be encoded using the configured @@ -1078,8 +1107,6 @@ protected void writeDataDescriptor(ZipArchiveEntry ze) throws IOException { * Zip64Mode#Never}. 
*/ protected void writeCentralFileHeader(ZipArchiveEntry ze) throws IOException { - writeOut(CFH_SIG); - written += WORD; final long lfhOffset = offsets.get(ze).longValue(); final boolean needsZip64Extra = hasZip64Extra(ze) @@ -1095,97 +1122,114 @@ protected void writeCentralFileHeader(ZipArchiveEntry ze) throws IOException { .ARCHIVE_TOO_BIG_MESSAGE); } + handleZip64Extra(ze, lfhOffset, needsZip64Extra); + byte[] centralFileHeader = createCentralFileHeader(ze, getName(ze), lfhOffset); + writeOut(centralFileHeader); + written += centralFileHeader.length; + } + /** + * Writes the central file header entry. + * @param ze the entry to write + * @param name The encoded name + * @param lfhOffset Local file header offset for this file + * @throws IOException on error + * @throws Zip64RequiredException if the archive's size exceeds 4 + * GByte and {@link Zip64Mode #setUseZip64} is {@link + * Zip64Mode#Never}. + */ + private byte[] createCentralFileHeader(ZipArchiveEntry ze, ByteBuffer name, long lfhOffset) throws IOException { + byte[] extra = ze.getCentralDirectoryExtra(); + + // file comment length + String comm = ze.getComment(); + if (comm == null) { + comm = ""; + } + + ByteBuffer commentB = getEntryEncoding(ze).encode(comm); + int len= CFH_FILENAME_OFFSET + name.limit() + extra.length + commentB.limit(); + byte[] buf = new byte[len]; + + System.arraycopy(CFH_SIG, 0, buf, CFH_SIG_OFFSET, WORD); + + final boolean needsZip64Extra = hasZip64Extra(ze) + || ze.getCompressedSize() >= ZIP64_MAGIC + || ze.getSize() >= ZIP64_MAGIC + || lfhOffset >= ZIP64_MAGIC; + + if (needsZip64Extra && zip64Mode == Zip64Mode.Never) { + // must be the offset that is too big, otherwise an + // exception would have been throw in putArchiveEntry or + // closeArchiveEntry + throw new Zip64RequiredException(Zip64RequiredException + .ARCHIVE_TOO_BIG_MESSAGE); + } + + // todo: Do in caller ! 
handleZip64Extra(ze, lfhOffset, needsZip64Extra); + // version made by // CheckStyle:MagicNumber OFF - writeOut(ZipShort.getBytes((ze.getPlatform() << 8) | - (!hasUsedZip64 ? DATA_DESCRIPTOR_MIN_VERSION - : ZIP64_MIN_VERSION))); - written += SHORT; + putShort((ze.getPlatform() << 8) | (!hasUsedZip64 ? DATA_DESCRIPTOR_MIN_VERSION : ZIP64_MIN_VERSION), + buf, CFH_VERSION_MADE_BY_OFFSET); final int zipMethod = ze.getMethod(); final boolean encodable = zipEncoding.canEncode(ze.getName()); - writeVersionNeededToExtractAndGeneralPurposeBits(zipMethod, - !encodable - && fallbackToUTF8, - needsZip64Extra); - written += WORD; + putShort(versionNeededToExtract(zipMethod, needsZip64Extra), buf, CFH_VERSION_NEEDED_OFFSET); + getGeneralPurposeBits(zipMethod, !encodable && fallbackToUTF8).encode(buf, CFH_GPB_OFFSET); // compression method - writeOut(ZipShort.getBytes(zipMethod)); - written += SHORT; + putShort(zipMethod, buf, CFH_METHOD_OFFSET); + // last mod. time and date - writeOut(ZipUtil.toDosTime(ze.getTime())); - written += WORD; + ZipUtil.toDosTime(ze.getTime(), buf, CFH_TIME_OFFSET); // CRC // compressed length // uncompressed length - writeOut(ZipLong.getBytes(ze.getCrc())); + putLong(ze.getCrc(), buf, CFH_CRC_OFFSET); if (ze.getCompressedSize() >= ZIP64_MAGIC - || ze.getSize() >= ZIP64_MAGIC) { - writeOut(ZipLong.ZIP64_MAGIC.getBytes()); - writeOut(ZipLong.ZIP64_MAGIC.getBytes()); + || ze.getSize() >= ZIP64_MAGIC) { + ZipLong.ZIP64_MAGIC.putLong(buf, CFH_COMPRESSED_SIZE_OFFSET); + ZipLong.ZIP64_MAGIC.putLong(buf, CFH_ORIGINAL_SIZE_OFFSET); } else { - writeOut(ZipLong.getBytes(ze.getCompressedSize())); - writeOut(ZipLong.getBytes(ze.getSize())); + putLong(ze.getCompressedSize(), buf, CFH_COMPRESSED_SIZE_OFFSET); + putLong(ze.getSize(), buf, CFH_ORIGINAL_SIZE_OFFSET); } - // CheckStyle:MagicNumber OFF - written += 12; - // CheckStyle:MagicNumber ON - - ByteBuffer name = getName(ze); - writeOut(ZipShort.getBytes(name.limit())); - written += SHORT; + 
putShort(name.limit(), buf, CFH_FILENAME_LENGTH_OFFSET); // extra field length - byte[] extra = ze.getCentralDirectoryExtra(); - writeOut(ZipShort.getBytes(extra.length)); - written += SHORT; - - // file comment length - String comm = ze.getComment(); - if (comm == null) { - comm = ""; - } - - ByteBuffer commentB = getEntryEncoding(ze).encode(comm); + putShort(extra.length, buf, CFH_EXTRA_LENGTH_OFFSET); - writeOut(ZipShort.getBytes(commentB.limit())); - written += SHORT; + putShort(commentB.limit(), buf, CFH_COMMENT_LENGTH_OFFSET); // disk number start - writeOut(ZERO); - written += SHORT; + System.arraycopy(ZERO, 0, buf, CFH_DISK_NUMBER_OFFSET, SHORT); // internal file attributes - writeOut(ZipShort.getBytes(ze.getInternalAttributes())); - written += SHORT; + putShort(ze.getInternalAttributes(), buf, CFH_INTERNAL_ATTRIBUTES_OFFSET); // external file attributes - writeOut(ZipLong.getBytes(ze.getExternalAttributes())); - written += WORD; + putLong(ze.getExternalAttributes(), buf, CFH_EXTERNAL_ATTRIBUTES_OFFSET); // relative offset of LFH - writeOut(ZipLong.getBytes(Math.min(lfhOffset, ZIP64_MAGIC))); - written += WORD; + putLong(Math.min(lfhOffset, ZIP64_MAGIC), buf, CFH_LFH_OFFSET); // file name - writeOut(name.array(), name.arrayOffset(), - name.limit() - name.position()); - written += name.limit(); + System.arraycopy(name.array(), name.arrayOffset(), buf, CFH_FILENAME_OFFSET, name.limit() - name.position()); + + int extraStart = CFH_FILENAME_OFFSET + name.limit(); + System.arraycopy(extra, 0, buf, extraStart, extra.length); - // extra field - writeOut(extra); - written += extra.length; + int commentLength = commentB.limit() - commentB.position(); + int commentStart = extraStart + commentLength; // file comment - writeOut(commentB.array(), commentB.arrayOffset(), - commentB.limit() - commentB.position()); - written += commentB.limit(); + System.arraycopy(commentB.array(), commentB.arrayOffset(), buf, commentStart, commentLength); + return buf; } /** @@ -1355,35 
+1399,31 @@ private void deflateUntilInputIsNeeded() throws IOException { } } - private void writeVersionNeededToExtractAndGeneralPurposeBits(final int - zipMethod, - final boolean - utfFallback, - final boolean - zip64) - throws IOException { - - // CheckStyle:MagicNumber OFF - int versionNeededToExtract = INITIAL_VERSION; + private GeneralPurposeBit getGeneralPurposeBits(final int zipMethod, final boolean utfFallback) { GeneralPurposeBit b = new GeneralPurposeBit(); b.useUTF8ForNames(useUTF8Flag || utfFallback); - if (zipMethod == DEFLATED && raf == null) { - // requires version 2 as we are going to store length info - // in the data descriptor - versionNeededToExtract = DATA_DESCRIPTOR_MIN_VERSION; + if (isDeflatedToOutputStream(zipMethod)) { b.useDataDescriptor(true); } + return b; + } + + private int versionNeededToExtract(final int zipMethod, final boolean zip64) { if (zip64) { - versionNeededToExtract = ZIP64_MIN_VERSION; + return ZIP64_MIN_VERSION; } - // CheckStyle:MagicNumber ON + // requires version 2 as we are going to store length info + // in the data descriptor + return (isDeflatedToOutputStream(zipMethod)) ? + DATA_DESCRIPTOR_MIN_VERSION : + INITIAL_VERSION; + } - // version needed to extract - writeOut(ZipShort.getBytes(versionNeededToExtract)); - // general purpose bit flag - writeOut(b.encode()); + private boolean isDeflatedToOutputStream(int zipMethod) { + return zipMethod == DEFLATED && raf == null; } + /** * Creates a new zip entry taking some information from the given * file and using the provided name. 
diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipLong.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipLong.java index c3815441af7..bf717fc8477 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipLong.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipLong.java @@ -134,13 +134,30 @@ public long getValue() { */ public static byte[] getBytes(long value) { byte[] result = new byte[WORD]; - result[0] = (byte) ((value & BYTE_MASK)); - result[BYTE_1] = (byte) ((value & BYTE_1_MASK) >> BYTE_1_SHIFT); - result[BYTE_2] = (byte) ((value & BYTE_2_MASK) >> BYTE_2_SHIFT); - result[BYTE_3] = (byte) ((value & BYTE_3_MASK) >> BYTE_3_SHIFT); + putLong(value, result, 0); return result; } + /** + * put the value as four bytes in big endian byte order. + * @param value the Java long to convert to bytes + * @param buf the output buffer + * @param offset + * The offset within the output buffer of the first byte to be written. + * must be non-negative and no larger than buf.length-4 + */ + + public static void putLong(long value, byte[] buf, int offset) { + buf[offset++] = (byte) ((value & BYTE_MASK)); + buf[offset++] = (byte) ((value & BYTE_1_MASK) >> BYTE_1_SHIFT); + buf[offset++] = (byte) ((value & BYTE_2_MASK) >> BYTE_2_SHIFT); + buf[offset] = (byte) ((value & BYTE_3_MASK) >> BYTE_3_SHIFT); + } + + public void putLong(byte[] buf, int offset) { + putLong(value, buf, offset); + } + /** * Helper method to get the value as a Java long from four bytes starting at given array offset * @param bytes the array of bytes diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipShort.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipShort.java index b74db86ab8e..53d6c98409f 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipShort.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipShort.java @@ -85,11 +85,23 @@ public int getValue() { */ public 
static byte[] getBytes(int value) { byte[] result = new byte[2]; - result[0] = (byte) (value & BYTE_MASK); - result[1] = (byte) ((value & BYTE_1_MASK) >> BYTE_1_SHIFT); + putShort(value, result, 0); return result; } + /** + * put the value as two bytes in big endian byte order. + * @param value the Java int to convert to bytes + * @param buf the output buffer + * @param offset + * The offset within the output buffer of the first byte to be written. + * must be non-negative and no larger than buf.length-2 + */ + public static void putShort(int value, byte[] buf, int offset) { + buf[offset] = (byte) (value & BYTE_MASK); + buf[offset+1] = (byte) ((value & BYTE_1_MASK) >> BYTE_1_SHIFT); + } + /** * Helper method to get the value as a java int from two bytes starting at given array offset * @param bytes the array of bytes diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipUtil.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipUtil.java index a138f6e4056..8ba54454378 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipUtil.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipUtil.java @@ -51,21 +51,38 @@ public static ZipLong toDosTime(Date time) { * @return the date as a byte array */ public static byte[] toDosTime(long t) { + byte[] result = new byte[4]; + toDosTime(t, result, 0); + return result; + } + + /** + * Convert a Date object to a DOS date/time field. + * + *

Stolen from InfoZip's fileio.c

+ * @param t number of milliseconds since the epoch + * @param buf the output buffer + * @param offset + * The offset within the output buffer of the first byte to be written. + * must be non-negative and no larger than buf.length-4 + */ + public static void toDosTime(long t, byte[] buf, int offset) { Calendar c = Calendar.getInstance(); c.setTimeInMillis(t); int year = c.get(Calendar.YEAR); if (year < 1980) { - return copy(DOS_TIME_MIN); // stop callers from changing the array + System.arraycopy(DOS_TIME_MIN, 0, buf, offset, DOS_TIME_MIN.length);// stop callers from changing the array + return; } int month = c.get(Calendar.MONTH) + 1; long value = ((year - 1980) << 25) - | (month << 21) - | (c.get(Calendar.DAY_OF_MONTH) << 16) - | (c.get(Calendar.HOUR_OF_DAY) << 11) - | (c.get(Calendar.MINUTE) << 5) - | (c.get(Calendar.SECOND) >> 1); - return ZipLong.getBytes(value); + | (month << 21) + | (c.get(Calendar.DAY_OF_MONTH) << 16) + | (c.get(Calendar.HOUR_OF_DAY) << 11) + | (c.get(Calendar.MINUTE) << 5) + | (c.get(Calendar.SECOND) >> 1); + ZipLong.putLong(value, buf, offset); } /** @@ -276,6 +293,12 @@ static byte[] copy(byte[] from) { } return null; } + static void copy(byte[] from, byte[] to, int offset) { + if (from != null) { + System.arraycopy(from, 0, to, offset, from.length); + } + } + /** * Whether this library is able to read or write the given entry. diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipLongTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipLongTest.java index 7ee087a429b..21a4612d7a4 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipLongTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipLongTest.java @@ -43,6 +43,18 @@ public void testToBytes() { assertEquals("fourth byte getBytes", 0x12, result[3]); } + /** + * Test conversion to bytes. 
+ */ + public void testPut() { + byte[] arr = new byte[5]; + ZipLong.putLong(0x12345678, arr, 1); + assertEquals("first byte getBytes", 0x78, arr[1]); + assertEquals("second byte getBytes", 0x56, arr[2]); + assertEquals("third byte getBytes", 0x34, arr[3]); + assertEquals("fourth byte getBytes", 0x12, arr[4]); + } + /** * Test conversion from bytes. */ diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipShortTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipShortTest.java index b2bf16d303a..8e77ff585d2 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipShortTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipShortTest.java @@ -41,6 +41,18 @@ public void testToBytes() { assertEquals("second byte getBytes", 0x12, result[1]); } + + /** + * Test conversion to bytes. + */ + public void testPut() { + byte[] arr = new byte[3]; + ZipShort.putShort(0x1234, arr, 1); + assertEquals("first byte getBytes", 0x34, arr[1]); + assertEquals("second byte getBytes", 0x12, arr[2]); + } + + /** * Test conversion from bytes. 
*/ diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java index 5f635fb037b..6111a5cc31c 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java @@ -86,6 +86,22 @@ public void testMinTime(){ assertEquals(b10,b2[0]); // first byte should still be the same } + public void testOutsideCalendar(){ + byte[] b1 = ZipUtil.toDosTime(160441200000L); // 1.1..1975 + assertEquals(0, b1[0]); + assertEquals(33, b1[1]); + assertEquals(0, b1[2]); + assertEquals(0, b1[3]); + } + + public void testInsideCalendar(){ + byte[] b1 = ZipUtil.toDosTime(476096400000L); // 1.1.1985, 10:00 am + assertEquals(0, b1[0]); + assertEquals(80, b1[1]); + assertEquals(65, b1[2]); + assertEquals(10, b1[3]); + } + public void testReverse() { byte[][] bTest = new byte[6][]; bTest[0] = new byte[]{}; From 6fd3c9fe3521e63f7e0d0b19dd260462a6001301 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Wed, 24 Dec 2014 14:47:35 +0000 Subject: [PATCH 038/189] Removed testcase added in previous commit. 
It seems like OpenJDK calendar operations are somewhat different from sun jdk, so this is not a viable test to make git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1647798 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress/archivers/zip/ZipUtilTest.java | 8 -------- 1 file changed, 8 deletions(-) diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java index 6111a5cc31c..c2f1abbc528 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java @@ -94,14 +94,6 @@ public void testOutsideCalendar(){ assertEquals(0, b1[3]); } - public void testInsideCalendar(){ - byte[] b1 = ZipUtil.toDosTime(476096400000L); // 1.1.1985, 10:00 am - assertEquals(0, b1[0]); - assertEquals(80, b1[1]); - assertEquals(65, b1[2]); - assertEquals(10, b1[3]); - } - public void testReverse() { byte[][] bTest = new byte[6][]; bTest[0] = new byte[]{}; From 20403a5ab39f4a69d4149a485b23a002e6dde2c8 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 26 Dec 2014 11:33:55 +0000 Subject: [PATCH 039/189] upgrade commons-parent git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1647958 13f79535-47bb-0310-9956-ffa450edef68 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index f9bd7a45f9c..222c8d1e98b 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ org.apache.commons commons-parent - 35 + 36 org.apache.commons From 090e7cc2c6059e22016442c851f3e3dfd955aedd Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 26 Dec 2014 16:21:03 +0000 Subject: [PATCH 040/189] add back test but take UTC offset into account git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1647993 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress/archivers/zip/ZipUtilTest.java | 
11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java index c2f1abbc528..cf7d13459cb 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java @@ -24,6 +24,7 @@ import java.util.Arrays; import java.util.Calendar; import java.util.Date; +import java.util.TimeZone; public class ZipUtilTest extends TestCase { @@ -94,6 +95,16 @@ public void testOutsideCalendar(){ assertEquals(0, b1[3]); } + public void testInsideCalendar(){ + TimeZone tz = TimeZone.getDefault(); + long date = 476092800000L; // 1.1.1985, 09:00 am GMT + byte[] b1 = ZipUtil.toDosTime(date + tz.getOffset(date)); + assertEquals(0, b1[0]); + assertEquals(80, b1[1]); + assertEquals(65, b1[2]); + assertEquals(10, b1[3]); + } + public void testReverse() { byte[][] bTest = new byte[6][]; bTest[0] = new byte[]{}; From 5ad7e43cc3d3f0c4e7069d928b64d5d2adf28288 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 26 Dec 2014 17:30:46 +0000 Subject: [PATCH 041/189] so my assumption about time zones was wrong git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1647999 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress/archivers/zip/ZipUtilTest.java | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java index cf7d13459cb..567ddd169c5 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java @@ -95,16 +95,6 @@ public void testOutsideCalendar(){ assertEquals(0, b1[3]); } - public void testInsideCalendar(){ - TimeZone tz = TimeZone.getDefault(); - long date = 
476092800000L; // 1.1.1985, 09:00 am GMT - byte[] b1 = ZipUtil.toDosTime(date + tz.getOffset(date)); - assertEquals(0, b1[0]); - assertEquals(80, b1[1]); - assertEquals(65, b1[2]); - assertEquals(10, b1[3]); - } - public void testReverse() { byte[][] bTest = new byte[6][]; bTest[0] = new byte[]{}; From ace950bc8268436f2221464897058e3e0924569e Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 26 Dec 2014 17:35:38 +0000 Subject: [PATCH 042/189] amended for time zone in the wrong direction git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1648000 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress/archivers/zip/ZipUtilTest.java | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java index 567ddd169c5..ff29f2eb8ad 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java @@ -95,6 +95,16 @@ public void testOutsideCalendar(){ assertEquals(0, b1[3]); } + public void testInsideCalendar(){ + TimeZone tz = TimeZone.getDefault(); + long date = 476096400000L; // 1.1.1985, 10:00 am GMT + byte[] b1 = ZipUtil.toDosTime(date - tz.getOffset(date)); + assertEquals(0, b1[0]); + assertEquals(72, b1[1]); + assertEquals(65, b1[2]); + assertEquals(10, b1[3]); + } + public void testReverse() { byte[][] bTest = new byte[6][]; bTest[0] = new byte[]{}; From 5f99a51d41c0e8ab7359608806ce977d9419c248 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sun, 28 Dec 2014 10:32:25 +0000 Subject: [PATCH 043/189] don't duplicate the 'do I need zip64' check when creating central directory headers git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1648164 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/ZipArchiveOutputStream.java | 25 ++++--------------- 1 file changed, 
5 insertions(+), 20 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index 77de11de556..fe2e519a7ca 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -1125,21 +1125,21 @@ protected void writeCentralFileHeader(ZipArchiveEntry ze) throws IOException { handleZip64Extra(ze, lfhOffset, needsZip64Extra); - byte[] centralFileHeader = createCentralFileHeader(ze, getName(ze), lfhOffset); + byte[] centralFileHeader = createCentralFileHeader(ze, getName(ze), lfhOffset, + needsZip64Extra); writeOut(centralFileHeader); written += centralFileHeader.length; } + /** * Writes the central file header entry. * @param ze the entry to write * @param name The encoded name * @param lfhOffset Local file header offset for this file * @throws IOException on error - * @throws Zip64RequiredException if the archive's size exceeds 4 - * GByte and {@link Zip64Mode #setUseZip64} is {@link - * Zip64Mode#Never}. 
*/ - private byte[] createCentralFileHeader(ZipArchiveEntry ze, ByteBuffer name, long lfhOffset) throws IOException { + private byte[] createCentralFileHeader(ZipArchiveEntry ze, ByteBuffer name, long lfhOffset, + boolean needsZip64Extra) throws IOException { byte[] extra = ze.getCentralDirectoryExtra(); // file comment length @@ -1154,21 +1154,6 @@ private byte[] createCentralFileHeader(ZipArchiveEntry ze, ByteBuffer name, long System.arraycopy(CFH_SIG, 0, buf, CFH_SIG_OFFSET, WORD); - final boolean needsZip64Extra = hasZip64Extra(ze) - || ze.getCompressedSize() >= ZIP64_MAGIC - || ze.getSize() >= ZIP64_MAGIC - || lfhOffset >= ZIP64_MAGIC; - - if (needsZip64Extra && zip64Mode == Zip64Mode.Never) { - // must be the offset that is too big, otherwise an - // exception would have been throw in putArchiveEntry or - // closeArchiveEntry - throw new Zip64RequiredException(Zip64RequiredException - .ARCHIVE_TOO_BIG_MESSAGE); - } - - // todo: Do in caller ! handleZip64Extra(ze, lfhOffset, needsZip64Extra); - // version made by // CheckStyle:MagicNumber OFF putShort((ze.getPlatform() << 8) | (!hasUsedZip64 ? 
DATA_DESCRIPTOR_MIN_VERSION : ZIP64_MIN_VERSION), From efb1ca8041fd3aca48a12ee2fe87d64cdf91410c Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Mon, 29 Dec 2014 12:24:02 +0000 Subject: [PATCH 044/189] extract ZIP-scatter backing store responsibility into an interface git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1648317 13f79535-47bb-0310-9956-ffa450edef68 --- .../FileBasedScatterGatherBackingStore.java | 53 +++++++++++++ .../zip/ScatterGatherBackingStore.java | 49 ++++++++++++ .../archivers/zip/ScatterZipOutputStream.java | 76 +++++-------------- .../archivers/zip/StreamCompressor.java | 36 +++++++++ 4 files changed, 156 insertions(+), 58 deletions(-) create mode 100644 src/main/java/org/apache/commons/compress/archivers/zip/FileBasedScatterGatherBackingStore.java create mode 100644 src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStore.java diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/FileBasedScatterGatherBackingStore.java b/src/main/java/org/apache/commons/compress/archivers/zip/FileBasedScatterGatherBackingStore.java new file mode 100644 index 00000000000..cafa614aff9 --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/zip/FileBasedScatterGatherBackingStore.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.zip; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; + +/** + * ScatterGatherBackingStore that is backed by a file. + * + * @since 1.10 + */ +class FileBasedScatterGatherBackingStore implements ScatterGatherBackingStore { + private final File target; + private final FileOutputStream os; + + FileBasedScatterGatherBackingStore(File target) throws FileNotFoundException { + this.target = target; + os = new FileOutputStream(target); + } + + public InputStream getInputStream() throws IOException { + return new FileInputStream(target); + } + + @SuppressWarnings("ResultOfMethodCallIgnored") + public void close() throws IOException { + os.close(); + } + + public void writeOut(byte[] data, int offset, int length) throws IOException { + os.write(data, offset, length); + } +} diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStore.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStore.java new file mode 100644 index 00000000000..84a9306434e --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStore.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.zip; + +import java.io.Closeable; +import java.io.IOException; +import java.io.InputStream; + +/** + * Abstraction over a scatter-output zip archives can be written to + * with a method to gather all content from an InputStream later on. + * + * @since 1.10 + */ +interface ScatterGatherBackingStore extends Closeable { + + /** + * An input stream that contains the scattered payload + * + * @return An InputStream, should be closed by the caller of this method. + * @throws IOException when something fails + */ + InputStream getInputStream() throws IOException; + + /** + * Writes a piece of payload. 
+ * + * @param data the data to write + * @param offset offset inside data to start writing from + * @param length the amount of data to write + * @throws IOException when something fails + */ + void writeOut(byte[] data, int offset, int length) throws IOException ; +} diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java index 2f673013d68..9a01a96205e 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java @@ -44,8 +44,10 @@ * * @since 1.10 */ -public abstract class ScatterZipOutputStream { +public class ScatterZipOutputStream { private final Queue items = new ConcurrentLinkedQueue(); + private final ScatterGatherBackingStore backingStore; + private final StreamCompressor streamCompressor; private static class CompressedEntry { final ZipArchiveEntry entry; @@ -71,6 +73,12 @@ public ZipArchiveEntry transferToArchiveEntry(){ } } + public ScatterZipOutputStream(ScatterGatherBackingStore backingStore, + StreamCompressor streamCompressor) { + this.backingStore = backingStore; + this.streamCompressor = streamCompressor; + } + /** * Add an archive entry to this scatter stream. 
* @@ -80,10 +88,11 @@ public ZipArchiveEntry transferToArchiveEntry(){ * @throws IOException If writing fails */ public void addArchiveEntry(ZipArchiveEntry zipArchiveEntry, InputStream payload, int method) throws IOException { - StreamCompressor sc = getStreamCompressor(); - sc.deflate(payload, method); + streamCompressor.deflate(payload, method); payload.close(); - items.add(new CompressedEntry(zipArchiveEntry, sc.getCrc32(), sc.getBytesWritten(), method, sc.getBytesRead())); + items.add(new CompressedEntry(zipArchiveEntry, streamCompressor.getCrc32(), + streamCompressor.getBytesWritten(), method, + streamCompressor.getBytesRead())); } /** @@ -93,8 +102,8 @@ public void addArchiveEntry(ZipArchiveEntry zipArchiveEntry, InputStream payload * @throws IOException If writing fails */ public void writeTo(ZipArchiveOutputStream target) throws IOException { - closeBackingStorage(); - InputStream data = getInputStream(); + backingStore.close(); + InputStream data = backingStore.getInputStream(); for (CompressedEntry compressedEntry : items) { final BoundedInputStream rawStream = new BoundedInputStream(data, compressedEntry.compressedSize); target.addRawArchiveEntry(compressedEntry.transferToArchiveEntry(), rawStream); @@ -103,30 +112,6 @@ public void writeTo(ZipArchiveOutputStream target) throws IOException { data.close(); } - /** - * Returns a stream compressor that can be used to compress the data. - *

- * This method is expected to return the same instance every time. - * - * @return The stream compressor - * @throws FileNotFoundException - */ - protected abstract StreamCompressor getStreamCompressor() throws FileNotFoundException; - - /** - * An input stream that contains the scattered payload - * - * @return An InputStream, should be closed by the caller of this method. - * @throws IOException when something fails - */ - protected abstract InputStream getInputStream() throws IOException; - - - /** - * Closes whatever storage is backing this scatter stream - */ - protected abstract void closeBackingStorage() throws IOException; - /** * Create a ScatterZipOutputStream with default compression level that is backed by a file * @@ -147,33 +132,8 @@ public static ScatterZipOutputStream fileBased(File file) throws FileNotFoundExc * @throws FileNotFoundException */ public static ScatterZipOutputStream fileBased(File file, int compressionLevel) throws FileNotFoundException { - return new FileScatterOutputStream(file, compressionLevel); - } - - private static class FileScatterOutputStream extends ScatterZipOutputStream { - final File target; - private StreamCompressor streamDeflater; - final FileOutputStream os; - - FileScatterOutputStream(File target, int compressionLevel) throws FileNotFoundException { - this.target = target; - os = new FileOutputStream(target); - streamDeflater = StreamCompressor.create(compressionLevel, os); - } - - @Override - protected StreamCompressor getStreamCompressor() throws FileNotFoundException { - return streamDeflater; - } - - @Override - protected InputStream getInputStream() throws IOException { - return new FileInputStream(target); - } - - @SuppressWarnings("ResultOfMethodCallIgnored") - public void closeBackingStorage() throws IOException { - os.close(); - } + ScatterGatherBackingStore bs = new FileBasedScatterGatherBackingStore(file); + StreamCompressor sc = StreamCompressor.create(compressionLevel, bs); + return new 
ScatterZipOutputStream(bs, sc); } } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java index 5856a7dfa3b..b5e53a2cf57 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java @@ -91,6 +91,28 @@ public static StreamCompressor create(int compressionLevel, DataOutput os) { return new DataOutputCompressor(deflater, os); } + /** + * Create a stream compressor with the given compression level. + * + * @param compressionLevel The #Deflater compression level + * @param bs The #ScatterGatherBackingStore to receive output + * @return A stream compressor + */ + public static StreamCompressor create(int compressionLevel, ScatterGatherBackingStore bs) { + final Deflater deflater = new Deflater(compressionLevel, true); + return new ScatterGatherBackingStoreCompressor(deflater, bs); + } + + /** + * Create a stream compressor with the default compression level. 
+ * + * @param bs The #ScatterGatherBackingStore to receive output + * @return A stream compressor + */ + public static StreamCompressor create( ScatterGatherBackingStore bs) { + return create(Deflater.DEFAULT_COMPRESSION, bs); + } + /** * The crc32 of the last deflated file * @return the crc32 @@ -197,6 +219,20 @@ private void deflate() throws IOException { protected abstract void writeOut(byte[] data, int offset, int length) throws IOException ; + private static final class ScatterGatherBackingStoreCompressor extends StreamCompressor { + private final ScatterGatherBackingStore bs; + + public ScatterGatherBackingStoreCompressor(Deflater deflater, ScatterGatherBackingStore bs) { + super(deflater); + this.bs = bs; + } + + protected final void writeOut(byte[] data, int offset, int length) + throws IOException { + bs.writeOut(data, offset, length); + } + } + private static final class OutputStreamCompressor extends StreamCompressor { private final OutputStream os; From 2db99ab5f3db2b66eb9a589c46f61adc6701c524 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Tue, 30 Dec 2014 19:23:43 +0000 Subject: [PATCH 045/189] Removed duplication between ZipArchiveOutputStream and StreamCompressor Reatained all compatibility in ZipArchiveOutputStream Made StreamCompressor and all instantiation of this package private. 
git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1648585 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/ScatterZipOutputStream.java | 2 - .../archivers/zip/StreamCompressor.java | 118 +++++++--- .../archivers/zip/ZipArchiveOutputStream.java | 222 +++++++----------- 3 files changed, 166 insertions(+), 176 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java index 9a01a96205e..9c5c7ab9d6b 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java @@ -21,9 +21,7 @@ import org.apache.commons.compress.utils.BoundedInputStream; import java.io.File; -import java.io.FileInputStream; import java.io.FileNotFoundException; -import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.Queue; diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java index b5e53a2cf57..26ee7aa05fc 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java @@ -17,6 +17,7 @@ */ package org.apache.commons.compress.archivers.zip; +import java.io.Closeable; import java.io.DataOutput; import java.io.IOException; import java.io.InputStream; @@ -30,7 +31,7 @@ * * @since 1.10 */ -public abstract class StreamCompressor { +abstract class StreamCompressor implements Closeable { /* * Apparently Deflater.setInput gets slowed down a lot on Sun JVMs @@ -45,27 +46,26 @@ public abstract class StreamCompressor { private final CRC32 crc = new CRC32(); - int writtenToOutputStream = 0; - int sourcePayloadLength = 0; - long actualCrc; + private 
long writtenToOutputStream = 0; + private long sourcePayloadLength = 0; + private long totalWrittenToOutputStream = 0; private final int bufferSize = 4096; private final byte[] outputBuffer = new byte[bufferSize]; private final byte[] readerBuf = new byte[bufferSize]; - protected StreamCompressor(Deflater deflater) { + StreamCompressor(Deflater deflater) { this.def = deflater; } /** * Create a stream compressor with the given compression level. * - * @param compressionLevel The #Deflater compression level - * @param os The #OutputStream stream to receive output + * @param os The #OutputStream stream to receive output + * @param deflater The deflater to use * @return A stream compressor */ - public static StreamCompressor create(int compressionLevel, OutputStream os) { - final Deflater deflater = new Deflater(compressionLevel, true); + static StreamCompressor create(OutputStream os, Deflater deflater) { return new OutputStreamCompressor(deflater, os); } @@ -75,19 +75,18 @@ public static StreamCompressor create(int compressionLevel, OutputStream os) { * @param os The #OutputStream stream to receive output * @return A stream compressor */ - public static StreamCompressor create( OutputStream os) { - return create(Deflater.DEFAULT_COMPRESSION, os); + static StreamCompressor create(OutputStream os) { + return create(os, new Deflater(Deflater.DEFAULT_COMPRESSION, true)); } /** * Create a stream compressor with the given compression level. 
* - * @param compressionLevel The #Deflater compression level - * @param os The #DataOutput to receive output + * @param os The #DataOutput to receive output + * @param deflater The deflater to use for the compressor * @return A stream compressor */ - public static StreamCompressor create(int compressionLevel, DataOutput os) { - final Deflater deflater = new Deflater(compressionLevel, true); + static StreamCompressor create(DataOutput os, Deflater deflater) { return new DataOutputCompressor(deflater, os); } @@ -95,7 +94,7 @@ public static StreamCompressor create(int compressionLevel, DataOutput os) { * Create a stream compressor with the given compression level. * * @param compressionLevel The #Deflater compression level - * @param bs The #ScatterGatherBackingStore to receive output + * @param bs The #ScatterGatherBackingStore to receive output * @return A stream compressor */ public static StreamCompressor create(int compressionLevel, ScatterGatherBackingStore bs) { @@ -109,37 +108,51 @@ public static StreamCompressor create(int compressionLevel, ScatterGatherBacking * @param bs The #ScatterGatherBackingStore to receive output * @return A stream compressor */ - public static StreamCompressor create( ScatterGatherBackingStore bs) { + public static StreamCompressor create(ScatterGatherBackingStore bs) { return create(Deflater.DEFAULT_COMPRESSION, bs); } /** * The crc32 of the last deflated file + * * @return the crc32 */ public long getCrc32() { - return actualCrc; + return crc.getValue(); } /** * Return the number of bytes read from the source stream + * * @return The number of bytes read, never negative */ - public int getBytesRead() { + public long getBytesRead() { return sourcePayloadLength; } /** * The number of bytes written to the output + * * @return The number of bytes, never negative */ - public int getBytesWritten() { + public long getBytesWritten() { return writtenToOutputStream; } + /** + * The total number of bytes written to the output for all files + 
* + * @return The number of bytes, never negative + */ + public long getTotalBytesWritten() { + return totalWrittenToOutputStream; + } + + /** * Deflate the given source using the supplied compression method + * * @param source The source to compress * @param method The #ZipArchiveEntry compression method * @throws IOException When failures happen @@ -149,39 +162,56 @@ public void deflate(InputStream source, int method) throws IOException { reset(); int length; - while(( length = source.read(readerBuf, 0, readerBuf.length)) >= 0){ - crc.update(readerBuf, 0, length); - if (method == ZipArchiveEntry.DEFLATED) { - writeDeflated(readerBuf, 0, length); - } else { - writeOut(readerBuf, 0, length); - writtenToOutputStream += length; - } - sourcePayloadLength += length; + while ((length = source.read(readerBuf, 0, readerBuf.length)) >= 0) { + write(readerBuf, 0, length, method); } if (method == ZipArchiveEntry.DEFLATED) { flushDeflater(); } - actualCrc = crc.getValue(); - + } + /** + * Writes bytes to ZIP entry. 
+ * + * @param b the byte array to write + * @param offset the start position to write from + * @param length the number of bytes to write + * @param method the comrpession method to use + * @return the number of bytes written to the stream this time + * @throws IOException on error + */ + long write(byte[] b, int offset, int length, int method) throws IOException { + long current = writtenToOutputStream; + crc.update(b, offset, length); + if (method == ZipArchiveEntry.DEFLATED) { + writeDeflated(b, offset, length); + } else { + writeCounted(b, offset, length); + } + sourcePayloadLength += length; + return writtenToOutputStream - current; } - private void reset(){ + + void reset() { crc.reset(); def.reset(); sourcePayloadLength = 0; writtenToOutputStream = 0; } - private void flushDeflater() throws IOException { + public void close() throws IOException { + def.end(); + } + + void flushDeflater() throws IOException { def.finish(); while (!def.finished()) { deflate(); } } - private void writeDeflated(byte[]b, int offset, int length) + private void writeDeflated(byte[] b, int offset, int length) throws IOException { if (length > 0 && !def.finished()) { if (length <= DEFLATER_BLOCK_SIZE) { @@ -212,12 +242,21 @@ private void deflateUntilInputIsNeeded() throws IOException { private void deflate() throws IOException { int len = def.deflate(outputBuffer, 0, outputBuffer.length); if (len > 0) { - writeOut(outputBuffer, 0, len); - writtenToOutputStream += len; + writeCounted(outputBuffer, 0, len); } } - protected abstract void writeOut(byte[] data, int offset, int length) throws IOException ; + public void writeCounted(byte[] data) throws IOException { + writeCounted(data, 0, data.length); + } + + public void writeCounted(byte[] data, int offset, int length) throws IOException { + writeOut(data, offset, length); + writtenToOutputStream += length; + totalWrittenToOutputStream += length; + } + + protected abstract void writeOut(byte[] data, int offset, int length) throws 
IOException; private static final class ScatterGatherBackingStoreCompressor extends StreamCompressor { private final ScatterGatherBackingStore bs; @@ -229,7 +268,7 @@ public ScatterGatherBackingStoreCompressor(Deflater deflater, ScatterGatherBacki protected final void writeOut(byte[] data, int offset, int length) throws IOException { - bs.writeOut(data, offset, length); + bs.writeOut(data, offset, length); } } @@ -243,12 +282,13 @@ public OutputStreamCompressor(Deflater deflater, OutputStream os) { protected final void writeOut(byte[] data, int offset, int length) throws IOException { - os.write(data, offset, length); + os.write(data, offset, length); } } private static final class DataOutputCompressor extends StreamCompressor { private final DataOutput raf; + public DataOutputCompressor(Deflater deflater, DataOutput raf) { super(deflater); this.raf = raf; diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index fe2e519a7ca..86f91ea91b6 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -28,7 +28,6 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; -import java.util.zip.CRC32; import java.util.zip.Deflater; import java.util.zip.ZipException; @@ -181,15 +180,7 @@ public class ZipArchiveOutputStream extends ArchiveOutputStream { private final List entries = new LinkedList(); - /** - * CRC instance to avoid parsing DEFLATED data twice. - */ - private final CRC32 crc = new CRC32(); - - /** - * Count the bytes written to out. - */ - private long written = 0; + private final StreamCompressor streamCompressor; /** * Start of central directory. 
@@ -235,18 +226,12 @@ public class ZipArchiveOutputStream extends ArchiveOutputStream { private ZipEncoding zipEncoding = ZipEncodingHelper.getZipEncoding(DEFAULT_ENCODING); - /** - * This Deflater object is used for output. - * - */ - protected final Deflater def = new Deflater(level, true); /** - * This buffer serves as a Deflater. + * This Deflater object is used for output. * */ - private final byte[] buf = new byte[BUFFER_SIZE]; - + protected final Deflater def; /** * Optional random access output. */ @@ -286,6 +271,8 @@ public class ZipArchiveOutputStream extends ArchiveOutputStream { public ZipArchiveOutputStream(OutputStream out) { this.out = out; this.raf = null; + def = new Deflater(level, true); + streamCompressor = StreamCompressor.create(out, def); } /** @@ -305,6 +292,8 @@ public ZipArchiveOutputStream(File file) throws IOException { _raf = null; o = new FileOutputStream(file); } + def = new Deflater(level, true); + streamCompressor = StreamCompressor.create(_raf, def); out = o; raf = _raf; } @@ -441,16 +430,16 @@ public void finish() throws IOException { throw new IOException("This archive contains unclosed entries."); } - cdOffset = written; + cdOffset = streamCompressor.getTotalBytesWritten(); for (ZipArchiveEntry ze : entries) { writeCentralFileHeader(ze); } - cdLength = written - cdOffset; + cdLength = streamCompressor.getTotalBytesWritten() - cdOffset; writeZip64CentralDirectory(); writeCentralDirectoryEnd(); offsets.clear(); entries.clear(); - def.end(); + streamCompressor.close(); finished = true; } @@ -467,11 +456,11 @@ public void closeArchiveEntry() throws IOException { flushDeflater(); - long bytesWritten = written - entry.dataStart; - long realCrc = crc.getValue(); - crc.reset(); - + long bytesWritten = streamCompressor.getTotalBytesWritten() - entry.dataStart; + long realCrc = streamCompressor.getCrc32(); + entry.bytesRead = streamCompressor.getBytesRead(); doCloseEntry(realCrc, bytesWritten); + streamCompressor.reset(); } /** @@ -484,16 
+473,24 @@ public void closeArchiveEntry() throws IOException { */ private void closeCopiedEntry() throws IOException { preClose(); - long realCrc = entry.entry.getCrc(); entry.bytesRead = entry.entry.getSize(); - doCloseEntry(realCrc, entry.entry.getCompressedSize()); + doCloseCopiedEntry(); } private void doCloseEntry(long realCrc, long bytesWritten) throws IOException { final Zip64Mode effectiveMode = getEffectiveZip64Mode(entry.entry); - final boolean actuallyNeedsZip64 = - handleSizesAndCrc(bytesWritten, realCrc, effectiveMode); + final boolean actuallyNeedsZip64 = handleSizesAndCrc(bytesWritten, realCrc, effectiveMode); + + closeEntry(actuallyNeedsZip64); + } + private void doCloseCopiedEntry() throws IOException { + Zip64Mode effectiveMode = getEffectiveZip64Mode(entry.entry); + boolean actuallyNeedsZip64 = checkIfNeedsZip64(effectiveMode); + closeEntry(actuallyNeedsZip64); + } + + private void closeEntry(boolean actuallyNeedsZip64) throws IOException { if (raf != null) { rewriteSizesAndCrc(actuallyNeedsZip64); } @@ -538,10 +535,7 @@ public void addRawArchiveEntry(ZipArchiveEntry entry, InputStream rawStream) */ private void flushDeflater() throws IOException { if (entry.entry.getMethod() == DEFLATED) { - def.finish(); - while (!def.finished()) { - deflate(); - } + streamCompressor.flushDeflater(); } } @@ -563,7 +557,6 @@ private boolean handleSizesAndCrc(long bytesWritten, long crc, entry.entry.setCompressedSize(bytesWritten); entry.entry.setCrc(crc); - def.reset(); } else if (raf == null) { if (entry.entry.getCrc() != crc) { throw new ZipException("bad CRC checksum for entry " @@ -586,16 +579,25 @@ private boolean handleSizesAndCrc(long bytesWritten, long crc, entry.entry.setCrc(crc); } + return checkIfNeedsZip64(effectiveMode); + } + + /** + * Ensures the current entry's size and CRC information is set to + * the values just written, verifies it isn't too big in the + * Zip64Mode.Never case and returns whether the entry would + * require a Zip64 extra 
field. + */ + private boolean checkIfNeedsZip64(Zip64Mode effectiveMode) + throws ZipException { final boolean actuallyNeedsZip64 = effectiveMode == Zip64Mode.Always - || entry.entry.getSize() >= ZIP64_MAGIC - || entry.entry.getCompressedSize() >= ZIP64_MAGIC; + || entry.entry.getSize() >= ZIP64_MAGIC + || entry.entry.getCompressedSize() >= ZIP64_MAGIC; if (actuallyNeedsZip64 && effectiveMode == Zip64Mode.Never) { - throw new Zip64RequiredException(Zip64RequiredException - .getEntryTooBigMessage(entry.entry)); + throw new Zip64RequiredException(Zip64RequiredException.getEntryTooBigMessage(entry.entry)); } return actuallyNeedsZip64; } - /** * When using random access output, write the local file header * and potentiall the ZIP64 extra containing the correct CRC and @@ -692,7 +694,7 @@ public void putArchiveEntry(ArchiveEntry archiveEntry) throws IOException { def.setLevel(level); hasCompressionLevelChanged = false; } - writeLocalFileHeader((ZipArchiveEntry)archiveEntry); + writeLocalFileHeader((ZipArchiveEntry) archiveEntry); } /** @@ -828,17 +830,23 @@ public void write(byte[] b, int offset, int length) throws IOException { throw new IllegalStateException("No current entry"); } ZipUtil.checkRequestedFeatures(entry.entry); - entry.hasWritten = true; - if (entry.entry.getMethod() == DEFLATED) { - writeDeflated(b, offset, length); - } else { - writeOut(b, offset, length); - written += length; - } - crc.update(b, offset, length); - count(length); + long writtenThisTime = streamCompressor.write(b, offset, length, entry.entry.getMethod()); + count(writtenThisTime); + } + + /** + * Write bytes to output or random access file. 
+ * @param data the byte array to write + * @throws IOException on error + */ + private void writeCounted(byte[] data) throws IOException { + streamCompressor.writeCounted(data); } + + + + private void copyFromZipInputStream(InputStream src) throws IOException { if (entry == null) { throw new IllegalStateException("No current entry"); @@ -846,41 +854,11 @@ private void copyFromZipInputStream(InputStream src) throws IOException { ZipUtil.checkRequestedFeatures(entry.entry); entry.hasWritten = true; byte[] tmpBuf = new byte[4096]; - int length = src.read( tmpBuf ); - while ( length >= 0 ) + int length; + while ((length = src.read( tmpBuf )) >= 0 ) { - writeOut( tmpBuf, 0, length ); - written += length; - crc.update( tmpBuf, 0, length ); - + streamCompressor.writeCounted(tmpBuf, 0, length); count( length ); - length = src.read( tmpBuf ); - } - } - - /** - * write implementation for DEFLATED entries. - */ - private void writeDeflated(byte[]b, int offset, int length) - throws IOException { - if (length > 0 && !def.finished()) { - entry.bytesRead += length; - if (length <= DEFLATER_BLOCK_SIZE) { - def.setInput(b, offset, length); - deflateUntilInputIsNeeded(); - } else { - final int fullblocks = length / DEFLATER_BLOCK_SIZE; - for (int i = 0; i < fullblocks; i++) { - def.setInput(b, offset + i * DEFLATER_BLOCK_SIZE, - DEFLATER_BLOCK_SIZE); - deflateUntilInputIsNeeded(); - } - final int done = fullblocks * DEFLATER_BLOCK_SIZE; - if (done < length) { - def.setInput(b, offset + done, length - done); - deflateUntilInputIsNeeded(); - } - } } } @@ -942,18 +920,6 @@ public void flush() throws IOException { */ static final byte[] ZIP64_EOCD_LOC_SIG = ZipLong.getBytes(0X07064B50L); - /** - * Writes next block of compressed data to the output stream. 
- * @throws IOException on error - */ - protected final void deflate() throws IOException { - int len = def.deflate(buf, 0, buf.length); - if (len > 0) { - writeOut(buf, 0, len); - written += len; - } - } - /** * Writes the local file header entry * @param ze the entry to write @@ -968,11 +934,11 @@ protected void writeLocalFileHeader(ZipArchiveEntry ze) throws IOException { } final byte[] localHeader = createLocalFileHeader(ze, name, encodable); - offsets.put(ze, written); - entry.localDataStart = written + 14; // Right before crc - writeOut( localHeader); - written += localHeader.length; - entry.dataStart = written; + long localHeaderStart = streamCompressor.getTotalBytesWritten(); + offsets.put(ze, localHeaderStart); + entry.localDataStart = localHeaderStart + LFH_CRC_OFFSET; // At crc offset + writeCounted(localHeader); + entry.dataStart = streamCompressor.getTotalBytesWritten(); } @@ -1084,18 +1050,15 @@ protected void writeDataDescriptor(ZipArchiveEntry ze) throws IOException { if (ze.getMethod() != DEFLATED || raf != null) { return; } - writeOut(DD_SIG); - writeOut(ZipLong.getBytes(ze.getCrc())); - int sizeFieldSize = WORD; + writeCounted(DD_SIG); + writeCounted(ZipLong.getBytes(ze.getCrc())); if (!hasZip64Extra(ze)) { - writeOut(ZipLong.getBytes(ze.getCompressedSize())); - writeOut(ZipLong.getBytes(ze.getSize())); + writeCounted(ZipLong.getBytes(ze.getCompressedSize())); + writeCounted(ZipLong.getBytes(ze.getSize())); } else { - sizeFieldSize = DWORD; - writeOut(ZipEightByteInteger.getBytes(ze.getCompressedSize())); - writeOut(ZipEightByteInteger.getBytes(ze.getSize())); + writeCounted(ZipEightByteInteger.getBytes(ze.getCompressedSize())); + writeCounted(ZipEightByteInteger.getBytes(ze.getSize())); } - written += 2 * WORD + 2 * sizeFieldSize; } /** @@ -1125,10 +1088,8 @@ protected void writeCentralFileHeader(ZipArchiveEntry ze) throws IOException { handleZip64Extra(ze, lfhOffset, needsZip64Extra); - byte[] centralFileHeader = createCentralFileHeader(ze, 
getName(ze), lfhOffset, - needsZip64Extra); - writeOut(centralFileHeader); - written += centralFileHeader.length; + byte[] centralFileHeader = createCentralFileHeader(ze, getName(ze), lfhOffset, needsZip64Extra); + writeCounted(centralFileHeader); } /** @@ -1249,11 +1210,11 @@ private void handleZip64Extra(ZipArchiveEntry ze, long lfhOffset, * and {@link Zip64Mode #setUseZip64} is {@link Zip64Mode#Never}. */ protected void writeCentralDirectoryEnd() throws IOException { - writeOut(EOCD_SIG); + writeCounted(EOCD_SIG); // disk numbers - writeOut(ZERO); - writeOut(ZERO); + writeCounted(ZERO); + writeCounted(ZERO); // number of entries int numberOfEntries = entries.size(); @@ -1269,18 +1230,17 @@ protected void writeCentralDirectoryEnd() throws IOException { byte[] num = ZipShort.getBytes(Math.min(numberOfEntries, ZIP64_MAGIC_SHORT)); - writeOut(num); - writeOut(num); + writeCounted(num); + writeCounted(num); // length and location of CD - writeOut(ZipLong.getBytes(Math.min(cdLength, ZIP64_MAGIC))); - writeOut(ZipLong.getBytes(Math.min(cdOffset, ZIP64_MAGIC))); + writeCounted(ZipLong.getBytes(Math.min(cdLength, ZIP64_MAGIC))); + writeCounted(ZipLong.getBytes(Math.min(cdOffset, ZIP64_MAGIC))); // ZIP file comment ByteBuffer data = this.zipEncoding.encode(comment); - writeOut(ZipShort.getBytes(data.limit())); - writeOut(data.array(), data.arrayOffset(), - data.limit() - data.position()); + writeCounted(ZipShort.getBytes(data.limit())); + streamCompressor.writeCounted(data.array(), data.arrayOffset(), data.limit() - data.position()); } private static final byte[] ONE = ZipLong.getBytes(1L); @@ -1307,7 +1267,7 @@ protected void writeZip64CentralDirectory() throws IOException { return; } - long offset = written; + long offset = streamCompressor.getBytesWritten(); writeOut(ZIP64_EOCD_SIG); // size, we don't have any variable length as we don't support @@ -1359,9 +1319,10 @@ protected void writeZip64CentralDirectory() throws IOException { * @throws IOException on error */ 
protected final void writeOut(byte[] data) throws IOException { - writeOut(data, 0, data.length); + streamCompressor.writeOut(data, 0, data.length); } + /** * Write bytes to output or random access file. * @param data the byte array to write @@ -1370,19 +1331,10 @@ protected final void writeOut(byte[] data) throws IOException { * @throws IOException on error */ protected final void writeOut(byte[] data, int offset, int length) - throws IOException { - if (raf != null) { - raf.write(data, offset, length); - } else { - out.write(data, offset, length); - } + throws IOException { + streamCompressor.writeOut(data, offset, length); } - private void deflateUntilInputIsNeeded() throws IOException { - while (!def.needsInput()) { - deflate(); - } - } private GeneralPurposeBit getGeneralPurposeBits(final int zipMethod, final boolean utfFallback) { GeneralPurposeBit b = new GeneralPurposeBit(); From e6be36836b60781e5fdafd91ed2c9065af206a98 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Wed, 31 Dec 2014 14:14:35 +0000 Subject: [PATCH 046/189] Added ParallelScatterZipCreator git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1648704 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/InputStreamSupplier.java | 31 ++++ .../zip/ParallelScatterZipCreator.java | 155 ++++++++++++++++++ .../ScatterGatherBackingStoreSupplier.java | 30 ++++ .../zip/ParallelScatterZipCreatorTest.java | 84 ++++++++++ 4 files changed, 300 insertions(+) create mode 100644 src/main/java/org/apache/commons/compress/archivers/zip/InputStreamSupplier.java create mode 100644 src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java create mode 100644 src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStoreSupplier.java create mode 100644 src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/InputStreamSupplier.java 
b/src/main/java/org/apache/commons/compress/archivers/zip/InputStreamSupplier.java new file mode 100644 index 00000000000..ecc0d4caf50 --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/zip/InputStreamSupplier.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.archivers.zip; + +import java.io.InputStream; + +public interface InputStreamSupplier { + + /** + * Supply an input stream for a resource. + * @return the input stream, may be null if there is no content for the resource. + */ + InputStream get(); +} diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java new file mode 100644 index 00000000000..18b14546cd7 --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java @@ -0,0 +1,155 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.zip; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.zip.Deflater; + +/** + * Creates a zip in parallel by using multiple threadlocal #ScatterZipOutputStream instances. + *

+ * Note that this class generally makes no guarantees about the order of things written to + * the output file. Things that need to come in a specific order (manifests, directories) + * must be handled by the client of this class, usually by writing these things to the + * #ZipArchiveOutputStream *before* calling #writeTo on this class. + */ +public class ParallelScatterZipCreator { + private List streams = Collections.synchronizedList(new ArrayList()); + private final ExecutorService es; + + private final long startedAt = System.currentTimeMillis(); + private long compressionDoneAt = 0; + private long scatterDoneAt; + + static ScatterGatherBackingStoreSupplier defaultSupplier = new DefaultSupplier(); + + static class DefaultSupplier implements ScatterGatherBackingStoreSupplier { + AtomicInteger storeNum = new AtomicInteger(0); + + public ScatterGatherBackingStore get() throws IOException { + File tempFile = File.createTempFile("parallelscatter", "n" + storeNum.incrementAndGet()); + return new FileBasedScatterGatherBackingStore(tempFile); + } + } + + static ScatterZipOutputStream createDeferred(ScatterGatherBackingStoreSupplier scatterGatherBackingStoreSupplier) + throws IOException { + ScatterGatherBackingStore bs = scatterGatherBackingStoreSupplier.get(); + StreamCompressor sc = StreamCompressor.create(Deflater.DEFAULT_COMPRESSION, bs); + return new ScatterZipOutputStream(bs, sc); + } + + + ThreadLocal tlScatterStreams = new ThreadLocal() { + @Override + protected ScatterZipOutputStream initialValue() { + try { + ScatterZipOutputStream scatterStream = createDeferred(defaultSupplier); + streams.add(scatterStream); + return scatterStream; + } catch (IOException e) { + throw new RuntimeException(e); + } + } + }; + + /** + * Create a ParallelScatterZipCreator with default threads + */ + public ParallelScatterZipCreator() { + this(Runtime.getRuntime().availableProcessors()); + } + + /** + * Create a ParallelScatterZipCreator + * + * @param nThreads the number of 
threads to use in parallel. + */ + public ParallelScatterZipCreator(int nThreads) { + es = Executors.newFixedThreadPool(nThreads); + } + + /** + * Adds an archive entry to this archive. + *

+ * This method is expected to be called from a single client thread + * + * @param zipArchiveEntry The entry to add. Compression method + * @param source The source input stream supplier + */ + + public void addArchiveEntry(final ZipArchiveEntry zipArchiveEntry, final InputStreamSupplier source) { + final int method = zipArchiveEntry.getMethod(); + if (method == -1) throw new IllegalArgumentException("Method must be set on the supplied zipArchiveEntry"); + // Consider if we want to constrain the number of items that can enqueue here. + es.submit(new Callable() { + public ScatterZipOutputStream call() throws Exception { + ScatterZipOutputStream streamToUse = tlScatterStreams.get(); + streamToUse.addArchiveEntry(zipArchiveEntry, source.get(), method); + return streamToUse; + } + + }); + } + + + /** + * Write the contents this to the target #ZipArchiveOutputStream. + *

+ * It may be beneficial to write things like directories and manifest files to the targetStream + * before calling this method. + * + * @param targetStream The ZipArchiveOutputStream to receive the contents of the scatter streams + * @throws IOException If writing fails + * @throws InterruptedException If we get interrupted + */ + public void writeTo(ZipArchiveOutputStream targetStream) throws IOException, InterruptedException { + es.shutdown(); + es.awaitTermination(1000 * 60, TimeUnit.SECONDS); + + // It is important that all threads terminate before we go on, ensure happens-before relationship + compressionDoneAt = System.currentTimeMillis(); + + for (ScatterZipOutputStream scatterStream : streams) { + scatterStream.writeTo(targetStream); + } + + scatterDoneAt = System.currentTimeMillis(); + // Maybe close ScatterZipOS. We should do something to get rid of tempfiles. + } + + /** + * Returns a message describing the overall statistics of the compression run + * + * @return A string + */ + public String getStatisticsMessage() { + return "Compression: " + (compressionDoneAt - startedAt) + "ms," + + "Merging files: " + (scatterDoneAt - compressionDoneAt) + "ms"; + } +} + diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStoreSupplier.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStoreSupplier.java new file mode 100644 index 00000000000..54359dcee5e --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStoreSupplier.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.zip; + +import java.io.IOException; + +public interface ScatterGatherBackingStoreSupplier { + /** + * Get a ScatterGatherBackingStore. + * + * @return a ScatterGatherBackingStore, not null + */ + ScatterGatherBackingStore get() throws IOException; +} diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java new file mode 100644 index 00000000000..6a482db8619 --- /dev/null +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +package org.apache.commons.compress.archivers.zip; + +import org.apache.commons.compress.utils.IOUtils; +import org.junit.Test; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.Map; + +import static org.junit.Assert.*; + +@SuppressWarnings("OctalInteger") +public class ParallelScatterZipCreatorTest { + + @Test + public void concurrent() + throws Exception { + File result = File.createTempFile("parallelScatterGather1", ""); + ZipArchiveOutputStream zos = new ZipArchiveOutputStream(result); + zos.setEncoding("UTF-8"); + ParallelScatterZipCreator zipCreator = new ParallelScatterZipCreator(); + + Map entries = writeEntries(zipCreator); + zipCreator.writeTo(zos); + zos.close(); + + removeEntriesFoundInZipFile(result, entries); + assertTrue(entries.size() == 0); + assertNotNull( zipCreator.getStatisticsMessage()); + } + + private void removeEntriesFoundInZipFile(File result, Map entries) throws IOException { + ZipFile zf = new ZipFile(result); + Enumeration entriesInPhysicalOrder = zf.getEntriesInPhysicalOrder(); + while (entriesInPhysicalOrder.hasMoreElements()){ + ZipArchiveEntry zipArchiveEntry = entriesInPhysicalOrder.nextElement(); + InputStream inputStream = zf.getInputStream(zipArchiveEntry); + byte[] actual = IOUtils.toByteArray(inputStream); + byte[] expected = entries.remove(zipArchiveEntry.getName()); + assertArrayEquals( expected, actual); + } + zf.close(); + } + + private Map writeEntries(ParallelScatterZipCreator zipCreator) { + Map entries = new HashMap(); + for (int i = 0; i < 10000; i++){ + ZipArchiveEntry za = new ZipArchiveEntry( "file" + i); + final String payload = "content" + i; + final byte[] payloadBytes = payload.getBytes(); + entries.put( za.getName(), payloadBytes); + za.setMethod(ZipArchiveEntry.DEFLATED); + za.setSize(payload.length()); + za.setUnixMode(UnixStat.FILE_FLAG | 0664); + 
zipCreator.addArchiveEntry(za, new InputStreamSupplier() { + public InputStream get() { + return new ByteArrayInputStream(payloadBytes); + } + }); + } + return entries; + } +} \ No newline at end of file From af0b40d408df1d0f7278f05c6f1b59797116b7f2 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Thu, 1 Jan 2015 19:39:17 +0000 Subject: [PATCH 047/189] document parallel zip creation git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1648926 13f79535-47bb-0310-9956-ffa450edef68 --- NOTICE.txt | 2 +- src/site/xdoc/index.xml | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/NOTICE.txt b/NOTICE.txt index db0a7592a61..edd2f2c78ee 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -1,5 +1,5 @@ Apache Commons Compress -Copyright 2002-2014 The Apache Software Foundation +Copyright 2002-2015 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). diff --git a/src/site/xdoc/index.xml b/src/site/xdoc/index.xml index a2d69bf0fe1..5f8b6964937 100644 --- a/src/site/xdoc/index.xml +++ b/src/site/xdoc/index.xml @@ -70,6 +70,8 @@ org.apache.commons.compress.compressors.lzw and the code is now an official part of Commons Compress' API. +

  • A new class in the zip package can create archives + while compressing different entries in parallel.
  • From 98f98c49c31b0760285a9d8a39712c7932f3a650 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Thu, 1 Jan 2015 20:08:16 +0000 Subject: [PATCH 048/189] Unix mode bits were not being preserved in addRawEntry git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1648932 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/ZipArchiveOutputStream.java | 1 + .../compress/archivers/ZipTestCase.java | 21 +++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index 86f91ea91b6..803ffa6ff40 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -525,6 +525,7 @@ private void preClose() throws IOException { public void addRawArchiveEntry(ZipArchiveEntry entry, InputStream rawStream) throws IOException { ZipArchiveEntry ae = new ZipArchiveEntry((java.util.zip.ZipEntry)entry); + ae.setUnixMode( entry.getUnixMode()); putArchiveEntry(ae); copyFromZipInputStream(rawStream); closeCopiedEntry(); diff --git a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java index db090201396..afac8f7ef72 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java +++ b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java @@ -18,6 +18,7 @@ */ package org.apache.commons.compress.archivers; +import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; @@ -26,6 +27,7 @@ import java.io.OutputStream; import java.util.ArrayList; import java.util.List; +import java.util.zip.ZipEntry; import org.apache.commons.compress.AbstractTestCase; import 
org.apache.commons.compress.archivers.zip.ZipArchiveEntry; @@ -328,6 +330,25 @@ public boolean test(ZipArchiveEntry zipArchiveEntry) { zf2.close(); } + public void testUnixModeInAddRaw() throws IOException { + + File[] tmp = createTempDirAndFile(); + + File a1 = File.createTempFile("unixModeBits.", ".zip", tmp[0]); + ZipArchiveOutputStream zos = new ZipArchiveOutputStream(a1); + + ZipArchiveEntry archiveEntry = new ZipArchiveEntry("fred"); + archiveEntry.setUnixMode(0664); + archiveEntry.setMethod(ZipEntry.DEFLATED); + zos.addRawArchiveEntry(archiveEntry, new ByteArrayInputStream("fud".getBytes())); + zos.close(); + + ZipFile zf1 = new ZipFile(a1); + ZipArchiveEntry fred = zf1.getEntry("fred"); + assertEquals(0664, fred.getUnixMode()); + zf1.close(); + } + private File createReferenceFile(File directory) throws IOException { File reference = File.createTempFile("expected.", ".zip", directory); ZipArchiveOutputStream zos = new ZipArchiveOutputStream(reference); From e1914af4fe1cdfe3ea4b2e14d01056d1575482c7 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Thu, 1 Jan 2015 20:37:22 +0000 Subject: [PATCH 049/189] Things are never quite as simple as they seem Last commit broke build git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1648937 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/archivers/zip/ZipArchiveOutputStream.java | 5 ++++- .../org/apache/commons/compress/archivers/ZipTestCase.java | 3 +-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index 803ffa6ff40..626218a92c3 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -525,7 +525,10 @@ private void preClose() throws IOException { public void 
addRawArchiveEntry(ZipArchiveEntry entry, InputStream rawStream) throws IOException { ZipArchiveEntry ae = new ZipArchiveEntry((java.util.zip.ZipEntry)entry); - ae.setUnixMode( entry.getUnixMode()); + int unixMode = entry.getUnixMode(); + if (unixMode > 0){ + ae.setUnixMode(unixMode); + } putArchiveEntry(ae); copyFromZipInputStream(rawStream); closeCopiedEntry(); diff --git a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java index afac8f7ef72..c34bc9542bd 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java +++ b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java @@ -305,8 +305,7 @@ public boolean test(ZipArchiveEntry zipArchiveEntry) { } }; - public void testCopyRawEntriesFromFile - () + public void testCopyRawEntriesFromFile() throws IOException { File[] tmp = createTempDirAndFile(); From 790fd918b30bb3e5c905adc19e795006c99579d7 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Thu, 1 Jan 2015 21:33:23 +0000 Subject: [PATCH 050/189] Made some stuff public Work with the plexus-archiver poc revealed I needed these public git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1648947 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/FileBasedScatterGatherBackingStore.java | 4 ++-- .../compress/archivers/zip/ParallelScatterZipCreator.java | 2 +- .../compress/archivers/zip/ScatterGatherBackingStore.java | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/FileBasedScatterGatherBackingStore.java b/src/main/java/org/apache/commons/compress/archivers/zip/FileBasedScatterGatherBackingStore.java index cafa614aff9..2ff5a29290b 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/FileBasedScatterGatherBackingStore.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/FileBasedScatterGatherBackingStore.java 
@@ -29,11 +29,11 @@ * * @since 1.10 */ -class FileBasedScatterGatherBackingStore implements ScatterGatherBackingStore { +public class FileBasedScatterGatherBackingStore implements ScatterGatherBackingStore { private final File target; private final FileOutputStream os; - FileBasedScatterGatherBackingStore(File target) throws FileNotFoundException { + public FileBasedScatterGatherBackingStore(File target) throws FileNotFoundException { this.target = target; os = new FileOutputStream(target); } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java index 18b14546cd7..3b33840d271 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java @@ -56,7 +56,7 @@ public ScatterGatherBackingStore get() throws IOException { } } - static ScatterZipOutputStream createDeferred(ScatterGatherBackingStoreSupplier scatterGatherBackingStoreSupplier) + public static ScatterZipOutputStream createDeferred(ScatterGatherBackingStoreSupplier scatterGatherBackingStoreSupplier) throws IOException { ScatterGatherBackingStore bs = scatterGatherBackingStoreSupplier.get(); StreamCompressor sc = StreamCompressor.create(Deflater.DEFAULT_COMPRESSION, bs); diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStore.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStore.java index 84a9306434e..869e84f6e11 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStore.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStore.java @@ -27,7 +27,7 @@ * * @since 1.10 */ -interface ScatterGatherBackingStore extends Closeable { +public interface ScatterGatherBackingStore extends Closeable { /** * An input stream 
that contains the scattered payload From fa2e5bd29069144fa35fde720604c549205f99f0 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 2 Jan 2015 14:42:27 +0000 Subject: [PATCH 051/189] implement Cloneable in GeneralPurposeBit git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1649051 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/archivers/zip/GeneralPurposeBit.java | 11 ++++++++++- .../compress/archivers/zip/GeneralPurposeBitTest.java | 11 +++++++++-- 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java b/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java index 2325cd75309..350514df756 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java @@ -24,7 +24,7 @@ * @since 1.1 * @NotThreadSafe */ -public final class GeneralPurposeBit { +public final class GeneralPurposeBit implements Cloneable { /** * Indicates that the file is encrypted. 
@@ -220,4 +220,13 @@ public boolean equals(Object o) { && g.languageEncodingFlag == languageEncodingFlag && g.dataDescriptorFlag == dataDescriptorFlag; } + + public Object clone() { + try { + return super.clone(); + } catch (CloneNotSupportedException ex) { + // impossible + throw new RuntimeException("GeneralPurposeBit is not Cloneable?", ex); + } + } } diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBitTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBitTest.java index d457915df56..b33f6f385de 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBitTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBitTest.java @@ -81,7 +81,7 @@ public void testEncryption() { assertTrue(Arrays.equals(flags, b.encode())); } - public void testStringEncryption() { + public void testStrongEncryption() { byte[] flags = new byte[] {(byte) 65, (byte) 0}; assertTrue(GeneralPurposeBit.parse(flags, 0).usesStrongEncryption()); GeneralPurposeBit b = new GeneralPurposeBit(); @@ -93,4 +93,11 @@ public void testStringEncryption() { assertFalse(GeneralPurposeBit.parse(flags, 0).usesStrongEncryption()); } -} \ No newline at end of file + public void testClone() { + GeneralPurposeBit b = new GeneralPurposeBit(); + b.useStrongEncryption(true); + b.useUTF8ForNames(true); + assertEquals(b, b.clone()); + assertNotSame(b, b.clone()); + } +} From 9ee2bbf49b59f6e07cd3e203b4b3c749c80f64bb Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 2 Jan 2015 14:48:13 +0000 Subject: [PATCH 052/189] ZipArchiveEntry's copy-constructor was incomplete git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1649052 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress/archivers/zip/ZipArchiveEntry.java | 3 +++ .../compress/archivers/zip/ZipArchiveEntryTest.java | 9 +++++++++ 2 files changed, 12 insertions(+) diff --git 
a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java index 8dbc10191f7..36fac954cde 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java @@ -138,6 +138,9 @@ public ZipArchiveEntry(ZipArchiveEntry entry) throws ZipException { setInternalAttributes(entry.getInternalAttributes()); setExternalAttributes(entry.getExternalAttributes()); setExtraFields(entry.getExtraFields(true)); + setPlatform(entry.platform); + setGeneralPurposeBit(entry.gpb == null ? null : + (GeneralPurposeBit) entry.gpb.clone()); } /** diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryTest.java index ba94abd52bd..ee9c10a4896 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryTest.java @@ -253,4 +253,13 @@ public void testNullCommentEqualsEmptyComment() { assertFalse(entry1.equals(entry3)); assertFalse(entry2.equals(entry3)); } + + public void testCopyConstructor() throws Exception { + ZipArchiveEntry archiveEntry = new ZipArchiveEntry("fred"); + archiveEntry.setUnixMode(0664); + archiveEntry.setMethod(ZipEntry.DEFLATED); + archiveEntry.getGeneralPurposeBit().useStrongEncryption(true); + ZipArchiveEntry copy = new ZipArchiveEntry(archiveEntry); + assertEquals(archiveEntry, copy); + } } From d5f5da94126ed329c336391e150be81984c78b3e Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 2 Jan 2015 14:49:11 +0000 Subject: [PATCH 053/189] no need to special case UnixMode now that copy-constructor has been fixed git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1649055 13f79535-47bb-0310-9956-ffa450edef68 --- 
.../compress/archivers/zip/ZipArchiveOutputStream.java | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index 626218a92c3..1bc0510e185 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -524,11 +524,7 @@ private void preClose() throws IOException { */ public void addRawArchiveEntry(ZipArchiveEntry entry, InputStream rawStream) throws IOException { - ZipArchiveEntry ae = new ZipArchiveEntry((java.util.zip.ZipEntry)entry); - int unixMode = entry.getUnixMode(); - if (unixMode > 0){ - ae.setUnixMode(unixMode); - } + ZipArchiveEntry ae = new ZipArchiveEntry(entry); putArchiveEntry(ae); copyFromZipInputStream(rawStream); closeCopiedEntry(); From ee60c6dfa9b8756689bae301168c8a64c79fc92c Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 2 Jan 2015 15:00:00 +0000 Subject: [PATCH 054/189] add @Override to clone method git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1649058 13f79535-47bb-0310-9956-ffa450edef68 --- .../apache/commons/compress/archivers/zip/GeneralPurposeBit.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java b/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java index 350514df756..6e80e364b6b 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java @@ -221,6 +221,7 @@ public boolean equals(Object o) { && g.dataDescriptorFlag == dataDescriptorFlag; } + @Override public Object clone() { try { return super.clone(); From 955b510c221420620d624c29805f1cd4076e8499 Mon Sep 17 00:00:00 
2001 From: Stefan Bodewig Date: Fri, 2 Jan 2015 15:11:07 +0000 Subject: [PATCH 055/189] better use getters than direct attribute access git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1649061 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress/archivers/zip/ZipArchiveEntry.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java index 36fac954cde..085403d610a 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java @@ -138,9 +138,10 @@ public ZipArchiveEntry(ZipArchiveEntry entry) throws ZipException { setInternalAttributes(entry.getInternalAttributes()); setExternalAttributes(entry.getExternalAttributes()); setExtraFields(entry.getExtraFields(true)); - setPlatform(entry.platform); - setGeneralPurposeBit(entry.gpb == null ? null : - (GeneralPurposeBit) entry.gpb.clone()); + setPlatform(entry.getPlatform()); + GeneralPurposeBit other = entry.getGeneralPurposeBit(); + setGeneralPurposeBit(other == null ? 
null : + (GeneralPurposeBit) other.clone()); } /** From 386b36f357ee2f050819ff71c6ae09d26a12c34e Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Fri, 2 Jan 2015 21:23:09 +0000 Subject: [PATCH 056/189] Review comments git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1649128 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/archivers/zip/ParallelScatterZipCreator.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java index 3b33840d271..1993a74cce2 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java @@ -40,14 +40,13 @@ public class ParallelScatterZipCreator { private List streams = Collections.synchronizedList(new ArrayList()); private final ExecutorService es; + private final ScatterGatherBackingStoreSupplier defaultSupplier; private final long startedAt = System.currentTimeMillis(); private long compressionDoneAt = 0; private long scatterDoneAt; - static ScatterGatherBackingStoreSupplier defaultSupplier = new DefaultSupplier(); - - static class DefaultSupplier implements ScatterGatherBackingStoreSupplier { + private static class DefaultSupplier implements ScatterGatherBackingStoreSupplier { AtomicInteger storeNum = new AtomicInteger(0); public ScatterGatherBackingStore get() throws IOException { @@ -64,7 +63,7 @@ public static ScatterZipOutputStream createDeferred(ScatterGatherBackingStoreSup } - ThreadLocal tlScatterStreams = new ThreadLocal() { + private ThreadLocal tlScatterStreams = new ThreadLocal() { @Override protected ScatterZipOutputStream initialValue() { try { @@ -90,6 +89,7 @@ public ParallelScatterZipCreator() { * @param nThreads the number of threads to use in parallel. 
*/ public ParallelScatterZipCreator(int nThreads) { + defaultSupplier = new DefaultSupplier(); es = Executors.newFixedThreadPool(nThreads); } From 627b4fc968ab343218286584e662aecfdaeb20c3 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Fri, 2 Jan 2015 21:53:01 +0000 Subject: [PATCH 057/189] Reinstated protected deflate() method git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1649133 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress/archivers/zip/StreamCompressor.java | 2 +- .../compress/archivers/zip/ZipArchiveOutputStream.java | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java index 26ee7aa05fc..5570a1b7e9f 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java @@ -239,7 +239,7 @@ private void deflateUntilInputIsNeeded() throws IOException { } } - private void deflate() throws IOException { + void deflate() throws IOException { int len = def.deflate(outputBuffer, 0, outputBuffer.length); if (len > 0) { writeCounted(outputBuffer, 0, len); diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index 1bc0510e185..4ee2511fa35 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -920,6 +920,14 @@ public void flush() throws IOException { */ static final byte[] ZIP64_EOCD_LOC_SIG = ZipLong.getBytes(0X07064B50L); + /** + * Writes next block of compressed data to the output stream. 
+ * @throws IOException on error + */ + protected final void deflate() throws IOException { + streamCompressor.deflate(); + } + /** * Writes the local file header entry * @param ze the entry to write From 7ea31ac9b55685f596e3f51370b7fa177ac25753 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Fri, 2 Jan 2015 22:20:38 +0000 Subject: [PATCH 058/189] Added docs on parallel zip git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1649140 13f79535-47bb-0310-9956-ffa450edef68 --- src/site/xdoc/zip.xml | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/src/site/xdoc/zip.xml b/src/site/xdoc/zip.xml index 6ce9da867b8..d3041349d19 100644 --- a/src/site/xdoc/zip.xml +++ b/src/site/xdoc/zip.xml @@ -486,6 +486,35 @@ archive.

    + + + +

    Starting with Compress 1.10 there is now built-in support for + parallel creation of zip archives

    + +

    Multiple threads can write + to their own ScatterZipOutputStream + instance that is backed to file or to some user-implemented form of + storage (implementing ScatterGatherBackingStore).

    + +

    When the threads finish, they can join these streams together + to a complete zip file using the writeTo method + that will write a single ScatterOutputStream to a target + ZipArchiveOutputStream.

    + +

    To assist this process, clients can use + ParallelScatterZipCreator that will handle threads + pools and correct memory model consistency so the client + can avoid these issues. Please note that when writing well-formed + Zip files this way, it is usually necessary to keep a + separate ScatterZipOutputStream that receives all directories + and writes this to the target ZipArchiveOutputStream before + the ones created through ParallelScatterZipCreator. This is the responsibility of the client.

    + +

    There is no guarantee of order of the entries when writing a Zip + file with ParallelScatterZipCreator.

    +
    + From 4e6b2cb2de9f563d81f322caa7e804fd7e09b869 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Fri, 2 Jan 2015 22:21:32 +0000 Subject: [PATCH 059/189] Made site build on jdk8. Fixed stricter javadoc requirements git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1649142 13f79535-47bb-0310-9956-ffa450edef68 --- pom.xml | 2 +- .../archivers/sevenz/SevenZMethodConfiguration.java | 2 +- .../archivers/zip/ParallelScatterZipCreator.java | 10 ++++++---- .../compress/archivers/zip/ScatterZipOutputStream.java | 8 ++++---- .../deflate/DeflateCompressorInputStream.java | 2 +- 5 files changed, 13 insertions(+), 11 deletions(-) diff --git a/pom.xml b/pom.xml index 222c8d1e98b..7c4402dd1ab 100644 --- a/pom.xml +++ b/pom.xml @@ -279,7 +279,7 @@ Unix Compress, DEFLATE and ar, cpio, jar, tar, zip, dump, 7z, arj. org.codehaus.mojo findbugs-maven-plugin - 2.5.5 + 3.0.0 Normal Default diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZMethodConfiguration.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZMethodConfiguration.java index d8d34a53b9a..753a561b0a3 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZMethodConfiguration.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZMethodConfiguration.java @@ -23,7 +23,7 @@ *

    The exact type and interpretation of options depends on the * method being configured. Currently supported are:

    * - * + *
    * * * diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java index 1993a74cce2..c4b1bf492a7 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java @@ -31,11 +31,11 @@ /** * Creates a zip in parallel by using multiple threadlocal #ScatterZipOutputStream instances. - *

    + *

    * Note that this class generally makes no guarantees about the order of things written to * the output file. Things that need to come in a specific order (manifests, directories) * must be handled by the client of this class, usually by writing these things to the - * #ZipArchiveOutputStream *before* calling #writeTo on this class. + * #ZipArchiveOutputStream *before* calling #writeTo on this class.

    */ public class ParallelScatterZipCreator { private List streams = Collections.synchronizedList(new ArrayList()); @@ -95,8 +95,9 @@ public ParallelScatterZipCreator(int nThreads) { /** * Adds an archive entry to this archive. - *

    + *

    * This method is expected to be called from a single client thread + *

    * * @param zipArchiveEntry The entry to add. Compression method * @param source The source input stream supplier @@ -119,9 +120,10 @@ public ScatterZipOutputStream call() throws Exception { /** * Write the contents this to the target #ZipArchiveOutputStream. - *

    + *

    * It may be beneficial to write things like directories and manifest files to the targetStream * before calling this method. + *

    * * @param targetStream The ZipArchiveOutputStream to receive the contents of the scatter streams * @throws IOException If writing fails diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java index 9c5c7ab9d6b..622d0a5a10d 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java @@ -30,13 +30,13 @@ /** * A zip output stream that is optimized for multi-threaded scatter/gather construction of zip files. - *

    + *

    * The internal data format of the entries used by this class are entirely private to this class * and are not part of any public api whatsoever. - *

    - * It is possible to extend this class to support different kinds of backing storage, the default + *

    + *

    It is possible to extend this class to support different kinds of backing storage, the default * implementation only supports file-based backing. - *

    + *

    * Thread safety: This class supports multiple threads. But the "writeTo" method must be called * by the thread that originally created the ZipArchiveEntry. * diff --git a/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java index d8396487f98..a81a7a0da1c 100644 --- a/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java @@ -48,7 +48,7 @@ public DeflateCompressorInputStream(InputStream inputStream) { * from the specified input stream. * * @param inputStream where to read the compressed data - * @param compressor parameters + * @param parameters parameters */ public DeflateCompressorInputStream(InputStream inputStream, DeflateParameters parameters) { From 67097df60b50ba705a04b9fe1f1516ed7559cb07 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Fri, 2 Jan 2015 22:37:54 +0000 Subject: [PATCH 060/189] Added capability to delete temp files to ScatterGatherBackingStore git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1649146 13f79535-47bb-0310-9956-ffa450edef68 --- .../zip/FileBasedScatterGatherBackingStore.java | 6 +++++- .../archivers/zip/ParallelScatterZipCreator.java | 15 ++++++++++----- .../archivers/zip/ScatterGatherBackingStore.java | 15 ++++++++++++--- .../archivers/zip/ScatterZipOutputStream.java | 2 +- 4 files changed, 28 insertions(+), 10 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/FileBasedScatterGatherBackingStore.java b/src/main/java/org/apache/commons/compress/archivers/zip/FileBasedScatterGatherBackingStore.java index 2ff5a29290b..5e64086656e 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/FileBasedScatterGatherBackingStore.java +++ 
b/src/main/java/org/apache/commons/compress/archivers/zip/FileBasedScatterGatherBackingStore.java @@ -43,11 +43,15 @@ public InputStream getInputStream() throws IOException { } @SuppressWarnings("ResultOfMethodCallIgnored") - public void close() throws IOException { + public void closeForWriting() throws IOException { os.close(); } public void writeOut(byte[] data, int offset, int length) throws IOException { os.write(data, offset, length); } + + public void close() throws IOException { + target.delete(); + } } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java index c4b1bf492a7..fa9450bfee8 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java @@ -20,7 +20,6 @@ import java.io.File; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; @@ -29,6 +28,8 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.zip.Deflater; +import static java.util.Collections.synchronizedList; + /** * Creates a zip in parallel by using multiple threadlocal #ScatterZipOutputStream instances. *

    @@ -38,7 +39,8 @@ * #ZipArchiveOutputStream *before* calling #writeTo on this class.

    */ public class ParallelScatterZipCreator { - private List streams = Collections.synchronizedList(new ArrayList()); + private List streams = synchronizedList(new ArrayList()); + private List backingStores = synchronizedList(new ArrayList()); private final ExecutorService es; private final ScatterGatherBackingStoreSupplier defaultSupplier; @@ -55,14 +57,14 @@ public ScatterGatherBackingStore get() throws IOException { } } - public static ScatterZipOutputStream createDeferred(ScatterGatherBackingStoreSupplier scatterGatherBackingStoreSupplier) + private ScatterZipOutputStream createDeferred(ScatterGatherBackingStoreSupplier scatterGatherBackingStoreSupplier) throws IOException { ScatterGatherBackingStore bs = scatterGatherBackingStoreSupplier.get(); + backingStores.add( bs); StreamCompressor sc = StreamCompressor.create(Deflater.DEFAULT_COMPRESSION, bs); return new ScatterZipOutputStream(bs, sc); } - private ThreadLocal tlScatterStreams = new ThreadLocal() { @Override protected ScatterZipOutputStream initialValue() { @@ -141,7 +143,10 @@ public void writeTo(ZipArchiveOutputStream targetStream) throws IOException, Int } scatterDoneAt = System.currentTimeMillis(); - // Maybe close ScatterZipOS. We should do something to get rid of tempfiles. + + for (ScatterGatherBackingStore backingStore : backingStores) { + backingStore.close(); + } } /** diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStore.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStore.java index 869e84f6e11..9e33c53375c 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStore.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStore.java @@ -22,8 +22,12 @@ import java.io.InputStream; /** - * Abstraction over a scatter-output zip archives can be written to - * with a method to gather all content from an InputStream later on. + *

    Abstraction over a scatter-output zip archives can be written to + * with a method to gather all content from an InputStream later on.

    + * + *

    It is the responsibility of the allocator of an instance of this class + * to close this. Closing it should clear off any allocated structures + * and preferably delete files.

    * * @since 1.10 */ @@ -45,5 +49,10 @@ public interface ScatterGatherBackingStore extends Closeable { * @param length the amount of data to write * @throws IOException when something fails */ - void writeOut(byte[] data, int offset, int length) throws IOException ; + void writeOut(byte[] data, int offset, int length) throws IOException; + + /** + * Closes this backing store for further writing. + */ + void closeForWriting() throws IOException; } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java index 622d0a5a10d..3b373c31559 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java @@ -100,7 +100,7 @@ public void addArchiveEntry(ZipArchiveEntry zipArchiveEntry, InputStream payload * @throws IOException If writing fails */ public void writeTo(ZipArchiveOutputStream target) throws IOException { - backingStore.close(); + backingStore.closeForWriting(); InputStream data = backingStore.getInputStream(); for (CompressedEntry compressedEntry : items) { final BoundedInputStream rawStream = new BoundedInputStream(data, compressedEntry.compressedSize); From 4d9b92a6741c03c43a6dbae23238a75f462bdb39 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Fri, 2 Jan 2015 22:46:36 +0000 Subject: [PATCH 061/189] Re-made StreamCompressor public. Unfortunately the createDeferred method was hiding this class from the client code. 
When createDeferred was made private, client needs StreamCompressor git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1649149 13f79535-47bb-0310-9956-ffa450edef68 --- .../apache/commons/compress/archivers/zip/StreamCompressor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java index 5570a1b7e9f..4172751c0ef 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java @@ -31,7 +31,7 @@ * * @since 1.10 */ -abstract class StreamCompressor implements Closeable { +public abstract class StreamCompressor implements Closeable { /* * Apparently Deflater.setInput gets slowed down a lot on Sun JVMs From dcd60860c586fe97572e1c6c0373074c0ec6b135 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sat, 3 Jan 2015 04:58:35 +0000 Subject: [PATCH 062/189] allow close to be used even if stream hasn't been closed before git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1649172 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/FileBasedScatterGatherBackingStore.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/FileBasedScatterGatherBackingStore.java b/src/main/java/org/apache/commons/compress/archivers/zip/FileBasedScatterGatherBackingStore.java index 5e64086656e..42c24b8f9a5 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/FileBasedScatterGatherBackingStore.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/FileBasedScatterGatherBackingStore.java @@ -32,6 +32,7 @@ public class FileBasedScatterGatherBackingStore implements ScatterGatherBackingStore { private final File target; private final FileOutputStream os; + private 
boolean closed; public FileBasedScatterGatherBackingStore(File target) throws FileNotFoundException { this.target = target; @@ -44,7 +45,10 @@ public InputStream getInputStream() throws IOException { @SuppressWarnings("ResultOfMethodCallIgnored") public void closeForWriting() throws IOException { - os.close(); + if (!closed) { + os.close(); + closed = true; + } } public void writeOut(byte[] data, int offset, int length) throws IOException { @@ -52,6 +56,7 @@ public void writeOut(byte[] data, int offset, int length) throws IOException { } public void close() throws IOException { + closeForWriting(); target.delete(); } } From af60df2bc3bd51c1a0f866b8106014be171b62e2 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Sat, 3 Jan 2015 09:48:46 +0000 Subject: [PATCH 063/189] Cleaned up close semantics of backing store, made this governed by ScatterZipOutputStream Also added a way to provide a custom supplier git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1649181 13f79535-47bb-0310-9956-ffa450edef68 --- .../zip/ParallelScatterZipCreator.java | 17 ++++++++++++++--- .../archivers/zip/ScatterZipOutputStream.java | 12 +++++++++++- .../zip/ScatterZipOutputStreamTest.java | 1 + 3 files changed, 26 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java index fa9450bfee8..a7a58e06a61 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java @@ -42,7 +42,7 @@ public class ParallelScatterZipCreator { private List streams = synchronizedList(new ArrayList()); private List backingStores = synchronizedList(new ArrayList()); private final ExecutorService es; - private final ScatterGatherBackingStoreSupplier defaultSupplier; + private final 
ScatterGatherBackingStoreSupplier supplier; private final long startedAt = System.currentTimeMillis(); private long compressionDoneAt = 0; @@ -69,7 +69,7 @@ private ScatterZipOutputStream createDeferred(ScatterGatherBackingStoreSupplier @Override protected ScatterZipOutputStream initialValue() { try { - ScatterZipOutputStream scatterStream = createDeferred(defaultSupplier); + ScatterZipOutputStream scatterStream = createDeferred(supplier); streams.add(scatterStream); return scatterStream; } catch (IOException e) { @@ -91,7 +91,17 @@ public ParallelScatterZipCreator() { * @param nThreads the number of threads to use in parallel. */ public ParallelScatterZipCreator(int nThreads) { - defaultSupplier = new DefaultSupplier(); + this( nThreads, new DefaultSupplier()); + } + + /** + * Create a ParallelScatterZipCreator + * + * @param nThreads the number of threads to use in parallel. + * @param backingStoreSupplier The supplier of backing store which shall be used + */ + public ParallelScatterZipCreator(int nThreads, ScatterGatherBackingStoreSupplier backingStoreSupplier) { + supplier = backingStoreSupplier; es = Executors.newFixedThreadPool(nThreads); } @@ -140,6 +150,7 @@ public void writeTo(ZipArchiveOutputStream targetStream) throws IOException, Int for (ScatterZipOutputStream scatterStream : streams) { scatterStream.writeTo(targetStream); + scatterStream.close(); } scatterDoneAt = System.currentTimeMillis(); diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java index 3b373c31559..487af3bade3 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java @@ -20,6 +20,7 @@ import org.apache.commons.compress.utils.BoundedInputStream; +import java.io.Closeable; import java.io.File; import java.io.FileNotFoundException; 
import java.io.IOException; @@ -42,7 +43,7 @@ * * @since 1.10 */ -public class ScatterZipOutputStream { +public class ScatterZipOutputStream implements Closeable { private final Queue items = new ConcurrentLinkedQueue(); private final ScatterGatherBackingStore backingStore; private final StreamCompressor streamCompressor; @@ -110,6 +111,15 @@ public void writeTo(ZipArchiveOutputStream target) throws IOException { data.close(); } + + /** + * Closes this stream, freeing all resources involved in the creation of this stream. + * @throws IOException If closing fails + */ + public void close() throws IOException { + backingStore.close(); + } + /** * Create a ScatterZipOutputStream with default compression level that is backed by a file * diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java index 431f6b0c9da..d6de358f402 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java @@ -45,6 +45,7 @@ public void putArchiveEntry() throws Exception { ZipArchiveOutputStream outputStream = new ZipArchiveOutputStream(target); scatterZipOutputStream.writeTo( outputStream); outputStream.close(); + scatterZipOutputStream.close(); ZipFile zf = new ZipFile(target); final ZipArchiveEntry b_entry = zf.getEntries("b.txt").iterator().next(); From 8a942e0d97363cbe094cd4b9a083bc9815ca02db Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Sat, 3 Jan 2015 11:59:56 +0000 Subject: [PATCH 064/189] Cleaned up close semantics further. 
Increased copy buffer size somewhat git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1649189 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/ParallelScatterZipCreator.java | 14 +++++++------- .../archivers/zip/ScatterZipOutputStream.java | 3 +-- .../archivers/zip/ZipArchiveOutputStream.java | 7 +++---- .../archivers/zip/ScatterZipOutputStreamTest.java | 6 ++++-- 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java index a7a58e06a61..fed3d9fd308 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java @@ -19,6 +19,7 @@ import java.io.File; import java.io.IOException; +import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.concurrent.Callable; @@ -40,7 +41,6 @@ */ public class ParallelScatterZipCreator { private List streams = synchronizedList(new ArrayList()); - private List backingStores = synchronizedList(new ArrayList()); private final ExecutorService es; private final ScatterGatherBackingStoreSupplier supplier; @@ -60,7 +60,6 @@ public ScatterGatherBackingStore get() throws IOException { private ScatterZipOutputStream createDeferred(ScatterGatherBackingStoreSupplier scatterGatherBackingStoreSupplier) throws IOException { ScatterGatherBackingStore bs = scatterGatherBackingStoreSupplier.get(); - backingStores.add( bs); StreamCompressor sc = StreamCompressor.create(Deflater.DEFAULT_COMPRESSION, bs); return new ScatterZipOutputStream(bs, sc); } @@ -122,7 +121,12 @@ public void addArchiveEntry(final ZipArchiveEntry zipArchiveEntry, final InputSt es.submit(new Callable() { public ScatterZipOutputStream call() throws Exception { ScatterZipOutputStream streamToUse = 
tlScatterStreams.get(); - streamToUse.addArchiveEntry(zipArchiveEntry, source.get(), method); + InputStream payload = source.get(); + try { + streamToUse.addArchiveEntry(zipArchiveEntry, payload, method); + } finally { + payload.close(); + } return streamToUse; } @@ -154,10 +158,6 @@ public void writeTo(ZipArchiveOutputStream targetStream) throws IOException, Int } scatterDoneAt = System.currentTimeMillis(); - - for (ScatterGatherBackingStore backingStore : backingStores) { - backingStore.close(); - } } /** diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java index 487af3bade3..4c7cf512fec 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java @@ -82,13 +82,12 @@ public ScatterZipOutputStream(ScatterGatherBackingStore backingStore, * Add an archive entry to this scatter stream. * * @param zipArchiveEntry The entry to write - * @param payload The content to write for the entry + * @param payload The content to write for the entry. The caller is responsible for closing this. 
* @param method The compression method * @throws IOException If writing fails */ public void addArchiveEntry(ZipArchiveEntry zipArchiveEntry, InputStream payload, int method) throws IOException { streamCompressor.deflate(payload, method); - payload.close(); items.add(new CompressedEntry(zipArchiveEntry, streamCompressor.getCrc32(), streamCompressor.getBytesWritten(), method, streamCompressor.getBytesRead())); diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index 4ee2511fa35..bd8dcb53871 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -845,7 +845,7 @@ private void writeCounted(byte[] data) throws IOException { - + final byte[] copyBuffer = new byte[16384]; private void copyFromZipInputStream(InputStream src) throws IOException { if (entry == null) { @@ -853,11 +853,10 @@ private void copyFromZipInputStream(InputStream src) throws IOException { } ZipUtil.checkRequestedFeatures(entry.entry); entry.hasWritten = true; - byte[] tmpBuf = new byte[4096]; int length; - while ((length = src.read( tmpBuf )) >= 0 ) + while ((length = src.read(copyBuffer)) >= 0 ) { - streamCompressor.writeCounted(tmpBuf, 0, length); + streamCompressor.writeCounted(copyBuffer, 0, length); count( length ); } } diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java index d6de358f402..395d85949e3 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java @@ -36,10 +36,12 @@ public void putArchiveEntry() throws Exception { final byte[] A_PAYLOAD = 
"XAAY".getBytes(); ZipArchiveEntry zab = new ZipArchiveEntry("b.txt"); - scatterZipOutputStream.addArchiveEntry(zab, new ByteArrayInputStream(B_PAYLOAD), ZipArchiveEntry.DEFLATED); + ByteArrayInputStream payload = new ByteArrayInputStream(B_PAYLOAD); + scatterZipOutputStream.addArchiveEntry(zab, payload, ZipArchiveEntry.DEFLATED); ZipArchiveEntry zae = new ZipArchiveEntry("a.txt"); - scatterZipOutputStream.addArchiveEntry(zae, new ByteArrayInputStream(A_PAYLOAD), ZipArchiveEntry.DEFLATED); + ByteArrayInputStream payload1 = new ByteArrayInputStream(A_PAYLOAD); + scatterZipOutputStream.addArchiveEntry(zae, payload1, ZipArchiveEntry.DEFLATED); File target = File.createTempFile("scattertest", ".zip"); ZipArchiveOutputStream outputStream = new ZipArchiveOutputStream(target); From c4deb3fd4b81e9f174e771476cd19a0d3330fdc9 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sun, 4 Jan 2015 10:34:41 +0000 Subject: [PATCH 065/189] testcase demonstrating zip64 EOCDL corruption git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1649312 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/Zip64SupportIT.java | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java b/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java index fc416b812bb..c676b8949f0 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java @@ -130,6 +130,45 @@ public class Zip64SupportIT { "zip6/5GB_of_Zeros"); } + @Test public void writeAndRead5GBOfZerosUsingZipFile() throws Throwable { + File f = null; + try { + f = write5GBZerosFile("writeAndRead5GBOfZerosUsingZipFile"); + read5GBOfZerosUsingZipFileImpl(f, "5GB_of_Zeros"); + } finally { + if (f != null) { + AbstractTestCase.tryHardToDelete(f); + } + } + } + + private static File write5GBZerosFile(String 
testName) throws Throwable { + File f = getTempFile(testName); + ZipArchiveOutputStream zos = new ZipArchiveOutputStream(f); + try { + zos.setUseZip64(Zip64Mode.Always); + byte[] buf = new byte[ONE_MILLION]; + ZipArchiveEntry zae = new ZipArchiveEntry("5GB_of_Zeros"); + zae.setSize(FIVE_BILLION); + zae.setMethod(ZipEntry.DEFLATED); + zae.setCrc(0x8a408f16L); + zos.putArchiveEntry(zae); + for (int j = 0; j < FIVE_BILLION / 1000 / 1000; j++) { + zos.write(buf); + } + zos.closeArchiveEntry(); + zos.close(); + } catch (IOException ex) { + System.err.println("Failed to write archive because of: " + + ex.getMessage() + + " - likely not enough disk space."); + assumeTrue(false); + } finally { + zos.destroy(); + } + return f; + } + @Test public void read100KFilesUsingZipFile() throws Throwable { read100KFilesUsingZipFileImpl(get100KFileFile()); } From 6d04cbcb717845a38ee57e7ed0653fb9cc171e73 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sun, 4 Jan 2015 12:25:48 +0000 Subject: [PATCH 066/189] consistently use limit() - position() for length calculations I htink position() has always been zero but using limit() as size in one place and limit() - position() in other is very confusing. 
git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1649321 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/ZipArchiveOutputStream.java | 32 +++++++++++-------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index bd8dcb53871..5be1e1c65ed 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -618,9 +618,11 @@ private void rewriteSizesAndCrc(boolean actuallyNeedsZip64) } if (hasZip64Extra(entry.entry)) { + ByteBuffer name = getName(entry.entry); + int nameLen = name.limit() - name.position(); // seek to ZIP64 extra, skip header and size information raf.seek(entry.localDataStart + 3 * WORD + 2 * SHORT - + getName(entry.entry).limit() + 2 * SHORT); + + nameLen + 2 * SHORT); // inside the ZIP64 extra uncompressed size comes // first, unlike the LFH, CD or data descriptor writeOut(ZipEightByteInteger.getBytes(entry.entry.getSize())); @@ -952,7 +954,8 @@ protected void writeLocalFileHeader(ZipArchiveEntry ze) throws IOException { private byte[] createLocalFileHeader(ZipArchiveEntry ze, ByteBuffer name, boolean encodable) { byte[] extra = ze.getLocalFileDataExtra(); - int len= LFH_FILENAME_OFFSET + name.limit() + extra.length; + final int nameLen = name.limit() - name.position(); + int len= LFH_FILENAME_OFFSET + nameLen + extra.length; byte[] buf = new byte[len]; System.arraycopy(LFH_SIG, 0, buf, LFH_SIG_OFFSET, WORD); @@ -999,13 +1002,12 @@ private byte[] createLocalFileHeader(ZipArchiveEntry ze, ByteBuffer name, boolea } } // file name length - putShort(name.limit(), buf, LFH_FILENAME_LENGTH_OFFSET); + putShort(nameLen, buf, LFH_FILENAME_LENGTH_OFFSET); // extra field length putShort(extra.length, buf, 
LFH_EXTRA_LENGTH_OFFSET); // file name - final int nameLen = name.limit() - name.position(); System.arraycopy( name.array(), name.arrayOffset(), buf, LFH_FILENAME_OFFSET, nameLen); System.arraycopy(extra, 0, buf, LFH_FILENAME_OFFSET + nameLen, extra.length); @@ -1117,7 +1119,9 @@ private byte[] createCentralFileHeader(ZipArchiveEntry ze, ByteBuffer name, long } ByteBuffer commentB = getEntryEncoding(ze).encode(comm); - int len= CFH_FILENAME_OFFSET + name.limit() + extra.length + commentB.limit(); + final int nameLen = name.limit() - name.position(); + final int commentLen = commentB.limit() - commentB.position(); + int len= CFH_FILENAME_OFFSET + nameLen + extra.length + commentLen; byte[] buf = new byte[len]; System.arraycopy(CFH_SIG, 0, buf, CFH_SIG_OFFSET, WORD); @@ -1152,12 +1156,12 @@ private byte[] createCentralFileHeader(ZipArchiveEntry ze, ByteBuffer name, long putLong(ze.getSize(), buf, CFH_ORIGINAL_SIZE_OFFSET); } - putShort(name.limit(), buf, CFH_FILENAME_LENGTH_OFFSET); + putShort(nameLen, buf, CFH_FILENAME_LENGTH_OFFSET); // extra field length putShort(extra.length, buf, CFH_EXTRA_LENGTH_OFFSET); - putShort(commentB.limit(), buf, CFH_COMMENT_LENGTH_OFFSET); + putShort(commentLen, buf, CFH_COMMENT_LENGTH_OFFSET); // disk number start System.arraycopy(ZERO, 0, buf, CFH_DISK_NUMBER_OFFSET, SHORT); @@ -1172,16 +1176,15 @@ private byte[] createCentralFileHeader(ZipArchiveEntry ze, ByteBuffer name, long putLong(Math.min(lfhOffset, ZIP64_MAGIC), buf, CFH_LFH_OFFSET); // file name - System.arraycopy(name.array(), name.arrayOffset(), buf, CFH_FILENAME_OFFSET, name.limit() - name.position()); + System.arraycopy(name.array(), name.arrayOffset(), buf, CFH_FILENAME_OFFSET, nameLen); - int extraStart = CFH_FILENAME_OFFSET + name.limit(); + int extraStart = CFH_FILENAME_OFFSET + nameLen; System.arraycopy(extra, 0, buf, extraStart, extra.length); - int commentLength = commentB.limit() - commentB.position(); - int commentStart = extraStart + commentLength; + int 
commentStart = extraStart + commentLen; // file comment - System.arraycopy(commentB.array(), commentB.arrayOffset(), buf, commentStart, commentLength); + System.arraycopy(commentB.array(), commentB.arrayOffset(), buf, commentStart, commentLen); return buf; } @@ -1246,8 +1249,9 @@ protected void writeCentralDirectoryEnd() throws IOException { // ZIP file comment ByteBuffer data = this.zipEncoding.encode(comment); - writeCounted(ZipShort.getBytes(data.limit())); - streamCompressor.writeCounted(data.array(), data.arrayOffset(), data.limit() - data.position()); + int dataLen = data.limit() - data.position(); + writeCounted(ZipShort.getBytes(dataLen)); + streamCompressor.writeCounted(data.array(), data.arrayOffset(), dataLen); } private static final byte[] ONE = ZipLong.getBytes(1L); From 0b33d56cf025e034927290ec9ee651ddc8acd495 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sun, 4 Jan 2015 12:29:57 +0000 Subject: [PATCH 067/189] was using the wrong count for locator git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1649322 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress/archivers/zip/ZipArchiveOutputStream.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index 5be1e1c65ed..0f14fdb6928 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -1278,7 +1278,7 @@ protected void writeZip64CentralDirectory() throws IOException { return; } - long offset = streamCompressor.getBytesWritten(); + long offset = streamCompressor.getTotalBytesWritten(); writeOut(ZIP64_EOCD_SIG); // size, we don't have any variable length as we don't support From 9f41819098fee8877fa85835adbeec3acec19d07 Mon Sep 17 00:00:00 2001 From: 
Kristian Rosenvold Date: Sun, 4 Jan 2015 17:35:46 +0000 Subject: [PATCH 068/189] Renamed method to getBytesWrittenForLastEntry git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1649374 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/ScatterZipOutputStream.java | 2 +- .../compress/archivers/zip/StreamCompressor.java | 16 ++++++++-------- .../archivers/zip/StreamCompressorTest.java | 4 ++-- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java index 4c7cf512fec..9d6548dd9ca 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java @@ -89,7 +89,7 @@ public ScatterZipOutputStream(ScatterGatherBackingStore backingStore, public void addArchiveEntry(ZipArchiveEntry zipArchiveEntry, InputStream payload, int method) throws IOException { streamCompressor.deflate(payload, method); items.add(new CompressedEntry(zipArchiveEntry, streamCompressor.getCrc32(), - streamCompressor.getBytesWritten(), method, + streamCompressor.getBytesWrittenForLastEntry(), method, streamCompressor.getBytesRead())); } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java index 4172751c0ef..d1a9062073f 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java @@ -46,7 +46,7 @@ public abstract class StreamCompressor implements Closeable { private final CRC32 crc = new CRC32(); - private long writtenToOutputStream = 0; + private long writtenToOutputStreamForLastEntry = 0; private long sourcePayloadLength = 0; private long 
totalWrittenToOutputStream = 0; @@ -132,12 +132,12 @@ public long getBytesRead() { } /** - * The number of bytes written to the output + * The number of bytes written to the output for the last entry * * @return The number of bytes, never negative */ - public long getBytesWritten() { - return writtenToOutputStream; + public long getBytesWrittenForLastEntry() { + return writtenToOutputStreamForLastEntry; } /** @@ -181,7 +181,7 @@ public void deflate(InputStream source, int method) throws IOException { * @throws IOException on error */ long write(byte[] b, int offset, int length, int method) throws IOException { - long current = writtenToOutputStream; + long current = writtenToOutputStreamForLastEntry; crc.update(b, offset, length); if (method == ZipArchiveEntry.DEFLATED) { writeDeflated(b, offset, length); @@ -189,7 +189,7 @@ long write(byte[] b, int offset, int length, int method) throws IOException { writeCounted(b, offset, length); } sourcePayloadLength += length; - return writtenToOutputStream - current; + return writtenToOutputStreamForLastEntry - current; } @@ -197,7 +197,7 @@ void reset() { crc.reset(); def.reset(); sourcePayloadLength = 0; - writtenToOutputStream = 0; + writtenToOutputStreamForLastEntry = 0; } public void close() throws IOException { @@ -252,7 +252,7 @@ public void writeCounted(byte[] data) throws IOException { public void writeCounted(byte[] data, int offset, int length) throws IOException { writeOut(data, offset, length); - writtenToOutputStream += length; + writtenToOutputStreamForLastEntry += length; totalWrittenToOutputStream += length; } diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/StreamCompressorTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/StreamCompressorTest.java index 539c3e87bdb..905751f51dc 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/StreamCompressorTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/StreamCompressorTest.java @@ -35,7 
+35,7 @@ public void storedEntries() throws Exception { sc.deflate(new ByteArrayInputStream("A".getBytes()), ZipEntry.STORED); sc.deflate(new ByteArrayInputStream("BAD".getBytes()), ZipEntry.STORED); assertEquals(3, sc.getBytesRead()); - assertEquals(3, sc.getBytesWritten()); + assertEquals(3, sc.getBytesWrittenForLastEntry()); assertEquals(344750961, sc.getCrc32()); sc.deflate(new ByteArrayInputStream("CAFE".getBytes()), ZipEntry.STORED); assertEquals("ABADCAFE", baos.toString()); @@ -47,7 +47,7 @@ public void deflatedEntries() throws Exception { StreamCompressor sc = StreamCompressor.create( baos); sc.deflate(new ByteArrayInputStream("AAAAAABBBBBB".getBytes()), ZipEntry.DEFLATED); assertEquals(12, sc.getBytesRead()); - assertEquals(8, sc.getBytesWritten()); + assertEquals(8, sc.getBytesWrittenForLastEntry()); assertEquals(3299542, sc.getCrc32()); final byte[] actuals = baos.toByteArray(); From cbbeae7719943308494825574a0002ea649b3de3 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Wed, 7 Jan 2015 21:11:04 +0000 Subject: [PATCH 069/189] Modified ZipOutputStream to be able to avoid seeking and rewriting headers when operating through addRaw. The basic idea is that an entry added through addRaw is fully known at the time we call add, so we do not need to go back in the file to rewrite fields. Adding this third mode increases the pain of multiple code paths, and we should probably consider a ground-up rewrite at some point. 
git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1650166 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/ZipArchiveOutputStream.java | 142 ++++++++++++------ .../compress/archivers/ZipTestCase.java | 43 +++++- 2 files changed, 131 insertions(+), 54 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index 0f14fdb6928..3ebdbf835bc 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -459,7 +459,9 @@ public void closeArchiveEntry() throws IOException { long bytesWritten = streamCompressor.getTotalBytesWritten() - entry.dataStart; long realCrc = streamCompressor.getCrc32(); entry.bytesRead = streamCompressor.getBytesRead(); - doCloseEntry(realCrc, bytesWritten); + final Zip64Mode effectiveMode = getEffectiveZip64Mode(entry.entry); + final boolean actuallyNeedsZip64 = handleSizesAndCrc(bytesWritten, realCrc, effectiveMode); + closeEntry(actuallyNeedsZip64, false); streamCompressor.reset(); } @@ -470,28 +472,20 @@ public void closeArchiveEntry() throws IOException { * @throws Zip64RequiredException if the entry's uncompressed or * compressed size exceeds 4 GByte and {@link #setUseZip64} * is {@link Zip64Mode#Never}. 
+ * @param phased This entry is second phase of a 2-phase zip creation, size, compressed size and crc + * are known in ZipArchiveEntry */ - private void closeCopiedEntry() throws IOException { + private void closeCopiedEntry(boolean phased) throws IOException { preClose(); entry.bytesRead = entry.entry.getSize(); - doCloseCopiedEntry(); - } - - private void doCloseEntry(long realCrc, long bytesWritten) throws IOException { - final Zip64Mode effectiveMode = getEffectiveZip64Mode(entry.entry); - final boolean actuallyNeedsZip64 = handleSizesAndCrc(bytesWritten, realCrc, effectiveMode); - - closeEntry(actuallyNeedsZip64); - } - - private void doCloseCopiedEntry() throws IOException { Zip64Mode effectiveMode = getEffectiveZip64Mode(entry.entry); boolean actuallyNeedsZip64 = checkIfNeedsZip64(effectiveMode); - closeEntry(actuallyNeedsZip64); + closeEntry(actuallyNeedsZip64, phased); } - private void closeEntry(boolean actuallyNeedsZip64) throws IOException { - if (raf != null) { + + private void closeEntry(boolean actuallyNeedsZip64, boolean phased) throws IOException { + if (!phased && raf != null) { rewriteSizesAndCrc(actuallyNeedsZip64); } @@ -516,6 +510,10 @@ private void preClose() throws IOException { /** * Adds an archive entry with a raw input stream. * + * If crc, size and compressed size are supplied on the entry, these values will be used as-is. + * Zip64 status is re-established based on the settings in this stream, and the supplied value + * is ignored. + * * The entry is put and closed immediately. * * @param entry The archive entry to add @@ -525,9 +523,18 @@ private void preClose() throws IOException { public void addRawArchiveEntry(ZipArchiveEntry entry, InputStream rawStream) throws IOException { ZipArchiveEntry ae = new ZipArchiveEntry(entry); - putArchiveEntry(ae); + if (hasZip64Extra(ae)) { + // Will be re-added as required. 
this may make the file generated with this method + // somewhat smaller than standard mode, + // since standard mode is unable to remove the zip 64 header. + ae.removeExtraField(Zip64ExtendedInformationExtraField.HEADER_ID); + } + boolean is2PhaseSource = ae.getCrc() != -1 + && ae.getSize() != ArchiveEntry.SIZE_UNKNOWN + && ae.getCompressedSize() != -1; + putArchiveEntry(ae, is2PhaseSource); copyFromZipInputStream(rawStream); - closeCopiedEntry(); + closeCopiedEntry(is2PhaseSource); } /** @@ -590,14 +597,20 @@ private boolean handleSizesAndCrc(long bytesWritten, long crc, */ private boolean checkIfNeedsZip64(Zip64Mode effectiveMode) throws ZipException { - final boolean actuallyNeedsZip64 = effectiveMode == Zip64Mode.Always - || entry.entry.getSize() >= ZIP64_MAGIC - || entry.entry.getCompressedSize() >= ZIP64_MAGIC; + final boolean actuallyNeedsZip64 = isZip64Required(entry.entry, effectiveMode); if (actuallyNeedsZip64 && effectiveMode == Zip64Mode.Never) { throw new Zip64RequiredException(Zip64RequiredException.getEntryTooBigMessage(entry.entry)); } return actuallyNeedsZip64; } + + private boolean isZip64Required(ZipArchiveEntry entry1, Zip64Mode requestedMode) { + return requestedMode == Zip64Mode.Always || isTooLageForZip32(entry1); + } + + private boolean isTooLageForZip32(ZipArchiveEntry zipArchiveEntry){ + return zipArchiveEntry.getSize() >= ZIP64_MAGIC || zipArchiveEntry.getCompressedSize() >= ZIP64_MAGIC; + } /** * When using random access output, write the local file header * and potentiall the ZIP64 extra containing the correct CRC and @@ -659,6 +672,22 @@ private void rewriteSizesAndCrc(boolean actuallyNeedsZip64) */ @Override public void putArchiveEntry(ArchiveEntry archiveEntry) throws IOException { + putArchiveEntry(archiveEntry, false); + } + + /** + * Writes the headers for an archive entry to the output stream. + * The caller must then write the content to the stream and call + * {@link #closeArchiveEntry()} to complete the process. 
+ + * @param archiveEntry The archiveEntry + * @param phased If true size, compressedSize and crc required to be known up-front in the archiveEntry + * @throws ClassCastException if entry is not an instance of ZipArchiveEntry + * @throws Zip64RequiredException if the entry's uncompressed or + * compressed size is known to exceed 4 GByte and {@link #setUseZip64} + * is {@link Zip64Mode#Never}. + */ + private void putArchiveEntry(ArchiveEntry archiveEntry, boolean phased) throws IOException { if (finished) { throw new IOException("Stream has already been finished"); } @@ -682,13 +711,18 @@ public void putArchiveEntry(ArchiveEntry archiveEntry) throws IOException { // just a placeholder, real data will be in data // descriptor or inserted later via RandomAccessFile ZipEightByteInteger size = ZipEightByteInteger.ZERO; - if (entry.entry.getMethod() == STORED - && entry.entry.getSize() != ArchiveEntry.SIZE_UNKNOWN) { + ZipEightByteInteger compressedSize = ZipEightByteInteger.ZERO; + if (phased){ + size = new ZipEightByteInteger(entry.entry.getSize()); + compressedSize = new ZipEightByteInteger(entry.entry.getCompressedSize()); + } else if (entry.entry.getMethod() == STORED + && entry.entry.getSize() != ArchiveEntry.SIZE_UNKNOWN) { // actually, we already know the sizes size = new ZipEightByteInteger(entry.entry.getSize()); + compressedSize = size; } z64.setSize(size); - z64.setCompressedSize(size); + z64.setCompressedSize(compressedSize); entry.entry.setExtra(); } @@ -696,7 +730,7 @@ public void putArchiveEntry(ArchiveEntry archiveEntry) throws IOException { def.setLevel(level); hasCompressionLevelChanged = false; } - writeLocalFileHeader((ZipArchiveEntry) archiveEntry); + writeLocalFileHeader((ZipArchiveEntry) archiveEntry, phased); } /** @@ -935,6 +969,10 @@ protected final void deflate() throws IOException { * @throws IOException on error */ protected void writeLocalFileHeader(ZipArchiveEntry ze) throws IOException { + writeLocalFileHeader(ze, false); + } + + private 
void writeLocalFileHeader(ZipArchiveEntry ze, boolean phased) throws IOException { boolean encodable = zipEncoding.canEncode(ze.getName()); ByteBuffer name = getName(ze); @@ -942,7 +980,7 @@ protected void writeLocalFileHeader(ZipArchiveEntry ze) throws IOException { addUnicodeExtraFields(ze, encodable, name); } - final byte[] localHeader = createLocalFileHeader(ze, name, encodable); + final byte[] localHeader = createLocalFileHeader(ze, name, encodable, phased); long localHeaderStart = streamCompressor.getTotalBytesWritten(); offsets.put(ze, localHeaderStart); entry.localDataStart = localHeaderStart + LFH_CRC_OFFSET; // At crc offset @@ -951,8 +989,8 @@ protected void writeLocalFileHeader(ZipArchiveEntry ze) throws IOException { } - private byte[] createLocalFileHeader(ZipArchiveEntry ze, ByteBuffer name, boolean encodable) { - + private byte[] createLocalFileHeader(ZipArchiveEntry ze, ByteBuffer name, boolean encodable, + boolean phased) { byte[] extra = ze.getLocalFileDataExtra(); final int nameLen = name.limit() - name.position(); int len= LFH_FILENAME_OFFSET + nameLen + extra.length; @@ -963,7 +1001,11 @@ private byte[] createLocalFileHeader(ZipArchiveEntry ze, ByteBuffer name, boolea //store method in local variable to prevent multiple method calls final int zipMethod = ze.getMethod(); - putShort(versionNeededToExtract(zipMethod, hasZip64Extra(ze)), buf, LFH_VERSION_NEEDED_OFFSET); + if (phased && !isZip64Required(entry.entry, zip64Mode)){ + putShort(INITIAL_VERSION, buf, LFH_VERSION_NEEDED_OFFSET); + } else { + putShort(versionNeededToExtract(zipMethod, hasZip64Extra(ze)), buf, LFH_VERSION_NEEDED_OFFSET); + } GeneralPurposeBit generalPurposeBit = getGeneralPurposeBits(zipMethod, !encodable @@ -977,29 +1019,31 @@ private byte[] createLocalFileHeader(ZipArchiveEntry ze, ByteBuffer name, boolea ZipUtil.toDosTime(ze.getTime(), buf, LFH_TIME_OFFSET); // CRC - // compressed length - // uncompressed length - if (zipMethod == DEFLATED || raf != null) { + if 
(phased){ + putLong(ze.getCrc(), buf, LFH_CRC_OFFSET); + } else if (zipMethod == DEFLATED || raf != null) { System.arraycopy(LZERO, 0, buf, LFH_CRC_OFFSET, WORD); - if (hasZip64Extra(entry.entry)) { - // point to ZIP64 extended information extra field for - // sizes, may get rewritten once sizes are known if - // stream is seekable - ZipLong.ZIP64_MAGIC.putLong(buf, LFH_COMPRESSED_SIZE_OFFSET); - ZipLong.ZIP64_MAGIC.putLong(buf, LFH_ORIGINAL_SIZE_OFFSET); - } else { - System.arraycopy(LZERO, 0, buf, LFH_COMPRESSED_SIZE_OFFSET, WORD); - System.arraycopy(LZERO, 0, buf, LFH_ORIGINAL_SIZE_OFFSET, WORD); - } } else { putLong(ze.getCrc(), buf, LFH_CRC_OFFSET); - if (!hasZip64Extra(ze)) { - putLong(ze.getSize(), buf, LFH_COMPRESSED_SIZE_OFFSET); - putLong(ze.getSize(), buf, LFH_ORIGINAL_SIZE_OFFSET); - } else { - ZipLong.ZIP64_MAGIC.putLong(buf, LFH_COMPRESSED_SIZE_OFFSET); - ZipLong.ZIP64_MAGIC.putLong(buf, LFH_ORIGINAL_SIZE_OFFSET); - } + } + + // compressed length + // uncompressed length + if (hasZip64Extra(entry.entry)){ + // point to ZIP64 extended information extra field for + // sizes, may get rewritten once sizes are known if + // stream is seekable + ZipLong.ZIP64_MAGIC.putLong(buf, LFH_COMPRESSED_SIZE_OFFSET); + ZipLong.ZIP64_MAGIC.putLong(buf, LFH_ORIGINAL_SIZE_OFFSET); + } else if (phased) { + putLong(ze.getCompressedSize(), buf, LFH_COMPRESSED_SIZE_OFFSET); + putLong(ze.getSize(), buf, LFH_ORIGINAL_SIZE_OFFSET); + } else if (zipMethod == DEFLATED || raf != null) { + System.arraycopy(LZERO, 0, buf, LFH_COMPRESSED_SIZE_OFFSET, WORD); + System.arraycopy(LZERO, 0, buf, LFH_ORIGINAL_SIZE_OFFSET, WORD); + } else { // Stored + putLong(ze.getSize(), buf, LFH_COMPRESSED_SIZE_OFFSET); + putLong(ze.getSize(), buf, LFH_ORIGINAL_SIZE_OFFSET); } // file name length putShort(nameLen, buf, LFH_FILENAME_LENGTH_OFFSET); diff --git a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java index 
c34bc9542bd..2f3bf0d2637 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java +++ b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java @@ -30,6 +30,7 @@ import java.util.zip.ZipEntry; import org.apache.commons.compress.AbstractTestCase; +import org.apache.commons.compress.archivers.zip.Zip64Mode; import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; import org.apache.commons.compress.archivers.zip.ZipArchiveEntryPredicate; import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream; @@ -309,13 +310,17 @@ public void testCopyRawEntriesFromFile() throws IOException { File[] tmp = createTempDirAndFile(); - File reference = createReferenceFile(tmp[0]); + File reference = createReferenceFile(tmp[0], Zip64Mode.Never, "expected."); File a1 = File.createTempFile("src1.", ".zip", tmp[0]); - createFirstEntry(new ZipArchiveOutputStream(a1)).close(); + ZipArchiveOutputStream zos = new ZipArchiveOutputStream(a1); + zos.setUseZip64(Zip64Mode.Never); + createFirstEntry(zos).close(); File a2 = File.createTempFile("src2.", ".zip", tmp[0]); - createSecondEntry(new ZipArchiveOutputStream(a2)).close(); + ZipArchiveOutputStream zos1 = new ZipArchiveOutputStream(a2); + zos1.setUseZip64(Zip64Mode.Never); + createSecondEntry(zos1).close(); ZipFile zf1 = new ZipFile(a1); ZipFile zf2 = new ZipFile(a2); @@ -324,11 +329,38 @@ public void testCopyRawEntriesFromFile() zf1.copyRawEntries(zos2, allFilesPredicate); zf2.copyRawEntries(zos2, allFilesPredicate); zos2.close(); + // copyRawEntries does not add superfluous zip64 header like regular zip output stream + // does when using Zip64Mode.AsNeeded so all the source material has to be Zip64Mode.Never, + // if exact binary equality is to be achieved assertSameFileContents(reference, fileResult); zf1.close(); zf2.close(); } + public void testCopyRawZip64EntryFromFile() + throws IOException { + + File[] tmp = createTempDirAndFile(); + File reference = 
File.createTempFile("z64reference.", ".zip", tmp[0]); + ZipArchiveOutputStream zos1 = new ZipArchiveOutputStream(reference); + zos1.setUseZip64(Zip64Mode.Always); + createFirstEntry(zos1); + zos1.close(); + + File a1 = File.createTempFile("zip64src.", ".zip", tmp[0]); + ZipArchiveOutputStream zos = new ZipArchiveOutputStream(a1); + zos.setUseZip64(Zip64Mode.Always); + createFirstEntry(zos).close(); + + ZipFile zf1 = new ZipFile(a1); + File fileResult = File.createTempFile("file-actual.", ".zip", tmp[0]); + ZipArchiveOutputStream zos2 = new ZipArchiveOutputStream(fileResult); + zos2.setUseZip64(Zip64Mode.Always); + zf1.copyRawEntries(zos2, allFilesPredicate); + zos2.close(); + assertSameFileContents(reference, fileResult); + zf1.close(); + } public void testUnixModeInAddRaw() throws IOException { File[] tmp = createTempDirAndFile(); @@ -348,9 +380,10 @@ public void testUnixModeInAddRaw() throws IOException { zf1.close(); } - private File createReferenceFile(File directory) throws IOException { - File reference = File.createTempFile("expected.", ".zip", directory); + private File createReferenceFile(File directory, Zip64Mode zipMode, String prefix) throws IOException { + File reference = File.createTempFile(prefix, ".zip", directory); ZipArchiveOutputStream zos = new ZipArchiveOutputStream(reference); + zos.setUseZip64(zipMode); createFirstEntry(zos); createSecondEntry(zos); zos.close(); From eec10fcf0c795bdfacef4c9933585f65f953fd58 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Wed, 7 Jan 2015 21:14:28 +0000 Subject: [PATCH 070/189] Writing central directory in chunks instead of one by one Increased size of copybuf git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1650168 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/ZipArchiveOutputStream.java | 48 ++++++++++++++----- 1 file changed, 36 insertions(+), 12 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java 
b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index 3ebdbf835bc..7ab4b70db24 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -17,6 +17,7 @@ */ package org.apache.commons.compress.archivers.zip; +import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; @@ -25,6 +26,7 @@ import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.util.HashMap; +import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -431,9 +433,8 @@ public void finish() throws IOException { } cdOffset = streamCompressor.getTotalBytesWritten(); - for (ZipArchiveEntry ze : entries) { - writeCentralFileHeader(ze); - } + writeCentralDirectoryInChunks(); + cdLength = streamCompressor.getTotalBytesWritten() - cdOffset; writeZip64CentralDirectory(); writeCentralDirectoryEnd(); @@ -443,6 +444,25 @@ public void finish() throws IOException { finished = true; } + private void writeCentralDirectoryInChunks() throws IOException { + int NUM_PER_WRITE = 1000; + ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(70 * NUM_PER_WRITE); + Iterator iterator = entries.iterator(); + ZipArchiveEntry ze; + int count = 0; + while (iterator.hasNext()){ + ze = iterator.next(); + byteArrayOutputStream.write(createCentralFileHeader(ze)); + count++; + if (count > NUM_PER_WRITE){ + writeCounted( byteArrayOutputStream.toByteArray()); + byteArrayOutputStream.reset(); + count = 0; + } + } + writeCounted( byteArrayOutputStream.toByteArray()); + } + /** * Writes all necessary data for this entry. 
* @throws IOException on error @@ -881,7 +901,7 @@ private void writeCounted(byte[] data) throws IOException { - final byte[] copyBuffer = new byte[16384]; + final byte[] copyBuffer = new byte[32768]; private void copyFromZipInputStream(InputStream src) throws IOException { if (entry == null) { @@ -1123,27 +1143,31 @@ protected void writeDataDescriptor(ZipArchiveEntry ze) throws IOException { * Zip64Mode#Never}. */ protected void writeCentralFileHeader(ZipArchiveEntry ze) throws IOException { + byte[] centralFileHeader = createCentralFileHeader(ze); + writeCounted(centralFileHeader); + } + + private byte[] createCentralFileHeader(ZipArchiveEntry ze) throws IOException { final long lfhOffset = offsets.get(ze).longValue(); final boolean needsZip64Extra = hasZip64Extra(ze) - || ze.getCompressedSize() >= ZIP64_MAGIC - || ze.getSize() >= ZIP64_MAGIC - || lfhOffset >= ZIP64_MAGIC; + || ze.getCompressedSize() >= ZIP64_MAGIC + || ze.getSize() >= ZIP64_MAGIC + || lfhOffset >= ZIP64_MAGIC; if (needsZip64Extra && zip64Mode == Zip64Mode.Never) { // must be the offset that is too big, otherwise an // exception would have been throw in putArchiveEntry or // closeArchiveEntry throw new Zip64RequiredException(Zip64RequiredException - .ARCHIVE_TOO_BIG_MESSAGE); + .ARCHIVE_TOO_BIG_MESSAGE); } handleZip64Extra(ze, lfhOffset, needsZip64Extra); - byte[] centralFileHeader = createCentralFileHeader(ze, getName(ze), lfhOffset, needsZip64Extra); - writeCounted(centralFileHeader); - } + return createCentralFileHeader(ze, getName(ze), lfhOffset, needsZip64Extra); + }; /** * Writes the central file header entry. 
@@ -1208,7 +1232,7 @@ private byte[] createCentralFileHeader(ZipArchiveEntry ze, ByteBuffer name, long putShort(commentLen, buf, CFH_COMMENT_LENGTH_OFFSET); // disk number start - System.arraycopy(ZERO, 0, buf, CFH_DISK_NUMBER_OFFSET, SHORT); + System.arraycopy(ZERO, 0, buf, CFH_DISK_NUMBER_OFFSET, SHORT); // internal file attributes putShort(ze.getInternalAttributes(), buf, CFH_INTERNAL_ATTRIBUTES_OFFSET); From b7af8b37f7609fe65ae4df93fe7bb4adf4bbb03a Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Wed, 7 Jan 2015 21:22:24 +0000 Subject: [PATCH 071/189] Minor optimizations. Exctrated use of calendar, since all operations involving the calendar classes are costly including getInstance. - Allocated array of correct size Arguably these issues are really only irritations for anyone measuring the performance of commons-compress; in other words not all that significant in terms of real performance gain, at least not the kind an end-user would notice. git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1650170 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress/archivers/zip/ZipArchiveEntry.java | 2 +- .../compress/archivers/zip/ZipArchiveOutputStream.java | 9 +++++++-- .../apache/commons/compress/archivers/zip/ZipFile.java | 2 +- .../apache/commons/compress/archivers/zip/ZipUtil.java | 6 +++++- 4 files changed, 14 insertions(+), 5 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java index 085403d610a..c430e071698 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java @@ -345,7 +345,7 @@ public ZipExtraField[] getExtraFields(boolean includeUnparseable) { if (includeUnparseable && unparseableExtra != null) { result.add(unparseableExtra); } - return result.toArray(new ZipExtraField[0]); + 
return result.toArray(new ZipExtraField[result.size()]); } /** diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index 7ab4b70db24..15b8ffbb0f9 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -25,6 +25,7 @@ import java.io.OutputStream; import java.io.RandomAccessFile; import java.nio.ByteBuffer; +import java.util.Calendar; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; @@ -1036,7 +1037,7 @@ private byte[] createLocalFileHeader(ZipArchiveEntry ze, ByteBuffer name, boolea // compression method putShort(zipMethod, buf, LFH_METHOD_OFFSET); - ZipUtil.toDosTime(ze.getTime(), buf, LFH_TIME_OFFSET); + ZipUtil.toDosTime(calendarInstance, ze.getTime(), buf, LFH_TIME_OFFSET); // CRC if (phased){ @@ -1169,6 +1170,10 @@ private byte[] createCentralFileHeader(ZipArchiveEntry ze) throws IOException { return createCentralFileHeader(ze, getName(ze), lfhOffset, needsZip64Extra); }; + private final Calendar calendarInstance = Calendar.getInstance(); + + private final Calendar calendarInstance = Calendar.getInstance(); + /** * Writes the central file header entry. * @param ze the entry to write @@ -1209,7 +1214,7 @@ private byte[] createCentralFileHeader(ZipArchiveEntry ze, ByteBuffer name, long // last mod. 
time and date - ZipUtil.toDosTime(ze.getTime(), buf, CFH_TIME_OFFSET); + ZipUtil.toDosTime(calendarInstance, ze.getTime(), buf, CFH_TIME_OFFSET); // CRC // compressed length diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java index 9370b010a17..13dff0567ae 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java @@ -279,7 +279,7 @@ public Enumeration getEntries() { * @since 1.1 */ public Enumeration getEntriesInPhysicalOrder() { - ZipArchiveEntry[] allEntries = entries.toArray(new ZipArchiveEntry[0]); + ZipArchiveEntry[] allEntries = entries.toArray(new ZipArchiveEntry[entries.size()]); Arrays.sort(allEntries, OFFSET_COMPARATOR); return Collections.enumeration(Arrays.asList(allEntries)); } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipUtil.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipUtil.java index 8ba54454378..026da708e3f 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipUtil.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipUtil.java @@ -67,7 +67,10 @@ public static byte[] toDosTime(long t) { * must be non-negative and no larger than buf.length-4 */ public static void toDosTime(long t, byte[] buf, int offset) { - Calendar c = Calendar.getInstance(); + toDosTime(Calendar.getInstance(), t, buf, offset); + } + + static void toDosTime(Calendar c, long t, byte[] buf, int offset) { c.setTimeInMillis(t); int year = c.get(Calendar.YEAR); @@ -85,6 +88,7 @@ public static void toDosTime(long t, byte[] buf, int offset) { ZipLong.putLong(value, buf, offset); } + /** * Assumes a negative integer really is a positive integer that * has wrapped around and re-creates the original value. 
From ebef905b2bd446da972b9c74cf2e76b59666ec0c Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Thu, 8 Jan 2015 01:36:16 +0000 Subject: [PATCH 072/189] Remove duplicate field git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1650200 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress/archivers/zip/ZipArchiveOutputStream.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index 15b8ffbb0f9..21dca650ba0 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -1172,8 +1172,6 @@ private byte[] createCentralFileHeader(ZipArchiveEntry ze) throws IOException { private final Calendar calendarInstance = Calendar.getInstance(); - private final Calendar calendarInstance = Calendar.getInstance(); - /** * Writes the central file header entry. 
* @param ze the entry to write From a1362359e3b4b364112e60e79f4b6ebf335ba69c Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 9 Jan 2015 17:37:26 +0000 Subject: [PATCH 073/189] use for loop rather than iterator git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1650609 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/ZipArchiveOutputStream.java | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index 21dca650ba0..449cc3cc92a 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -27,7 +27,6 @@ import java.nio.ByteBuffer; import java.util.Calendar; import java.util.HashMap; -import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -448,20 +447,16 @@ public void finish() throws IOException { private void writeCentralDirectoryInChunks() throws IOException { int NUM_PER_WRITE = 1000; ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(70 * NUM_PER_WRITE); - Iterator iterator = entries.iterator(); - ZipArchiveEntry ze; int count = 0; - while (iterator.hasNext()){ - ze = iterator.next(); + for (ZipArchiveEntry ze : entries) { byteArrayOutputStream.write(createCentralFileHeader(ze)); - count++; - if (count > NUM_PER_WRITE){ - writeCounted( byteArrayOutputStream.toByteArray()); + if (++count > NUM_PER_WRITE){ + writeCounted(byteArrayOutputStream.toByteArray()); byteArrayOutputStream.reset(); count = 0; } } - writeCounted( byteArrayOutputStream.toByteArray()); + writeCounted(byteArrayOutputStream.toByteArray()); } /** From deeb355c2253b49f39f0be7bf16de1e119f69581 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 9 Jan 2015 17:58:15 
+0000 Subject: [PATCH 074/189] replace a few magic numbers with constants git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1650615 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/archivers/zip/ParallelScatterZipCreator.java | 4 +++- .../commons/compress/archivers/zip/ZipArchiveEntry.java | 3 ++- .../compress/archivers/zip/ZipArchiveInputStream.java | 2 +- .../compress/archivers/zip/ZipArchiveOutputStream.java | 6 +++--- .../apache/commons/compress/archivers/zip/ZipMethod.java | 8 +++++++- 5 files changed, 16 insertions(+), 7 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java index fed3d9fd308..0431e079bc0 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java @@ -116,7 +116,9 @@ public ParallelScatterZipCreator(int nThreads, ScatterGatherBackingStoreSupplier public void addArchiveEntry(final ZipArchiveEntry zipArchiveEntry, final InputStreamSupplier source) { final int method = zipArchiveEntry.getMethod(); - if (method == -1) throw new IllegalArgumentException("Method must be set on the supplied zipArchiveEntry"); + if (method == ZipMethod.UNKNOWN_CODE) { + throw new IllegalArgumentException("Method must be set on the supplied zipArchiveEntry"); + } // Consider if we want to constrain the number of items that can enqueue here. 
es.submit(new Callable() { public ScatterZipOutputStream call() throws Exception { diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java index c430e071698..c15097a965e 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java @@ -53,6 +53,7 @@ public class ZipArchiveEntry extends java.util.zip.ZipEntry public static final int PLATFORM_UNIX = 3; public static final int PLATFORM_FAT = 0; + public static final int CRC_UNKNOWN = -1; private static final int SHORT_MASK = 0xFFFF; private static final int SHORT_SHIFT = 16; private static final byte[] EMPTY = new byte[0]; @@ -67,7 +68,7 @@ public class ZipArchiveEntry extends java.util.zip.ZipEntry * @see COMPRESS-93 */ - private int method = -1; + private int method = ZipMethod.UNKNOWN_CODE; /** * The {@link java.util.zip.ZipEntry#setSize} method in the base diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java index a5b9e4d137b..226d29e5986 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java @@ -274,7 +274,7 @@ public ZipArchiveEntry getNextZipEntry() throws IOException { processZip64Extra(size, cSize); - if (current.entry.getCompressedSize() != -1) { + if (current.entry.getCompressedSize() != ZipArchiveEntry.SIZE_UNKNOWN) { if (current.entry.getMethod() == ZipMethod.UNSHRINKING.getCode()) { current.in = new UnshrinkingInputStream(new BoundedInputStream(in, current.entry.getCompressedSize())); } else if (current.entry.getMethod() == ZipMethod.IMPLODING.getCode()) { diff --git 
a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index 449cc3cc92a..adab8641560 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -545,9 +545,9 @@ public void addRawArchiveEntry(ZipArchiveEntry entry, InputStream rawStream) // since standard mode is unable to remove the zip 64 header. ae.removeExtraField(Zip64ExtendedInformationExtraField.HEADER_ID); } - boolean is2PhaseSource = ae.getCrc() != -1 + boolean is2PhaseSource = ae.getCrc() != ZipArchiveEntry.CRC_UNKNOWN && ae.getSize() != ArchiveEntry.SIZE_UNKNOWN - && ae.getCompressedSize() != -1; + && ae.getCompressedSize() != ArchiveEntry.SIZE_UNKNOWN; putArchiveEntry(ae, is2PhaseSource); copyFromZipInputStream(rawStream); closeCopiedEntry(is2PhaseSource); @@ -778,7 +778,7 @@ private void validateSizeInformation(Zip64Mode effectiveMode) + " STORED method when not writing to a" + " file"); } - if (entry.entry.getCrc() == -1) { + if (entry.entry.getCrc() == ZipArchiveEntry.CRC_UNKNOWN) { throw new ZipException("crc checksum is required for STORED" + " method when not writing to a file"); } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipMethod.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipMethod.java index 4dafafb314e..52075e61702 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipMethod.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipMethod.java @@ -164,7 +164,9 @@ public enum ZipMethod { /** * Unknown compression method. 
*/ - UNKNOWN(-1); + UNKNOWN(); + + static final int UNKNOWN_CODE = -1; private final int code; @@ -178,6 +180,10 @@ public enum ZipMethod { codeToEnum = Collections.unmodifiableMap(cte); } + private ZipMethod() { + this(UNKNOWN_CODE); + } + /** * private constructor for enum style class. */ From 516c38fd95fff30841f9af370f50cf1f7733c209 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Fri, 9 Jan 2015 19:13:15 +0000 Subject: [PATCH 075/189] Changed ZipArchiveEntry to use more optimized data structure for an overall performance improvement of about 10% for the use case "many small files", for instance a jar file. LinkedHashMap was not a very good structure for such small lists and performs badly in terms of locality git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1650632 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/ZipArchiveEntry.java | 107 +++++++++++++----- .../archivers/zip/ZipArchiveOutputStream.java | 5 +- 2 files changed, 80 insertions(+), 32 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java index c15097a965e..c1eaaafc2f2 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java @@ -23,7 +23,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Date; -import java.util.LinkedHashMap; import java.util.List; import java.util.zip.ZipException; @@ -81,11 +80,12 @@ public class ZipArchiveEntry extends java.util.zip.ZipEntry private int internalAttributes = 0; private int platform = PLATFORM_FAT; private long externalAttributes = 0; - private LinkedHashMap extraFields = null; + private ZipExtraField[] extraFields; private UnparseableExtraFieldData unparseableExtra = null; private String name = null; private byte[] rawName = null; private GeneralPurposeBit 
gpb = new GeneralPurposeBit(); + private static final ZipExtraField[] noExtraFields = new ZipExtraField[0]; /** * Creates a new zip entry with the specified name. @@ -138,7 +138,7 @@ public ZipArchiveEntry(ZipArchiveEntry entry) throws ZipException { this((java.util.zip.ZipEntry) entry); setInternalAttributes(entry.getInternalAttributes()); setExternalAttributes(entry.getExternalAttributes()); - setExtraFields(entry.getExtraFields(true)); + setExtraFields(getAllExtraFieldsNoCopy()); setPlatform(entry.getPlatform()); GeneralPurposeBit other = entry.getGeneralPurposeBit(); setGeneralPurposeBit(other == null ? null : @@ -180,7 +180,7 @@ public Object clone() { e.setInternalAttributes(getInternalAttributes()); e.setExternalAttributes(getExternalAttributes()); - e.setExtraFields(getExtraFields(true)); + e.setExtraFields(getAllExtraFieldsNoCopy()); return e; } @@ -307,14 +307,15 @@ protected void setPlatform(int platform) { * @param fields an array of extra fields */ public void setExtraFields(ZipExtraField[] fields) { - extraFields = new LinkedHashMap(); + List newFields = new ArrayList(); for (ZipExtraField field : fields) { if (field instanceof UnparseableExtraFieldData) { unparseableExtra = (UnparseableExtraFieldData) field; } else { - extraFields.put(field.getHeaderId(), field); + newFields.add( field); } } + extraFields = newFields.toArray(new ZipExtraField[newFields.size()]); setExtra(); } @@ -323,7 +324,8 @@ public void setExtraFields(ZipExtraField[] fields) { * @return an array of the extra fields */ public ZipExtraField[] getExtraFields() { - return getExtraFields(false); + final ZipExtraField[] parseableExtraFields = getParseableExtraFields(); + return Arrays.copyOf(parseableExtraFields, parseableExtraFields.length); } /** @@ -336,19 +338,47 @@ public ZipExtraField[] getExtraFields() { * @since 1.1 */ public ZipExtraField[] getExtraFields(boolean includeUnparseable) { + return includeUnparseable ? 
+ getAllExtraFields() : + getParseableExtraFields(); + } + + private ZipExtraField[] getParseableExtraFields() { if (extraFields == null) { - return !includeUnparseable || unparseableExtra == null - ? new ZipExtraField[0] - : new ZipExtraField[] { unparseableExtra }; + return noExtraFields; } - List result = - new ArrayList(extraFields.values()); - if (includeUnparseable && unparseableExtra != null) { - result.add(unparseableExtra); + return extraFields; + } + + /** + * Get all extra fields, including unparseable ones. + * @return An array of all extra fields. Not necessarily a copy of internal data structures, hence private method + */ + private ZipExtraField[] getAllExtraFieldsNoCopy() { + if (extraFields == null) { + return getUnparseableOnly(); } - return result.toArray(new ZipExtraField[result.size()]); + return unparseableExtra != null ? getMergedFields() : extraFields; + } + + private ZipExtraField[] copyOf(ZipExtraField[] src){ + return Arrays.copyOf(src, src.length); + } + + private ZipExtraField[] getMergedFields() { + final ZipExtraField[] zipExtraFields = Arrays.copyOf(extraFields, extraFields.length + 1); + zipExtraFields[zipExtraFields.length] = unparseableExtra; + return zipExtraFields; } + private ZipExtraField[] getUnparseableOnly() { + return unparseableExtra == null ? noExtraFields : new ZipExtraField[] { unparseableExtra }; + } + + private ZipExtraField[] getAllExtraFields() { + final ZipExtraField[] allExtraFieldsNoCopy = getAllExtraFieldsNoCopy(); + return (allExtraFieldsNoCopy == extraFields) ? copyOf( allExtraFieldsNoCopy) : allExtraFieldsNoCopy; + } /** * Adds an extra field - replacing an already present extra field * of the same type. 
@@ -362,9 +392,15 @@ public void addExtraField(ZipExtraField ze) { unparseableExtra = (UnparseableExtraFieldData) ze; } else { if (extraFields == null) { - extraFields = new LinkedHashMap(); + extraFields = new ZipExtraField[]{ ze}; + } else { + if (getExtraField(ze.getHeaderId())!= null){ + removeExtraField(ze.getHeaderId()); + } + final ZipExtraField[] zipExtraFields = Arrays.copyOf(extraFields, extraFields.length + 1); + zipExtraFields[zipExtraFields.length -1] = ze; + extraFields = zipExtraFields; } - extraFields.put(ze.getHeaderId(), ze); } setExtra(); } @@ -380,12 +416,15 @@ public void addAsFirstExtraField(ZipExtraField ze) { if (ze instanceof UnparseableExtraFieldData) { unparseableExtra = (UnparseableExtraFieldData) ze; } else { - LinkedHashMap copy = extraFields; - extraFields = new LinkedHashMap(); - extraFields.put(ze.getHeaderId(), ze); - if (copy != null) { - copy.remove(ze.getHeaderId()); - extraFields.putAll(copy); + if (getExtraField(ze.getHeaderId()) != null){ + removeExtraField(ze.getHeaderId()); + } + ZipExtraField[] copy = extraFields; + int newLen = extraFields != null ? 
extraFields.length + 1: 1; + extraFields = new ZipExtraField[newLen]; + extraFields[0] = ze; + if (copy != null){ + System.arraycopy(copy, 0, extraFields, 1, extraFields.length - 1); } } setExtra(); @@ -399,9 +438,17 @@ public void removeExtraField(ZipShort type) { if (extraFields == null) { throw new java.util.NoSuchElementException(); } - if (extraFields.remove(type) == null) { + + List newResult = new ArrayList(); + for (ZipExtraField extraField : extraFields) { + if (!type.equals(extraField.getHeaderId())){ + newResult.add( extraField); + } + } + if (extraFields.length == newResult.size()) { throw new java.util.NoSuchElementException(); } + extraFields = newResult.toArray(new ZipExtraField[newResult.size()]); setExtra(); } @@ -425,7 +472,11 @@ public void removeUnparseableExtraFieldData() { */ public ZipExtraField getExtraField(ZipShort type) { if (extraFields != null) { - return extraFields.get(type); + for (ZipExtraField extraField : extraFields) { + if (type.equals(extraField.getHeaderId())) { + return extraField; + } + } } return null; } @@ -470,7 +521,7 @@ public void setExtra(byte[] extra) throws RuntimeException { * modify super's data directly. 
*/ protected void setExtra() { - super.setExtra(ExtraFieldUtils.mergeLocalFileDataData(getExtraFields(true))); + super.setExtra(ExtraFieldUtils.mergeLocalFileDataData(getAllExtraFieldsNoCopy())); } /** @@ -501,7 +552,7 @@ public byte[] getLocalFileDataExtra() { * @return the central directory extra data */ public byte[] getCentralDirectoryExtra() { - return ExtraFieldUtils.mergeCentralDirectoryData(getExtraFields(true)); + return ExtraFieldUtils.mergeCentralDirectoryData(getAllExtraFieldsNoCopy()); } /** @@ -528,7 +579,7 @@ public boolean isDirectory() { */ protected void setName(String name) { if (name != null && getPlatform() == PLATFORM_FAT - && name.indexOf("/") == -1) { + && !name.contains("/")) { name = name.replace('\\', '/'); } this.name = name; diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index adab8641560..f9dc69240ae 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -1023,10 +1023,7 @@ private byte[] createLocalFileHeader(ZipArchiveEntry ze, ByteBuffer name, boolea putShort(versionNeededToExtract(zipMethod, hasZip64Extra(ze)), buf, LFH_VERSION_NEEDED_OFFSET); } - GeneralPurposeBit generalPurposeBit = getGeneralPurposeBits(zipMethod, - !encodable - && fallbackToUTF8 - ); + GeneralPurposeBit generalPurposeBit = getGeneralPurposeBits(zipMethod, !encodable && fallbackToUTF8); generalPurposeBit.encode(buf, LFH_GPB_OFFSET); // compression method From 09aecce588d736955b803c9df908013960699836 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Sat, 10 Jan 2015 16:15:54 +0000 Subject: [PATCH 076/189] Fixed style issues as reported by IntelliJ git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1650766 13f79535-47bb-0310-9956-ffa450edef68 --- 
.../compress/archivers/dump/Dirent.java | 2 +- .../archivers/dump/DumpArchiveInputStream.java | 18 +++++++++--------- .../commons/compress/archivers/sevenz/CLI.java | 2 +- .../archivers/sevenz/DeltaDecoder.java | 2 +- .../archivers/sevenz/LZMA2Decoder.java | 2 +- .../compress/archivers/sevenz/SevenZFile.java | 2 +- .../archivers/sevenz/SevenZOutputFile.java | 12 ++++++------ .../archivers/tar/TarArchiveEntry.java | 10 +++++----- .../compress/archivers/tar/TarUtils.java | 3 +-- .../zip/ParallelScatterZipCreator.java | 6 +++--- .../archivers/zip/ZipArchiveOutputStream.java | 2 +- .../compress/archivers/zip/ZipMethod.java | 4 ++-- .../compressors/z/ZCompressorInputStream.java | 2 +- .../commons/compress/AbstractTestCase.java | 3 +-- .../compress/archivers/LongPathTest.java | 7 +++---- .../archivers/zip/UTF8ZipFilesTest.java | 4 ++-- .../FramedSnappyCompressorInputStreamTest.java | 2 +- 17 files changed, 40 insertions(+), 43 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/Dirent.java b/src/main/java/org/apache/commons/compress/archivers/dump/Dirent.java index 34e0ef79182..7979b1d5503 100644 --- a/src/main/java/org/apache/commons/compress/archivers/dump/Dirent.java +++ b/src/main/java/org/apache/commons/compress/archivers/dump/Dirent.java @@ -79,6 +79,6 @@ String getName() { */ @Override public String toString() { - return String.format("[%d]: %s", Integer.valueOf(ino), name); + return String.format("[%d]: %s", ino, name); } } diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java index bf411e37a0e..6381e899893 100644 --- a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java @@ -127,7 +127,7 @@ public DumpArchiveInputStream(InputStream is, String encoding) // put in a dummy record for 
the root node. Dirent root = new Dirent(2, 2, 4, "."); - names.put(Integer.valueOf(2), root); + names.put(2, root); // use priority based on queue to ensure parent directories are // released first. @@ -306,7 +306,7 @@ public DumpArchiveEntry getNextEntry() throws IOException { } entry.setName(path); - entry.setSimpleName(names.get(Integer.valueOf(entry.getIno())).getName()); + entry.setSimpleName(names.get(entry.getIno()).getName()); entry.setOffset(filepos); return entry; @@ -327,9 +327,9 @@ private void readDirectoryEntry(DumpArchiveEntry entry) raw.readRecord(); } - if (!names.containsKey(Integer.valueOf(entry.getIno())) && + if (!names.containsKey(entry.getIno()) && DumpArchiveConstants.SEGMENT_TYPE.INODE == entry.getHeaderType()) { - pending.put(Integer.valueOf(entry.getIno()), entry); + pending.put(entry.getIno(), entry); } int datalen = DumpArchiveConstants.TP_SIZE * entry.getHeaderCount(); @@ -367,7 +367,7 @@ private void readDirectoryEntry(DumpArchiveEntry entry) } */ - names.put(Integer.valueOf(ino), d); + names.put(ino, d); // check whether this allows us to fill anything in the pending list. for (Map.Entry e : pending.entrySet()) { @@ -384,7 +384,7 @@ private void readDirectoryEntry(DumpArchiveEntry entry) // remove anything that we found. (We can't do it earlier // because of concurrent modification exceptions.) 
for (DumpArchiveEntry e : queue) { - pending.remove(Integer.valueOf(e.getIno())); + pending.remove(e.getIno()); } } @@ -413,12 +413,12 @@ private String getPath(DumpArchiveEntry entry) { Dirent dirent = null; for (int i = entry.getIno();; i = dirent.getParentIno()) { - if (!names.containsKey(Integer.valueOf(i))) { + if (!names.containsKey(i)) { elements.clear(); break; } - dirent = names.get(Integer.valueOf(i)); + dirent = names.get(i); elements.push(dirent.getName()); if (dirent.getIno() == dirent.getParentIno()) { @@ -428,7 +428,7 @@ private String getPath(DumpArchiveEntry entry) { // if an element is missing defer the work and read next entry. if (elements.isEmpty()) { - pending.put(Integer.valueOf(entry.getIno()), entry); + pending.put(entry.getIno(), entry); return null; } diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/CLI.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/CLI.java index 27a53b705f7..95a9d41ac05 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/CLI.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/CLI.java @@ -58,7 +58,7 @@ private String getContentMethods(SevenZArchiveEntry entry) { first = false; sb.append(m.getMethod()); if (m.getOptions() != null) { - sb.append("(" + m.getOptions() + ")"); + sb.append("(").append(m.getOptions()).append(")"); } } return sb.toString(); diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/DeltaDecoder.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/DeltaDecoder.java index 81dcb7a23dd..7951e712555 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/DeltaDecoder.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/DeltaDecoder.java @@ -54,7 +54,7 @@ byte[] getOptionsAsProperties(Object options) { @Override Object getOptionsFromCoder(Coder coder, InputStream in) { - return Integer.valueOf(getOptionsFromCoder(coder)); + return getOptionsFromCoder(coder); } 
private int getOptionsFromCoder(Coder coder) { diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMA2Decoder.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMA2Decoder.java index 9d3b8aff648..7df09f84d98 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMA2Decoder.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMA2Decoder.java @@ -62,7 +62,7 @@ byte[] getOptionsAsProperties(Object opts) { @Override Object getOptionsFromCoder(Coder coder, InputStream in) { - return Integer.valueOf(getDictionarySize(coder)); + return getDictionarySize(coder); } private int getDictSize(Object opts) { diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java index 1b146862887..a8c368c6789 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java @@ -163,7 +163,7 @@ private Archive readHeaders(byte[] password) throws IOException { final byte archiveVersionMinor = file.readByte(); if (archiveVersionMajor != 0) { throw new IOException(String.format("Unsupported 7z version (%d,%d)", - Byte.valueOf(archiveVersionMajor), Byte.valueOf(archiveVersionMinor))); + archiveVersionMajor, archiveVersionMinor)); } final long startHeaderCrc = 0xffffFFFFL & Integer.reverseBytes(file.readInt()); diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFile.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFile.java index 84d1431b837..72ca42f08c8 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFile.java @@ -490,9 +490,9 @@ private void writeFileEmptyFiles(final DataOutput header) throws IOException { boolean hasEmptyFiles = 
false; int emptyStreamCounter = 0; final BitSet emptyFiles = new BitSet(0); - for (int i = 0; i < files.size(); i++) { - if (!files.get(i).hasStream()) { - boolean isDir = files.get(i).isDirectory(); + for (SevenZArchiveEntry file1 : files) { + if (!file1.hasStream()) { + boolean isDir = file1.isDirectory(); emptyFiles.set(emptyStreamCounter++, !isDir); hasEmptyFiles |= !isDir; } @@ -513,9 +513,9 @@ private void writeFileAntiItems(final DataOutput header) throws IOException { boolean hasAntiItems = false; final BitSet antiItems = new BitSet(0); int antiItemCounter = 0; - for (int i = 0; i < files.size(); i++) { - if (!files.get(i).hasStream()) { - boolean isAnti = files.get(i).isAntiItem(); + for (SevenZArchiveEntry file1 : files) { + if (!file1.hasStream()) { + boolean isAnti = file1.isAntiItem(); antiItems.set(antiItemCounter++, isAnti); hasAntiItems |= isAnti; } diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java index cc5db1ca6d3..a61a9b096a4 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java @@ -378,7 +378,7 @@ public boolean isDescendent(TarArchiveEntry desc) { * @return This entry's name. */ public String getName() { - return name.toString(); + return name; } /** @@ -405,7 +405,7 @@ public void setMode(int mode) { * @return This entry's link name. */ public String getLinkName() { - return linkName.toString(); + return linkName; } /** @@ -461,7 +461,7 @@ public void setGroupId(int groupId) { * @return This entry's user name. */ public String getUserName() { - return userName.toString(); + return userName; } /** @@ -479,7 +479,7 @@ public void setUserName(String userName) { * @return This entry's group name. 
*/ public String getGroupName() { - return groupName.toString(); + return groupName; } /** @@ -1035,7 +1035,7 @@ private static String normalizeFileName(String fileName, fileName = fileName.substring(2); } } - } else if (osname.indexOf("netware") > -1) { + } else if (osname.contains("netware")) { int colon = fileName.indexOf(':'); if (colon != -1) { fileName = fileName.substring(colon + 1); diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java index 1782ffe682a..94e175c210d 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java @@ -62,8 +62,7 @@ public String decode(byte[] buffer) { final int length = buffer.length; StringBuilder result = new StringBuilder(length); - for (int i = 0; i < length; ++i) { - byte b = buffer[i]; + for (byte b : buffer) { if (b == 0) { // Trailing null break; } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java index 0431e079bc0..9621329459c 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java @@ -40,7 +40,7 @@ * #ZipArchiveOutputStream *before* calling #writeTo on this class.

    */ public class ParallelScatterZipCreator { - private List streams = synchronizedList(new ArrayList()); + private final List streams = synchronizedList(new ArrayList()); private final ExecutorService es; private final ScatterGatherBackingStoreSupplier supplier; @@ -49,7 +49,7 @@ public class ParallelScatterZipCreator { private long scatterDoneAt; private static class DefaultSupplier implements ScatterGatherBackingStoreSupplier { - AtomicInteger storeNum = new AtomicInteger(0); + final AtomicInteger storeNum = new AtomicInteger(0); public ScatterGatherBackingStore get() throws IOException { File tempFile = File.createTempFile("parallelscatter", "n" + storeNum.incrementAndGet()); @@ -64,7 +64,7 @@ private ScatterZipOutputStream createDeferred(ScatterGatherBackingStoreSupplier return new ScatterZipOutputStream(bs, sc); } - private ThreadLocal tlScatterStreams = new ThreadLocal() { + private final ThreadLocal tlScatterStreams = new ThreadLocal() { @Override protected ScatterZipOutputStream initialValue() { try { diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index f9dc69240ae..7f7e7e7f7e2 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -1142,7 +1142,7 @@ protected void writeCentralFileHeader(ZipArchiveEntry ze) throws IOException { private byte[] createCentralFileHeader(ZipArchiveEntry ze) throws IOException { - final long lfhOffset = offsets.get(ze).longValue(); + final long lfhOffset = offsets.get(ze); final boolean needsZip64Extra = hasZip64Extra(ze) || ze.getCompressedSize() >= ZIP64_MAGIC || ze.getSize() >= ZIP64_MAGIC diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipMethod.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipMethod.java index 
52075e61702..5289eb5d081 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipMethod.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipMethod.java @@ -175,7 +175,7 @@ public enum ZipMethod { static { Map cte = new HashMap(); for (ZipMethod method : values()) { - cte.put(Integer.valueOf(method.getCode()), method); + cte.put(method.getCode(), method); } codeToEnum = Collections.unmodifiableMap(cte); } @@ -208,6 +208,6 @@ public int getCode() { * method is not known. */ public static ZipMethod getMethodByCode(int code) { - return codeToEnum.get(Integer.valueOf(code)); + return codeToEnum.get(code); } } diff --git a/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java index a72c09e5789..fb410528826 100644 --- a/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java @@ -144,7 +144,7 @@ protected int decompressNextSymbol() throws IOException { addRepeatOfPreviousCode(); addedUnfinishedEntry = true; } else if (code > tableSize) { - throw new IOException(String.format("Invalid %d bit code 0x%x", Integer.valueOf(codeSize), Integer.valueOf(code))); + throw new IOException(String.format("Invalid %d bit code 0x%x", codeSize, code)); } return expandCodeToOutputStack(code, addedUnfinishedEntry); } diff --git a/src/test/java/org/apache/commons/compress/AbstractTestCase.java b/src/test/java/org/apache/commons/compress/AbstractTestCase.java index f4bdcdf4d4a..3f6fcd58c91 100644 --- a/src/test/java/org/apache/commons/compress/AbstractTestCase.java +++ b/src/test/java/org/apache/commons/compress/AbstractTestCase.java @@ -121,8 +121,7 @@ public static void rmdir(File f) { } private static final boolean ON_WINDOWS = - System.getProperty("os.name").toLowerCase(Locale.ENGLISH) - .indexOf("windows") > -1; + 
System.getProperty("os.name").toLowerCase(Locale.ENGLISH).contains("windows"); /** * Accommodate Windows bug encountered in both Sun and IBM JDKs. diff --git a/src/test/java/org/apache/commons/compress/archivers/LongPathTest.java b/src/test/java/org/apache/commons/compress/archivers/LongPathTest.java index de552f29b8f..38e35a6f4e3 100644 --- a/src/test/java/org/apache/commons/compress/archivers/LongPathTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/LongPathTest.java @@ -140,12 +140,11 @@ public void testArchive() throws Exception { assertTrue(ais instanceof ArArchiveInputStream); // CPIO does not store directories or directory names expected.clear(); - for(int i=0; i < fileList.size(); i++){ - String ent = fileList.get(i); - if (!ent.endsWith("/")){// not a directory + for (String ent : fileList) { + if (!ent.endsWith("/")) {// not a directory final int lastSlash = ent.lastIndexOf('/'); if (lastSlash >= 0) { // extract path name - expected.add(ent.substring(lastSlash+1, ent.length())); + expected.add(ent.substring(lastSlash + 1, ent.length())); } else { expected.add(ent); } diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/UTF8ZipFilesTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/UTF8ZipFilesTest.java index 08e452d1164..874c55e3efb 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/UTF8ZipFilesTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/UTF8ZipFilesTest.java @@ -366,8 +366,8 @@ private static void assertUnicodeName(ZipArchiveEntry ze, } public void testUtf8Interoperability() throws IOException { - File file1 = super.getFile("utf8-7zip-test.zip"); - File file2 = super.getFile("utf8-winzip-test.zip"); + File file1 = getFile("utf8-7zip-test.zip"); + File file2 = getFile("utf8-winzip-test.zip"); testFile(file1,CP437); testFile(file2,CP437); diff --git a/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java 
b/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java index dd411dd0bd4..aa1083e7163 100644 --- a/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java @@ -154,7 +154,7 @@ public void testUnskippableChunk() { fail("expected an exception"); in.close(); } catch (IOException ex) { - assertTrue(ex.getMessage().indexOf("unskippable chunk") > -1); + assertTrue(ex.getMessage().contains("unskippable chunk")); } } From 646e03cbab5d0af719efbb22dcf0ee037aa531cc Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Sat, 10 Jan 2015 19:00:31 +0000 Subject: [PATCH 077/189] Fixed one findbugs warning, ignored another git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1650794 13f79535-47bb-0310-9956-ffa450edef68 --- findbugs-exclude-filter.xml | 8 ++++++++ .../commons/compress/archivers/zip/StreamCompressor.java | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/findbugs-exclude-filter.xml b/findbugs-exclude-filter.xml index 7c207b2270b..f70be02bee3 100644 --- a/findbugs-exclude-filter.xml +++ b/findbugs-exclude-filter.xml @@ -157,4 +157,12 @@ + + + + + + + + diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java index d1a9062073f..4eba69bc52c 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java @@ -50,7 +50,7 @@ public abstract class StreamCompressor implements Closeable { private long sourcePayloadLength = 0; private long totalWrittenToOutputStream = 0; - private final int bufferSize = 4096; + private static final int bufferSize = 4096; private final byte[] outputBuffer = new byte[bufferSize]; 
private final byte[] readerBuf = new byte[bufferSize]; From f7070b2c01020438d4a7712ae304b53ced16febc Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sun, 11 Jan 2015 05:32:00 +0000 Subject: [PATCH 078/189] make closed flag volatile as suggested by Dawid Weiss - COMPRESS-297 git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1650873 13f79535-47bb-0310-9956-ffa450edef68 --- .../java/org/apache/commons/compress/archivers/zip/ZipFile.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java index 13dff0567ae..348d86b92e9 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java @@ -134,7 +134,7 @@ private static final class OffsetEntry { /** * Whether the file is closed. */ - private boolean closed = true; + private volatile boolean closed = true; // cached buffers - must only be used locally in the class (COMPRESS-172 - reduce garbage collection) private final byte[] DWORD_BUF = new byte[DWORD]; From dd0109a00a076699240e81ee47db20374b65e546 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Mon, 12 Jan 2015 13:47:48 +0000 Subject: [PATCH 079/189] make getParseableExtraFields and getAllExtraFileds symmetric git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1651091 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/archivers/zip/ZipArchiveEntry.java | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java index c1eaaafc2f2..60827588563 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java +++ 
b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java @@ -324,8 +324,7 @@ public void setExtraFields(ZipExtraField[] fields) { * @return an array of the extra fields */ public ZipExtraField[] getExtraFields() { - final ZipExtraField[] parseableExtraFields = getParseableExtraFields(); - return Arrays.copyOf(parseableExtraFields, parseableExtraFields.length); + return getParseableExtraFields(); } /** @@ -343,13 +342,18 @@ public ZipExtraField[] getExtraFields(boolean includeUnparseable) { getParseableExtraFields(); } - private ZipExtraField[] getParseableExtraFields() { + private ZipExtraField[] getParseableExtraFieldsNoCopy() { if (extraFields == null) { return noExtraFields; } return extraFields; } + private ZipExtraField[] getParseableExtraFields() { + final ZipExtraField[] parseableExtraFields = getParseableExtraFieldsNoCopy(); + return (parseableExtraFields == extraFields) ? copyOf(parseableExtraFields) : parseableExtraFields; + } + /** * Get all extra fields, including unparseable ones. * @return An array of all extra fields. 
Not necessarily a copy of internal data structures, hence private method From 7d99a02fc8bed02f6071a928f3828d2c76f8d1e4 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Mon, 12 Jan 2015 17:02:09 +0000 Subject: [PATCH 080/189] Fixed last findbug issue, which was real git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1651142 13f79535-47bb-0310-9956-ffa450edef68 --- .../zip/ParallelScatterZipCreator.java | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java index 9621329459c..8f72962c482 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java @@ -23,8 +23,10 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.Callable; +import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; +import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.zip.Deflater; @@ -43,6 +45,7 @@ public class ParallelScatterZipCreator { private final List streams = synchronizedList(new ArrayList()); private final ExecutorService es; private final ScatterGatherBackingStoreSupplier supplier; + private final List futures = new ArrayList(); private final long startedAt = System.currentTimeMillis(); private long compressionDoneAt = 0; @@ -120,8 +123,8 @@ public void addArchiveEntry(final ZipArchiveEntry zipArchiveEntry, final InputSt throw new IllegalArgumentException("Method must be set on the supplied zipArchiveEntry"); } // Consider if we want to constrain the number of items that can enqueue here. 
- es.submit(new Callable() { - public ScatterZipOutputStream call() throws Exception { + Future future = es.submit(new Callable() { + public Void call() throws Exception { ScatterZipOutputStream streamToUse = tlScatterStreams.get(); InputStream payload = source.get(); try { @@ -129,10 +132,11 @@ public ScatterZipOutputStream call() throws Exception { } finally { payload.close(); } - return streamToUse; + return null; } }); + futures.add( future); } @@ -146,8 +150,16 @@ public ScatterZipOutputStream call() throws Exception { * @param targetStream The ZipArchiveOutputStream to receive the contents of the scatter streams * @throws IOException If writing fails * @throws InterruptedException If we get interrupted + * @throws ExecutionException If something happens in the parallel execution */ - public void writeTo(ZipArchiveOutputStream targetStream) throws IOException, InterruptedException { + public void writeTo(ZipArchiveOutputStream targetStream) + throws IOException, InterruptedException, ExecutionException { + + // Make sure we catch any exceptions from parallel phase + for (Future future : futures) { + future.get(); + } + es.shutdown(); es.awaitTermination(1000 * 60, TimeUnit.SECONDS); From 2bf135d0e5f17a7e0ada98ad5d1d95eb813b11a6 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Tue, 13 Jan 2015 06:59:24 +0000 Subject: [PATCH 081/189] Changed from nThreads to receiving an ExecutorService There are a lot of different models/versions of executorservices, also varying according to client JDK level. Give client full control of how the executor service is created and also possibly how to schedule tasks through a slightly lower-level cerateCallable/submit api. Termination of ExecutorService is still controlled by ParallelScatterZipCreator, as must be. 
git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1651285 13f79535-47bb-0310-9956-ffa450edef68 --- .../zip/ParallelScatterZipCreator.java | 69 +++++++++++++------ .../zip/ParallelScatterZipCreatorTest.java | 68 +++++++++++++++--- 2 files changed, 107 insertions(+), 30 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java index 8f72962c482..1afbf7c20af 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java @@ -40,18 +40,22 @@ * the output file. Things that need to come in a specific order (manifests, directories) * must be handled by the client of this class, usually by writing these things to the * #ZipArchiveOutputStream *before* calling #writeTo on this class.

    + *

    + * The client can supply an ExecutorService, but for reasons of memory model consistency, + * this will be shut down by this class prior to completion. + *

    */ public class ParallelScatterZipCreator { private final List streams = synchronizedList(new ArrayList()); private final ExecutorService es; - private final ScatterGatherBackingStoreSupplier supplier; + private final ScatterGatherBackingStoreSupplier backingStoreSupplier; private final List futures = new ArrayList(); private final long startedAt = System.currentTimeMillis(); private long compressionDoneAt = 0; private long scatterDoneAt; - private static class DefaultSupplier implements ScatterGatherBackingStoreSupplier { + private static class DefaultBackingStoreSupplier implements ScatterGatherBackingStoreSupplier { final AtomicInteger storeNum = new AtomicInteger(0); public ScatterGatherBackingStore get() throws IOException { @@ -71,7 +75,7 @@ private ScatterZipOutputStream createDeferred(ScatterGatherBackingStoreSupplier @Override protected ScatterZipOutputStream initialValue() { try { - ScatterZipOutputStream scatterStream = createDeferred(supplier); + ScatterZipOutputStream scatterStream = createDeferred(backingStoreSupplier); streams.add(scatterStream); return scatterStream; } catch (IOException e) { @@ -84,27 +88,30 @@ protected ScatterZipOutputStream initialValue() { * Create a ParallelScatterZipCreator with default threads */ public ParallelScatterZipCreator() { - this(Runtime.getRuntime().availableProcessors()); + this(Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors())); } /** * Create a ParallelScatterZipCreator * - * @param nThreads the number of threads to use in parallel. + * @param executorService The executorService to use for parallel scheduling. For technical reasons, + * this will be shut down by this class. 
*/ - public ParallelScatterZipCreator(int nThreads) { - this( nThreads, new DefaultSupplier()); + public ParallelScatterZipCreator(ExecutorService executorService) { + this(executorService, new DefaultBackingStoreSupplier()); } /** * Create a ParallelScatterZipCreator * - * @param nThreads the number of threads to use in parallel. + * @param executorService The executorService to use. For technical reasons, this will be shut down + * by this class. * @param backingStoreSupplier The supplier of backing store which shall be used */ - public ParallelScatterZipCreator(int nThreads, ScatterGatherBackingStoreSupplier backingStoreSupplier) { - supplier = backingStoreSupplier; - es = Executors.newFixedThreadPool(nThreads); + public ParallelScatterZipCreator(ExecutorService executorService, + ScatterGatherBackingStoreSupplier backingStoreSupplier) { + this.backingStoreSupplier = backingStoreSupplier; + es = executorService; } /** @@ -113,19 +120,43 @@ public ParallelScatterZipCreator(int nThreads, ScatterGatherBackingStoreSupplier * This method is expected to be called from a single client thread *

    * - * @param zipArchiveEntry The entry to add. Compression method + * @param zipArchiveEntry The entry to add. * @param source The source input stream supplier */ public void addArchiveEntry(final ZipArchiveEntry zipArchiveEntry, final InputStreamSupplier source) { + submit(createCallable(zipArchiveEntry, source)); + } + + /** + * Submit a callable for compression + * @param callable The callable to run + */ + public void submit(Callable callable) { + futures.add(es.submit(callable)); + } + + /** + * Create a callable that will compress the given archive entry. + * + *

    This method is expected to be called from a single client thread.

    + *

    + * This method is used by clients that want finer grained control over how the callable is + * created, possibly wanting to wrap this callable in a different callable

    + * + * @param zipArchiveEntry The entry to add. + * @param source The source input stream supplier + * @return A callable that will be used to check for errors + */ + + public Callable createCallable(final ZipArchiveEntry zipArchiveEntry, final InputStreamSupplier source) { final int method = zipArchiveEntry.getMethod(); if (method == ZipMethod.UNKNOWN_CODE) { throw new IllegalArgumentException("Method must be set on the supplied zipArchiveEntry"); } - // Consider if we want to constrain the number of items that can enqueue here. - Future future = es.submit(new Callable() { - public Void call() throws Exception { - ScatterZipOutputStream streamToUse = tlScatterStreams.get(); + return new Callable() { + public Object call() throws Exception { + final ScatterZipOutputStream streamToUse = tlScatterStreams.get(); InputStream payload = source.get(); try { streamToUse.addArchiveEntry(zipArchiveEntry, payload, method); @@ -134,9 +165,7 @@ public Void call() throws Exception { } return null; } - - }); - futures.add( future); + }; } @@ -161,7 +190,7 @@ public void writeTo(ZipArchiveOutputStream targetStream) } es.shutdown(); - es.awaitTermination(1000 * 60, TimeUnit.SECONDS); + es.awaitTermination(1000 * 60, TimeUnit.SECONDS); // == Infinity. 
We really *must* wait for this to complete // It is important that all threads terminate before we go on, ensure happens-before relationship compressionDoneAt = System.currentTimeMillis(); diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java index 6a482db8619..a512e369dd0 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java @@ -27,12 +27,17 @@ import java.util.Enumeration; import java.util.HashMap; import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; import static org.junit.Assert.*; @SuppressWarnings("OctalInteger") public class ParallelScatterZipCreatorTest { + private final int NUMITEMS = 5000; + @Test public void concurrent() throws Exception { @@ -44,12 +49,36 @@ public void concurrent() Map entries = writeEntries(zipCreator); zipCreator.writeTo(zos); zos.close(); - removeEntriesFoundInZipFile(result, entries); assertTrue(entries.size() == 0); assertNotNull( zipCreator.getStatisticsMessage()); } + @Test + public void callableApi() + throws Exception { + File result = File.createTempFile("parallelScatterGather2", ""); + ZipArchiveOutputStream zos = new ZipArchiveOutputStream(result); + zos.setEncoding("UTF-8"); + ExecutorService es = Executors.newFixedThreadPool(1); + + ScatterGatherBackingStoreSupplier supp = new ScatterGatherBackingStoreSupplier() { + public ScatterGatherBackingStore get() throws IOException { + return new FileBasedScatterGatherBackingStore(File.createTempFile("parallelscatter", "n1")); + } + }; + + ParallelScatterZipCreator zipCreator = new ParallelScatterZipCreator(es, supp); + Map entries = writeEntriesAsCallable(zipCreator); + zipCreator.writeTo(zos); + 
zos.close(); + + + removeEntriesFoundInZipFile(result, entries); + assertTrue(entries.size() == 0); + assertNotNull(zipCreator.getStatisticsMessage()); + } + private void removeEntriesFoundInZipFile(File result, Map entries) throws IOException { ZipFile zf = new ZipFile(result); Enumeration entriesInPhysicalOrder = zf.getEntriesInPhysicalOrder(); @@ -58,21 +87,16 @@ private void removeEntriesFoundInZipFile(File result, Map entrie InputStream inputStream = zf.getInputStream(zipArchiveEntry); byte[] actual = IOUtils.toByteArray(inputStream); byte[] expected = entries.remove(zipArchiveEntry.getName()); - assertArrayEquals( expected, actual); + assertArrayEquals( "For " + zipArchiveEntry.getName(), expected, actual); } zf.close(); } private Map writeEntries(ParallelScatterZipCreator zipCreator) { Map entries = new HashMap(); - for (int i = 0; i < 10000; i++){ - ZipArchiveEntry za = new ZipArchiveEntry( "file" + i); - final String payload = "content" + i; - final byte[] payloadBytes = payload.getBytes(); - entries.put( za.getName(), payloadBytes); - za.setMethod(ZipArchiveEntry.DEFLATED); - za.setSize(payload.length()); - za.setUnixMode(UnixStat.FILE_FLAG | 0664); + for (int i = 0; i < NUMITEMS; i++){ + final byte[] payloadBytes = ("content" + i).getBytes(); + ZipArchiveEntry za = createZipArchiveEntry(entries, i, payloadBytes); zipCreator.addArchiveEntry(za, new InputStreamSupplier() { public InputStream get() { return new ByteArrayInputStream(payloadBytes); @@ -81,4 +105,28 @@ public InputStream get() { } return entries; } + + private Map writeEntriesAsCallable(ParallelScatterZipCreator zipCreator) { + Map entries = new HashMap(); + for (int i = 0; i < NUMITEMS; i++){ + final byte[] payloadBytes = ("content" + i).getBytes(); + ZipArchiveEntry za = createZipArchiveEntry(entries, i, payloadBytes); + final Callable callable = zipCreator.createCallable(za, new InputStreamSupplier() { + public InputStream get() { + return new ByteArrayInputStream(payloadBytes); + } + }); + 
zipCreator.submit(callable); + } + return entries; + } + + private ZipArchiveEntry createZipArchiveEntry(Map entries, int i, byte[] payloadBytes) { + ZipArchiveEntry za = new ZipArchiveEntry( "file" + i); + entries.put( za.getName(), payloadBytes); + za.setMethod(ZipArchiveEntry.DEFLATED); + za.setSize(payloadBytes.length); + za.setUnixMode(UnixStat.FILE_FLAG | 0664); + return za; + } } \ No newline at end of file From 33c95771c804abc02223f24a0234343b93ffde56 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Tue, 13 Jan 2015 17:26:02 +0000 Subject: [PATCH 082/189] COMPRESS-290 Fixed error message with large groupid This is a bit of a simple solution to the issue, since there are obviously lots of other options that could have similar updates. The reality is that most recent macs that are initialized to run in corporate networks tend to get large GID's for the users. So I just fixed the one we actually have complaints about git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1651417 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/tar/TarArchiveOutputStream.java | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java index ecd364deaf9..1b134af74b6 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java @@ -606,7 +606,7 @@ private void addPaxHeaderForBigNumber(Map paxHeaders, private void failForBigNumbers(TarArchiveEntry entry) { failForBigNumber("entry size", entry.getSize(), TarConstants.MAXSIZE); - failForBigNumber("group id", entry.getGroupId(), TarConstants.MAXID); + failForBigNumberWithPosixMessage("group id", entry.getGroupId(), TarConstants.MAXID); failForBigNumber("last modification time", 
entry.getModTime().getTime() / 1000, TarConstants.MAXSIZE); @@ -619,10 +619,18 @@ private void failForBigNumbers(TarArchiveEntry entry) { } private void failForBigNumber(String field, long value, long maxValue) { + failForBigNumber(field, value, maxValue, ""); + } + + private void failForBigNumberWithPosixMessage(String field, long value, long maxValue) { + failForBigNumber(field, value, maxValue, " Use STAR or POSIX extensions to overcome this limit"); + } + + private void failForBigNumber(String field, long value, long maxValue, String additionalMsg) { if (value < 0 || value > maxValue) { throw new RuntimeException(field + " '" + value - + "' is too big ( > " - + maxValue + " )"); + + "' is too big ( > " + + maxValue + " )." + additionalMsg); } } From 8aa559f2da10a595d114bd4890197a5a81d3d8ed Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 13 Jan 2015 20:11:07 +0000 Subject: [PATCH 083/189] record fix for COMPRESS-290 git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1651464 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index f7eefce6a3a..8770c53e0e1 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -75,6 +75,11 @@ The type attribute can be add,update,fix,remove. has thrown an exception reading the file - for example if the file doesn't exist. + + Improved error message when tar encounters a groupId that is + too big to write without using the STAR or POSIX format. 
+ Date: Wed, 14 Jan 2015 07:06:22 +0000 Subject: [PATCH 084/189] Added ZipArchiveEntryRequest class ZipArchiveEntry is not thread safe, and the hand-off between the creating thread and the executorService actually doing the compression has been somewhat of a tightrope-walking effort, since we cannot reliably read fields off the ZipArchiveEntry Furthermore, to achieve true maximum IO performance in the gather-phase it would be required that Zip headers be created in the parallel part of the compression run, which was not possible prior to this commit. The ZipArchiveEntryRequest has clear and well-defined thread semantics and can cater for any future algorithmic improvements that may want to try to take performance to the very edge of what is achievable. To my understanding this will not be for this next relasease :) git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1651575 13f79535-47bb-0310-9956-ffa450edef68 --- .../zip/ParallelScatterZipCreator.java | 14 ++-- .../archivers/zip/ScatterZipOutputStream.java | 35 +++++---- .../archivers/zip/ZipArchiveEntryRequest.java | 77 +++++++++++++++++++ .../zip/ScatterZipOutputStreamTest.java | 18 ++++- 4 files changed, 118 insertions(+), 26 deletions(-) create mode 100644 src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequest.java diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java index 1afbf7c20af..d670222059b 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java @@ -32,6 +32,7 @@ import java.util.zip.Deflater; import static java.util.Collections.synchronizedList; +import static org.apache.commons.compress.archivers.zip.ZipArchiveEntryRequest.createZipArchiveEntryRequest; /** * Creates a zip in parallel by 
using multiple threadlocal #ScatterZipOutputStream instances. @@ -132,7 +133,7 @@ public void addArchiveEntry(final ZipArchiveEntry zipArchiveEntry, final InputSt * Submit a callable for compression * @param callable The callable to run */ - public void submit(Callable callable) { + public final void submit(Callable callable) { futures.add(es.submit(callable)); } @@ -149,20 +150,15 @@ public void submit(Callable callable) { * @return A callable that will be used to check for errors */ - public Callable createCallable(final ZipArchiveEntry zipArchiveEntry, final InputStreamSupplier source) { + public final Callable createCallable(ZipArchiveEntry zipArchiveEntry, InputStreamSupplier source) { final int method = zipArchiveEntry.getMethod(); if (method == ZipMethod.UNKNOWN_CODE) { throw new IllegalArgumentException("Method must be set on the supplied zipArchiveEntry"); } + final ZipArchiveEntryRequest zipArchiveEntryRequest = createZipArchiveEntryRequest(zipArchiveEntry, source); return new Callable() { public Object call() throws Exception { - final ScatterZipOutputStream streamToUse = tlScatterStreams.get(); - InputStream payload = source.get(); - try { - streamToUse.addArchiveEntry(zipArchiveEntry, payload, method); - } finally { - payload.close(); - } + tlScatterStreams.get().addArchiveEntry(zipArchiveEntryRequest); return null; } }; diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java index 9d6548dd9ca..35551c87024 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java @@ -49,25 +49,30 @@ public class ScatterZipOutputStream implements Closeable { private final StreamCompressor streamCompressor; private static class CompressedEntry { - final ZipArchiveEntry entry; + final ZipArchiveEntryRequest 
zipArchiveEntryRequest; final long crc; final long compressedSize; - final int method; final long size; - public CompressedEntry(ZipArchiveEntry entry, long crc, long compressedSize, int method, long size) { - this.entry = entry; + public CompressedEntry(ZipArchiveEntryRequest zipArchiveEntryRequest, long crc, long compressedSize, long size) { + this.zipArchiveEntryRequest = zipArchiveEntryRequest; this.crc = crc; this.compressedSize = compressedSize; - this.method = method; this.size = size; } + /** + * Update the original ZipArchiveEntry witg sizes/crc + * Do not use this methods from threads that did not create the instance itself ! + * @return the zipeArchiveEntry that is basis for this request + */ + public ZipArchiveEntry transferToArchiveEntry(){ + ZipArchiveEntry entry = zipArchiveEntryRequest.getZipArchiveEntry(); entry.setCompressedSize(compressedSize); entry.setSize(size); entry.setCrc(crc); - entry.setMethod(method); + entry.setMethod(zipArchiveEntryRequest.getMethod()); return entry; } } @@ -81,16 +86,18 @@ public ScatterZipOutputStream(ScatterGatherBackingStore backingStore, /** * Add an archive entry to this scatter stream. * - * @param zipArchiveEntry The entry to write - * @param payload The content to write for the entry. The caller is responsible for closing this. - * @param method The compression method + * @param zipArchiveEntryRequest The entry to write. 
* @throws IOException If writing fails */ - public void addArchiveEntry(ZipArchiveEntry zipArchiveEntry, InputStream payload, int method) throws IOException { - streamCompressor.deflate(payload, method); - items.add(new CompressedEntry(zipArchiveEntry, streamCompressor.getCrc32(), - streamCompressor.getBytesWrittenForLastEntry(), method, - streamCompressor.getBytesRead())); + public void addArchiveEntry(ZipArchiveEntryRequest zipArchiveEntryRequest) throws IOException { + final InputStream payloadStream = zipArchiveEntryRequest.getPayloadStream(); + try { + streamCompressor.deflate(payloadStream, zipArchiveEntryRequest.getMethod()); + } finally { + payloadStream.close(); + } + items.add(new CompressedEntry(zipArchiveEntryRequest, streamCompressor.getCrc32(), + streamCompressor.getBytesWrittenForLastEntry(), streamCompressor.getBytesRead())); } /** diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequest.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequest.java new file mode 100644 index 00000000000..5d88372f3b6 --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequest.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.zip; + +import java.io.InputStream; + +/** + * A Thread-safe representation of a ZipArchiveEntry that is used to add entries to parallel archives. + */ +public class ZipArchiveEntryRequest { + /* + The zipArchiveEntry is not thread safe, and cannot be safely accessed by the getters of this class. + It is safely accessible during the construction part of this class and also after the + thread pools have been shut down. + */ + private final ZipArchiveEntry zipArchiveEntry; + private final InputStreamSupplier payloadSupplier; + private final int method; + + + private ZipArchiveEntryRequest(ZipArchiveEntry zipArchiveEntry, InputStreamSupplier payloadSupplier) { + // this constructor has "safe" access to all member variables on zipArchiveEntry + this.zipArchiveEntry = zipArchiveEntry; + this.payloadSupplier = payloadSupplier; + this.method = zipArchiveEntry.getMethod(); + } + + /** + * Create a ZipArchiveEntryRequest + * @param zipArchiveEntry The entry to use + * @param payloadSupplier The payload that will be added to the zip entry. + * @return The newly created request + */ + public static ZipArchiveEntryRequest createZipArchiveEntryRequest(ZipArchiveEntry zipArchiveEntry, InputStreamSupplier payloadSupplier) { + return new ZipArchiveEntryRequest(zipArchiveEntry, payloadSupplier); + } + + /** + * The paylaod that will be added to this zip entry + * @return The input stream. + */ + public InputStream getPayloadStream() { + return payloadSupplier.get(); + } + + /** + * The compression method to use + * @return The compression method to use + */ + public int getMethod(){ + return method; + } + + + /** + * Gets the underlying entry. Do not use this methods from threads that did not create the instance itself ! 
+ * @return the zipeArchiveEntry that is basis for this request + */ + ZipArchiveEntry getZipArchiveEntry() { + return zipArchiveEntry; + } +} diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java index 395d85949e3..dcbd14333c7 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java @@ -22,7 +22,9 @@ import java.io.ByteArrayInputStream; import java.io.File; +import java.io.InputStream; +import static org.apache.commons.compress.archivers.zip.ZipArchiveEntryRequest.createZipArchiveEntryRequest; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; @@ -36,12 +38,14 @@ public void putArchiveEntry() throws Exception { final byte[] A_PAYLOAD = "XAAY".getBytes(); ZipArchiveEntry zab = new ZipArchiveEntry("b.txt"); - ByteArrayInputStream payload = new ByteArrayInputStream(B_PAYLOAD); - scatterZipOutputStream.addArchiveEntry(zab, payload, ZipArchiveEntry.DEFLATED); + zab.setMethod(ZipArchiveEntry.DEFLATED); + final ByteArrayInputStream payload = new ByteArrayInputStream(B_PAYLOAD); + scatterZipOutputStream.addArchiveEntry(createZipArchiveEntryRequest(zab, createPayloadSupplier(payload))); ZipArchiveEntry zae = new ZipArchiveEntry("a.txt"); + zae.setMethod(ZipArchiveEntry.DEFLATED); ByteArrayInputStream payload1 = new ByteArrayInputStream(A_PAYLOAD); - scatterZipOutputStream.addArchiveEntry(zae, payload1, ZipArchiveEntry.DEFLATED); + scatterZipOutputStream.addArchiveEntry(createZipArchiveEntryRequest(zae, createPayloadSupplier(payload1))); File target = File.createTempFile("scattertest", ".zip"); ZipArchiveOutputStream outputStream = new ZipArchiveOutputStream(target); @@ -58,4 +62,12 @@ public void putArchiveEntry() throws Exception { assertEquals(4, 
a_entry.getSize()); assertArrayEquals(A_PAYLOAD, IOUtils.toByteArray(zf.getInputStream(a_entry))); } + + private InputStreamSupplier createPayloadSupplier(final ByteArrayInputStream payload) { + return new InputStreamSupplier() { + public InputStream get() { + return payload; + } + }; + } } \ No newline at end of file From e707c8233405d1a2eda96649f0fbcb10b623386b Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 20 Jan 2015 14:24:34 +0000 Subject: [PATCH 085/189] throw a special exception when there is no password for an encrpyted 7z archive - COMPRESS-298 git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1653252 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 5 +++ .../archivers/sevenz/AES256SHA256Decoder.java | 2 +- .../sevenz/PasswordRequiredException.java | 32 +++++++++++++++++++ .../archivers/sevenz/SevenZFileTest.java | 9 ++++++ 4 files changed, 47 insertions(+), 1 deletion(-) create mode 100644 src/main/java/org/apache/commons/compress/archivers/sevenz/PasswordRequiredException.java diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 8770c53e0e1..95cc013f653 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -80,6 +80,11 @@ The type attribute can be add,update,fix,remove. Improved error message when tar encounters a groupId that is too big to write without using the STAR or POSIX format. + + SevenZFile now throws the specific PasswordRequiredException + when it encounters an encrypted stream but no password has + been specified. 
+ Date: Tue, 20 Jan 2015 14:56:40 +0000 Subject: [PATCH 086/189] provide archive name if anything goes wrong extracting stuff from 7z archives git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1653264 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/sevenz/AES256SHA256Decoder.java | 6 ++--- .../compress/archivers/sevenz/CoderBase.java | 3 ++- .../compress/archivers/sevenz/Coders.java | 22 ++++++++++--------- .../archivers/sevenz/DeltaDecoder.java | 2 +- .../archivers/sevenz/LZMA2Decoder.java | 2 +- .../sevenz/PasswordRequiredException.java | 4 ++-- .../compress/archivers/sevenz/SevenZFile.java | 10 +++++---- .../archivers/sevenz/SevenZFileTest.java | 20 +++++++---------- 8 files changed, 35 insertions(+), 34 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java index 91fb486ff26..66d9d96fb52 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java @@ -30,7 +30,7 @@ class AES256SHA256Decoder extends CoderBase { @Override - InputStream decode(final InputStream in, long uncompressedLength, + InputStream decode(final String archiveName, final InputStream in, long uncompressedLength, final Coder coder, final byte[] passwordBytes) throws IOException { return new InputStream() { private boolean isInitialized = false; @@ -46,7 +46,7 @@ private CipherInputStream init() throws IOException { final int ivSize = ((byte0 >> 6) & 1) + (byte1 & 0x0f); final int saltSize = ((byte0 >> 7) & 1) + (byte1 >> 4); if (2 + saltSize + ivSize > coder.properties.length) { - throw new IOException("Salt size + IV size too long"); + throw new IOException("Salt size + IV size too long in " + archiveName); } final byte[] salt = new byte[saltSize]; System.arraycopy(coder.properties, 2, salt, 0, 
saltSize); @@ -54,7 +54,7 @@ private CipherInputStream init() throws IOException { System.arraycopy(coder.properties, 2 + saltSize, iv, 0, ivSize); if (passwordBytes == null) { - throw new PasswordRequiredException(); + throw new PasswordRequiredException(archiveName); } final byte[] aesKeyBytes; if (numCyclesPower == 0x3f) { diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/CoderBase.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/CoderBase.java index a871e523a64..286cc03eb7c 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/CoderBase.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/CoderBase.java @@ -64,7 +64,8 @@ Object getOptionsFromCoder(Coder coder, InputStream in) { /** * @return a stream that reads from in using the configured coder and password. */ - abstract InputStream decode(final InputStream in, long uncomressedLength, + abstract InputStream decode(final String archiveName, + final InputStream in, long uncomressedLength, final Coder coder, byte[] password) throws IOException; /** diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/Coders.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/Coders.java index c488dea481f..fad904c6a2e 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/Coders.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/Coders.java @@ -67,14 +67,15 @@ static CoderBase findByMethod(SevenZMethod method) { return CODER_MAP.get(method); } - static InputStream addDecoder(final InputStream is, long uncompressedLength, + static InputStream addDecoder(final String archiveName, final InputStream is, long uncompressedLength, final Coder coder, final byte[] password) throws IOException { CoderBase cb = findByMethod(SevenZMethod.byId(coder.decompressionMethodId)); if (cb == null) { throw new IOException("Unsupported compression method " + - Arrays.toString(coder.decompressionMethodId)); + 
Arrays.toString(coder.decompressionMethodId) + + " used in " + archiveName); } - return cb.decode(is, uncompressedLength, coder, password); + return cb.decode(archiveName, is, uncompressedLength, coder, password); } static OutputStream addEncoder(final OutputStream out, final SevenZMethod method, @@ -88,7 +89,7 @@ static OutputStream addEncoder(final OutputStream out, final SevenZMethod method static class CopyDecoder extends CoderBase { @Override - InputStream decode(final InputStream in, long uncompressedLength, + InputStream decode(final String archiveName, final InputStream in, long uncompressedLength, final Coder coder, byte[] password) throws IOException { return in; } @@ -100,7 +101,7 @@ OutputStream encode(final OutputStream out, final Object options) { static class LZMADecoder extends CoderBase { @Override - InputStream decode(final InputStream in, long uncompressedLength, + InputStream decode(final String archiveName, final InputStream in, long uncompressedLength, final Coder coder, byte[] password) throws IOException { byte propsByte = coder.properties[0]; long dictSize = coder.properties[1]; @@ -108,7 +109,7 @@ InputStream decode(final InputStream in, long uncompressedLength, dictSize |= (coder.properties[i + 1] & 0xffl) << (8 * i); } if (dictSize > LZMAInputStream.DICT_SIZE_MAX) { - throw new IOException("Dictionary larger than 4GiB maximum size"); + throw new IOException("Dictionary larger than 4GiB maximum size used in " + archiveName); } return new LZMAInputStream(in, uncompressedLength, propsByte, (int) dictSize); } @@ -121,12 +122,13 @@ static class BCJDecoder extends CoderBase { } @Override - InputStream decode(final InputStream in, long uncompressedLength, + InputStream decode(final String archiveName, final InputStream in, long uncompressedLength, final Coder coder, byte[] password) throws IOException { try { return opts.getInputStream(in); } catch (AssertionError e) { - IOException ex = new IOException("BCJ filter needs XZ for Java > 1.4 - see 
" + IOException ex = new IOException("BCJ filter used in " + archiveName + + " needs XZ for Java > 1.4 - see " + "http://commons.apache.org/proper/commons-compress/limitations.html#7Z"); ex.initCause(e); throw ex; @@ -149,7 +151,7 @@ static class DeflateDecoder extends CoderBase { } @Override - InputStream decode(final InputStream in, long uncompressedLength, + InputStream decode(final String archiveName, final InputStream in, long uncompressedLength, final Coder coder, final byte[] password) throws IOException { return new InflaterInputStream(new DummyByteAddingInputStream(in), @@ -168,7 +170,7 @@ static class BZIP2Decoder extends CoderBase { } @Override - InputStream decode(final InputStream in, long uncompressedLength, + InputStream decode(final String archiveName, final InputStream in, long uncompressedLength, final Coder coder, final byte[] password) throws IOException { return new BZip2CompressorInputStream(in); diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/DeltaDecoder.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/DeltaDecoder.java index 7951e712555..e458334e4bb 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/DeltaDecoder.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/DeltaDecoder.java @@ -30,7 +30,7 @@ class DeltaDecoder extends CoderBase { } @Override - InputStream decode(final InputStream in, long uncompressedLength, + InputStream decode(final String archiveName, final InputStream in, long uncompressedLength, final Coder coder, byte[] password) throws IOException { return new DeltaOptions(getOptionsFromCoder(coder)).getInputStream(in); } diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMA2Decoder.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMA2Decoder.java index 7df09f84d98..d3ffe31012c 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMA2Decoder.java +++ 
b/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMA2Decoder.java @@ -32,7 +32,7 @@ class LZMA2Decoder extends CoderBase { } @Override - InputStream decode(final InputStream in, long uncompressedLength, + InputStream decode(final String archiveName, final InputStream in, long uncompressedLength, final Coder coder, byte[] password) throws IOException { try { int dictionarySize = getDictionarySize(coder); diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/PasswordRequiredException.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/PasswordRequiredException.java index 8814d3d13f2..8352443dd35 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/PasswordRequiredException.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/PasswordRequiredException.java @@ -26,7 +26,7 @@ */ public class PasswordRequiredException extends IOException { - public PasswordRequiredException() { - super("Cannot read encrypted files without a password"); + public PasswordRequiredException(String archiveName) { + super("Cannot read encrypted archive " + archiveName + " without a password."); } } diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java index a8c368c6789..398783fb729 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java @@ -67,6 +67,7 @@ public class SevenZFile implements Closeable { static final int SIGNATURE_HEADER_SIZE = 32; + private final String fileName; private RandomAccessFile file; private final Archive archive; private int currentEntryIndex = -1; @@ -91,6 +92,7 @@ public class SevenZFile implements Closeable { public SevenZFile(final File filename, final byte[] password) throws IOException { boolean succeeded = false; this.file = new RandomAccessFile(filename, "r"); + 
this.fileName = filename.getAbsolutePath(); try { archive = readHeaders(password); if (password != null) { @@ -276,8 +278,8 @@ private DataInputStream readEncodedHeader(final DataInputStream header, final Ar if (coder.numInStreams != 1 || coder.numOutStreams != 1) { throw new IOException("Multi input/output stream coders are not yet supported"); } - inputStreamStack = Coders.addDecoder(inputStreamStack, folder.getUnpackSizeForCoder(coder), - coder, password); + inputStreamStack = Coders.addDecoder(fileName, inputStreamStack, + folder.getUnpackSizeForCoder(coder), coder, password); } if (folder.hasCrc) { inputStreamStack = new CRC32VerifyingInputStream(inputStreamStack, @@ -859,8 +861,8 @@ private InputStream buildDecoderStack(final Folder folder, final long folderOffs throw new IOException("Multi input/output stream coders are not yet supported"); } SevenZMethod method = SevenZMethod.byId(coder.decompressionMethodId); - inputStreamStack = Coders.addDecoder(inputStreamStack, folder.getUnpackSizeForCoder(coder), - coder, password); + inputStreamStack = Coders.addDecoder(fileName, inputStreamStack, + folder.getUnpackSizeForCoder(coder), coder, password); methods.addFirst(new SevenZMethodConfiguration(method, Coders.findByMethod(method).getOptionsFromCoder(coder, inputStreamStack))); } diff --git a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java index bc84d44e6f7..dd50de86587 100644 --- a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java @@ -60,15 +60,6 @@ public void test7zDecryptUnarchive() throws Exception { } } - public void test7zDecryptUnarchiveWithoutPassword() throws Exception { - try { - test7zUnarchive(getFile("bla.encrypted.7z"), SevenZMethod.LZMA); - fail("Expected a PasswordRequiredException"); - } catch (PasswordRequiredException ex) 
{ - // expected - } - } - private void test7zUnarchive(File f, SevenZMethod m) throws Exception { test7zUnarchive(f, m, null); } @@ -77,9 +68,14 @@ public void testEncryptedArchiveRequiresPassword() throws Exception { try { new SevenZFile(getFile("bla.encrypted.7z")); fail("shouldn't decrypt without a password"); - } catch (IOException ex) { - assertEquals("Cannot read encrypted files without a password", - ex.getMessage()); + } catch (PasswordRequiredException ex) { + String msg = ex.getMessage(); + assertTrue("Should start with whining about being unable to decrypt", + msg.startsWith("Cannot read encrypted archive ")); + assertTrue("Should finish the sentence properly", + msg.endsWith(" without a password.")); + assertTrue("Should contain archive's name", + msg.contains("bla.encrypted.7z")); } } From ee8f99bbec00035fb82da003395b8bf909cb6c66 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 20 Jan 2015 15:03:10 +0000 Subject: [PATCH 087/189] javadoc git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1653268 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/archivers/sevenz/PasswordRequiredException.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/PasswordRequiredException.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/PasswordRequiredException.java index 8352443dd35..6d6a0051f5f 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/PasswordRequiredException.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/PasswordRequiredException.java @@ -26,6 +26,11 @@ */ public class PasswordRequiredException extends IOException { + /** + * Create a new exception. + * + * @param archiveName name of the archive containing encrypted streams. 
+ */ public PasswordRequiredException(String archiveName) { super("Cannot read encrypted archive " + archiveName + " without a password."); } From 70900a3cf93480fa95d4716b40eeb6a2d5f85f92 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Tue, 20 Jan 2015 16:20:26 +0000 Subject: [PATCH 088/189] Updated changes.xml, fixed a small javadoc issue git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1653282 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 13 +++++++++++++ .../compress/archivers/zip/InputStreamSupplier.java | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 95cc013f653..fcbb35966e6 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -44,6 +44,19 @@ The type attribute can be add,update,fix,remove. + + + Added support for parallel compression. This low-level API allows + a client to build a zip/jar file by using the class + org.apache.commons.compress.archivers.zip.ParallelScatterZipCreator. + + Zip documentation updated with further notes about parallel features. + + Please note that some aspects of jar creation need to be + handled by client code and is not part of commons-compress for this release. + + Cut overall object instantiation in half by changing file diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/InputStreamSupplier.java b/src/main/java/org/apache/commons/compress/archivers/zip/InputStreamSupplier.java index ecc0d4caf50..4a3d4070a83 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/InputStreamSupplier.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/InputStreamSupplier.java @@ -25,7 +25,7 @@ public interface InputStreamSupplier { /** * Supply an input stream for a resource. - * @return the input stream, may be null if there is no content for the resource. + * @return the input stream. Should never null, but may be an empty stream. 
*/ InputStream get(); } From a103f8aac368abed3f3e40733886a3833bb7e1a1 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 20 Jan 2015 16:24:06 +0000 Subject: [PATCH 089/189] some formatting changes and a stray semicolon git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1653284 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/ZipArchiveOutputStream.java | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index 7f7e7e7f7e2..c53ce9f25a6 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -204,6 +204,8 @@ public class ZipArchiveOutputStream extends ArchiveOutputStream { */ private static final byte[] LZERO = {0, 0, 0, 0}; + private static final byte[] ONE = ZipLong.getBytes(1L); + /** * Holds the offsets of the LFH starts for each entry. */ @@ -266,6 +268,9 @@ public class ZipArchiveOutputStream extends ArchiveOutputStream { private Zip64Mode zip64Mode = Zip64Mode.AsNeeded; + private final byte[] copyBuffer = new byte[32768]; + private final Calendar calendarInstance = Calendar.getInstance(); + /** * Creates a new ZIP OutputStream filtering the underlying stream. 
* @param out the outputstream to zip @@ -499,7 +504,6 @@ private void closeCopiedEntry(boolean phased) throws IOException { closeEntry(actuallyNeedsZip64, phased); } - private void closeEntry(boolean actuallyNeedsZip64, boolean phased) throws IOException { if (!phased && raf != null) { rewriteSizesAndCrc(actuallyNeedsZip64); @@ -627,6 +631,7 @@ private boolean isZip64Required(ZipArchiveEntry entry1, Zip64Mode requestedMode) private boolean isTooLageForZip32(ZipArchiveEntry zipArchiveEntry){ return zipArchiveEntry.getSize() >= ZIP64_MAGIC || zipArchiveEntry.getCompressedSize() >= ZIP64_MAGIC; } + /** * When using random access output, write the local file header * and potentiall the ZIP64 extra containing the correct CRC and @@ -895,10 +900,6 @@ private void writeCounted(byte[] data) throws IOException { streamCompressor.writeCounted(data); } - - - final byte[] copyBuffer = new byte[32768]; - private void copyFromZipInputStream(InputStream src) throws IOException { if (entry == null) { throw new IllegalStateException("No current entry"); @@ -1159,10 +1160,8 @@ private byte[] createCentralFileHeader(ZipArchiveEntry ze) throws IOException { handleZip64Extra(ze, lfhOffset, needsZip64Extra); - return createCentralFileHeader(ze, getName(ze), lfhOffset, needsZip64Extra); - }; - - private final Calendar calendarInstance = Calendar.getInstance(); + return createCentralFileHeader(ze, getName(ze), lfhOffset, needsZip64Extra); + } /** * Writes the central file header entry. @@ -1317,8 +1316,6 @@ protected void writeCentralDirectoryEnd() throws IOException { streamCompressor.writeCounted(data.array(), data.arrayOffset(), dataLen); } - private static final byte[] ONE = ZipLong.getBytes(1L); - /** * Writes the "ZIP64 End of central dir record" and * "ZIP64 End of central dir locator". 
From 4571c58dd7ff37a70765d0226a37e9a4f7b25baf Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 20 Jan 2015 21:02:24 +0000 Subject: [PATCH 090/189] Arrays.copyOf requires Java6 git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1653370 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/archivers/zip/ZipArchiveEntry.java | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java index 60827588563..6ee9dbbd5cc 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java @@ -366,11 +366,17 @@ private ZipExtraField[] getAllExtraFieldsNoCopy() { } private ZipExtraField[] copyOf(ZipExtraField[] src){ - return Arrays.copyOf(src, src.length); + return copyOf(src, src.length); + } + + private ZipExtraField[] copyOf(ZipExtraField[] src, int length) { + ZipExtraField[] cpy = new ZipExtraField[length]; + System.arraycopy(src, 0, cpy, 0, Math.min(src.length, length)); + return cpy; } private ZipExtraField[] getMergedFields() { - final ZipExtraField[] zipExtraFields = Arrays.copyOf(extraFields, extraFields.length + 1); + final ZipExtraField[] zipExtraFields = copyOf(extraFields, extraFields.length + 1); zipExtraFields[zipExtraFields.length] = unparseableExtra; return zipExtraFields; } @@ -401,7 +407,7 @@ public void addExtraField(ZipExtraField ze) { if (getExtraField(ze.getHeaderId())!= null){ removeExtraField(ze.getHeaderId()); } - final ZipExtraField[] zipExtraFields = Arrays.copyOf(extraFields, extraFields.length + 1); + final ZipExtraField[] zipExtraFields = copyOf(extraFields, extraFields.length + 1); zipExtraFields[zipExtraFields.length -1] = ze; extraFields = zipExtraFields; } From 0911c6ec7ead06cf9594847412b24bd56e98cc46 Mon Sep 17 00:00:00 2001 From: 
Stefan Bodewig Date: Thu, 22 Jan 2015 16:32:24 +0000 Subject: [PATCH 091/189] unused import git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1653937 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/archivers/zip/ParallelScatterZipCreator.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java index d670222059b..9f6e845fddf 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java @@ -19,7 +19,6 @@ import java.io.File; import java.io.IOException; -import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.concurrent.Callable; From b5fcfd52e313bc6a2b6b38eccdf9534394c62b83 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Thu, 22 Jan 2015 17:14:24 +0000 Subject: [PATCH 092/189] Removed unused import, upgraded statement to contains() git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1653946 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress/archivers/ar/ArArchiveOutputStream.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveOutputStream.java index 17fe61f778c..3a869c4e9aa 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveOutputStream.java @@ -138,7 +138,7 @@ private long writeEntryHeader( final ArArchiveEntry pEntry ) throws IOException throw new IOException("filename too long, > 16 chars: "+n); } if (LONGFILE_BSD == longFileMode && - (n.length() > 16 || n.indexOf(" ") > -1)) { + 
(n.length() > 16 || n.contains(" "))) { mustAppendName = true; offset += write(ArArchiveInputStream.BSD_LONGNAME_PREFIX + String.valueOf(n.length())); From a53300f877f77cc0e35f1ee03f9f8ae4b72fcd12 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 23 Jan 2015 09:07:27 +0000 Subject: [PATCH 093/189] cleanup in tests git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1654149 13f79535-47bb-0310-9956-ffa450edef68 --- .../zip/ParallelScatterZipCreatorTest.java | 19 +++++++++++++++---- .../zip/ScatterZipOutputStreamTest.java | 19 +++++++++++++++---- 2 files changed, 30 insertions(+), 8 deletions(-) diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java index a512e369dd0..416f90aab17 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java @@ -18,6 +18,7 @@ package org.apache.commons.compress.archivers.zip; import org.apache.commons.compress.utils.IOUtils; +import org.junit.After; import org.junit.Test; import java.io.ByteArrayInputStream; @@ -31,6 +32,7 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; +import static org.apache.commons.compress.AbstractTestCase.tryHardToDelete; import static org.junit.Assert.*; @SuppressWarnings("OctalInteger") @@ -38,10 +40,19 @@ public class ParallelScatterZipCreatorTest { private final int NUMITEMS = 5000; + private File result; + private File tmp; + + @After + public void cleanup() { + tryHardToDelete(result); + tryHardToDelete(tmp); + } + @Test public void concurrent() throws Exception { - File result = File.createTempFile("parallelScatterGather1", ""); + result = File.createTempFile("parallelScatterGather1", ""); ZipArchiveOutputStream zos = new ZipArchiveOutputStream(result); 
zos.setEncoding("UTF-8"); ParallelScatterZipCreator zipCreator = new ParallelScatterZipCreator(); @@ -57,14 +68,14 @@ public void concurrent() @Test public void callableApi() throws Exception { - File result = File.createTempFile("parallelScatterGather2", ""); + result = File.createTempFile("parallelScatterGather2", ""); ZipArchiveOutputStream zos = new ZipArchiveOutputStream(result); zos.setEncoding("UTF-8"); ExecutorService es = Executors.newFixedThreadPool(1); ScatterGatherBackingStoreSupplier supp = new ScatterGatherBackingStoreSupplier() { public ScatterGatherBackingStore get() throws IOException { - return new FileBasedScatterGatherBackingStore(File.createTempFile("parallelscatter", "n1")); + return new FileBasedScatterGatherBackingStore(tmp = File.createTempFile("parallelscatter", "n1")); } }; @@ -129,4 +140,4 @@ private ZipArchiveEntry createZipArchiveEntry(Map entries, int i za.setUnixMode(UnixStat.FILE_FLAG | 0664); return za; } -} \ No newline at end of file +} diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java index dcbd14333c7..6c76d56521d 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java @@ -18,22 +18,33 @@ package org.apache.commons.compress.archivers.zip; import org.apache.commons.compress.utils.IOUtils; +import org.junit.After; import org.junit.Test; import java.io.ByteArrayInputStream; import java.io.File; import java.io.InputStream; +import static org.apache.commons.compress.AbstractTestCase.tryHardToDelete; import static org.apache.commons.compress.archivers.zip.ZipArchiveEntryRequest.createZipArchiveEntryRequest; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; public class ScatterZipOutputStreamTest { + private File 
scatterFile = null; + private File target = null; + + @After + public void cleanup() { + tryHardToDelete(scatterFile); + tryHardToDelete(target); + } + @Test public void putArchiveEntry() throws Exception { - File scatteFile = File.createTempFile("scattertest", ".notzip"); - ScatterZipOutputStream scatterZipOutputStream = ScatterZipOutputStream.fileBased(scatteFile); + scatterFile = File.createTempFile("scattertest", ".notzip"); + ScatterZipOutputStream scatterZipOutputStream = ScatterZipOutputStream.fileBased(scatterFile); final byte[] B_PAYLOAD = "RBBBBBBS".getBytes(); final byte[] A_PAYLOAD = "XAAY".getBytes(); @@ -47,7 +58,7 @@ public void putArchiveEntry() throws Exception { ByteArrayInputStream payload1 = new ByteArrayInputStream(A_PAYLOAD); scatterZipOutputStream.addArchiveEntry(createZipArchiveEntryRequest(zae, createPayloadSupplier(payload1))); - File target = File.createTempFile("scattertest", ".zip"); + target = File.createTempFile("scattertest", ".zip"); ZipArchiveOutputStream outputStream = new ZipArchiveOutputStream(target); scatterZipOutputStream.writeTo( outputStream); outputStream.close(); @@ -70,4 +81,4 @@ public InputStream get() { } }; } -} \ No newline at end of file +} From 5d083b55a6d160daf21f4fdea106d5db9c95e12b Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Fri, 23 Jan 2015 18:22:05 +0000 Subject: [PATCH 094/189] Review comments from Emmanuel Bourg - Added @since tags - Improved javadocs in several places - Extracted ScatterStatistics class git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1654291 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/InputStreamSupplier.java | 7 +++ .../zip/ParallelScatterZipCreator.java | 28 ++++++---- .../ScatterGatherBackingStoreSupplier.java | 5 ++ .../archivers/zip/ScatterStatistics.java | 56 +++++++++++++++++++ .../zip/ZipArchiveEntryPredicate.java | 2 + .../archivers/zip/ZipArchiveEntryRequest.java | 2 + 6 files changed, 90 insertions(+), 10 deletions(-) create 
mode 100644 src/main/java/org/apache/commons/compress/archivers/zip/ScatterStatistics.java diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/InputStreamSupplier.java b/src/main/java/org/apache/commons/compress/archivers/zip/InputStreamSupplier.java index 4a3d4070a83..559e7b52981 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/InputStreamSupplier.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/InputStreamSupplier.java @@ -21,6 +21,13 @@ import java.io.InputStream; +/** + * Supplies input streams. + * + * Implementations are required to be thread safe. + * + * @since 1.10 + */ public interface InputStreamSupplier { /** diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java index 9f6e845fddf..91ccfedb56f 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java @@ -44,6 +44,7 @@ * The client can supply an ExecutorService, but for reasons of memory model consistency, * this will be shut down by this class prior to completion. *

    + * @since 1.10 */ public class ParallelScatterZipCreator { private final List streams = synchronizedList(new ArrayList()); @@ -85,7 +86,8 @@ protected ScatterZipOutputStream initialValue() { }; /** - * Create a ParallelScatterZipCreator with default threads + * Create a ParallelScatterZipCreator with default threads, which is set to the number of available + * processors, as defined by java.lang.Runtime#availableProcessors() */ public ParallelScatterZipCreator() { this(Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors())); @@ -129,8 +131,11 @@ public void addArchiveEntry(final ZipArchiveEntry zipArchiveEntry, final InputSt } /** - * Submit a callable for compression - * @param callable The callable to run + * Submit a callable for compression. + * + * @see #createCallable for details of if/when to use this. + * + * @param callable The callable to run, created by #createCallable, possibly wrapped by caller. */ public final void submit(Callable callable) { futures.add(es.submit(callable)); @@ -141,18 +146,22 @@ public final void submit(Callable callable) { * *

    This method is expected to be called from a single client thread.

    *

    - * This method is used by clients that want finer grained control over how the callable is - * created, possibly wanting to wrap this callable in a different callable

    + * Consider using #addArchiveEntry, which wraps this method and #submit. The most common use case + * for using #createCallable and #submit from a client is if you want to wrap the callable in something + * that can be prioritized by the supplied #ExecutorService, for instance to process large or slow files first. + * Since the creation of the #ExecutorService is handled by the client, all of this is up to the client. * * @param zipArchiveEntry The entry to add. * @param source The source input stream supplier - * @return A callable that will be used to check for errors + * @return A callable that should subsequently passed to #submit, possibly in a wrapped/adapted from. The + * value of this callable is not used, but any exceptions happening inside the compression + * will be propagated through the callable. */ public final Callable createCallable(ZipArchiveEntry zipArchiveEntry, InputStreamSupplier source) { final int method = zipArchiveEntry.getMethod(); if (method == ZipMethod.UNKNOWN_CODE) { - throw new IllegalArgumentException("Method must be set on the supplied zipArchiveEntry"); + throw new IllegalArgumentException("Method must be set on zipArchiveEntry: " + zipArchiveEntry); } final ZipArchiveEntryRequest zipArchiveEntryRequest = createZipArchiveEntryRequest(zipArchiveEntry, source); return new Callable() { @@ -203,9 +212,8 @@ public void writeTo(ZipArchiveOutputStream targetStream) * * @return A string */ - public String getStatisticsMessage() { - return "Compression: " + (compressionDoneAt - startedAt) + "ms," + - "Merging files: " + (scatterDoneAt - compressionDoneAt) + "ms"; + public ScatterStatistics getStatisticsMessage() { + return new ScatterStatistics(compressionDoneAt - startedAt, scatterDoneAt - compressionDoneAt); } } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStoreSupplier.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStoreSupplier.java index 
54359dcee5e..9583f5d4599 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStoreSupplier.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStoreSupplier.java @@ -20,6 +20,11 @@ import java.io.IOException; +/** + * Supplies ScatterGatherBackingStore instances. + * + * @since 1.10 + */ public interface ScatterGatherBackingStoreSupplier { /** * Get a ScatterGatherBackingStore. diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterStatistics.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterStatistics.java new file mode 100644 index 00000000000..3839af101ad --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterStatistics.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + package org.apache.commons.compress.archivers.zip; + +/** + * Provides information about a scatter compression run. 
+ * + * @since 1.10 + */ +public class ScatterStatistics { + private final long compressionElapsed; + private final long mergingElapsed; + + ScatterStatistics(long compressionElapsed, long mergingElapsed) { + this.compressionElapsed = compressionElapsed; + this.mergingElapsed = mergingElapsed; + } + + /** + * The number of milliseconds elapsed in the parallel compression phase + * @return The number of milliseconds elapsed + */ + public long getCompressionElapsed() { + return compressionElapsed; + } + + /** + * The number of milliseconds elapsed in merging the results of the parallel compression, the IO phase + * @return The number of milliseconds elapsed + */ + public long getMergingElapsed() { + return mergingElapsed; + } + + @Override + public String toString() { + return "compressionElapsed=" + compressionElapsed + "ms, mergingElapsed=" + mergingElapsed + "ms"; + } + +} diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryPredicate.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryPredicate.java index 8808248a158..e7122b9a6ce 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryPredicate.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryPredicate.java @@ -21,6 +21,8 @@ /** * A predicate to test if a #ZipArchiveEntry matches a criteria. 
* Some day this can extend java.util.function.Predicate + * + * @since 1.10 */ public interface ZipArchiveEntryPredicate { /** diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequest.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequest.java index 5d88372f3b6..90905005403 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequest.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequest.java @@ -21,6 +21,8 @@ /** * A Thread-safe representation of a ZipArchiveEntry that is used to add entries to parallel archives. + * + * @since 1.10 */ public class ZipArchiveEntryRequest { /* From 1a90ed5e8713392507996df3bc20b2dfe32ab41e Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Fri, 23 Jan 2015 21:03:32 +0000 Subject: [PATCH 095/189] Added a sample in the docs. Sample is also present in code form (test scope) with test case git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1654382 13f79535-47bb-0310-9956-ffa450edef68 --- src/site/xdoc/examples.xml | 29 ++++++++ src/site/xdoc/zip.xml | 2 + .../compress/archivers/zip/ScatterSample.java | 45 ++++++++++++ .../archivers/zip/ScatterSampleTest.java | 71 +++++++++++++++++++ 4 files changed, 147 insertions(+) create mode 100644 src/test/java/org/apache/commons/compress/archivers/zip/ScatterSample.java create mode 100644 src/test/java/org/apache/commons/compress/archivers/zip/ScatterSampleTest.java diff --git a/src/site/xdoc/examples.xml b/src/site/xdoc/examples.xml index ed49718d940..0e93028d31f 100644 --- a/src/site/xdoc/examples.xml +++ b/src/site/xdoc/examples.xml @@ -289,6 +289,35 @@ try { content.close(); } ]]> + +

    Creating a zip file with multiple threads:

    + + A simple implementation to create a zip file might be like this: + + +public class ScatterSample { + + ParallelScatterZipCreator scatterZipCreator = new ParallelScatterZipCreator(); + ScatterZipOutputStream dirs = ScatterZipOutputStream.fileBased(File.createTempFile("scatter-dirs", "tmp")); + + public ScatterSample() throws IOException { + } + + public void addEntry(ZipArchiveEntry zipArchiveEntry, InputStreamSupplier streamSupplier) throws IOException { + if (zipArchiveEntry.isDirectory() && !zipArchiveEntry.isUnixSymlink()) + dirs.addArchiveEntry(ZipArchiveEntryRequest.createZipArchiveEntryRequest(zipArchiveEntry, streamSupplier)); + else + scatterZipCreator.addArchiveEntry( zipArchiveEntry, streamSupplier); + } + + public void writeTo(ZipArchiveOutputStream zipArchiveOutputStream) + throws IOException, ExecutionException, InterruptedException { + dirs.writeTo(zipArchiveOutputStream); + dirs.close(); + scatterZipCreator.writeTo(zipArchiveOutputStream); + } +} + diff --git a/src/site/xdoc/zip.xml b/src/site/xdoc/zip.xml index d3041349d19..272f53a7372 100644 --- a/src/site/xdoc/zip.xml +++ b/src/site/xdoc/zip.xml @@ -513,6 +513,8 @@

    There is no guarantee of order of the entries when writing a Zip file with ParallelScatterZipCreator.

    + + See the examples section for a code sample demonstrating how to make a zip file.
    diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSample.java b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSample.java new file mode 100644 index 00000000000..fd57db9baed --- /dev/null +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSample.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +package org.apache.commons.compress.archivers.zip; + +import java.io.File; +import java.io.IOException; +import java.util.concurrent.ExecutionException; + +public class ScatterSample { + + ParallelScatterZipCreator scatterZipCreator = new ParallelScatterZipCreator(); + ScatterZipOutputStream dirs = ScatterZipOutputStream.fileBased(File.createTempFile("scatter-dirs", "tmp")); + + public ScatterSample() throws IOException { + } + + public void addEntry(ZipArchiveEntry zipArchiveEntry, InputStreamSupplier streamSupplier) throws IOException { + if (zipArchiveEntry.isDirectory() && !zipArchiveEntry.isUnixSymlink()) + dirs.addArchiveEntry(ZipArchiveEntryRequest.createZipArchiveEntryRequest(zipArchiveEntry, streamSupplier)); + else + scatterZipCreator.addArchiveEntry( zipArchiveEntry, streamSupplier); + } + + public void writeTo(ZipArchiveOutputStream zipArchiveOutputStream) + throws IOException, ExecutionException, InterruptedException { + dirs.writeTo(zipArchiveOutputStream); + dirs.close(); + scatterZipCreator.writeTo(zipArchiveOutputStream); + } +} diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSampleTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSampleTest.java new file mode 100644 index 00000000000..9497ce03b22 --- /dev/null +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSampleTest.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.zip; + +import org.apache.commons.compress.utils.IOUtils; +import org.junit.Test; + +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.util.concurrent.ExecutionException; +import java.util.zip.ZipEntry; + +import static org.junit.Assert.*; + +public class ScatterSampleTest { + + @Test + public void testSample() throws Exception { + File result = File.createTempFile("testSample", "fe"); + + createFile(result); + checkFile(result); + } + + private void createFile(File result) throws IOException, ExecutionException, InterruptedException { + ScatterSample scatterSample = new ScatterSample(); + ZipArchiveEntry archiveEntry = new ZipArchiveEntry("test1.xml"); + archiveEntry.setMethod(ZipEntry.DEFLATED); + InputStreamSupplier supp = new InputStreamSupplier() { + public InputStream get() { + return new ByteArrayInputStream("Hello".getBytes()); + } + }; + + scatterSample.addEntry(archiveEntry, supp); + ZipArchiveOutputStream zipArchiveOutputStream = new ZipArchiveOutputStream(result); + scatterSample.writeTo(zipArchiveOutputStream); + zipArchiveOutputStream.close(); + } + + private void checkFile(File result) throws IOException { + ZipFile zf = new ZipFile(result); + ZipArchiveEntry archiveEntry1 = zf.getEntries().nextElement(); + assertEquals( "test1.xml", archiveEntry1.getName()); + InputStream inputStream = zf.getInputStream(archiveEntry1); + byte[] b = new byte[6]; + int i = IOUtils.readFully(inputStream, b); + 
assertEquals(5, i); + assertEquals('H', b[0]); + assertEquals('o', b[4]); + zf.close(); + result.delete(); + } +} \ No newline at end of file From e0ca4f5e4c8e033e5d2a379db4ca2b7f6d029676 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sat, 24 Jan 2015 12:16:38 +0000 Subject: [PATCH 096/189] move PasswordRequiredException git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1654501 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/sevenz/AES256SHA256Decoder.java | 1 + .../PasswordRequiredException.java | 11 ++++++----- .../compress/archivers/sevenz/SevenZFileTest.java | 3 ++- 3 files changed, 9 insertions(+), 6 deletions(-) rename src/main/java/org/apache/commons/compress/{archivers/sevenz => exceptions}/PasswordRequiredException.java (73%) diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java index 66d9d96fb52..ba8e9a67fac 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java @@ -27,6 +27,7 @@ import javax.crypto.SecretKey; import javax.crypto.spec.IvParameterSpec; import javax.crypto.spec.SecretKeySpec; +import org.apache.commons.compress.exceptions.PasswordRequiredException; class AES256SHA256Decoder extends CoderBase { @Override diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/PasswordRequiredException.java b/src/main/java/org/apache/commons/compress/exceptions/PasswordRequiredException.java similarity index 73% rename from src/main/java/org/apache/commons/compress/archivers/sevenz/PasswordRequiredException.java rename to src/main/java/org/apache/commons/compress/exceptions/PasswordRequiredException.java index 6d6a0051f5f..8198b0be871 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/PasswordRequiredException.java +++ 
b/src/main/java/org/apache/commons/compress/exceptions/PasswordRequiredException.java @@ -15,12 +15,12 @@ * limitations under the License. * */ -package org.apache.commons.compress.archivers.sevenz; +package org.apache.commons.compress.exceptions; import java.io.IOException; /** - * Exception thrown when trying to read an encrypted entry without + * Exception thrown when trying to read an encrypted entry or file without * configuring a password. * @since 1.10 */ @@ -29,9 +29,10 @@ public class PasswordRequiredException extends IOException { /** * Create a new exception. * - * @param archiveName name of the archive containing encrypted streams. + * @param name name of the archive containing encrypted streams or + * the encrypted file. */ - public PasswordRequiredException(String archiveName) { - super("Cannot read encrypted archive " + archiveName + " without a password."); + public PasswordRequiredException(String name) { + super("Cannot read encrypted content from " + name + " without a password."); } } diff --git a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java index dd50de86587..5af1d86b724 100644 --- a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java @@ -23,6 +23,7 @@ import java.util.Arrays; import javax.crypto.Cipher; import org.apache.commons.compress.AbstractTestCase; +import org.apache.commons.compress.exceptions.PasswordRequiredException; public class SevenZFileTest extends AbstractTestCase { private static final String TEST2_CONTENT = "\r\n Date: Sat, 24 Jan 2015 19:26:20 +0000 Subject: [PATCH 097/189] Moved zip-unspecific scatter/gather code to org.apache.commons.compress.parallel git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1654572 13f79535-47bb-0310-9956-ffa450edef68 --- findbugs-exclude-filter.xml 
| 2 +- .../compress/archivers/zip/ParallelScatterZipCreator.java | 5 +++++ .../compress/archivers/zip/ScatterZipOutputStream.java | 2 ++ .../commons/compress/archivers/zip/StreamCompressor.java | 2 ++ .../compress/archivers/zip/ZipArchiveEntryRequest.java | 2 ++ .../FileBasedScatterGatherBackingStore.java | 2 +- .../{archivers/zip => parallel}/InputStreamSupplier.java | 6 ++++-- .../zip => parallel}/ScatterGatherBackingStore.java | 8 +++++--- .../ScatterGatherBackingStoreSupplier.java | 4 ++-- .../archivers/zip/ParallelScatterZipCreatorTest.java | 4 ++++ .../commons/compress/archivers/zip/ScatterSample.java | 2 ++ .../commons/compress/archivers/zip/ScatterSampleTest.java | 1 + .../archivers/zip/ScatterZipOutputStreamTest.java | 1 + 13 files changed, 32 insertions(+), 9 deletions(-) rename src/main/java/org/apache/commons/compress/{archivers/zip => parallel}/FileBasedScatterGatherBackingStore.java (97%) rename src/main/java/org/apache/commons/compress/{archivers/zip => parallel}/InputStreamSupplier.java (81%) rename src/main/java/org/apache/commons/compress/{archivers/zip => parallel}/ScatterGatherBackingStore.java (87%) rename src/main/java/org/apache/commons/compress/{archivers/zip => parallel}/ScatterGatherBackingStoreSupplier.java (92%) diff --git a/findbugs-exclude-filter.xml b/findbugs-exclude-filter.xml index f70be02bee3..7074237ca2b 100644 --- a/findbugs-exclude-filter.xml +++ b/findbugs-exclude-filter.xml @@ -160,7 +160,7 @@ - + diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java index 91ccfedb56f..eec6a5698c1 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java @@ -17,6 +17,11 @@ */ package org.apache.commons.compress.archivers.zip; +import 
org.apache.commons.compress.parallel.FileBasedScatterGatherBackingStore; +import org.apache.commons.compress.parallel.InputStreamSupplier; +import org.apache.commons.compress.parallel.ScatterGatherBackingStore; +import org.apache.commons.compress.parallel.ScatterGatherBackingStoreSupplier; + import java.io.File; import java.io.IOException; import java.util.ArrayList; diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java index 35551c87024..7c890c096b5 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java @@ -18,6 +18,8 @@ package org.apache.commons.compress.archivers.zip; +import org.apache.commons.compress.parallel.FileBasedScatterGatherBackingStore; +import org.apache.commons.compress.parallel.ScatterGatherBackingStore; import org.apache.commons.compress.utils.BoundedInputStream; import java.io.Closeable; diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java index 4eba69bc52c..525fe6c634e 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java @@ -17,6 +17,8 @@ */ package org.apache.commons.compress.archivers.zip; +import org.apache.commons.compress.parallel.ScatterGatherBackingStore; + import java.io.Closeable; import java.io.DataOutput; import java.io.IOException; diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequest.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequest.java index 90905005403..06bb791e15f 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequest.java +++ 
b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequest.java @@ -17,6 +17,8 @@ */ package org.apache.commons.compress.archivers.zip; +import org.apache.commons.compress.parallel.InputStreamSupplier; + import java.io.InputStream; /** diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/FileBasedScatterGatherBackingStore.java b/src/main/java/org/apache/commons/compress/parallel/FileBasedScatterGatherBackingStore.java similarity index 97% rename from src/main/java/org/apache/commons/compress/archivers/zip/FileBasedScatterGatherBackingStore.java rename to src/main/java/org/apache/commons/compress/parallel/FileBasedScatterGatherBackingStore.java index 42c24b8f9a5..59ee6804267 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/FileBasedScatterGatherBackingStore.java +++ b/src/main/java/org/apache/commons/compress/parallel/FileBasedScatterGatherBackingStore.java @@ -15,7 +15,7 @@ * limitations under the License. * */ -package org.apache.commons.compress.archivers.zip; +package org.apache.commons.compress.parallel; import java.io.File; import java.io.FileNotFoundException; diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/InputStreamSupplier.java b/src/main/java/org/apache/commons/compress/parallel/InputStreamSupplier.java similarity index 81% rename from src/main/java/org/apache/commons/compress/archivers/zip/InputStreamSupplier.java rename to src/main/java/org/apache/commons/compress/parallel/InputStreamSupplier.java index 559e7b52981..f227e643132 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/InputStreamSupplier.java +++ b/src/main/java/org/apache/commons/compress/parallel/InputStreamSupplier.java @@ -17,14 +17,16 @@ * under the License. */ -package org.apache.commons.compress.archivers.zip; +package org.apache.commons.compress.parallel; import java.io.InputStream; /** * Supplies input streams. * - * Implementations are required to be thread safe. 
+ * Implementations are required to support thread-handover. While an instance will + * not be accessed concurrently by multiple threads, it will be called by + * a different thread than it was created on. * * @since 1.10 */ diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStore.java b/src/main/java/org/apache/commons/compress/parallel/ScatterGatherBackingStore.java similarity index 87% rename from src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStore.java rename to src/main/java/org/apache/commons/compress/parallel/ScatterGatherBackingStore.java index 9e33c53375c..ed3450dbfe1 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStore.java +++ b/src/main/java/org/apache/commons/compress/parallel/ScatterGatherBackingStore.java @@ -15,15 +15,17 @@ * limitations under the License. * */ -package org.apache.commons.compress.archivers.zip; +package org.apache.commons.compress.parallel; import java.io.Closeable; import java.io.IOException; import java.io.InputStream; /** - *

    Abstraction over a scatter-output zip archives can be written to - * with a method to gather all content from an InputStream later on.

    + *

    Store intermediate payload in a scatter-gather scenario. + * Multiple threads write their payload to a backing store, which can + * subsequently be reversed to an #InputStream to be used as input in the + * gather phase.

    * *

    It is the responsibility of the allocator of an instance of this class * to close this. Closing it should clear off any allocated structures diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStoreSupplier.java b/src/main/java/org/apache/commons/compress/parallel/ScatterGatherBackingStoreSupplier.java similarity index 92% rename from src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStoreSupplier.java rename to src/main/java/org/apache/commons/compress/parallel/ScatterGatherBackingStoreSupplier.java index 9583f5d4599..ad0356f3025 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterGatherBackingStoreSupplier.java +++ b/src/main/java/org/apache/commons/compress/parallel/ScatterGatherBackingStoreSupplier.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.apache.commons.compress.archivers.zip; +package org.apache.commons.compress.parallel; import java.io.IOException; @@ -27,7 +27,7 @@ */ public interface ScatterGatherBackingStoreSupplier { /** - * Get a ScatterGatherBackingStore. + * Create a ScatterGatherBackingStore. 
* * @return a ScatterGatherBackingStore, not null */ diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java index 416f90aab17..a6d6df47294 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java @@ -17,6 +17,10 @@ */ package org.apache.commons.compress.archivers.zip; +import org.apache.commons.compress.parallel.FileBasedScatterGatherBackingStore; +import org.apache.commons.compress.parallel.InputStreamSupplier; +import org.apache.commons.compress.parallel.ScatterGatherBackingStore; +import org.apache.commons.compress.parallel.ScatterGatherBackingStoreSupplier; import org.apache.commons.compress.utils.IOUtils; import org.junit.After; import org.junit.Test; diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSample.java b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSample.java index fd57db9baed..cede94f72a5 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSample.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSample.java @@ -17,6 +17,8 @@ */ package org.apache.commons.compress.archivers.zip; +import org.apache.commons.compress.parallel.InputStreamSupplier; + import java.io.File; import java.io.IOException; import java.util.concurrent.ExecutionException; diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSampleTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSampleTest.java index 9497ce03b22..b0da1c5a88f 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSampleTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSampleTest.java @@ -17,6 +17,7 @@ */ package 
org.apache.commons.compress.archivers.zip; +import org.apache.commons.compress.parallel.InputStreamSupplier; import org.apache.commons.compress.utils.IOUtils; import org.junit.Test; diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java index 6c76d56521d..092b8bd7092 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java @@ -17,6 +17,7 @@ */ package org.apache.commons.compress.archivers.zip; +import org.apache.commons.compress.parallel.InputStreamSupplier; import org.apache.commons.compress.utils.IOUtils; import org.junit.After; import org.junit.Test; From 9b06ac40e6831a64814f559739ea7d8a30ef2b88 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Sun, 25 Jan 2015 08:47:37 +0000 Subject: [PATCH 098/189] Javadoc review fixes git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1654622 13f79535-47bb-0310-9956-ffa450edef68 --- .../zip/ParallelScatterZipCreator.java | 39 ++++++++++--------- .../archivers/zip/ScatterZipOutputStream.java | 14 +++---- .../archivers/zip/StreamCompressor.java | 17 ++++---- .../parallel/ScatterGatherBackingStore.java | 2 +- .../ScatterGatherBackingStoreSupplier.java | 2 +- src/site/xdoc/examples.xml | 2 +- 6 files changed, 39 insertions(+), 37 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java index eec6a5698c1..e058de7c355 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java @@ -39,15 +39,15 @@ import static 
org.apache.commons.compress.archivers.zip.ZipArchiveEntryRequest.createZipArchiveEntryRequest; /** - * Creates a zip in parallel by using multiple threadlocal #ScatterZipOutputStream instances. + * Creates a zip in parallel by using multiple threadlocal {@link ScatterZipOutputStream} instances. *

    * Note that this class generally makes no guarantees about the order of things written to * the output file. Things that need to come in a specific order (manifests, directories) * must be handled by the client of this class, usually by writing these things to the - * #ZipArchiveOutputStream *before* calling #writeTo on this class.

    + * {@link ZipArchiveOutputStream} before calling {@link #writeTo writeTo} on this class.

    *

    - * The client can supply an ExecutorService, but for reasons of memory model consistency, - * this will be shut down by this class prior to completion. + * The client can supply an {@link java.util.concurrent.ExecutorService}, but for reasons of + * memory model consistency, this will be shut down by this class prior to completion. *

    * @since 1.10 */ @@ -92,7 +92,7 @@ protected ScatterZipOutputStream initialValue() { /** * Create a ParallelScatterZipCreator with default threads, which is set to the number of available - * processors, as defined by java.lang.Runtime#availableProcessors() + * processors, as defined by {@link java.lang.Runtime#availableProcessors} */ public ParallelScatterZipCreator() { this(Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors())); @@ -138,9 +138,9 @@ public void addArchiveEntry(final ZipArchiveEntry zipArchiveEntry, final InputSt /** * Submit a callable for compression. * - * @see #createCallable for details of if/when to use this. + * @see ParallelScatterZipCreator#createCallable for details of if/when to use this. * - * @param callable The callable to run, created by #createCallable, possibly wrapped by caller. + * @param callable The callable to run, created by {@link #createCallable createCallable}, possibly wrapped by caller. */ public final void submit(Callable callable) { futures.add(es.submit(callable)); @@ -148,19 +148,20 @@ public final void submit(Callable callable) { /** * Create a callable that will compress the given archive entry. - * + *

    *

    This method is expected to be called from a single client thread.

    - *

    - * Consider using #addArchiveEntry, which wraps this method and #submit. The most common use case - * for using #createCallable and #submit from a client is if you want to wrap the callable in something - * that can be prioritized by the supplied #ExecutorService, for instance to process large or slow files first. - * Since the creation of the #ExecutorService is handled by the client, all of this is up to the client. + *

    + * Consider using {@link #addArchiveEntry addArchiveEntry}, which wraps this method and {@link #submit submit}. + * The most common use case for using {@link #createCallable createCallable} and {@link #submit submit} from a + * client is if you want to wrap the callable in something that can be prioritized by the supplied + * {@link ExecutorService}, for instance to process large or slow files first. + * Since the creation of the {@link ExecutorService} is handled by the client, all of this is up to the client. * * @param zipArchiveEntry The entry to add. - * @param source The source input stream supplier - * @return A callable that should subsequently passed to #submit, possibly in a wrapped/adapted from. The - * value of this callable is not used, but any exceptions happening inside the compression - * will be propagated through the callable. + * @param source The source input stream supplier + * @return A callable that should subsequently passed to #submit, possibly in a wrapped/adapted from. The + * value of this callable is not used, but any exceptions happening inside the compression + * will be propagated through the callable. */ public final Callable createCallable(ZipArchiveEntry zipArchiveEntry, InputStreamSupplier source) { @@ -179,13 +180,13 @@ public Object call() throws Exception { /** - * Write the contents this to the target #ZipArchiveOutputStream. + * Write the contents this to the target {@link ZipArchiveOutputStream}. *

    * It may be beneficial to write things like directories and manifest files to the targetStream * before calling this method. *

    * - * @param targetStream The ZipArchiveOutputStream to receive the contents of the scatter streams + * @param targetStream The {@link ZipArchiveOutputStream} to receive the contents of the scatter streams * @throws IOException If writing fails * @throws InterruptedException If we get interrupted * @throws ExecutionException If something happens in the parallel execution diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java index 7c890c096b5..622e0b4a522 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java @@ -41,7 +41,7 @@ * implementation only supports file-based backing. *

    * Thread safety: This class supports multiple threads. But the "writeTo" method must be called - * by the thread that originally created the ZipArchiveEntry. + * by the thread that originally created the {@link ZipArchiveEntry}. * * @since 1.10 */ @@ -64,9 +64,9 @@ public CompressedEntry(ZipArchiveEntryRequest zipArchiveEntryRequest, long crc, } /** - * Update the original ZipArchiveEntry witg sizes/crc + * Update the original {@link ZipArchiveEntry} with sizes/crc * Do not use this methods from threads that did not create the instance itself ! - * @return the zipeArchiveEntry that is basis for this request + * @return the zipArchiveEntry that is basis for this request */ public ZipArchiveEntry transferToArchiveEntry(){ @@ -105,7 +105,7 @@ public void addArchiveEntry(ZipArchiveEntryRequest zipArchiveEntryRequest) throw /** * Write the contents of this scatter stream to a target archive. * - * @param target The archive to receive the contents of this #ScatterZipOutputStream + * @param target The archive to receive the contents of this {@link ScatterZipOutputStream}. * @throws IOException If writing fails */ public void writeTo(ZipArchiveOutputStream target) throws IOException { @@ -129,10 +129,10 @@ public void close() throws IOException { } /** - * Create a ScatterZipOutputStream with default compression level that is backed by a file + * Create a {@link ScatterZipOutputStream} with default compression level that is backed by a file * * @param file The file to offload compressed data into. - * @return A ScatterZipOutputStream that is ready for use. + * @return A ScatterZipOutputStream that is ready for use. 
* @throws FileNotFoundException */ public static ScatterZipOutputStream fileBased(File file) throws FileNotFoundException { @@ -140,7 +140,7 @@ public static ScatterZipOutputStream fileBased(File file) throws FileNotFoundExc } /** - * Create a ScatterZipOutputStream that is backed by a file + * Create a {@link ScatterZipOutputStream} that is backed by a file * * @param file The file to offload compressed data into. * @param compressionLevel The compression level to use, @see #Deflater diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java index 525fe6c634e..8e12e6461a9 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java @@ -28,8 +28,9 @@ import java.util.zip.Deflater; /** - * Encapsulates a Deflater and crc calculator, handling multiple types of output streams. - * Currently #ZipEntry.DEFLATED and #ZipEntry.STORED are the only supported compression methods. + * Encapsulates a {@link Deflater} and crc calculator, handling multiple types of output streams. + * Currently {@link java.util.zip.ZipEntry#DEFLATED} and {@link java.util.zip.ZipEntry#STORED} are the only + * supported compression methods. * * @since 1.10 */ @@ -63,7 +64,7 @@ public abstract class StreamCompressor implements Closeable { /** * Create a stream compressor with the given compression level. * - * @param os The #OutputStream stream to receive output + * @param os The stream to receive output * @param deflater The deflater to use * @return A stream compressor */ @@ -74,7 +75,7 @@ static StreamCompressor create(OutputStream os, Deflater deflater) { /** * Create a stream compressor with the default compression level. 
* - * @param os The #OutputStream stream to receive output + * @param os The stream to receive output * @return A stream compressor */ static StreamCompressor create(OutputStream os) { @@ -84,7 +85,7 @@ static StreamCompressor create(OutputStream os) { /** * Create a stream compressor with the given compression level. * - * @param os The #DataOutput to receive output + * @param os The DataOutput to receive output * @param deflater The deflater to use for the compressor * @return A stream compressor */ @@ -95,8 +96,8 @@ static StreamCompressor create(DataOutput os, Deflater deflater) { /** * Create a stream compressor with the given compression level. * - * @param compressionLevel The #Deflater compression level - * @param bs The #ScatterGatherBackingStore to receive output + * @param compressionLevel The {@link Deflater} compression level + * @param bs The ScatterGatherBackingStore to receive output * @return A stream compressor */ public static StreamCompressor create(int compressionLevel, ScatterGatherBackingStore bs) { @@ -107,7 +108,7 @@ public static StreamCompressor create(int compressionLevel, ScatterGatherBacking /** * Create a stream compressor with the default compression level. * - * @param bs The #ScatterGatherBackingStore to receive output + * @param bs The ScatterGatherBackingStore to receive output * @return A stream compressor */ public static StreamCompressor create(ScatterGatherBackingStore bs) { diff --git a/src/main/java/org/apache/commons/compress/parallel/ScatterGatherBackingStore.java b/src/main/java/org/apache/commons/compress/parallel/ScatterGatherBackingStore.java index ed3450dbfe1..f4f243a7979 100644 --- a/src/main/java/org/apache/commons/compress/parallel/ScatterGatherBackingStore.java +++ b/src/main/java/org/apache/commons/compress/parallel/ScatterGatherBackingStore.java @@ -24,7 +24,7 @@ /** *

    Store intermediate payload in a scatter-gather scenario. * Multiple threads write their payload to a backing store, which can - * subsequently be reversed to an #InputStream to be used as input in the + * subsequently be reversed to an {@link InputStream} to be used as input in the * gather phase.

    * *

    It is the responsibility of the allocator of an instance of this class diff --git a/src/main/java/org/apache/commons/compress/parallel/ScatterGatherBackingStoreSupplier.java b/src/main/java/org/apache/commons/compress/parallel/ScatterGatherBackingStoreSupplier.java index ad0356f3025..4bc33ce3e91 100644 --- a/src/main/java/org/apache/commons/compress/parallel/ScatterGatherBackingStoreSupplier.java +++ b/src/main/java/org/apache/commons/compress/parallel/ScatterGatherBackingStoreSupplier.java @@ -21,7 +21,7 @@ import java.io.IOException; /** - * Supplies ScatterGatherBackingStore instances. + * Supplies {@link ScatterGatherBackingStore} instances. * * @since 1.10 */ diff --git a/src/site/xdoc/examples.xml b/src/site/xdoc/examples.xml index 0e93028d31f..7e44c62bc7c 100644 --- a/src/site/xdoc/examples.xml +++ b/src/site/xdoc/examples.xml @@ -292,7 +292,7 @@ try {

    Creating a zip file with multiple threads:

    - A simple implementation to create a zip file might be like this: + A simple implementation to create a zip file might look like this: public class ScatterSample { From 232b6db3af45188e0757f3565b52af745f86fe61 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sun, 25 Jan 2015 08:59:15 +0000 Subject: [PATCH 099/189] package level javadocs for exceptions package git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1654629 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/exceptions/package-info.java | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 src/main/java/org/apache/commons/compress/exceptions/package-info.java diff --git a/src/main/java/org/apache/commons/compress/exceptions/package-info.java b/src/main/java/org/apache/commons/compress/exceptions/package-info.java new file mode 100644 index 00000000000..ccdcae16f25 --- /dev/null +++ b/src/main/java/org/apache/commons/compress/exceptions/package-info.java @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +/** + * Contains specialized exceptions that might be thrown by Commons Compress. 
+ * @since 1.10 + */ +package org.apache.commons.compress.exceptions; From 1053ec1d658f6c584d6043c325da1b17ec2cba8c Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sun, 25 Jan 2015 10:00:53 +0000 Subject: [PATCH 100/189] expand BitInputStream to allow reading of up to 63 bits at once git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1654638 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/archivers/zip/BitStream.java | 6 +++--- .../archivers/zip/ExplodingInputStream.java | 4 ++-- .../compressors/lzw/LZWInputStream.java | 5 ++++- .../compressors/z/ZCompressorInputStream.java | 6 +++--- .../commons/compress/utils/BitInputStream.java | 18 +++++++++--------- .../compress/utils/BitInputStreamTest.java | 4 ++-- 6 files changed, 23 insertions(+), 20 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/BitStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/BitStream.java index 3064641d304..d40c1ad6655 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/BitStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/BitStream.java @@ -42,7 +42,7 @@ class BitStream extends BitInputStream { * @return The next bit (0 or 1) or -1 if the end of the stream has been reached */ int nextBit() throws IOException { - return readBits(1); + return (int) readBits(1); } /** @@ -51,11 +51,11 @@ int nextBit() throws IOException { * @param n the number of bits read (up to 8) * @return The value formed by the n bits, or -1 if the end of the stream has been reached */ - int nextBits(final int n) throws IOException { + long nextBits(final int n) throws IOException { return readBits(n); } int nextByte() throws IOException { - return readBits(8); + return (int) readBits(8); } } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ExplodingInputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ExplodingInputStream.java index aa9a5ce3951..87d3c525f41 
100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ExplodingInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ExplodingInputStream.java @@ -124,7 +124,7 @@ private void fillBuffer() throws IOException { if (literalTree != null) { literal = literalTree.read(bits); } else { - literal = bits.nextBits(8); + literal = bits.nextByte(); } if (literal == -1) { @@ -137,7 +137,7 @@ private void fillBuffer() throws IOException { } else if (bit == 0) { // back reference int distanceLowSize = dictionarySize == 4096 ? 6 : 7; - int distanceLow = bits.nextBits(distanceLowSize); + int distanceLow = (int) bits.nextBits(distanceLowSize); int distanceHigh = distanceTree.read(bits); if (distanceHigh == -1 && distanceLow <= 0) { // end of stream reached, nothing left to decode diff --git a/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java index df866e5c431..6900b7cfb8d 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java @@ -121,7 +121,10 @@ protected void initializeTables(int maxCodeSize) { * Reads the next code from the stream. 
*/ protected int readNextCode() throws IOException { - return in.readBits(codeSize); + if (codeSize > 31) { + throw new IllegalArgumentException("code size must not be bigger than 31"); + } + return (int) in.readBits(codeSize); } /** diff --git a/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java index fb410528826..1bb65b50818 100644 --- a/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java @@ -40,9 +40,9 @@ public class ZCompressorInputStream extends LZWInputStream { public ZCompressorInputStream(InputStream inputStream) throws IOException { super(inputStream, ByteOrder.LITTLE_ENDIAN); - int firstByte = in.readBits(8); - int secondByte = in.readBits(8); - int thirdByte = in.readBits(8); + int firstByte = (int) in.readBits(8); + int secondByte = (int) in.readBits(8); + int thirdByte = (int) in.readBits(8); if (firstByte != MAGIC_1 || secondByte != MAGIC_2 || thirdByte < 0) { throw new IOException("Input is not in .Z format"); } diff --git a/src/main/java/org/apache/commons/compress/utils/BitInputStream.java b/src/main/java/org/apache/commons/compress/utils/BitInputStream.java index d8988c77856..dd1d9b67553 100644 --- a/src/main/java/org/apache/commons/compress/utils/BitInputStream.java +++ b/src/main/java/org/apache/commons/compress/utils/BitInputStream.java @@ -29,8 +29,8 @@ * @NotThreadSafe */ public class BitInputStream implements Closeable { - private static final int MAXIMUM_CACHE_SIZE = 31; // bits in int minus sign bit - private static final int[] MASKS = new int[MAXIMUM_CACHE_SIZE + 1]; + private static final int MAXIMUM_CACHE_SIZE = 63; // bits in long minus sign bit + private static final long[] MASKS = new long[MAXIMUM_CACHE_SIZE + 1]; static { for (int i = 1; i <= MAXIMUM_CACHE_SIZE; i++) { @@ -40,7 +40,7 @@ public 
class BitInputStream implements Closeable { private final InputStream in; private final ByteOrder byteOrder; - private int bitsCached = 0; + private long bitsCached = 0; private int bitsCachedSize = 0; /** @@ -68,20 +68,20 @@ public void clearBitCache() { } /** - * Returns at most 31 bits read from the underlying stream. + * Returns at most 63 bits read from the underlying stream. * * @param count the number of bits to read, must be a positive - * number not bigger than 31. - * @return the bits concatenated as an integer using the stream's byte order. + * number not bigger than 63. + * @return the bits concatenated as a long using the stream's byte order. * -1 if the end of the underlying stream has been reached before reading * the requested number of bits */ - public int readBits(final int count) throws IOException { + public long readBits(final int count) throws IOException { if (count < 0 || count > MAXIMUM_CACHE_SIZE) { throw new IllegalArgumentException("count must not be negative or greater than " + MAXIMUM_CACHE_SIZE); } while (bitsCachedSize < count) { - final int nextByte = in.read(); + final long nextByte = in.read(); if (nextByte < 0) { return nextByte; } @@ -94,7 +94,7 @@ public int readBits(final int count) throws IOException { bitsCachedSize += 8; } - final int bitsOut; + final long bitsOut; if (byteOrder == ByteOrder.LITTLE_ENDIAN) { bitsOut = (bitsCached & MASKS[count]); bitsCached >>>= count; diff --git a/src/test/java/org/apache/commons/compress/utils/BitInputStreamTest.java b/src/test/java/org/apache/commons/compress/utils/BitInputStreamTest.java index ed146cfd2eb..9c1eeca887a 100644 --- a/src/test/java/org/apache/commons/compress/utils/BitInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/utils/BitInputStreamTest.java @@ -35,9 +35,9 @@ public void shouldNotAllowReadingOfANegativeAmountOfBits() throws IOException { } @Test(expected = IllegalArgumentException.class) - public void shouldNotAllowReadingOfMoreThan31BitsAtATime() 
throws IOException { + public void shouldNotAllowReadingOfMoreThan63BitsAtATime() throws IOException { BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN); - bis.readBits(32); + bis.readBits(64); } @Test From 1a97010c3aa00daed602655f8cd08a9c7b42df7d Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sun, 25 Jan 2015 11:29:06 +0000 Subject: [PATCH 101/189] don't try to compile package-info.java - Thanks to sebb git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1654646 13f79535-47bb-0310-9956-ffa450edef68 --- pom.xml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/pom.xml b/pom.xml index 7c4402dd1ab..a25248eeb18 100644 --- a/pom.xml +++ b/pom.xml @@ -200,6 +200,22 @@ Unix Compress, DEFLATE and ar, cpio, jar, tar, zip, dump, 7z, arj. + + org.apache.maven.plugins + maven-compiler-plugin + + ${maven.compiler.source} + ${maven.compiler.target} + ${commons.encoding} + ${commons.compiler.fork} + ${commons.compiler.compilerVersion} + ${commons.compiler.javac} + + + **/package-info.java + + + From e8aa2bd3dea2e6d185628f54bf8363a4712b57fb Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Sun, 25 Jan 2015 11:42:05 +0000 Subject: [PATCH 102/189] Added package info, updated "What's new" section for 1.10 git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1654647 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress/parallel/package.html | 23 +++++++++++++++++++ src/site/xdoc/index.xml | 14 ++++------- 2 files changed, 28 insertions(+), 9 deletions(-) create mode 100644 src/main/java/org/apache/commons/compress/parallel/package.html diff --git a/src/main/java/org/apache/commons/compress/parallel/package.html b/src/main/java/org/apache/commons/compress/parallel/package.html new file mode 100644 index 00000000000..3517bc57526 --- /dev/null +++ b/src/main/java/org/apache/commons/compress/parallel/package.html @@ -0,0 +1,23 @@ + + + +

    Provides common API classes for parallel compression features.

    + + diff --git a/src/site/xdoc/index.xml b/src/site/xdoc/index.xml index 5f8b6964937..6a0f2ece0c7 100644 --- a/src/site/xdoc/index.xml +++ b/src/site/xdoc/index.xml @@ -51,18 +51,13 @@
    -

    The current release is 1.9 and requires Java 5.

    +

    The current release is 1.10 and requires Java 5.

    Below we highlight some new features, for a full list of changes see the Changes Report.

    - -
      -
    • support for raw DEFLATE streams
    • -
    -
    - +
    • the old org.apache.commons.compress.compressors.z._internal_ @@ -70,8 +65,9 @@ org.apache.commons.compress.compressors.lzw and the code is now an official part of Commons Compress' API.
    • -
    • A new class in the zip package can create archives - while compressing different entries in parallel.
    • +
    • Added support for parallel ZIP compression.
    • +
    • Added support for raw transfer of entries from one ZIP file to another without uncompress/compress.
    • +
    • Performance improvements for creating ZIP files with lots of small entries.
    From 2e5fdff37e7c2b351173567cc849df92a1cd8607 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Mon, 26 Jan 2015 05:03:00 +0000 Subject: [PATCH 103/189] Move PasswordRequiredException git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1654720 13f79535-47bb-0310-9956-ffa450edef68 --- .../PasswordRequiredException.java | 2 +- .../archivers/sevenz/AES256SHA256Decoder.java | 2 +- .../compress/exceptions/package-info.java | 23 ------------------- .../archivers/sevenz/SevenZFileTest.java | 2 +- 4 files changed, 3 insertions(+), 26 deletions(-) rename src/main/java/org/apache/commons/compress/{exceptions => }/PasswordRequiredException.java (96%) delete mode 100644 src/main/java/org/apache/commons/compress/exceptions/package-info.java diff --git a/src/main/java/org/apache/commons/compress/exceptions/PasswordRequiredException.java b/src/main/java/org/apache/commons/compress/PasswordRequiredException.java similarity index 96% rename from src/main/java/org/apache/commons/compress/exceptions/PasswordRequiredException.java rename to src/main/java/org/apache/commons/compress/PasswordRequiredException.java index 8198b0be871..6532a3a959b 100644 --- a/src/main/java/org/apache/commons/compress/exceptions/PasswordRequiredException.java +++ b/src/main/java/org/apache/commons/compress/PasswordRequiredException.java @@ -15,7 +15,7 @@ * limitations under the License. 
* */ -package org.apache.commons.compress.exceptions; +package org.apache.commons.compress; import java.io.IOException; diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java index ba8e9a67fac..e2ed5c24735 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java @@ -27,7 +27,7 @@ import javax.crypto.SecretKey; import javax.crypto.spec.IvParameterSpec; import javax.crypto.spec.SecretKeySpec; -import org.apache.commons.compress.exceptions.PasswordRequiredException; +import org.apache.commons.compress.PasswordRequiredException; class AES256SHA256Decoder extends CoderBase { @Override diff --git a/src/main/java/org/apache/commons/compress/exceptions/package-info.java b/src/main/java/org/apache/commons/compress/exceptions/package-info.java deleted file mode 100644 index ccdcae16f25..00000000000 --- a/src/main/java/org/apache/commons/compress/exceptions/package-info.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -/** - * Contains specialized exceptions that might be thrown by Commons Compress. - * @since 1.10 - */ -package org.apache.commons.compress.exceptions; diff --git a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java index 5af1d86b724..6fbb3e3a29f 100644 --- a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java @@ -23,7 +23,7 @@ import java.util.Arrays; import javax.crypto.Cipher; import org.apache.commons.compress.AbstractTestCase; -import org.apache.commons.compress.exceptions.PasswordRequiredException; +import org.apache.commons.compress.PasswordRequiredException; public class SevenZFileTest extends AbstractTestCase { private static final String TEST2_CONTENT = "\r\n Date: Mon, 26 Jan 2015 05:04:03 +0000 Subject: [PATCH 104/189] no need for special compiler configuration anymore git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1654721 13f79535-47bb-0310-9956-ffa450edef68 --- pom.xml | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/pom.xml b/pom.xml index a25248eeb18..7c4402dd1ab 100644 --- a/pom.xml +++ b/pom.xml @@ -200,22 +200,6 @@ Unix Compress, DEFLATE and ar, cpio, jar, tar, zip, dump, 7z, arj. 
- - org.apache.maven.plugins - maven-compiler-plugin - - ${maven.compiler.source} - ${maven.compiler.target} - ${commons.encoding} - ${commons.compiler.fork} - ${commons.compiler.compilerVersion} - ${commons.compiler.javac} - - - **/package-info.java - - - From ec075149f2fb7cc7cbba9f685ad38d3c816e6ccd Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Mon, 26 Jan 2015 05:12:09 +0000 Subject: [PATCH 105/189] update release notes git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1654722 13f79535-47bb-0310-9956-ffa450edef68 --- RELEASE-NOTES.txt | 69 ++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 60 insertions(+), 9 deletions(-) diff --git a/RELEASE-NOTES.txt b/RELEASE-NOTES.txt index b62a41010e8..f990a7a0d18 100644 --- a/RELEASE-NOTES.txt +++ b/RELEASE-NOTES.txt @@ -5,6 +5,66 @@ compression and archive formats. These include: bzip2, gzip, pack200, lzma, xz, Snappy, traditional Unix Compress, DEFLATE and ar, cpio, jar, tar, zip, dump, 7z, arj. +Release 1.10 +------------ + +Release 1.10 moves the former +org.apache.commons.compress.compressors.z._internal_ package which +breaks backwards compatibility for code which used the old package. + +New features: +o Added support for parallel compression. This low-level API allows + a client to build a zip/jar file by using the class + org.apache.commons.compress.archivers.zip.ParallelScatterZipCreator. + + Zip documentation updated with further notes about parallel features. + + Please note that some aspects of jar creation need to be + handled by client code and is not part of commons-compress for this + release. + Issue: COMPRESS-296. Thanks to Kristian Rosenvold. +o Cut overall object instantiation in half by changing file + header generation algorithm, for a 10-15 percent performance + improvement. + + Also extracted two private methods createLocalFileHeader + and createCentralFileHeader in ZipArchiveOutputStream. 
+ These may have some interesting additional usages in the + near future. Thanks to Kristian Rosenvold. +o New methods in ZipArchiveOutputStream and ZipFile allows + entries to be copied from one archive to another without + having to re-compress them. + Issue: COMPRESS-295. Thanks to Kristian Rosenvold. + +Fixed Bugs: +o ZipFile logs a warning in its finalizer when its constructor + has thrown an exception reading the file - for example if the + file doesn't exist. + Issue: COMPRESS-297. +o Improved error message when tar encounters a groupId that is + too big to write without using the STAR or POSIX format. + Issue: COMPRESS-290. Thanks to Kristian Rosenvold. +o SevenZFile now throws the specific PasswordRequiredException + when it encounters an encrypted stream but no password has + been specified. + Issue: COMPRESS-298. + +Changes: +o Moved the package + org.apache.commons.compress.compressors.z._internal_ to + org.apache.commons.compress.compressors.lzw and made it part + of the API that is officially supported. This will break + existing code that uses the old package. Thanks to Damjan Jovanovic. + +For complete information on Apache Commons Compress, including instructions +on how to submit bug reports, patches, or suggestions for improvement, +see the Apache Commons Compress website: + +http://commons.apache.org/compress/ + +Old Release Notes +================= + Release 1.9 ----------- @@ -32,15 +92,6 @@ o Checking for XZ for Java may be expensive. The result will now be XZUtils#setCacheXZAvailability to overrride this default behavior. Issue: COMPRESS-285. 
-For complete information on Apache Commons Compress, including instructions -on how to submit bug reports, patches, or suggestions for improvement, -see the Apache Commons Compress website: - -http://commons.apache.org/compress/ - -Old Release Notes -================= - Release 1.8.1 ------------- From 8d7288d02d77f67662bf8feebeea5d8ec9f6e82d Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Mon, 26 Jan 2015 21:52:27 +0000 Subject: [PATCH 106/189] Avoid 1:1 binary comparison because timestamps will occasionaly vary git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1654901 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/archivers/ZipTestCase.java | 41 +++++++++++++++---- 1 file changed, 32 insertions(+), 9 deletions(-) diff --git a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java index 2f3bf0d2637..7b7187365e0 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java +++ b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java @@ -26,6 +26,7 @@ import java.io.InputStream; import java.io.OutputStream; import java.util.ArrayList; +import java.util.Enumeration; import java.util.List; import java.util.zip.ZipEntry; @@ -403,15 +404,37 @@ private ZipArchiveOutputStream createSecondEntry(ZipArchiveOutputStream zos) thr private void assertSameFileContents(File expectedFile, File actualFile) throws IOException { int size = (int) Math.max(expectedFile.length(), actualFile.length()); - byte[] expected = new byte[size]; - byte[] actual = new byte[size]; - final FileInputStream expectedIs = new FileInputStream(expectedFile); - final FileInputStream actualIs = new FileInputStream(actualFile); - IOUtils.readFully(expectedIs, expected); - IOUtils.readFully(actualIs, actual); - expectedIs.close(); - actualIs.close(); - Assert.assertArrayEquals(expected, actual); + ZipFile expected = new ZipFile(expectedFile); + ZipFile 
actual = new ZipFile(actualFile); + byte[] expectedBuf = new byte[size]; + byte[] actualBuf = new byte[size]; + + Enumeration actualInOrder = actual.getEntriesInPhysicalOrder(); + Enumeration expectedInOrder = expected.getEntriesInPhysicalOrder(); + + while (actualInOrder.hasMoreElements()){ + ZipArchiveEntry actualElement = actualInOrder.nextElement(); + ZipArchiveEntry expectedElement = expectedInOrder.nextElement(); + assertEquals( expectedElement.getName(), actualElement.getName()); + assertEquals( expectedElement.getMethod(), actualElement.getMethod()); + assertEquals( expectedElement.getGeneralPurposeBit(), actualElement.getGeneralPurposeBit()); + assertEquals( expectedElement.getCrc(), actualElement.getCrc()); + assertEquals( expectedElement.getCompressedSize(), actualElement.getCompressedSize()); + assertEquals( expectedElement.getSize(), actualElement.getSize()); + assertEquals( expectedElement.getExternalAttributes(), actualElement.getExternalAttributes()); + assertEquals( expectedElement.getInternalAttributes(), actualElement.getInternalAttributes()); + + InputStream actualIs = actual.getInputStream(actualElement); + InputStream expectedIs = expected.getInputStream(expectedElement); + IOUtils.readFully(expectedIs, expectedBuf); + IOUtils.readFully(actualIs, actualBuf); + expectedIs.close(); + actualIs.close(); + Assert.assertArrayEquals(expectedBuf, actualBuf); // Buffers are larger than payload. 
dont care + } + + expected.close(); + actual.close(); } From 2f72bf1f3355aa5986c484535d92c4fe12ead0a4 Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Tue, 27 Jan 2015 06:33:30 +0000 Subject: [PATCH 107/189] Added a small comment about not testing timestamp in zip git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1654980 13f79535-47bb-0310-9956-ffa450edef68 --- .../java/org/apache/commons/compress/archivers/ZipTestCase.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java index 7b7187365e0..f8826a8e261 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java +++ b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java @@ -416,6 +416,8 @@ private void assertSameFileContents(File expectedFile, File actualFile) throws I ZipArchiveEntry actualElement = actualInOrder.nextElement(); ZipArchiveEntry expectedElement = expectedInOrder.nextElement(); assertEquals( expectedElement.getName(), actualElement.getName()); + // Don't compare timestamps since they may vary; + // there's no support for stubbed out clock (TimeSource) in ZipArchiveOutputStream assertEquals( expectedElement.getMethod(), actualElement.getMethod()); assertEquals( expectedElement.getGeneralPurposeBit(), actualElement.getGeneralPurposeBit()); assertEquals( expectedElement.getCrc(), actualElement.getCrc()); From ecd20dafe8071db0280f25eda7e95f480a69347d Mon Sep 17 00:00:00 2001 From: Kristian Rosenvold Date: Wed, 28 Jan 2015 20:18:41 +0000 Subject: [PATCH 108/189] Fixed javadoc for jdk8, again :) git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1655453 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/archivers/zip/ParallelScatterZipCreator.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java index e058de7c355..c6fe9f1d726 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java @@ -148,9 +148,9 @@ public final void submit(Callable callable) { /** * Create a callable that will compress the given archive entry. - *

    + * *

    This method is expected to be called from a single client thread.

    - *

    + * * Consider using {@link #addArchiveEntry addArchiveEntry}, which wraps this method and {@link #submit submit}. * The most common use case for using {@link #createCallable createCallable} and {@link #submit submit} from a * client is if you want to wrap the callable in something that can be prioritized by the supplied From bdd4bd6095ae7922f6c91302f4a912d51756cb2f Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 30 Jan 2015 08:14:03 +0000 Subject: [PATCH 109/189] typos git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1655954 13f79535-47bb-0310-9956-ffa450edef68 --- RELEASE-NOTES.txt | 2 +- src/site/xdoc/index.xml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/RELEASE-NOTES.txt b/RELEASE-NOTES.txt index f990a7a0d18..18195763374 100644 --- a/RELEASE-NOTES.txt +++ b/RELEASE-NOTES.txt @@ -1,4 +1,4 @@ - Apache Apache Commons Compress RELEASE NOTES + Apache Commons Compress RELEASE NOTES Apache Commons Compress software defines an API for working with compression and archive formats. These include: bzip2, gzip, pack200, diff --git a/src/site/xdoc/index.xml b/src/site/xdoc/index.xml index 6a0f2ece0c7..f738f763497 100644 --- a/src/site/xdoc/index.xml +++ b/src/site/xdoc/index.xml @@ -59,7 +59,7 @@

      -
    • the old +
    • The old org.apache.commons.compress.compressors.z._internal_ now is org.apache.commons.compress.compressors.lzw From 641e980276f90bae8ab196e0b5bdf3e863ba1ee1 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 30 Jan 2015 08:14:32 +0000 Subject: [PATCH 110/189] remove file that no longer seems relevant git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1655955 13f79535-47bb-0310-9956-ffa450edef68 --- PROPOSAL.txt | 54 ---------------------------------------------------- 1 file changed, 54 deletions(-) delete mode 100644 PROPOSAL.txt diff --git a/PROPOSAL.txt b/PROPOSAL.txt deleted file mode 100644 index e870351b378..00000000000 --- a/PROPOSAL.txt +++ /dev/null @@ -1,54 +0,0 @@ -Proposal for Compress Package - -(0) rationale - -Ant contains packages to deal with tar, bzip2 and zip archives that -have been reused in various projects in- and outside of Apache. - -Compress provides a focused home for these packages together with -new external contributions for the ar and cpio formats with a focus on -being a reusable library - unlike the code in Ant. - -(1) scope of the package - -The package provides a unified API for different archiving and -compression formats together with implementations for these -algorithms. - -(1.5) interaction with other packages - -No interactions planned ATM. - -(2) identify the initial source for the package - -Compress' code is already part of the sandbox. - -(2.1) identify the base name for the package - -org.apache.commons.compress - -(2.2) identify the coding conventions for this package - -The code uses the conventions from the Jakarta Turbine package. - -(3) identify any Commons resources to be created - -(3.1) mailing list - -No changes. - -(3.2) SVN repositories - -The package will need to move from the sandbox. - -(3.3) JIRA - -A new Commons Compress JIRA project shall be created and the issues of -the Compress Component of the Commons Sandbox project migrated to the -new project. 
- -(4) identify the initial set of committers to be listed in the Status File. - -Henri Yandell -Stefan Bodewig -Torsten Curdt From c100d2ee7eaef66e1b176b5c6a4dfea3711b21dc Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 30 Jan 2015 08:24:54 +0000 Subject: [PATCH 111/189] stronger backwards incompatibility warning git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1655956 13f79535-47bb-0310-9956-ffa450edef68 --- RELEASE-NOTES.txt | 5 +++++ src/site/xdoc/index.xml | 5 ++++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/RELEASE-NOTES.txt b/RELEASE-NOTES.txt index 18195763374..fe4a1177a39 100644 --- a/RELEASE-NOTES.txt +++ b/RELEASE-NOTES.txt @@ -12,6 +12,11 @@ Release 1.10 moves the former org.apache.commons.compress.compressors.z._internal_ package which breaks backwards compatibility for code which used the old package. +This also changes the superclass of ZCompressorInputStream which makes +this class binary incompatible with the one of Compress 1.9. Code +that extends ZCompressorInputStream will need to be recompiled in +order to work with Compress 1.10. + New features: o Added support for parallel compression. This low-level API allows a client to build a zip/jar file by using the class diff --git a/src/site/xdoc/index.xml b/src/site/xdoc/index.xml index f738f763497..0c94448bb2d 100644 --- a/src/site/xdoc/index.xml +++ b/src/site/xdoc/index.xml @@ -64,7 +64,10 @@ now is org.apache.commons.compress.compressors.lzw and the code is now an official part of Commons - Compress' API.
    • + Compress' API. This change also causes + ZCompressorInputStream of 1.10 to be binary + incompatible with the one of 1.9 and code that extends + this class will need to be recompiled.
    • Added support for parallel ZIP compression.
    • Added support for raw transfer of entries from one ZIP file to another without uncompress/compress.
    • Performance improvements for creating ZIP files with lots of small entries.
    • From b9443c28141fb17d5835e277cbc97726ff5af577 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Sun, 1 Feb 2015 19:39:42 +0000 Subject: [PATCH 112/189] Avoid unclosed stream warnings git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1656353 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress/utils/BitInputStreamTest.java | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/src/test/java/org/apache/commons/compress/utils/BitInputStreamTest.java b/src/test/java/org/apache/commons/compress/utils/BitInputStreamTest.java index 9c1eeca887a..793d6fa9e91 100644 --- a/src/test/java/org/apache/commons/compress/utils/BitInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/utils/BitInputStreamTest.java @@ -32,30 +32,35 @@ public class BitInputStreamTest { public void shouldNotAllowReadingOfANegativeAmountOfBits() throws IOException { BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN); bis.readBits(-1); + bis.close(); } @Test(expected = IllegalArgumentException.class) public void shouldNotAllowReadingOfMoreThan63BitsAtATime() throws IOException { BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN); bis.readBits(64); + bis.close(); } @Test public void testReading24BitsInLittleEndian() throws IOException { BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN); assertEquals(0x000140f8, bis.readBits(24)); + bis.close(); } @Test public void testReading24BitsInBigEndian() throws IOException { BitInputStream bis = new BitInputStream(getStream(), ByteOrder.BIG_ENDIAN); assertEquals(0x00f84001, bis.readBits(24)); + bis.close(); } @Test public void testReading17BitsInLittleEndian() throws IOException { BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN); assertEquals(0x000140f8, bis.readBits(17)); + bis.close(); } @Test @@ -63,12 +68,14 @@ public void testReading17BitsInBigEndian() throws IOException { 
BitInputStream bis = new BitInputStream(getStream(), ByteOrder.BIG_ENDIAN); // 1-11110000-10000000 assertEquals(0x0001f080, bis.readBits(17)); + bis.close(); } @Test public void testReading30BitsInLittleEndian() throws IOException { BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN); assertEquals(0x2f0140f8, bis.readBits(30)); + bis.close(); } @Test @@ -76,12 +83,14 @@ public void testReading30BitsInBigEndian() throws IOException { BitInputStream bis = new BitInputStream(getStream(), ByteOrder.BIG_ENDIAN); // 111110-00010000-00000000-01001011 assertEquals(0x3e10004b, bis.readBits(30)); + bis.close(); } @Test public void testReading31BitsInLittleEndian() throws IOException { BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN); assertEquals(0x2f0140f8, bis.readBits(31)); + bis.close(); } @Test @@ -89,6 +98,7 @@ public void testReading31BitsInBigEndian() throws IOException { BitInputStream bis = new BitInputStream(getStream(), ByteOrder.BIG_ENDIAN); // 1111100-00100000-00000000-10010111 assertEquals(0x7c200097, bis.readBits(31)); + bis.close(); } @Test @@ -97,6 +107,7 @@ public void testClearBitCache() throws IOException { assertEquals(0x08, bis.readBits(4)); bis.clearBitCache(); assertEquals(0, bis.readBits(1)); + bis.close(); } @Test @@ -104,6 +115,7 @@ public void testEOF() throws IOException { BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN); assertEquals(0x2f0140f8, bis.readBits(30)); assertEquals(-1, bis.readBits(3)); + bis.close(); } private ByteArrayInputStream getStream() { From d9db37dc7a467b05b9cfef232b77dd742455eb86 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Sun, 1 Feb 2015 19:40:43 +0000 Subject: [PATCH 113/189] Avoid unclosed stream warnings git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1656354 13f79535-47bb-0310-9956-ffa450edef68 --- .../apache/commons/compress/archivers/zip/BitStreamTest.java | 5 +++++ 1 file changed, 5 
insertions(+) diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/BitStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/BitStreamTest.java index 1b240546680..0aad22637f8 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/BitStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/BitStreamTest.java @@ -30,6 +30,7 @@ public void testEmptyStream() throws Exception { assertEquals("next bit", -1, stream.nextBit()); assertEquals("next bit", -1, stream.nextBit()); assertEquals("next bit", -1, stream.nextBit()); + stream.close(); } public void testStream() throws Exception { @@ -54,12 +55,14 @@ public void testStream() throws Exception { assertEquals("bit 15", 0, stream.nextBit()); assertEquals("next bit", -1, stream.nextBit()); + stream.close(); } public void testNextByteFromEmptyStream() throws Exception { BitStream stream = new BitStream(new ByteArrayInputStream(new byte[0])); assertEquals("next byte", -1, stream.nextByte()); assertEquals("next byte", -1, stream.nextByte()); + stream.close(); } public void testReadAlignedBytes() throws Exception { @@ -67,6 +70,7 @@ public void testReadAlignedBytes() throws Exception { assertEquals("next byte", 0xEA, stream.nextByte()); assertEquals("next byte", 0x35, stream.nextByte()); assertEquals("next byte", -1, stream.nextByte()); + stream.close(); } public void testNextByte() throws Exception { @@ -78,5 +82,6 @@ public void testNextByte() throws Exception { assertEquals("next byte", 0x5E, stream.nextByte()); assertEquals("next byte", -1, stream.nextByte()); // not enough bits left to read a byte + stream.close(); } } From b48bd0f57a1419c3bd4a048a53fb08d151e00490 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Sun, 1 Feb 2015 19:41:10 +0000 Subject: [PATCH 114/189] Unused git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1656355 13f79535-47bb-0310-9956-ffa450edef68 --- 
.../apache/commons/compress/archivers/sevenz/SevenZFileTest.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java index 6fbb3e3a29f..177e9a594f7 100644 --- a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java @@ -18,7 +18,6 @@ package org.apache.commons.compress.archivers.sevenz; import java.io.File; -import java.io.IOException; import java.security.NoSuchAlgorithmException; import java.util.Arrays; import javax.crypto.Cipher; From 26705dfdfa4a8d6b6ae316ff159fb89576677e18 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Sun, 1 Feb 2015 19:41:56 +0000 Subject: [PATCH 115/189] Avoid unclosed stream warnings git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1656356 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/archivers/zip/ScatterZipOutputStreamTest.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java index 092b8bd7092..6ac094ba2cb 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStreamTest.java @@ -73,6 +73,7 @@ public void putArchiveEntry() throws Exception { final ZipArchiveEntry a_entry = zf.getEntries("a.txt").iterator().next(); assertEquals(4, a_entry.getSize()); assertArrayEquals(A_PAYLOAD, IOUtils.toByteArray(zf.getInputStream(a_entry))); + zf.close(); } private InputStreamSupplier createPayloadSupplier(final ByteArrayInputStream payload) { From 63e5a8355fba6dd5ab504ac440bb81699c89f85b Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Sun, 1 Feb 
2015 19:44:43 +0000 Subject: [PATCH 116/189] Generics git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1656357 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/archivers/zip/ParallelScatterZipCreator.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java index c6fe9f1d726..198bed13300 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java @@ -55,7 +55,7 @@ public class ParallelScatterZipCreator { private final List streams = synchronizedList(new ArrayList()); private final ExecutorService es; private final ScatterGatherBackingStoreSupplier backingStoreSupplier; - private final List futures = new ArrayList(); + private final List> futures = new ArrayList>(); private final long startedAt = System.currentTimeMillis(); private long compressionDoneAt = 0; @@ -195,7 +195,7 @@ public void writeTo(ZipArchiveOutputStream targetStream) throws IOException, InterruptedException, ExecutionException { // Make sure we catch any exceptions from parallel phase - for (Future future : futures) { + for (Future future : futures) { future.get(); } From 01468f09b9156554e1ff7ca6136c0241e69da27d Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Sun, 1 Feb 2015 19:46:19 +0000 Subject: [PATCH 117/189] Missing serialversionUID git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1656358 13f79535-47bb-0310-9956-ffa450edef68 --- .../org/apache/commons/compress/PasswordRequiredException.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/org/apache/commons/compress/PasswordRequiredException.java b/src/main/java/org/apache/commons/compress/PasswordRequiredException.java index 
6532a3a959b..2be6f2208b4 100644 --- a/src/main/java/org/apache/commons/compress/PasswordRequiredException.java +++ b/src/main/java/org/apache/commons/compress/PasswordRequiredException.java @@ -26,6 +26,8 @@ */ public class PasswordRequiredException extends IOException { + private static final long serialVersionUID = 1391070005491684483L; + /** * Create a new exception. * From 59ebc0737de7ef61fe9d7edf39cc2c4490cfc865 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Mon, 2 Feb 2015 05:07:50 +0000 Subject: [PATCH 118/189] experiments show binary compatibiity isn't hurt as badly git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1656387 13f79535-47bb-0310-9956-ffa450edef68 --- RELEASE-NOTES.txt | 6 +----- src/site/xdoc/index.xml | 5 +---- 2 files changed, 2 insertions(+), 9 deletions(-) diff --git a/RELEASE-NOTES.txt b/RELEASE-NOTES.txt index fe4a1177a39..92fbb82b44a 100644 --- a/RELEASE-NOTES.txt +++ b/RELEASE-NOTES.txt @@ -11,11 +11,7 @@ Release 1.10 Release 1.10 moves the former org.apache.commons.compress.compressors.z._internal_ package which breaks backwards compatibility for code which used the old package. - -This also changes the superclass of ZCompressorInputStream which makes -this class binary incompatible with the one of Compress 1.9. Code -that extends ZCompressorInputStream will need to be recompiled in -order to work with Compress 1.10. +This also changes the superclass of ZCompressorInputStream. New features: o Added support for parallel compression. This low-level API allows diff --git a/src/site/xdoc/index.xml b/src/site/xdoc/index.xml index 0c94448bb2d..f738f763497 100644 --- a/src/site/xdoc/index.xml +++ b/src/site/xdoc/index.xml @@ -64,10 +64,7 @@ now is org.apache.commons.compress.compressors.lzw and the code is now an official part of Commons - Compress' API. 
This change also causes - ZCompressorInputStream of 1.10 to be binary - incompatible with the one of 1.9 and code that extends - this class will need to be recompiled. + Compress' API.
    • Added support for parallel ZIP compression.
    • Added support for raw transfer of entries from one ZIP file to another without uncompress/compress.
    • Performance improvements for creating ZIP files with lots of small entries.
    • From 697f2b0d8461363983079c7430745ec26396fba7 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Wed, 4 Feb 2015 17:55:56 +0000 Subject: [PATCH 119/189] Drop Apache now it is in project.name Add historic changes git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1657345 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/release-notes.vm | 532 ++++++++++++++++++++++++++++++++++- 1 file changed, 530 insertions(+), 2 deletions(-) diff --git a/src/changes/release-notes.vm b/src/changes/release-notes.vm index 6137b081dd4..219f56a3772 100644 --- a/src/changes/release-notes.vm +++ b/src/changes/release-notes.vm @@ -14,7 +14,7 @@ ## KIND, either express or implied. See the License for the ## specific language governing permissions and limitations ## under the License. - Apache ${project.name} ${version} RELEASE NOTES + ${project.name} RELEASE NOTES $introduction.replaceAll("(?= 1.5 of XZ for Java to read archives + using BCJ, though. + Issue: COMPRESS-257. + +Fixed Bugs: +o BZip2CompressorInputStream read fewer bytes than possible from + a truncated stream. + Issue: COMPRESS-253. +o SevenZFile failed claiming the dictionary was too large when + archives used LZMA compression for headers and content and + certain non-default dictionary sizes. + Issue: COMPRESS-253. +o CompressorStreamFactory.createCompressorInputStream with + explicit compression did not honor decompressConcatenated + Issue: COMPRESS-259. +o TarArchiveInputStream will now read archives created by tar + implementations that encode big numbers by not adding a + trailing NUL. + Issue: COMPRESS-262. +o ZipArchiveInputStream would return NUL bytes for the first 512 + bytes of a STORED entry if it was the very first entry of the + archive. + Issue: COMPRESS-264. +o When writing PAX/POSIX headers for TAR entries with + backslashes or certain non-ASCII characters in their name + TarArchiveOutputStream could fail. + Issue: COMPRESS-265. 
+o ArchiveStreamFactory now throws a StreamingNotSupported - a + new subclass of ArchiveException - if it is asked to read from + or write to a stream and Commons Compress doesn't support + streaming for the format. This currently only applies to the + 7z format. + Issue: COMPRESS-267. + +Release 1.7 +----------- + +New features: +o Read-Only support for Snappy compression. + Issue: COMPRESS-147. Thanks to BELUGA BEHR. +o Read-Only support for .Z compressed files. + Issue: COMPRESS-243. Thanks to Damjan Jovanovic. +o ZipFile and ZipArchiveInputStream now support reading entries + compressed using the SHRINKING method. Thanks to Damjan Jovanovic. +o GzipCompressorOutputStream now supports setting the compression + level and the header metadata (filename, comment, modification time, + operating system and extra flags) + Issue: COMPRESS-250. Thanks to Emmanuel Bourg. +o ZipFile and ZipArchiveInputStream now support reading entries + compressed using the IMPLODE method. + Issue: COMPRESS-115. Thanks to Emmanuel Bourg. +o ZipFile and the 7z file classes now implement Closeable and can be + used in try-with-resources constructs. + +Fixed Bugs: +o SevenZOutputFile#closeArchiveEntry throws an exception when using + LZMA2 compression on Java8. Issue: COMPRESS-241. +o 7z reading of big 64bit values could be wrong. + Issue: COMPRESS-244. Thanks to Nico Kruber. +o TarArchiveInputStream could fail to read an archive completely. + Issue: COMPRESS-245. +o The time-setters in X5455_ExtendedTimestamp now set the + corresponding flags explicitly - i.e. they set the bit if the valus + is not-null and reset it otherwise. This may cause + incompatibilities if you use setFlags to unset a bit and later set + the time to a non-null value - the flag will now be set. + Issue: COMPRESS-242. +o SevenZOutputFile would create invalid archives if more than six + empty files or directories were included. Issue: COMPRESS-252. 
+ +Release 1.6 +----------- + +Version 1.6 introduces changes to the internal API of the tar package that +break backwards compatibility in the following rare cases. This version +removes the package private TarBuffer class along with the protected "buffer" +members in TarArchiveInputStream and TarArchiveOutputStream. This change will +only affect you if you have created a subclass of one of the stream classes +and accessed the buffer member or directly used the TarBuffer class. + +Changes in this version include: + +New features: +o Added support for 7z archives. Most compression algorithms + can be read and written, LZMA and encryption are only + supported when reading. Issue: COMPRESS-54. Thanks to Damjan Jovanovic. +o Added read-only support for ARJ archives that don't use + compression. Issue: COMPRESS-226. Thanks to Damjan Jovanovic. +o DumpArchiveInputStream now supports an encoding parameter that + can be used to specify the encoding of file names. +o The CPIO streams now support an encoding parameter that can be + used to specify the encoding of file names. +o Read-only support for LZMA standalone compression has been added. + Issue: COMPRESS-111. + +Fixed Bugs: +o TarBuffer.tryToConsumeSecondEOFRecord could throw a + NullPointerException Issue: COMPRESS-223. Thanks to Jeremy Gustie. +o Parsing of zip64 extra fields has become more lenient in order + to be able to read archives created by DotNetZip and maybe + other archivers as well. Issue: COMPRESS-228. +o TAR will now properly read the names of symbolic links with + long names that use the GNU variant to specify the long file + name. Issue: COMPRESS-229. Thanks to Christoph Gysin. +o ZipFile#getInputStream could return null if the archive + contained duplicate entries. + The class now also provides two new methods to obtain all + entries of a given name rather than just the first one. + Issue: COMPRESS-227. +o CpioArchiveInputStream failed to read archives created by + Redline RPM. Issue: COMPRESS-236. 
Thanks to Andrew Duffy. +o TarArchiveOutputStream now properly handles link names that + are too long to fit into a traditional TAR header. Issue: + COMPRESS-237. Thanks to Emmanuel Bourg. +o The auto-detecting create*InputStream methods of Archive and + CompressorStreamFactory could fail to detect the format of + blocking input streams. Issue: COMPRESS-239. + +Changes: +o Readabilty patch to TarArchiveInputStream. Issue: + COMPRESS-232. Thanks to BELUGA BEHR. +o Performance improvements to TarArchiveInputStream, in + particular to the skip method. Issue: COMPRESS-234. Thanks to + BELUGA BEHR. + +Release 1.5 +----------- + +New features: + +o CompressorStreamFactory has an option to create decompressing + streams that decompress the full input for formats that support + multiple concatenated streams. + Issue: COMPRESS-220. + +Fixed Bugs: + +o Typo in CompressorStreamFactory Javadoc + Issue: COMPRESS-218. + Thanks to Gili. +o ArchiveStreamFactory's tar stream detection created false positives + for AIFF files. + Issue: COMPRESS-191. + Thanks to Jukka Zitting. +o XZ for Java didn't provide an OSGi bundle. Compress' dependency on + it has now been marked optional so Compress itself can still be used + in an OSGi context. + Issue: COMPRESS-199. + Thanks to Jukka Zitting. +o When specifying the encoding explicitly TarArchiveOutputStream would + write unreadable names in GNU mode or even cause errors in POSIX + mode for file names longer than 66 characters. + Issue: COMPRESS-200. + Thanks to Christian Schlichtherle. +o Writing TAR PAX headers failed if the generated entry name ended + with a "/". + Issue: COMPRESS-203. +o ZipArchiveInputStream sometimes failed to provide input to the + Inflater when it needed it, leading to reads returning 0. + Issue: COMPRESS-189. + Thanks to Daniel Lowe. +o TarArchiveInputStream ignored the encoding for GNU long name + entries. + Issue: COMPRESS-212. 
+o TarArchiveInputStream could leave the second EOF record inside the + stream it had just finished reading. + Issue: COMPRESS-206. + Thanks to Peter De Maeyer. +o DumpArchiveInputStream no longer implicitly closes the original + input stream when it reaches the end of the archive. +o ZipArchiveInputStream now consumes the remainder of the archive when + getNextZipEntry returns null. +o Unit tests could fail if the source tree was checked out to a + directory tree containign spaces. + Issue: COMPRESS-205. + Thanks to Daniel Lowe. +o Fixed a potential ArrayIndexOutOfBoundsException when reading STORED + entries from ZipArchiveInputStream. + Issue: COMPRESS-219. +o CompressorStreamFactory can now be used without XZ for Java being + available. + Issue: COMPRESS-221. + +Changes: + +o Improved exception message if a zip archive cannot be read because + of an unsupported compression method. + Issue: COMPRESS-188. + Thanks to Harald Kuhn. +o ArchiveStreamFactory has a setting for file name encoding that sets + up encoding for ZIP and TAR streams. + Issue: COMPRESS-192. + Thanks to Jukka Zitting. +o TarArchiveEntry now has a method to verify its checksum. + Issue: COMPRESS-191. + Thanks to Jukka Zitting. +o Split/spanned ZIP archives are now properly detected by + ArchiveStreamFactory but will cause an + UnsupportedZipFeatureException when read. +o ZipArchiveInputStream now reads archives that start with a "PK00" + signature. Archives with this signatures are created when the + archiver was willing to split the archive but in the end only needed + a single segment - so didn't split anything. + Issue: COMPRESS-208. +o TarArchiveEntry has a new constructor that allows setting linkFlag + and preserveLeadingSlashes at the same time. + Issue: COMPRESS-201. +o ChangeSetPerformer has a new perform overload that uses a ZipFile + instance as input. + Issue: COMPRESS-159. +o Garbage collection pressure has been reduced by reusing temporary + byte arrays in classes. 
+ Issue: COMPRESS-172. + Thanks to Thomas Mair. +o Can now handle zip extra field 0x5455 - Extended Timestamp. + Issue: COMPRESS-210. + Thanks to Julius Davies. +o handle zip extra field 0x7875 - Info Zip New Unix Extra Field. + Issue: COMPRESS-211. + Thanks to Julius Davies. +o ZipShort, ZipLong, ZipEightByteInteger should implement Serializable + Issue: COMPRESS-213. + Thanks to Julius Davies. +o better support for unix symlinks in ZipFile entries. + Issue: COMPRESS-214. + Thanks to Julius Davies. +o ZipFile's initialization has been improved for non-Zip64 archives. + Issue: COMPRESS-215. + Thanks to Robin Power. +o Updated XZ for Java dependency to 1.2 as this version provides + proper OSGi manifest attributes. + +Release 1.4.1 +------------- + +This is a security bugfix release, see +http://commons.apache.org/proper/commons-compress/security.html#Fixed_in_Apache_Commons_Compress_1.4.1 + +Fixed Bugs: + +o Ported libbzip2's fallback sort algorithm to + BZip2CompressorOutputStream to speed up compression in certain + edge cases. + +Release 1.4 +----------- + +New features: +o COMPRESS-156: Support for the XZ format has been added. + +Fixed Bugs: +o COMPRESS-183: The tar package now allows the encoding of file names to be + specified and can optionally use PAX extension headers to + write non-ASCII file names. + The stream classes now write (or expect to read) archives that + use the platform's native encoding for file names. Apache + Commons Compress 1.3 used to strip everything but the lower + eight bits of each character which effectively only worked for + ASCII and ISO-8859-1 file names. + This new default behavior is a breaking change. +o COMPRESS-184: TarArchiveInputStream failed to parse PAX headers that + contained non-ASCII characters. 
+o COMPRESS-178: TarArchiveInputStream throws IllegalArgumentException instead of IOException +o COMPRESS-179: TarUtils.formatLongOctalOrBinaryBytes() assumes the field will be 12 bytes long +o COMPRESS-175: GNU Tar sometimes uses binary encoding for UID and GID +o COMPRESS-171: ArchiveStreamFactory.createArchiveInputStream would claim + short text files were TAR archives. +o COMPRESS-164: ZipFile didn't work properly for archives using unicode extra + fields rather than UTF-8 filenames and the EFS-Flag. +o COMPRESS-169: For corrupt archives ZipFile would throw a RuntimeException in + some cases and an IOException in others. It will now + consistently throw an IOException. + +Changes: +o COMPRESS-182: The tar package can now write archives that use star/GNU/BSD + extensions or use the POSIX/PAX variant to store numeric + values that don't fit into the traditional header fields. +o COMPRESS-181: Added a workaround for a Bug some tar implementations that add + a NUL byte as first byte in numeric header fields. +o COMPRESS-176: Added a workaround for a Bug in WinZIP which uses backslashes + as path separators in Unicode Extra Fields. +o COMPRESS-131: ArrayOutOfBounds while decompressing bz2. Added test case - code already seems to have been fixed. +o COMPRESS-146: BZip2CompressorInputStream now optionally supports reading of + concatenated .bz2 files. +o COMPRESS-154: GZipCompressorInputStream now optionally supports reading of + concatenated .gz files. +o COMPRESS-16: The tar package can now read archives that use star/GNU/BSD + extensions for files that are longer than 8 GByte as well as + archives that use the POSIX/PAX variant. +o COMPRESS-165: The tar package can now write archives that use star/GNU/BSD + extensions for files that are longer than 8 GByte as well as + archives that use the POSIX/PAX variant. +o COMPRESS-166: The tar package can now use the POSIX/PAX variant for writing + entries with names longer than 100 characters. 
+ +Release 1.3 +----------- + +Commons Compress 1.3 is the first version to require Java5 at runtime. + +Changes in this version include: + +New features: +o Support for the Pack200 format has been added. Issue: COMPRESS-142. +o Read-only support for the format used by the Unix dump(8) tool + has been added. Issue: COMPRESS-132. + +Fixed Bugs: +o BZip2CompressorInputStream's getBytesRead method always + returned 0. +o ZipArchiveInputStream and ZipArchiveOutputStream could leak + resources on some JDKs. Issue: COMPRESS-152. +o TarArchiveOutputStream's getBytesWritten method didn't count + correctly. Issue: COMPRESS-160. + +Changes: +o The ZIP package now supports Zip64 extensions. Issue: COMPRESS-36. +o The AR package now supports the BSD dialect of storing file + names longer than 16 chars (both reading and writing). + Issue: COMPRESS-144. + +Release 1.2 +----------- + +New features: +o COMPRESS-123: ZipArchiveEntry has a new method getRawName that provides the + original bytes that made up the name. This may allow user + code to detect the encoding. +o COMPRESS-122: TarArchiveEntry provides access to the flags that determine + whether it is an archived symbolic link, pipe or other + "uncommon" file system object. + +Fixed Bugs: +o COMPRESS-129: ZipArchiveInputStream could fail with a "Truncated ZIP" error + message for entries between 2 GByte and 4 GByte in size. +o COMPRESS-145: TarArchiveInputStream now detects sparse entries using the + oldgnu format and properly reports it cannot extract their + contents. +o COMPRESS-130: The Javadoc for ZipArchiveInputStream#skip now matches the + implementation, the code has been made more defensive. +o COMPRESS-140: ArArchiveInputStream fails if entries contain only blanks for + userId or groupId. Thanks to Trejkaz. +o COMPRESS-139: ZipFile may leak resources on some JDKs. +o COMPRESS-125: BZip2CompressorInputStream throws IOException if + underlying stream returns available() == 0. + Removed the check. 
+o COMPRESS-127: Calling close() on inputStream returned by + CompressorStreamFactory.createCompressorInputStream() + does not close the underlying input stream. +o COMPRESS-119: TarArchiveOutputStream#finish now writes all buffered + data to the stream + +Changes: +o ZipFile now implements finalize which closes the underlying + file. +o COMPRESS-117: Certain tar files not recognised by + ArchiveStreamFactory. + +Release 1.1 +----------- + +New features: +o COMPRESS-108: Command-line interface to list archive contents. + Usage: java -jar commons-compress-n.m.jar archive-name [zip|tar|etc] +o COMPRESS-109: Tar implementation does not support Pax headers + Added support for reading pax headers. + Note: does not support global pax headers +o COMPRESS-103: ZipArchiveInputStream can optionally extract data that used + the STORED compression method and a data descriptor. + Doing so in a stream is not safe in general, so you have to + explicitly enable the feature. By default the stream will + throw an exception if it encounters such an entry. +o COMPRESS-98: The ZIP classes will throw specialized exceptions if any + attempt is made to read or write data that uses zip features + not supported (yet). +o COMPRESS-99: ZipFile#getEntries returns entries in a predictable order - + the order they appear inside the central directory. + A new method getEntriesInPhysicalOrder returns entries in + order of the entry data, i.e. the order ZipArchiveInputStream + would see. +o The Archive*Stream and ZipFile classes now have + can(Read|Write)EntryData methods that can be used to check + whether a given entry's data can be read/written. + The method currently returns false for ZIP archives if an + entry uses an unsupported compression method or encryption. +o COMPRESS-89: The ZIP classes now detect encrypted entries. +o COMPRESS-97: Added autodetection of compression format to + CompressorStreamFactory. 
+o COMPRESS-95: Improve ExceptionMessages in ArchiveStreamFactory Thanks to Joerg Bellmann. +o A new constructor of TarArchiveEntry can create entries with + names that start with slashes - the default is to strip + leading slashes in order to create relative path names. +o ArchiveEntry now has a getLastModifiedDate method. +o COMPRESS-78: Add a BZip2Utils class modelled after GZipUtils Thanks to Jukka Zitting. + +Fixed Bugs: +o COMPRESS-72: Move acknowledgements from NOTICE to README +o COMPRESS-113: TarArchiveEntry.parseTarHeader() includes the trailing space/NUL when parsing the octal size +o COMPRESS-118: TarUtils.parseName does not properly handle characters outside the range 0-127 +o COMPRESS-107: ArchiveStreamFactory does not recognise tar files created by Ant +o COMPRESS-110: Support "ustar" prefix field, which is used when file paths are longer + than 100 characters. +o COMPRESS-100: ZipArchiveInputStream will throw an exception if it detects an + entry that uses a data descriptor for a STORED entry since it + cannot reliably find the end of data for this "compression" + method. +o COMPRESS-101: ZipArchiveInputStream should now properly read archives that + use data descriptors but without the "unofficial" signature. +o COMPRESS-74: ZipArchiveInputStream failed to update the number of bytes + read properly. +o ArchiveInputStream has a new method getBytesRead that should + be preferred over getCount since the later may truncate the + number of bytes read for big archives. +o COMPRESS-85: The cpio archives created by CpioArchiveOutputStream couldn't + be read by many existing native implementations because the + archives contained multiple entries with the same inode/device + combinations and weren't padded to a blocksize of 512 bytes. +o COMPRESS-73: ZipArchiveEntry, ZipFile and ZipArchiveInputStream are now + more lenient when parsing extra fields. +o COMPRESS-82: cpio is terribly slow. 
+ Documented that buffered streams are needed for performance +o Improved exception message if the extra field data in ZIP + archives cannot be parsed. +o COMPRESS-17: Tar format unspecified - current support documented. +o COMPRESS-94: ZipArchiveEntry's equals method was broken for entries created + with the String-arg constructor. This lead to broken ZIP + archives if two different entries had the same hash code. Thanks to Anon Devs. +o COMPRESS-87: ZipArchiveInputStream could repeatedly return 0 on read() when + the archive was truncated. Thanks to Antoni Mylka. +o COMPRESS-86: Tar archive entries holding the file name for names longer + than 100 characters in GNU longfile mode didn't properly + specify they'd be using the "oldgnu" extension. +o COMPRESS-83: Delegate all read and write methods in GZip stream in order to + speed up operations. +o The ar and cpio streams now properly read and write last + modified times. +o COMPRESS-81: TarOutputStream can leave garbage at the end of the archive + +Changes: +o COMPRESS-112: ArArchiveInputStream does not handle GNU extended filename records (//) +o COMPRESS-105: Document that the name of an ZipArchiveEntry determines whether + an entry is considered a directory or not. + If you don't use the constructor with the File argument the entry's + name must end in a "/" in order for the entry to be known as a directory. +o COMPRESS-79: Move DOS/Java time conversions into Zip utility class. +o COMPRESS-75: ZipArchiveInputStream does not show location in file + where a problem occurred. 
From 08bf3f7a17bcfa374dbf0fea4acf248f8f8f2402 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Wed, 4 Feb 2015 17:56:41 +0000 Subject: [PATCH 120/189] Reformat to look like RN git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1657347 13f79535-47bb-0310-9956-ffa450edef68 --- pom.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 7c4402dd1ab..a86de4a1094 100644 --- a/pom.xml +++ b/pom.xml @@ -31,9 +31,9 @@ Apache Commons Compress software defines an API for working with -compression and archive formats. -These include: bzip2, gzip, pack200, lzma, xz, Snappy, traditional -Unix Compress, DEFLATE and ar, cpio, jar, tar, zip, dump, 7z, arj. +compression and archive formats. These include: bzip2, gzip, pack200, +lzma, xz, Snappy, traditional Unix Compress, DEFLATE and ar, cpio, +jar, tar, zip, dump, 7z, arj. From 027af528575b2adc82debb3d8d871071ea3aa224 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Wed, 4 Feb 2015 18:03:23 +0000 Subject: [PATCH 121/189] Tweak git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1657350 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/release-notes.vm | 1 + 1 file changed, 1 insertion(+) diff --git a/src/changes/release-notes.vm b/src/changes/release-notes.vm index 219f56a3772..38c2b777a3b 100644 --- a/src/changes/release-notes.vm +++ b/src/changes/release-notes.vm @@ -664,3 +664,4 @@ o COMPRESS-105: Document that the name of an ZipArchiveEntry determines whether o COMPRESS-79: Move DOS/Java time conversions into Zip utility class. o COMPRESS-75: ZipArchiveInputStream does not show location in file where a problem occurred. 
+ From e0c0a10d271cc8bd9ccf4cf029e8e03ab0c96ecb Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Wed, 4 Feb 2015 19:05:30 +0000 Subject: [PATCH 122/189] Synch with RN contents git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1657373 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index fcbb35966e6..f6211683108 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -43,7 +43,16 @@ The type attribute can be add,update,fix,remove. + description="Release 1.10 +------------ + + + +Release 1.10 moves the former +org.apache.commons.compress.compressors.z._internal_ package which +breaks backwards compatibility for code which used the old package. +This also changes the superclass of ZCompressorInputStream. +"> @@ -54,7 +63,8 @@ The type attribute can be add,update,fix,remove. Zip documentation updated with further notes about parallel features. Please note that some aspects of jar creation need to be - handled by client code and is not part of commons-compress for this release. + handled by client code and is not part of commons-compress for this + release. 
Date: Wed, 4 Feb 2015 19:06:58 +0000 Subject: [PATCH 123/189] Fix indentation and alignment to agree with Release Notes Can now generate the RN file as follows: mvn changes:announcement-generate -Prelease-notes -Dchanges.version=1.10 git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1657375 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/release-notes.vm | 43 +++++++++++++++++++++++++++--------- 1 file changed, 32 insertions(+), 11 deletions(-) diff --git a/src/changes/release-notes.vm b/src/changes/release-notes.vm index 38c2b777a3b..7e2b2b98325 100644 --- a/src/changes/release-notes.vm +++ b/src/changes/release-notes.vm @@ -27,13 +27,11 @@ $release.description.replaceAll(" ", " ## Fix up indentation for multi-line action descriptions #macro ( indent ) -#set($action=$action.replaceAll("(?m)^ +"," ")) +#set($action=$action.replaceAll("(?m)^ +"," ")) #end #if ($release.getActions().size() == 0) No changes defined in this version. #else -Changes in this version include: - #if ($release.getActions('add').size() !=0) New features: #foreach($actionItem in $release.getActions('add')) @@ -51,7 +49,12 @@ New features: #else #set($dueto="") #end -o#if($!issue != "") $issue: #end ${action} #if($!dueto != "")Thanks to $dueto. #end +o ${action}## +#if($!issue != "") + + Issue: $issue. #if($!dueto != "")Thanks to $dueto. #end +#else#if($!dueto != "") Thanks to $dueto. #end +#end #set($issue="") #set($dueto="") @@ -75,7 +78,12 @@ Fixed Bugs: #else #set($dueto="") #end -o#if($!issue != "") $issue: #end ${action} #if($!dueto != "")Thanks to $dueto. #end +o ${action}## +#if($!issue != "") + + Issue: $issue. #if($!dueto != "")Thanks to $dueto. #end +#else#if($!dueto != "") Thanks to $dueto. #end +#end #set($issue="") #set($dueto="") @@ -99,7 +107,12 @@ Changes: #else #set($dueto="") #end -o#if($!issue != "") $issue: #end ${action} #if($!dueto != "")Thanks to $dueto. #end +o ${action}## +#if($!issue != "") + + Issue: $issue. 
#if($!dueto != "")Thanks to $dueto. #end +#else#if($!dueto != "") Thanks to $dueto. #end +#end #set($issue="") #set($dueto="") @@ -123,7 +136,12 @@ Removed: #else #set($dueto="") #end -o#if($!issue != "") $issue. #end ${action} #if($!dueto != "")Thanks to $dueto. #end +o ${action}## +#if($!issue != "") + + Issue: $issue. #if($!dueto != "")Thanks to $dueto. #end +#else#if($!dueto != "") Thanks to $dueto. #end +#end #set($issue="") #set($dueto="") @@ -131,11 +149,14 @@ o#if($!issue != "") $issue. #end ${action} #if($!dueto != "")Thanks to $dueto. # #end ## End of main loop #end - -For complete information on ${project.name}, including instructions on how to submit bug reports, -patches, or suggestions for improvement, see the ${project.name} website: +#macro ( fixurl $url ) +$url.replaceAll("proper/commons-","") +#end +For complete information on ${project.name}, including instructions +on how to submit bug reports, patches, or suggestions for improvement, +see the ${project.name} website: -${project.url} +#fixurl ( ${project.url} ) Old Release Notes ================= From 915fdd3fcb55f81fbb4f63f9f9752c329488c524 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 13 Feb 2015 19:12:09 +0000 Subject: [PATCH 124/189] python tarfile library seems to create embedded NULs, COMPRESS-301, unit test to follow git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1659649 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/archivers/tar/TarUtils.java | 5 +++++ .../compress/archivers/tar/TarUtilsTest.java | 6 ------ src/test/resources/COMPRESS-178.tar | Bin 10240 -> 10240 bytes 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java index 94e175c210d..7f7d57feaf2 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java +++ 
b/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java @@ -132,6 +132,11 @@ public static long parseOctal(final byte[] buffer, final int offset, final int l for ( ;start < end; start++) { final byte currentByte = buffer[start]; + if (currentByte == 0) { + // some archivers don't pad the whole field but just insert a NUL + // COMPRESS-301 + break; + } // CheckStyle:MagicNumber OFF if (currentByte < '0' || currentByte > '7'){ throw new IllegalArgumentException( diff --git a/src/test/java/org/apache/commons/compress/archivers/tar/TarUtilsTest.java b/src/test/java/org/apache/commons/compress/archivers/tar/TarUtilsTest.java index 9210c54e5bb..8f3888b2c1b 100644 --- a/src/test/java/org/apache/commons/compress/archivers/tar/TarUtilsTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/tar/TarUtilsTest.java @@ -95,12 +95,6 @@ public void testParseOctalInvalid() throws Exception{ fail("Expected IllegalArgumentException - embedded space"); } catch (IllegalArgumentException expected) { } - buffer = " 0\00007 ".getBytes(CharsetNames.UTF_8); // Invalid - embedded NUL - try { - TarUtils.parseOctal(buffer,0, buffer.length); - fail("Expected IllegalArgumentException - embedded NUL"); - } catch (IllegalArgumentException expected) { - } } private void checkRoundTripOctal(final long value, final int bufsize) { diff --git a/src/test/resources/COMPRESS-178.tar b/src/test/resources/COMPRESS-178.tar index 6a13219797b81a7bdaa3a62fcf36d747b4bcadb0..7e2ca7af0f12a0f5655008d12af819d4ac45d414 100644 GIT binary patch delta 12 TcmZn&Xb6~)&B(YhXNo!i9vcL9 delta 14 VcmZn&Xb6~)Jvp0=dt+IbIshv&1%m(p From 3a587d6d65e5bc2a1e0b237834a1dd53f2659bd8 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Sun, 15 Feb 2015 17:27:25 +0000 Subject: [PATCH 125/189] r1512804 broke immutability git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1659953 13f79535-47bb-0310-9956-ffa450edef68 --- .../apache/commons/compress/archivers/ArchiveStreamFactory.java | 1 - 
1 file changed, 1 deletion(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java index be422d373f6..6443e0a60af 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java +++ b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java @@ -72,7 +72,6 @@ * in.close(); * * - * @Immutable */ public class ArchiveStreamFactory { From d84b2ec4054445dc2ebc79d09ed48a08665ad131 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Sun, 15 Feb 2015 17:54:40 +0000 Subject: [PATCH 126/189] r1453945 broke immutability git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1659958 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress/compressors/CompressorStreamFactory.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java index 7442ba11bd7..0b8c64058aa 100644 --- a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java +++ b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java @@ -63,7 +63,6 @@ * in.close(); * * - * @Immutable */ public class CompressorStreamFactory { From a6906ad2bfe148a6f6b1d7674f6cfe2eabbd7b09 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Sun, 15 Feb 2015 18:45:04 +0000 Subject: [PATCH 127/189] Javadoc fixes git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1659976 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/ArchiveStreamFactory.java | 24 +++++++++++-------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java index 
6443e0a60af..c906ab7bb63 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java +++ b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java @@ -76,27 +76,29 @@ public class ArchiveStreamFactory { /** - * Constant used to identify the AR archive format. + * Constant (value {@value}) used to identify the AR archive format. * @since 1.1 */ public static final String AR = "ar"; /** - * Constant used to identify the ARJ archive format. + * Constant (value {@value}) used to identify the ARJ archive format. + * Not supported as an output stream type. * @since 1.6 */ public static final String ARJ = "arj"; /** - * Constant used to identify the CPIO archive format. + * Constant (value {@value}) used to identify the CPIO archive format. * @since 1.1 */ public static final String CPIO = "cpio"; /** - * Constant used to identify the Unix DUMP archive format. + * Constant (value {@value}) used to identify the Unix DUMP archive format. + * Not supported as an output stream type. * @since 1.3 */ public static final String DUMP = "dump"; /** - * Constant used to identify the JAR archive format. + * Constant (value {@value}) used to identify the JAR archive format. * @since 1.1 */ public static final String JAR = "jar"; @@ -106,12 +108,12 @@ public class ArchiveStreamFactory { */ public static final String TAR = "tar"; /** - * Constant used to identify the ZIP archive format. + * Constant (value {@value}) used to identify the ZIP archive format. * @since 1.1 */ public static final String ZIP = "zip"; /** - * Constant used to identify the 7z archive format. + * Constant (value {@value}) used to identify the 7z archive format. * @since 1.8 */ public static final String SEVEN_Z = "7z"; @@ -145,7 +147,8 @@ public void setEntryEncoding(String entryEncoding) { /** * Create an archive input stream from an archiver name and an input stream. * - * @param archiverName the archive name, i.e. 
"ar", "arj", "zip", "tar", "jar", "dump" or "cpio" + * @param archiverName the archive name, + * i.e. {@value #AR}, {@value #ARJ}, {@value #ZIP}, {@value #TAR}, {@value #JAR}, {@value #CPIO}, {@value #DUMP} or {@value #SEVEN_Z} * @param in the input stream * @return the archive input stream * @throws ArchiveException if the archiver name is not known @@ -214,9 +217,10 @@ public ArchiveInputStream createArchiveInputStream( } /** - * Create an archive output stream from an archiver name and an input stream. + * Create an archive output stream from an archiver name and an output stream. * - * @param archiverName the archive name, i.e. "ar", "zip", "tar", "jar" or "cpio" + * @param archiverName the archive name, + * i.e. {@value #AR}, {@value #ZIP}, {@value #TAR}, {@value #JAR}, {@value #CPIO} or {@value #SEVEN_Z} * @param out the output stream * @return the archive output stream * @throws ArchiveException if the archiver name is not known From 1f91bbb19515037f14b53f73db6905c50d3633a6 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Sun, 15 Feb 2015 18:57:37 +0000 Subject: [PATCH 128/189] Javadoc corrections and enhancements git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1659981 13f79535-47bb-0310-9956-ffa450edef68 --- .../compressors/CompressorStreamFactory.java | 35 ++++++++++++------- 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java index 0b8c64058aa..22100568662 100644 --- a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java +++ b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java @@ -67,54 +67,59 @@ public class CompressorStreamFactory { /** - * Constant used to identify the BZIP2 compression algorithm. + * Constant (value {@value}) used to identify the BZIP2 compression algorithm. 
* @since 1.1 */ public static final String BZIP2 = "bzip2"; /** - * Constant used to identify the GZIP compression algorithm. + * Constant (value {@value}) used to identify the GZIP compression algorithm. + * Not supported as an output stream type. * @since 1.1 */ public static final String GZIP = "gz"; /** - * Constant used to identify the PACK200 compression algorithm. + * Constant (value {@value}) used to identify the PACK200 compression algorithm. * @since 1.3 */ public static final String PACK200 = "pack200"; /** - * Constant used to identify the XZ compression method. + * Constant (value {@value}) used to identify the XZ compression method. * @since 1.4 */ public static final String XZ = "xz"; /** - * Constant used to identify the LZMA compression method. + * Constant (value {@value}) used to identify the LZMA compression method. + * Not supported as an output stream type. * @since 1.6 */ public static final String LZMA = "lzma"; /** - * Constant used to identify the "framed" Snappy compression method. + * Constant (value {@value}) used to identify the "framed" Snappy compression method. + * Not supported as an output stream type. * @since 1.7 */ public static final String SNAPPY_FRAMED = "snappy-framed"; /** - * Constant used to identify the "raw" Snappy compression method. + * Constant (value {@value}) used to identify the "raw" Snappy compression method. + * Not supported as an output stream type. * @since 1.7 */ public static final String SNAPPY_RAW = "snappy-raw"; /** - * Constant used to identify the traditional Unix compress method. + * Constant (value {@value}) used to identify the traditional Unix compress method. + * Not supported as an output stream type. * @since 1.7 */ public static final String Z = "z"; /** - * Constant used to identify the Deflate compress method. + * Constant (value {@value}) used to identify the Deflate compress method. 
* @since 1.9 */ public static final String DEFLATE = "deflate"; @@ -200,8 +205,10 @@ public CompressorInputStream createCompressorInputStream(final InputStream in) /** * Create a compressor input stream from a compressor name and an input stream. * - * @param name of the compressor, i.e. "gz", "bzip2", "xz", - * "lzma", "snappy-raw", "snappy-framed", "pack200", "z" + * @param name of the compressor, + * i.e. {@value #GZIP}, {@value #BZIP2}, {@value #XZ}, {@value #LZMA}, + * {@value #PACK200}, {@value #SNAPPY_RAW}, {@value #SNAPPY_FRAMED}, + * {@value #Z} or {@value #DEFLATE} * @param in the input stream * @return compressor input stream * @throws CompressorException if the compressor name is not known @@ -260,9 +267,11 @@ public CompressorInputStream createCompressorInputStream(final String name, } /** - * Create an compressor output stream from an compressor name and an input stream. + * Create an compressor output stream from an compressor name and an output stream. * - * @param name the compressor name, i.e. "gz", "bzip2", "xz", or "pack200" + * @param name the compressor name, + * i.e. 
{@value #GZIP}, {@value #BZIP2}, {@value #XZ}, + * {@value #PACK200} or {@value #DEFLATE} * @param out the output stream * @return the compressor output stream * @throws CompressorException if the archiver name is not known From 8e7d71fce1aed00ffdfea85629fdf0d5da0b1186 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Sun, 15 Feb 2015 22:42:13 +0000 Subject: [PATCH 129/189] Easier to read outline in Eclipse IDE if issue number is put first git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660000 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index f6211683108..f981e249229 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -54,7 +54,7 @@ breaks backwards compatibility for code which used the old package. This also changes the superclass of ZCompressorInputStream. "> - Added support for parallel compression. This low-level API allows a client to build a zip/jar file by using the class @@ -87,23 +87,23 @@ This also changes the superclass of ZCompressorInputStream. of the API that is officially supported. This will break existing code that uses the old package. - New methods in ZipArchiveOutputStream and ZipFile allows entries to be copied from one archive to another without having to re-compress them. - + ZipFile logs a warning in its finalizer when its constructor has thrown an exception reading the file - for example if the file doesn't exist. - Improved error message when tar encounters a groupId that is too big to write without using the STAR or POSIX format. - + SevenZFile now throws the specific PasswordRequiredException when it encounters an encrypted stream but no password has been specified. 
From 121cae778dcb63a4ca4f1ee2743e58bffcbfe4be Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Sun, 15 Feb 2015 22:49:01 +0000 Subject: [PATCH 130/189] COMPRESS-303 Restore immutability/thread-safety to CompressorStreamFactory. git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660002 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 5 ++ .../compressors/CompressorStreamFactory.java | 46 ++++++++++++++++++- 2 files changed, 49 insertions(+), 2 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index f981e249229..f676d7f53d0 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -54,6 +54,11 @@ breaks backwards compatibility for code which used the old package. This also changes the superclass of ZCompressorInputStream. "> + + Restore immutability/thread-safety to CompressorStreamFactory. + The class is now immutable provided that the method setDecompressConcatenated is not used. + The class is thread-safe. + Added support for parallel compression. This low-level API allows diff --git a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java index 22100568662..120dee9f9d6 100644 --- a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java +++ b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java @@ -62,7 +62,7 @@ * IOUtils.copy(in, new FileOutputStream(output)); * in.close(); * - * + * @Immutable provided that the deprecated method setDecompressConcatenated is not used. */ public class CompressorStreamFactory { @@ -124,7 +124,44 @@ public class CompressorStreamFactory { */ public static final String DEFLATE = "deflate"; - private boolean decompressConcatenated = false; + /** + * If true, decompress until the end of the input. 
+ * If false, stop after the first stream and leave the + * input position to point to the next byte after the stream + */ + private final Boolean decompressUntilEOF; + // This is Boolean so setDecompressConcatenated can determine whether it has been set by the ctor + // once the setDecompressConcatenated method has been removed, it can revert to boolean + + /** + * If true, decompress until the end of the input. + * If false, stop after the first stream and leave the + * input position to point to the next byte after the stream + */ + + private volatile boolean decompressConcatenated = false; + + /** + * Create an instance with the decompress Concatenated option set to false. + */ + public CompressorStreamFactory() { + this.decompressUntilEOF = null; + } + + /** + * Create an instance with the provided decompress Concatenated option. + * @param decompressUntilEOF + * if true, decompress until the end of the + * input; if false, stop after the first + * stream and leave the input position to point + * to the next byte after the stream + * @since 1.10 + */ + public CompressorStreamFactory(boolean decompressUntilEOF) { + this.decompressUntilEOF = Boolean.valueOf(decompressUntilEOF); + // Also copy to existing variable so can continue to use that as the current value + this.decompressConcatenated = decompressUntilEOF; + } /** * Whether to decompress the full input or only the first stream @@ -138,8 +175,13 @@ public class CompressorStreamFactory { * stream and leave the input position to point * to the next byte after the stream * @since 1.5 + * @deprecated 1.10 use the {@link #CompressorStreamFactory(boolean)} constructor instead */ + @Deprecated public void setDecompressConcatenated(boolean decompressConcatenated) { + if (this.decompressUntilEOF != null) { + throw new IllegalStateException("Cannot override the setting defined by the constructor"); + } this.decompressConcatenated = decompressConcatenated; } From d6144b9486c36b20557f859f968e58e52ff4eef7 Mon Sep 17 
00:00:00 2001 From: Sebastian Bazley Date: Sun, 15 Feb 2015 22:55:46 +0000 Subject: [PATCH 131/189] Javadoc git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660003 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/compressors/CompressorStreamFactory.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java index 120dee9f9d6..b459fffd7a4 100644 --- a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java +++ b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java @@ -154,7 +154,8 @@ public CompressorStreamFactory() { * if true, decompress until the end of the * input; if false, stop after the first * stream and leave the input position to point - * to the next byte after the stream + * to the next byte after the stream. + * This setting applies to the gzip, bzip2 and xz formats only. * @since 1.10 */ public CompressorStreamFactory(boolean decompressUntilEOF) { @@ -176,6 +177,8 @@ public CompressorStreamFactory(boolean decompressUntilEOF) { * to the next byte after the stream * @since 1.5 * @deprecated 1.10 use the {@link #CompressorStreamFactory(boolean)} constructor instead + * @throws IllegalStateException if the constructor {@link #CompressorStreamFactory(boolean)} + * was used to create the factory */ @Deprecated public void setDecompressConcatenated(boolean decompressConcatenated) { From 0b33b14271675a78b8bc2e538089af2ecde7d2a0 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Sun, 15 Feb 2015 23:09:26 +0000 Subject: [PATCH 132/189] COMPRESS-302 Restore immutability/thread-safety to ArchiveStreamFactory. 
git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660005 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 5 +++ .../archivers/ArchiveStreamFactory.java | 38 ++++++++++++++++++- 2 files changed, 41 insertions(+), 2 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index f676d7f53d0..428e0c7183a 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -54,6 +54,11 @@ breaks backwards compatibility for code which used the old package. This also changes the superclass of ZCompressorInputStream. "> + + Restore immutability/thread-safety to ArchiveStreamFactory. + The class is now immutable provided that the method setEntryEncoding is not used. + The class is thread-safe. + Restore immutability/thread-safety to CompressorStreamFactory. The class is now immutable provided that the method setDecompressConcatenated is not used. diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java index c906ab7bb63..4aefb29b8a6 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java +++ b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java @@ -71,7 +71,7 @@ * out.close(); * in.close(); * - * + * @Immutable provided that the deprecated method setEntryEncoding is not used. */ public class ArchiveStreamFactory { @@ -118,10 +118,36 @@ public class ArchiveStreamFactory { */ public static final String SEVEN_Z = "7z"; + /** + * Entry encoding, null for the platform default. + */ + private final String encoding; + /** * Entry encoding, null for the default. */ - private String entryEncoding = null; + private volatile String entryEncoding = null; + + /** + * Create an instance using the platform default encoding. + */ + public ArchiveStreamFactory() { + this(null); + } + + /** + * Create an instance using the specified encoding. 
+ * + * @param encoding the encoding to be used. + * + * @since 1.10 + */ + public ArchiveStreamFactory(String encoding) { + super(); + this.encoding = encoding; + // Also set the original field so can continue to use it. + this.entryEncoding = encoding; + } /** * Returns the encoding to use for arj, zip, dump, cpio and tar @@ -139,8 +165,16 @@ public String getEntryEncoding() { * * @param entryEncoding the entry encoding, null uses the default. * @since 1.5 + * @deprecated 1.10 use {@link #ArchiveStreamFactory(String)} to specify the encoding + * @throws IllegalStateException if the constructor {@link #ArchiveStreamFactory(String)} + * was used to specify the factory encoding. */ + @Deprecated public void setEntryEncoding(String entryEncoding) { + // Note: this does not detect new ArchiveStreamFactory(null) but that does not set the encoding anyway + if (encoding != null) { + throw new IllegalStateException("Cannot overide encoding set by the constructor"); + } this.entryEncoding = entryEncoding; } From eecb5a6d8610be02c33d5dc46cf2caa3c4b6e339 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Mon, 16 Feb 2015 00:06:45 +0000 Subject: [PATCH 133/189] Update to CP 37 git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660007 13f79535-47bb-0310-9956-ffa450edef68 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index a86de4a1094..294e0fa5150 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ org.apache.commons commons-parent - 36 + 37 org.apache.commons From f18a9f6ee6d488cfdbc6bd2ac0686b88e6161674 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Mon, 16 Feb 2015 00:08:05 +0000 Subject: [PATCH 134/189] JUnit => 4.12 git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660008 13f79535-47bb-0310-9956-ffa450edef68 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 294e0fa5150..c150930fdc2 100644 --- a/pom.xml +++ 
b/pom.xml @@ -56,7 +56,7 @@ jar, tar, zip, dump, 7z, arj. junit junit - 4.11 + 4.12 test From 830af4f8fcef573a043e9f7a9abc8d0860507969 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Mon, 16 Feb 2015 00:22:10 +0000 Subject: [PATCH 135/189] No need to override the parent changes report definition; avoids report redefinition warning git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660009 13f79535-47bb-0310-9956-ffa450edef68 --- pom.xml | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/pom.xml b/pom.xml index c150930fdc2..c90d30f0bb8 100644 --- a/pom.xml +++ b/pom.xml @@ -205,25 +205,6 @@ jar, tar, zip, dump, 7z, arj. - - - org.apache.maven.plugins - maven-changes-plugin - ${commons.changes.version} - - - %URL%/%ISSUE% - - - - - - changes-report - jira-report - - - - org.codehaus.mojo From b5a60a2beb9e301431f4441140ef455a112b76b5 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Mon, 16 Feb 2015 00:27:55 +0000 Subject: [PATCH 136/189] No longer exists git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660010 13f79535-47bb-0310-9956-ffa450edef68 --- pom.xml | 2 -- 1 file changed, 2 deletions(-) diff --git a/pom.xml b/pom.xml index c90d30f0bb8..cfb8effd846 100644 --- a/pom.xml +++ b/pom.xml @@ -275,8 +275,6 @@ jar, tar, zip, dump, 7z, arj. 
src/test/resources/** - - PROPOSAL.txt .pmd .gitignore .gitattributes From 9cf0fe64c79f34e786e8d180f9b935797207b1fa Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Mon, 16 Feb 2015 00:29:53 +0000 Subject: [PATCH 137/189] Non-existent files git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660011 13f79535-47bb-0310-9956-ffa450edef68 --- src/main/assembly/src.xml | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/main/assembly/src.xml b/src/main/assembly/src.xml index cedd93f2cf4..70fe14ac81a 100644 --- a/src/main/assembly/src.xml +++ b/src/main/assembly/src.xml @@ -24,13 +24,11 @@ - build.xml LICENSE.txt NOTICE.txt README.txt RELEASE-NOTES.txt pom.xml - STATUS findbugs-exclude-filter.xml pmd-ruleset.xml From b1caea2095fda0685c40b46ea1f7f22da457c9f6 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Mon, 16 Feb 2015 16:10:03 +0000 Subject: [PATCH 138/189] Add test cases for concatenated compressed files git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660151 13f79535-47bb-0310-9956-ffa450edef68 --- .../compressors/CompressorStreamFactory.java | 5 + .../compress/DetectCompressorTestCase.java | 77 --------- .../compressors/DetectCompressorTestCase.java | 148 ++++++++++++++++++ 3 files changed, 153 insertions(+), 77 deletions(-) delete mode 100644 src/test/java/org/apache/commons/compress/DetectCompressorTestCase.java create mode 100644 src/test/java/org/apache/commons/compress/compressors/DetectCompressorTestCase.java diff --git a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java index b459fffd7a4..677da4e28df 100644 --- a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java +++ b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java @@ -358,4 +358,9 @@ public CompressorOutputStream createCompressorOutputStream( } throw new 
CompressorException("Compressor: " + name + " not found."); } + + // For Unit tests + boolean getDecompressConcatenated() { + return decompressConcatenated; + } } diff --git a/src/test/java/org/apache/commons/compress/DetectCompressorTestCase.java b/src/test/java/org/apache/commons/compress/DetectCompressorTestCase.java deleted file mode 100644 index 09e7b85507f..00000000000 --- a/src/test/java/org/apache/commons/compress/DetectCompressorTestCase.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.commons.compress; - -import static org.apache.commons.compress.AbstractTestCase.getFile; - -import java.io.BufferedInputStream; -import java.io.ByteArrayInputStream; -import java.io.FileInputStream; -import java.io.IOException; -import junit.framework.TestCase; - -import org.apache.commons.compress.compressors.CompressorException; -import org.apache.commons.compress.compressors.CompressorInputStream; -import org.apache.commons.compress.compressors.CompressorStreamFactory; -import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; -import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; -import org.apache.commons.compress.compressors.pack200.Pack200CompressorInputStream; -import org.apache.commons.compress.compressors.xz.XZCompressorInputStream; - -public final class DetectCompressorTestCase extends TestCase { - - public DetectCompressorTestCase(String name) { - super(name); - } - - final CompressorStreamFactory factory = new CompressorStreamFactory(); - - public void testDetection() throws Exception { - CompressorInputStream bzip2 = getStreamFor("bla.txt.bz2"); - assertNotNull(bzip2); - assertTrue(bzip2 instanceof BZip2CompressorInputStream); - - CompressorInputStream gzip = getStreamFor("bla.tgz"); - assertNotNull(gzip); - assertTrue(gzip instanceof GzipCompressorInputStream); - - CompressorInputStream pack200 = getStreamFor("bla.pack"); - assertNotNull(pack200); - assertTrue(pack200 instanceof Pack200CompressorInputStream); - - CompressorInputStream xz = getStreamFor("bla.tar.xz"); - assertNotNull(xz); - assertTrue(xz instanceof XZCompressorInputStream); - - try { - factory.createCompressorInputStream(new ByteArrayInputStream(new byte[0])); - fail("No exception thrown for an empty input stream"); - } catch (CompressorException e) { - // expected - } - } - - private CompressorInputStream getStreamFor(String resource) - throws CompressorException, IOException { - return 
factory.createCompressorInputStream( - new BufferedInputStream(new FileInputStream( - getFile(resource)))); - } - -} diff --git a/src/test/java/org/apache/commons/compress/compressors/DetectCompressorTestCase.java b/src/test/java/org/apache/commons/compress/compressors/DetectCompressorTestCase.java new file mode 100644 index 00000000000..9554bddc028 --- /dev/null +++ b/src/test/java/org/apache/commons/compress/compressors/DetectCompressorTestCase.java @@ -0,0 +1,148 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.commons.compress.compressors; + +import static org.apache.commons.compress.AbstractTestCase.getFile; + +import java.io.BufferedInputStream; +import java.io.ByteArrayInputStream; +import java.io.FileInputStream; +import java.io.IOException; + +import junit.framework.TestCase; + +import org.apache.commons.compress.compressors.CompressorException; +import org.apache.commons.compress.compressors.CompressorInputStream; +import org.apache.commons.compress.compressors.CompressorStreamFactory; +import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; +import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; +import org.apache.commons.compress.compressors.pack200.Pack200CompressorInputStream; +import org.apache.commons.compress.compressors.xz.XZCompressorInputStream; + +@SuppressWarnings("deprecation") // deliberately tests setDecompressConcatenated +public final class DetectCompressorTestCase extends TestCase { + + public DetectCompressorTestCase(String name) { + super(name); + } + + final CompressorStreamFactory factory = new CompressorStreamFactory(); + private static final CompressorStreamFactory factoryTrue = new CompressorStreamFactory(true); + private static final CompressorStreamFactory factoryFalse = new CompressorStreamFactory(false); + + // Must be static to allow use in the TestData entries + private static final CompressorStreamFactory factorySetTrue; + private static final CompressorStreamFactory factorySetFalse; + + static { + factorySetTrue = new CompressorStreamFactory(); + factorySetTrue.setDecompressConcatenated(true); + factorySetFalse = new CompressorStreamFactory(); + factorySetFalse.setDecompressConcatenated(false); + } + + static class TestData { + final String fileName; // The multiple file name + final char[] entryNames; // expected entries ... + final CompressorStreamFactory factory; // ... 
when using this factory + final boolean concat; // expected value for decompressConcatenated + TestData(String name, char[] names, CompressorStreamFactory factory, boolean concat) { + this.fileName = name; + this.entryNames = names; + this.factory = factory; + this.concat = concat; + } + } + + private final TestData[] tests = { + new TestData("multiple.bz2", new char[]{'a','b'}, factoryTrue, true), + new TestData("multiple.bz2", new char[]{'a','b'}, factorySetTrue, true), + new TestData("multiple.bz2", new char[]{'a'}, factoryFalse, false), + new TestData("multiple.bz2", new char[]{'a'}, factorySetFalse, false), + new TestData("multiple.bz2", new char[]{'a'}, factory, false), + + new TestData("multiple.gz", new char[]{'a','b'}, factoryTrue, true), + new TestData("multiple.gz", new char[]{'a','b'}, factorySetTrue, true), + new TestData("multiple.gz", new char[]{'a'}, factoryFalse, false), + new TestData("multiple.gz", new char[]{'a'}, factorySetFalse, false), + new TestData("multiple.gz", new char[]{'a'}, factory, false), + + new TestData("multiple.xz", new char[]{'a','b'}, factoryTrue, true), + new TestData("multiple.xz", new char[]{'a','b'}, factorySetTrue, true), + new TestData("multiple.xz", new char[]{'a'}, factoryFalse, false), + new TestData("multiple.xz", new char[]{'a'}, factorySetFalse, false), + new TestData("multiple.xz", new char[]{'a'}, factory, false), + }; + + public void testDetection() throws Exception { + CompressorInputStream bzip2 = getStreamFor("bla.txt.bz2"); + assertNotNull(bzip2); + assertTrue(bzip2 instanceof BZip2CompressorInputStream); + + CompressorInputStream gzip = getStreamFor("bla.tgz"); + assertNotNull(gzip); + assertTrue(gzip instanceof GzipCompressorInputStream); + + CompressorInputStream pack200 = getStreamFor("bla.pack"); + assertNotNull(pack200); + assertTrue(pack200 instanceof Pack200CompressorInputStream); + + CompressorInputStream xz = getStreamFor("bla.tar.xz"); + assertNotNull(xz); + assertTrue(xz instanceof 
XZCompressorInputStream); + + try { + factory.createCompressorInputStream(new ByteArrayInputStream(new byte[0])); + fail("No exception thrown for an empty input stream"); + } catch (CompressorException e) { + // expected + } + } + + public void testMutiples() throws Exception { + for(int i=0; i Date: Mon, 16 Feb 2015 16:43:44 +0000 Subject: [PATCH 139/189] Javadoc git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660161 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress/archivers/arj/ArjArchiveInputStream.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java index 01e1570f896..09e26f6086e 100644 --- a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java @@ -52,7 +52,7 @@ public class ArjArchiveInputStream extends ArchiveInputStream { * Constructs the ArjInputStream, taking ownership of the inputStream that is passed in. * @param inputStream the underlying stream, whose ownership is taken * @param charsetName the charset used for file names and comments - * in the archive + * in the archive. May be {@code null} to use the platform default. 
* @throws ArchiveException */ public ArjArchiveInputStream(final InputStream inputStream, From 66338dd9c4bf1e76be830749d9bc93c7be35cb2a Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Mon, 16 Feb 2015 19:16:49 +0000 Subject: [PATCH 140/189] Add tests for IllegalStateException git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660182 13f79535-47bb-0310-9956-ffa450edef68 --- .../compressors/DetectCompressorTestCase.java | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/src/test/java/org/apache/commons/compress/compressors/DetectCompressorTestCase.java b/src/test/java/org/apache/commons/compress/compressors/DetectCompressorTestCase.java index 9554bddc028..6902209fe08 100644 --- a/src/test/java/org/apache/commons/compress/compressors/DetectCompressorTestCase.java +++ b/src/test/java/org/apache/commons/compress/compressors/DetectCompressorTestCase.java @@ -115,6 +115,31 @@ public void testDetection() throws Exception { } } + public void testOverride() { + CompressorStreamFactory fac = new CompressorStreamFactory(); + assertFalse(fac.getDecompressConcatenated()); + fac.setDecompressConcatenated(true); + assertTrue(fac.getDecompressConcatenated()); + + fac = new CompressorStreamFactory(false); + assertFalse(fac.getDecompressConcatenated()); + try { + fac.setDecompressConcatenated(true); + fail("Expected IllegalStateException"); + } catch (IllegalStateException ise) { + // expected + } + + fac = new CompressorStreamFactory(true); + assertTrue(fac.getDecompressConcatenated()); + try { + fac.setDecompressConcatenated(true); + fail("Expected IllegalStateException"); + } catch (IllegalStateException ise) { + // expected + } + } + public void testMutiples() throws Exception { for(int i=0; i Date: Mon, 16 Feb 2015 23:13:01 +0000 Subject: [PATCH 141/189] COMPRESS-306 ArchiveStreamFactory fails to pass on the encoding when creating some streams. 
git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660245 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 9 + .../archivers/ArchiveStreamFactory.java | 52 ++-- .../cpio/CpioArchiveInputStream.java | 10 +- .../cpio/CpioArchiveOutputStream.java | 10 +- .../dump/DumpArchiveInputStream.java | 12 +- .../archivers/jar/JarArchiveInputStream.java | 16 ++ .../archivers/jar/JarArchiveOutputStream.java | 12 + .../archivers/tar/TarArchiveInputStream.java | 14 +- .../archivers/tar/TarArchiveOutputStream.java | 12 +- .../archivers/zip/ZipArchiveInputStream.java | 8 + .../archivers/ArchiveStreamFactoryTest.java | 254 ++++++++++++++++++ 11 files changed, 372 insertions(+), 37 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 428e0c7183a..0e6db4c47c2 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -54,6 +54,15 @@ breaks backwards compatibility for code which used the old package. This also changes the superclass of ZCompressorInputStream. "> + + ArchiveStreamFactory fails to pass on the encoding when creating some streams. + * ArjArchiveInputStream + * CpioArchiveInputStream + * DumpArchiveInputStream + * JarArchiveInputStream + * TarArchiveInputStream + * JarArchiveOutputStream + Restore immutability/thread-safety to ArchiveStreamFactory. The class is now immutable provided that the method setEntryEncoding is not used. diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java index 4aefb29b8a6..c73e665f117 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java +++ b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java @@ -150,10 +150,10 @@ public ArchiveStreamFactory(String encoding) { } /** - * Returns the encoding to use for arj, zip, dump, cpio and tar - * files, or null for the default. 
+ * Returns the encoding to use for arj, jar, zip, dump, cpio and tar + * files, or null for the archiver default. * - * @return entry encoding, or null + * @return entry encoding, or null for the archiver default * @since 1.5 */ public String getEntryEncoding() { @@ -161,9 +161,9 @@ public String getEntryEncoding() { } /** - * Sets the encoding to use for arj, zip, dump, cpio and tar files. Use null for the default. + * Sets the encoding to use for arj, jar, zip, dump, cpio and tar files. Use null for the archiver default. * - * @param entryEncoding the entry encoding, null uses the default. + * @param entryEncoding the entry encoding, null uses the archiver default. * @since 1.5 * @deprecated 1.10 use {@link #ArchiveStreamFactory(String)} to specify the encoding * @throws IllegalStateException if the constructor {@link #ArchiveStreamFactory(String)} @@ -227,7 +227,11 @@ public ArchiveInputStream createArchiveInputStream( } } if (JAR.equalsIgnoreCase(archiverName)) { - return new JarArchiveInputStream(in); + if (entryEncoding != null) { + return new JarArchiveInputStream(in, entryEncoding); + } else { + return new JarArchiveInputStream(in); + } } if (CPIO.equalsIgnoreCase(archiverName)) { if (entryEncoding != null) { @@ -254,7 +258,7 @@ public ArchiveInputStream createArchiveInputStream( * Create an archive output stream from an archiver name and an output stream. * * @param archiverName the archive name, - * i.e. {@value #AR}, {@value #ZIP}, {@value #TAR}, {@value #JAR}, {@value #CPIO} or {@value #SEVEN_Z} + * i.e. 
{@value #AR}, {@value #ZIP}, {@value #TAR}, {@value #JAR} or {@value #CPIO} * @param out the output stream * @return the archive output stream * @throws ArchiveException if the archiver name is not known @@ -290,7 +294,11 @@ public ArchiveOutputStream createArchiveOutputStream( } } if (JAR.equalsIgnoreCase(archiverName)) { - return new JarArchiveOutputStream(out); + if (entryEncoding != null) { + return new JarArchiveOutputStream(out, entryEncoding); + } else { + return new JarArchiveOutputStream(out); + } } if (CPIO.equalsIgnoreCase(archiverName)) { if (entryEncoding != null) { @@ -339,13 +347,25 @@ public ArchiveInputStream createArchiveInputStream(final InputStream in) return new ZipArchiveInputStream(in); } } else if (JarArchiveInputStream.matches(signature, signatureLength)) { - return new JarArchiveInputStream(in); + if (entryEncoding != null) { + return new JarArchiveInputStream(in, entryEncoding); + } else { + return new JarArchiveInputStream(in); + } } else if (ArArchiveInputStream.matches(signature, signatureLength)) { return new ArArchiveInputStream(in); } else if (CpioArchiveInputStream.matches(signature, signatureLength)) { - return new CpioArchiveInputStream(in); + if (entryEncoding != null) { + return new CpioArchiveInputStream(in, entryEncoding); + } else { + return new CpioArchiveInputStream(in); + } } else if (ArjArchiveInputStream.matches(signature, signatureLength)) { - return new ArjArchiveInputStream(in); + if (entryEncoding != null) { + return new ArjArchiveInputStream(in, entryEncoding); + } else { + return new ArjArchiveInputStream(in); + } } else if (SevenZFile.matches(signature, signatureLength)) { throw new StreamingNotSupportedException(SEVEN_Z); } @@ -356,7 +376,7 @@ public ArchiveInputStream createArchiveInputStream(final InputStream in) signatureLength = IOUtils.readFully(in, dumpsig); in.reset(); if (DumpArchiveInputStream.matches(dumpsig, signatureLength)) { - return new DumpArchiveInputStream(in); + return new 
DumpArchiveInputStream(in, entryEncoding); } // Tar needs an even bigger buffer to check the signature; read the first block @@ -365,11 +385,7 @@ public ArchiveInputStream createArchiveInputStream(final InputStream in) signatureLength = IOUtils.readFully(in, tarheader); in.reset(); if (TarArchiveInputStream.matches(tarheader, signatureLength)) { - if (entryEncoding != null) { - return new TarArchiveInputStream(in, entryEncoding); - } else { - return new TarArchiveInputStream(in); - } + return new TarArchiveInputStream(in, entryEncoding); } // COMPRESS-117 - improve auto-recognition if (signatureLength >= 512) { @@ -378,7 +394,7 @@ public ArchiveInputStream createArchiveInputStream(final InputStream in) tais = new TarArchiveInputStream(new ByteArrayInputStream(tarheader)); // COMPRESS-191 - verify the header checksum if (tais.getNextTarEntry().isCheckSumOK()) { - return new TarArchiveInputStream(in); + return new TarArchiveInputStream(in, encoding); } } catch (Exception e) { // NOPMD // can generate IllegalArgumentException as well diff --git a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java index 0d7e4ba1fff..915b56e54b7 100644 --- a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java @@ -91,7 +91,10 @@ public class CpioArchiveInputStream extends ArchiveInputStream implements /** * The encoding to use for filenames and labels. 
*/ - private final ZipEncoding encoding; + private final ZipEncoding zipEncoding; + + // the provided encoding (for unit tests) + final String encoding; /** * Construct the cpio input stream with a blocksize of {@link @@ -150,7 +153,8 @@ public CpioArchiveInputStream(final InputStream in, int blockSize) { public CpioArchiveInputStream(final InputStream in, int blockSize, String encoding) { this.in = in; this.blockSize = blockSize; - this.encoding = ZipEncodingHelper.getZipEncoding(encoding); + this.encoding = encoding; + this.zipEncoding = ZipEncodingHelper.getZipEncoding(encoding); } /** @@ -444,7 +448,7 @@ private String readCString(final int length) throws IOException { byte tmpBuffer[] = new byte[length - 1]; readFully(tmpBuffer, 0, tmpBuffer.length); this.in.read(); - return encoding.decode(tmpBuffer); + return zipEncoding.decode(tmpBuffer); } /** diff --git a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStream.java index ff86ddf9f34..fc829ffab9a 100644 --- a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStream.java @@ -92,7 +92,10 @@ public class CpioArchiveOutputStream extends ArchiveOutputStream implements /** * The encoding to use for filenames and labels. 
*/ - private final ZipEncoding encoding; + private final ZipEncoding zipEncoding; + + // the provided encoding (for unit tests) + final String encoding; /** * Construct the cpio output stream with a specified format, a @@ -157,7 +160,8 @@ public CpioArchiveOutputStream(final OutputStream out, final short format, } this.entryFormat = format; this.blockSize = blockSize; - this.encoding = ZipEncodingHelper.getZipEncoding(encoding); + this.encoding = encoding; + this.zipEncoding = ZipEncodingHelper.getZipEncoding(encoding); } /** @@ -534,7 +538,7 @@ private void writeAsciiLong(final long number, final int length, * @throws IOException if the string couldn't be written */ private void writeCString(final String str) throws IOException { - ByteBuffer buf = encoding.encode(str); + ByteBuffer buf = zipEncoding.encode(str); final int len = buf.limit() - buf.position(); out.write(buf.array(), buf.arrayOffset(), len); out.write('\0'); diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java index 6381e899893..09431a4031c 100644 --- a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java @@ -74,7 +74,10 @@ public class DumpArchiveInputStream extends ArchiveInputStream { /** * The encoding to use for filenames and labels. 
*/ - private final ZipEncoding encoding; + private final ZipEncoding zipEncoding; + + // the provided encoding (for unit tests) + final String encoding; /** * Constructor using the platform's default encoding for file @@ -99,7 +102,8 @@ public DumpArchiveInputStream(InputStream is, String encoding) throws ArchiveException { this.raw = new TapeInputStream(is); this.hasHitEOF = false; - this.encoding = ZipEncodingHelper.getZipEncoding(encoding); + this.encoding = encoding; + this.zipEncoding = ZipEncodingHelper.getZipEncoding(encoding); try { // read header, verify it's a dump archive. @@ -110,7 +114,7 @@ public DumpArchiveInputStream(InputStream is, String encoding) } // get summary information - summary = new DumpArchiveSummary(headerBytes, this.encoding); + summary = new DumpArchiveSummary(headerBytes, this.zipEncoding); // reset buffer with actual block size. raw.resetBlockSize(summary.getNTRec(), summary.isCompressed()); @@ -351,7 +355,7 @@ private void readDirectoryEntry(DumpArchiveEntry entry) byte type = blockBuffer[i + 6]; - String name = DumpArchiveUtil.decode(encoding, blockBuffer, i + 8, blockBuffer[i + 7]); + String name = DumpArchiveUtil.decode(zipEncoding, blockBuffer, i + 8, blockBuffer[i + 7]); if (".".equals(name) || "..".equals(name)) { // do nothing... diff --git a/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveInputStream.java index d051a4b6d88..1ebac2fe730 100644 --- a/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveInputStream.java @@ -32,10 +32,26 @@ */ public class JarArchiveInputStream extends ZipArchiveInputStream { + /** + * Creates an instance from the input stream using the default encoding. 
+ * + * @param inputStream the input stream to wrap + */ public JarArchiveInputStream( final InputStream inputStream ) { super(inputStream); } + + /** + * Creates an instance from the input stream using the specified encoding. + * + * @param inputStream the input stream to wrap + * @param encoding the encoding to use + * @since 1.10 + */ + public JarArchiveInputStream( final InputStream inputStream, final String encoding ) { + super(inputStream, encoding); + } + public JarArchiveEntry getNextJarEntry() throws IOException { ZipArchiveEntry entry = getNextZipEntry(); return entry == null ? null : new JarArchiveEntry(entry); diff --git a/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveOutputStream.java index f372ad7606a..96d0fbfe20e 100644 --- a/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveOutputStream.java @@ -41,6 +41,18 @@ public JarArchiveOutputStream(final OutputStream out) { super(out); } + /** + * Create an instance that wraps the output stream using the provided encoding. + * + * @param out the output stream to wrap + * @param encoding the encoding to use. Use null for the platform default. 
+ * @since 1.10 + */ + public JarArchiveOutputStream(final OutputStream out, final String encoding) { + super(out); + setEncoding(encoding); + } + // @throws ClassCastException if entry is not an instance of ZipArchiveEntry @Override public void putArchiveEntry(ArchiveEntry ze) throws IOException { diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java index 545d15c50d1..c5570071e0d 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java @@ -73,7 +73,10 @@ public class TarArchiveInputStream extends ArchiveInputStream { private TarArchiveEntry currEntry; /** The encoding of the file */ - private final ZipEncoding encoding; + private final ZipEncoding zipEncoding; + + // the provided encoding (for unit tests) + final String encoding; /** * Constructor for TarInputStream. 
@@ -137,7 +140,8 @@ public TarArchiveInputStream(InputStream is, int blockSize, int recordSize, String encoding) { this.is = is; this.hasHitEOF = false; - this.encoding = ZipEncodingHelper.getZipEncoding(encoding); + this.encoding = encoding; + this.zipEncoding = ZipEncodingHelper.getZipEncoding(encoding); this.recordSize = recordSize; this.blockSize = blockSize; } @@ -271,7 +275,7 @@ public TarArchiveEntry getNextTarEntry() throws IOException { } try { - currEntry = new TarArchiveEntry(headerBuf, encoding); + currEntry = new TarArchiveEntry(headerBuf, zipEncoding); } catch (IllegalArgumentException e) { IOException ioe = new IOException("Error detected parsing the header"); ioe.initCause(e); @@ -289,7 +293,7 @@ public TarArchiveEntry getNextTarEntry() throws IOException { // entry return null; } - currEntry.setLinkName(encoding.decode(longLinkData)); + currEntry.setLinkName(zipEncoding.decode(longLinkData)); } if (currEntry.isGNULongNameEntry()) { @@ -300,7 +304,7 @@ public TarArchiveEntry getNextTarEntry() throws IOException { // entry return null; } - currEntry.setName(encoding.decode(longNameData)); + currEntry.setName(zipEncoding.decode(longNameData)); } if (currEntry.isPaxHeader()){ // Process Pax headers diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java index 1b134af74b6..43525c8c6b8 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java @@ -84,7 +84,10 @@ public class TarArchiveOutputStream extends ArchiveOutputStream { private final OutputStream out; - private final ZipEncoding encoding; + private final ZipEncoding zipEncoding; + + // the provided encoding (for unit tests) + final String encoding; private boolean addPaxHeadersForNonAsciiNames = false; private static final ZipEncoding ASCII = @@ 
-150,7 +153,8 @@ public TarArchiveOutputStream(OutputStream os, int blockSize, int recordSize) { public TarArchiveOutputStream(OutputStream os, int blockSize, int recordSize, String encoding) { out = new CountingOutputStream(os); - this.encoding = ZipEncodingHelper.getZipEncoding(encoding); + this.encoding = encoding; + this.zipEncoding = ZipEncodingHelper.getZipEncoding(encoding); this.assemLen = 0; this.assemBuf = new byte[recordSize]; @@ -301,7 +305,7 @@ && handleLongName(entry, linkName, paxHeaders, "linkpath", writePaxHeaders(entry, entryName, paxHeaders); } - entry.writeEntryHeader(recordBuf, encoding, + entry.writeEntryHeader(recordBuf, zipEncoding, bigNumberMode == BIGNUMBER_STAR); writeRecord(recordBuf); @@ -660,7 +664,7 @@ private boolean handleLongName(TarArchiveEntry entry , String name, Map paxHeaders, String paxHeaderName, byte linkType, String fieldName) throws IOException { - final ByteBuffer encodedName = encoding.encode(name); + final ByteBuffer encodedName = zipEncoding.encode(name); final int len = encodedName.limit() - encodedName.position(); if (len >= TarConstants.NAMELEN) { diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java index 226d29e5986..7a69141c296 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java @@ -61,6 +61,9 @@ public class ZipArchiveInputStream extends ArchiveInputStream { /** The zip encoding to use for filenames and the file comment. */ private final ZipEncoding zipEncoding; + // the provided encoding (for unit tests) + final String encoding; + /** Whether to look for and use Unicode extra fields. 
*/ private final boolean useUnicodeExtraFields; @@ -139,6 +142,10 @@ public class ZipArchiveInputStream extends ArchiveInputStream { private int entriesRead = 0; + /** + * Create an instance using UTF-8 encoding + * @param inputStream the stream to wrap + */ public ZipArchiveInputStream(InputStream inputStream) { this(inputStream, ZipEncodingHelper.UTF8); } @@ -175,6 +182,7 @@ public ZipArchiveInputStream(InputStream inputStream, String encoding, boolean useUnicodeExtraFields, boolean allowStoredEntriesWithDataDescriptor) { + this.encoding = encoding; zipEncoding = ZipEncodingHelper.getZipEncoding(encoding); this.useUnicodeExtraFields = useUnicodeExtraFields; in = new PushbackInputStream(inputStream, buf.capacity()); diff --git a/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java b/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java index d7b47b1dac6..0352fed0e66 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java @@ -18,7 +18,9 @@ */ package org.apache.commons.compress.archivers; +import static org.apache.commons.compress.AbstractTestCase.getFile; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -26,8 +28,15 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.FileInputStream; +import java.io.IOException; import java.io.InputStream; +import java.lang.reflect.Field; +import org.apache.commons.compress.archivers.arj.ArjArchiveInputStream; +import org.apache.commons.compress.archivers.cpio.CpioArchiveInputStream; +import org.apache.commons.compress.archivers.dump.DumpArchiveInputStream; +import org.apache.commons.compress.archivers.jar.JarArchiveInputStream; +import 
org.apache.commons.compress.archivers.tar.TarArchiveInputStream; import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream; import org.junit.Test; @@ -147,4 +156,249 @@ public void skipsPK00Prefix() throws Exception { fis.close(); } } + + @Test + public void testEncodingCtor() { + ArchiveStreamFactory fac = new ArchiveStreamFactory(); + assertNull(fac.getEntryEncoding()); + fac = new ArchiveStreamFactory(null); + assertNull(fac.getEntryEncoding()); + fac = new ArchiveStreamFactory("UTF-8"); + assertEquals("UTF-8", fac.getEntryEncoding()); + } + + @Test + @SuppressWarnings("deprecation") + public void testEncodingDeprecated() { + ArchiveStreamFactory fac = new ArchiveStreamFactory(); + assertNull(fac.getEntryEncoding()); + fac.setEntryEncoding("UTF-8"); + assertEquals("UTF-8", fac.getEntryEncoding()); + fac.setEntryEncoding("US_ASCII"); + assertEquals("US_ASCII", fac.getEntryEncoding()); + fac = new ArchiveStreamFactory("UTF-8"); + assertEquals("UTF-8", fac.getEntryEncoding()); + try { + fac.setEntryEncoding("US_ASCII"); + fail("Expected IllegalStateException"); + } catch (IllegalStateException ise) { + // expected + } + } + + static class TestData { + final String testFile; + final String expectedEncoding; + final ArchiveStreamFactory fac; + final String fieldName; + final String type; + final boolean hasOutputStream; + TestData(String testFile, String type, boolean hasOut, String expectedEncoding, ArchiveStreamFactory fac, String fieldName) { + this.testFile = testFile; + this.expectedEncoding = expectedEncoding; + this.fac = fac; + this.fieldName = fieldName; + this.type = type; + this.hasOutputStream = hasOut; + } + } + + @SuppressWarnings("deprecation") // test of deprecated method + static ArchiveStreamFactory getFactory(String entryEncoding) { + ArchiveStreamFactory fac = new ArchiveStreamFactory(); + fac.setEntryEncoding(entryEncoding); + return fac; + } + // The different factory types + private static final ArchiveStreamFactory FACTORY = new 
ArchiveStreamFactory(); + private static final ArchiveStreamFactory FACTORY_UTF8 = new ArchiveStreamFactory("UTF-8"); + private static final ArchiveStreamFactory FACTORY_ASCII = new ArchiveStreamFactory("ASCII"); + private static final ArchiveStreamFactory FACTORY_SET_UTF8 = getFactory("UTF-8"); + private static final ArchiveStreamFactory FACTORY_SET_ASCII = getFactory("ASCII"); + + // Default encoding if none is provided (not even null) + // The test currently assumes that the output default is the same as the input default + private static final String ARJ_DEFAULT; + private static final String DUMP_DEFAULT; + + private static final String ZIP_DEFAULT = getField(new ZipArchiveInputStream(null),"encoding"); + private static final String CPIO_DEFAULT = getField(new CpioArchiveInputStream(null),"encoding"); + private static final String TAR_DEFAULT = getField(new TarArchiveInputStream(null),"encoding"); + private static final String JAR_DEFAULT = getField(new JarArchiveInputStream(null),"encoding"); + + static { + String dflt; + dflt = "??"; + try { + dflt = getField(new ArjArchiveInputStream(new FileInputStream(getFile("bla.arj"))), "charsetName"); + } catch (ArchiveException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + ARJ_DEFAULT = dflt; + dflt = "??"; + try { + dflt = getField(new DumpArchiveInputStream(new FileInputStream(getFile("bla.dump"))), "encoding"); + } catch (ArchiveException e) { + e.printStackTrace(); + } catch (Exception e) { + e.printStackTrace(); + } + DUMP_DEFAULT = dflt; + } + + static final TestData[] TESTS = { + new TestData("bla.arj", ArchiveStreamFactory.ARJ, false, ARJ_DEFAULT, FACTORY, "charsetName"), + new TestData("bla.arj", ArchiveStreamFactory.ARJ, false, "UTF-8", FACTORY_UTF8, "charsetName"), + new TestData("bla.arj", ArchiveStreamFactory.ARJ, false, "ASCII", FACTORY_ASCII, "charsetName"), + new TestData("bla.arj", ArchiveStreamFactory.ARJ, false, "UTF-8", FACTORY_SET_UTF8, "charsetName"), + new 
TestData("bla.arj", ArchiveStreamFactory.ARJ, false, "ASCII", FACTORY_SET_ASCII, "charsetName"), + + new TestData("bla.cpio", ArchiveStreamFactory.CPIO, true, CPIO_DEFAULT, FACTORY, "encoding"), + new TestData("bla.cpio", ArchiveStreamFactory.CPIO, true, "UTF-8", FACTORY_UTF8, "encoding"), + new TestData("bla.cpio", ArchiveStreamFactory.CPIO, true, "ASCII", FACTORY_ASCII, "encoding"), + new TestData("bla.cpio", ArchiveStreamFactory.CPIO, true, "UTF-8", FACTORY_SET_UTF8, "encoding"), + new TestData("bla.cpio", ArchiveStreamFactory.CPIO, true, "ASCII", FACTORY_SET_ASCII, "encoding"), + + new TestData("bla.dump", ArchiveStreamFactory.DUMP, false, DUMP_DEFAULT, FACTORY, "encoding"), + new TestData("bla.dump", ArchiveStreamFactory.DUMP, false, "UTF-8", FACTORY_UTF8, "encoding"), + new TestData("bla.dump", ArchiveStreamFactory.DUMP, false, "ASCII", FACTORY_ASCII, "encoding"), + new TestData("bla.dump", ArchiveStreamFactory.DUMP, false, "UTF-8", FACTORY_SET_UTF8, "encoding"), + new TestData("bla.dump", ArchiveStreamFactory.DUMP, false, "ASCII", FACTORY_SET_ASCII, "encoding"), + + new TestData("bla.tar", ArchiveStreamFactory.TAR, true, TAR_DEFAULT, FACTORY, "encoding"), + new TestData("bla.tar", ArchiveStreamFactory.TAR, true, "UTF-8", FACTORY_UTF8, "encoding"), + new TestData("bla.tar", ArchiveStreamFactory.TAR, true, "ASCII", FACTORY_ASCII, "encoding"), + new TestData("bla.tar", ArchiveStreamFactory.TAR, true, "UTF-8", FACTORY_SET_UTF8, "encoding"), + new TestData("bla.tar", ArchiveStreamFactory.TAR, true, "ASCII", FACTORY_SET_ASCII, "encoding"), + + new TestData("bla.jar", ArchiveStreamFactory.JAR, true, JAR_DEFAULT, FACTORY, "encoding"), + new TestData("bla.jar", ArchiveStreamFactory.JAR, true, "UTF-8", FACTORY_UTF8, "encoding"), + new TestData("bla.jar", ArchiveStreamFactory.JAR, true, "ASCII", FACTORY_ASCII, "encoding"), + new TestData("bla.jar", ArchiveStreamFactory.JAR, true, "UTF-8", FACTORY_SET_UTF8, "encoding"), + new TestData("bla.jar", 
ArchiveStreamFactory.JAR, true, "ASCII", FACTORY_SET_ASCII, "encoding"), + + new TestData("bla.zip", ArchiveStreamFactory.ZIP, true, ZIP_DEFAULT, FACTORY, "encoding"), + new TestData("bla.zip", ArchiveStreamFactory.ZIP, true, "UTF-8", FACTORY_UTF8, "encoding"), + new TestData("bla.zip", ArchiveStreamFactory.ZIP, true, "ASCII", FACTORY_ASCII, "encoding"), + new TestData("bla.zip", ArchiveStreamFactory.ZIP, true, "UTF-8", FACTORY_SET_UTF8, "encoding"), + new TestData("bla.zip", ArchiveStreamFactory.ZIP, true, "ASCII", FACTORY_SET_ASCII, "encoding"), + }; + + @Test + public void testEncodingInputStreamAutodetect() throws Exception { + int failed = 0; + for(int i = 1; i <= TESTS.length; i++) { + TestData test = TESTS[i-1]; + ArchiveInputStream ais = getInputStreamFor(test.testFile, test.fac); + final String field = getField(ais,test.fieldName); + if (!eq(test.expectedEncoding,field)) { + System.out.println("Failed test " + i + ". expected: " + test.expectedEncoding + " actual: " + field + " type: " + test.type); + failed++; + } + } + if (failed > 0) { + fail("Tests failed: " + failed); + } + } + + @Test + public void testEncodingInputStream() throws Exception { + int failed = 0; + for(int i = 1; i <= TESTS.length; i++) { + TestData test = TESTS[i-1]; + ArchiveInputStream ais = getInputStreamFor(test.type, test.testFile, test.fac); + final String field = getField(ais,test.fieldName); + if (!eq(test.expectedEncoding,field)) { + System.out.println("Failed test " + i + ". 
expected: " + test.expectedEncoding + " actual: " + field + " type: " + test.type); + failed++; + } + } + if (failed > 0) { + fail("Tests failed: " + failed); + } + } + + @Test + public void testEncodingOutputStream() throws Exception { + int failed = 0; + for(int i = 1; i <= TESTS.length; i++) { + TestData test = TESTS[i-1]; + if (test.hasOutputStream) { + ArchiveOutputStream ais = getOutputStreamFor(test.type, test.fac); + final String field = getField(ais, test.fieldName); + if (!eq(test.expectedEncoding, field)) { + System.out.println("Failed test " + i + ". expected: " + test.expectedEncoding + " actual: " + field + " type: " + test.type); + failed++; + } + } + } + if (failed > 0) { + fail("Tests failed: " + failed); + } + } + + // equals allowing null + private static boolean eq(String exp, String act) { + if (exp == null) { + return act == null; + } + return exp.equals(act); + } + + private static String getField(Object instance, String name) { + Class cls = instance.getClass(); + Field fld; + try { + fld = cls.getDeclaredField(name); + } catch (NoSuchFieldException nsfe) { + try { + fld = cls.getSuperclass().getDeclaredField(name); + } catch (NoSuchFieldException e) { + System.out.println("Cannot find " + name + " in class " + instance.getClass().getSimpleName()); + return "??"; + } + } + boolean isAccessible = fld.isAccessible(); + try { + if (!isAccessible) { + fld.setAccessible(true); + } + final Object object = fld.get(instance); + if (object instanceof String || object == null) { + return (String) object; + } else { + System.out.println("Wrong type: " + object.getClass().getCanonicalName() + " for " + name + " in class " + instance.getClass().getSimpleName()); + return "??"; + } + } catch (Exception e) { + e.printStackTrace(); + return "??"; + } finally { + if (!isAccessible) { + fld.setAccessible(isAccessible); + } + } + } + + private ArchiveInputStream getInputStreamFor(String resource, ArchiveStreamFactory factory) + throws IOException, 
ArchiveException { + return factory.createArchiveInputStream( + new BufferedInputStream(new FileInputStream( + getFile(resource)))); + } + + private ArchiveInputStream getInputStreamFor(String type, String resource, ArchiveStreamFactory factory) + throws IOException, ArchiveException { + return factory.createArchiveInputStream( + type, + new BufferedInputStream(new FileInputStream( + getFile(resource)))); + } + + private ArchiveOutputStream getOutputStreamFor(String type, ArchiveStreamFactory factory) + throws IOException, ArchiveException { + return factory.createArchiveOutputStream(type, new ByteArrayOutputStream()); + } } From 74e1dd612e3c06d049033d13004d93d004226a65 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Mon, 16 Feb 2015 23:19:37 +0000 Subject: [PATCH 142/189] Ensure "mvn javadoc:javadoc" knows about the JCIP tags git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660246 13f79535-47bb-0310-9956-ffa450edef68 --- pom.xml | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/pom.xml b/pom.xml index cfb8effd846..9e658d8b7ca 100644 --- a/pom.xml +++ b/pom.xml @@ -146,6 +146,43 @@ jar, tar, zip, dump, 7z, arj. 
+ + + + + org.apache.maven.plugins + maven-javadoc-plugin + + true + ${maven.compiler.source} + ${commons.encoding} + ${commons.docEncoding} + true + + ${commons.javadoc.java.link} + ${commons.javadoc.javaee.link} + + + + Immutable + a + This class is immutable + + + NotThreadSafe + a + This class is not thread-safe + + + ThreadSafe + a + This class is thread-safe + + + + + + From 2a95bf89813ebf321955d191afda7e0b08d69710 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Mon, 16 Feb 2015 23:21:14 +0000 Subject: [PATCH 143/189] Javadoc git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660247 13f79535-47bb-0310-9956-ffa450edef68 --- .../apache/commons/compress/archivers/ArchiveStreamFactory.java | 1 + .../commons/compress/compressors/CompressorStreamFactory.java | 1 + 2 files changed, 2 insertions(+) diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java index c73e665f117..78f254d74b8 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java +++ b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java @@ -72,6 +72,7 @@ * in.close(); * * @Immutable provided that the deprecated method setEntryEncoding is not used. + * @ThreadSafe even if the deprecated method setEntryEncoding is used */ public class ArchiveStreamFactory { diff --git a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java index 677da4e28df..4bc18102461 100644 --- a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java +++ b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java @@ -63,6 +63,7 @@ * in.close(); * * @Immutable provided that the deprecated method setDecompressConcatenated is not used. 
+ * @ThreadSafe even if the deprecated method setDecompressConcatenated is used */ public class CompressorStreamFactory { From 58fa9a673547c884cd1f11a8927414e563480358 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Tue, 17 Feb 2015 00:40:20 +0000 Subject: [PATCH 144/189] COMPRESS-305 Convert all tests to JUnit4 style All but AbstractTestCase and subclasses git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660261 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/ExceptionMessageTest.java | 18 ++++++---- .../jar/JarArchiveOutputStreamTest.java | 8 +++-- .../memory/MemoryArchiveTestCase.java | 7 ++-- .../archivers/tar/SparseFilesTest.java | 6 ++-- .../archivers/tar/TarArchiveEntryTest.java | 13 ++++++-- .../compress/archivers/tar/TarUtilsTest.java | 23 +++++++++++-- .../archivers/zip/AsiExtraFieldTest.java | 17 ++++++---- .../archivers/zip/BinaryTreeTest.java | 7 ++-- .../compress/archivers/zip/BitStreamTest.java | 11 +++++-- .../archivers/zip/CircularBufferTest.java | 8 +++-- .../archivers/zip/EncryptedArchiveTest.java | 7 ++-- .../archivers/zip/ExplodeSupportTest.java | 11 +++++-- .../archivers/zip/ExtraFieldUtilsTest.java | 19 +++++++---- .../archivers/zip/GeneralPurposeBitTest.java | 13 ++++++-- .../zip/Maven221MultiVolumeTest.java | 10 +++--- ...ip64ExtendedInformationExtraFieldTest.java | 21 +++++++++--- .../archivers/zip/ZipArchiveEntryTest.java | 20 +++++++---- .../zip/ZipEightByteIntegerTest.java | 18 ++++++---- .../archivers/zip/ZipEncodingTest.java | 22 ++++++++----- .../compress/archivers/zip/ZipFileTest.java | 20 ++++++++--- .../compress/archivers/zip/ZipLongTest.java | 18 ++++++---- .../compress/archivers/zip/ZipShortTest.java | 18 ++++++---- .../compress/archivers/zip/ZipUtilTest.java | 33 ++++++++++--------- .../compressors/BZip2UtilsTestCase.java | 8 +++-- .../compressors/DetectCompressorTestCase.java | 13 ++++---- .../compressors/GzipUtilsTestCase.java | 8 +++-- .../compressors/xz/XZUtilsTestCase.java | 13 
++++++-- 27 files changed, 267 insertions(+), 123 deletions(-) diff --git a/src/test/java/org/apache/commons/compress/archivers/ExceptionMessageTest.java b/src/test/java/org/apache/commons/compress/archivers/ExceptionMessageTest.java index 9197fe5d478..36289bef94d 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ExceptionMessageTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/ExceptionMessageTest.java @@ -1,7 +1,7 @@ package org.apache.commons.compress.archivers; -import org.junit.Assert; -import junit.framework.TestCase; +import static org.junit.Assert.*; +import org.junit.Test; /* * Licensed to the Apache Software Foundation (ASF) under one @@ -21,7 +21,7 @@ * specific language governing permissions and limitations * under the License. */ -public class ExceptionMessageTest extends TestCase { +public class ExceptionMessageTest { private static final String ARCHIVER_NULL_MESSAGE = "Archivername must not be null."; @@ -30,45 +30,49 @@ public class ExceptionMessageTest extends TestCase { private static final String OUTPUTSTREAM_NULL_MESSAGE = "OutputStream must not be null."; + @Test public void testMessageWhenArchiverNameIsNull_1(){ try{ new ArchiveStreamFactory().createArchiveInputStream(null, System.in); fail("Should raise an IllegalArgumentException."); }catch (IllegalArgumentException e) { - Assert.assertEquals(ARCHIVER_NULL_MESSAGE, e.getMessage()); + assertEquals(ARCHIVER_NULL_MESSAGE, e.getMessage()); } catch (ArchiveException e) { fail("ArchiveException not expected"); } } + @Test public void testMessageWhenInputStreamIsNull(){ try{ new ArchiveStreamFactory().createArchiveInputStream("zip", null); fail("Should raise an IllegalArgumentException."); }catch (IllegalArgumentException e) { - Assert.assertEquals(INPUTSTREAM_NULL_MESSAGE, e.getMessage()); + assertEquals(INPUTSTREAM_NULL_MESSAGE, e.getMessage()); } catch (ArchiveException e) { fail("ArchiveException not expected"); } } + @Test public void 
testMessageWhenArchiverNameIsNull_2(){ try{ new ArchiveStreamFactory().createArchiveOutputStream(null, System.out); fail("Should raise an IllegalArgumentException."); } catch (IllegalArgumentException e) { - Assert.assertEquals(ARCHIVER_NULL_MESSAGE, e.getMessage()); + assertEquals(ARCHIVER_NULL_MESSAGE, e.getMessage()); } catch (ArchiveException e){ fail("ArchiveException not expected"); } } + @Test public void testMessageWhenOutputStreamIsNull(){ try{ new ArchiveStreamFactory().createArchiveOutputStream("zip", null); fail("Should raise an IllegalArgumentException."); } catch (IllegalArgumentException e) { - Assert.assertEquals(OUTPUTSTREAM_NULL_MESSAGE, e.getMessage()); + assertEquals(OUTPUTSTREAM_NULL_MESSAGE, e.getMessage()); } catch (ArchiveException e) { fail("ArchiveException not expected"); } diff --git a/src/test/java/org/apache/commons/compress/archivers/jar/JarArchiveOutputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/jar/JarArchiveOutputStreamTest.java index e97fb1a3bf8..111ebcad074 100644 --- a/src/test/java/org/apache/commons/compress/archivers/jar/JarArchiveOutputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/jar/JarArchiveOutputStreamTest.java @@ -18,20 +18,22 @@ */ package org.apache.commons.compress.archivers.jar; +import static org.junit.Assert.*; + import java.io.File; import java.io.FileOutputStream; import java.io.IOException; -import junit.framework.TestCase; - +import org.junit.Test; import org.apache.commons.compress.AbstractTestCase; import org.apache.commons.compress.archivers.zip.JarMarker; import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; import org.apache.commons.compress.archivers.zip.ZipExtraField; import org.apache.commons.compress.archivers.zip.ZipFile; -public class JarArchiveOutputStreamTest extends TestCase { +public class JarArchiveOutputStreamTest { + @Test public void testJarMarker() throws IOException { File testArchive = File.createTempFile("jar-aostest", 
".jar"); testArchive.deleteOnExit(); diff --git a/src/test/java/org/apache/commons/compress/archivers/memory/MemoryArchiveTestCase.java b/src/test/java/org/apache/commons/compress/archivers/memory/MemoryArchiveTestCase.java index a5a910b0973..238675d0c03 100644 --- a/src/test/java/org/apache/commons/compress/archivers/memory/MemoryArchiveTestCase.java +++ b/src/test/java/org/apache/commons/compress/archivers/memory/MemoryArchiveTestCase.java @@ -18,14 +18,17 @@ */ package org.apache.commons.compress.archivers.memory; +import static org.junit.Assert.*; + import java.io.IOException; -import junit.framework.TestCase; +import org.junit.Test; import org.apache.commons.compress.archivers.ArchiveEntry; -public final class MemoryArchiveTestCase extends TestCase { +public final class MemoryArchiveTestCase { + @Test public void testReading() throws IOException { final MemoryArchiveInputStream is = new MemoryArchiveInputStream(new String[][] { diff --git a/src/test/java/org/apache/commons/compress/archivers/tar/SparseFilesTest.java b/src/test/java/org/apache/commons/compress/archivers/tar/SparseFilesTest.java index 2c0acc868b3..e8427dc23cf 100644 --- a/src/test/java/org/apache/commons/compress/archivers/tar/SparseFilesTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/tar/SparseFilesTest.java @@ -19,14 +19,16 @@ package org.apache.commons.compress.archivers.tar; import static org.apache.commons.compress.AbstractTestCase.getFile; +import static org.junit.Assert.*; +import org.junit.Test; import java.io.File; import java.io.FileInputStream; -import junit.framework.TestCase; -public class SparseFilesTest extends TestCase { +public class SparseFilesTest { + @Test public void testOldGNU() throws Throwable { File file = getFile("oldgnu_sparse.tar"); TarArchiveInputStream tin = null; diff --git a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveEntryTest.java b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveEntryTest.java index 
6225f987174..4d0e85b1cf7 100644 --- a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveEntryTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveEntryTest.java @@ -18,17 +18,18 @@ package org.apache.commons.compress.archivers.tar; +import static org.junit.Assert.*; +import org.junit.Test; + import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.util.Locale; -import junit.framework.TestCase; - import org.apache.commons.compress.AbstractTestCase; -public class TarArchiveEntryTest extends TestCase implements TarConstants { +public class TarArchiveEntryTest implements TarConstants { private static final String OS = System.getProperty("os.name").toLowerCase(Locale.ENGLISH); @@ -40,11 +41,13 @@ public class TarArchiveEntryTest extends TestCase implements TarConstants { * * @see "https://issues.apache.org/jira/browse/SANDBOX-284" */ + @Test public void testFileSystemRoot() { TarArchiveEntry t = new TarArchiveEntry(new File(ROOT)); assertEquals("/", t.getName()); } + @Test public void testTarFileWithFSRoot() throws IOException { File f = File.createTempFile("taetest", ".tar"); f.deleteOnExit(); @@ -104,6 +107,7 @@ public void testTarFileWithFSRoot() throws IOException { } } + @Test public void testMaxFileSize(){ TarArchiveEntry t = new TarArchiveEntry(""); t.setSize(0); @@ -117,18 +121,21 @@ public void testMaxFileSize(){ t.setSize(0100000000000L); } + @Test public void testLinkFlagConstructor() { TarArchiveEntry t = new TarArchiveEntry("/foo", LF_GNUTYPE_LONGNAME); assertGnuMagic(t); assertEquals("foo", t.getName()); } + @Test public void testLinkFlagConstructorWithFileFlag() { TarArchiveEntry t = new TarArchiveEntry("/foo", LF_NORMAL); assertPosixMagic(t); assertEquals("foo", t.getName()); } + @Test public void testLinkFlagConstructorWithPreserve() { TarArchiveEntry t = new TarArchiveEntry("/foo", LF_GNUTYPE_LONGNAME, true); diff --git 
a/src/test/java/org/apache/commons/compress/archivers/tar/TarUtilsTest.java b/src/test/java/org/apache/commons/compress/archivers/tar/TarUtilsTest.java index 8f3888b2c1b..846b2f6a16b 100644 --- a/src/test/java/org/apache/commons/compress/archivers/tar/TarUtilsTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/tar/TarUtilsTest.java @@ -18,15 +18,20 @@ package org.apache.commons.compress.archivers.tar; -import junit.framework.TestCase; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; import org.apache.commons.compress.archivers.zip.ZipEncoding; import org.apache.commons.compress.archivers.zip.ZipEncodingHelper; import org.apache.commons.compress.utils.CharsetNames; +import org.junit.Test; -public class TarUtilsTest extends TestCase { +public class TarUtilsTest { + @Test public void testName(){ byte [] buff = new byte[20]; String sb1 = "abcdefghijklmnopqrstuvwxyz"; @@ -43,6 +48,7 @@ public void testName(){ assertEquals(sb1, sb2); } + @Test public void testParseOctal() throws Exception{ long value; byte [] buffer; @@ -69,6 +75,7 @@ public void testParseOctal() throws Exception{ assertEquals(0, value); } + @Test public void testParseOctalInvalid() throws Exception{ byte [] buffer; buffer=new byte[0]; // empty byte array @@ -109,6 +116,7 @@ private void checkRoundTripOctal(final long value) { checkRoundTripOctal(value, TarConstants.SIZELEN); } + @Test public void testRoundTripOctal() { checkRoundTripOctal(0); checkRoundTripOctal(1); @@ -129,10 +137,12 @@ private void checkRoundTripOctalOrBinary(final long value, final int bufsize) { assertEquals(value,parseValue); } + @Test public void testRoundTripOctalOrBinary8() { testRoundTripOctalOrBinary(8); } + @Test public void testRoundTripOctalOrBinary12() { testRoundTripOctalOrBinary(12); checkRoundTripOctalOrBinary(Long.MAX_VALUE, 12); @@ -148,6 +158,7 @@ private void 
testRoundTripOctalOrBinary(int length) { } // Check correct trailing bytes are generated + @Test public void testTrailers() { byte [] buffer = new byte[12]; TarUtils.formatLongOctalBytes(123, buffer, 0, buffer.length); @@ -163,12 +174,14 @@ public void testTrailers() { assertEquals('3', buffer[buffer.length-3]); // end of number } + @Test public void testNegative() throws Exception { byte [] buffer = new byte[22]; TarUtils.formatUnsignedOctalString(-1, buffer, 0, buffer.length); assertEquals("1777777777777777777777", new String(buffer, CharsetNames.UTF_8)); } + @Test public void testOverflow() throws Exception { byte [] buffer = new byte[8-1]; // a lot of the numbers have 8-byte buffers (nul term) TarUtils.formatUnsignedOctalString(07777777L, buffer, 0, buffer.length); @@ -180,6 +193,7 @@ public void testOverflow() throws Exception { } } + @Test public void testRoundTripNames(){ checkName(""); checkName("The quick brown fox\n"); @@ -187,6 +201,7 @@ public void testRoundTripNames(){ // checkName("\0"); // does not work, because NUL is ignored } + @Test public void testRoundEncoding() throws Exception { // COMPRESS-114 ZipEncoding enc = ZipEncodingHelper.getZipEncoding(CharsetNames.ISO_8859_1); @@ -202,6 +217,7 @@ private void checkName(String string) { assertEquals(string, TarUtils.parseName(buff, 0, len)); } + @Test public void testReadNegativeBinary8Byte() { byte[] b = new byte[] { (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, @@ -210,6 +226,7 @@ public void testReadNegativeBinary8Byte() { assertEquals(-3601l, TarUtils.parseOctalOrBinary(b, 0, 8)); } + @Test public void testReadNegativeBinary12Byte() { byte[] b = new byte[] { (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, @@ -220,6 +237,7 @@ public void testReadNegativeBinary12Byte() { } + @Test public void testWriteNegativeBinary8Byte() { byte[] b = new byte[] { (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, @@ -229,6 +247,7 @@ public void testWriteNegativeBinary8Byte() { } // 
https://issues.apache.org/jira/browse/COMPRESS-191 + @Test public void testVerifyHeaderCheckSum() { byte[] valid = { // from bla.tar 116, 101, 115, 116, 49, 46, 120, 109, 108, 0, 0, 0, 0, 0, 0, diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/AsiExtraFieldTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/AsiExtraFieldTest.java index 2f05d26cca1..d4c14ec3138 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/AsiExtraFieldTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/AsiExtraFieldTest.java @@ -18,20 +18,20 @@ package org.apache.commons.compress.archivers.zip; -import junit.framework.TestCase; +import static org.junit.Assert.*; + +import org.junit.Test; /** - * JUnit 3 testcases for org.apache.commons.compress.archivers.zip.AsiExtraField. + * JUnit testcases for org.apache.commons.compress.archivers.zip.AsiExtraField. * */ -public class AsiExtraFieldTest extends TestCase implements UnixStat { - public AsiExtraFieldTest(String name) { - super(name); - } +public class AsiExtraFieldTest implements UnixStat { /** * Test file mode magic. - */ + */ + @Test public void testModes() { AsiExtraField a = new AsiExtraField(); a.setMode(0123); @@ -45,6 +45,7 @@ public void testModes() { /** * Test content. 
*/ + @Test public void testContent() { AsiExtraField a = new AsiExtraField(); a.setMode(0123); @@ -79,6 +80,7 @@ public void testContent() { /** * Test reparse */ + @Test public void testReparse() throws Exception { // CRC manually calculated, sorry byte[] data = {(byte)0xC6, 0x02, 0x78, (byte)0xB6, // CRC @@ -139,6 +141,7 @@ public void testReparse() throws Exception { } } + @Test public void testClone() { AsiExtraField s1 = new AsiExtraField(); s1.setUserId(42); diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/BinaryTreeTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/BinaryTreeTest.java index 2d5cb60f96d..de0e48865fa 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/BinaryTreeTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/BinaryTreeTest.java @@ -19,14 +19,17 @@ package org.apache.commons.compress.archivers.zip; +import static org.junit.Assert.*; + import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; -import junit.framework.TestCase; +import org.junit.Test; -public class BinaryTreeTest extends TestCase { +public class BinaryTreeTest { + @Test public void testDecode() throws IOException { InputStream in = new ByteArrayInputStream(new byte[] { 0x02, 0x42, 0x01, 0x13 }); diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/BitStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/BitStreamTest.java index 0aad22637f8..5bc182cee8a 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/BitStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/BitStreamTest.java @@ -19,12 +19,15 @@ package org.apache.commons.compress.archivers.zip; +import static org.junit.Assert.*; + import java.io.ByteArrayInputStream; -import junit.framework.TestCase; +import org.junit.Test; -public class BitStreamTest extends TestCase { +public class BitStreamTest { + @Test public void testEmptyStream() 
throws Exception { BitStream stream = new BitStream(new ByteArrayInputStream(new byte[0])); assertEquals("next bit", -1, stream.nextBit()); @@ -33,6 +36,7 @@ public void testEmptyStream() throws Exception { stream.close(); } + @Test public void testStream() throws Exception { BitStream stream = new BitStream(new ByteArrayInputStream(new byte[] { (byte) 0xEA, 0x03 })); @@ -58,6 +62,7 @@ public void testStream() throws Exception { stream.close(); } + @Test public void testNextByteFromEmptyStream() throws Exception { BitStream stream = new BitStream(new ByteArrayInputStream(new byte[0])); assertEquals("next byte", -1, stream.nextByte()); @@ -65,6 +70,7 @@ public void testNextByteFromEmptyStream() throws Exception { stream.close(); } + @Test public void testReadAlignedBytes() throws Exception { BitStream stream = new BitStream(new ByteArrayInputStream(new byte[] { (byte) 0xEA, 0x35 })); assertEquals("next byte", 0xEA, stream.nextByte()); @@ -73,6 +79,7 @@ public void testReadAlignedBytes() throws Exception { stream.close(); } + @Test public void testNextByte() throws Exception { BitStream stream = new BitStream(new ByteArrayInputStream(new byte[] { (byte) 0xEA, 0x35 })); assertEquals("bit 0", 0, stream.nextBit()); diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/CircularBufferTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/CircularBufferTest.java index ffcebf29c14..cf5e1c899f7 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/CircularBufferTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/CircularBufferTest.java @@ -19,10 +19,13 @@ package org.apache.commons.compress.archivers.zip; -import junit.framework.TestCase; +import static org.junit.Assert.*; -public class CircularBufferTest extends TestCase { +import org.junit.Test; +public class CircularBufferTest { + + @Test public void testPutAndGet() throws Exception { int size = 16; CircularBuffer buffer = new CircularBuffer(size); @@ -40,6 
+43,7 @@ public void testPutAndGet() throws Exception { assertFalse("available", buffer.available()); } + @Test public void testCopy() throws Exception { CircularBuffer buffer = new CircularBuffer(16); diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/EncryptedArchiveTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/EncryptedArchiveTest.java index a98850c2ec4..cd88db0b1f8 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/EncryptedArchiveTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/EncryptedArchiveTest.java @@ -19,15 +19,17 @@ package org.apache.commons.compress.archivers.zip; import static org.apache.commons.compress.AbstractTestCase.getFile; +import static org.junit.Assert.*; import java.io.File; import java.io.FileInputStream; import java.io.IOException; -import junit.framework.TestCase; +import org.junit.Test; -public class EncryptedArchiveTest extends TestCase { +public class EncryptedArchiveTest { + @Test public void testReadPasswordEncryptedEntryViaZipFile() throws IOException { File file = getFile("password-encrypted.zip"); @@ -50,6 +52,7 @@ public void testReadPasswordEncryptedEntryViaZipFile() } } + @Test public void testReadPasswordEncryptedEntryViaStream() throws IOException { File file = getFile("password-encrypted.zip"); diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ExplodeSupportTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ExplodeSupportTest.java index 86bb2833665..febf3b0a4cd 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ExplodeSupportTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ExplodeSupportTest.java @@ -19,6 +19,7 @@ package org.apache.commons.compress.archivers.zip; +import static org.junit.Assert.*; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; @@ -27,11 +28,11 @@ import java.util.zip.CRC32; import 
java.util.zip.CheckedOutputStream; -import junit.framework.TestCase; import org.apache.commons.compress.utils.BoundedInputStream; import org.apache.commons.compress.utils.IOUtils; +import org.junit.Test; -public class ExplodeSupportTest extends TestCase { +public class ExplodeSupportTest { private void testArchiveWithImplodeCompression(String filename, String entryName) throws IOException { ZipFile zip = new ZipFile(new File(filename)); @@ -50,14 +51,17 @@ private void testArchiveWithImplodeCompression(String filename, String entryName zip.close(); } + @Test public void testArchiveWithImplodeCompression4K2Trees() throws IOException { testArchiveWithImplodeCompression("target/test-classes/archives/imploding-4Kdict-2trees.zip", "HEADER.TXT"); } + @Test public void testArchiveWithImplodeCompression8K3Trees() throws IOException { testArchiveWithImplodeCompression("target/test-classes/archives/imploding-8Kdict-3trees.zip", "LICENSE.TXT"); } + @Test public void testTikaTestArchive() throws IOException { testArchiveWithImplodeCompression("target/test-classes/moby-imploded.zip", "README"); } @@ -80,14 +84,17 @@ private void testZipStreamWithImplodeCompression(String filename, String entryNa assertEquals("CRC32", entry.getCrc(), out.getChecksum().getValue()); } + @Test public void testZipStreamWithImplodeCompression4K2Trees() throws IOException { testZipStreamWithImplodeCompression("target/test-classes/archives/imploding-4Kdict-2trees.zip", "HEADER.TXT"); } + @Test public void testZipStreamWithImplodeCompression8K3Trees() throws IOException { testZipStreamWithImplodeCompression("target/test-classes/archives/imploding-8Kdict-3trees.zip", "LICENSE.TXT"); } + @Test public void testTikaTestStream() throws IOException { testZipStreamWithImplodeCompression("target/test-classes/moby-imploded.zip", "README"); } diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtilsTest.java 
b/src/test/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtilsTest.java index 7f770532e6e..0573fa4965d 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtilsTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtilsTest.java @@ -18,16 +18,16 @@ package org.apache.commons.compress.archivers.zip; -import junit.framework.TestCase; +import static org.junit.Assert.*; + +import org.junit.Before; +import org.junit.Test; /** - * JUnit 3 testcases for org.apache.commons.compress.archivers.zip.ExtraFieldUtils. + * JUnit testcases for org.apache.commons.compress.archivers.zip.ExtraFieldUtils. * */ -public class ExtraFieldUtilsTest extends TestCase implements UnixStat { - public ExtraFieldUtilsTest(String name) { - super(name); - } +public class ExtraFieldUtilsTest implements UnixStat { /** * Header-ID of a ZipExtraField not supported by Commons Compress. @@ -42,7 +42,7 @@ public ExtraFieldUtilsTest(String name) { private byte[] data; private byte[] aLocal; - @Override + @Before public void setUp() { a = new AsiExtraField(); a.setMode(0755); @@ -70,6 +70,7 @@ public void setUp() { /** * test parser. 
*/ + @Test public void testParse() throws Exception { ZipExtraField[] ze = ExtraFieldUtils.parse(data); assertEquals("number of fields", 2, ze.length); @@ -93,6 +94,7 @@ public void testParse() throws Exception { } } + @Test public void testParseWithRead() throws Exception { ZipExtraField[] ze = ExtraFieldUtils.parse(data, true, @@ -123,6 +125,7 @@ public void testParseWithRead() throws Exception { } } + @Test public void testParseWithSkip() throws Exception { ZipExtraField[] ze = ExtraFieldUtils.parse(data, true, @@ -148,6 +151,7 @@ public void testParseWithSkip() throws Exception { /** * Test merge methods */ + @Test public void testMerge() { byte[] local = ExtraFieldUtils.mergeLocalFileDataData(new ZipExtraField[] {a, dummy}); @@ -174,6 +178,7 @@ public void testMerge() { } + @Test public void testMergeWithUnparseableData() throws Exception { ZipExtraField d = new UnparseableExtraFieldData(); byte[] b = UNRECOGNIZED_HEADER.getBytes(); diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBitTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBitTest.java index b33f6f385de..4d62ff0904f 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBitTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBitTest.java @@ -19,12 +19,15 @@ package org.apache.commons.compress.archivers.zip; +import static org.junit.Assert.*; + import java.util.Arrays; -import junit.framework.TestCase; +import org.junit.Test; -public class GeneralPurposeBitTest extends TestCase { +public class GeneralPurposeBitTest { + @Test public void testDefaults() { assertFalse(new GeneralPurposeBit().usesDataDescriptor()); assertFalse(new GeneralPurposeBit().usesUTF8ForNames()); @@ -34,6 +37,7 @@ public void testDefaults() { assertTrue(Arrays.equals(b, new GeneralPurposeBit().encode())); } + @Test public void testParseEdgeCases() { assertFalse(GeneralPurposeBit.parse(new byte[2], 0) 
.usesDataDescriptor()); @@ -57,6 +61,7 @@ public void testParseEdgeCases() { .usesStrongEncryption()); } + @Test public void testDataDescriptor() { byte[] flags = new byte[] {(byte) 8, (byte) 0}; assertTrue(GeneralPurposeBit.parse(flags, 0).usesDataDescriptor()); @@ -65,6 +70,7 @@ public void testDataDescriptor() { assertTrue(Arrays.equals(flags, b.encode())); } + @Test public void testLanguageEncodingFlag() { byte[] flags = new byte[] {(byte) 0, (byte) 8}; assertTrue(GeneralPurposeBit.parse(flags, 0).usesUTF8ForNames()); @@ -73,6 +79,7 @@ public void testLanguageEncodingFlag() { assertTrue(Arrays.equals(flags, b.encode())); } + @Test public void testEncryption() { byte[] flags = new byte[] {(byte) 1, (byte) 0}; assertTrue(GeneralPurposeBit.parse(flags, 0).usesEncryption()); @@ -81,6 +88,7 @@ public void testEncryption() { assertTrue(Arrays.equals(flags, b.encode())); } + @Test public void testStrongEncryption() { byte[] flags = new byte[] {(byte) 65, (byte) 0}; assertTrue(GeneralPurposeBit.parse(flags, 0).usesStrongEncryption()); @@ -93,6 +101,7 @@ public void testStrongEncryption() { assertFalse(GeneralPurposeBit.parse(flags, 0).usesStrongEncryption()); } + @Test public void testClone() { GeneralPurposeBit b = new GeneralPurposeBit(); b.useStrongEncryption(true); diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/Maven221MultiVolumeTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/Maven221MultiVolumeTest.java index 2c1902f05e1..51910a6db72 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/Maven221MultiVolumeTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/Maven221MultiVolumeTest.java @@ -19,17 +19,17 @@ package org.apache.commons.compress.archivers.zip; import static org.apache.commons.compress.AbstractTestCase.getFile; +import static org.junit.Assert.*; import java.io.File; import java.io.FileInputStream; import java.io.IOException; -import junit.framework.TestCase; - import 
org.apache.commons.compress.archivers.ArchiveEntry; +import org.junit.Test; /** - * JUnit 3 testcase for a multi-volume zip file. + * JUnit testcase for a multi-volume zip file. * * Some tools (like 7-zip) allow users to split a large archives into 'volumes' * with a given size to fit them into multiple cds, usb drives, or emails with @@ -42,7 +42,7 @@ * yields an exception. * */ -public class Maven221MultiVolumeTest extends TestCase { +public class Maven221MultiVolumeTest { private static final String [] ENTRIES = new String [] { "apache-maven-2.2.1/", @@ -65,6 +65,7 @@ public class Maven221MultiVolumeTest extends TestCase { private static final String LAST_ENTRY_NAME = "apache-maven-2.2.1/lib/maven-2.2.1-uber.jar"; + @Test public void testRead7ZipMultiVolumeArchiveForStream() throws IOException { FileInputStream archive = @@ -111,6 +112,7 @@ public void testRead7ZipMultiVolumeArchiveForStream() throws IOException { } } + @Test public void testRead7ZipMultiVolumeArchiveForFile() throws IOException { File file = getFile("apache-maven-2.2.1.zip.001"); try { diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/Zip64ExtendedInformationExtraFieldTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/Zip64ExtendedInformationExtraFieldTest.java index 7d7ca2b1811..07eb700cdf3 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/Zip64ExtendedInformationExtraFieldTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/Zip64ExtendedInformationExtraFieldTest.java @@ -18,15 +18,14 @@ package org.apache.commons.compress.archivers.zip; +import static org.junit.Assert.*; + import java.math.BigInteger; import java.util.zip.ZipException; -import junit.framework.TestCase; +import org.junit.Test; -public class Zip64ExtendedInformationExtraFieldTest extends TestCase { - public Zip64ExtendedInformationExtraFieldTest(String name) { - super(name); - } +public class Zip64ExtendedInformationExtraFieldTest { private static final 
ZipEightByteInteger SIZE = new ZipEightByteInteger(0x12345678); @@ -38,6 +37,7 @@ public Zip64ExtendedInformationExtraFieldTest(String name) { .setBit(3)); private static final ZipLong DISK = new ZipLong(0x12); + @Test public void testWriteCDOnlySizes() { Zip64ExtendedInformationExtraField f = new Zip64ExtendedInformationExtraField(SIZE, CSIZE); @@ -47,6 +47,7 @@ public void testWriteCDOnlySizes() { checkSizes(b); } + @Test public void testWriteCDSizeAndOffset() { Zip64ExtendedInformationExtraField f = new Zip64ExtendedInformationExtraField(SIZE, CSIZE, OFF, null); @@ -57,6 +58,7 @@ public void testWriteCDSizeAndOffset() { checkOffset(b, 16); } + @Test public void testWriteCDSizeOffsetAndDisk() { Zip64ExtendedInformationExtraField f = new Zip64ExtendedInformationExtraField(SIZE, CSIZE, OFF, DISK); @@ -68,6 +70,7 @@ public void testWriteCDSizeOffsetAndDisk() { checkDisk(b, 24); } + @Test public void testWriteCDSizeAndDisk() { Zip64ExtendedInformationExtraField f = new Zip64ExtendedInformationExtraField(SIZE, CSIZE, null, DISK); @@ -78,6 +81,7 @@ public void testWriteCDSizeAndDisk() { checkDisk(b, 16); } + @Test public void testReadLFHSizesOnly() throws ZipException { Zip64ExtendedInformationExtraField f = new Zip64ExtendedInformationExtraField(); @@ -91,6 +95,7 @@ public void testReadLFHSizesOnly() throws ZipException { assertNull(f.getDiskStartNumber()); } + @Test public void testReadLFHSizesAndOffset() throws ZipException { Zip64ExtendedInformationExtraField f = new Zip64ExtendedInformationExtraField(); @@ -105,6 +110,7 @@ public void testReadLFHSizesAndOffset() throws ZipException { assertNull(f.getDiskStartNumber()); } + @Test public void testReadLFHSizesOffsetAndDisk() throws ZipException { Zip64ExtendedInformationExtraField f = new Zip64ExtendedInformationExtraField(); @@ -120,6 +126,7 @@ public void testReadLFHSizesOffsetAndDisk() throws ZipException { assertEquals(DISK, f.getDiskStartNumber()); } + @Test public void testReadLFHSizesAndDisk() throws 
ZipException { Zip64ExtendedInformationExtraField f = new Zip64ExtendedInformationExtraField(); @@ -134,6 +141,7 @@ public void testReadLFHSizesAndDisk() throws ZipException { assertEquals(DISK, f.getDiskStartNumber()); } + @Test public void testReadCDSizesOffsetAndDisk() throws ZipException { Zip64ExtendedInformationExtraField f = new Zip64ExtendedInformationExtraField(); @@ -149,6 +157,7 @@ public void testReadCDSizesOffsetAndDisk() throws ZipException { assertEquals(DISK, f.getDiskStartNumber()); } + @Test public void testReadCDSizesAndOffset() throws ZipException { Zip64ExtendedInformationExtraField f = new Zip64ExtendedInformationExtraField(); @@ -163,6 +172,7 @@ public void testReadCDSizesAndOffset() throws ZipException { assertNull(f.getDiskStartNumber()); } + @Test public void testReadCDSomethingAndDisk() throws ZipException { Zip64ExtendedInformationExtraField f = new Zip64ExtendedInformationExtraField(); @@ -176,6 +186,7 @@ public void testReadCDSomethingAndDisk() throws ZipException { assertEquals(DISK, f.getDiskStartNumber()); } + @Test public void testReparseCDSingleEightByteData() throws ZipException { Zip64ExtendedInformationExtraField f = new Zip64ExtendedInformationExtraField(); diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryTest.java index ee9c10a4896..a0b402411f6 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryTest.java @@ -18,24 +18,23 @@ package org.apache.commons.compress.archivers.zip; -import junit.framework.TestCase; +import static org.junit.Assert.*; import java.io.ByteArrayOutputStream; import java.util.zip.ZipEntry; +import org.junit.Test; + /** - * JUnit 3 testcases for org.apache.commons.compress.archivers.zip.ZipEntry. + * JUnit testcases for org.apache.commons.compress.archivers.zip.ZipEntry. 
* */ -public class ZipArchiveEntryTest extends TestCase { - - public ZipArchiveEntryTest(String name) { - super(name); - } +public class ZipArchiveEntryTest { /** * test handling of extra fields */ + @Test public void testExtraFields() { AsiExtraField a = new AsiExtraField(); a.setDirectory(true); @@ -89,6 +88,7 @@ public void testExtraFields() { /** * test handling of extra fields via central directory */ + @Test public void testExtraFieldMerging() { AsiExtraField a = new AsiExtraField(); a.setDirectory(true); @@ -135,6 +135,7 @@ public void testExtraFieldMerging() { /** * test handling of extra fields */ + @Test public void testAddAsFirstExtraField() { AsiExtraField a = new AsiExtraField(); a.setDirectory(true); @@ -170,6 +171,7 @@ public void testAddAsFirstExtraField() { assertSame(a, result[2]); } + @Test public void testUnixMode() { ZipArchiveEntry ze = new ZipArchiveEntry("foo"); assertEquals(0, ze.getPlatform()); @@ -205,6 +207,7 @@ public void testUnixMode() { * COMPRESS-93. */ + @Test public void testCompressionMethod() throws Exception { ZipArchiveOutputStream zos = new ZipArchiveOutputStream(new ByteArrayOutputStream()); @@ -232,6 +235,7 @@ public void testCompressionMethod() throws Exception { * COMPRESS-94. */ + @Test public void testNotEquals() { ZipArchiveEntry entry1 = new ZipArchiveEntry("foo"); ZipArchiveEntry entry2 = new ZipArchiveEntry("bar"); @@ -242,6 +246,7 @@ public void testNotEquals() { * Tests comment's influence on equals comparisons. 
* @see "https://issues.apache.org/jira/browse/COMPRESS-187" */ + @Test public void testNullCommentEqualsEmptyComment() { ZipArchiveEntry entry1 = new ZipArchiveEntry("foo"); ZipArchiveEntry entry2 = new ZipArchiveEntry("foo"); @@ -254,6 +259,7 @@ public void testNullCommentEqualsEmptyComment() { assertFalse(entry2.equals(entry3)); } + @Test public void testCopyConstructor() throws Exception { ZipArchiveEntry archiveEntry = new ZipArchiveEntry("fred"); archiveEntry.setUnixMode(0664); diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipEightByteIntegerTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipEightByteIntegerTest.java index 7f09083e376..8df0edb43fb 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipEightByteIntegerTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipEightByteIntegerTest.java @@ -18,23 +18,22 @@ package org.apache.commons.compress.archivers.zip; +import static org.junit.Assert.*; + import java.math.BigInteger; -import junit.framework.TestCase; +import org.junit.Test; /** - * JUnit 3 testcases for org.apache.commons.compress.archivers.zip.ZipEightByteInteger. + * JUnit testcases for org.apache.commons.compress.archivers.zip.ZipEightByteInteger. * */ -public class ZipEightByteIntegerTest extends TestCase { - - public ZipEightByteIntegerTest(String name) { - super(name); - } +public class ZipEightByteIntegerTest { /** * Test conversion to bytes. */ + @Test public void testLongToBytes() { ZipEightByteInteger zl = new ZipEightByteInteger(0xAB12345678l); byte[] result = zl.getBytes(); @@ -52,6 +51,7 @@ public void testLongToBytes() { /** * Test conversion from bytes. */ + @Test public void testLongFromBytes() { byte[] val = new byte[] {0x78, 0x56, 0x34, 0x12, (byte) 0xAB, 0x00, 0x00, 0x00}; ZipEightByteInteger zl = new ZipEightByteInteger(val); @@ -61,6 +61,7 @@ public void testLongFromBytes() { /** * Test conversion to bytes. 
*/ + @Test public void testBIToBytes() { ZipEightByteInteger zl = new ZipEightByteInteger(BigInteger.valueOf(Long.MAX_VALUE) @@ -80,6 +81,7 @@ public void testBIToBytes() { /** * Test conversion from bytes. */ + @Test public void testBIFromBytes() { byte[] val = new byte[] {(byte) 0xFE, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF}; ZipEightByteInteger zl = new ZipEightByteInteger(val); @@ -91,6 +93,7 @@ public void testBIFromBytes() { /** * Test the contract of the equals method. */ + @Test public void testEquals() { ZipEightByteInteger zl = new ZipEightByteInteger(0x12345678); ZipEightByteInteger zl2 = new ZipEightByteInteger(0x12345678); @@ -110,6 +113,7 @@ public void testEquals() { /** * Test sign handling. */ + @Test public void testSign() { ZipEightByteInteger zl = new ZipEightByteInteger(new byte[] {(byte)0xFF, (byte)0xFF, (byte)0xFF, (byte)0xFF, (byte)0xFF, (byte)0xFF, (byte)0xFF, (byte)0xFF}); assertEquals(BigInteger.valueOf(Long.MAX_VALUE).shiftLeft(1).setBit(0), diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipEncodingTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipEncodingTest.java index e15a83e638a..a2d204f5448 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipEncodingTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipEncodingTest.java @@ -22,14 +22,17 @@ import java.io.IOException; import java.nio.ByteBuffer; -import junit.framework.TestCase; - import org.apache.commons.compress.utils.CharsetNames; +import static org.junit.Assert.*; + +import org.junit.Assert; +import org.junit.Test; + /** * Test zip encodings. */ -public class ZipEncodingTest extends TestCase { +public class ZipEncodingTest { private static final String UNENC_STRING = "\u2016"; // stress test for internal grow method. 
@@ -39,16 +42,19 @@ public class ZipEncodingTest extends TestCase { private static final String BAD_STRING_ENC = "%U2016%U2015%U2016%U2015%U2016%U2015%U2016%U2015%U2016%U2015%U2016"; + @Test public void testSimpleCp437Encoding() throws IOException { doSimpleEncodingTest("Cp437", null); } + @Test public void testSimpleCp850Encoding() throws IOException { doSimpleEncodingTest("Cp850", null); } + @Test public void testNioCp1252Encoding() throws IOException { // CP1252 has some undefined code points, these are // the defined ones @@ -108,11 +114,11 @@ public void testNioCp1252Encoding() throws IOException { private static void assertEquals(byte[] expected, ByteBuffer actual) { - assertEquals(expected.length, actual.limit()); + Assert.assertEquals(expected.length, actual.limit()); for (byte anExpected : expected) { byte a = actual.get(); - assertEquals(anExpected, a); + Assert.assertEquals(anExpected, a); } } @@ -132,15 +138,15 @@ private void doSimpleEncodingTest(String name, byte[] testBytes) String decoded = enc.decode(testBytes); - assertEquals(true, enc.canEncode(decoded)); + assertTrue(enc.canEncode(decoded)); ByteBuffer encoded = enc.encode(decoded); assertEquals(testBytes, encoded); - assertEquals(false, enc.canEncode(UNENC_STRING)); + assertFalse(enc.canEncode(UNENC_STRING)); assertEquals("%U2016".getBytes(CharsetNames.US_ASCII), enc.encode(UNENC_STRING)); - assertEquals(false, enc.canEncode(BAD_STRING)); + assertFalse(enc.canEncode(BAD_STRING)); assertEquals(BAD_STRING_ENC.getBytes(CharsetNames.US_ASCII), enc.encode(BAD_STRING)); } diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java index 07acc38f59e..5fceaeb194d 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java @@ -19,7 +19,7 @@ package org.apache.commons.compress.archivers.zip; import 
static org.apache.commons.compress.AbstractTestCase.getFile; -import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.*; import java.io.File; import java.io.FileInputStream; @@ -32,17 +32,19 @@ import java.util.TreeMap; import java.util.zip.ZipEntry; -import junit.framework.TestCase; import org.apache.commons.compress.utils.IOUtils; +import org.junit.After; +import org.junit.Test; -public class ZipFileTest extends TestCase { +public class ZipFileTest { private ZipFile zf = null; - @Override + @After public void tearDown() { ZipFile.closeQuietly(zf); } + @Test public void testCDOrder() throws Exception { readOrderTest(); ArrayList l = Collections.list(zf.getEntries()); @@ -71,6 +73,7 @@ public void testCDOrder() throws Exception { assertEntryName(l, 22, "ZipFile"); } + @Test public void testPhysicalOrder() throws Exception { readOrderTest(); ArrayList l = Collections.list(zf.getEntriesInPhysicalOrder()); @@ -99,6 +102,7 @@ public void testPhysicalOrder() throws Exception { assertEntryName(l, 22, "ZipUtil"); } + @Test public void testDoubleClose() throws Exception { readOrderTest(); zf.close(); @@ -109,6 +113,7 @@ public void testDoubleClose() throws Exception { } } + @Test public void testReadingOfStoredEntry() throws Exception { File f = File.createTempFile("commons-compress-zipfiletest", ".zip"); f.deleteOnExit(); @@ -149,6 +154,7 @@ public void testReadingOfStoredEntry() throws Exception { /** * @see "https://issues.apache.org/jira/browse/COMPRESS-176" */ + @Test public void testWinzipBackSlashWorkaround() throws Exception { File archive = getFile("test-winzip.zip"); zf = new ZipFile(archive); @@ -161,6 +167,7 @@ public void testWinzipBackSlashWorkaround() throws Exception { * COMPRESS-208. 
*/ + @Test public void testSkipsPK00Prefix() throws Exception { File archive = getFile("COMPRESS-208.zip"); zf = new ZipFile(archive); @@ -168,6 +175,7 @@ public void testSkipsPK00Prefix() throws Exception { assertNotNull(zf.getEntry("test2.xml")); } + @Test public void testUnixSymlinkSampleFile() throws Exception { final String entryPrefix = "COMPRESS-214_unix_symlinks/"; final TreeMap expectedVals = new TreeMap(); @@ -210,6 +218,7 @@ public void testUnixSymlinkSampleFile() throws Exception { /** * @see "https://issues.apache.org/jira/browse/COMPRESS-227" */ + @Test public void testDuplicateEntry() throws Exception { File archive = getFile("COMPRESS-227.zip"); zf = new ZipFile(archive); @@ -229,6 +238,7 @@ public void testDuplicateEntry() throws Exception { /** * @see "https://issues.apache.org/jira/browse/COMPRESS-228" */ + @Test public void testExcessDataInZip64ExtraField() throws Exception { File archive = getFile("COMPRESS-228.zip"); zf = new ZipFile(archive); @@ -238,6 +248,7 @@ public void testExcessDataInZip64ExtraField() throws Exception { assertEquals(26101, ze.getSize()); } + @Test public void testUnshrinking() throws Exception { zf = new ZipFile(getFile("SHRUNK.ZIP")); ZipArchiveEntry test = zf.getEntry("TEST1.XML"); @@ -263,6 +274,7 @@ public void testUnshrinking() throws Exception { * COMPRESS-264. 
*/ + @Test public void testReadingOfFirstStoredEntry() throws Exception { File archive = getFile("COMPRESS-264.zip"); zf = new ZipFile(archive); diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipLongTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipLongTest.java index 21a4612d7a4..33a1b7b5cd1 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipLongTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipLongTest.java @@ -18,21 +18,20 @@ package org.apache.commons.compress.archivers.zip; -import junit.framework.TestCase; +import static org.junit.Assert.*; + +import org.junit.Test; /** - * JUnit 3 testcases for org.apache.commons.compress.archivers.zip.ZipLong. + * JUnit testcases for org.apache.commons.compress.archivers.zip.ZipLong. * */ -public class ZipLongTest extends TestCase { - - public ZipLongTest(String name) { - super(name); - } +public class ZipLongTest { /** * Test conversion to bytes. */ + @Test public void testToBytes() { ZipLong zl = new ZipLong(0x12345678); byte[] result = zl.getBytes(); @@ -46,6 +45,7 @@ public void testToBytes() { /** * Test conversion to bytes. */ + @Test public void testPut() { byte[] arr = new byte[5]; ZipLong.putLong(0x12345678, arr, 1); @@ -58,6 +58,7 @@ public void testPut() { /** * Test conversion from bytes. */ + @Test public void testFromBytes() { byte[] val = new byte[] {0x78, 0x56, 0x34, 0x12}; ZipLong zl = new ZipLong(val); @@ -67,6 +68,7 @@ public void testFromBytes() { /** * Test the contract of the equals method. */ + @Test public void testEquals() { ZipLong zl = new ZipLong(0x12345678); ZipLong zl2 = new ZipLong(0x12345678); @@ -86,11 +88,13 @@ public void testEquals() { /** * Test sign handling. 
*/ + @Test public void testSign() { ZipLong zl = new ZipLong(new byte[] {(byte)0xFF, (byte)0xFF, (byte)0xFF, (byte)0xFF}); assertEquals(0x00000000FFFFFFFFl, zl.getValue()); } + @Test public void testClone() { ZipLong s1 = new ZipLong(42); ZipLong s2 = (ZipLong) s1.clone(); diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipShortTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipShortTest.java index 8e77ff585d2..eb498baa2ca 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipShortTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipShortTest.java @@ -18,21 +18,20 @@ package org.apache.commons.compress.archivers.zip; -import junit.framework.TestCase; +import static org.junit.Assert.*; + +import org.junit.Test; /** - * JUnit 3 testcases for org.apache.commons.compress.archivers.zip.ZipShort. + * JUnit testcases for org.apache.commons.compress.archivers.zip.ZipShort. * */ -public class ZipShortTest extends TestCase { - - public ZipShortTest(String name) { - super(name); - } +public class ZipShortTest { /** * Test conversion to bytes. */ + @Test public void testToBytes() { ZipShort zs = new ZipShort(0x1234); byte[] result = zs.getBytes(); @@ -45,6 +44,7 @@ public void testToBytes() { /** * Test conversion to bytes. */ + @Test public void testPut() { byte[] arr = new byte[3]; ZipShort.putShort(0x1234, arr, 1); @@ -56,6 +56,7 @@ public void testPut() { /** * Test conversion from bytes. */ + @Test public void testFromBytes() { byte[] val = new byte[] {0x34, 0x12}; ZipShort zs = new ZipShort(val); @@ -65,6 +66,7 @@ public void testFromBytes() { /** * Test the contract of the equals method. */ + @Test public void testEquals() { ZipShort zs = new ZipShort(0x1234); ZipShort zs2 = new ZipShort(0x1234); @@ -84,11 +86,13 @@ public void testEquals() { /** * Test sign handling. 
*/ + @Test public void testSign() { ZipShort zs = new ZipShort(new byte[] {(byte)0xFF, (byte)0xFF}); assertEquals(0x0000FFFF, zs.getValue()); } + @Test public void testClone() { ZipShort s1 = new ZipShort(42); ZipShort s2 = (ZipShort) s1.clone(); diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java index ff29f2eb8ad..ea7e9a481b2 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipUtilTest.java @@ -18,7 +18,7 @@ package org.apache.commons.compress.archivers.zip; -import junit.framework.TestCase; +import static org.junit.Assert.*; import java.math.BigInteger; import java.util.Arrays; @@ -26,20 +26,16 @@ import java.util.Date; import java.util.TimeZone; -public class ZipUtilTest extends TestCase { +import org.junit.Before; +import org.junit.Test; + +public class ZipUtilTest { private Date time; private ZipLong zl; - /** - * Constructor - */ - public ZipUtilTest(String name) { - super(name); - } - - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { time = new Date(); Calendar cal = Calendar.getInstance(); cal.setTime(time); @@ -60,16 +56,13 @@ protected void setUp() throws Exception { zl = new ZipLong(result); } - @Override - protected void tearDown() throws Exception { - super.tearDown(); - } - + @Test public void testZipLong() throws Exception { ZipLong test = ZipUtil.toDosTime(time); assertEquals(test.getValue(), zl.getValue()); } + @Test public void testAdjustToLong() { assertEquals(Integer.MAX_VALUE, ZipUtil.adjustToLong(Integer.MAX_VALUE)); @@ -79,6 +72,7 @@ public void testAdjustToLong() { ZipUtil.adjustToLong(2 * Integer.MAX_VALUE)); } + @Test public void testMinTime(){ byte[] b1 = ZipUtil.toDosTime(0); byte b10 = b1[0]; // Save the first byte @@ -87,6 +81,7 @@ public void testMinTime(){ 
assertEquals(b10,b2[0]); // first byte should still be the same } + @Test public void testOutsideCalendar(){ byte[] b1 = ZipUtil.toDosTime(160441200000L); // 1.1..1975 assertEquals(0, b1[0]); @@ -95,6 +90,7 @@ public void testOutsideCalendar(){ assertEquals(0, b1[3]); } + @Test public void testInsideCalendar(){ TimeZone tz = TimeZone.getDefault(); long date = 476096400000L; // 1.1.1985, 10:00 am GMT @@ -105,6 +101,7 @@ public void testInsideCalendar(){ assertEquals(10, b1[3]); } + @Test public void testReverse() { byte[][] bTest = new byte[6][]; bTest[0] = new byte[]{}; @@ -131,6 +128,7 @@ public void testReverse() { } } + @Test public void testBigToLong() { BigInteger big1 = BigInteger.valueOf(1); BigInteger big2 = BigInteger.valueOf(Long.MAX_VALUE); @@ -157,6 +155,7 @@ public void testBigToLong() { } } + @Test public void testLongToBig() { long l0 = 0; long l1 = 1; @@ -185,6 +184,7 @@ public void testLongToBig() { } } + @Test public void testSignedByteToUnsignedInt() { // Yay, we can completely test all possible input values in this case! 
int expectedVal = 128; @@ -198,6 +198,7 @@ public void testSignedByteToUnsignedInt() { } } + @Test public void testUnsignedIntToSignedByte() { int unsignedVal = 128; for (int i = Byte.MIN_VALUE; i <= Byte.MAX_VALUE; i++) { diff --git a/src/test/java/org/apache/commons/compress/compressors/BZip2UtilsTestCase.java b/src/test/java/org/apache/commons/compress/compressors/BZip2UtilsTestCase.java index a745c4f6d53..27154efac91 100644 --- a/src/test/java/org/apache/commons/compress/compressors/BZip2UtilsTestCase.java +++ b/src/test/java/org/apache/commons/compress/compressors/BZip2UtilsTestCase.java @@ -18,12 +18,14 @@ */ package org.apache.commons.compress.compressors; -import junit.framework.TestCase; +import static org.junit.Assert.*; import org.apache.commons.compress.compressors.bzip2.BZip2Utils; +import org.junit.Test; -public class BZip2UtilsTestCase extends TestCase { +public class BZip2UtilsTestCase { + @Test public void testIsCompressedFilename() { assertFalse(BZip2Utils.isCompressedFilename("")); assertFalse(BZip2Utils.isCompressedFilename(".gz")); @@ -45,6 +47,7 @@ public void testIsCompressedFilename() { assertFalse(BZip2Utils.isCompressedFilename("x.tbz2.y")); } + @Test public void testGetUncompressedFilename() { assertEquals("", BZip2Utils.getUncompressedFilename("")); assertEquals(".bz2", BZip2Utils.getUncompressedFilename(".bz2")); @@ -63,6 +66,7 @@ public void testGetUncompressedFilename() { assertEquals("x.tbz2.y", BZip2Utils.getUncompressedFilename("x.tbz2.y")); } + @Test public void testGetCompressedFilename() { assertEquals(".bz2", BZip2Utils.getCompressedFilename("")); assertEquals(" .bz2", BZip2Utils.getCompressedFilename(" ")); diff --git a/src/test/java/org/apache/commons/compress/compressors/DetectCompressorTestCase.java b/src/test/java/org/apache/commons/compress/compressors/DetectCompressorTestCase.java index 6902209fe08..0fc84248b9f 100644 --- a/src/test/java/org/apache/commons/compress/compressors/DetectCompressorTestCase.java +++ 
b/src/test/java/org/apache/commons/compress/compressors/DetectCompressorTestCase.java @@ -19,14 +19,13 @@ package org.apache.commons.compress.compressors; import static org.apache.commons.compress.AbstractTestCase.getFile; +import static org.junit.Assert.*; import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; import java.io.FileInputStream; import java.io.IOException; -import junit.framework.TestCase; - import org.apache.commons.compress.compressors.CompressorException; import org.apache.commons.compress.compressors.CompressorInputStream; import org.apache.commons.compress.compressors.CompressorStreamFactory; @@ -34,13 +33,10 @@ import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; import org.apache.commons.compress.compressors.pack200.Pack200CompressorInputStream; import org.apache.commons.compress.compressors.xz.XZCompressorInputStream; +import org.junit.Test; @SuppressWarnings("deprecation") // deliberately tests setDecompressConcatenated -public final class DetectCompressorTestCase extends TestCase { - - public DetectCompressorTestCase(String name) { - super(name); - } +public final class DetectCompressorTestCase { final CompressorStreamFactory factory = new CompressorStreamFactory(); private static final CompressorStreamFactory factoryTrue = new CompressorStreamFactory(true); @@ -90,6 +86,7 @@ static class TestData { new TestData("multiple.xz", new char[]{'a'}, factory, false), }; + @Test public void testDetection() throws Exception { CompressorInputStream bzip2 = getStreamFor("bla.txt.bz2"); assertNotNull(bzip2); @@ -115,6 +112,7 @@ public void testDetection() throws Exception { } } + @Test public void testOverride() { CompressorStreamFactory fac = new CompressorStreamFactory(); assertFalse(fac.getDecompressConcatenated()); @@ -140,6 +138,7 @@ public void testOverride() { } } + @Test public void testMutiples() throws Exception { for(int i=0; i Date: Tue, 17 Feb 2015 11:16:34 +0000 Subject: [PATCH 145/189] archives 
directory is reserved for use by ArchiveReadTests git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660351 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/archivers/zip/ExplodeSupportTest.java | 8 ++++---- .../{archives => }/imploding-4Kdict-2trees.zip | Bin .../{archives => }/imploding-8Kdict-3trees.zip | Bin 3 files changed, 4 insertions(+), 4 deletions(-) rename src/test/resources/{archives => }/imploding-4Kdict-2trees.zip (100%) rename src/test/resources/{archives => }/imploding-8Kdict-3trees.zip (100%) diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ExplodeSupportTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ExplodeSupportTest.java index febf3b0a4cd..4c8d39afef5 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/ExplodeSupportTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/ExplodeSupportTest.java @@ -53,12 +53,12 @@ private void testArchiveWithImplodeCompression(String filename, String entryName @Test public void testArchiveWithImplodeCompression4K2Trees() throws IOException { - testArchiveWithImplodeCompression("target/test-classes/archives/imploding-4Kdict-2trees.zip", "HEADER.TXT"); + testArchiveWithImplodeCompression("target/test-classes/imploding-4Kdict-2trees.zip", "HEADER.TXT"); } @Test public void testArchiveWithImplodeCompression8K3Trees() throws IOException { - testArchiveWithImplodeCompression("target/test-classes/archives/imploding-8Kdict-3trees.zip", "LICENSE.TXT"); + testArchiveWithImplodeCompression("target/test-classes/imploding-8Kdict-3trees.zip", "LICENSE.TXT"); } @Test @@ -86,12 +86,12 @@ private void testZipStreamWithImplodeCompression(String filename, String entryNa @Test public void testZipStreamWithImplodeCompression4K2Trees() throws IOException { - testZipStreamWithImplodeCompression("target/test-classes/archives/imploding-4Kdict-2trees.zip", "HEADER.TXT"); + 
testZipStreamWithImplodeCompression("target/test-classes/imploding-4Kdict-2trees.zip", "HEADER.TXT"); } @Test public void testZipStreamWithImplodeCompression8K3Trees() throws IOException { - testZipStreamWithImplodeCompression("target/test-classes/archives/imploding-8Kdict-3trees.zip", "LICENSE.TXT"); + testZipStreamWithImplodeCompression("target/test-classes/imploding-8Kdict-3trees.zip", "LICENSE.TXT"); } @Test diff --git a/src/test/resources/archives/imploding-4Kdict-2trees.zip b/src/test/resources/imploding-4Kdict-2trees.zip similarity index 100% rename from src/test/resources/archives/imploding-4Kdict-2trees.zip rename to src/test/resources/imploding-4Kdict-2trees.zip diff --git a/src/test/resources/archives/imploding-8Kdict-3trees.zip b/src/test/resources/imploding-8Kdict-3trees.zip similarity index 100% rename from src/test/resources/archives/imploding-8Kdict-3trees.zip rename to src/test/resources/imploding-8Kdict-3trees.zip From 2e6967a356de82a7a94ac9e0e7ed1c0e6ff142bd Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Tue, 17 Feb 2015 15:13:23 +0000 Subject: [PATCH 146/189] It's useless to have fail() in a loop. Show all the missing entries. 
git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660403 13f79535-47bb-0310-9956-ffa450edef68 --- .../java/org/apache/commons/compress/AbstractTestCase.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/test/java/org/apache/commons/compress/AbstractTestCase.java b/src/test/java/org/apache/commons/compress/AbstractTestCase.java index 3f6fcd58c91..e6b1384e137 100644 --- a/src/test/java/org/apache/commons/compress/AbstractTestCase.java +++ b/src/test/java/org/apache/commons/compress/AbstractTestCase.java @@ -30,6 +30,7 @@ import java.net.URI; import java.net.URL; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Locale; @@ -359,9 +360,7 @@ protected File checkArchiveContent(ArchiveInputStream in, List expected, } in.close(); if (expected != null && expected.size() > 0) { - for (String name : expected) { - fail("Expected entry: " + name); - } + fail(expected.size() + " missing entries: " + Arrays.toString(expected.toArray())); } if (expected != null) { assertEquals(0, expected.size()); From 35bc6b73f2cb856e58f50b411914f21028cbafb0 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Tue, 17 Feb 2015 15:19:53 +0000 Subject: [PATCH 147/189] Oops - test class was not being run because the name ended in Tests instead of Test Fix up to work under Surefire git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660410 13f79535-47bb-0310-9956-ffa450edef68 --- ...{ArchiveReadTests.java => ArchiveReadTest.java} | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) rename src/test/java/org/apache/commons/compress/{ArchiveReadTests.java => ArchiveReadTest.java} (85%) diff --git a/src/test/java/org/apache/commons/compress/ArchiveReadTests.java b/src/test/java/org/apache/commons/compress/ArchiveReadTest.java similarity index 85% rename from src/test/java/org/apache/commons/compress/ArchiveReadTests.java rename to 
src/test/java/org/apache/commons/compress/ArchiveReadTest.java index 267b04a574f..37a95404597 100644 --- a/src/test/java/org/apache/commons/compress/ArchiveReadTests.java +++ b/src/test/java/org/apache/commons/compress/ArchiveReadTest.java @@ -41,30 +41,32 @@ * The class uses nested suites in order to be able to name the test after the file name, * as JUnit does not allow one to change the display name of a test. */ -public class ArchiveReadTests extends AbstractTestCase { +public class ArchiveReadTest extends AbstractTestCase { - final static ClassLoader classLoader = ArchiveReadTests.class.getClassLoader(); + final static ClassLoader classLoader = ArchiveReadTest.class.getClassLoader(); private File file; private static final ArrayList fileList = new ArrayList(); - public ArchiveReadTests(String name) { + public ArchiveReadTest(String name) { super(name); } - private ArchiveReadTests(String name, File file){ + private ArchiveReadTest(String name, File file){ super(name); this.file = file; } public static TestSuite suite() throws IOException{ TestSuite suite = new TestSuite("ArchiveReadTests"); + // TODO move fileList setup to static block File arcdir =new File(classLoader.getResource("archives").getFile()); assertTrue(arcdir.exists()); File listing= new File(arcdir,"files.txt"); assertTrue("files.txt is readable",listing.canRead()); BufferedReader br = new BufferedReader(new FileReader(listing)); String line; + fileList.clear(); // Surefire calls the suite more than once while ((line=br.readLine())!=null){ if (line.startsWith("#")){ continue; @@ -79,7 +81,7 @@ public static TestSuite suite() throws IOException{ } // Appears to be the only way to give the test a variable name TestSuite namedSuite = new TestSuite(file.getName()); - Test test = new ArchiveReadTests("testArchive", file); + Test test = new ArchiveReadTest("testArchive", file); namedSuite.addTest(test); suite.addTest(namedSuite); } @@ -99,6 +101,8 @@ public void testArchive() throws Exception{ 
checkArchiveContent(file, expected); } catch (ArchiveException e) { fail("Problem checking "+file); + } catch (junit.framework.AssertionFailedError e) { // show error in context + fail("Problem checking " + file + " " +e); } } } From 0f540f0f5848f2262e1b8937c39e269a79731efc Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Tue, 17 Feb 2015 17:30:37 +0000 Subject: [PATCH 148/189] Oops, omitted a Test annotation git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660452 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/archivers/zip/X5455_ExtendedTimestampTest.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestampTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestampTest.java index 18b9502baeb..4761654647c 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestampTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestampTest.java @@ -437,6 +437,7 @@ public void testWriteReadRoundtrip() throws IOException { zf.close(); } + @Test public void testBitsAreSetWithTime() { xf.setModifyJavaTime(new Date(1111)); assertTrue(xf.isBit0_modifyTimePresent()); From 0bbe5b59af443da2c3101cb9259e7ad68152dd13 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Tue, 17 Feb 2015 17:40:54 +0000 Subject: [PATCH 149/189] COMPRESS-305 Convert all tests to JUnit4 style Convert the AbstractTestCase test cases git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1660457 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress/AbstractTestCase.java | 22 +-- .../commons/compress/ArchiveReadTest.java | 74 ++++----- .../commons/compress/ArchiveUtilsTest.java | 7 + .../commons/compress/ChainingTestCase.java | 5 + .../compress/DetectArchiverTestCase.java | 12 +- .../commons/compress/IOMethodsTest.java | 17 ++ .../compress/archivers/ArTestCase.java | 13 ++ 
.../archivers/ArchiveOutputStreamTest.java | 20 +-- .../compress/archivers/CpioTestCase.java | 9 + .../compress/archivers/DumpTestCase.java | 9 + .../compress/archivers/JarTestCase.java | 5 + .../compress/archivers/LongPathTest.java | 92 +++++----- .../compress/archivers/LongSymLinkTest.java | 157 ++++++++++++++++++ .../compress/archivers/SevenZTestCase.java | 7 + .../compress/archivers/TarTestCase.java | 13 ++ .../compress/archivers/ZipTestCase.java | 17 ++ .../ar/ArArchiveInputStreamTest.java | 5 + .../ar/ArArchiveOutputStreamTest.java | 5 + .../arj/ArjArchiveInputStreamTest.java | 6 + .../cpio/CpioArchiveInputStreamTest.java | 5 + .../cpio/CpioArchiveOutputStreamTest.java | 4 + .../dump/DumpArchiveInputStreamTest.java | 6 +- .../archivers/sevenz/SevenZFileTest.java | 14 ++ .../sevenz/SevenZOutputFileTest.java | 33 ++++ .../tar/TarArchiveOutputStreamTest.java | 33 +++- .../archivers/zip/UTF8ZipFilesTest.java | 20 +++ .../compress/changes/ChangeSetTestCase.java | 31 ++++ .../compress/compressors/BZip2TestCase.java | 8 + .../compress/compressors/DeflateTestCase.java | 5 + .../compressors/FramedSnappyTestCase.java | 6 +- .../compress/compressors/GZipTestCase.java | 16 ++ .../compress/compressors/LZMATestCase.java | 2 + .../compress/compressors/Pack200TestCase.java | 14 ++ .../compress/compressors/XZTestCase.java | 7 + .../compress/compressors/ZTestCase.java | 7 + .../compressors/pack200/Pack200UtilsTest.java | 3 + ...FramedSnappyCompressorInputStreamTest.java | 11 +- 37 files changed, 600 insertions(+), 120 deletions(-) create mode 100644 src/test/java/org/apache/commons/compress/archivers/LongSymLinkTest.java diff --git a/src/test/java/org/apache/commons/compress/AbstractTestCase.java b/src/test/java/org/apache/commons/compress/AbstractTestCase.java index e6b1384e137..a17d03e14bc 100644 --- a/src/test/java/org/apache/commons/compress/AbstractTestCase.java +++ b/src/test/java/org/apache/commons/compress/AbstractTestCase.java @@ -18,6 +18,7 @@ */ package 
org.apache.commons.compress; +import static org.junit.Assert.*; import java.io.BufferedInputStream; import java.io.Closeable; import java.io.File; @@ -34,15 +35,15 @@ import java.util.List; import java.util.Locale; -import junit.framework.TestCase; - import org.apache.commons.compress.archivers.ArchiveEntry; import org.apache.commons.compress.archivers.ArchiveInputStream; import org.apache.commons.compress.archivers.ArchiveOutputStream; import org.apache.commons.compress.archivers.ArchiveStreamFactory; import org.apache.commons.compress.utils.IOUtils; +import org.junit.After; +import org.junit.Before; -public abstract class AbstractTestCase extends TestCase { +public abstract class AbstractTestCase { protected File dir; protected File resultDir; @@ -52,15 +53,8 @@ public abstract class AbstractTestCase extends TestCase { protected ArchiveStreamFactory factory = new ArchiveStreamFactory(); - public AbstractTestCase() { - } - - public AbstractTestCase(String name) { - super(name); - } - - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { dir = mkdir("dir"); resultDir = mkdir("dir-result"); archive = null; @@ -90,8 +84,8 @@ public static File getFile(String path) throws IOException { return new File(uri); } - @Override - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { rmdir(dir); rmdir(resultDir); dir = resultDir = null; diff --git a/src/test/java/org/apache/commons/compress/ArchiveReadTest.java b/src/test/java/org/apache/commons/compress/ArchiveReadTest.java index 37a95404597..ac4a74106a1 100644 --- a/src/test/java/org/apache/commons/compress/ArchiveReadTest.java +++ b/src/test/java/org/apache/commons/compress/ArchiveReadTest.java @@ -18,17 +18,23 @@ package org.apache.commons.compress; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + import java.io.BufferedReader; import java.io.File; import java.io.FileReader; -import 
java.io.IOException; +import java.io.FilenameFilter; import java.util.ArrayList; - -import junit.framework.Test; -import junit.framework.TestSuite; +import java.util.Collection; import org.apache.commons.compress.archivers.ArchiveEntry; import org.apache.commons.compress.archivers.ArchiveException; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; /** * Test that can read various archive file examples. @@ -37,55 +43,48 @@ * * Files must be in resources/archives, and there must be a file.txt containing * the list of files in the archives. - * - * The class uses nested suites in order to be able to name the test after the file name, - * as JUnit does not allow one to change the display name of a test. */ +@RunWith(Parameterized.class) public class ArchiveReadTest extends AbstractTestCase { - final static ClassLoader classLoader = ArchiveReadTest.class.getClassLoader(); + private static final ClassLoader CLASSLOADER = ArchiveReadTest.class.getClassLoader(); + private static final File ARCDIR = new File(CLASSLOADER.getResource("archives").getFile()); + private static final ArrayList FILELIST = new ArrayList(); private File file; - private static final ArrayList fileList = new ArrayList(); - public ArchiveReadTest(String name) { - super(name); - } - - private ArchiveReadTest(String name, File file){ - super(name); + public ArchiveReadTest(File file){ this.file = file; } - public static TestSuite suite() throws IOException{ - TestSuite suite = new TestSuite("ArchiveReadTests"); - // TODO move fileList setup to static block - File arcdir =new File(classLoader.getResource("archives").getFile()); - assertTrue(arcdir.exists()); - File listing= new File(arcdir,"files.txt"); + @BeforeClass + public static void setUpFileList() throws Exception { + assertTrue(ARCDIR.exists()); + File listing= new File(ARCDIR,"files.txt"); assertTrue("files.txt is 
readable",listing.canRead()); BufferedReader br = new BufferedReader(new FileReader(listing)); String line; - fileList.clear(); // Surefire calls the suite more than once while ((line=br.readLine())!=null){ - if (line.startsWith("#")){ - continue; + if (!line.startsWith("#")){ + FILELIST.add(line); } - fileList.add(line); } br.close(); - File[]files=arcdir.listFiles(); - for (final File file : files) { - if (file.getName().endsWith(".txt")){ - continue; + } + + @Parameters + public static Collection data() { + assertTrue(ARCDIR.exists()); + Collection params = new ArrayList(); + for (File f : ARCDIR.listFiles(new FilenameFilter() { + public boolean accept(File dir, String name) { + return !name.endsWith(".txt"); } - // Appears to be the only way to give the test a variable name - TestSuite namedSuite = new TestSuite(file.getName()); - Test test = new ArchiveReadTest("testArchive", file); - namedSuite.addTest(test); - suite.addTest(namedSuite); + })) + { + params.add(new Object[] { f }); } - return suite; + return params; } // files.txt contains size and filename @@ -94,14 +93,15 @@ protected String getExpectedString(ArchiveEntry entry) { return entry.getSize() + " " + entry.getName(); } + @Test public void testArchive() throws Exception{ @SuppressWarnings("unchecked") // fileList is correct type already - ArrayList expected= (ArrayList) fileList.clone(); + ArrayList expected= (ArrayList) FILELIST.clone(); try { checkArchiveContent(file, expected); } catch (ArchiveException e) { fail("Problem checking "+file); - } catch (junit.framework.AssertionFailedError e) { // show error in context + } catch (AssertionError e) { // show error in context fail("Problem checking " + file + " " +e); } } diff --git a/src/test/java/org/apache/commons/compress/ArchiveUtilsTest.java b/src/test/java/org/apache/commons/compress/ArchiveUtilsTest.java index abc663d8d3b..113b28731ef 100644 --- a/src/test/java/org/apache/commons/compress/ArchiveUtilsTest.java +++ 
b/src/test/java/org/apache/commons/compress/ArchiveUtilsTest.java @@ -18,7 +18,10 @@ package org.apache.commons.compress; +import static org.junit.Assert.*; + import org.apache.commons.compress.utils.ArchiveUtils; +import org.junit.Test; public class ArchiveUtilsTest extends AbstractTestCase { @@ -31,6 +34,8 @@ public class ArchiveUtilsTest extends AbstractTestCase { i += 2; } } + + @Test public void testCompareBA(){ byte[] buffer1 = {1,2,3}; byte[] buffer2 = {1,2,3,0}; @@ -45,6 +50,7 @@ public void testCompareBA(){ assertTrue(ArchiveUtils.isEqual(buffer3, buffer1)); } + @Test public void testCompareAscii(){ byte[] buffer1 = {'a','b','c'}; byte[] buffer2 = {'d','e','f',0}; @@ -54,6 +60,7 @@ public void testCompareAscii(){ assertFalse(ArchiveUtils.matchAsciiBuffer("def", buffer2)); } + @Test public void testAsciiConversions() { asciiToByteAndBackOK(""); asciiToByteAndBackOK("abcd"); diff --git a/src/test/java/org/apache/commons/compress/ChainingTestCase.java b/src/test/java/org/apache/commons/compress/ChainingTestCase.java index 4dd98194e60..cc82d9fa3d3 100644 --- a/src/test/java/org/apache/commons/compress/ChainingTestCase.java +++ b/src/test/java/org/apache/commons/compress/ChainingTestCase.java @@ -18,6 +18,8 @@ package org.apache.commons.compress; +import static org.junit.Assert.*; + import java.io.File; import java.io.FileInputStream; @@ -25,10 +27,12 @@ import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; +import org.junit.Test; public class ChainingTestCase extends AbstractTestCase { + @Test public void testTarGzip() throws Exception { File file = getFile("bla.tgz"); final TarArchiveInputStream is = new TarArchiveInputStream(new GzipCompressorInputStream(new FileInputStream(file))); @@ -38,6 +42,7 @@ public void testTarGzip() throws Exception { is.close(); } + @Test public void 
testTarBzip2() throws Exception { File file = getFile("bla.tar.bz2"); final TarArchiveInputStream is = new TarArchiveInputStream(new BZip2CompressorInputStream(new FileInputStream(file))); diff --git a/src/test/java/org/apache/commons/compress/DetectArchiverTestCase.java b/src/test/java/org/apache/commons/compress/DetectArchiverTestCase.java index 7d5c1cbcfa9..ad1890237d6 100644 --- a/src/test/java/org/apache/commons/compress/DetectArchiverTestCase.java +++ b/src/test/java/org/apache/commons/compress/DetectArchiverTestCase.java @@ -18,6 +18,8 @@ */ package org.apache.commons.compress; +import static org.junit.Assert.*; + import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; @@ -30,14 +32,13 @@ import org.apache.commons.compress.archivers.cpio.CpioArchiveInputStream; import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream; +import org.junit.Test; public final class DetectArchiverTestCase extends AbstractTestCase { - public DetectArchiverTestCase(String name) { - super(name); - } final ClassLoader classLoader = getClass().getClassLoader(); + @Test public void testDetectionNotArchive() throws IOException { try { getStreamFor("test.txt"); @@ -47,12 +48,14 @@ public void testDetectionNotArchive() throws IOException { } } + @Test public void testCOMPRESS117() throws Exception { final ArchiveInputStream tar = getStreamFor("COMPRESS-117.tar"); assertNotNull(tar); assertTrue(tar instanceof TarArchiveInputStream); } + @Test public void testDetection() throws Exception { final ArchiveInputStream ar = getStreamFor("bla.ar"); @@ -100,10 +103,12 @@ private ArchiveInputStream getStreamFor(String resource) // emptyArchive("ar"); // } + @Test public void testEmptyCpioArchive() throws Exception { checkEmptyArchive("cpio"); } + @Test public void testEmptyJarArchive() throws Exception { checkEmptyArchive("jar"); } @@ -112,6 +117,7 @@ public void 
testEmptyJarArchive() throws Exception { // public void testEmptyTarArchive() throws Exception { // checkEmptyArchive("tar"); // } + @Test public void testEmptyZipArchive() throws Exception { checkEmptyArchive("zip"); } diff --git a/src/test/java/org/apache/commons/compress/IOMethodsTest.java b/src/test/java/org/apache/commons/compress/IOMethodsTest.java index 593b0e17579..2ba67a1d108 100644 --- a/src/test/java/org/apache/commons/compress/IOMethodsTest.java +++ b/src/test/java/org/apache/commons/compress/IOMethodsTest.java @@ -18,6 +18,8 @@ package org.apache.commons.compress; +import static org.junit.Assert.*; + import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; @@ -32,6 +34,7 @@ import org.apache.commons.compress.archivers.jar.JarArchiveEntry; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; +import org.junit.Test; /** * Check that the different write methods create the same output. 
@@ -49,44 +52,58 @@ public class IOMethodsTest extends AbstractTestCase { } } + @Test public void testWriteAr() throws Exception { ArchiveEntry entry = new ArArchiveEntry("dummy", bytesToTest); compareWrites("ar", entry); } + + @Test public void testWriteCpio() throws Exception { ArchiveEntry entry = new CpioArchiveEntry("dummy", bytesToTest); compareWrites("cpio", entry); } + + @Test public void testWriteJar() throws Exception { ArchiveEntry entry = new JarArchiveEntry("dummy"); compareWrites("jar", entry); } + + @Test public void testWriteTar() throws Exception { TarArchiveEntry entry = new TarArchiveEntry("dummy"); entry.setSize(bytesToTest); compareWrites("tar", entry); } + + @Test public void testWriteZip() throws Exception { ArchiveEntry entry = new ZipArchiveEntry("dummy"); compareWrites("zip", entry); } + @Test public void testReadAr() throws Exception { compareReads("ar"); } + @Test public void testReadCpio() throws Exception { compareReads("cpio"); } + @Test public void testReadJar() throws Exception { compareReads("jar"); } + @Test public void testReadTar() throws Exception { compareReads("tar"); } + @Test public void testReadZip() throws Exception { compareReads("zip"); } diff --git a/src/test/java/org/apache/commons/compress/archivers/ArTestCase.java b/src/test/java/org/apache/commons/compress/archivers/ArTestCase.java index 75df762ad6b..8eac9bb49f1 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ArTestCase.java +++ b/src/test/java/org/apache/commons/compress/archivers/ArTestCase.java @@ -18,6 +18,8 @@ */ package org.apache.commons.compress.archivers; +import static org.junit.Assert.*; + import java.io.BufferedInputStream; import java.io.ByteArrayOutputStream; import java.io.File; @@ -31,9 +33,12 @@ import org.apache.commons.compress.archivers.ar.ArArchiveInputStream; import org.apache.commons.compress.archivers.ar.ArArchiveOutputStream; import org.apache.commons.compress.utils.IOUtils; +import org.junit.Ignore; +import org.junit.Test; 
public final class ArTestCase extends AbstractTestCase { + @Test public void testArArchiveCreation() throws Exception { final File output = new File(dir, "bla.ar"); @@ -53,6 +58,7 @@ public void testArArchiveCreation() throws Exception { os.close(); } + @Test public void testArUnarchive() throws Exception { final File output = new File(dir, "bla.ar"); { @@ -88,6 +94,7 @@ public void testArUnarchive() throws Exception { is.close(); } + @Test public void testArDelete() throws Exception { final File output = new File(dir, "bla.ar"); @@ -182,6 +189,8 @@ public void testArDelete() throws Exception { } // TODO: revisit - does AR not support storing directories? + @Ignore + @Test public void XtestDirectoryEntryFromFile() throws Exception { File[] tmp = createTempDirAndFile(); File archive = null; @@ -222,6 +231,8 @@ public void XtestDirectoryEntryFromFile() throws Exception { } // TODO: revisit - does AR not support storing directories? + @Ignore + @Test public void XtestExplicitDirectoryEntry() throws Exception { File[] tmp = createTempDirAndFile(); File archive = null; @@ -261,6 +272,7 @@ public void XtestExplicitDirectoryEntry() throws Exception { } } + @Test public void testFileEntryFromFile() throws Exception { File[] tmp = createTempDirAndFile(); File archive = null; @@ -310,6 +322,7 @@ public void testFileEntryFromFile() throws Exception { } } + @Test public void testExplicitFileEntry() throws Exception { File[] tmp = createTempDirAndFile(); File archive = null; diff --git a/src/test/java/org/apache/commons/compress/archivers/ArchiveOutputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/ArchiveOutputStreamTest.java index 42da535fd19..451d70d60dc 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ArchiveOutputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/ArchiveOutputStreamTest.java @@ -18,6 +18,8 @@ */ package org.apache.commons.compress.archivers; +import static org.junit.Assert.*; + import 
java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; @@ -32,19 +34,11 @@ import org.apache.commons.compress.archivers.tar.TarArchiveEntry; import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; import org.apache.commons.compress.utils.IOUtils; +import org.junit.Test; public class ArchiveOutputStreamTest extends AbstractTestCase { - @Override - protected void setUp() throws Exception { - super.setUp(); - } - - @Override - protected void tearDown() throws Exception { - super.tearDown(); - } - + @Test public void testFinish() throws Exception { OutputStream out1 = new ByteArrayOutputStream(); @@ -94,6 +88,7 @@ public void testFinish() throws Exception { } } + @Test public void testOptionalFinish() throws Exception { OutputStream out1 = new ByteArrayOutputStream(); @@ -114,22 +109,27 @@ public void testOptionalFinish() throws Exception { } } + @Test public void testCallSequenceAr() throws Exception{ doCallSequence("Ar"); } + @Test public void testCallSequenceCpio() throws Exception{ doCallSequence("Cpio"); } + @Test public void testCallSequenceJar() throws Exception{ doCallSequence("Jar"); } + @Test public void testCallSequenceTar() throws Exception{ doCallSequence("Tar"); } + @Test public void testCallSequenceZip() throws Exception{ doCallSequence("Zip"); } diff --git a/src/test/java/org/apache/commons/compress/archivers/CpioTestCase.java b/src/test/java/org/apache/commons/compress/archivers/CpioTestCase.java index 3e189a76c67..248a52fa1ea 100644 --- a/src/test/java/org/apache/commons/compress/archivers/CpioTestCase.java +++ b/src/test/java/org/apache/commons/compress/archivers/CpioTestCase.java @@ -18,6 +18,8 @@ */ package org.apache.commons.compress.archivers; +import static org.junit.Assert.*; + import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; @@ -32,9 +34,11 @@ import org.apache.commons.compress.archivers.cpio.CpioArchiveOutputStream; import 
org.apache.commons.compress.archivers.cpio.CpioConstants; import org.apache.commons.compress.utils.IOUtils; +import org.junit.Test; public final class CpioTestCase extends AbstractTestCase { + @Test public void testCpioArchiveCreation() throws Exception { final File output = new File(dir, "bla.cpio"); @@ -55,6 +59,7 @@ public void testCpioArchiveCreation() throws Exception { out.close(); } + @Test public void testCpioUnarchive() throws Exception { final File output = new File(dir, "bla.cpio"); final File file1 = getFile("test1.xml"); @@ -108,6 +113,7 @@ public void testCpioUnarchive() throws Exception { assertEquals("length of " + t.getAbsolutePath(), file2Length, t.length()); } + @Test public void testDirectoryEntryFromFile() throws Exception { File[] tmp = createTempDirAndFile(); File archive = null; @@ -147,6 +153,7 @@ public void testDirectoryEntryFromFile() throws Exception { } } + @Test public void testExplicitDirectoryEntry() throws Exception { File[] tmp = createTempDirAndFile(); File archive = null; @@ -187,6 +194,7 @@ public void testExplicitDirectoryEntry() throws Exception { } } + @Test public void testFileEntryFromFile() throws Exception { File[] tmp = createTempDirAndFile(); File archive = null; @@ -235,6 +243,7 @@ public void testFileEntryFromFile() throws Exception { } } + @Test public void testExplicitFileEntry() throws Exception { File[] tmp = createTempDirAndFile(); File archive = null; diff --git a/src/test/java/org/apache/commons/compress/archivers/DumpTestCase.java b/src/test/java/org/apache/commons/compress/archivers/DumpTestCase.java index 8adb9d348c3..f8155f09683 100644 --- a/src/test/java/org/apache/commons/compress/archivers/DumpTestCase.java +++ b/src/test/java/org/apache/commons/compress/archivers/DumpTestCase.java @@ -18,6 +18,8 @@ */ package org.apache.commons.compress.archivers; +import static org.junit.Assert.*; + import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; @@ -29,13 +31,16 @@ import 
org.apache.commons.compress.AbstractTestCase; import org.apache.commons.compress.archivers.dump.DumpArchiveInputStream; import org.apache.commons.compress.utils.IOUtils; +import org.junit.Test; public final class DumpTestCase extends AbstractTestCase { + @Test public void testDumpUnarchiveAll() throws Exception { unarchiveAll(getFile("bla.dump")); } + @Test public void testCompressedDumpUnarchiveAll() throws Exception { unarchiveAll(getFile("bla.z.dump")); } @@ -74,10 +79,12 @@ private void unarchiveAll(final File input) throws Exception { } } + @Test public void testArchiveDetection() throws Exception { archiveDetection(getFile("bla.dump")); } + @Test public void testCompressedArchiveDetection() throws Exception { archiveDetection(getFile("bla.z.dump")); } @@ -94,10 +101,12 @@ private void archiveDetection(final File f) throws Exception { } } + @Test public void testCheckArchive() throws Exception { checkDumpArchive(getFile("bla.dump")); } + @Test public void testCheckCompressedArchive() throws Exception { checkDumpArchive(getFile("bla.z.dump")); } diff --git a/src/test/java/org/apache/commons/compress/archivers/JarTestCase.java b/src/test/java/org/apache/commons/compress/archivers/JarTestCase.java index ee672af8fb9..ef060213316 100644 --- a/src/test/java/org/apache/commons/compress/archivers/JarTestCase.java +++ b/src/test/java/org/apache/commons/compress/archivers/JarTestCase.java @@ -27,8 +27,11 @@ import org.apache.commons.compress.AbstractTestCase; import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; import org.apache.commons.compress.utils.IOUtils; +import org.junit.Test; public final class JarTestCase extends AbstractTestCase { + + @Test public void testJarArchiveCreation() throws Exception { final File output = new File(dir, "bla.jar"); @@ -51,6 +54,7 @@ public void testJarArchiveCreation() throws Exception { } + @Test public void testJarUnarchive() throws Exception { final File input = getFile("bla.jar"); final InputStream is = new 
FileInputStream(input); @@ -81,6 +85,7 @@ public void testJarUnarchive() throws Exception { is.close(); } + @Test public void testJarUnarchiveAll() throws Exception { final File input = getFile("bla.jar"); final InputStream is = new FileInputStream(input); diff --git a/src/test/java/org/apache/commons/compress/archivers/LongPathTest.java b/src/test/java/org/apache/commons/compress/archivers/LongPathTest.java index 38e35a6f4e3..dfc52aaffce 100644 --- a/src/test/java/org/apache/commons/compress/archivers/LongPathTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/LongPathTest.java @@ -18,87 +18,79 @@ package org.apache.commons.compress.archivers; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileReader; -import java.io.IOException; +import java.io.FilenameFilter; import java.util.ArrayList; -import java.util.Map; -import java.util.HashMap; +import java.util.Collection; import junit.framework.AssertionFailedError; -import junit.framework.Test; -import junit.framework.TestSuite; import org.apache.commons.compress.AbstractTestCase; +import org.apache.commons.compress.archivers.ArchiveEntry; import org.apache.commons.compress.archivers.ar.ArArchiveInputStream; import org.apache.commons.compress.archivers.cpio.CpioArchiveInputStream; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; /** * Test that can read various tar file examples. 
* - * The class uses nested suites in order to be able to name the test after the file name, - * as JUnit does not allow one to change the display name of a test. - */ + * Files must be in resources/longpath, and there must be a file.txt containing + * the list of files in the archives. +*/ +@RunWith(Parameterized.class) public class LongPathTest extends AbstractTestCase { - private String name; - private File file; - - private static final Map> fileLists = new HashMap>(); + private static final ClassLoader CLASSLOADER = LongPathTest.class.getClassLoader(); + private static final File ARCDIR = new File(CLASSLOADER.getResource("longpath").getFile()); + private static final ArrayList FILELIST = new ArrayList(); - public LongPathTest(String name) { - super(name); - } + private File file; - private LongPathTest(String name, String function, File file) { - super(function); - this.name = name; + public LongPathTest(File file){ this.file = file; } - public static TestSuite suite() throws IOException{ - TestSuite suite = new TestSuite("LongPathTests"); - suite.addTest(createSuite("LongPathTest", "longpath")); - suite.addTest(createSuite("LongSymlinkTest", "longsymlink")); - return suite; - } - - public static TestSuite createSuite(String name, String dirname) throws IOException { - TestSuite suite = new TestSuite(name); - File arcdir = getFile(dirname); - assertTrue(arcdir.exists()); - File listing= new File(arcdir,"files.txt"); - assertTrue("File listing is readable",listing.canRead()); + @BeforeClass + public static void setUpFileList() throws Exception { + assertTrue(ARCDIR.exists()); + File listing= new File(ARCDIR,"files.txt"); + assertTrue("files.txt is readable",listing.canRead()); BufferedReader br = new BufferedReader(new FileReader(listing)); - - ArrayList fileList = new ArrayList(); String line; while ((line=br.readLine())!=null){ - if (line.startsWith("#")){ - continue; + if (!line.startsWith("#")){ + FILELIST.add(line); } - fileList.add(line); } - 
fileLists.put(name, fileList); br.close(); - File[]files=arcdir.listFiles(); - for (final File file : files) { - if (file.getName().endsWith(".txt")){ - continue; + } + + @Parameters + public static Collection data() { + Collection params = new ArrayList(); + for (File f : ARCDIR.listFiles(new FilenameFilter() { + public boolean accept(File dir, String name) { + return !name.endsWith(".txt"); } - // Appears to be the only way to give the test a variable name - TestSuite namedSuite = new TestSuite(file.getName()); - Test test = new LongPathTest(name, "testArchive", file); - namedSuite.addTest(test); - suite.addTest(namedSuite); + })) + { + params.add(new Object[] { f }); } - return suite; + return params; } @Override @@ -112,10 +104,10 @@ protected String getExpectedString(ArchiveEntry entry) { return entry.getName(); } + @Test public void testArchive() throws Exception { - ArrayList fileList = fileLists.get(name); @SuppressWarnings("unchecked") // fileList is of correct type - ArrayList expected = (ArrayList) fileList.clone(); + ArrayList expected = (ArrayList) FILELIST.clone(); String name = file.getName(); if ("minotaur.jar".equals(name) || "minotaur-0.jar".equals(name)){ expected.add("META-INF/"); @@ -140,7 +132,7 @@ public void testArchive() throws Exception { assertTrue(ais instanceof ArArchiveInputStream); // CPIO does not store directories or directory names expected.clear(); - for (String ent : fileList) { + for (String ent : FILELIST) { if (!ent.endsWith("/")) {// not a directory final int lastSlash = ent.lastIndexOf('/'); if (lastSlash >= 0) { // extract path name diff --git a/src/test/java/org/apache/commons/compress/archivers/LongSymLinkTest.java b/src/test/java/org/apache/commons/compress/archivers/LongSymLinkTest.java new file mode 100644 index 00000000000..9a08825dbdb --- /dev/null +++ b/src/test/java/org/apache/commons/compress/archivers/LongSymLinkTest.java @@ -0,0 +1,157 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more 
+ * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.apache.commons.compress.archivers; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.io.BufferedInputStream; +import java.io.BufferedReader; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileReader; +import java.io.FilenameFilter; +import java.util.ArrayList; +import java.util.Collection; + +import junit.framework.AssertionFailedError; + +import org.apache.commons.compress.AbstractTestCase; +import org.apache.commons.compress.archivers.ArchiveEntry; +import org.apache.commons.compress.archivers.ar.ArArchiveInputStream; +import org.apache.commons.compress.archivers.cpio.CpioArchiveInputStream; +import org.apache.commons.compress.archivers.tar.TarArchiveEntry; +import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; +import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream; +import org.junit.BeforeClass; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; + +/** + * Test that can read various tar file examples. 
+ * + * Files must be in resources/longsymlink, and there must be a file.txt containing + * the list of files in the archives. +*/ +@RunWith(Parameterized.class) +public class LongSymLinkTest extends AbstractTestCase { + + private static final ClassLoader CLASSLOADER = LongSymLinkTest.class.getClassLoader(); + private static final File ARCDIR = new File(CLASSLOADER.getResource("longsymlink").getFile()); + private static final ArrayList FILELIST = new ArrayList(); + + private File file; + + public LongSymLinkTest(File file){ + this.file = file; + } + + @BeforeClass + public static void setUpFileList() throws Exception { + assertTrue(ARCDIR.exists()); + File listing= new File(ARCDIR,"files.txt"); + assertTrue("files.txt is readable",listing.canRead()); + BufferedReader br = new BufferedReader(new FileReader(listing)); + String line; + while ((line=br.readLine())!=null){ + if (!line.startsWith("#")){ + FILELIST.add(line); + } + } + br.close(); + } + + @Parameters + public static Collection data() { + Collection params = new ArrayList(); + for (File f : ARCDIR.listFiles(new FilenameFilter() { + public boolean accept(File dir, String name) { + return !name.endsWith(".txt"); + } + })) + { + params.add(new Object[] { f }); + } + return params; + } + + + @Override + protected String getExpectedString(ArchiveEntry entry) { + if (entry instanceof TarArchiveEntry) { + TarArchiveEntry tarEntry = (TarArchiveEntry) entry; + if (tarEntry.isSymbolicLink()) { + return tarEntry.getName() + " -> " + tarEntry.getLinkName(); + } + } + return entry.getName(); + } + + @Test + public void testArchive() throws Exception { + @SuppressWarnings("unchecked") // fileList is of correct type + ArrayList expected = (ArrayList) FILELIST.clone(); + String name = file.getName(); + if ("minotaur.jar".equals(name) || "minotaur-0.jar".equals(name)){ + expected.add("META-INF/"); + expected.add("META-INF/MANIFEST.MF"); + } + ArchiveInputStream ais = factory.createArchiveInputStream(new 
BufferedInputStream(new FileInputStream(file))); + // check if expected type recognised + if (name.endsWith(".tar")){ + assertTrue(ais instanceof TarArchiveInputStream); + } else if (name.endsWith(".jar") || name.endsWith(".zip")){ + assertTrue(ais instanceof ZipArchiveInputStream); + } else if (name.endsWith(".cpio")){ + assertTrue(ais instanceof CpioArchiveInputStream); + // Hack: cpio does not add trailing "/" to directory names + for(int i=0; i < expected.size(); i++){ + String ent = expected.get(i); + if (ent.endsWith("/")){ + expected.set(i, ent.substring(0, ent.length()-1)); + } + } + } else if (name.endsWith(".ar")){ + assertTrue(ais instanceof ArArchiveInputStream); + // CPIO does not store directories or directory names + expected.clear(); + for (String ent : FILELIST) { + if (!ent.endsWith("/")) {// not a directory + final int lastSlash = ent.lastIndexOf('/'); + if (lastSlash >= 0) { // extract path name + expected.add(ent.substring(lastSlash + 1, ent.length())); + } else { + expected.add(ent); + } + } + } + } else { + fail("Unexpected file type: "+name); + } + try { + checkArchiveContent(ais, expected); + } catch (AssertionFailedError e) { + fail("Error processing "+file.getName()+" "+e); + } finally { + ais.close(); + } + } +} diff --git a/src/test/java/org/apache/commons/compress/archivers/SevenZTestCase.java b/src/test/java/org/apache/commons/compress/archivers/SevenZTestCase.java index b9cec114daf..16bedc13534 100644 --- a/src/test/java/org/apache/commons/compress/archivers/SevenZTestCase.java +++ b/src/test/java/org/apache/commons/compress/archivers/SevenZTestCase.java @@ -17,6 +17,8 @@ */ package org.apache.commons.compress.archivers; +import static org.junit.Assert.*; + import java.io.File; import java.io.FileInputStream; import java.io.IOException; @@ -26,21 +28,26 @@ import org.apache.commons.compress.archivers.sevenz.SevenZFile; import org.apache.commons.compress.archivers.sevenz.SevenZMethod; import 
org.apache.commons.compress.archivers.sevenz.SevenZOutputFile; +import org.junit.Test; public class SevenZTestCase extends AbstractTestCase { + @Test public void testSevenZArchiveCreationUsingCopy() throws Exception { testSevenZArchiveCreation(SevenZMethod.COPY); } + @Test public void testSevenZArchiveCreationUsingLZMA2() throws Exception { testSevenZArchiveCreation(SevenZMethod.LZMA2); } + @Test public void testSevenZArchiveCreationUsingBZIP2() throws Exception { testSevenZArchiveCreation(SevenZMethod.BZIP2); } + @Test public void testSevenZArchiveCreationUsingDeflate() throws Exception { testSevenZArchiveCreation(SevenZMethod.DEFLATE); } diff --git a/src/test/java/org/apache/commons/compress/archivers/TarTestCase.java b/src/test/java/org/apache/commons/compress/archivers/TarTestCase.java index d77fab2b473..f1d2b515bf9 100644 --- a/src/test/java/org/apache/commons/compress/archivers/TarTestCase.java +++ b/src/test/java/org/apache/commons/compress/archivers/TarTestCase.java @@ -18,6 +18,8 @@ */ package org.apache.commons.compress.archivers; +import static org.junit.Assert.*; + import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; @@ -31,8 +33,11 @@ import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream; import org.apache.commons.compress.utils.CharsetNames; import org.apache.commons.compress.utils.IOUtils; +import org.junit.Test; public final class TarTestCase extends AbstractTestCase { + + @Test public void testTarArchiveCreation() throws Exception { final File output = new File(dir, "bla.tar"); final File file1 = getFile("test1.xml"); @@ -52,6 +57,7 @@ public void testTarArchiveCreation() throws Exception { os.close(); } + @Test public void testTarArchiveLongNameCreation() throws Exception { String name = "testdata/12345678901234567890123456789012345678901234567890123456789012345678901234567890123456.xml"; byte[] bytes = name.getBytes(CharsetNames.UTF_8); @@ -104,6 +110,7 @@ public void 
testTarArchiveLongNameCreation() throws Exception { } } + @Test public void testTarUnarchive() throws Exception { final File input = getFile("bla.tar"); final InputStream is = new FileInputStream(input); @@ -115,6 +122,7 @@ public void testTarUnarchive() throws Exception { out.close(); } + @Test public void testCOMPRESS114() throws Exception { final File input = getFile("COMPRESS-114.tar"); final InputStream is = new FileInputStream(input); @@ -127,6 +135,7 @@ public void testCOMPRESS114() throws Exception { in.close(); } + @Test public void testDirectoryEntryFromFile() throws Exception { File[] tmp = createTempDirAndFile(); File archive = null; @@ -166,6 +175,7 @@ public void testDirectoryEntryFromFile() throws Exception { } } + @Test public void testExplicitDirectoryEntry() throws Exception { File[] tmp = createTempDirAndFile(); File archive = null; @@ -205,6 +215,7 @@ public void testExplicitDirectoryEntry() throws Exception { } } + @Test public void testFileEntryFromFile() throws Exception { File[] tmp = createTempDirAndFile(); File archive = null; @@ -253,6 +264,7 @@ public void testFileEntryFromFile() throws Exception { } } + @Test public void testExplicitFileEntry() throws Exception { File[] tmp = createTempDirAndFile(); File archive = null; @@ -303,6 +315,7 @@ public void testExplicitFileEntry() throws Exception { } } + @Test public void testCOMPRESS178() throws Exception { final File input = getFile("COMPRESS-178.tar"); final InputStream is = new FileInputStream(input); diff --git a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java index f8826a8e261..9a506cd2e00 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java +++ b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java @@ -18,6 +18,8 @@ */ package org.apache.commons.compress.archivers; +import static org.junit.Assert.*; + import java.io.ByteArrayInputStream; import 
java.io.File; import java.io.FileInputStream; @@ -40,6 +42,7 @@ import org.apache.commons.compress.archivers.zip.ZipMethod; import org.apache.commons.compress.utils.IOUtils; import org.junit.Assert; +import org.junit.Test; public final class ZipTestCase extends AbstractTestCase { /** @@ -47,6 +50,7 @@ public final class ZipTestCase extends AbstractTestCase { * and source is the same, it looks like the operations have worked * @throws Exception */ + @Test public void testZipArchiveCreation() throws Exception { // Archive final File output = new File(dir, "bla.zip"); @@ -111,6 +115,7 @@ public void testZipArchiveCreation() throws Exception { * Simple unarchive test. Asserts nothing. * @throws Exception */ + @Test public void testZipUnarchive() throws Exception { final File input = getFile("bla.zip"); final InputStream is = new FileInputStream(input); @@ -127,6 +132,7 @@ public void testZipUnarchive() throws Exception { * COMPRESS-208. */ + @Test public void testSkipsPK00Prefix() throws Exception { final File input = getFile("COMPRESS-208.zip"); InputStream is = new FileInputStream(input); @@ -145,6 +151,7 @@ public void testSkipsPK00Prefix() throws Exception { * COMPRESS-93. 
*/ + @Test public void testSupportedCompressionMethod() throws IOException { /* ZipFile bla = new ZipFile(getFile("bla.zip")); @@ -167,6 +174,7 @@ public void testSupportedCompressionMethod() throws IOException { * @see COMPRESS-93 */ + @Test public void testSkipEntryWithUnsupportedCompressionMethod() throws IOException { ZipArchiveInputStream zip = @@ -196,6 +204,7 @@ public void testSkipEntryWithUnsupportedCompressionMethod() * * @throws Exception */ + @Test public void testListAllFilesWithNestedArchive() throws Exception { final File input = getFile("OSX_ArchiveWithNestedArchive.zip"); @@ -230,6 +239,7 @@ public void testListAllFilesWithNestedArchive() throws Exception { results.contains("test3.xml"); } + @Test public void testDirectoryEntryFromFile() throws Exception { File[] tmp = createTempDirAndFile(); File archive = null; @@ -265,6 +275,7 @@ public void testDirectoryEntryFromFile() throws Exception { } } + @Test public void testExplicitDirectoryEntry() throws Exception { File[] tmp = createTempDirAndFile(); File archive = null; @@ -307,6 +318,7 @@ public boolean test(ZipArchiveEntry zipArchiveEntry) { } }; + @Test public void testCopyRawEntriesFromFile() throws IOException { @@ -338,6 +350,7 @@ public void testCopyRawEntriesFromFile() zf2.close(); } + @Test public void testCopyRawZip64EntryFromFile() throws IOException { @@ -362,6 +375,8 @@ public void testCopyRawZip64EntryFromFile() assertSameFileContents(reference, fileResult); zf1.close(); } + + @Test public void testUnixModeInAddRaw() throws IOException { File[] tmp = createTempDirAndFile(); @@ -449,6 +464,7 @@ private void createArchiveEntry(String payload, ZipArchiveOutputStream zos, Stri zos.closeArchiveEntry(); } + @Test public void testFileEntryFromFile() throws Exception { File[] tmp = createTempDirAndFile(); File archive = null; @@ -493,6 +509,7 @@ public void testFileEntryFromFile() throws Exception { } } + @Test public void testExplicitFileEntry() throws Exception { File[] tmp = 
createTempDirAndFile(); File archive = null; diff --git a/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStreamTest.java index 7a6f63dd549..ae09d63c360 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStreamTest.java @@ -18,19 +18,24 @@ package org.apache.commons.compress.archivers.ar; +import static org.junit.Assert.*; + import java.io.BufferedInputStream; import java.io.FileInputStream; import org.apache.commons.compress.AbstractTestCase; import org.apache.commons.compress.archivers.ArchiveEntry; import org.apache.commons.compress.utils.ArchiveUtils; +import org.junit.Test; public class ArArchiveInputStreamTest extends AbstractTestCase { + @Test public void testReadLongNamesGNU() throws Exception { checkLongNameEntry("longfile_gnu.ar"); } + @Test public void testReadLongNamesBSD() throws Exception { checkLongNameEntry("longfile_bsd.ar"); } diff --git a/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveOutputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveOutputStreamTest.java index 4918e1ef782..6394ce63844 100644 --- a/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveOutputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveOutputStreamTest.java @@ -18,6 +18,8 @@ package org.apache.commons.compress.archivers.ar; +import static org.junit.Assert.*; + import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileOutputStream; @@ -26,9 +28,11 @@ import java.util.List; import org.apache.commons.compress.AbstractTestCase; +import org.junit.Test; public class ArArchiveOutputStreamTest extends AbstractTestCase { + @Test public void testLongFileNamesCauseExceptionByDefault() { ArArchiveOutputStream os = null; try { @@ -44,6 +48,7 @@ 
public void testLongFileNamesCauseExceptionByDefault() { } } + @Test public void testLongFileNamesWorkUsingBSDDialect() throws Exception { FileOutputStream fos = null; ArArchiveOutputStream os = null; diff --git a/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java index 745dba8ace8..40d8f14003c 100644 --- a/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java @@ -19,14 +19,18 @@ package org.apache.commons.compress.archivers.arj; +import static org.junit.Assert.*; + import java.io.FileInputStream; import java.util.Calendar; import java.util.TimeZone; import org.apache.commons.compress.AbstractTestCase; +import org.junit.Test; public class ArjArchiveInputStreamTest extends AbstractTestCase { + @Test public void testArjUnarchive() throws Exception { StringBuilder expected = new StringBuilder(); expected.append("test1.xml\n"); @@ -50,6 +54,7 @@ public void testArjUnarchive() throws Exception { assertEquals(result.toString(), expected.toString()); } + @Test public void testReadingOfAttributesDosVersion() throws Exception { ArjArchiveInputStream in = new ArjArchiveInputStream(new FileInputStream(getFile("bla.arj"))); ArjArchiveEntry entry = in.getNextEntry(); @@ -63,6 +68,7 @@ public void testReadingOfAttributesDosVersion() throws Exception { in.close(); } + @Test public void testReadingOfAttributesUnixVersion() throws Exception { ArjArchiveInputStream in = new ArjArchiveInputStream(new FileInputStream(getFile("bla.unix.arj"))); ArjArchiveEntry entry = in.getNextEntry(); diff --git a/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStreamTest.java index da85da8e980..06570f7db2a 100644 --- 
a/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStreamTest.java @@ -18,12 +18,16 @@ */ package org.apache.commons.compress.archivers.cpio; +import static org.junit.Assert.*; + import java.io.FileInputStream; import org.apache.commons.compress.AbstractTestCase; +import org.junit.Test; public class CpioArchiveInputStreamTest extends AbstractTestCase { + @Test public void testCpioUnarchive() throws Exception { StringBuilder expected = new StringBuilder(); expected.append("./test1.xml\n"); @@ -46,6 +50,7 @@ public void testCpioUnarchive() throws Exception { assertEquals(result.toString(), expected.toString()); } + @Test public void testCpioUnarchiveCreatedByRedlineRpm() throws Exception { CpioArchiveInputStream in = new CpioArchiveInputStream(new FileInputStream(getFile("redline.cpio"))); diff --git a/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStreamTest.java index 8c5302e5f25..d2f66f9d003 100644 --- a/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStreamTest.java @@ -18,6 +18,8 @@ */ package org.apache.commons.compress.archivers.cpio; +import static org.junit.Assert.*; + import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; @@ -25,9 +27,11 @@ import org.apache.commons.compress.AbstractTestCase; import org.apache.commons.compress.utils.IOUtils; +import org.junit.Test; public class CpioArchiveOutputStreamTest extends AbstractTestCase { + @Test public void testWriteOldBinary() throws Exception { final File f = getFile("test1.xml"); final File output = new File(dir, "test.cpio"); diff --git a/src/test/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStreamTest.java 
b/src/test/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStreamTest.java index 8e9772e4391..e8f2928534d 100644 --- a/src/test/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStreamTest.java @@ -18,16 +18,18 @@ */ package org.apache.commons.compress.archivers.dump; -import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.*; import java.io.FileInputStream; import java.io.InputStream; import org.apache.commons.compress.AbstractTestCase; import org.apache.commons.compress.archivers.ArchiveException; +import org.junit.Test; public class DumpArchiveInputStreamTest extends AbstractTestCase { + @Test public void testNotADumpArchive() throws Exception { FileInputStream is = new FileInputStream(getFile("bla.zip")); try { @@ -41,6 +43,7 @@ public void testNotADumpArchive() throws Exception { } } + @Test public void testNotADumpArchiveButBigEnough() throws Exception { FileInputStream is = new FileInputStream(getFile("zip64support.tar.bz2")); try { @@ -54,6 +57,7 @@ public void testNotADumpArchiveButBigEnough() throws Exception { } } + @Test public void testConsumesArchiveCompletely() throws Exception { InputStream is = DumpArchiveInputStreamTest.class .getResourceAsStream("/archive_with_trailer.dump"); diff --git a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java index 177e9a594f7..196d0405aec 100644 --- a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java @@ -17,17 +17,21 @@ */ package org.apache.commons.compress.archivers.sevenz; +import static org.junit.Assert.*; + import java.io.File; import java.security.NoSuchAlgorithmException; import java.util.Arrays; import javax.crypto.Cipher; import 
org.apache.commons.compress.AbstractTestCase; import org.apache.commons.compress.PasswordRequiredException; +import org.junit.Test; public class SevenZFileTest extends AbstractTestCase { private static final String TEST2_CONTENT = "\r\n\r\n\r\n\t\r\n\n"; + @Test public void testAllEmptyFilesArchive() throws Exception { SevenZFile archive = new SevenZFile(getFile("7z-empty-mhc-off.7z")); try { @@ -37,22 +41,27 @@ public void testAllEmptyFilesArchive() throws Exception { } } + @Test public void testHelloWorldHeaderCompressionOffCopy() throws Exception { checkHelloWorld("7z-hello-mhc-off-copy.7z"); } + @Test public void testHelloWorldHeaderCompressionOffLZMA2() throws Exception { checkHelloWorld("7z-hello-mhc-off-lzma2.7z"); } + @Test public void test7zUnarchive() throws Exception { test7zUnarchive(getFile("bla.7z"), SevenZMethod.LZMA); } + @Test public void test7zDeflateUnarchive() throws Exception { test7zUnarchive(getFile("bla.deflate.7z"), SevenZMethod.DEFLATE); } + @Test public void test7zDecryptUnarchive() throws Exception { if (isStrongCryptoAvailable()) { test7zUnarchive(getFile("bla.encrypted.7z"), SevenZMethod.LZMA, // stack LZMA + AES @@ -64,6 +73,7 @@ private void test7zUnarchive(File f, SevenZMethod m) throws Exception { test7zUnarchive(f, m, null); } + @Test public void testEncryptedArchiveRequiresPassword() throws Exception { try { new SevenZFile(getFile("bla.encrypted.7z")); @@ -82,6 +92,7 @@ public void testEncryptedArchiveRequiresPassword() throws Exception { /** * @see "https://issues.apache.org/jira/browse/COMPRESS-256" */ + @Test public void testCompressedHeaderWithNonDefaultDictionarySize() throws Exception { SevenZFile sevenZFile = new SevenZFile(getFile("COMPRESS-256.7z")); try { @@ -95,6 +106,7 @@ public void testCompressedHeaderWithNonDefaultDictionarySize() throws Exception } } + @Test public void testSignatureCheck() { assertTrue(SevenZFile.matches(SevenZFile.sevenZSignature, SevenZFile.sevenZSignature.length)); @@ -109,6 +121,7 @@ public 
void testSignatureCheck() { (byte) 0xAF, 0x27, 0x1D}, 6)); } + @Test public void testReadingBackLZMA2DictSize() throws Exception { File output = new File(dir, "lzma2-dictsize.7z"); SevenZOutputFile outArchive = new SevenZOutputFile(output); @@ -134,6 +147,7 @@ public void testReadingBackLZMA2DictSize() throws Exception { } } + @Test public void testReadingBackDeltaDistance() throws Exception { File output = new File(dir, "delta-distance.7z"); SevenZOutputFile outArchive = new SevenZOutputFile(output); diff --git a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFileTest.java b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFileTest.java index 19fc1994e5f..9400e7ac8a5 100644 --- a/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFileTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFileTest.java @@ -17,6 +17,9 @@ */ package org.apache.commons.compress.archivers.sevenz; +import static org.junit.Assert.*; +import org.junit.Test; + import java.io.File; import java.io.IOException; import java.util.Arrays; @@ -49,6 +52,7 @@ public void tearDown() throws Exception { super.tearDown(); } + @Test public void testDirectoriesAndEmptyFiles() throws Exception { output = new File(dir, "empties.7z"); @@ -145,6 +149,7 @@ public void testDirectoriesAndEmptyFiles() throws Exception { } + @Test public void testDirectoriesOnly() throws Exception { output = new File(dir, "dirs.7z"); SevenZOutputFile outArchive = new SevenZOutputFile(output); @@ -173,6 +178,7 @@ public void testDirectoriesOnly() throws Exception { } + @Test public void testCantFinishTwice() throws Exception { output = new File(dir, "finish.7z"); SevenZOutputFile outArchive = new SevenZOutputFile(output); @@ -187,96 +193,118 @@ public void testCantFinishTwice() throws Exception { } } + @Test public void testSixEmptyFiles() throws Exception { testCompress252(6, 0); } + @Test public void testSixFilesSomeNotEmpty() 
throws Exception { testCompress252(6, 2); } + @Test public void testSevenEmptyFiles() throws Exception { testCompress252(7, 0); } + @Test public void testSevenFilesSomeNotEmpty() throws Exception { testCompress252(7, 2); } + @Test public void testEightEmptyFiles() throws Exception { testCompress252(8, 0); } + @Test public void testEightFilesSomeNotEmpty() throws Exception { testCompress252(8, 2); } + @Test public void testNineEmptyFiles() throws Exception { testCompress252(9, 0); } + @Test public void testNineFilesSomeNotEmpty() throws Exception { testCompress252(9, 2); } + @Test public void testTwentyNineEmptyFiles() throws Exception { testCompress252(29, 0); } + @Test public void testTwentyNineFilesSomeNotEmpty() throws Exception { testCompress252(29, 7); } + @Test public void testCopyRoundtrip() throws Exception { testRoundTrip(SevenZMethod.COPY); } + @Test public void testBzip2Roundtrip() throws Exception { testRoundTrip(SevenZMethod.BZIP2); } + @Test public void testLzma2Roundtrip() throws Exception { testRoundTrip(SevenZMethod.LZMA2); } + @Test public void testDeflateRoundtrip() throws Exception { testRoundTrip(SevenZMethod.DEFLATE); } + @Test public void testBCJX86Roundtrip() throws Exception { if (XZ_BCJ_IS_BUGGY) { return; } testFilterRoundTrip(new SevenZMethodConfiguration(SevenZMethod.BCJ_X86_FILTER)); } + @Test public void testBCJARMRoundtrip() throws Exception { if (XZ_BCJ_IS_BUGGY) { return; } testFilterRoundTrip(new SevenZMethodConfiguration(SevenZMethod.BCJ_ARM_FILTER)); } + @Test public void testBCJARMThumbRoundtrip() throws Exception { if (XZ_BCJ_IS_BUGGY) { return; } testFilterRoundTrip(new SevenZMethodConfiguration(SevenZMethod.BCJ_ARM_THUMB_FILTER)); } + @Test public void testBCJIA64Roundtrip() throws Exception { if (XZ_BCJ_IS_BUGGY) { return; } testFilterRoundTrip(new SevenZMethodConfiguration(SevenZMethod.BCJ_IA64_FILTER)); } + @Test public void testBCJPPCRoundtrip() throws Exception { if (XZ_BCJ_IS_BUGGY) { return; } testFilterRoundTrip(new 
SevenZMethodConfiguration(SevenZMethod.BCJ_PPC_FILTER)); } + @Test public void testBCJSparcRoundtrip() throws Exception { if (XZ_BCJ_IS_BUGGY) { return; } testFilterRoundTrip(new SevenZMethodConfiguration(SevenZMethod.BCJ_SPARC_FILTER)); } + @Test public void testDeltaRoundtrip() throws Exception { testFilterRoundTrip(new SevenZMethodConfiguration(SevenZMethod.DELTA_FILTER)); } + @Test public void testStackOfContentCompressions() throws Exception { output = new File(dir, "multiple-methods.7z"); ArrayList methods = new ArrayList(); @@ -287,6 +315,7 @@ public void testStackOfContentCompressions() throws Exception { createAndReadBack(output, methods); } + @Test public void testDeflateWithConfiguration() throws Exception { output = new File(dir, "deflate-options.7z"); // Deflater.BEST_SPEED @@ -294,6 +323,7 @@ public void testDeflateWithConfiguration() throws Exception { .singletonList(new SevenZMethodConfiguration(SevenZMethod.DEFLATE, 1))); } + @Test public void testBzip2WithConfiguration() throws Exception { output = new File(dir, "bzip2-options.7z"); // 400k block size @@ -301,6 +331,7 @@ public void testBzip2WithConfiguration() throws Exception { .singletonList(new SevenZMethodConfiguration(SevenZMethod.BZIP2, 4))); } + @Test public void testLzma2WithIntConfiguration() throws Exception { output = new File(dir, "lzma2-options.7z"); // 1 MB dictionary @@ -308,6 +339,7 @@ public void testLzma2WithIntConfiguration() throws Exception { .singletonList(new SevenZMethodConfiguration(SevenZMethod.LZMA2, 1 << 20))); } + @Test public void testLzma2WithOptionsConfiguration() throws Exception { output = new File(dir, "lzma2-options2.7z"); LZMA2Options opts = new LZMA2Options(1); @@ -315,6 +347,7 @@ public void testLzma2WithOptionsConfiguration() throws Exception { .singletonList(new SevenZMethodConfiguration(SevenZMethod.LZMA2, opts))); } + @Test public void testArchiveWithMixedMethods() throws Exception { output = new File(dir, "mixed-methods.7z"); SevenZOutputFile outArchive 
= new SevenZOutputFile(output); diff --git a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java index 7de18ff58f5..3f80be93bee 100644 --- a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java @@ -18,6 +18,8 @@ package org.apache.commons.compress.archivers.tar; +import static org.junit.Assert.*; + import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; @@ -37,11 +39,12 @@ import org.apache.commons.compress.archivers.ArchiveStreamFactory; import org.apache.commons.compress.utils.CharsetNames; import org.apache.commons.compress.utils.IOUtils; - import org.junit.Assert; +import org.junit.Test; public class TarArchiveOutputStreamTest extends AbstractTestCase { + @Test public void testCount() throws Exception { File f = File.createTempFile("commons-compress-tarcount", ".tar"); f.deleteOnExit(); @@ -69,6 +72,7 @@ public void testCount() throws Exception { assertEquals(f.length(), tarOut.getBytesWritten()); } + @Test public void testMaxFileSizeError() throws Exception { TarArchiveEntry t = new TarArchiveEntry("foo"); t.setSize(077777777777L); @@ -84,6 +88,7 @@ public void testMaxFileSizeError() throws Exception { } } + @Test public void testBigNumberStarMode() throws Exception { TarArchiveEntry t = new TarArchiveEntry("foo"); t.setSize(0100000000000L); @@ -109,6 +114,7 @@ public void testBigNumberStarMode() throws Exception { closeQuietly(tos); } + @Test public void testBigNumberPosixMode() throws Exception { TarArchiveEntry t = new TarArchiveEntry("foo"); t.setSize(0100000000000L); @@ -136,6 +142,7 @@ public void testBigNumberPosixMode() throws Exception { closeQuietly(tos); } + @Test public void testWriteSimplePaxHeaders() throws Exception { Map m = new HashMap(); m.put("a", 
"b"); @@ -149,6 +156,7 @@ public void testWriteSimplePaxHeaders() throws Exception { assertEquals("6 a=b\n", new String(data, 512, 6, CharsetNames.UTF_8)); } + @Test public void testPaxHeadersWithLength99() throws Exception { Map m = new HashMap(); m.put("a", @@ -167,6 +175,7 @@ public void testPaxHeadersWithLength99() throws Exception { + "012\n", new String(data, 512, 99, CharsetNames.UTF_8)); } + @Test public void testPaxHeadersWithLength101() throws Exception { Map m = new HashMap(); m.put("a", @@ -201,6 +210,7 @@ private byte[] writePaxHeader(Map m) throws Exception { return bos.toByteArray(); } + @Test public void testWriteLongFileNamePosixMode() throws Exception { String n = "01234567890123456789012345678901234567890123456789" + "01234567890123456789012345678901234567890123456789" @@ -225,6 +235,7 @@ public void testWriteLongFileNamePosixMode() throws Exception { tos.close(); } + @Test public void testOldEntryStarMode() throws Exception { TarArchiveEntry t = new TarArchiveEntry("foo"); t.setSize(Integer.MAX_VALUE); @@ -255,6 +266,7 @@ public void testOldEntryStarMode() throws Exception { closeQuietly(tos); } + @Test public void testOldEntryPosixMode() throws Exception { TarArchiveEntry t = new TarArchiveEntry("foo"); t.setSize(Integer.MAX_VALUE); @@ -287,6 +299,7 @@ public void testOldEntryPosixMode() throws Exception { closeQuietly(tos); } + @Test public void testOldEntryError() throws Exception { TarArchiveEntry t = new TarArchiveEntry("foo"); t.setSize(Integer.MAX_VALUE); @@ -301,6 +314,7 @@ public void testOldEntryError() throws Exception { tos.close(); } + @Test public void testWriteNonAsciiPathNamePaxHeader() throws Exception { String n = "\u00e4"; TarArchiveEntry t = new TarArchiveEntry(n); @@ -322,6 +336,7 @@ public void testWriteNonAsciiPathNamePaxHeader() throws Exception { tin.close(); } + @Test public void testWriteNonAsciiLinkPathNamePaxHeader() throws Exception { String n = "\u00e4"; TarArchiveEntry t = new TarArchiveEntry("a", 
TarConstants.LF_LINK); @@ -347,6 +362,7 @@ public void testWriteNonAsciiLinkPathNamePaxHeader() throws Exception { /** * @see "https://issues.apache.org/jira/browse/COMPRESS-200" */ + @Test public void testRoundtripWith67CharFileNameGnu() throws Exception { testRoundtripWith67CharFileName(TarArchiveOutputStream.LONGFILE_GNU); } @@ -354,6 +370,7 @@ public void testRoundtripWith67CharFileNameGnu() throws Exception { /** * @see "https://issues.apache.org/jira/browse/COMPRESS-200" */ + @Test public void testRoundtripWith67CharFileNamePosix() throws Exception { testRoundtripWith67CharFileName(TarArchiveOutputStream.LONGFILE_POSIX); } @@ -379,6 +396,7 @@ private void testRoundtripWith67CharFileName(int mode) throws Exception { tin.close(); } + @Test public void testWriteLongDirectoryNameErrorMode() throws Exception { String n = "01234567890123456789012345678901234567890123456789" + "01234567890123456789012345678901234567890123456789" @@ -399,6 +417,7 @@ public void testWriteLongDirectoryNameErrorMode() throws Exception { } } + @Test public void testWriteLongDirectoryNameTruncateMode() throws Exception { String n = "01234567890123456789012345678901234567890123456789" + "01234567890123456789012345678901234567890123456789" @@ -422,6 +441,7 @@ public void testWriteLongDirectoryNameTruncateMode() throws Exception { /** * @see "https://issues.apache.org/jira/browse/COMPRESS-203" */ + @Test public void testWriteLongDirectoryNameGnuMode() throws Exception { testWriteLongDirectoryName(TarArchiveOutputStream.LONGFILE_GNU); } @@ -429,6 +449,7 @@ public void testWriteLongDirectoryNameGnuMode() throws Exception { /** * @see "https://issues.apache.org/jira/browse/COMPRESS-203" */ + @Test public void testWriteLongDirectoryNamePosixMode() throws Exception { testWriteLongDirectoryName(TarArchiveOutputStream.LONGFILE_POSIX); } @@ -456,6 +477,7 @@ private void testWriteLongDirectoryName(int mode) throws Exception { /** * @see "https://issues.apache.org/jira/browse/COMPRESS-203" */ + @Test 
public void testWriteNonAsciiDirectoryNamePosixMode() throws Exception { String n = "f\u00f6\u00f6/"; TarArchiveEntry t = new TarArchiveEntry(n); @@ -477,6 +499,7 @@ public void testWriteNonAsciiDirectoryNamePosixMode() throws Exception { /** * @see "https://issues.apache.org/jira/browse/COMPRESS-265" */ + @Test public void testWriteNonAsciiNameWithUnfortunateNamePosixMode() throws Exception { String n = "f\u00f6\u00f6\u00dc"; TarArchiveEntry t = new TarArchiveEntry(n); @@ -498,6 +521,7 @@ public void testWriteNonAsciiNameWithUnfortunateNamePosixMode() throws Exception /** * @see "https://issues.apache.org/jira/browse/COMPRESS-237" */ + @Test public void testWriteLongLinkNameErrorMode() throws Exception { String linkname = "01234567890123456789012345678901234567890123456789" + "01234567890123456789012345678901234567890123456789" @@ -519,6 +543,7 @@ public void testWriteLongLinkNameErrorMode() throws Exception { } } + @Test public void testWriteLongLinkNameTruncateMode() throws Exception { String linkname = "01234567890123456789012345678901234567890123456789" + "01234567890123456789012345678901234567890123456789" @@ -543,6 +568,7 @@ public void testWriteLongLinkNameTruncateMode() throws Exception { /** * @see "https://issues.apache.org/jira/browse/COMPRESS-237" */ + @Test public void testWriteLongLinkNameGnuMode() throws Exception { testWriteLongLinkName(TarArchiveOutputStream.LONGFILE_GNU); } @@ -550,6 +576,7 @@ public void testWriteLongLinkNameGnuMode() throws Exception { /** * @see "https://issues.apache.org/jira/browse/COMPRESS-237" */ + @Test public void testWriteLongLinkNamePosixMode() throws Exception { testWriteLongLinkName(TarArchiveOutputStream.LONGFILE_POSIX); } @@ -557,7 +584,7 @@ public void testWriteLongLinkNamePosixMode() throws Exception { /** * @see "https://issues.apache.org/jira/browse/COMPRESS-237" */ - public void testWriteLongLinkName(int mode) throws Exception { + private void testWriteLongLinkName(int mode) throws Exception { String linkname 
= "01234567890123456789012345678901234567890123456789" + "01234567890123456789012345678901234567890123456789" + "01234567890123456789012345678901234567890123456789/test"; @@ -580,6 +607,7 @@ public void testWriteLongLinkName(int mode) throws Exception { tin.close(); } + @Test public void testPadsOutputToFullBlockLength() throws Exception { File f = File.createTempFile("commons-compress-padding", ".tar"); f.deleteOnExit(); @@ -605,6 +633,7 @@ public void testPadsOutputToFullBlockLength() throws Exception { * * @throws Exception */ + @Test public void testLongNameMd5Hash() throws Exception { final String longFileName = "a/considerably/longer/file/name/which/forces/use/of/the/long/link/header/which/appears/to/always/use/the/current/time/as/modification/date"; String fname = longFileName; diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/UTF8ZipFilesTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/UTF8ZipFilesTest.java index 874c55e3efb..c1a52942d86 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/UTF8ZipFilesTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/UTF8ZipFilesTest.java @@ -18,6 +18,8 @@ package org.apache.commons.compress.archivers.zip; +import static org.junit.Assert.*; + import java.io.File; import java.io.FileInputStream; import java.io.IOException; @@ -29,6 +31,7 @@ import org.apache.commons.compress.AbstractTestCase; import org.apache.commons.compress.utils.CharsetNames; +import org.junit.Test; public class UTF8ZipFilesTest extends AbstractTestCase { @@ -37,41 +40,49 @@ public class UTF8ZipFilesTest extends AbstractTestCase { private static final String EURO_FOR_DOLLAR_TXT = "\u20AC_for_Dollar.txt"; private static final String OIL_BARREL_TXT = "\u00D6lf\u00E4sser.txt"; + @Test public void testUtf8FileRoundtripExplicitUnicodeExtra() throws IOException { testFileRoundtrip(CharsetNames.UTF_8, true, true); } + @Test public void testUtf8FileRoundtripNoEFSExplicitUnicodeExtra() 
throws IOException { testFileRoundtrip(CharsetNames.UTF_8, false, true); } + @Test public void testCP437FileRoundtripExplicitUnicodeExtra() throws IOException { testFileRoundtrip(CP437, false, true); } + @Test public void testASCIIFileRoundtripExplicitUnicodeExtra() throws IOException { testFileRoundtrip(CharsetNames.US_ASCII, false, true); } + @Test public void testUtf8FileRoundtripImplicitUnicodeExtra() throws IOException { testFileRoundtrip(CharsetNames.UTF_8, true, false); } + @Test public void testUtf8FileRoundtripNoEFSImplicitUnicodeExtra() throws IOException { testFileRoundtrip(CharsetNames.UTF_8, false, false); } + @Test public void testCP437FileRoundtripImplicitUnicodeExtra() throws IOException { testFileRoundtrip(CP437, false, false); } + @Test public void testASCIIFileRoundtripImplicitUnicodeExtra() throws IOException { testFileRoundtrip(CharsetNames.US_ASCII, false, false); @@ -83,6 +94,7 @@ public void testASCIIFileRoundtripImplicitUnicodeExtra() * 7-ZIP doesn't use EFS for strings that can be encoded in CP437 * - which is true for OIL_BARREL_TXT. */ + @Test public void testRead7ZipArchive() throws IOException { File archive = getFile("utf8-7zip-test.zip"); ZipFile zf = null; @@ -96,6 +108,7 @@ public void testRead7ZipArchive() throws IOException { } } + @Test public void testRead7ZipArchiveForStream() throws IOException { FileInputStream archive = new FileInputStream(getFile("utf8-7zip-test.zip")); @@ -116,6 +129,7 @@ public void testRead7ZipArchiveForStream() throws IOException { * WinZIP created archive, uses Unicode Extra Fields but only in * the central directory. 
*/ + @Test public void testReadWinZipArchive() throws IOException { File archive = getFile("utf8-winzip-test.zip"); ZipFile zf = null; @@ -141,6 +155,7 @@ private void assertCanRead(ZipFile zf, String fileName) throws IOException { } } + @Test public void testReadWinZipArchiveForStream() throws IOException { FileInputStream archive = new FileInputStream(getFile("utf8-winzip-test.zip")); @@ -157,6 +172,7 @@ public void testReadWinZipArchiveForStream() throws IOException { } } + @Test public void testZipFileReadsUnicodeFields() throws IOException { File file = File.createTempFile("unicode-test", ".zip"); file.deleteOnExit(); @@ -176,6 +192,7 @@ public void testZipFileReadsUnicodeFields() throws IOException { } } + @Test public void testZipArchiveInputStreamReadsUnicodeFields() throws IOException { File file = File.createTempFile("unicode-test", ".zip"); @@ -193,6 +210,7 @@ public void testZipArchiveInputStreamReadsUnicodeFields() } } + @Test public void testRawNameReadFromZipFile() throws IOException { File archive = getFile("utf8-7zip-test.zip"); @@ -205,6 +223,7 @@ public void testRawNameReadFromZipFile() } } + @Test public void testRawNameReadFromStream() throws IOException { FileInputStream archive = @@ -365,6 +384,7 @@ private static void assertUnicodeName(ZipArchiveEntry ze, } } + @Test public void testUtf8Interoperability() throws IOException { File file1 = getFile("utf8-7zip-test.zip"); File file2 = getFile("utf8-winzip-test.zip"); diff --git a/src/test/java/org/apache/commons/compress/changes/ChangeSetTestCase.java b/src/test/java/org/apache/commons/compress/changes/ChangeSetTestCase.java index 989ed35c18b..ea3f115e0c9 100644 --- a/src/test/java/org/apache/commons/compress/changes/ChangeSetTestCase.java +++ b/src/test/java/org/apache/commons/compress/changes/ChangeSetTestCase.java @@ -18,6 +18,8 @@ */ package org.apache.commons.compress.changes; +import static org.junit.Assert.*; + import java.io.BufferedInputStream; import java.io.BufferedReader; import 
java.io.File; @@ -39,6 +41,7 @@ import org.apache.commons.compress.archivers.tar.TarArchiveEntry; import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; import org.apache.commons.compress.archivers.zip.ZipFile; +import org.junit.Test; /** * Checks several ChangeSet business logics. @@ -74,6 +77,7 @@ private void archiveListDelete(String prefix){ * * @throws Exception */ + @Test public void testAddChangeTwice() throws Exception { InputStream in = null; InputStream in2 = null; @@ -108,6 +112,7 @@ public void testAddChangeTwice() throws Exception { * * @throws Exception */ + @Test public void testAddChangeTwiceWithoutReplace() throws Exception { InputStream in = null; InputStream in2 = null; @@ -143,6 +148,7 @@ public void testAddChangeTwiceWithoutReplace() throws Exception { * * @throws Exception */ + @Test public void testDeleteDir() throws Exception { final String archivename = "cpio"; File input = this.createArchive(archivename); @@ -184,6 +190,7 @@ public void testDeleteDir() throws Exception { * * @throws Exception */ + @Test public void testDeleteDir2() throws Exception { final String archivename = "cpio"; File input = this.createArchive(archivename); @@ -225,6 +232,7 @@ public void testDeleteDir2() throws Exception { * * @throws Exception */ + @Test public void testDeleteDir3() throws Exception { final String archivename = "cpio"; File input = this.createArchive(archivename); @@ -266,6 +274,7 @@ public void testDeleteDir3() throws Exception { * * @throws Exception */ + @Test public void testDeleteFile() throws Exception { final String archivename = "cpio"; File input = this.createArchive(archivename); @@ -308,6 +317,7 @@ public void testDeleteFile() throws Exception { * * @throws Exception */ + @Test public void testDeleteFile2() throws Exception { final String archivename = "cpio"; File input = this.createArchive(archivename); @@ -351,6 +361,7 @@ public void testDeleteFile2() throws Exception { * * @throws Exception */ + @Test public void 
testDeletePlusAddSame() throws Exception { final String archivename = "zip"; File input = this.createArchive(archivename); @@ -422,6 +433,7 @@ public void testDeletePlusAddSame() throws Exception { * * @throws Exception */ + @Test public void testChangeSetResults() throws Exception { final String archivename = "cpio"; File input = this.createArchive(archivename); @@ -485,6 +497,7 @@ public void testChangeSetResults() throws Exception { * * @throws Exception */ + @Test public void testDeletePlusAdd() throws Exception { final String archivename = "cpio"; File input = this.createArchive(archivename); @@ -531,6 +544,7 @@ public void testDeletePlusAdd() throws Exception { * * @throws Exception */ + @Test public void testDeleteFromAndAddToZip() throws Exception { final String archivename = "zip"; File input = this.createArchive(archivename); @@ -577,6 +591,7 @@ public void testDeleteFromAndAddToZip() throws Exception { * * @throws Exception */ + @Test public void testDeleteFromAndAddToZipUsingZipFilePerform() throws Exception { final String archivename = "zip"; File input = this.createArchive(archivename); @@ -622,6 +637,7 @@ public void testDeleteFromAndAddToZipUsingZipFilePerform() throws Exception { * * @throws Exception */ + @Test public void testAddDeleteAdd() throws Exception { final String archivename = "cpio"; File input = this.createArchive(archivename); @@ -670,6 +686,7 @@ public void testAddDeleteAdd() throws Exception { * * @throws Exception */ + @Test public void testDeleteAddDelete() throws Exception { final String archivename = "cpio"; File input = this.createArchive(archivename); @@ -719,6 +736,7 @@ public void testDeleteAddDelete() throws Exception { * * @throws Exception */ + @Test public void testDeleteFromZip() throws Exception { ArchiveOutputStream out = null; ArchiveInputStream ais = null; @@ -759,6 +777,7 @@ public void testDeleteFromZip() throws Exception { * * @throws Exception */ + @Test public void testDeleteFromTar() throws Exception { 
ArchiveOutputStream out = null; ArchiveInputStream ais = null; @@ -796,6 +815,7 @@ public void testDeleteFromTar() throws Exception { * * @throws Exception */ + @Test public void testDeleteFromJar() throws Exception { ArchiveOutputStream out = null; ArchiveInputStream ais = null; @@ -831,6 +851,7 @@ public void testDeleteFromJar() throws Exception { this.checkArchiveContent(temp, expected); } + @Test public void testDeleteFromAndAddToTar() throws Exception { ArchiveOutputStream out = null; ArchiveInputStream ais = null; @@ -884,6 +905,7 @@ public void testDeleteFromAndAddToTar() throws Exception { * * @throws Exception */ + @Test public void testDeleteFromAndAddToJar() throws Exception { ArchiveOutputStream out = null; ArchiveInputStream ais = null; @@ -929,6 +951,7 @@ public void testDeleteFromAndAddToJar() throws Exception { * * @throws Exception */ + @Test public void testDeleteFromAr() throws Exception { ArchiveOutputStream out = null; ArchiveInputStream ais = null; @@ -967,6 +990,7 @@ public void testDeleteFromAr() throws Exception { * * @throws Exception */ + @Test public void testDeleteFromAndAddToAr() throws Exception { ArchiveOutputStream out = null; ArchiveInputStream ais = null; @@ -1015,6 +1039,7 @@ public void testDeleteFromAndAddToAr() throws Exception { * * @throws Exception */ + @Test public void testRenameAndDelete() throws Exception { } @@ -1028,6 +1053,7 @@ public void testRenameAndDelete() throws Exception { * * @throws Exception */ + @Test public void testAddMoveDelete() throws Exception { } @@ -1036,6 +1062,7 @@ public void testAddMoveDelete() throws Exception { * * @throws Exception */ + @Test public void testAddToEmptyArchive() throws Exception { final String archivename = "zip"; File input = this.createEmptyArchive(archivename); @@ -1081,6 +1108,7 @@ public void testAddToEmptyArchive() throws Exception { * * @throws Exception */ + @Test public void testDeleteAddToOneFileArchive() throws Exception { final String archivename = "zip"; File 
input = this.createSingleEntryArchive(archivename); @@ -1129,6 +1157,7 @@ public void testDeleteAddToOneFileArchive() throws Exception { * * @throws Exception */ + @Test public void testAddDeleteToOneFileArchive() throws Exception { final String archivename = "cpio"; File input = this.createSingleEntryArchive(archivename); @@ -1178,6 +1207,7 @@ public void testAddDeleteToOneFileArchive() throws Exception { * * @throws Exception */ + @Test public void testAddAllreadyExistingWithReplaceTrue() throws Exception { final String archivename = "zip"; File input = this.createArchive(archivename); @@ -1222,6 +1252,7 @@ public void testAddAllreadyExistingWithReplaceTrue() throws Exception { * * @throws Exception */ + @Test public void testAddAllreadyExistingWithReplaceFalse() throws Exception { final String archivename = "zip"; File input = this.createArchive(archivename); diff --git a/src/test/java/org/apache/commons/compress/compressors/BZip2TestCase.java b/src/test/java/org/apache/commons/compress/compressors/BZip2TestCase.java index 21f7ad1e951..51321664ed9 100644 --- a/src/test/java/org/apache/commons/compress/compressors/BZip2TestCase.java +++ b/src/test/java/org/apache/commons/compress/compressors/BZip2TestCase.java @@ -18,6 +18,8 @@ */ package org.apache.commons.compress.compressors; +import static org.junit.Assert.*; + import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; @@ -27,9 +29,11 @@ import org.apache.commons.compress.AbstractTestCase; import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; import org.apache.commons.compress.utils.IOUtils; +import org.junit.Test; public final class BZip2TestCase extends AbstractTestCase { + @Test public void testBzipCreation() throws Exception { File output = null; final File input = getFile("test.txt"); @@ -58,6 +62,7 @@ public void testBzipCreation() throws Exception { assertEquals(input.length(),decompressed.length()); } + @Test public void testBzip2Unarchive() throws 
Exception { final File input = getFile("bla.txt.bz2"); final File output = new File(dir, "bla.txt"); @@ -69,6 +74,7 @@ public void testBzip2Unarchive() throws Exception { os.close(); } + @Test public void testConcatenatedStreamsReadFirstOnly() throws Exception { final File input = getFile("multiple.bz2"); final InputStream is = new FileInputStream(input); @@ -86,6 +92,7 @@ public void testConcatenatedStreamsReadFirstOnly() throws Exception { } } + @Test public void testConcatenatedStreamsReadFully() throws Exception { final File input = getFile("multiple.bz2"); final InputStream is = new FileInputStream(input); @@ -105,6 +112,7 @@ public void testConcatenatedStreamsReadFully() throws Exception { } } + @Test public void testCOMPRESS131() throws Exception { final File input = getFile("COMPRESS-131.bz2"); final InputStream is = new FileInputStream(input); diff --git a/src/test/java/org/apache/commons/compress/compressors/DeflateTestCase.java b/src/test/java/org/apache/commons/compress/compressors/DeflateTestCase.java index 17615deaab5..7d6a7e5de5c 100644 --- a/src/test/java/org/apache/commons/compress/compressors/DeflateTestCase.java +++ b/src/test/java/org/apache/commons/compress/compressors/DeflateTestCase.java @@ -29,6 +29,7 @@ import org.apache.commons.compress.compressors.deflate.DeflateCompressorOutputStream; import org.apache.commons.compress.compressors.deflate.DeflateParameters; import org.apache.commons.compress.utils.IOUtils; +import org.junit.Test; public final class DeflateTestCase extends AbstractTestCase { @@ -37,6 +38,7 @@ public final class DeflateTestCase extends AbstractTestCase { * * @throws Exception */ + @Test public void testDeflateCreation() throws Exception { final File input = getFile("test1.xml"); final File output = new File(dir, "test1.xml.deflatez"); @@ -59,6 +61,7 @@ public void testDeflateCreation() throws Exception { * * @throws Exception */ + @Test public void testRawDeflateCreation() throws Exception { final File input = 
getFile("test1.xml"); final File output = new File(dir, "test1.xml.deflate"); @@ -82,6 +85,7 @@ public void testRawDeflateCreation() throws Exception { * * @throws Exception */ + @Test public void testDeflateUnarchive() throws Exception { final File input = getFile("bla.tar.deflatez"); final File output = new File(dir, "bla.tar"); @@ -109,6 +113,7 @@ public void testDeflateUnarchive() throws Exception { * * @throws Exception */ + @Test public void testRawDeflateUnarchive() throws Exception { final File input = getFile("bla.tar.deflate"); final File output = new File(dir, "bla.tar"); diff --git a/src/test/java/org/apache/commons/compress/compressors/FramedSnappyTestCase.java b/src/test/java/org/apache/commons/compress/compressors/FramedSnappyTestCase.java index fb0a2801ed7..0f38c28fc21 100644 --- a/src/test/java/org/apache/commons/compress/compressors/FramedSnappyTestCase.java +++ b/src/test/java/org/apache/commons/compress/compressors/FramedSnappyTestCase.java @@ -18,7 +18,7 @@ */ package org.apache.commons.compress.compressors; -import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.*; import java.io.BufferedInputStream; import java.io.File; @@ -30,10 +30,12 @@ import org.apache.commons.compress.AbstractTestCase; import org.apache.commons.compress.compressors.snappy.FramedSnappyCompressorInputStream; import org.apache.commons.compress.utils.IOUtils; +import org.junit.Test; public final class FramedSnappyTestCase extends AbstractTestCase { + @Test public void testDefaultExtraction() throws Exception { testUnarchive(new StreamWrapper() { public CompressorInputStream wrap(InputStream is) throws IOException { @@ -42,6 +44,7 @@ public CompressorInputStream wrap(InputStream is) throws IOException { }); } + @Test public void testDefaultExtractionViaFactory() throws Exception { testUnarchive(new StreamWrapper() { public CompressorInputStream wrap(InputStream is) throws Exception { @@ -52,6 +55,7 @@ public CompressorInputStream wrap(InputStream 
is) throws Exception { }); } + @Test public void testDefaultExtractionViaFactoryAutodetection() throws Exception { testUnarchive(new StreamWrapper() { public CompressorInputStream wrap(InputStream is) throws Exception { diff --git a/src/test/java/org/apache/commons/compress/compressors/GZipTestCase.java b/src/test/java/org/apache/commons/compress/compressors/GZipTestCase.java index 6446bff9194..58d9f2f258c 100644 --- a/src/test/java/org/apache/commons/compress/compressors/GZipTestCase.java +++ b/src/test/java/org/apache/commons/compress/compressors/GZipTestCase.java @@ -18,6 +18,8 @@ */ package org.apache.commons.compress.compressors; +import static org.junit.Assert.*; + import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; @@ -35,9 +37,11 @@ import org.apache.commons.compress.compressors.gzip.GzipParameters; import org.apache.commons.compress.utils.IOUtils; import org.junit.Assert; +import org.junit.Test; public final class GZipTestCase extends AbstractTestCase { + @Test public void testGzipCreation() throws Exception { final File input = getFile("test1.xml"); final File output = new File(dir, "test1.xml.gz"); @@ -55,6 +59,7 @@ public void testGzipCreation() throws Exception { } } + @Test public void testGzipUnarchive() throws Exception { final File input = getFile("bla.tgz"); final File output = new File(dir, "bla.tar"); @@ -77,6 +82,7 @@ public void testGzipUnarchive() throws Exception { } } + @Test public void testConcatenatedStreamsReadFirstOnly() throws Exception { final File input = getFile("multiple.gz"); final InputStream is = new FileInputStream(input); @@ -94,6 +100,7 @@ public void testConcatenatedStreamsReadFirstOnly() throws Exception { } } + @Test public void testConcatenatedStreamsReadFully() throws Exception { final File input = getFile("multiple.gz"); final InputStream is = new FileInputStream(input); @@ -116,6 +123,7 @@ public void testConcatenatedStreamsReadFully() throws Exception { /** * @see 
"https://issues.apache.org/jira/browse/COMPRESS-84" */ + @Test public void testCorruptedInput() throws Exception { InputStream in = null; OutputStream out = null; @@ -153,6 +161,7 @@ public void testCorruptedInput() throws Exception { } } + @Test public void testInteroperabilityWithGzipCompressorInputStream() throws Exception { FileInputStream fis = new FileInputStream(getFile("test3.xml")); byte[] content; @@ -181,6 +190,7 @@ public void testInteroperabilityWithGzipCompressorInputStream() throws Exception Assert.assertArrayEquals("uncompressed content", content, content2); } + @Test public void testInteroperabilityWithGZIPInputStream() throws Exception { FileInputStream fis = new FileInputStream(getFile("test3.xml")); byte[] content; @@ -209,6 +219,7 @@ public void testInteroperabilityWithGZIPInputStream() throws Exception { Assert.assertArrayEquals("uncompressed content", content, content2); } + @Test public void testInvalidCompressionLevel() { GzipParameters parameters = new GzipParameters(); try { @@ -247,18 +258,22 @@ private void testExtraFlags(int compressionLevel, int flag) throws Exception { assertEquals("extra flags (XFL)", flag, bout.toByteArray()[8]); } + @Test public void testExtraFlagsFastestCompression() throws Exception { testExtraFlags(Deflater.BEST_SPEED, 4); } + @Test public void testExtraFlagsBestCompression() throws Exception { testExtraFlags(Deflater.BEST_COMPRESSION, 2); } + @Test public void testExtraFlagsDefaultCompression() throws Exception { testExtraFlags(Deflater.DEFAULT_COMPRESSION, 0); } + @Test public void testOverWrite() throws Exception { GzipCompressorOutputStream out = new GzipCompressorOutputStream(new ByteArrayOutputStream()); out.close(); @@ -270,6 +285,7 @@ public void testOverWrite() throws Exception { } } + @Test public void testMetadataRoundTrip() throws Exception { ByteArrayOutputStream bout = new ByteArrayOutputStream(); diff --git a/src/test/java/org/apache/commons/compress/compressors/LZMATestCase.java 
b/src/test/java/org/apache/commons/compress/compressors/LZMATestCase.java index bb108cec423..cdc00f51a4b 100644 --- a/src/test/java/org/apache/commons/compress/compressors/LZMATestCase.java +++ b/src/test/java/org/apache/commons/compress/compressors/LZMATestCase.java @@ -26,9 +26,11 @@ import org.apache.commons.compress.AbstractTestCase; import org.apache.commons.compress.compressors.lzma.LZMACompressorInputStream; import org.apache.commons.compress.utils.IOUtils; +import org.junit.Test; public final class LZMATestCase extends AbstractTestCase { + @Test public void testLZMAUnarchive() throws Exception { final File input = getFile("bla.tar.lzma"); final File output = new File(dir, "bla.tar"); diff --git a/src/test/java/org/apache/commons/compress/compressors/Pack200TestCase.java b/src/test/java/org/apache/commons/compress/compressors/Pack200TestCase.java index 6c13db0d8d2..c2dc7a16307 100644 --- a/src/test/java/org/apache/commons/compress/compressors/Pack200TestCase.java +++ b/src/test/java/org/apache/commons/compress/compressors/Pack200TestCase.java @@ -18,6 +18,8 @@ */ package org.apache.commons.compress.compressors; +import static org.junit.Assert.*; + import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; @@ -38,21 +40,26 @@ import org.apache.commons.compress.compressors.pack200.Pack200CompressorOutputStream; import org.apache.commons.compress.compressors.pack200.Pack200Strategy; import org.apache.commons.compress.utils.IOUtils; +import org.junit.Test; public final class Pack200TestCase extends AbstractTestCase { + @Test public void testJarUnarchiveAllInMemory() throws Exception { jarUnarchiveAll(false, Pack200Strategy.IN_MEMORY); } + @Test public void testJarUnarchiveAllFileArgInMemory() throws Exception { jarUnarchiveAll(true, Pack200Strategy.IN_MEMORY); } + @Test public void testJarUnarchiveAllTempFile() throws Exception { jarUnarchiveAll(false, Pack200Strategy.TEMP_FILE); } + @Test public void testJarUnarchiveAllFileTempFile() 
throws Exception { jarUnarchiveAll(true, Pack200Strategy.TEMP_FILE); } @@ -89,10 +96,12 @@ private void jarUnarchiveAll(boolean useFile, Pack200Strategy mode) } } + @Test public void testJarArchiveCreationInMemory() throws Exception { jarArchiveCreation(Pack200Strategy.IN_MEMORY); } + @Test public void testJarArchiveCreationTempFile() throws Exception { jarArchiveCreation(Pack200Strategy.TEMP_FILE); } @@ -137,6 +146,7 @@ private void jarArchiveCreation(Pack200Strategy mode) throws Exception { } } + @Test public void testGoodSignature() throws Exception { final InputStream is = new FileInputStream(getFile("bla.pack")); try { @@ -148,6 +158,7 @@ public void testGoodSignature() throws Exception { } } + @Test public void testBadSignature() throws Exception { final InputStream is = new FileInputStream(getFile("bla.jar")); try { @@ -159,6 +170,7 @@ public void testBadSignature() throws Exception { } } + @Test public void testShortSignature() throws Exception { final InputStream is = new FileInputStream(getFile("bla.pack")); try { @@ -170,6 +182,7 @@ public void testShortSignature() throws Exception { } } + @Test public void testInputStreamMethods() throws Exception { Map m = new HashMap(); m.put("foo", "bar"); @@ -196,6 +209,7 @@ public void testInputStreamMethods() throws Exception { } } + @Test public void testOutputStreamMethods() throws Exception { final File output = new File(dir, "bla.pack"); Map m = new HashMap(); diff --git a/src/test/java/org/apache/commons/compress/compressors/XZTestCase.java b/src/test/java/org/apache/commons/compress/compressors/XZTestCase.java index e959dffd947..2a86b3f5024 100644 --- a/src/test/java/org/apache/commons/compress/compressors/XZTestCase.java +++ b/src/test/java/org/apache/commons/compress/compressors/XZTestCase.java @@ -18,6 +18,8 @@ */ package org.apache.commons.compress.compressors; +import static org.junit.Assert.*; + import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; @@ -28,9 +30,11 @@ 
import org.apache.commons.compress.AbstractTestCase; import org.apache.commons.compress.compressors.xz.XZCompressorInputStream; import org.apache.commons.compress.utils.IOUtils; +import org.junit.Test; public final class XZTestCase extends AbstractTestCase { + @Test public void testXZCreation() throws Exception { long max = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax(); System.out.println("XZTestCase: HeapMax="+max+" bytes "+(double)max/(1024*1024)+" MB"); @@ -50,6 +54,7 @@ public void testXZCreation() throws Exception { } } + @Test public void testXZUnarchive() throws Exception { final File input = getFile("bla.tar.xz"); final File output = new File(dir, "bla.tar"); @@ -72,6 +77,7 @@ public void testXZUnarchive() throws Exception { } } + @Test public void testConcatenatedStreamsReadFirstOnly() throws Exception { final File input = getFile("multiple.xz"); final InputStream is = new FileInputStream(input); @@ -89,6 +95,7 @@ public void testConcatenatedStreamsReadFirstOnly() throws Exception { } } + @Test public void testConcatenatedStreamsReadFully() throws Exception { final File input = getFile("multiple.xz"); final InputStream is = new FileInputStream(input); diff --git a/src/test/java/org/apache/commons/compress/compressors/ZTestCase.java b/src/test/java/org/apache/commons/compress/compressors/ZTestCase.java index be1474613ca..3ea1e2e620c 100644 --- a/src/test/java/org/apache/commons/compress/compressors/ZTestCase.java +++ b/src/test/java/org/apache/commons/compress/compressors/ZTestCase.java @@ -18,6 +18,8 @@ */ package org.apache.commons.compress.compressors; +import static org.junit.Assert.*; + import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; @@ -28,9 +30,11 @@ import org.apache.commons.compress.AbstractTestCase; import org.apache.commons.compress.compressors.z.ZCompressorInputStream; import org.apache.commons.compress.utils.IOUtils; +import org.junit.Test; public final class ZTestCase extends 
AbstractTestCase { + @Test public void testZUnarchive() throws Exception { testUnarchive(new StreamWrapper() { public CompressorInputStream wrap(InputStream is) throws IOException { @@ -39,6 +43,7 @@ public CompressorInputStream wrap(InputStream is) throws IOException { }); } + @Test public void testZUnarchiveViaFactory() throws Exception { testUnarchive(new StreamWrapper() { public CompressorInputStream wrap(InputStream is) throws Exception { @@ -48,6 +53,7 @@ public CompressorInputStream wrap(InputStream is) throws Exception { }); } + @Test public void testZUnarchiveViaAutoDetection() throws Exception { testUnarchive(new StreamWrapper() { public CompressorInputStream wrap(InputStream is) throws Exception { @@ -57,6 +63,7 @@ public CompressorInputStream wrap(InputStream is) throws Exception { }); } + @Test public void testMatches() throws Exception { assertFalse(ZCompressorInputStream.matches(new byte[] { 1, 2, 3, 4 }, 4)); assertFalse(ZCompressorInputStream.matches(new byte[] { 0x1f, 2, 3, 4 }, 4)); diff --git a/src/test/java/org/apache/commons/compress/compressors/pack200/Pack200UtilsTest.java b/src/test/java/org/apache/commons/compress/compressors/pack200/Pack200UtilsTest.java index cd2eb721db8..2a75a086787 100644 --- a/src/test/java/org/apache/commons/compress/compressors/pack200/Pack200UtilsTest.java +++ b/src/test/java/org/apache/commons/compress/compressors/pack200/Pack200UtilsTest.java @@ -29,9 +29,11 @@ import org.apache.commons.compress.archivers.ArchiveInputStream; import org.apache.commons.compress.archivers.ArchiveStreamFactory; import org.apache.commons.compress.utils.IOUtils; +import org.junit.Test; public final class Pack200UtilsTest extends AbstractTestCase { + @Test public void testNormalize() throws Throwable { final File input = getFile("bla.jar"); final File[] output = createTempDirAndFile(); @@ -68,6 +70,7 @@ public void testNormalize() throws Throwable { } } + @Test public void testNormalizeInPlace() throws Throwable { final File input = 
getFile("bla.jar"); final File[] output = createTempDirAndFile(); diff --git a/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java b/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java index aa1083e7163..6a73c3b4c05 100644 --- a/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java @@ -18,7 +18,7 @@ */ package org.apache.commons.compress.compressors.snappy; -import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.*; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -30,10 +30,12 @@ import org.apache.commons.compress.AbstractTestCase; import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; import org.apache.commons.compress.utils.IOUtils; +import org.junit.Test; public final class FramedSnappyCompressorInputStreamTest extends AbstractTestCase { + @Test public void testMatches() throws IOException { assertFalse(FramedSnappyCompressorInputStream.matches(new byte[10], 10)); byte[] b = new byte[12]; @@ -52,6 +54,7 @@ public void testMatches() throws IOException { /** * Something big enough to make buffers slide. 
*/ + @Test public void testLoremIpsum() throws Exception { final FileInputStream isSz = new FileInputStream(getFile("lorem-ipsum.txt.sz")); final File outputSz = new File(dir, "lorem-ipsum.1"); @@ -101,6 +104,7 @@ public void testLoremIpsum() throws Exception { } } + @Test public void testRemainingChunkTypes() throws Exception { final FileInputStream isSz = new FileInputStream(getFile("mixed.txt.sz")); ByteArrayOutputStream out = new ByteArrayOutputStream(); @@ -123,6 +127,7 @@ public void testRemainingChunkTypes() throws Exception { }, out.toByteArray()); } + @Test public void testAvailable() throws Exception { final FileInputStream isSz = new FileInputStream(getFile("mixed.txt.sz")); try { @@ -142,6 +147,7 @@ public void testAvailable() throws Exception { } } + @Test public void testUnskippableChunk() { byte[] input = new byte[] { (byte) 0xff, 6, 0, 0, 's', 'N', 'a', 'P', 'p', 'Y', @@ -158,12 +164,13 @@ public void testUnskippableChunk() { } } + @Test public void testChecksumUnmasking() { testChecksumUnmasking(0xc757l); testChecksumUnmasking(0xffffc757l); } - public void testChecksumUnmasking(long x) { + private void testChecksumUnmasking(long x) { assertEquals(Long.toHexString(x), Long.toHexString(FramedSnappyCompressorInputStream .unmask(mask(x)))); From 5cf68bdc46c7bcc0fc5eb3cacd1c84514ef552cf Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 20 Feb 2015 16:18:00 +0000 Subject: [PATCH 150/189] return 0 from read when asked to read 0 bytes - COMPRESS-309 git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1661151 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 5 +++ .../bzip2/BZip2CompressorInputStream.java | 3 ++ .../bzip2/BZip2CompressorInputStreamTest.java | 33 +++++++++++++++++++ 3 files changed, 41 insertions(+) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 0e6db4c47c2..5ead1073d78 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -54,6 +54,11 @@ breaks backwards 
compatibility for code which used the old package. This also changes the superclass of ZCompressorInputStream. "> + + BZip2CompressorInputStream#read would return -1 when asked to + read 0 bytes. + ArchiveStreamFactory fails to pass on the encoding when creating some streams. * ArjArchiveInputStream diff --git a/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream.java index e0e3b1ec135..b32b1f9b21e 100644 --- a/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream.java @@ -165,6 +165,9 @@ public int read(final byte[] dest, final int offs, final int len) if (this.in == null) { throw new IOException("stream closed"); } + if (len == 0) { + return 0; + } final int hi = offs + len; int destOffs = offs; diff --git a/src/test/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStreamTest.java b/src/test/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStreamTest.java index 2ee4567d05f..d15d74a6728 100644 --- a/src/test/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStreamTest.java @@ -20,8 +20,11 @@ import static org.apache.commons.compress.AbstractTestCase.getFile; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; import java.io.FileInputStream; import java.io.IOException; +import org.junit.Assert; import org.junit.Test; public class BZip2CompressorInputStreamTest { @@ -36,4 +39,34 @@ public void shouldThrowAnIOExceptionWhenAppliedToAZipFile() throws Exception { } } + /** + * @see "https://issues.apache.org/jira/browse/COMPRESS-309" + */ + @Test + public void readOfLength0ShouldReturn0() throws Exception { + // Create a big 
random piece of data + byte[] rawData = new byte[1048576]; + for (int i=0; i < rawData.length; ++i) { + rawData[i] = (byte) Math.floor(Math.random()*256); + } + + // Compress it + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + BZip2CompressorOutputStream bzipOut = new BZip2CompressorOutputStream(baos); + bzipOut.write(rawData); + bzipOut.flush(); + bzipOut.close(); + baos.flush(); + baos.close(); + + // Try to read it back in + ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); + BZip2CompressorInputStream bzipIn = new BZip2CompressorInputStream(bais); + byte[] buffer = new byte[1024]; + Assert.assertEquals(1024, bzipIn.read(buffer, 0, 1024)); + Assert.assertEquals(0, bzipIn.read(buffer, 1024, 0)); + Assert.assertEquals(1024, bzipIn.read(buffer, 0, 1024)); + bzipIn.close(); + } + } From 4355c0d224342b5c94881b7aabb441f765867dfe Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 20 Feb 2015 16:21:26 +0000 Subject: [PATCH 151/189] add/fix a few dates in changes report git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1661153 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 5ead1073d78..2053b0a0372 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -59,7 +59,7 @@ This also changes the superclass of ZCompressorInputStream. BZip2CompressorInputStream#read would return -1 when asked to read 0 bytes. - + ArchiveStreamFactory fails to pass on the encoding when creating some streams. * ArjArchiveInputStream * CpioArchiveInputStream @@ -68,17 +68,17 @@ This also changes the superclass of ZCompressorInputStream. * TarArchiveInputStream * JarArchiveOutputStream - + Restore immutability/thread-safety to ArchiveStreamFactory. The class is now immutable provided that the method setEntryEncoding is not used. The class is thread-safe. 
- + Restore immutability/thread-safety to CompressorStreamFactory. The class is now immutable provided that the method setDecompressConcatenated is not used. The class is thread-safe. - Added support for parallel compression. This low-level API allows a client to build a zip/jar file by using the class From f8bf45444c49053a92f6f560c5008d5ac43aa27f Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 20 Feb 2015 16:30:45 +0000 Subject: [PATCH 152/189] revert 'fix' for COMPRESS-301 as GNU tar doesn't like embedded NULs either git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1661155 13f79535-47bb-0310-9956-ffa450edef68 --- .../compress/archivers/tar/TarUtils.java | 5 ----- .../compress/archivers/tar/TarUtilsTest.java | 6 ++++++ src/test/resources/COMPRESS-178.tar | Bin 10240 -> 10240 bytes 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java index 7f7d57feaf2..94e175c210d 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java @@ -132,11 +132,6 @@ public static long parseOctal(final byte[] buffer, final int offset, final int l for ( ;start < end; start++) { final byte currentByte = buffer[start]; - if (currentByte == 0) { - // some archivers don't pad the whole field but just insert a NUL - // COMPRESS-301 - break; - } // CheckStyle:MagicNumber OFF if (currentByte < '0' || currentByte > '7'){ throw new IllegalArgumentException( diff --git a/src/test/java/org/apache/commons/compress/archivers/tar/TarUtilsTest.java b/src/test/java/org/apache/commons/compress/archivers/tar/TarUtilsTest.java index 846b2f6a16b..5ebbbb5b71b 100644 --- a/src/test/java/org/apache/commons/compress/archivers/tar/TarUtilsTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/tar/TarUtilsTest.java @@ 
-102,6 +102,12 @@ public void testParseOctalInvalid() throws Exception{ fail("Expected IllegalArgumentException - embedded space"); } catch (IllegalArgumentException expected) { } + buffer = " 0\00007 ".getBytes(CharsetNames.UTF_8); // Invalid - embedded NUL + try { + TarUtils.parseOctal(buffer,0, buffer.length); + fail("Expected IllegalArgumentException - embedded NUL"); + } catch (IllegalArgumentException expected) { + } } private void checkRoundTripOctal(final long value, final int bufsize) { diff --git a/src/test/resources/COMPRESS-178.tar b/src/test/resources/COMPRESS-178.tar index 7e2ca7af0f12a0f5655008d12af819d4ac45d414..6a13219797b81a7bdaa3a62fcf36d747b4bcadb0 100644 GIT binary patch delta 14 VcmZn&Xb6~)Jvp0=dt+IbIshv&1%m(p delta 12 TcmZn&Xb6~)&B(YhXNo!i9vcL9 From af1f8163af8b52bc28a18109858744967935d828 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 20 Feb 2015 16:44:36 +0000 Subject: [PATCH 153/189] don't read system properties for default charset - COMPRESS-308 git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1661157 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 7 +++++-- .../commons/compress/archivers/zip/ZipEncodingHelper.java | 2 +- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 2053b0a0372..66104794214 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -54,8 +54,11 @@ breaks backwards compatibility for code which used the old package. This also changes the superclass of ZCompressorInputStream. "> - + + ZipEncodingHelper no longer reads system properties directly + to determine the default charset. + + BZip2CompressorInputStream#read would return -1 when asked to read 0 bytes. 
diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipEncodingHelper.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipEncodingHelper.java index c0f1cfaef22..a4f0f801938 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipEncodingHelper.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipEncodingHelper.java @@ -243,7 +243,7 @@ public static ZipEncoding getZipEncoding(String name) { static boolean isUTF8(String charsetName) { if (charsetName == null) { // check platform's default encoding - charsetName = System.getProperty("file.encoding"); + charsetName = Charset.defaultCharset().name(); } if (Charsets.UTF_8.name().equalsIgnoreCase(charsetName)) { return true; From 02b3c2491c8d41a52044ab3c1d4a531d0f769f6e Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Tue, 24 Feb 2015 17:43:17 +0000 Subject: [PATCH 154/189] CGI must be svn:executable * git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1662060 13f79535-47bb-0310-9956-ffa450edef68 --- src/site/resources/download_compress.cgi | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 src/site/resources/download_compress.cgi diff --git a/src/site/resources/download_compress.cgi b/src/site/resources/download_compress.cgi old mode 100644 new mode 100755 From c12c48ce91c6b5cceb61043a914d0b438b7a6900 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Sun, 1 Mar 2015 13:36:51 +0000 Subject: [PATCH 155/189] Name the parameterised tests git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1663097 13f79535-47bb-0310-9956-ffa450edef68 --- .../java/org/apache/commons/compress/ArchiveReadTest.java | 8 ++++---- .../apache/commons/compress/archivers/LongPathTest.java | 8 ++++---- .../commons/compress/archivers/LongSymLinkTest.java | 8 ++++---- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/src/test/java/org/apache/commons/compress/ArchiveReadTest.java 
b/src/test/java/org/apache/commons/compress/ArchiveReadTest.java index ac4a74106a1..e8a3a4bcf33 100644 --- a/src/test/java/org/apache/commons/compress/ArchiveReadTest.java +++ b/src/test/java/org/apache/commons/compress/ArchiveReadTest.java @@ -53,8 +53,8 @@ public class ArchiveReadTest extends AbstractTestCase { private File file; - public ArchiveReadTest(File file){ - this.file = file; + public ArchiveReadTest(String file){ + this.file = new File(ARCDIR, file); } @BeforeClass @@ -72,11 +72,11 @@ public static void setUpFileList() throws Exception { br.close(); } - @Parameters + @Parameters(name = "file={0}") public static Collection data() { assertTrue(ARCDIR.exists()); Collection params = new ArrayList(); - for (File f : ARCDIR.listFiles(new FilenameFilter() { + for (String f : ARCDIR.list(new FilenameFilter() { public boolean accept(File dir, String name) { return !name.endsWith(".txt"); } diff --git a/src/test/java/org/apache/commons/compress/archivers/LongPathTest.java b/src/test/java/org/apache/commons/compress/archivers/LongPathTest.java index dfc52aaffce..3aa423da75e 100644 --- a/src/test/java/org/apache/commons/compress/archivers/LongPathTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/LongPathTest.java @@ -60,8 +60,8 @@ public class LongPathTest extends AbstractTestCase { private File file; - public LongPathTest(File file){ - this.file = file; + public LongPathTest(String file){ + this.file = new File(ARCDIR, file); } @BeforeClass @@ -79,10 +79,10 @@ public static void setUpFileList() throws Exception { br.close(); } - @Parameters + @Parameters(name = "file={0}") public static Collection data() { Collection params = new ArrayList(); - for (File f : ARCDIR.listFiles(new FilenameFilter() { + for (String f : ARCDIR.list(new FilenameFilter() { public boolean accept(File dir, String name) { return !name.endsWith(".txt"); } diff --git a/src/test/java/org/apache/commons/compress/archivers/LongSymLinkTest.java 
b/src/test/java/org/apache/commons/compress/archivers/LongSymLinkTest.java index 9a08825dbdb..6ae4aa834dd 100644 --- a/src/test/java/org/apache/commons/compress/archivers/LongSymLinkTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/LongSymLinkTest.java @@ -60,8 +60,8 @@ public class LongSymLinkTest extends AbstractTestCase { private File file; - public LongSymLinkTest(File file){ - this.file = file; + public LongSymLinkTest(String file){ + this.file = new File(ARCDIR, file); } @BeforeClass @@ -79,10 +79,10 @@ public static void setUpFileList() throws Exception { br.close(); } - @Parameters + @Parameters(name = "file={0}") public static Collection data() { Collection params = new ArrayList(); - for (File f : ARCDIR.listFiles(new FilenameFilter() { + for (String f : ARCDIR.list(new FilenameFilter() { public boolean accept(File dir, String name) { return !name.endsWith(".txt"); } From 31b546aecd87725c8a1274d1485cee89bdddfdce Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sat, 28 Mar 2015 18:58:16 +0000 Subject: [PATCH 156/189] COMPRESS-312 normalize filename in TarArchiveEntry's (File, String) constructor git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1669822 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 4 ++++ .../compress/archivers/tar/TarArchiveEntry.java | 13 +++++++------ 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 66104794214..5a2f5933665 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -54,6 +54,10 @@ breaks backwards compatibility for code which used the old package. This also changes the superclass of ZCompressorInputStream. "> + + TarArchiveEntry's constructor with a File and a String arg + didn't normalize the name. + ZipEncodingHelper no longer reads system properties directly to determine the default charset. 
diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java index a61a9b096a4..9ba145f4de6 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java @@ -263,7 +263,7 @@ public TarArchiveEntry(String name, byte linkFlag, boolean preserveLeadingSlashe * @param file The file that the entry represents. */ public TarArchiveEntry(File file) { - this(file, normalizeFileName(file.getPath(), false)); + this(file, file.getPath()); } /** @@ -274,23 +274,24 @@ public TarArchiveEntry(File file) { * @param fileName the name to be used for the entry. */ public TarArchiveEntry(File file, String fileName) { + String normalizedName = normalizeFileName(fileName, false); this.file = file; if (file.isDirectory()) { this.mode = DEFAULT_DIR_MODE; this.linkFlag = LF_DIR; - int nameLength = fileName.length(); - if (nameLength == 0 || fileName.charAt(nameLength - 1) != '/') { - this.name = fileName + "/"; + int nameLength = normalizedName.length(); + if (nameLength == 0 || normalizedName.charAt(nameLength - 1) != '/') { + this.name = normalizedName + "/"; } else { - this.name = fileName; + this.name = normalizedName; } } else { this.mode = DEFAULT_FILE_MODE; this.linkFlag = LF_NORMAL; this.size = file.length(); - this.name = fileName; + this.name = normalizedName; } this.modTime = file.lastModified() / MILLIS_PER_SECOND; From f24b9fd5ca348d8c482c3c50499c3e9582edb1f9 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sat, 28 Mar 2015 19:00:01 +0000 Subject: [PATCH 157/189] sort 1.10 changelog entries by date git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1669823 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 44 ++++++++++++++++++++--------------------- 1 file changed, 21 insertions(+), 23 deletions(-) diff --git 
a/src/changes/changes.xml b/src/changes/changes.xml index 5a2f5933665..5e2bd810c14 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -85,6 +85,16 @@ This also changes the superclass of ZCompressorInputStream. The class is now immutable provided that the method setDecompressConcatenated is not used. The class is thread-safe. + + SevenZFile now throws the specific PasswordRequiredException + when it encounters an encrypted stream but no password has + been specified. + + + Improved error message when tar encounters a groupId that is + too big to write without using the STAR or POSIX format. + Added support for parallel compression. This low-level API allows @@ -97,7 +107,6 @@ This also changes the superclass of ZCompressorInputStream. handled by client code and is not part of commons-compress for this release. - Cut overall object instantiation in half by changing file @@ -109,35 +118,24 @@ This also changes the superclass of ZCompressorInputStream. These may have some interesting additional usages in the near future. - - - Moved the package - org.apache.commons.compress.compressors.z._internal_ to - org.apache.commons.compress.compressors.lzw and made it part - of the API that is officially supported. This will break - existing code that uses the old package. - - - New methods in ZipArchiveOutputStream and ZipFile allows - entries to be copied from one archive to another without - having to re-compress them. - ZipFile logs a warning in its finalizer when its constructor has thrown an exception reading the file - for example if the file doesn't exist. - - Improved error message when tar encounters a groupId that is - too big to write without using the STAR or POSIX format. + New methods in ZipArchiveOutputStream and ZipFile allows + entries to be copied from one archive to another without + having to re-compress them. 
- - SevenZFile now throws the specific PasswordRequiredException - when it encounters an encrypted stream but no password has - been specified. + + Moved the package + org.apache.commons.compress.compressors.z._internal_ to + org.apache.commons.compress.compressors.lzw and made it part + of the API that is officially supported. This will break + existing code that uses the old package. From 6e8bb88263dd64b703679bf853c15f40ac07e347 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Mon, 30 Mar 2015 15:59:17 +0000 Subject: [PATCH 158/189] COMPRESS-313 add auto-detection for LZMA streams git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1670129 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 3 + .../compressors/CompressorStreamFactory.java | 6 + .../lzma/LZMACompressorInputStream.java | 32 ++++ .../compress/compressors/lzma/LZMAUtils.java | 161 ++++++++++++++++++ src/site/xdoc/index.xml | 1 + .../compress/compressors/LZMATestCase.java | 40 +++-- .../compressors/lzma/LZMAUtilsTestCase.java | 108 ++++++++++++ 7 files changed, 341 insertions(+), 10 deletions(-) create mode 100644 src/main/java/org/apache/commons/compress/compressors/lzma/LZMAUtils.java create mode 100644 src/test/java/org/apache/commons/compress/compressors/lzma/LZMAUtilsTestCase.java diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 5e2bd810c14..f3ae88ad365 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -54,6 +54,9 @@ breaks backwards compatibility for code which used the old package. This also changes the superclass of ZCompressorInputStream. "> + + CompressorStreamFactory can now auto-detect LZMA streams. + TarArchiveEntry's constructor with a File and a String arg didn't normalize the name. 
diff --git a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java index 4bc18102461..adca670ca73 100644 --- a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java +++ b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java @@ -29,6 +29,7 @@ import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream; import org.apache.commons.compress.compressors.lzma.LZMACompressorInputStream; +import org.apache.commons.compress.compressors.lzma.LZMAUtils; import org.apache.commons.compress.compressors.xz.XZCompressorInputStream; import org.apache.commons.compress.compressors.xz.XZCompressorOutputStream; import org.apache.commons.compress.compressors.xz.XZUtils; @@ -241,6 +242,11 @@ public CompressorInputStream createCompressorInputStream(final InputStream in) return new XZCompressorInputStream(in, decompressConcatenated); } + if (LZMAUtils.matches(signature, signatureLength) && + LZMAUtils.isLZMACompressionAvailable()) { + return new LZMACompressorInputStream(in); + } + } catch (IOException e) { throw new CompressorException("Failed to detect Compressor from InputStream.", e); } diff --git a/src/main/java/org/apache/commons/compress/compressors/lzma/LZMACompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lzma/LZMACompressorInputStream.java index 142b6178147..f9a8eae19f7 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lzma/LZMACompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lzma/LZMACompressorInputStream.java @@ -81,4 +81,36 @@ public int available() throws IOException { public void close() throws IOException { in.close(); } + + /** + * Checks if the signature matches what is expected for an lzma file. 
+ * + * @param signature + * the bytes to check + * @param length + * the number of bytes to check + * @return true, if this stream is an lzma compressed stream, false otherwise + * + * @since 1.10 + */ + public static boolean matches(byte[] signature, int length) { + + if (signature == null || length < 3) { + return false; + } + + if (signature[0] != 0x5d) { + return false; + } + + if (signature[1] != 0) { + return false; + } + + if (signature[2] != 0) { + return false; + } + + return true; + } } diff --git a/src/main/java/org/apache/commons/compress/compressors/lzma/LZMAUtils.java b/src/main/java/org/apache/commons/compress/compressors/lzma/LZMAUtils.java new file mode 100644 index 00000000000..7d3e1beafaa --- /dev/null +++ b/src/main/java/org/apache/commons/compress/compressors/lzma/LZMAUtils.java @@ -0,0 +1,161 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.compressors.lzma; + +import java.util.HashMap; +import java.util.Map; +import org.apache.commons.compress.compressors.FileNameUtil; + +/** + * Utility code for the lzma compression format. 
+ * @ThreadSafe + * @since 1.10 + */ +public class LZMAUtils { + + private static final FileNameUtil fileNameUtil; + + /** + * LZMA Header Magic Bytes begin a LZMA file. + */ + private static final byte[] HEADER_MAGIC = { + (byte) 0x5D, 0, 0 + }; + + static enum CachedAvailability { + DONT_CACHE, CACHED_AVAILABLE, CACHED_UNAVAILABLE + } + + private static volatile CachedAvailability cachedLZMAAvailability; + + static { + Map uncompressSuffix = new HashMap(); + uncompressSuffix.put(".lzma", ""); + uncompressSuffix.put("-lzma", ""); + fileNameUtil = new FileNameUtil(uncompressSuffix, ".lzma"); + cachedLZMAAvailability = CachedAvailability.DONT_CACHE; + try { + Class.forName("org.osgi.framework.BundleEvent"); + } catch (Exception ex) { + setCacheLZMAAvailablity(true); + } + } + + /** Private constructor to prevent instantiation of this utility class. */ + private LZMAUtils() { + } + + /** + * Checks if the signature matches what is expected for a .lzma file. + * + * @param signature the bytes to check + * @param length the number of bytes to check + * @return true if signature matches the .lzma magic bytes, false otherwise + */ + public static boolean matches(byte[] signature, int length) { + if (length < HEADER_MAGIC.length) { + return false; + } + + for (int i = 0; i < HEADER_MAGIC.length; ++i) { + if (signature[i] != HEADER_MAGIC[i]) { + return false; + } + } + + return true; + } + + /** + * Are the classes required to support LZMA compression available? 
+ */ + public static boolean isLZMACompressionAvailable() { + final CachedAvailability cachedResult = cachedLZMAAvailability; + if (cachedResult != CachedAvailability.DONT_CACHE) { + return cachedResult == CachedAvailability.CACHED_AVAILABLE; + } + return internalIsLZMACompressionAvailable(); + } + + private static boolean internalIsLZMACompressionAvailable() { + try { + LZMACompressorInputStream.matches(null, 0); + return true; + } catch (NoClassDefFoundError error) { + return false; + } + } + + /** + * Detects common lzma suffixes in the given filename. + * + * @param filename name of a file + * @return {@code true} if the filename has a common lzma suffix, + * {@code false} otherwise + */ + public static boolean isCompressedFilename(String filename) { + return fileNameUtil.isCompressedFilename(filename); + } + + /** + * Maps the given name of a lzma-compressed file to the name that + * the file should have after uncompression. Any filenames with + * the generic ".lzma" suffix (or any other generic lzma suffix) + * is mapped to a name without that suffix. If no lzma suffix is + * detected, then the filename is returned unmapped. + * + * @param filename name of a file + * @return name of the corresponding uncompressed file + */ + public static String getUncompressedFilename(String filename) { + return fileNameUtil.getUncompressedFilename(filename); + } + + /** + * Maps the given filename to the name that the file should have after + * compression with lzma. + * + * @param filename name of a file + * @return name of the corresponding compressed file + */ + public static String getCompressedFilename(String filename) { + return fileNameUtil.getCompressedFilename(filename); + } + + /** + * Whether to cache the result of the LZMA check. + * + *

      This defaults to {@code false} in an OSGi environment and {@code true} otherwise.

      + * @param doCache whether to cache the result + */ + public static void setCacheLZMAAvailablity(boolean doCache) { + if (!doCache) { + cachedLZMAAvailability = CachedAvailability.DONT_CACHE; + } else if (cachedLZMAAvailability == CachedAvailability.DONT_CACHE) { + final boolean hasLzma = internalIsLZMACompressionAvailable(); + cachedLZMAAvailability = hasLzma ? CachedAvailability.CACHED_AVAILABLE + : CachedAvailability.CACHED_UNAVAILABLE; + } + } + + // only exists to support unit tests + static CachedAvailability getCachedLZMAAvailability() { + return cachedLZMAAvailability; + } +} diff --git a/src/site/xdoc/index.xml b/src/site/xdoc/index.xml index f738f763497..0f095b01c17 100644 --- a/src/site/xdoc/index.xml +++ b/src/site/xdoc/index.xml @@ -68,6 +68,7 @@
    • Added support for parallel ZIP compression.
    • Added support for raw transfer of entries from one ZIP file to another without uncompress/compress.
    • Performance improvements for creating ZIP files with lots of small entries.
    • +
    • Added auto-detection for LZMA.
    diff --git a/src/test/java/org/apache/commons/compress/compressors/LZMATestCase.java b/src/test/java/org/apache/commons/compress/compressors/LZMATestCase.java index cdc00f51a4b..56b5c71ce18 100644 --- a/src/test/java/org/apache/commons/compress/compressors/LZMATestCase.java +++ b/src/test/java/org/apache/commons/compress/compressors/LZMATestCase.java @@ -18,9 +18,11 @@ */ package org.apache.commons.compress.compressors; +import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; +import java.io.IOException; import java.io.InputStream; import org.apache.commons.compress.AbstractTestCase; @@ -37,18 +39,36 @@ public void testLZMAUnarchive() throws Exception { final InputStream is = new FileInputStream(input); try { final CompressorInputStream in = new LZMACompressorInputStream(is); - FileOutputStream out = null; - try { - out = new FileOutputStream(output); - IOUtils.copy(in, out); - } finally { - if (out != null) { - out.close(); - } - in.close(); - } + copy(in, output); + } finally { + is.close(); + } + } + + @Test + public void testLZMAUnarchiveWithAutodetection() throws Exception { + final File input = getFile("bla.tar.lzma"); + final File output = new File(dir, "bla.tar"); + final InputStream is = new BufferedInputStream(new FileInputStream(input)); + try { + final CompressorInputStream in = new CompressorStreamFactory() + .createCompressorInputStream(is); + copy(in, output); } finally { is.close(); } } + + private void copy(InputStream in, File output) throws IOException { + FileOutputStream out = null; + try { + out = new FileOutputStream(output); + IOUtils.copy(in, out); + } finally { + if (out != null) { + out.close(); + } + in.close(); + } + } } diff --git a/src/test/java/org/apache/commons/compress/compressors/lzma/LZMAUtilsTestCase.java b/src/test/java/org/apache/commons/compress/compressors/lzma/LZMAUtilsTestCase.java new file mode 100644 index 00000000000..357c9fbfe6e --- /dev/null +++ 
b/src/test/java/org/apache/commons/compress/compressors/lzma/LZMAUtilsTestCase.java @@ -0,0 +1,108 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.compressors.lzma; + +import static org.junit.Assert.*; + +import org.junit.Test; + +public class LZMAUtilsTestCase { + + @Test + public void testIsCompressedFilename() { + assertFalse(LZMAUtils.isCompressedFilename("")); + assertFalse(LZMAUtils.isCompressedFilename(".lzma")); + + assertTrue(LZMAUtils.isCompressedFilename("x.lzma")); + assertTrue(LZMAUtils.isCompressedFilename("x-lzma")); + + assertFalse(LZMAUtils.isCompressedFilename("xxgz")); + assertFalse(LZMAUtils.isCompressedFilename("lzmaz")); + assertFalse(LZMAUtils.isCompressedFilename("xaz")); + + assertFalse(LZMAUtils.isCompressedFilename("x.lzma ")); + assertFalse(LZMAUtils.isCompressedFilename("x.lzma\n")); + assertFalse(LZMAUtils.isCompressedFilename("x.lzma.y")); + } + + @Test + public void testGetUncompressedFilename() { + assertEquals("", LZMAUtils.getUncompressedFilename("")); + assertEquals(".lzma", LZMAUtils.getUncompressedFilename(".lzma")); + + assertEquals("x", LZMAUtils.getUncompressedFilename("x.lzma")); + assertEquals("x", 
LZMAUtils.getUncompressedFilename("x-lzma")); + + assertEquals("x.lzma ", LZMAUtils.getUncompressedFilename("x.lzma ")); + assertEquals("x.lzma\n", LZMAUtils.getUncompressedFilename("x.lzma\n")); + assertEquals("x.lzma.y", LZMAUtils.getUncompressedFilename("x.lzma.y")); + } + + @Test + public void testGetCompressedFilename() { + assertEquals(".lzma", LZMAUtils.getCompressedFilename("")); + assertEquals("x.lzma", LZMAUtils.getCompressedFilename("x")); + + assertEquals("x.wmf .lzma", LZMAUtils.getCompressedFilename("x.wmf ")); + assertEquals("x.wmf\n.lzma", LZMAUtils.getCompressedFilename("x.wmf\n")); + assertEquals("x.wmf.y.lzma", LZMAUtils.getCompressedFilename("x.wmf.y")); + } + + @Test + public void testMatches() { + byte[] data = { + (byte) 0x5D, 0, 0, + }; + assertFalse(LZMAUtils.matches(data, 2)); + assertTrue(LZMAUtils.matches(data, 3)); + assertTrue(LZMAUtils.matches(data, 4)); + data[2] = '0'; + assertFalse(LZMAUtils.matches(data, 3)); + } + + @Test + public void testCachingIsEnabledByDefaultAndLZMAIsPresent() { + assertEquals(LZMAUtils.CachedAvailability.CACHED_AVAILABLE, LZMAUtils.getCachedLZMAAvailability()); + assertTrue(LZMAUtils.isLZMACompressionAvailable()); + } + + @Test + public void testCanTurnOffCaching() { + try { + LZMAUtils.setCacheLZMAAvailablity(false); + assertEquals(LZMAUtils.CachedAvailability.DONT_CACHE, LZMAUtils.getCachedLZMAAvailability()); + assertTrue(LZMAUtils.isLZMACompressionAvailable()); + } finally { + LZMAUtils.setCacheLZMAAvailablity(true); + } + } + + @Test + public void testTurningOnCachingReEvaluatesAvailability() { + try { + LZMAUtils.setCacheLZMAAvailablity(false); + assertEquals(LZMAUtils.CachedAvailability.DONT_CACHE, LZMAUtils.getCachedLZMAAvailability()); + LZMAUtils.setCacheLZMAAvailablity(true); + assertEquals(LZMAUtils.CachedAvailability.CACHED_AVAILABLE, LZMAUtils.getCachedLZMAAvailability()); + } finally { + LZMAUtils.setCacheLZMAAvailablity(true); + } + } + +} From a6a5ab08ae314ee54aaf63b5dd1a795cafcd97a0 Mon 
Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Wed, 1 Apr 2015 04:19:38 +0000 Subject: [PATCH 159/189] Make README look better on github. closes #1 as a side effect git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1670561 13f79535-47bb-0310-9956-ffa450edef68 --- README.txt | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/README.txt b/README.txt index 0f39d0246b5..12ec26cc9f4 100644 --- a/README.txt +++ b/README.txt @@ -1,4 +1,11 @@ -Apache Commons Compress was derived from various sources, including: +# Apache Commons Compress + +Commons Compress is a Java library for working with various +compression and archiving formats. + +For full documentation see http://commons.apache.org/proper/commons-compress/ + +## Apache Commons Compress was derived from various sources, including: Original BZip2 classes contributed by Keiron Liddle , Aftex Software to the Apache Ant project. From 24002bfccf7c7ddcd5ed53108deab0c49ba1930b Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 1 May 2015 13:40:03 +0000 Subject: [PATCH 160/189] fix format of text file - this is no markdown actually, all I wanted to say is closes #2 git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1677137 13f79535-47bb-0310-9956-ffa450edef68 --- README.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.txt b/README.txt index 12ec26cc9f4..b743b64210c 100644 --- a/README.txt +++ b/README.txt @@ -1,4 +1,5 @@ -# Apache Commons Compress +Apache Commons Compress +======================= Commons Compress is a Java library for working with various compression and archiving formats. 
From 24a3100e9897837b513a0d9f2ae26fd02ec91246 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Wed, 6 May 2015 04:32:48 +0000 Subject: [PATCH 161/189] COMPRESS-315 allow tar entries with gid/uid > Integer.MAX_INT git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1677921 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 4 ++ .../archivers/tar/TarArchiveEntry.java | 52 +++++++++++++++++-- .../archivers/tar/TarArchiveOutputStream.java | 8 +-- 3 files changed, 56 insertions(+), 8 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index f3ae88ad365..1881fd51c07 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -54,6 +54,10 @@ breaks backwards compatibility for code which used the old package. This also changes the superclass of ZCompressorInputStream. "> + + TarArchiveOutputStream can now write entries with group or + user ids > 0x80000000. + CompressorStreamFactory can now auto-detect LZMA streams. diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java index 9ba145f4de6..ce419c1a1f8 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java @@ -121,10 +121,10 @@ public class TarArchiveEntry implements TarConstants, ArchiveEntry { private int mode; /** The entry's user id. */ - private int userId = 0; + private long userId = 0; /** The entry's group id. */ - private int groupId = 0; + private long groupId = 0; /** The entry's size. */ private long size = 0; @@ -424,9 +424,12 @@ public void setLinkName(String link) { * Get this entry's user id. * * @return This entry's user id. 
+ * @deprecated use #getLongUserId instead as user ids can be + * bigger than {@link Integer.MAX_INT} */ + @Deprecated public int getUserId() { - return userId; + return (int) (userId & 0xffffffff); } /** @@ -435,6 +438,26 @@ public int getUserId() { * @param userId This entry's new user id. */ public void setUserId(int userId) { + setUserId((long) userId); + } + + /** + * Get this entry's user id. + * + * @return This entry's user id. + * @since 1.10 + */ + public long getLongUserId() { + return userId; + } + + /** + * Set this entry's user id. + * + * @param userId This entry's new user id. + * @since 1.10 + */ + public void setUserId(long userId) { this.userId = userId; } @@ -442,9 +465,12 @@ public void setUserId(int userId) { * Get this entry's group id. * * @return This entry's group id. + * @deprecated use #getLongGroupId instead as group ids can be + * bigger than {@link Integer.MAX_INT} */ + @Deprecated public int getGroupId() { - return groupId; + return (int) (groupId & 0xffffffff); } /** @@ -453,6 +479,24 @@ public int getGroupId() { * @param groupId This entry's new group id. */ public void setGroupId(int groupId) { + setGroupId((long) groupId); + } + + /** + * Get this entry's group id. + * + * @return This entry's group id. + */ + public long getLongGroupId() { + return groupId; + } + + /** + * Set this entry's group id. + * + * @param groupId This entry's new group id. 
+ */ + public void setGroupId(long groupId) { this.groupId = groupId; } diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java index 43525c8c6b8..f12f9ccabef 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java @@ -584,12 +584,12 @@ private void addPaxHeadersForBigNumbers(Map paxHeaders, TarArchiveEntry entry) { addPaxHeaderForBigNumber(paxHeaders, "size", entry.getSize(), TarConstants.MAXSIZE); - addPaxHeaderForBigNumber(paxHeaders, "gid", entry.getGroupId(), + addPaxHeaderForBigNumber(paxHeaders, "gid", entry.getLongGroupId(), TarConstants.MAXID); addPaxHeaderForBigNumber(paxHeaders, "mtime", entry.getModTime().getTime() / 1000, TarConstants.MAXSIZE); - addPaxHeaderForBigNumber(paxHeaders, "uid", entry.getUserId(), + addPaxHeaderForBigNumber(paxHeaders, "uid", entry.getLongUserId(), TarConstants.MAXID); // star extensions by J\u00f6rg Schilling addPaxHeaderForBigNumber(paxHeaders, "SCHILY.devmajor", @@ -610,11 +610,11 @@ private void addPaxHeaderForBigNumber(Map paxHeaders, private void failForBigNumbers(TarArchiveEntry entry) { failForBigNumber("entry size", entry.getSize(), TarConstants.MAXSIZE); - failForBigNumberWithPosixMessage("group id", entry.getGroupId(), TarConstants.MAXID); + failForBigNumberWithPosixMessage("group id", entry.getLongGroupId(), TarConstants.MAXID); failForBigNumber("last modification time", entry.getModTime().getTime() / 1000, TarConstants.MAXSIZE); - failForBigNumber("user id", entry.getUserId(), TarConstants.MAXID); + failForBigNumber("user id", entry.getLongUserId(), TarConstants.MAXID); failForBigNumber("mode", entry.getMode(), TarConstants.MAXID); failForBigNumber("major device number", entry.getDevMajor(), TarConstants.MAXID); From 3c4a09bf28e7cd600b919b8c799fbbfd19a94c0b 
Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Fri, 8 May 2015 19:11:27 +0000 Subject: [PATCH 162/189] COMPRESS-314 read group/user ids > 0x80000000 from (posix) tars git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1678430 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 6 +++++- .../archivers/tar/TarArchiveInputStream.java | 4 ++-- .../tar/TarArchiveInputStreamTest.java | 21 +++++++++++++++++++ 3 files changed, 28 insertions(+), 3 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 1881fd51c07..372a7cb5205 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -54,7 +54,11 @@ breaks backwards compatibility for code which used the old package. This also changes the superclass of ZCompressorInputStream. "> - + + TarArchiveInputStream can now read entries with group or + user ids > 0x80000000. + + TarArchiveOutputStream can now write entries with group or user ids > 0x80000000. diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java index c5570071e0d..41acf2af565 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java @@ -498,11 +498,11 @@ private void applyPaxHeadersToCurrentEntry(Map headers) { } else if ("linkpath".equals(key)){ currEntry.setLinkName(val); } else if ("gid".equals(key)){ - currEntry.setGroupId(Integer.parseInt(val)); + currEntry.setGroupId(Long.parseLong(val)); } else if ("gname".equals(key)){ currEntry.setGroupName(val); } else if ("uid".equals(key)){ - currEntry.setUserId(Integer.parseInt(val)); + currEntry.setUserId(Long.parseLong(val)); } else if ("uname".equals(key)){ currEntry.setUserName(val); } else if ("size".equals(key)){ diff --git 
a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java index f4fca534123..ca53b8b483f 100644 --- a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java @@ -235,6 +235,27 @@ public void shouldThrowAnExceptionOnTruncatedEntries() throws Exception { } } + @Test + public void shouldReadBigGid() throws Exception { + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + TarArchiveOutputStream tos = new TarArchiveOutputStream(bos); + tos.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_POSIX); + TarArchiveEntry t = new TarArchiveEntry("name"); + t.setGroupId(4294967294l); + t.setSize(1); + tos.putArchiveEntry(t); + tos.write(30); + tos.closeArchiveEntry(); + tos.close(); + byte[] data = bos.toByteArray(); + ByteArrayInputStream bis = new ByteArrayInputStream(data); + TarArchiveInputStream tis = + new TarArchiveInputStream(bis); + t = tis.getNextTarEntry(); + assertEquals(4294967294l, t.getLongGroupId()); + tis.close(); + } + private TarArchiveInputStream getTestStream(String name) { return new TarArchiveInputStream( TarArchiveInputStreamTest.class.getResourceAsStream(name)); From 9b3e389fc692513cc701b886a9dcdab6ebf32453 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sat, 9 May 2015 18:59:30 +0000 Subject: [PATCH 163/189] missing since tags git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1678517 13f79535-47bb-0310-9956-ffa450edef68 --- .../apache/commons/compress/archivers/tar/TarArchiveEntry.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java index ce419c1a1f8..f39f0d9631b 100644 --- 
a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java @@ -485,6 +485,7 @@ public void setGroupId(int groupId) { /** * Get this entry's group id. * + * @since 1.10 * @return This entry's group id. */ public long getLongGroupId() { @@ -494,6 +495,7 @@ public long getLongGroupId() { /** * Set this entry's group id. * + * @since 1.10 * @param groupId This entry's new group id. */ public void setGroupId(long groupId) { From b72b5c716ac18150396d05988d9eab745bea02b9 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sat, 23 May 2015 15:05:43 +0000 Subject: [PATCH 164/189] COMPRESS-316 detect DEFLATE streams with ZLIB header, submitted by Nick Burch git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1681356 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 5 ++++ .../compressors/CompressorStreamFactory.java | 4 +++ .../deflate/DeflateCompressorInputStream.java | 27 +++++++++++++++++++ .../compressors/DetectCompressorTestCase.java | 5 ++++ 4 files changed, 41 insertions(+) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 372a7cb5205..1d198775d33 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -54,6 +54,11 @@ breaks backwards compatibility for code which used the old package. This also changes the superclass of ZCompressorInputStream. "> + + CompressorStreamFactory can now auto-detect DEFLATE streams + with ZLIB header. + TarArchiveInputStream can now read entries with group or user ids > 0x80000000. 
diff --git a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java index adca670ca73..8dc3c525112 100644 --- a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java +++ b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java @@ -237,6 +237,10 @@ public CompressorInputStream createCompressorInputStream(final InputStream in) return new ZCompressorInputStream(in); } + if (DeflateCompressorInputStream.matches(signature, signatureLength)) { + return new DeflateCompressorInputStream(in); + } + if (XZUtils.matches(signature, signatureLength) && XZUtils.isXZCompressionAvailable()) { return new XZCompressorInputStream(in, decompressConcatenated); diff --git a/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java index a81a7a0da1c..b8ed19815b0 100644 --- a/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java @@ -30,6 +30,12 @@ * @since 1.9 */ public class DeflateCompressorInputStream extends CompressorInputStream { + private static final int MAGIC_1 = 0x78; + private static final int MAGIC_2a = 0x01; + private static final int MAGIC_2b = 0x5e; + private static final int MAGIC_2c = 0x9c; + private static final int MAGIC_2d = 0xda; + private final InputStream in; /** @@ -88,4 +94,25 @@ public int available() throws IOException { public void close() throws IOException { in.close(); } + + /** + * Checks if the signature matches what is expected for a zlib / deflated file + * with the zlib header. 
+ * + * @param signature + * the bytes to check + * @param length + * the number of bytes to check + * @return true, if this stream is zlib / deflate compressed with a header + * stream, false otherwise + * + * @since 1.9 + */ + public static boolean matches(byte[] signature, int length) { + return length > 3 && signature[0] == MAGIC_1 && ( + signature[1] == (byte) MAGIC_2a || + signature[1] == (byte) MAGIC_2b || + signature[1] == (byte) MAGIC_2c || + signature[1] == (byte) MAGIC_2d); + } } diff --git a/src/test/java/org/apache/commons/compress/compressors/DetectCompressorTestCase.java b/src/test/java/org/apache/commons/compress/compressors/DetectCompressorTestCase.java index 0fc84248b9f..218d91322d0 100644 --- a/src/test/java/org/apache/commons/compress/compressors/DetectCompressorTestCase.java +++ b/src/test/java/org/apache/commons/compress/compressors/DetectCompressorTestCase.java @@ -30,6 +30,7 @@ import org.apache.commons.compress.compressors.CompressorInputStream; import org.apache.commons.compress.compressors.CompressorStreamFactory; import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; +import org.apache.commons.compress.compressors.deflate.DeflateCompressorInputStream; import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; import org.apache.commons.compress.compressors.pack200.Pack200CompressorInputStream; import org.apache.commons.compress.compressors.xz.XZCompressorInputStream; @@ -104,6 +105,10 @@ public void testDetection() throws Exception { assertNotNull(xz); assertTrue(xz instanceof XZCompressorInputStream); + CompressorInputStream zlib = getStreamFor("bla.tar.deflatez"); + assertNotNull(zlib); + assertTrue(zlib instanceof DeflateCompressorInputStream); + try { factory.createCompressorInputStream(new ByteArrayInputStream(new byte[0])); fail("No exception thrown for an empty input stream"); From 8271475f973945e776ca91142349b8618c6f619e Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sat, 23 May 
2015 18:01:31 +0000 Subject: [PATCH 165/189] fix @since tag git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1681369 13f79535-47bb-0310-9956-ffa450edef68 --- .../compressors/deflate/DeflateCompressorInputStream.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java index b8ed19815b0..d4fec63fa1e 100644 --- a/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStream.java @@ -106,7 +106,7 @@ public void close() throws IOException { * @return true, if this stream is zlib / deflate compressed with a header * stream, false otherwise * - * @since 1.9 + * @since 1.10 */ public static boolean matches(byte[] signature, int length) { return length > 3 && signature[0] == MAGIC_1 && ( From 8682c8d4f2281f838765a213d0241366255c99a0 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 9 Jun 2015 04:18:23 +0000 Subject: [PATCH 166/189] COMPRESS-317 ArrayIndexOutOfBoundsException in ZipArchiveEntry#getMergedFields git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1684309 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 7 ++++++- .../commons/compress/archivers/zip/ZipArchiveEntry.java | 2 +- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 1d198775d33..716a11040f4 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -54,8 +54,13 @@ breaks backwards compatibility for code which used the old package. This also changes the superclass of ZCompressorInputStream. "> + + ArrayIndexOutOfBoundsException when ZIP extra fields are read + and the entry contains an UnparseableExtraField. 
+ + due-to="Nick Burch"> CompressorStreamFactory can now auto-detect DEFLATE streams with ZLIB header. diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java index 6ee9dbbd5cc..1eab30cb6b0 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java @@ -377,7 +377,7 @@ private ZipExtraField[] copyOf(ZipExtraField[] src, int length) { private ZipExtraField[] getMergedFields() { final ZipExtraField[] zipExtraFields = copyOf(extraFields, extraFields.length + 1); - zipExtraFields[zipExtraFields.length] = unparseableExtra; + zipExtraFields[extraFields.length] = unparseableExtra; return zipExtraFields; } From 0967f25887b0779ea3472e655e5411cb36068ef4 Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Thu, 18 Jun 2015 15:48:13 +0000 Subject: [PATCH 167/189] Rename Security Reports page to avoid possible confusion with Commons main security page git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1686251 13f79535-47bb-0310-9956-ffa450edef68 --- src/site/site.xml | 2 +- src/site/xdoc/{security.xml => security-reports.xml} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename src/site/xdoc/{security.xml => security-reports.xml} (100%) diff --git a/src/site/site.xml b/src/site/site.xml index 134b84c1229..ab0c27e11ec 100644 --- a/src/site/site.xml +++ b/src/site/site.xml @@ -33,7 +33,7 @@ - + diff --git a/src/site/xdoc/security.xml b/src/site/xdoc/security-reports.xml similarity index 100% rename from src/site/xdoc/security.xml rename to src/site/xdoc/security-reports.xml From d1e2e3d8d29b6a693a2617aa1a0f774f438f6ada Mon Sep 17 00:00:00 2001 From: Sebastian Bazley Date: Tue, 30 Jun 2015 09:53:30 +0000 Subject: [PATCH 168/189] CP37 => CP38 git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1688413 
13f79535-47bb-0310-9956-ffa450edef68 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 9e658d8b7ca..446af7a9be9 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ org.apache.commons commons-parent - 37 + 38 org.apache.commons From c9259fd82546bbacf9606509e5883bedde7fb062 Mon Sep 17 00:00:00 2001 From: "Gary D. Gregory" Date: Wed, 8 Jul 2015 08:36:32 +0000 Subject: [PATCH 169/189] Minor formating. git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1689811 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress/compressors/CompressorStreamFactory.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java index 8dc3c525112..8f374f7eeb9 100644 --- a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java +++ b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java @@ -80,6 +80,7 @@ public class CompressorStreamFactory { * @since 1.1 */ public static final String GZIP = "gz"; + /** * Constant (value {@value}) used to identify the PACK200 compression algorithm. 
* @since 1.3 @@ -140,7 +141,6 @@ public class CompressorStreamFactory { * If false, stop after the first stream and leave the * input position to point to the next byte after the stream */ - private volatile boolean decompressConcatenated = false; /** From a73d5f2e9b1f415201b312af8ff4af6a0f7623e1 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 21 Jul 2015 04:11:32 +0000 Subject: [PATCH 170/189] yet another potential AIOBException in zip package found by Earl Hood over in Ant's version of the same code git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1692044 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/UnshrinkingInputStream.java | 24 +++--- .../archivers/zip/ZipArchiveOutputStream.java | 2 +- .../compressors/lzw/LZWInputStream.java | 73 +++++++++++++++++-- .../compressors/z/ZCompressorInputStream.java | 25 +++---- 4 files changed, 91 insertions(+), 33 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.java index 7210534b1c4..a09b72e43cd 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.java @@ -36,20 +36,22 @@ class UnshrinkingInputStream extends LZWInputStream { public UnshrinkingInputStream(InputStream inputStream) throws IOException { super(inputStream, ByteOrder.LITTLE_ENDIAN); - setClearCode(codeSize); + setClearCode(DEFAULT_CODE_SIZE); initializeTables(MAX_CODE_SIZE); - isUsed = new boolean[prefixes.length]; + isUsed = new boolean[getPrefixesLength()]; for (int i = 0; i < (1 << 8); i++) { isUsed[i] = true; } - tableSize = clearCode + 1; + setTableSize(getClearCode() + 1); } @Override protected int addEntry(int previousCode, byte character) throws IOException { + int tableSize = getTableSize(); while ((tableSize < MAX_TABLE_SIZE) && 
isUsed[tableSize]) { tableSize++; } + setTableSize(tableSize); int idx = addEntry(previousCode, character, MAX_TABLE_SIZE); if (idx >= 0) { isUsed[idx] = true; @@ -60,14 +62,14 @@ protected int addEntry(int previousCode, byte character) throws IOException { private void partialClear() { final boolean[] isParent = new boolean[MAX_TABLE_SIZE]; for (int i = 0; i < isUsed.length; i++) { - if (isUsed[i] && prefixes[i] != -1) { - isParent[prefixes[i]] = true; + if (isUsed[i] && getPrefix(i) != UNUSED_PREFIX) { + isParent[getPrefix(i)] = true; } } - for (int i = clearCode + 1; i < isParent.length; i++) { + for (int i = getClearCode() + 1; i < isParent.length; i++) { if (!isParent[i]) { isUsed[i] = false; - prefixes[i] = -1; + setPrefix(i, UNUSED_PREFIX); } } } @@ -89,19 +91,19 @@ protected int decompressNextSymbol() throws IOException { final int code = readNextCode(); if (code < 0) { return -1; - } else if (code == clearCode) { + } else if (code == getClearCode()) { final int subCode = readNextCode(); if (subCode < 0) { throw new IOException("Unexpected EOF;"); } else if (subCode == 1) { - if (codeSize < MAX_CODE_SIZE) { - codeSize++; + if (getCodeSize() < MAX_CODE_SIZE) { + incrementCodeSize(); } else { throw new IOException("Attempt to increase code size beyond maximum"); } } else if (subCode == 2) { partialClear(); - tableSize = clearCode + 1; + setTableSize(getClearCode() + 1); } else { throw new IOException("Invalid clear code subcode " + subCode); } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index c53ce9f25a6..169f088b913 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -1243,7 +1243,7 @@ private byte[] createCentralFileHeader(ZipArchiveEntry ze, ByteBuffer name, long int extraStart = 
CFH_FILENAME_OFFSET + nameLen; System.arraycopy(extra, 0, buf, extraStart, extra.length); - int commentStart = extraStart + commentLen; + int commentStart = extraStart + extra.length; // file comment System.arraycopy(commentB.array(), commentB.arrayOffset(), buf, commentStart, commentLen); diff --git a/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java index 6900b7cfb8d..dc9212d6109 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java @@ -34,16 +34,19 @@ * @since 1.10 */ public abstract class LZWInputStream extends CompressorInputStream { + protected static final int DEFAULT_CODE_SIZE = 9; + protected static final int UNUSED_PREFIX = -1; + private final byte[] oneByte = new byte[1]; protected final BitInputStream in; - protected int clearCode = -1; - protected int codeSize = 9; - protected byte previousCodeFirstChar; - protected int previousCode = -1; - protected int tableSize = 0; - protected int[] prefixes; - protected byte[] characters; + private int clearCode = -1; + private int codeSize = DEFAULT_CODE_SIZE; + private byte previousCodeFirstChar; + private int previousCode = UNUSED_PREFIX; + private int tableSize; + private int[] prefixes; + private byte[] characters; private byte[] outputStack; private int outputStackLocation; @@ -178,4 +181,60 @@ private int readFromStack(byte[] b, int off, int len) { } return 0; } + + protected int getCodeSize() { + return codeSize; + } + + protected void resetCodeSize() { + this.codeSize = DEFAULT_CODE_SIZE; + } + + protected void incrementCodeSize() { + codeSize++; + } + + protected int getPreviousCode() { + return previousCode; + } + + protected byte getPreviousCodeFirstChar() { + return previousCodeFirstChar; + } + + protected void resetPreviousCode() { + this.previousCode = -1; + } + + protected 
int getPrefix(int offset) { + return prefixes[offset]; + } + + protected void setPrefix(int offset, int value) { + prefixes[offset] = value; + } + + protected int getPrefixesLength() { + return prefixes.length; + } + + protected int getClearCode() { + return clearCode; + } + + protected int getTableSize() { + return tableSize; + } + + protected void setTableSize(int newSize) { + tableSize = newSize; + } + + protected void setCharacter(int offset, byte value) { + characters[offset] = value; + } + + protected byte getCharacter(int offset) { + return characters[offset]; + } } diff --git a/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java index 1bb65b50818..e21df06725f 100644 --- a/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java @@ -49,17 +49,14 @@ public ZCompressorInputStream(InputStream inputStream) throws IOException { blockMode = (thirdByte & BLOCK_MODE_MASK) != 0; maxCodeSize = thirdByte & MAX_CODE_SIZE_MASK; if (blockMode) { - setClearCode(codeSize); + setClearCode(DEFAULT_CODE_SIZE); } initializeTables(maxCodeSize); clearEntries(); } private void clearEntries() { - tableSize = 1 << 8; - if (blockMode) { - tableSize++; - } + setTableSize((1 << 8) + (blockMode ? 
1 : 0)); } /** @@ -100,11 +97,11 @@ private void reAlignReading() throws IOException { */ @Override protected int addEntry(int previousCode, byte character) throws IOException { - final int maxTableSize = 1 << codeSize; + final int maxTableSize = 1 << getCodeSize(); int r = addEntry(previousCode, character, maxTableSize); - if (tableSize == maxTableSize && codeSize < maxCodeSize) { + if (getTableSize() == maxTableSize && getCodeSize() < maxCodeSize) { reAlignReading(); - codeSize++; + incrementCodeSize(); } return r; } @@ -132,19 +129,19 @@ protected int decompressNextSymbol() throws IOException { final int code = readNextCode(); if (code < 0) { return -1; - } else if (blockMode && code == clearCode) { + } else if (blockMode && code == getClearCode()) { clearEntries(); reAlignReading(); - codeSize = 9; - previousCode = -1; + resetCodeSize(); + resetPreviousCode(); return 0; } else { boolean addedUnfinishedEntry = false; - if (code == tableSize) { + if (code == getTableSize()) { addRepeatOfPreviousCode(); addedUnfinishedEntry = true; - } else if (code > tableSize) { - throw new IOException(String.format("Invalid %d bit code 0x%x", codeSize, code)); + } else if (code > getTableSize()) { + throw new IOException(String.format("Invalid %d bit code 0x%x", getCodeSize(), code)); } return expandCodeToOutputStack(code, addedUnfinishedEntry); } From 9ebe7bbd313efb49f3496b8af7d4b4b78f73a5b7 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 21 Jul 2015 04:14:00 +0000 Subject: [PATCH 171/189] committed local change by accident git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1692045 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/UnshrinkingInputStream.java | 24 +++--- .../compressors/lzw/LZWInputStream.java | 73 ++----------------- .../compressors/z/ZCompressorInputStream.java | 25 ++++--- 3 files changed, 32 insertions(+), 90 deletions(-) diff --git 
a/src/main/java/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.java index a09b72e43cd..7210534b1c4 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.java @@ -36,22 +36,20 @@ class UnshrinkingInputStream extends LZWInputStream { public UnshrinkingInputStream(InputStream inputStream) throws IOException { super(inputStream, ByteOrder.LITTLE_ENDIAN); - setClearCode(DEFAULT_CODE_SIZE); + setClearCode(codeSize); initializeTables(MAX_CODE_SIZE); - isUsed = new boolean[getPrefixesLength()]; + isUsed = new boolean[prefixes.length]; for (int i = 0; i < (1 << 8); i++) { isUsed[i] = true; } - setTableSize(getClearCode() + 1); + tableSize = clearCode + 1; } @Override protected int addEntry(int previousCode, byte character) throws IOException { - int tableSize = getTableSize(); while ((tableSize < MAX_TABLE_SIZE) && isUsed[tableSize]) { tableSize++; } - setTableSize(tableSize); int idx = addEntry(previousCode, character, MAX_TABLE_SIZE); if (idx >= 0) { isUsed[idx] = true; @@ -62,14 +60,14 @@ protected int addEntry(int previousCode, byte character) throws IOException { private void partialClear() { final boolean[] isParent = new boolean[MAX_TABLE_SIZE]; for (int i = 0; i < isUsed.length; i++) { - if (isUsed[i] && getPrefix(i) != UNUSED_PREFIX) { - isParent[getPrefix(i)] = true; + if (isUsed[i] && prefixes[i] != -1) { + isParent[prefixes[i]] = true; } } - for (int i = getClearCode() + 1; i < isParent.length; i++) { + for (int i = clearCode + 1; i < isParent.length; i++) { if (!isParent[i]) { isUsed[i] = false; - setPrefix(i, UNUSED_PREFIX); + prefixes[i] = -1; } } } @@ -91,19 +89,19 @@ protected int decompressNextSymbol() throws IOException { final int code = readNextCode(); if (code < 0) { return -1; - } else if (code == getClearCode()) { + } else 
if (code == clearCode) { final int subCode = readNextCode(); if (subCode < 0) { throw new IOException("Unexpected EOF;"); } else if (subCode == 1) { - if (getCodeSize() < MAX_CODE_SIZE) { - incrementCodeSize(); + if (codeSize < MAX_CODE_SIZE) { + codeSize++; } else { throw new IOException("Attempt to increase code size beyond maximum"); } } else if (subCode == 2) { partialClear(); - setTableSize(getClearCode() + 1); + tableSize = clearCode + 1; } else { throw new IOException("Invalid clear code subcode " + subCode); } diff --git a/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java index dc9212d6109..6900b7cfb8d 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java @@ -34,19 +34,16 @@ * @since 1.10 */ public abstract class LZWInputStream extends CompressorInputStream { - protected static final int DEFAULT_CODE_SIZE = 9; - protected static final int UNUSED_PREFIX = -1; - private final byte[] oneByte = new byte[1]; protected final BitInputStream in; - private int clearCode = -1; - private int codeSize = DEFAULT_CODE_SIZE; - private byte previousCodeFirstChar; - private int previousCode = UNUSED_PREFIX; - private int tableSize; - private int[] prefixes; - private byte[] characters; + protected int clearCode = -1; + protected int codeSize = 9; + protected byte previousCodeFirstChar; + protected int previousCode = -1; + protected int tableSize = 0; + protected int[] prefixes; + protected byte[] characters; private byte[] outputStack; private int outputStackLocation; @@ -181,60 +178,4 @@ private int readFromStack(byte[] b, int off, int len) { } return 0; } - - protected int getCodeSize() { - return codeSize; - } - - protected void resetCodeSize() { - this.codeSize = DEFAULT_CODE_SIZE; - } - - protected void incrementCodeSize() { - codeSize++; - } - 
- protected int getPreviousCode() { - return previousCode; - } - - protected byte getPreviousCodeFirstChar() { - return previousCodeFirstChar; - } - - protected void resetPreviousCode() { - this.previousCode = -1; - } - - protected int getPrefix(int offset) { - return prefixes[offset]; - } - - protected void setPrefix(int offset, int value) { - prefixes[offset] = value; - } - - protected int getPrefixesLength() { - return prefixes.length; - } - - protected int getClearCode() { - return clearCode; - } - - protected int getTableSize() { - return tableSize; - } - - protected void setTableSize(int newSize) { - tableSize = newSize; - } - - protected void setCharacter(int offset, byte value) { - characters[offset] = value; - } - - protected byte getCharacter(int offset) { - return characters[offset]; - } } diff --git a/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java index e21df06725f..1bb65b50818 100644 --- a/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java @@ -49,14 +49,17 @@ public ZCompressorInputStream(InputStream inputStream) throws IOException { blockMode = (thirdByte & BLOCK_MODE_MASK) != 0; maxCodeSize = thirdByte & MAX_CODE_SIZE_MASK; if (blockMode) { - setClearCode(DEFAULT_CODE_SIZE); + setClearCode(codeSize); } initializeTables(maxCodeSize); clearEntries(); } private void clearEntries() { - setTableSize((1 << 8) + (blockMode ? 
1 : 0)); + tableSize = 1 << 8; + if (blockMode) { + tableSize++; + } } /** @@ -97,11 +100,11 @@ private void reAlignReading() throws IOException { */ @Override protected int addEntry(int previousCode, byte character) throws IOException { - final int maxTableSize = 1 << getCodeSize(); + final int maxTableSize = 1 << codeSize; int r = addEntry(previousCode, character, maxTableSize); - if (getTableSize() == maxTableSize && getCodeSize() < maxCodeSize) { + if (tableSize == maxTableSize && codeSize < maxCodeSize) { reAlignReading(); - incrementCodeSize(); + codeSize++; } return r; } @@ -129,19 +132,19 @@ protected int decompressNextSymbol() throws IOException { final int code = readNextCode(); if (code < 0) { return -1; - } else if (blockMode && code == getClearCode()) { + } else if (blockMode && code == clearCode) { clearEntries(); reAlignReading(); - resetCodeSize(); - resetPreviousCode(); + codeSize = 9; + previousCode = -1; return 0; } else { boolean addedUnfinishedEntry = false; - if (code == getTableSize()) { + if (code == tableSize) { addRepeatOfPreviousCode(); addedUnfinishedEntry = true; - } else if (code > getTableSize()) { - throw new IOException(String.format("Invalid %d bit code 0x%x", getCodeSize(), code)); + } else if (code > tableSize) { + throw new IOException(String.format("Invalid %d bit code 0x%x", codeSize, code)); } return expandCodeToOutputStack(code, addedUnfinishedEntry); } From 171c3ac8d32ed884392b057800e691a621585d2a Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sun, 9 Aug 2015 16:06:09 +0000 Subject: [PATCH 172/189] COMPRESS-318 document ZipArchiveInputStream's limitations in the javadocs git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1694895 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/ZipArchiveEntry.java | 20 +++++++++++++ .../archivers/zip/ZipArchiveInputStream.java | 28 +++++++++++++++---- 2 files changed, 42 insertions(+), 6 deletions(-) diff --git 
a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java index 1eab30cb6b0..3d8d26c9795 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java @@ -216,6 +216,10 @@ public void setMethod(int method) { /** * Retrieves the internal file attributes. * + *

    Note: {@link ZipArchiveInputStream} is unable to fill + * this field, you must use {@link ZipFile} if you want to read + * entries using this attribute.

    + * * @return the internal file attributes */ public int getInternalAttributes() { @@ -232,6 +236,11 @@ public void setInternalAttributes(int value) { /** * Retrieves the external file attributes. + * + *

    Note: {@link ZipArchiveInputStream} is unable to fill + * this field, you must use {@link ZipFile} if you want to read + * entries using this attribute.

    + * * @return the external file attributes */ public long getExternalAttributes() { @@ -321,6 +330,12 @@ public void setExtraFields(ZipExtraField[] fields) { /** * Retrieves all extra fields that have been parsed successfully. + * + *

    Note: The set of extra fields may be incomplete when + * {@link ZipArchiveInputStream} has been used as some extra + * fields use the central directory to store additional + * information.

    + * * @return an array of the extra fields */ public ZipExtraField[] getExtraFields() { @@ -597,6 +612,11 @@ protected void setName(String name) { /** * Gets the uncompressed size of the entry data. + * + *

    Note: {@link ZipArchiveInputStream} may create + * entries that return {@link #SIZE_UNKNOWN SIZE_UNKNOWN} as long + * as the entry hasn't been read completely.

    + * * @return the entry size */ @Override diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java index 7a69141c296..5625c14b021 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java @@ -43,16 +43,32 @@ /** * Implements an input stream that can read Zip archives. * - *

    Note that {@link ZipArchiveEntry#getSize()} may return -1 if the - * DEFLATE algorithm is used, as the size information is not available - * from the header.

    - * - *

    The {@link ZipFile} class is preferred when reading from files.

    - * *

    As of Apache Commons Compress it transparently supports Zip64 * extensions and thus individual entries and archives larger than 4 * GB or with more than 65536 entries.

    * + *

    The {@link ZipFile} class is preferred when reading from files + * as {@link ZipArchiveInputStream} is limited by not being able to + * read the central directory header before returning entries. In + * particular {@link ZipArchiveInputStream}

    + * + *
      + * + *
    • may return entries that are not part of the central directory + * at all and shouldn't be considered part of the archive.
    • + * + *
    • may return several entries with the same name.
    • + * + *
    • will not return internal or external attributes.
    • + * + *
    • may return incomplete extra field data.
    • + * + *
    • may return unknown sizes and CRC values for entries until the + * next entry has been reached if the archive uses the data + * descriptor feature.
    • + * + *
    + * * @see ZipFile * @NotThreadSafe */ From f25588258e520cbf1d388d3d9b48a4942d3d7c28 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 11 Aug 2015 18:18:34 +0000 Subject: [PATCH 173/189] COMPRESS-300 remove protected fields from LZWInputStream git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1695345 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/zip/UnshrinkingInputStream.java | 24 +++---- .../compressors/lzw/LZWInputStream.java | 62 ++++++++++++++++--- .../compressors/z/ZCompressorInputStream.java | 25 ++++---- 3 files changed, 79 insertions(+), 32 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.java index 7210534b1c4..a09b72e43cd 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.java @@ -36,20 +36,22 @@ class UnshrinkingInputStream extends LZWInputStream { public UnshrinkingInputStream(InputStream inputStream) throws IOException { super(inputStream, ByteOrder.LITTLE_ENDIAN); - setClearCode(codeSize); + setClearCode(DEFAULT_CODE_SIZE); initializeTables(MAX_CODE_SIZE); - isUsed = new boolean[prefixes.length]; + isUsed = new boolean[getPrefixesLength()]; for (int i = 0; i < (1 << 8); i++) { isUsed[i] = true; } - tableSize = clearCode + 1; + setTableSize(getClearCode() + 1); } @Override protected int addEntry(int previousCode, byte character) throws IOException { + int tableSize = getTableSize(); while ((tableSize < MAX_TABLE_SIZE) && isUsed[tableSize]) { tableSize++; } + setTableSize(tableSize); int idx = addEntry(previousCode, character, MAX_TABLE_SIZE); if (idx >= 0) { isUsed[idx] = true; @@ -60,14 +62,14 @@ protected int addEntry(int previousCode, byte character) throws IOException { private void partialClear() { final boolean[] isParent = new 
boolean[MAX_TABLE_SIZE]; for (int i = 0; i < isUsed.length; i++) { - if (isUsed[i] && prefixes[i] != -1) { - isParent[prefixes[i]] = true; + if (isUsed[i] && getPrefix(i) != UNUSED_PREFIX) { + isParent[getPrefix(i)] = true; } } - for (int i = clearCode + 1; i < isParent.length; i++) { + for (int i = getClearCode() + 1; i < isParent.length; i++) { if (!isParent[i]) { isUsed[i] = false; - prefixes[i] = -1; + setPrefix(i, UNUSED_PREFIX); } } } @@ -89,19 +91,19 @@ protected int decompressNextSymbol() throws IOException { final int code = readNextCode(); if (code < 0) { return -1; - } else if (code == clearCode) { + } else if (code == getClearCode()) { final int subCode = readNextCode(); if (subCode < 0) { throw new IOException("Unexpected EOF;"); } else if (subCode == 1) { - if (codeSize < MAX_CODE_SIZE) { - codeSize++; + if (getCodeSize() < MAX_CODE_SIZE) { + incrementCodeSize(); } else { throw new IOException("Attempt to increase code size beyond maximum"); } } else if (subCode == 2) { partialClear(); - tableSize = clearCode + 1; + setTableSize(getClearCode() + 1); } else { throw new IOException("Invalid clear code subcode " + subCode); } diff --git a/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java index 6900b7cfb8d..4a99a975014 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java @@ -34,16 +34,19 @@ * @since 1.10 */ public abstract class LZWInputStream extends CompressorInputStream { + protected static final int DEFAULT_CODE_SIZE = 9; + protected static final int UNUSED_PREFIX = -1; + private final byte[] oneByte = new byte[1]; protected final BitInputStream in; - protected int clearCode = -1; - protected int codeSize = 9; - protected byte previousCodeFirstChar; - protected int previousCode = -1; - protected int tableSize = 0; - protected 
int[] prefixes; - protected byte[] characters; + private int clearCode = -1; + private int codeSize = DEFAULT_CODE_SIZE; + private byte previousCodeFirstChar; + private int previousCode = UNUSED_PREFIX; + private int tableSize; + private int[] prefixes; + private byte[] characters; private byte[] outputStack; private int outputStackLocation; @@ -178,4 +181,49 @@ private int readFromStack(byte[] b, int off, int len) { } return 0; } + + protected int getCodeSize() { + return codeSize; + } + + protected void resetCodeSize() { + setCodeSize(DEFAULT_CODE_SIZE); + } + + protected void setCodeSize(int cs) { + this.codeSize = cs; + } + + protected void incrementCodeSize() { + codeSize++; + } + + protected void resetPreviousCode() { + this.previousCode = -1; + } + + protected int getPrefix(int offset) { + return prefixes[offset]; + } + + protected void setPrefix(int offset, int value) { + prefixes[offset] = value; + } + + protected int getPrefixesLength() { + return prefixes.length; + } + + protected int getClearCode() { + return clearCode; + } + + protected int getTableSize() { + return tableSize; + } + + protected void setTableSize(int newSize) { + tableSize = newSize; + } + } diff --git a/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java index 1bb65b50818..e21df06725f 100644 --- a/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java @@ -49,17 +49,14 @@ public ZCompressorInputStream(InputStream inputStream) throws IOException { blockMode = (thirdByte & BLOCK_MODE_MASK) != 0; maxCodeSize = thirdByte & MAX_CODE_SIZE_MASK; if (blockMode) { - setClearCode(codeSize); + setClearCode(DEFAULT_CODE_SIZE); } initializeTables(maxCodeSize); clearEntries(); } private void clearEntries() { - tableSize = 1 << 8; - if (blockMode) { - tableSize++; - } 
+ setTableSize((1 << 8) + (blockMode ? 1 : 0)); } /** @@ -100,11 +97,11 @@ private void reAlignReading() throws IOException { */ @Override protected int addEntry(int previousCode, byte character) throws IOException { - final int maxTableSize = 1 << codeSize; + final int maxTableSize = 1 << getCodeSize(); int r = addEntry(previousCode, character, maxTableSize); - if (tableSize == maxTableSize && codeSize < maxCodeSize) { + if (getTableSize() == maxTableSize && getCodeSize() < maxCodeSize) { reAlignReading(); - codeSize++; + incrementCodeSize(); } return r; } @@ -132,19 +129,19 @@ protected int decompressNextSymbol() throws IOException { final int code = readNextCode(); if (code < 0) { return -1; - } else if (blockMode && code == clearCode) { + } else if (blockMode && code == getClearCode()) { clearEntries(); reAlignReading(); - codeSize = 9; - previousCode = -1; + resetCodeSize(); + resetPreviousCode(); return 0; } else { boolean addedUnfinishedEntry = false; - if (code == tableSize) { + if (code == getTableSize()) { addRepeatOfPreviousCode(); addedUnfinishedEntry = true; - } else if (code > tableSize) { - throw new IOException(String.format("Invalid %d bit code 0x%x", codeSize, code)); + } else if (code > getTableSize()) { + throw new IOException(String.format("Invalid %d bit code 0x%x", getCodeSize(), code)); } return expandCodeToOutputStack(code, addedUnfinishedEntry); } From 9e1ac89660190ec4d895855655891ad16d790fc4 Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Wed, 12 Aug 2015 03:24:07 +0000 Subject: [PATCH 174/189] lots of useless javadoc tags to make site build work on Java8 git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1695419 13f79535-47bb-0310-9956-ffa450edef68 --- .../archivers/ArchiveOutputStream.java | 10 +-- .../compress/archivers/ar/ArArchiveEntry.java | 3 + .../archivers/arj/ArjArchiveEntry.java | 14 +++- .../archivers/arj/ArjArchiveInputStream.java | 6 +- .../cpio/CpioArchiveInputStream.java | 3 + 
.../archivers/dump/DumpArchiveEntry.java | 70 +++++++++++++------ .../dump/DumpArchiveInputStream.java | 16 +++-- .../archivers/dump/DumpArchiveSummary.java | 23 +++--- .../archivers/sevenz/SevenZArchiveEntry.java | 34 ++++++++- .../sevenz/SevenZMethodConfiguration.java | 2 + .../archivers/sevenz/SevenZOutputFile.java | 14 ++-- .../archivers/tar/TarArchiveEntry.java | 13 +++- .../archivers/tar/TarArchiveOutputStream.java | 1 + .../compress/archivers/tar/TarUtils.java | 2 + .../archivers/zip/ExtraFieldUtils.java | 1 + .../archivers/zip/GeneralPurposeBit.java | 20 ++++-- .../archivers/zip/ScatterZipOutputStream.java | 4 +- .../zip/UnsupportedZipFeatureException.java | 2 + .../Zip64ExtendedInformationExtraField.java | 16 +++++ .../archivers/zip/ZipArchiveEntry.java | 7 ++ .../archivers/zip/ZipArchiveInputStream.java | 6 ++ .../archivers/zip/ZipArchiveOutputStream.java | 10 +++ .../compress/archivers/zip/ZipEncoding.java | 4 +- .../compress/archivers/zip/ZipFile.java | 3 + .../compress/archivers/zip/ZipMethod.java | 3 + .../compress/archivers/zip/ZipUtil.java | 2 + .../bzip2/BZip2CompressorInputStream.java | 1 + .../DeflateCompressorOutputStream.java | 5 ++ .../pack200/Pack200CompressorInputStream.java | 24 +++++++ .../Pack200CompressorOutputStream.java | 12 ++++ .../xz/XZCompressorOutputStream.java | 7 +- .../compress/compressors/xz/XZUtils.java | 1 + .../commons/compress/utils/ArchiveUtils.java | 65 ++++++++--------- .../compress/utils/BitInputStream.java | 1 + .../utils/CRC32VerifyingInputStream.java | 8 +++ .../commons/compress/utils/IOUtils.java | 6 +- 36 files changed, 321 insertions(+), 98 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveOutputStream.java index 3a5084a0862..e2d325fbadc 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ArchiveOutputStream.java +++ 
b/src/main/java/org/apache/commons/compress/archivers/ArchiveOutputStream.java @@ -60,14 +60,14 @@ public abstract class ArchiveOutputStream extends OutputStream { * {@link #closeArchiveEntry()} to complete the process. * * @param entry describes the entry - * @throws IOException + * @throws IOException if an I/O error occurs */ public abstract void putArchiveEntry(ArchiveEntry entry) throws IOException; /** * Closes the archive entry, writing any trailer information that may * be required. - * @throws IOException + * @throws IOException if an I/O error occurs */ public abstract void closeArchiveEntry() throws IOException; @@ -82,11 +82,11 @@ public abstract class ArchiveOutputStream extends OutputStream { /** * Create an archive entry using the inputFile and entryName provided. * - * @param inputFile - * @param entryName + * @param inputFile the file to create the entry from + * @param entryName name to use for the entry * @return the ArchiveEntry set up with details from the file * - * @throws IOException + * @throws IOException if an I/O error occurs */ public abstract ArchiveEntry createArchiveEntry(File inputFile, String entryName) throws IOException; diff --git a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveEntry.java index e32749cd631..ab419fd5d44 100644 --- a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveEntry.java @@ -108,6 +108,8 @@ public ArArchiveEntry(String name, long length, int userId, int groupId, /** * Create a new instance using the attributes of the given file + * @param inputFile the file to create an entry from + * @param entryName the name of the entry */ public ArArchiveEntry(File inputFile, String entryName) { // TODO sort out mode @@ -137,6 +139,7 @@ public int getMode() { /** * Last modified time in seconds since the epoch. 
+ * @return the last modified date */ public long getLastModified() { return lastModified; diff --git a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveEntry.java index 84fdae49f54..3ce49b592c6 100644 --- a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveEntry.java @@ -64,7 +64,10 @@ public long getSize() { return localFileHeader.originalSize; } - /** True if the entry refers to a directory */ + /** True if the entry refers to a directory. + * + * @return True if the entry refers to a directory + */ public boolean isDirectory() { return localFileHeader.fileType == LocalFileHeader.FileTypes.DIRECTORY; } @@ -81,6 +84,8 @@ public boolean isDirectory() { * regardless of timezone if the archive has been created on a * non-Unix system and a time taking the current timezone into * account if the archive has beeen created on Unix.

    + * + * @return the last modified date */ public Date getLastModifiedDate() { long ts = isHostOsUnix() ? localFileHeader.dateTimeModified * 1000l @@ -92,6 +97,8 @@ public Date getLastModifiedDate() { * File mode of this entry. * *

    The format depends on the host os that created the entry.

    + * + * @return the file mode */ public int getMode() { return localFileHeader.fileAccessMode; @@ -101,6 +108,8 @@ public int getMode() { * File mode of this entry as Unix stat value. * *

    Will only be non-zero of the host os was UNIX. + * + * @return the Unix mode */ public int getUnixMode() { return isHostOsUnix() ? getMode() : 0; @@ -109,6 +118,7 @@ public int getUnixMode() { /** * The operating system the archive has been created on. * @see HostOs + * @return the host OS code */ public int getHostOs() { return localFileHeader.hostOS; @@ -117,6 +127,8 @@ public int getHostOs() { /** * Is the operating system the archive has been created on one * that is considered a UNIX OS by arj? + * @return whether the operating system the archive has been + * created on is considered a UNIX OS by arj */ public boolean isHostOsUnix() { return getHostOs() == HostOs.UNIX || getHostOs() == HostOs.NEXT; diff --git a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java index 09e26f6086e..96d1e8c7bbe 100644 --- a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java @@ -53,7 +53,7 @@ public class ArjArchiveInputStream extends ArchiveInputStream { * @param inputStream the underlying stream, whose ownership is taken * @param charsetName the charset used for file names and comments * in the archive. May be {@code null} to use the platform default. - * @throws ArchiveException + * @throws ArchiveException if an exception occurs while reading */ public ArjArchiveInputStream(final InputStream inputStream, final String charsetName) throws ArchiveException { @@ -76,7 +76,7 @@ public ArjArchiveInputStream(final InputStream inputStream, * Constructs the ArjInputStream, taking ownership of the inputStream that is passed in, * and using the CP437 character encoding. 
* @param inputStream the underlying stream, whose ownership is taken - * @throws ArchiveException + * @throws ArchiveException if an exception occurs while reading */ public ArjArchiveInputStream(final InputStream inputStream) throws ArchiveException { @@ -298,6 +298,7 @@ public static boolean matches(final byte[] signature, final int length) { /** * Gets the archive's recorded name. + * @return the archive's name */ public String getArchiveName() { return mainHeader.name; @@ -305,6 +306,7 @@ public String getArchiveName() { /** * Gets the archive's comment. + * @return the archive's comment */ public String getArchiveComment() { return mainHeader.comment; diff --git a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java index 915b56e54b7..6a9ae984294 100644 --- a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java @@ -519,6 +519,9 @@ private void skipRemainderOfLastBlock() throws IOException { * Octal Binary value: * * 070707 - MAGIC_OLD_BINARY (held as a short) = 0x71C7 or 0xC771 + * @param signature data to match + * @param length length of data + * @return whether the buffer seems to contain CPIO data */ public static boolean matches(byte[] signature, int length) { if (length < 6) { diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.java index 1cb62c75dc5..e5c7a0128be 100644 --- a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.java @@ -227,10 +227,10 @@ public DumpArchiveEntry(String name, String simpleName) { /** * Constructor taking name, inode and type. 
* - * @param name - * @param simpleName - * @param ino - * @param type + * @param name the name + * @param simpleName the simple name + * @param ino the ino + * @param type the type */ protected DumpArchiveEntry(String name, String simpleName, int ino, TYPE type) { @@ -241,12 +241,6 @@ protected DumpArchiveEntry(String name, String simpleName, int ino, this.offset = 0; } - /** - * Constructor taking tape buffer. - * @param buffer - * @param offset - */ - /** * Returns the path of the entry. * @return the path of the entry. @@ -257,6 +251,7 @@ public String getSimpleName() { /** * Sets the path of the entry. + * @param simpleName the simple name */ protected void setSimpleName(String simpleName) { this.simpleName = simpleName; @@ -264,6 +259,7 @@ protected void setSimpleName(String simpleName) { /** * Returns the ino of the entry. + * @return the ino */ public int getIno() { return header.getIno(); @@ -271,6 +267,7 @@ public int getIno() { /** * Return the number of hard links to the entry. + * @return the number of hard links */ public int getNlink() { return nlink; @@ -278,6 +275,7 @@ public int getNlink() { /** * Set the number of hard links. + * @param nlink the number of hard links */ public void setNlink(int nlink) { this.nlink = nlink; @@ -285,6 +283,7 @@ public void setNlink(int nlink) { /** * Get file creation time. + * @return the creation time */ public Date getCreationTime() { return new Date(ctime); @@ -292,6 +291,7 @@ public Date getCreationTime() { /** * Set the file creation time. + * @param ctime the creation time */ public void setCreationTime(Date ctime) { this.ctime = ctime.getTime(); @@ -299,6 +299,7 @@ public void setCreationTime(Date ctime) { /** * Return the generation of the file. + * @return the generation */ public int getGeneration() { return generation; @@ -306,6 +307,7 @@ public int getGeneration() { /** * Set the generation of the file. 
+ * @param generation the generation */ public void setGeneration(int generation) { this.generation = generation; @@ -313,6 +315,7 @@ public void setGeneration(int generation) { /** * Has this file been deleted? (On valid on incremental dumps.) + * @return whether the file has been deleted */ public boolean isDeleted() { return isDeleted; @@ -320,6 +323,7 @@ public boolean isDeleted() { /** * Set whether this file has been deleted. + * @param isDeleted whether the file has been deleted */ public void setDeleted(boolean isDeleted) { this.isDeleted = isDeleted; @@ -327,6 +331,7 @@ public void setDeleted(boolean isDeleted) { /** * Return the offset within the archive + * @return the offset */ public long getOffset() { return offset; @@ -334,6 +339,7 @@ public long getOffset() { /** * Set the offset within the archive. + * @param offset the offset */ public void setOffset(long offset) { this.offset = offset; @@ -341,6 +347,7 @@ public void setOffset(long offset) { /** * Return the tape volume where this file is located. + * @return the volume */ public int getVolume() { return volume; @@ -348,6 +355,7 @@ public int getVolume() { /** * Set the tape volume. + * @param volume the volume */ public void setVolume(int volume) { this.volume = volume; @@ -355,6 +363,7 @@ public void setVolume(int volume) { /** * Return the type of the tape segment header. + * @return the segment header */ public DumpArchiveConstants.SEGMENT_TYPE getHeaderType() { return header.getType(); @@ -362,6 +371,7 @@ public DumpArchiveConstants.SEGMENT_TYPE getHeaderType() { /** * Return the number of records in this segment. + * @return the number of records */ public int getHeaderCount() { return header.getCount(); @@ -369,6 +379,7 @@ public int getHeaderCount() { /** * Return the number of sparse records in this segment. + * @return the number of sparse records */ public int getHeaderHoles() { return header.getHoles(); @@ -376,22 +387,18 @@ public int getHeaderHoles() { /** * Is this a sparse record? 
+ * @param idx index of the record to check + * @return whether this is a sparse record */ public boolean isSparseRecord(int idx) { return (header.getCdata(idx) & 0x01) == 0; } - /** - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { return ino; } - /** - * @see java.lang.Object#equals(Object o) - */ @Override public boolean equals(Object o) { if (o == this) { @@ -418,9 +425,6 @@ public boolean equals(Object o) { return true; } - /** - * @see java.lang.Object#toString() - */ @Override public String toString() { return getName(); @@ -430,8 +434,7 @@ public String toString() { * Populate the dump archive entry and tape segment header with * the contents of the buffer. * - * @param buffer - * @throws Exception + * @param buffer buffer to read content from */ static DumpArchiveEntry parse(byte[] buffer) { DumpArchiveEntry entry = new DumpArchiveEntry(); @@ -574,6 +577,7 @@ String getOriginalName() { /** * Sets the name of the entry. + * @param name the name */ public final void setName(String name) { this.originalName = name; @@ -588,12 +592,17 @@ public final void setName(String name) { this.name = name; } + /** + * The last modified date. + * @return the last modified date + */ public Date getLastModifiedDate() { return new Date(mtime); } /** * Is this a directory? + * @return whether this is a directory */ public boolean isDirectory() { return type == TYPE.DIRECTORY; @@ -601,6 +610,7 @@ public boolean isDirectory() { /** * Is this a regular file? + * @return whether this is a regular file */ public boolean isFile() { return type == TYPE.FILE; @@ -608,6 +618,7 @@ public boolean isFile() { /** * Is this a network device? + * @return whether this is a socket */ public boolean isSocket() { return type == TYPE.SOCKET; @@ -615,6 +626,7 @@ public boolean isSocket() { /** * Is this a character device? 
+ * @return whether this is a character device */ public boolean isChrDev() { return type == TYPE.CHRDEV; @@ -622,6 +634,7 @@ public boolean isChrDev() { /** * Is this a block device? + * @return whether this is a block device */ public boolean isBlkDev() { return type == TYPE.BLKDEV; @@ -629,6 +642,7 @@ public boolean isBlkDev() { /** * Is this a fifo/pipe? + * @return whether this is a fifo */ public boolean isFifo() { return type == TYPE.FIFO; @@ -636,6 +650,7 @@ public boolean isFifo() { /** * Get the type of the entry. + * @return the type */ public TYPE getType() { return type; @@ -643,6 +658,7 @@ public TYPE getType() { /** * Set the type of the entry. + * @param type the type */ public void setType(TYPE type) { this.type = type; @@ -650,6 +666,7 @@ public void setType(TYPE type) { /** * Return the access permissions on the entry. + * @return the access permissions */ public int getMode() { return mode; @@ -657,6 +674,7 @@ public int getMode() { /** * Set the access permissions on the entry. + * @param mode the access permissions */ public void setMode(int mode) { this.mode = mode & 07777; @@ -665,6 +683,7 @@ public void setMode(int mode) { /** * Returns the permissions on the entry. + * @return the permissions */ public Set getPermissions() { return permissions; @@ -672,6 +691,7 @@ public Set getPermissions() { /** * Returns the size of the entry. + * @return the size */ public long getSize() { return isDirectory() ? SIZE_UNKNOWN : size; @@ -686,6 +706,7 @@ long getEntrySize() { /** * Set the size of the entry. + * @param size the size */ public void setSize(long size) { this.size = size; @@ -693,6 +714,7 @@ public void setSize(long size) { /** * Set the time the file was last modified. + * @param mtime the last modified time */ public void setLastModifiedDate(Date mtime) { this.mtime = mtime.getTime(); @@ -700,6 +722,7 @@ public void setLastModifiedDate(Date mtime) { /** * Returns the time the file was last accessed. 
+ * @return the access time */ public Date getAccessTime() { return new Date(atime); @@ -707,6 +730,7 @@ public Date getAccessTime() { /** * Set the time the file was last accessed. + * @param atime the access time */ public void setAccessTime(Date atime) { this.atime = atime.getTime(); @@ -714,6 +738,7 @@ public void setAccessTime(Date atime) { /** * Return the user id. + * @return the user id */ public int getUserId() { return uid; @@ -721,6 +746,7 @@ public int getUserId() { /** * Set the user id. + * @param uid the user id */ public void setUserId(int uid) { this.uid = uid; @@ -728,6 +754,7 @@ public void setUserId(int uid) { /** * Return the group id + * @return the group id */ public int getGroupId() { return gid; @@ -735,6 +762,7 @@ public int getGroupId() { /** * Set the group id. + * @param gid the group id */ public void setGroupId(int gid) { this.gid = gid; diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java index 09431a4031c..fa8f6a73932 100644 --- a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java @@ -83,8 +83,8 @@ public class DumpArchiveInputStream extends ArchiveInputStream { * Constructor using the platform's default encoding for file * names. * - * @param is - * @throws ArchiveException + * @param is stream to read from + * @throws ArchiveException on error */ public DumpArchiveInputStream(InputStream is) throws ArchiveException { this(is, null); @@ -93,10 +93,11 @@ public DumpArchiveInputStream(InputStream is) throws ArchiveException { /** * Constructor. 
* - * @param is + * @param is stream to read from * @param encoding the encoding to use for file names, use null * for the platform's default encoding * @since 1.6 + * @throws ArchiveException on error */ public DumpArchiveInputStream(InputStream is, String encoding) throws ArchiveException { @@ -160,6 +161,7 @@ public long getBytesRead() { /** * Return the archive summary information. + * @return the summary */ public DumpArchiveSummary getSummary() { return summary; @@ -215,14 +217,13 @@ private void readBITS() throws IOException { /** * Read the next entry. + * @return the next entry + * @throws IOException on error */ public DumpArchiveEntry getNextDumpEntry() throws IOException { return getNextEntry(); } - /** - * Read the next entry. - */ @Override public DumpArchiveEntry getNextEntry() throws IOException { DumpArchiveEntry entry = null; @@ -536,6 +537,9 @@ public void close() throws IOException { * Look at the first few bytes of the file to decide if it's a dump * archive. With 32 bytes we can look at the magic value, with a full * 1k we can verify the checksum. + * @param buffer data to match + * @param length length of data + * @return whether the buffer seems to contain dump data */ public static boolean matches(byte[] buffer, int length) { // do we have enough of the header? diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.java b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.java index 08b9e8f20a2..a030e005664 100644 --- a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.java +++ b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.java @@ -69,6 +69,7 @@ public Date getDumpDate() { /** * Set dump date. + * @param dumpDate the dump date */ public void setDumpDate(Date dumpDate) { this.dumpDate = dumpDate.getTime(); @@ -84,6 +85,7 @@ public Date getPreviousDumpDate() { /** * Set previous dump date. 
+ * @param previousDumpDate the previous dump date */ public void setPreviousDumpDate(Date previousDumpDate) { this.previousDumpDate = previousDumpDate.getTime(); } @@ -99,6 +101,7 @@ public int getVolume() { /** * Set volume (tape) number. + * @param volume the volume number */ public void setVolume(int volume) { this.volume = volume; } @@ -118,6 +121,7 @@ public int getLevel() { /** * Set level. + * @param level the level */ public void setLevel(int level) { this.level = level; } @@ -134,7 +138,7 @@ public String getLabel() { /** * Set dump label. - * @param label + * @param label the label */ public void setLabel(String label) { this.label = label; } @@ -150,6 +154,7 @@ public String getFilesystem() { /** * Set the last mountpoint. + * @param filesystem the last mountpoint */ public void setFilesystem(String filesystem) { this.filesys = filesystem; } @@ -165,7 +170,7 @@ public String getDevname() { /** * Set the device name. - * @param devname + * @param devname the device name */ public void setDevname(String devname) { this.devname = devname; } @@ -173,7 +178,7 @@ public void setDevname(String devname) { /** * Get the hostname of the system where the dump was performed. - * @return hostname + * @return the host name */ public String getHostname() { return hostname; } @@ -181,6 +186,7 @@ public String getHostname() { /** * Set the hostname. + * @param hostname the host name */ public void setHostname(String hostname) { this.hostname = hostname; } @@ -196,7 +202,7 @@ public int getFlags() { /** * Set the miscellaneous flags. - * @param flags + * @param flags the flags */ public void setFlags(int flags) { this.flags = flags; } @@ -212,7 +218,7 @@ public int getFirstRecord() { /** * Set the inode of the first record. - * @param firstrec + * @param firstrec the first record */ public void setFirstRecord(int firstrec) { this.firstrec = firstrec; } @@ -229,6 +235,7 @@ public int getNTRec() { /** * Set the number of records per tape block. 
+ * @param ntrec the number of records per tape block */ public void setNTRec(int ntrec) { this.ntrec = ntrec; @@ -278,9 +285,6 @@ public boolean isExtendedAttributes() { return (flags & 0x8000) == 0x8000; } - /** - * @see java.lang.Object#hashCode() - */ @Override public int hashCode() { int hash = 17; @@ -302,9 +306,6 @@ public int hashCode() { return hash; } - /** - * @see java.lang.Object#equals(Object) - */ @Override public boolean equals(Object o) { if (this == o) { diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZArchiveEntry.java index 83599e02900..c3f2bb92bd8 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZArchiveEntry.java @@ -124,6 +124,7 @@ public void setAntiItem(boolean isAntiItem) { /** * Returns whether this entry has got a creation date at all. + * @return whether the entry has got a creation date */ public boolean getHasCreationDate() { return hasCreationDate; @@ -131,6 +132,7 @@ public boolean getHasCreationDate() { /** * Sets whether this entry has got a creation date at all. + * @param hasCreationDate whether the entry has got a creation date */ public void setHasCreationDate(boolean hasCreationDate) { this.hasCreationDate = hasCreationDate; @@ -140,6 +142,7 @@ public void setHasCreationDate(boolean hasCreationDate) { * Gets the creation date. * @throws UnsupportedOperationException if the entry hasn't got a * creation date. 
+ * @return the creation date */ public Date getCreationDate() { if (hasCreationDate) { @@ -153,6 +156,7 @@ public Date getCreationDate() { /** * Sets the creation date using NTFS time (100 nanosecond units * since 1 January 1601) + * @param ntfsCreationDate the creation date */ public void setCreationDate(long ntfsCreationDate) { this.creationDate = ntfsCreationDate; @@ -160,6 +164,7 @@ public void setCreationDate(long ntfsCreationDate) { /** * Sets the creation date, + * @param creationDate the creation date */ public void setCreationDate(Date creationDate) { hasCreationDate = creationDate != null; @@ -170,6 +175,7 @@ public void setCreationDate(Date creationDate) { /** * Returns whether this entry has got a last modified date at all. + * @return whether this entry has got a last modified date at all */ public boolean getHasLastModifiedDate() { return hasLastModifiedDate; @@ -177,6 +183,8 @@ public boolean getHasLastModifiedDate() { /** * Sets whether this entry has got a last modified date at all. + * @param hasLastModifiedDate whether this entry has got a last + * modified date at all */ public void setHasLastModifiedDate(boolean hasLastModifiedDate) { this.hasLastModifiedDate = hasLastModifiedDate; @@ -186,6 +194,7 @@ public void setHasLastModifiedDate(boolean hasLastModifiedDate) { * Gets the last modified date. * @throws UnsupportedOperationException if the entry hasn't got a * last modified date. 
+ * @return the last modified date */ public Date getLastModifiedDate() { if (hasLastModifiedDate) { @@ -199,6 +208,7 @@ public Date getLastModifiedDate() { /** * Sets the last modified date using NTFS time (100 nanosecond * units since 1 January 1601) + * @param ntfsLastModifiedDate the last modified date */ public void setLastModifiedDate(long ntfsLastModifiedDate) { this.lastModifiedDate = ntfsLastModifiedDate; @@ -206,6 +216,7 @@ public void setLastModifiedDate(long ntfsLastModifiedDate) { /** * Sets the last modified date, + * @param lastModifiedDate the last modified date */ public void setLastModifiedDate(Date lastModifiedDate) { hasLastModifiedDate = lastModifiedDate != null; @@ -216,6 +227,7 @@ public void setLastModifiedDate(Date lastModifiedDate) { /** * Returns whether this entry has got an access date at all. + * @return whether this entry has got an access date at all. */ public boolean getHasAccessDate() { return hasAccessDate; @@ -223,6 +235,7 @@ public boolean getHasAccessDate() { /** * Sets whether this entry has got an access date at all. + * @param hasAcessDate whether this entry has got an access date at all. */ public void setHasAccessDate(boolean hasAcessDate) { this.hasAccessDate = hasAcessDate; @@ -232,6 +245,7 @@ public void setHasAccessDate(boolean hasAcessDate) { * Gets the access date. * @throws UnsupportedOperationException if the entry hasn't got a * access date. 
+ * @return the access date */ public Date getAccessDate() { if (hasAccessDate) { @@ -245,6 +259,7 @@ public Date getAccessDate() { /** * Sets the access date using NTFS time (100 nanosecond units * since 1 January 1601) + * @param ntfsAccessDate the access date */ public void setAccessDate(long ntfsAccessDate) { this.accessDate = ntfsAccessDate; @@ -252,6 +267,7 @@ public void setAccessDate(long ntfsAccessDate) { /** * Sets the access date, + * @param accessDate the access date */ public void setAccessDate(Date accessDate) { hasAccessDate = accessDate != null; @@ -262,6 +278,7 @@ public void setAccessDate(Date accessDate) { /** * Returns whether this entry has windows attributes. + * @return whether this entry has windows attributes. */ public boolean getHasWindowsAttributes() { return hasWindowsAttributes; @@ -269,6 +286,7 @@ public boolean getHasWindowsAttributes() { /** * Sets whether this entry has windows attributes. + * @param hasWindowsAttributes whether this entry has windows attributes. */ public void setHasWindowsAttributes(boolean hasWindowsAttributes) { this.hasWindowsAttributes = hasWindowsAttributes; @@ -276,6 +294,7 @@ public void setHasWindowsAttributes(boolean hasWindowsAttributes) { /** * Gets the windows attributes. + * @return the windows attributes */ public int getWindowsAttributes() { return windowsAttributes; @@ -283,6 +302,7 @@ public int getWindowsAttributes() { /** * Sets the windows attributes. + * @param windowsAttributes the windows attributes */ public void setWindowsAttributes(int windowsAttributes) { this.windowsAttributes = windowsAttributes; @@ -291,7 +311,8 @@ public void setWindowsAttributes(int windowsAttributes) { /** * Returns whether this entry has got a crc. * - * In general entries without streams don't have a CRC either. + *

    In general entries without streams don't have a CRC either.

    + * @return whether this entry has got a crc. */ public boolean getHasCrc() { return hasCrc; @@ -299,6 +320,7 @@ public boolean getHasCrc() { /** * Sets whether this entry has got a crc. + * @param hasCrc whether this entry has got a crc. */ public void setHasCrc(boolean hasCrc) { this.hasCrc = hasCrc; @@ -307,6 +329,7 @@ public void setHasCrc(boolean hasCrc) { /** * Gets the CRC. * @deprecated use getCrcValue instead. + * @return the CRC */ @Deprecated public int getCrc() { @@ -316,6 +339,7 @@ public int getCrc() { /** * Sets the CRC. * @deprecated use setCrcValue instead. + * @param crc the CRC */ @Deprecated public void setCrc(int crc) { @@ -325,6 +349,7 @@ public void setCrc(int crc) { /** * Gets the CRC. * @since Compress 1.7 + * @return the CRC */ public long getCrcValue() { return crc; @@ -333,6 +358,7 @@ public long getCrcValue() { /** * Sets the CRC. * @since Compress 1.7 + * @param crc the CRC */ public void setCrcValue(long crc) { this.crc = crc; @@ -341,6 +367,7 @@ public void setCrcValue(long crc) { /** * Gets the compressed CRC. * @deprecated use getCompressedCrcValue instead. + * @return the compressed CRC */ @Deprecated int getCompressedCrc() { @@ -350,6 +377,7 @@ int getCompressedCrc() { /** * Sets the compressed CRC. * @deprecated use setCompressedCrcValue instead. + * @param crc the CRC */ @Deprecated void setCompressedCrc(int crc) { @@ -359,6 +387,7 @@ void setCompressedCrc(int crc) { /** * Gets the compressed CRC. * @since Compress 1.7 + * @return the CRC */ long getCompressedCrcValue() { return compressedCrc; @@ -367,6 +396,7 @@ long getCompressedCrcValue() { /** * Sets the compressed CRC. * @since Compress 1.7 + * @param crc the CRC */ void setCompressedCrcValue(long crc) { this.compressedCrc = crc; @@ -419,6 +449,7 @@ void setCompressedSize(long size) { *

    The methods will be consulted in iteration order to create * the final output.

    * + * @param methods the methods to use for the content * @since 1.8 */ public void setContentMethods(Iterable methods) { @@ -445,6 +476,7 @@ public void setContentMethods(Iterable meth * the final output.

    * * @since 1.8 + * @return the methods to use for the content */ public Iterable getContentMethods() { return contentMethods; diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZMethodConfiguration.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZMethodConfiguration.java index 753a561b0a3..59aa2e4a168 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZMethodConfiguration.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZMethodConfiguration.java @@ -64,6 +64,7 @@ public SevenZMethodConfiguration(SevenZMethod method, Object options) { /** * The specified method. + * @return the method */ public SevenZMethod getMethod() { return method; @@ -71,6 +72,7 @@ public SevenZMethod getMethod() { /** * The specified options. + * @return the options */ public Object getOptions() { return options; diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFile.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFile.java index 72ca42f08c8..1886898c4fa 100644 --- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFile.java @@ -77,6 +77,7 @@ public SevenZOutputFile(final File filename) throws IOException { * *

    This is a short form for passing a single-element iterable * to {@link #setContentMethods}.

    + * @param method the default compression method */ public void setContentCompression(SevenZMethod method) { setContentMethods(Collections.singletonList(new SevenZMethodConfiguration(method))); @@ -94,6 +95,7 @@ public void setContentCompression(SevenZMethod method) { * the final output.

    * * @since 1.8 + * @param methods the default (compression) methods */ public void setContentMethods(Iterable methods) { this.contentMethods = reverse(methods); @@ -102,7 +104,7 @@ public void setContentMethods(Iterable meth /** * Closes the archive, calling {@link #finish} if necessary. * - * @throws IOException + * @throws IOException on error */ public void close() throws IOException { if (!finished) { @@ -114,11 +116,11 @@ public void close() throws IOException { /** * Create an archive entry using the inputFile and entryName provided. * - * @param inputFile - * @param entryName + * @param inputFile file to create an entry from + * @param entryName the name to use * @return the ArchiveEntry set up with details from the file * - * @throws IOException + * @throws IOException on error */ public SevenZArchiveEntry createArchiveEntry(final File inputFile, final String entryName) throws IOException { @@ -136,7 +138,7 @@ public SevenZArchiveEntry createArchiveEntry(final File inputFile, * {@link #closeArchiveEntry()} to complete the process. * * @param archiveEntry describes the entry - * @throws IOException + * @throws IOException on error */ public void putArchiveEntry(final ArchiveEntry archiveEntry) throws IOException { final SevenZArchiveEntry entry = (SevenZArchiveEntry) archiveEntry; @@ -145,7 +147,7 @@ public void putArchiveEntry(final ArchiveEntry archiveEntry) throws IOException /** * Closes the archive entry. 
- * @throws IOException + * @throws IOException on error */ public void closeArchiveEntry() throws IOException { if (currentOutputStream != null) { diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java index f39f0d9631b..37358fe7f58 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java @@ -318,6 +318,7 @@ public TarArchiveEntry(byte[] headerBuf) { * @param encoding encoding to use for file names * @since 1.4 * @throws IllegalArgumentException if any of the numeric fields have an invalid format + * @throws IOException on error */ public TarArchiveEntry(byte[] headerBuf, ZipEncoding encoding) throws IOException { @@ -425,7 +426,7 @@ public void setLinkName(String link) { * * @return This entry's user id. * @deprecated use #getLongUserId instead as user ids can be - * bigger than {@link Integer.MAX_INT} + * bigger than {@link Integer#MAX_VALUE} */ @Deprecated public int getUserId() { @@ -466,7 +467,7 @@ public void setUserId(long userId) { * * @return This entry's group id. * @deprecated use #getLongGroupId instead as group ids can be - * bigger than {@link Integer.MAX_INT} + * bigger than {@link Integer#MAX_VALUE} */ @Deprecated public int getGroupId() { @@ -790,6 +791,7 @@ public boolean isDirectory() { * Check if this is a "normal file" * * @since 1.2 + * @return whether this is a "normal file" */ public boolean isFile() { if (file != null) { @@ -805,6 +807,7 @@ public boolean isFile() { * Check if this is a symbolic link entry. * * @since 1.2 + * @return whether this is a symbolic link */ public boolean isSymbolicLink() { return linkFlag == LF_SYMLINK; @@ -814,6 +817,7 @@ public boolean isSymbolicLink() { * Check if this is a link entry. 
* * @since 1.2 + * @return whether this is a link entry */ public boolean isLink() { return linkFlag == LF_LINK; @@ -823,6 +827,7 @@ public boolean isLink() { * Check if this is a character device entry. * * @since 1.2 + * @return whether this is a character device */ public boolean isCharacterDevice() { return linkFlag == LF_CHR; @@ -832,6 +837,7 @@ public boolean isCharacterDevice() { * Check if this is a block device entry. * * @since 1.2 + * @return whether this is a block device */ public boolean isBlockDevice() { return linkFlag == LF_BLK; @@ -841,6 +847,7 @@ public boolean isBlockDevice() { * Check if this is a FIFO (pipe) entry. * * @since 1.2 + * @return whether this is a FIFO entry */ public boolean isFIFO() { return linkFlag == LF_FIFO; @@ -896,6 +903,7 @@ public void writeEntryHeader(byte[] outbuf) { * extension for numeric fields if their value doesn't fit in the * maximum size of standard tar archives * @since 1.4 + * @throws IOException on error */ public void writeEntryHeader(byte[] outbuf, ZipEncoding encoding, boolean starMode) throws IOException { @@ -981,6 +989,7 @@ public void parseTarHeader(byte[] header) { * @since 1.4 * @throws IllegalArgumentException if any of the numeric fields * have an invalid format + * @throws IOException on error */ public void parseTarHeader(byte[] header, ZipEncoding encoding) throws IOException { diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java index f12f9ccabef..803f6d2ff39 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java @@ -189,6 +189,7 @@ public void setBigNumberMode(int bigNumberMode) { /** * Whether to add a PAX extension header for non-ASCII file names. 
* @since 1.4 + * @param b whether to add a PAX extension header for non-ASCII file names. */ public void setAddPaxHeadersForNonAsciiNames(boolean b) { addPaxHeadersForNonAsciiNames = b; diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java index 94e175c210d..3cbf83f65a4 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java @@ -280,6 +280,7 @@ public static String parseName(byte[] buffer, final int offset, final int length * @param encoding name of the encoding to use for file names * @since 1.4 * @return The entry name. + * @throws IOException on error */ public static String parseName(byte[] buffer, final int offset, final int length, @@ -345,6 +346,7 @@ public static int formatNameBytes(String name, byte[] buf, final int offset, fin * @param encoding name of the encoding to use for file names * @since 1.4 * @return The updated offset, i.e. offset + length + * @throws IOException on error */ public static int formatNameBytes(String name, byte[] buf, final int offset, final int length, diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.java b/src/main/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.java index 21cddf384e7..a0c43dea68c 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.java @@ -312,6 +312,7 @@ private UnparseableExtraField(int k) { /** * Key of the action to take. 
+ * @return the key */ public int getKey() { return key; } } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java b/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java index 6e80e364b6b..07775db9ef6 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java @@ -81,6 +81,7 @@ public GeneralPurposeBit() { /** * whether the current entry uses UTF8 for file name and comment. + * @return whether the current entry uses UTF8 for file name and comment. */ public boolean usesUTF8ForNames() { return languageEncodingFlag; @@ -88,6 +89,7 @@ public boolean usesUTF8ForNames() { /** * whether the current entry will use UTF8 for file name and comment. + * @param b whether the current entry will use UTF8 for file name and comment. */ public void useUTF8ForNames(boolean b) { languageEncodingFlag = b; @@ -95,6 +97,8 @@ public void useUTF8ForNames(boolean b) { /** * whether the current entry uses the data descriptor to store CRC + * and size information. + * @return whether the current entry uses the data descriptor to store CRC * and size information */ public boolean usesDataDescriptor() { @@ -103,6 +107,8 @@ public boolean usesDataDescriptor() { /** * whether the current entry will use the data descriptor to store + * CRC and size information. + * @param b whether the current entry will use the data descriptor to store * CRC and size information */ public void useDataDescriptor(boolean b) { @@ -110,28 +116,32 @@ public void useDataDescriptor(boolean b) { } /** - * whether the current entry is encrypted + * whether the current entry is encrypted. + * @return whether the current entry is encrypted */ public boolean usesEncryption() { return encryptionFlag; } /** - * whether the current entry will be encrypted + * whether the current entry will be encrypted. 
+ * @param b whether the current entry will be encrypted */ public void useEncryption(boolean b) { encryptionFlag = b; } /** - * whether the current entry is encrypted using strong encryption + * whether the current entry is encrypted using strong encryption. + * @return whether the current entry is encrypted using strong encryption */ public boolean usesStrongEncryption() { return encryptionFlag && strongEncryptionFlag; } /** - * whether the current entry will be encrypted using strong encryption + * whether the current entry will be encrypted using strong encryption. + * @param b whether the current entry will be encrypted using strong encryption */ public void useStrongEncryption(boolean b) { strongEncryptionFlag = b; @@ -156,6 +166,7 @@ int getNumberOfShannonFanoTrees() { /** * Encodes the set bits in a form suitable for ZIP archives. + * @return the encoded general purpose bits */ public byte[] encode() { byte[] result = new byte[2]; @@ -188,6 +199,7 @@ public void encode(byte[] buf, int offset) { * * @param data local file header or a central directory entry. * @param offset offset at which the general purpose bit starts + * @return parsed flags */ public static GeneralPurposeBit parse(final byte[] data, final int offset) { final int generalPurposeFlag = ZipShort.getValue(data, offset); diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java index 622e0b4a522..862666ffb8c 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java @@ -133,7 +133,7 @@ public void close() throws IOException { * * @param file The file to offload compressed data into. * @return A ScatterZipOutputStream that is ready for use. 
- * @throws FileNotFoundException + * @throws FileNotFoundException if the file cannot be found */ public static ScatterZipOutputStream fileBased(File file) throws FileNotFoundException { return fileBased(file, Deflater.DEFAULT_COMPRESSION); @@ -145,7 +145,7 @@ public static ScatterZipOutputStream fileBased(File file) throws FileNotFoundExc * @param file The file to offload compressed data into. * @param compressionLevel The compression level to use, @see #Deflater * @return A ScatterZipOutputStream that is ready for use. - * @throws FileNotFoundException + * @throws FileNotFoundException if the file cannot be found */ public static ScatterZipOutputStream fileBased(File file, int compressionLevel) throws FileNotFoundException { ScatterGatherBackingStore bs = new FileBasedScatterGatherBackingStore(file); diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/UnsupportedZipFeatureException.java b/src/main/java/org/apache/commons/compress/archivers/zip/UnsupportedZipFeatureException.java index b1aad770c9c..8316ab47ab8 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/UnsupportedZipFeatureException.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/UnsupportedZipFeatureException.java @@ -74,6 +74,7 @@ public UnsupportedZipFeatureException(Feature reason) { /** * The unsupported feature that has been used. + * @return The unsupported feature that has been used. */ public Feature getFeature() { return reason; @@ -81,6 +82,7 @@ public Feature getFeature() { /** * The entry using the unsupported feature. + * @return The entry using the unsupported feature. 
*/ public ZipArchiveEntry getEntry() { return entry; diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/Zip64ExtendedInformationExtraField.java b/src/main/java/org/apache/commons/compress/archivers/zip/Zip64ExtendedInformationExtraField.java index 65368800b2c..c7bec68ddcd 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/Zip64ExtendedInformationExtraField.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/Zip64ExtendedInformationExtraField.java @@ -88,6 +88,8 @@ public Zip64ExtendedInformationExtraField(ZipEightByteInteger size, * * @param size the entry's original size * @param compressedSize the entry's compressed size + * @param relativeHeaderOffset the entry's offset + * @param diskStart the disk start * * @throws IllegalArgumentException if size or compressedSize is null */ @@ -205,6 +207,12 @@ public void parseFromCentralDirectoryData(byte[] buffer, int offset, * field are optional and must only be present if their corresponding * entry inside the central directory contains the correct magic * value.

    + * + * @param hasUncompressedSize flag to read from central directory + * @param hasCompressedSize flag to read from central directory + * @param hasRelativeHeaderOffset flag to read from central directory + * @param hasDiskStart flag to read from central directory + * @throws ZipException on error */ public void reparseCentralDirectoryData(boolean hasUncompressedSize, boolean hasCompressedSize, @@ -248,6 +256,7 @@ public void reparseCentralDirectoryData(boolean hasUncompressedSize, /** * The uncompressed size stored in this extra field. + * @return The uncompressed size stored in this extra field. */ public ZipEightByteInteger getSize() { return size; @@ -255,6 +264,7 @@ public ZipEightByteInteger getSize() { /** * The uncompressed size stored in this extra field. + * @param size The uncompressed size stored in this extra field. */ public void setSize(ZipEightByteInteger size) { this.size = size; @@ -262,6 +272,7 @@ public void setSize(ZipEightByteInteger size) { /** * The compressed size stored in this extra field. + * @return The compressed size stored in this extra field. */ public ZipEightByteInteger getCompressedSize() { return compressedSize; @@ -269,6 +280,7 @@ public ZipEightByteInteger getCompressedSize() { /** * The uncompressed size stored in this extra field. + * @param compressedSize The uncompressed size stored in this extra field. */ public void setCompressedSize(ZipEightByteInteger compressedSize) { this.compressedSize = compressedSize; @@ -276,6 +288,7 @@ public void setCompressedSize(ZipEightByteInteger compressedSize) { /** * The relative header offset stored in this extra field. + * @return The relative header offset stored in this extra field. */ public ZipEightByteInteger getRelativeHeaderOffset() { return relativeHeaderOffset; @@ -283,6 +296,7 @@ public ZipEightByteInteger getRelativeHeaderOffset() { /** * The relative header offset stored in this extra field. + * @param rho The relative header offset stored in this extra field. 
*/ public void setRelativeHeaderOffset(ZipEightByteInteger rho) { relativeHeaderOffset = rho; @@ -290,6 +304,7 @@ public void setRelativeHeaderOffset(ZipEightByteInteger rho) { /** * The disk start number stored in this extra field. + * @return The disk start number stored in this extra field. */ public ZipLong getDiskStartNumber() { return diskStart; @@ -297,6 +312,7 @@ public ZipLong getDiskStartNumber() { /** * The disk start number stored in this extra field. + * @param ds The disk start number stored in this extra field. */ public void setDiskStartNumber(ZipLong ds) { diskStart = ds; diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java index 3d8d26c9795..4f6637316b4 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java @@ -159,6 +159,8 @@ protected ZipArchiveEntry() { * the file is a directory. If the file is not a directory a * potential trailing forward slash will be stripped from the * entry name.

    + * @param inputFile file to create the entry from + * @param entryName name of the entry */ public ZipArchiveEntry(File inputFile, String entryName) { this(inputFile.isDirectory() && !entryName.endsWith("/") ? @@ -493,6 +495,7 @@ public void removeUnparseableExtraFieldData() { /** * Looks up an extra field by its header id. * + * @param type the header id * @return null if no such field exists. */ public ZipExtraField getExtraField(ZipShort type) { @@ -551,6 +554,7 @@ protected void setExtra() { /** * Sets the central directory part of extra fields. + * @param b an array of bytes to be parsed into extra fields */ public void setCentralDirectoryExtra(byte[] b) { try { @@ -659,6 +663,7 @@ protected void setName(String name, byte[] rawName) { *

    This method will return null if this instance has not been * read from an archive.

    * + * @return the raw name bytes * @since 1.2 */ public byte[] getRawName() { @@ -686,6 +691,7 @@ public int hashCode() { /** * The "general purpose bit" field. + * @return the general purpose bit * @since 1.1 */ public GeneralPurposeBit getGeneralPurposeBit() { @@ -694,6 +700,7 @@ public GeneralPurposeBit getGeneralPurposeBit() { /** * The "general purpose bit" field. + * @param b the general purpose bit * @since 1.1 */ public void setGeneralPurposeBit(GeneralPurposeBit b) { diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java index 5625c14b021..f9d240b003f 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java @@ -167,6 +167,8 @@ public ZipArchiveInputStream(InputStream inputStream) { } /** + * Create an instance using the specified encoding + * @param inputStream the stream to wrap * @param encoding the encoding to use for file names, use null * for the platform's default encoding * @since 1.5 @@ -176,6 +178,8 @@ public ZipArchiveInputStream(InputStream inputStream, String encoding) { } /** + * Create an instance using the specified encoding + * @param inputStream the stream to wrap * @param encoding the encoding to use for file names, use null * for the platform's default encoding * @param useUnicodeExtraFields whether to use InfoZIP Unicode @@ -186,6 +190,8 @@ public ZipArchiveInputStream(InputStream inputStream, String encoding, boolean u } /** + * Create an instance using the specified encoding + * @param inputStream the stream to wrap * @param encoding the encoding to use for file names, use null * for the platform's default encoding * @param useUnicodeExtraFields whether to use InfoZIP Unicode diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java 
b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java index 169f088b913..07ba40513a4 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -349,6 +349,9 @@ public String getEncoding() { * encoding is UTF-8. * *

    Defaults to true.

    + * + * @param b whether to set the language encoding flag if the file + * name encoding is UTF-8 */ public void setUseLanguageEncodingFlag(boolean b) { useUTF8Flag = b && ZipEncodingHelper.isUTF8(encoding); @@ -358,6 +361,8 @@ public void setUseLanguageEncodingFlag(boolean b) { * Whether to create Unicode Extra Fields. * *

    Defaults to NEVER.

    + * + * @param b whether to create Unicode Extra Fields. */ public void setCreateUnicodeExtraFields(UnicodeExtraFieldPolicy b) { createUnicodeExtraFields = b; @@ -368,6 +373,10 @@ public void setCreateUnicodeExtraFields(UnicodeExtraFieldPolicy b) { * the file name cannot be encoded using the specified encoding. * *

    Defaults to false.

    + * + * @param b whether to fall back to UTF and the language encoding + * flag if the file name cannot be encoded using the specified + * encoding. */ public void setFallbackToUTF8(boolean b) { fallbackToUTF8 = b; @@ -416,6 +425,7 @@ public void setFallbackToUTF8(boolean b) { * case the default is {@link Zip64Mode#Never Never}.

    * * @since 1.3 + * @param mode Whether Zip64 extensions will be used. */ public void setUseZip64(Zip64Mode mode) { zip64Mode = mode; diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipEncoding.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipEncoding.java index 65d2044b5e5..d38b5560985 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipEncoding.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipEncoding.java @@ -71,14 +71,14 @@ public interface ZipEncoding { * beginning of the encoded result, the byte buffer has a * backing array and the limit of the byte buffer points * to the end of the encoded result. - * @throws IOException + * @throws IOException on error */ ByteBuffer encode(String name) throws IOException; /** * @param data The byte values to decode. * @return The decoded string. - * @throws IOException + * @throws IOException on error */ String decode(byte [] data) throws IOException; } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java index 348d86b92e9..e8bcc032f98 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java @@ -340,6 +340,8 @@ public Iterable getEntriesInPhysicalOrder(String name) { *

    May return false if it is set up to use encryption or a * compression method that hasn't been implemented yet.

    * @since 1.1 + * @param ze the entry + * @return whether this class is able to read the given entry. */ public boolean canReadEntryData(ZipArchiveEntry ze) { return ZipUtil.canHandleEntryData(ze); @@ -371,6 +373,7 @@ private InputStream getRawInputStream(ZipArchiveEntry ze) { * * @param target The zipArchiveOutputStream to write the entries to * @param predicate A predicate that selects which entries to write + * @throws IOException on error */ public void copyRawEntries(ZipArchiveOutputStream target, ZipArchiveEntryPredicate predicate) throws IOException { diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipMethod.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipMethod.java index 5289eb5d081..efe59e49d05 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipMethod.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipMethod.java @@ -206,6 +206,9 @@ public int getCode() { /** * returns the {@link ZipMethod} for the given code or null if the * method is not known. + * @param code the code + * @return the {@link ZipMethod} for the given code or null if the + * method is not known. */ public static ZipMethod getMethodByCode(int code) { return codeToEnum.get(code); diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipUtil.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipUtil.java index 026da708e3f..b12b19f82b5 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipUtil.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipUtil.java @@ -211,6 +211,8 @@ public static Date fromDosTime(ZipLong zipDosTime) { /** * Converts DOS time to Java time (number of milliseconds since * epoch). 
+ * @param dosTime time to convert + * @return converted time */ public static long dosToJavaTime(long dosTime) { Calendar cal = Calendar.getInstance(); diff --git a/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream.java index b32b1f9b21e..44568600afe 100644 --- a/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream.java @@ -100,6 +100,7 @@ public class BZip2CompressorInputStream extends CompressorInputStream implements * read from the specified stream. This doesn't suppprt decompressing * concatenated .bz2 files. * + * @param in the InputStream from which this object should be created * @throws IOException * if the stream content is malformed or an I/O error occurs. * @throws NullPointerException diff --git a/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorOutputStream.java b/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorOutputStream.java index 9d89eca1807..3714fe44fb8 100644 --- a/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorOutputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorOutputStream.java @@ -34,6 +34,8 @@ public class DeflateCompressorOutputStream extends CompressorOutputStream { /** * Creates a Deflate compressed output stream with the default parameters. + * @param outputStream the stream to wrap + * @throws IOException on error */ public DeflateCompressorOutputStream(OutputStream outputStream) throws IOException { this(outputStream, new DeflateParameters()); @@ -41,6 +43,9 @@ public DeflateCompressorOutputStream(OutputStream outputStream) throws IOExcepti /** * Creates a Deflate compressed output stream with the specified parameters. 
+ * @param outputStream the stream to wrap + * @param parameters the deflate parameters to apply + * @throws IOException on error */ public DeflateCompressorOutputStream(OutputStream outputStream, DeflateParameters parameters) throws IOException { diff --git a/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200CompressorInputStream.java index fa04aef3593..9711a0cf25f 100644 --- a/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200CompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200CompressorInputStream.java @@ -50,6 +50,8 @@ public class Pack200CompressorInputStream extends CompressorInputStream { * *

    When reading from a file the File-arg constructor may * provide better performance.

    + * + * @param in the InputStream from which this object should be created */ public Pack200CompressorInputStream(final InputStream in) throws IOException { @@ -62,6 +64,9 @@ public Pack200CompressorInputStream(final InputStream in) * *

    When reading from a file the File-arg constructor may * provide better performance.

    + * + * @param in the InputStream from which this object should be created + * @param mode the strategy to use */ public Pack200CompressorInputStream(final InputStream in, final Pack200Strategy mode) @@ -75,6 +80,9 @@ public Pack200CompressorInputStream(final InputStream in, * *

    When reading from a file the File-arg constructor may * provide better performance.

    + * + * @param in the InputStream from which this object should be created + * @param props Pack200 properties to use */ public Pack200CompressorInputStream(final InputStream in, final Map props) @@ -88,6 +96,10 @@ public Pack200CompressorInputStream(final InputStream in, * *

    When reading from a file the File-arg constructor may * provide better performance.

    + * + * @param in the InputStream from which this object should be created + * @param mode the strategy to use + * @param props Pack200 properties to use */ public Pack200CompressorInputStream(final InputStream in, final Pack200Strategy mode, @@ -99,6 +111,8 @@ public Pack200CompressorInputStream(final InputStream in, /** * Decompresses the given file, caching the decompressed data in * memory. + * + * @param f the file to decompress */ public Pack200CompressorInputStream(final File f) throws IOException { this(f, Pack200Strategy.IN_MEMORY); @@ -107,6 +121,9 @@ public Pack200CompressorInputStream(final File f) throws IOException { /** * Decompresses the given file using the given strategy to cache * the results. + * + * @param f the file to decompress + * @param mode the strategy to use */ public Pack200CompressorInputStream(final File f, final Pack200Strategy mode) throws IOException { @@ -116,6 +133,9 @@ public Pack200CompressorInputStream(final File f, final Pack200Strategy mode) /** * Decompresses the given file, caching the decompressed data in * memory and using the given properties. + * + * @param f the file to decompress + * @param props Pack200 properties to use */ public Pack200CompressorInputStream(final File f, final Map props) @@ -126,6 +146,10 @@ public Pack200CompressorInputStream(final File f, /** * Decompresses the given file using the given strategy to cache * the results and the given properties. 
+ * + * @param f the file to decompress + * @param mode the strategy to use + * @param props Pack200 properties to use */ public Pack200CompressorInputStream(final File f, final Pack200Strategy mode, final Map props) diff --git a/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200CompressorOutputStream.java b/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200CompressorOutputStream.java index cfb315d9e6b..9ecf6eb4226 100644 --- a/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200CompressorOutputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200CompressorOutputStream.java @@ -43,6 +43,8 @@ public class Pack200CompressorOutputStream extends CompressorOutputStream { /** * Compresses the given stream, caching the compressed data in * memory. + * + * @param out the stream to write to */ public Pack200CompressorOutputStream(final OutputStream out) throws IOException { @@ -52,6 +54,9 @@ public Pack200CompressorOutputStream(final OutputStream out) /** * Compresses the given stream using the given strategy to cache * the results. + * + * @param out the stream to write to + * @param mode the strategy to use */ public Pack200CompressorOutputStream(final OutputStream out, final Pack200Strategy mode) @@ -62,6 +67,9 @@ public Pack200CompressorOutputStream(final OutputStream out, /** * Compresses the given stream, caching the compressed data in * memory and using the given properties. + * + * @param out the stream to write to + * @param props Pack200 properties to use */ public Pack200CompressorOutputStream(final OutputStream out, final Map props) @@ -72,6 +80,10 @@ public Pack200CompressorOutputStream(final OutputStream out, /** * Compresses the given stream using the given strategy to cache * the results and the given properties. 
+ * + * @param out the stream to write to + * @param mode the strategy to use + * @param props Pack200 properties to use */ public Pack200CompressorOutputStream(final OutputStream out, final Pack200Strategy mode, diff --git a/src/main/java/org/apache/commons/compress/compressors/xz/XZCompressorOutputStream.java b/src/main/java/org/apache/commons/compress/compressors/xz/XZCompressorOutputStream.java index e512b6605eb..0c192cc5f19 100644 --- a/src/main/java/org/apache/commons/compress/compressors/xz/XZCompressorOutputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/xz/XZCompressorOutputStream.java @@ -34,7 +34,9 @@ public class XZCompressorOutputStream extends CompressorOutputStream { /** * Creates a new XZ compressor using the default LZMA2 options. - * This is equivalent to XZCompressorOutputStream(6). + * This is equivalent to XZCompressorOutputStream(outputStream, 6). + * @param outputStream the stream to wrap + * @throws IOException on error */ public XZCompressorOutputStream(OutputStream outputStream) throws IOException { @@ -53,6 +55,9 @@ public XZCompressorOutputStream(OutputStream outputStream) * Unless the uncompressed size of the file exceeds 8 MiB, * 16 MiB, or 32 MiB, it is waste of memory to use the * presets 7, 8, or 9, respectively. + * @param outputStream the stream to wrap + * @param preset the preset + * @throws IOException on error */ public XZCompressorOutputStream(OutputStream outputStream, int preset) throws IOException { diff --git a/src/main/java/org/apache/commons/compress/compressors/xz/XZUtils.java b/src/main/java/org/apache/commons/compress/compressors/xz/XZUtils.java index 3334b66e1cb..d1742d9ee91 100644 --- a/src/main/java/org/apache/commons/compress/compressors/xz/XZUtils.java +++ b/src/main/java/org/apache/commons/compress/compressors/xz/XZUtils.java @@ -94,6 +94,7 @@ public static boolean matches(byte[] signature, int length) { /** * Are the classes required to support XZ compression available? 
* @since 1.5 + * @return true if the classes required to support XZ compression are available */ public static boolean isXZCompressionAvailable() { final CachedAvailability cachedResult = cachedXZAvailability; diff --git a/src/main/java/org/apache/commons/compress/utils/ArchiveUtils.java b/src/main/java/org/apache/commons/compress/utils/ArchiveUtils.java index 16beed2e053..8fee98c7346 100644 --- a/src/main/java/org/apache/commons/compress/utils/ArchiveUtils.java +++ b/src/main/java/org/apache/commons/compress/utils/ArchiveUtils.java @@ -40,6 +40,7 @@ private ArchiveUtils(){ * d 100 testfiles * * + * @param entry the entry * @return the representation of the entry */ public static String toString(ArchiveEntry entry){ @@ -59,10 +60,10 @@ public static String toString(ArchiveEntry entry){ /** * Check if buffer contents matches Ascii String. * - * @param expected - * @param buffer - * @param offset - * @param length + * @param expected expected string + * @param buffer the buffer + * @param offset offset to read from + * @param length length of the buffer * @return {@code true} if buffer is the same as the expected string */ public static boolean matchAsciiBuffer( @@ -79,8 +80,8 @@ public static boolean matchAsciiBuffer( /** * Check if buffer contents matches Ascii String. * - * @param expected - * @param buffer + * @param expected the expected strin + * @param buffer the buffer * @return {@code true} if buffer is the same as the expected string */ public static boolean matchAsciiBuffer(String expected, byte[] buffer){ @@ -91,7 +92,7 @@ public static boolean matchAsciiBuffer(String expected, byte[] buffer){ * Convert a string to Ascii bytes. * Used for comparing "magic" strings which need to be independent of the default Locale. 
* - * @param inputString + * @param inputString string to convert * @return the bytes */ public static byte[] toAsciiBytes(String inputString){ @@ -105,7 +106,7 @@ public static byte[] toAsciiBytes(String inputString){ /** * Convert an input byte array to a String using the ASCII character set. * - * @param inputBytes + * @param inputBytes bytes to convert * @return the bytes, interpreted as an Ascii string */ public static String toAsciiString(final byte[] inputBytes){ @@ -135,13 +136,13 @@ public static String toAsciiString(final byte[] inputBytes, int offset, int leng /** * Compare byte buffers, optionally ignoring trailing nulls * - * @param buffer1 - * @param offset1 - * @param length1 - * @param buffer2 - * @param offset2 - * @param length2 - * @param ignoreTrailingNulls + * @param buffer1 first buffer + * @param offset1 first offset + * @param length1 first length + * @param buffer2 second buffer + * @param offset2 second offset + * @param length2 second length + * @param ignoreTrailingNulls whether to ignore trailing nulls * @return {@code true} if buffer1 and buffer2 have same contents, having regard to trailing nulls */ public static boolean isEqual( @@ -179,12 +180,12 @@ public static boolean isEqual( /** * Compare byte buffers * - * @param buffer1 - * @param offset1 - * @param length1 - * @param buffer2 - * @param offset2 - * @param length2 + * @param buffer1 the first buffer + * @param offset1 the first offset + * @param length1 the first length + * @param buffer2 the second buffer + * @param offset2 the second offset + * @param length2 the second length * @return {@code true} if buffer1 and buffer2 have same contents */ public static boolean isEqual( @@ -196,8 +197,8 @@ public static boolean isEqual( /** * Compare byte buffers * - * @param buffer1 - * @param buffer2 + * @param buffer1 the first buffer + * @param buffer2 the second buffer * @return {@code true} if buffer1 and buffer2 have same contents */ public static boolean isEqual(final byte[] 
buffer1, final byte[] buffer2 ){ @@ -207,9 +208,9 @@ public static boolean isEqual(final byte[] buffer1, final byte[] buffer2 ){ /** * Compare byte buffers, optionally ignoring trailing nulls * - * @param buffer1 - * @param buffer2 - * @param ignoreTrailingNulls + * @param buffer1 the first buffer + * @param buffer2 the second buffer + * @param ignoreTrailingNulls whether to ignore tariling nulls * @return {@code true} if buffer1 and buffer2 have same contents */ public static boolean isEqual(final byte[] buffer1, final byte[] buffer2, boolean ignoreTrailingNulls){ @@ -219,12 +220,12 @@ public static boolean isEqual(final byte[] buffer1, final byte[] buffer2, boolea /** * Compare byte buffers, ignoring trailing nulls * - * @param buffer1 - * @param offset1 - * @param length1 - * @param buffer2 - * @param offset2 - * @param length2 + * @param buffer1 the first buffer + * @param offset1 the first offset + * @param length1 the first length + * @param buffer2 the second buffer + * @param offset2 the second offset + * @param length2 the second length * @return {@code true} if buffer1 and buffer2 have same contents, having regard to trailing nulls */ public static boolean isEqualWithNull( diff --git a/src/main/java/org/apache/commons/compress/utils/BitInputStream.java b/src/main/java/org/apache/commons/compress/utils/BitInputStream.java index dd1d9b67553..91aad685b91 100644 --- a/src/main/java/org/apache/commons/compress/utils/BitInputStream.java +++ b/src/main/java/org/apache/commons/compress/utils/BitInputStream.java @@ -75,6 +75,7 @@ public void clearBitCache() { * @return the bits concatenated as a long using the stream's byte order. 
* -1 if the end of the underlying stream has been reached before reading * the requested number of bits + * @throws IOException on error */ public long readBits(final int count) throws IOException { if (count < 0 || count > MAXIMUM_CACHE_SIZE) { diff --git a/src/main/java/org/apache/commons/compress/utils/CRC32VerifyingInputStream.java b/src/main/java/org/apache/commons/compress/utils/CRC32VerifyingInputStream.java index 0dc5b98823f..bac371669df 100644 --- a/src/main/java/org/apache/commons/compress/utils/CRC32VerifyingInputStream.java +++ b/src/main/java/org/apache/commons/compress/utils/CRC32VerifyingInputStream.java @@ -28,12 +28,20 @@ */ public class CRC32VerifyingInputStream extends ChecksumVerifyingInputStream { + /** + * @param in the stream to wrap + * @param size the of the stream's content + * @param expectedCrc32 the expected checksum + */ public CRC32VerifyingInputStream(final InputStream in, final long size, final int expectedCrc32) { this(in, size, expectedCrc32 & 0xFFFFffffl); } /** * @since 1.7 + * @param in the stream to wrap + * @param size the of the stream's content + * @param expectedCrc32 the expected checksum */ public CRC32VerifyingInputStream(final InputStream in, final long size, final long expectedCrc32) { super(new CRC32(), in, size, expectedCrc32); diff --git a/src/main/java/org/apache/commons/compress/utils/IOUtils.java b/src/main/java/org/apache/commons/compress/utils/IOUtils.java index 4d184b7fcee..940d29d9423 100644 --- a/src/main/java/org/apache/commons/compress/utils/IOUtils.java +++ b/src/main/java/org/apache/commons/compress/utils/IOUtils.java @@ -49,6 +49,7 @@ private IOUtils(){ * the InputStream to copy * @param output * the target Stream + * @return the number of bytes copied * @throws IOException * if an error occurs */ @@ -65,6 +66,7 @@ public static long copy(final InputStream input, final OutputStream output) thro * the target Stream * @param buffersize * the buffer size to use + * @return the number of bytes copied * 
@throws IOException * if an error occurs */ @@ -93,7 +95,7 @@ public static long copy(final InputStream input, final OutputStream output, int * @param input stream to skip bytes in * @param numToSkip the number of bytes to skip * @return the number of bytes actually skipped - * @throws IOException + * @throws IOException on error */ public static long skip(InputStream input, long numToSkip) throws IOException { long available = numToSkip; @@ -126,7 +128,7 @@ public static long skip(InputStream input, long numToSkip) throws IOException { * @param input stream to read from * @param b buffer to fill * @return the number of bytes actually read - * @throws IOException + * @throws IOException on error */ public static int readFully(InputStream input, byte[] b) throws IOException { return readFully(input, b, 0, b.length); From dec258f502a92ce230567cb6271ac6f0cc9de8be Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sat, 15 Aug 2015 15:33:29 +0000 Subject: [PATCH 175/189] tag RC3 of Compress 1.10 git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/tags/COMPRESS-1.10-RC3@1696055 13f79535-47bb-0310-9956-ffa450edef68 --- pom.xml | 2 +- src/site/site.xml | 1 + src/site/xdoc/download_compress.xml | 26 +++++++++++++------------- src/site/xdoc/index.xml | 4 ++-- 4 files changed, 17 insertions(+), 16 deletions(-) diff --git a/pom.xml b/pom.xml index 446af7a9be9..e0e88765417 100644 --- a/pom.xml +++ b/pom.xml @@ -25,7 +25,7 @@ org.apache.commons commons-compress - 1.10-SNAPSHOT + 1.10 Apache Commons Compress http://commons.apache.org/proper/commons-compress/ diff --git a/src/site/site.xml b/src/site/site.xml index ab0c27e11ec..e7b9e437108 100644 --- a/src/site/site.xml +++ b/src/site/site.xml @@ -35,6 +35,7 @@ + diff --git a/src/site/xdoc/download_compress.xml b/src/site/xdoc/download_compress.xml index b48c5d11b6b..c8cfa4d2d60 100644 --- a/src/site/xdoc/download_compress.xml +++ b/src/site/xdoc/download_compress.xml @@ -95,32 +95,32 @@ limitations under the 
License.

    -
    +

    MethodOption TypeDescription
    BZIP2NumberBlock Size - an number between 1 and 9
    DEFLATENumberCompression Level - an number between 1 and 9
    - - - + + + - - - + + +
    commons-compress-1.9-bin.tar.gzmd5pgpcommons-compress-1.10-bin.tar.gzmd5pgp
    commons-compress-1.9-bin.zipmd5pgpcommons-compress-1.10-bin.zipmd5pgp
    - - - + + + - - - + + +
    commons-compress-1.9-src.tar.gzmd5pgpcommons-compress-1.10-src.tar.gzmd5pgp
    commons-compress-1.9-src.zipmd5pgpcommons-compress-1.10-src.zipmd5pgp
    diff --git a/src/site/xdoc/index.xml b/src/site/xdoc/index.xml index 0f095b01c17..e539bdb8e8b 100644 --- a/src/site/xdoc/index.xml +++ b/src/site/xdoc/index.xml @@ -88,14 +88,14 @@ by the java.util.jar package of the Java class library. XZ and lzma support is provided by the public domain XZ for - Java library. As of Commons Compress 1.9 support for + Java library. As of Commons Compress 1.10 support for the lzma, Z and Snappy formats is read-only.

    The ar, arj, cpio, dump, tar, 7z and zip formats are supported as archivers where the zip implementation provides capabilities that go beyond the features found in java.util.zip. As of Commons Compress - 1.9 support for the dump and arj formats is + 1.10 support for the dump and arj formats is read-only - 7z can read most compressed and encrypted archives but only write unencrypted ones. LZMA(2) support in 7z requires XZ for From d977e67aad30a36b8b101cb0250e10bda035ac77 Mon Sep 17 00:00:00 2001 From: "Gary D. Gregory" Date: Sat, 15 Aug 2015 23:07:53 +0000 Subject: [PATCH 176/189] findbugs-maven-plugin 3.0.0 -> 3.0.2. git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1696106 13f79535-47bb-0310-9956-ffa450edef68 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 446af7a9be9..cf567c21fcf 100644 --- a/pom.xml +++ b/pom.xml @@ -297,7 +297,7 @@ jar, tar, zip, dump, 7z, arj. org.codehaus.mojo findbugs-maven-plugin - 3.0.0 + 3.0.2 Normal Default From 1dcab3f854e6fffa16c842b8a10a6fa2163795ea Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Tue, 18 Aug 2015 17:51:21 +0000 Subject: [PATCH 177/189] newer version of findbugs has detected a potential NPE git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1696471 13f79535-47bb-0310-9956-ffa450edef68 --- .../commons/compress/archivers/tar/TarArchiveEntry.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java index 37358fe7f58..cdbc80f3b8f 100644 --- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java +++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java @@ -865,9 +865,9 @@ public TarArchiveEntry[] getDirectoryEntries() { } String[] list = file.list(); - TarArchiveEntry[] result = new 
TarArchiveEntry[list.length]; + TarArchiveEntry[] result = new TarArchiveEntry[list == null ? 0 : list.length]; - for (int i = 0; i < list.length; ++i) { + for (int i = 0; i < result.length; ++i) { result[i] = new TarArchiveEntry(new File(file, list[i])); } From e6e24766377e705e89358fb9cf6253e2e9e645db Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Sat, 22 Aug 2015 19:01:32 +0000 Subject: [PATCH 178/189] COMPRESS-321 X7875_NewUnix doesn't handle centra directory correctly git-svn-id: https://svn.apache.org/repos/asf/commons/proper/compress/trunk@1697106 13f79535-47bb-0310-9956-ffa450edef68 --- src/changes/changes.xml | 4 +++ .../compress/archivers/zip/X7875_NewUnix.java | 20 ++++++++++----- .../archivers/zip/X7875_NewUnixTest.java | 25 +++---------------- 3 files changed, 21 insertions(+), 28 deletions(-) diff --git a/src/changes/changes.xml b/src/changes/changes.xml index 55a9dc122b4..18c6cf8e926 100644 --- a/src/changes/changes.xml +++ b/src/changes/changes.xml @@ -44,6 +44,10 @@ The type attribute can be add,update,fix,remove. + + ArrayIndexOutOfBoundsException when InfoZIP type 7875 extra + fields are read from the central directory. 
+ + * Local-header version: + * * Value Size Description * ----- ---- ----------- * 0x7875 Short tag for this extra block type ("ux") @@ -41,11 +43,19 @@ * UID Variable UID for this entry (little endian) * GIDSize 1 byte Size of GID field * GID Variable GID for this entry (little endian) + * + * Central-header version: + * + * Value Size Description + * ----- ---- ----------- + * 0x7855 Short tag for this extra block type ("Ux") + * TSize Short total data size for this block (0) * * @since 1.5 */ public class X7875_NewUnix implements ZipExtraField, Cloneable, Serializable { private static final ZipShort HEADER_ID = new ZipShort(0x7875); + private static final ZipShort ZERO = new ZipShort(0); private static final BigInteger ONE_THOUSAND = BigInteger.valueOf(1000); private static final long serialVersionUID = 1L; @@ -134,7 +144,7 @@ public ZipShort getLocalFileDataLength() { * @return a ZipShort for the length of the data of this extra field */ public ZipShort getCentralDirectoryLength() { - return getLocalFileDataLength(); // No different than local version. + return ZERO; } /** @@ -181,7 +191,7 @@ public byte[] getLocalFileDataData() { * @return get the data */ public byte[] getCentralDirectoryData() { - return getLocalFileDataData(); + return new byte[0]; } /** @@ -210,14 +220,12 @@ public void parseFromLocalFileData( } /** - * Doesn't do anything special since this class always uses the - * same data in central directory and local file data. + * Doesn't do anything since this class doesn't store anything + * inside the central directory. 
*/ public void parseFromCentralDirectoryData( byte[] buffer, int offset, int length ) throws ZipException { - reset(); - parseFromLocalFileData(buffer, offset, length); } /** diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/X7875_NewUnixTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/X7875_NewUnixTest.java index 97b87e443ea..03326f13cfc 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/X7875_NewUnixTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/X7875_NewUnixTest.java @@ -27,6 +27,7 @@ import java.util.zip.ZipException; import static org.apache.commons.compress.AbstractTestCase.getFile; +import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -207,13 +208,6 @@ private void parseReparse( assertEquals(expectedUID, xf.getUID()); assertEquals(expectedGID, xf.getGID()); - // Initial central parse (init with garbage to avoid defaults causing test to pass). - xf.setUID(54321); - xf.setGID(12345); - xf.parseFromCentralDirectoryData(expected, 0, expected.length); - assertEquals(expectedUID, xf.getUID()); - assertEquals(expectedGID, xf.getGID()); - xf.setUID(uid); xf.setGID(gid); if (expected.length < 5) { @@ -239,22 +233,9 @@ private void parseReparse( assertEquals(expectedUID, xf.getUID()); assertEquals(expectedGID, xf.getGID()); - // Do the same as above, but with Central Directory data: - xf.setUID(uid); - xf.setGID(gid); - if (expected.length < 5) { - // We never emit zero-length entries. - assertEquals(5, xf.getCentralDirectoryLength().getValue()); - } else { - assertEquals(expected.length, xf.getCentralDirectoryLength().getValue()); - } + assertEquals(0, xf.getCentralDirectoryLength().getValue()); result = xf.getCentralDirectoryData(); - if (expected.length < 5) { - // We never emit zero-length entries. 
- assertTrue(Arrays.equals(new byte[]{1,1,0,1,0}, result)); - } else { - assertTrue(Arrays.equals(expected, result)); - } + assertArrayEquals(new byte[0], result); // And now we re-parse: xf.parseFromCentralDirectoryData(result, 0, result.length); From d85561d9017d4fb0d4fa0a75ca97d617aaae549b Mon Sep 17 00:00:00 2001 From: Stefan Bodewig Date: Thu, 27 Aug 2015 21:30:24 +0200 Subject: [PATCH 179/189] fix SCM coordinates --- pom.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index f9fa16239e4..bae00b752da 100644 --- a/pom.xml +++ b/pom.xml @@ -140,9 +140,9 @@ jar, tar, zip, dump, 7z, arj. - scm:svn:http://svn.apache.org/repos/asf/commons/proper/compress/trunk - scm:svn:https://svn.apache.org/repos/asf/commons/proper/compress/trunk - http://svn.apache.org/repos/asf/commons/proper/compress/trunk + scm:git:http://git-wip-us.apache.org/repos/asf/commons-compress.git + scm:git:https://git-wip-us.apache.org/repos/asf/commons-compress.git + https://git-wip-us.apache.org/repos/asf?p=commons-compress.git From 6041815da486afd4753209f37774a364d6623831 Mon Sep 17 00:00:00 2001 From: Bear Giles Date: Thu, 3 Sep 2015 21:55:02 -0600 Subject: [PATCH 180/189] Added preliminary support to recognize PKWARE strong encryption headers. The headers are not decoded and one of the unit tests fails. 
--- .../archivers/zip/ExtraFieldUtils.java | 5 + .../archivers/zip/EncryptedArchiveTest.java | 100 ++++++++++++++++++ 2 files changed, 105 insertions(+) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.java b/src/main/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.java index 21cddf384e7..541421aa271 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.java @@ -46,6 +46,11 @@ public class ExtraFieldUtils { register(UnicodePathExtraField.class); register(UnicodeCommentExtraField.class); register(Zip64ExtendedInformationExtraField.class); + register(X0014_X509Certificates.class); + register(X0015_CertificateIdForFile.class); + register(X0016_CertificateIdForCentralDirectory.class); + register(X0017_StrongEncryptionHeader.class); + register(X0019_EncryptionRecipientCertificateList.class); } /** diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/EncryptedArchiveTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/EncryptedArchiveTest.java index a98850c2ec4..b0928bfa62b 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/EncryptedArchiveTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/EncryptedArchiveTest.java @@ -75,4 +75,104 @@ public void testReadPasswordEncryptedEntryViaStream() } } } + + public void testReadPkwareEncryptedEntryViaZipFile() + throws IOException { + System.out.println("A"); + File file = getFile("pkware-encrypted.zip"); + ZipFile zf = null; + try { + zf = new ZipFile(file); + ZipArchiveEntry zae = zf.getEntry("LICENSE.txt"); + assertTrue(zae.getGeneralPurposeBit().usesEncryption()); + assertTrue(zae.getGeneralPurposeBit().usesStrongEncryption()); + assertFalse(zf.canReadEntryData(zae)); + try { + zf.getInputStream(zae); + fail("expected an exception"); + } catch (UnsupportedZipFeatureException ex) { + 
assertSame(UnsupportedZipFeatureException.Feature.ENCRYPTION, + ex.getFeature()); + } + } finally { + ZipFile.closeQuietly(zf); + } + } + + public void testReadPkwareEncryptedEntryViaStream() + throws IOException { + System.out.println("B"); + File file = getFile("pkware-encrypted.zip"); + ZipArchiveInputStream zin = null; + try { + zin = new ZipArchiveInputStream(new FileInputStream(file)); + ZipArchiveEntry zae = zin.getNextZipEntry(); + assertEquals("LICENSE.txt", zae.getName()); + assertTrue(zae.getGeneralPurposeBit().usesEncryption()); + assertTrue(zae.getGeneralPurposeBit().usesStrongEncryption()); + assertFalse(zin.canReadEntryData(zae)); + try { + byte[] buf = new byte[1024]; + zin.read(buf, 0, buf.length); + fail("expected an exception"); + } catch (UnsupportedZipFeatureException ex) { + assertSame(UnsupportedZipFeatureException.Feature.ENCRYPTION, + ex.getFeature()); + } + } finally { + if (zin != null) { + zin.close(); + } + } + } + + public void testReadPkwareFullyEncryptedEntryViaZipFile() + throws IOException { + System.out.println("C"); + File file = getFile("pkware-fully-encrypted.zip"); + ZipFile zf = null; + try { + zf = new ZipFile(file); + ZipArchiveEntry zae = zf.getEntry("1"); + assertTrue(zae.getGeneralPurposeBit().usesEncryption()); + assertTrue(zae.getGeneralPurposeBit().usesStrongEncryption()); + assertFalse(zf.canReadEntryData(zae)); + try { + zf.getInputStream(zae); + fail("expected an exception"); + } catch (UnsupportedZipFeatureException ex) { + assertSame(UnsupportedZipFeatureException.Feature.ENCRYPTION, + ex.getFeature()); + } + } finally { + ZipFile.closeQuietly(zf); + } + } + + public void testReadPkwareFullyEncryptedEntryViaStream() + throws IOException { + System.out.println("D"); + File file = getFile("pkware-fully-encrypted.zip"); + ZipArchiveInputStream zin = null; + try { + zin = new ZipArchiveInputStream(new FileInputStream(file)); + ZipArchiveEntry zae = zin.getNextZipEntry(); + assertEquals("1", zae.getName()); + 
assertTrue(zae.getGeneralPurposeBit().usesEncryption()); + assertTrue(zae.getGeneralPurposeBit().usesStrongEncryption()); + assertFalse(zin.canReadEntryData(zae)); + try { + byte[] buf = new byte[1024]; + zin.read(buf, 0, buf.length); + fail("expected an exception"); + } catch (UnsupportedZipFeatureException ex) { + assertSame(UnsupportedZipFeatureException.Feature.ENCRYPTION, + ex.getFeature()); + } + } finally { + if (zin != null) { + zin.close(); + } + } + } } From 2b7b3ff18faf0e4fe3d4033968f7fd726f6959e0 Mon Sep 17 00:00:00 2001 From: Bear Giles Date: Thu, 3 Sep 2015 21:56:12 -0600 Subject: [PATCH 181/189] Preliminary support for PKWARE strong encryption headers. Nothing is done with the values and one of the unit test fails. --- .../archivers/zip/X0014_X509Certificates.java | 154 +++++++++++++++++ .../zip/X0015_CertificateIdForFile.java | 151 +++++++++++++++++ ...0016_CertificateIdForCentralDirectory.java | 152 +++++++++++++++++ .../zip/X0017_StrongEncryptionHeader.java | 157 +++++++++++++++++ ...19_EncryptionRecipientCertificateList.java | 160 ++++++++++++++++++ src/test/resources/pkware-encrypted.zip | Bin 0 -> 10399 bytes src/test/resources/pkware-fully-encrypted.zip | Bin 0 -> 12992 bytes 7 files changed, 774 insertions(+) create mode 100644 src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java create mode 100644 src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java create mode 100644 src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java create mode 100644 src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java create mode 100644 src/main/java/org/apache/commons/compress/archivers/zip/X0019_EncryptionRecipientCertificateList.java create mode 100644 src/test/resources/pkware-encrypted.zip create mode 100644 src/test/resources/pkware-fully-encrypted.zip diff --git 
a/src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java new file mode 100644 index 00000000000..1c60fe21608 --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java @@ -0,0 +1,154 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.zip; + +/** + * PKCS#7 Store for X.509 Certificates (0x0014): + * + * This field MUST contain information about each of the certificates + * files may be signed with. When the Central Directory Encryption + * feature is enabled for a ZIP file, this record will appear in + * the Archive Extra Data Record, otherwise it will appear in the + * first central directory record and will be ignored in any + * other record. + * + * Note: all fields stored in Intel low-byte/high-byte order. + * + *

    + *         Value     Size     Description
    + *         -----     ----     -----------
    + * (Store) 0x0014    2 bytes  Tag for this "extra" block type
    + *         TSize     2 bytes  Size of the store data
    + *         TData     TSize    Data about the store
    + * 
    + * + * @NotThreadSafe + */ +public class X0014_X509Certificates implements ZipExtraField { + private static final ZipShort HEADER_ID = new ZipShort(0x0014); + private static final long serialVersionUID = 1L; + + /** + * Get the header id. + * @return the header id + */ + public ZipShort getHeaderId() { + return HEADER_ID; + } + + /** + * Extra field data in local file data - without + * Header-ID or length specifier. + */ + private byte[] localData; + + /** + * Set the extra field data in the local file data - + * without Header-ID or length specifier. + * @param data the field data to use + */ + public void setLocalFileDataData(byte[] data) { + localData = ZipUtil.copy(data); + } + + /** + * Get the length of the local data. + * @return the length of the local data + */ + public ZipShort getLocalFileDataLength() { + return new ZipShort(localData != null ? localData.length : 0); + } + + /** + * Get the local data. + * @return the local data + */ + public byte[] getLocalFileDataData() { + return ZipUtil.copy(localData); + } + + /** + * Extra field data in central directory - without + * Header-ID or length specifier. + */ + private byte[] centralData; + + /** + * Set the extra field data in central directory. + * @param data the data to use + */ + public void setCentralDirectoryData(byte[] data) { + centralData = ZipUtil.copy(data); + } + + /** + * Get the central data length. + * If there is no central data, get the local file data length. + * @return the central data length + */ + public ZipShort getCentralDirectoryLength() { + if (centralData != null) { + return new ZipShort(centralData.length); + } + return getLocalFileDataLength(); + } + + /** + * Get the central data. + * @return the central data if present, else return the local file data + */ + public byte[] getCentralDirectoryData() { + if (centralData != null) { + return ZipUtil.copy(centralData); + } + return getLocalFileDataData(); + } + + /** + * @param data the array of bytes. 
+ * @param offset the source location in the data array. + * @param length the number of bytes to use in the data array. + * @see ZipExtraField#parseFromLocalFileData(byte[], int, int) + */ + public void parseFromLocalFileData(byte[] data, int offset, int length) { + byte[] tmp = new byte[length]; + + System.out.println("Field: 0x0014"); + System.arraycopy(data, offset, tmp, 0, length); + setLocalFileDataData(tmp); + } + + /** + * @param data the array of bytes. + * @param offset the source location in the data array. + * @param length the number of bytes to use in the data array. + * @see ZipExtraField#parseFromCentralDirectoryData(byte[], int, int) + */ + public void parseFromCentralDirectoryData(byte[] data, int offset, + int length) { + System.out.println("Field: 0x0014"); + byte[] tmp = new byte[length]; + System.arraycopy(data, offset, tmp, 0, length); + setCentralDirectoryData(tmp); + if (localData == null) { + setLocalFileDataData(tmp); + } + } + +} diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java new file mode 100644 index 00000000000..9cdeada5a5a --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java @@ -0,0 +1,151 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.zip; + +/** + * X.509 Certificate ID and Signature for individual file (0x0015): + * + * This field contains the information about which certificate in + * the PKCS#7 store was used to sign a particular file. It also + * contains the signature data. This field can appear multiple + * times, but can only appear once per certificate. + * + * Note: all fields stored in Intel low-byte/high-byte order. + * + *
    + *         Value     Size     Description
    + *         -----     ----     -----------
    + * (CID)   0x0015    2 bytes  Tag for this "extra" block type
    + *         TSize     2 bytes  Size of data that follows
    + *         TData     TSize    Signature Data
    + * 
    + * + * @NotThreadSafe + */ +public class X0015_CertificateIdForFile implements ZipExtraField { + private static final ZipShort HEADER_ID = new ZipShort(0x0015); + private static final long serialVersionUID = 1L; + + /** + * Get the header id. + * @return the header id + */ + public ZipShort getHeaderId() { + return HEADER_ID; + } + + /** + * Extra field data in local file data - without + * Header-ID or length specifier. + */ + private byte[] localData; + + /** + * Set the extra field data in the local file data - + * without Header-ID or length specifier. + * @param data the field data to use + */ + public void setLocalFileDataData(byte[] data) { + localData = ZipUtil.copy(data); + } + + /** + * Get the length of the local data. + * @return the length of the local data + */ + public ZipShort getLocalFileDataLength() { + return new ZipShort(localData != null ? localData.length : 0); + } + + /** + * Get the local data. + * @return the local data + */ + public byte[] getLocalFileDataData() { + return ZipUtil.copy(localData); + } + + /** + * Extra field data in central directory - without + * Header-ID or length specifier. + */ + private byte[] centralData; + + /** + * Set the extra field data in central directory. + * @param data the data to use + */ + public void setCentralDirectoryData(byte[] data) { + centralData = ZipUtil.copy(data); + } + + /** + * Get the central data length. + * If there is no central data, get the local file data length. + * @return the central data length + */ + public ZipShort getCentralDirectoryLength() { + if (centralData != null) { + return new ZipShort(centralData.length); + } + return getLocalFileDataLength(); + } + + /** + * Get the central data. + * @return the central data if present, else return the local file data + */ + public byte[] getCentralDirectoryData() { + if (centralData != null) { + return ZipUtil.copy(centralData); + } + return getLocalFileDataData(); + } + + /** + * @param data the array of bytes. 
+ * @param offset the source location in the data array. + * @param length the number of bytes to use in the data array. + * @see ZipExtraField#parseFromLocalFileData(byte[], int, int) + */ + public void parseFromLocalFileData(byte[] data, int offset, int length) { + System.out.println("Field: 0x0015"); + byte[] tmp = new byte[length]; + System.arraycopy(data, offset, tmp, 0, length); + setLocalFileDataData(tmp); + } + + /** + * @param data the array of bytes. + * @param offset the source location in the data array. + * @param length the number of bytes to use in the data array. + * @see ZipExtraField#parseFromCentralDirectoryData(byte[], int, int) + */ + public void parseFromCentralDirectoryData(byte[] data, int offset, + int length) { + System.out.println("Field: 0x0015"); + byte[] tmp = new byte[length]; + System.arraycopy(data, offset, tmp, 0, length); + setCentralDirectoryData(tmp); + if (localData == null) { + setLocalFileDataData(tmp); + } + } + +} diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java new file mode 100644 index 00000000000..fa2615bf50a --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java @@ -0,0 +1,152 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.zip; + +/** + * X.509 Certificate ID and Signature for central directory (0x0016): + * + * This field contains the information about which certificate in + * the PKCS#7 store was used to sign the central directory structure. + * When the Central Directory Encryption feature is enabled for a + * ZIP file, this record will appear in the Archive Extra Data Record, + * otherwise it will appear in the first central directory record. + * + * Note: all fields stored in Intel low-byte/high-byte order. + * + *
    + *         Value     Size     Description
    + *         -----     ----     -----------
    + * (CDID)  0x0016    2 bytes  Tag for this "extra" block type
    + *         TSize     2 bytes  Size of data that follows
    + *         TData     TSize    Data
    + * 
    + * + * @NotThreadSafe + */ +public class X0016_CertificateIdForCentralDirectory implements ZipExtraField { + private static final ZipShort HEADER_ID = new ZipShort(0x0016); + private static final long serialVersionUID = 1L; + + /** + * Get the header id. + * @return the header id + */ + public ZipShort getHeaderId() { + return HEADER_ID; + } + + /** + * Extra field data in local file data - without + * Header-ID or length specifier. + */ + private byte[] localData; + + /** + * Set the extra field data in the local file data - + * without Header-ID or length specifier. + * @param data the field data to use + */ + public void setLocalFileDataData(byte[] data) { + localData = ZipUtil.copy(data); + } + + /** + * Get the length of the local data. + * @return the length of the local data + */ + public ZipShort getLocalFileDataLength() { + return new ZipShort(localData != null ? localData.length : 0); + } + + /** + * Get the local data. + * @return the local data + */ + public byte[] getLocalFileDataData() { + return ZipUtil.copy(localData); + } + + /** + * Extra field data in central directory - without + * Header-ID or length specifier. + */ + private byte[] centralData; + + /** + * Set the extra field data in central directory. + * @param data the data to use + */ + public void setCentralDirectoryData(byte[] data) { + centralData = ZipUtil.copy(data); + } + + /** + * Get the central data length. + * If there is no central data, get the local file data length. + * @return the central data length + */ + public ZipShort getCentralDirectoryLength() { + if (centralData != null) { + return new ZipShort(centralData.length); + } + return getLocalFileDataLength(); + } + + /** + * Get the central data. + * @return the central data if present, else return the local file data + */ + public byte[] getCentralDirectoryData() { + if (centralData != null) { + return ZipUtil.copy(centralData); + } + return getLocalFileDataData(); + } + + /** + * @param data the array of bytes. 
+ * @param offset the source location in the data array. + * @param length the number of bytes to use in the data array. + * @see ZipExtraField#parseFromLocalFileData(byte[], int, int) + */ + public void parseFromLocalFileData(byte[] data, int offset, int length) { + System.out.println("Field: 0x0016"); + byte[] tmp = new byte[length]; + System.arraycopy(data, offset, tmp, 0, length); + setLocalFileDataData(tmp); + } + + /** + * @param data the array of bytes. + * @param offset the source location in the data array. + * @param length the number of bytes to use in the data array. + * @see ZipExtraField#parseFromCentralDirectoryData(byte[], int, int) + */ + public void parseFromCentralDirectoryData(byte[] data, int offset, + int length) { + System.out.println("Field: 0x0016"); + byte[] tmp = new byte[length]; + System.arraycopy(data, offset, tmp, 0, length); + setCentralDirectoryData(tmp); + if (localData == null) { + setLocalFileDataData(tmp); + } + } + +} diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java new file mode 100644 index 00000000000..7c288585e19 --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java @@ -0,0 +1,157 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.zip; + +/** + * Strong Encryption Header (0x0017) + * + *
    + * Value     Size     Description
    + * -----     ----     -----------
    + * 0x0017    2 bytes  Tag for this "extra" block type
    + * TSize     2 bytes  Size of data that follows
    + * Format    2 bytes  Format definition for this record
    + * AlgID     2 bytes  Encryption algorithm identifier
    + * Bitlen    2 bytes  Bit length of encryption key
    + * Flags     2 bytes  Processing flags
    + * CertData  TSize-8  Certificate decryption extra field data
    + *                    (refer to the explanation for CertData
    + *                     in the section describing the 
    + *                     Certificate Processing Method under 
    + *                     the Strong Encryption Specification)
    + * 
    + * + * See the section describing the Strong Encryption Specification + * for details. Refer to the section in this document entitled + * "Incorporating PKWARE Proprietary Technology into Your Product" + * for more information. + * + * @NotThreadSafe + */ +public class X0017_StrongEncryptionHeader implements ZipExtraField { + private static final ZipShort HEADER_ID = new ZipShort(0x0017); + private static final long serialVersionUID = 1L; + + /** + * Get the header id. + * @return the header id + */ + public ZipShort getHeaderId() { + return HEADER_ID; + } + + /** + * Extra field data in local file data - without + * Header-ID or length specifier. + */ + private byte[] localData; + + /** + * Set the extra field data in the local file data - + * without Header-ID or length specifier. + * @param data the field data to use + */ + public void setLocalFileDataData(byte[] data) { + localData = ZipUtil.copy(data); + } + + /** + * Get the length of the local data. + * @return the length of the local data + */ + public ZipShort getLocalFileDataLength() { + return new ZipShort(localData != null ? localData.length : 0); + } + + /** + * Get the local data. + * @return the local data + */ + public byte[] getLocalFileDataData() { + return ZipUtil.copy(localData); + } + + /** + * Extra field data in central directory - without + * Header-ID or length specifier. + */ + private byte[] centralData; + + /** + * Set the extra field data in central directory. + * @param data the data to use + */ + public void setCentralDirectoryData(byte[] data) { + centralData = ZipUtil.copy(data); + } + + /** + * Get the central data length. + * If there is no central data, get the local file data length. + * @return the central data length + */ + public ZipShort getCentralDirectoryLength() { + if (centralData != null) { + return new ZipShort(centralData.length); + } + return getLocalFileDataLength(); + } + + /** + * Get the central data. 
+ * @return the central data if present, else return the local file data + */ + public byte[] getCentralDirectoryData() { + if (centralData != null) { + return ZipUtil.copy(centralData); + } + return getLocalFileDataData(); + } + + /** + * @param data the array of bytes. + * @param offset the source location in the data array. + * @param length the number of bytes to use in the data array. + * @see ZipExtraField#parseFromLocalFileData(byte[], int, int) + */ + public void parseFromLocalFileData(byte[] data, int offset, int length) { + System.out.println("Field: 0x0017"); + byte[] tmp = new byte[length]; + System.arraycopy(data, offset, tmp, 0, length); + setLocalFileDataData(tmp); + } + + /** + * @param data the array of bytes. + * @param offset the source location in the data array. + * @param length the number of bytes to use in the data array. + * @see ZipExtraField#parseFromCentralDirectoryData(byte[], int, int) + */ + public void parseFromCentralDirectoryData(byte[] data, int offset, + int length) { + System.out.println("Field: 0x0017"); + byte[] tmp = new byte[length]; + System.arraycopy(data, offset, tmp, 0, length); + setCentralDirectoryData(tmp); + if (localData == null) { + setLocalFileDataData(tmp); + } + } + +} diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0019_EncryptionRecipientCertificateList.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0019_EncryptionRecipientCertificateList.java new file mode 100644 index 00000000000..f8793a6d05b --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0019_EncryptionRecipientCertificateList.java @@ -0,0 +1,160 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.zip; + +/** + * PKCS#7 Encryption Recipient Certificate List (0x0019) + * + * This field MAY contain information about each of the certificates + * used in encryption processing and it can be used to identify who is + * allowed to decrypt encrypted files. This field should only appear + * in the archive extra data record. This field is not required and + * serves only to aid archive modifications by preserving public + * encryption key data. Individual security requirements may dictate + * that this data be omitted to deter information exposure. + * + * Note: all fields stored in Intel low-byte/high-byte order. + * + *
    + *          Value     Size     Description
    + *          -----     ----     -----------
    + * (CStore) 0x0019    2 bytes  Tag for this "extra" block type
    + *          TSize     2 bytes  Size of the store data
    + *          Version   2 bytes  Format version number - must be 0x0001 at this time
    + *          CStore    (var)    PKCS#7 data blob
    + * 
    + * + * See the section describing the Strong Encryption Specification + * for details. Refer to the section in this document entitled + * "Incorporating PKWARE Proprietary Technology into Your Product" + * for more information. + * + * @NotThreadSafe + */ +public class X0019_EncryptionRecipientCertificateList implements ZipExtraField { + private static final ZipShort HEADER_ID = new ZipShort(0x0019); + private static final long serialVersionUID = 1L; + + /** + * Get the header id. + * @return the header id + */ + public ZipShort getHeaderId() { + return HEADER_ID; + } + + /** + * Extra field data in local file data - without + * Header-ID or length specifier. + */ + private byte[] localData; + + /** + * Set the extra field data in the local file data - + * without Header-ID or length specifier. + * @param data the field data to use + */ + public void setLocalFileDataData(byte[] data) { + localData = ZipUtil.copy(data); + } + + /** + * Get the length of the local data. + * @return the length of the local data + */ + public ZipShort getLocalFileDataLength() { + return new ZipShort(localData != null ? localData.length : 0); + } + + /** + * Get the local data. + * @return the local data + */ + public byte[] getLocalFileDataData() { + return ZipUtil.copy(localData); + } + + /** + * Extra field data in central directory - without + * Header-ID or length specifier. + */ + private byte[] centralData; + + /** + * Set the extra field data in central directory. + * @param data the data to use + */ + public void setCentralDirectoryData(byte[] data) { + centralData = ZipUtil.copy(data); + } + + /** + * Get the central data length. + * If there is no central data, get the local file data length. + * @return the central data length + */ + public ZipShort getCentralDirectoryLength() { + if (centralData != null) { + return new ZipShort(centralData.length); + } + return getLocalFileDataLength(); + } + + /** + * Get the central data. 
+ * @return the central data if present, else return the local file data + */ + public byte[] getCentralDirectoryData() { + if (centralData != null) { + return ZipUtil.copy(centralData); + } + return getLocalFileDataData(); + } + + /** + * @param data the array of bytes. + * @param offset the source location in the data array. + * @param length the number of bytes to use in the data array. + * @see ZipExtraField#parseFromLocalFileData(byte[], int, int) + */ + public void parseFromLocalFileData(byte[] data, int offset, int length) { + System.out.println("Field: 0x0019"); + byte[] tmp = new byte[length]; + System.arraycopy(data, offset, tmp, 0, length); + setLocalFileDataData(tmp); + } + + /** + * @param data the array of bytes. + * @param offset the source location in the data array. + * @param length the number of bytes to use in the data array. + * @see ZipExtraField#parseFromCentralDirectoryData(byte[], int, int) + */ + public void parseFromCentralDirectoryData(byte[] data, int offset, + int length) { + System.out.println("Field: 0x0019"); + byte[] tmp = new byte[length]; + System.arraycopy(data, offset, tmp, 0, length); + setCentralDirectoryData(tmp); + if (localData == null) { + setLocalFileDataData(tmp); + } + } + +} diff --git a/src/test/resources/pkware-encrypted.zip b/src/test/resources/pkware-encrypted.zip new file mode 100644 index 0000000000000000000000000000000000000000..52d9a599fb9beafa2619b435878e984ab2e09c96 GIT binary patch literal 10399 zcmdsdcT`hdvv(2#i1d!sND-ta^j@U*9z|*hy%Rzcl-`jlAOcdQBhsXbRO!-`ru42T zO{EFkppU*j_df4;|NGXuZ&vo%XYZLaduN}S`OVCVjye`t3Lpo#1fY{(QEma7`qh#G z0H)jk06qW!&`_0E)Y4bvMf;#h05@e?Q$Vl6+eu>5t^}jZhNVFO02YA61^@)fMFM)_ zZNk%bS>PMi>xt^=@%K$X>41oxABa!hc}DKuiQvXjd3qgH+!Xrgpce!@l-ewNY=l1- zb2+y6L(|C6FO;Cu}4V5T)*hD zFSL$1W?@$!Zr%~=BN|kz9xUR;qQEwl$l;th?xZyYVzvzqqyRvG#heVp2AyOI3g4#1 z>Wk(Q3l;=TffswW!LSqn0ABNOF~vitq@WbfVmm%`zgnJ*^Y$!V2A$D&))5b3ddjda zIraM!+(pZUgPyp$-LGOk$6#+*J#}Db;@wxwo8c;)=&;t|XfNcLeSMOi)cU=O@>I?P 
zi?gcIXTm^>)F`PEe*hgNCQXpVCY&+U;V@w@njmnB4e#NoVn`gx+FO5d1W!{GUzd$e zZKw(3XFih*<8Ju|y~$JU?VZ$H22p{G9P;<*wet(#dZlg<-&aTF>S(Q@wsvyL9Td%| zaEVVE;p&nFzNAB>cKKBzkNc+~>JgtwnYl>w&&~$DRmmw?E=<~l+i^EpeIFc!N$0sO z8kZN`{?wx|_2KK81%6f_V9m_>RBW~1qp#->rBPeCfdZ}FJ~{9=7-aSd%&rVtj*8Ym z_*t(UeZKe1z8HYK5ps(e`AR@(2>2-GoHK*Zb@*jK-iXOpt#T!bC1TBY$}hskAIgW7 zRg!iR>aF}9?V&=-=jQgdWZ&>ashrGVQoj-}F7|B}nzCDFMZ0N1 zRr6a3htnOgc@~Y*rMD5pI_mtZc3rCEx8=tkdqB@)$?nXGigw}9JFxe%^bL)zsW%pz z*Yoq$>iJuHm#^*m2+(AO(&>jFPH8)_!Yqt9Mk%Xs;UO9!iJEdOXf6{ZUI&p3Z|3}( zVO&rM`Mj&rM9dJQA4mQeHZ=|wI3cJdsNC2u&QhBpUEZLS;-=$5jHi&yrAgnL^G$CG zDugySgG!?_Vz!qku20$KjZ)K3Dg>U!8NJ4z_l<8hZz9C4C)CZ$%v~QHD`%x-lZ;&V zeUqKaWhKD-)gpZLJv&#UUY^X%*^^0?Asn}tbBmS4YRd;#L9!ojY)E7 QL>jD2W$ znhU-C-I<+p^-F4IW8k9K;LuC22o<)m8-qN~xl(8~DK7d_o#z=7%`zg#iUKKhSk0NW zl7eE6I!B|l%@R>=1vgvyX845%TO40%W%vv>*6`oU8IL+T{3Zadd>-5G$%KD5|7E40 zmojoh1Jq~zR?72e-l(BE>ST=pNdG`MiMXOWG>k-7m|SM3ldK=71>npi8)vvCAZif1 zmQx>WKXy0Ef5|5>8SmO2ScqpD^x_5n?tDm<@%J#V-(o#b%d zZ~V=78)qVB-?;4le=@f9z(t}NxeQjQcz>smXBBFlYh+Mjj(_U_i=4$)2PXUVB& z)w`Jzmy63CH%xT5BReLg+FftP>nv1~B&`hBG~Xbu*w~an`?&+9U#D%I52T%>DeMRn z?;t<8zU86j@BPyM-Q{`hQTSmqNOtH+jXzFL55&eP5Scfzkgp`?)9-TcN!;55&v3Ys z)?qHck0im~)252L>~mtJC+4MhC%>36>z$Co#gC$`JbCq|yhsPe;U`sUst1`bw^PRZ z$1Z0QF(wNYt}0j0;Dl-Vt9fMPI5)REC%q->H6(&B?@~!!XE(Xh5A2(*e@K_Omm-(N zx>8&k@IejT6uf0$x>5u?cI;gfxwXY`+{Y5+1mJYRiI_UQ{_X2YwucK zi;oPeS0d$BjJLtIgjTZDkx>D|8sQ6^d|P{m@7L66&xP^&H{L1t(V$~_j?#>)xR*ks zXedCJTCS|U)NAlv_4%U9-m#0*Df+^W+|YL}F2O0JDa(~@9<`(UHE@ZSQPC*YP!XXZ zjuoQq^oSkhNf<#3$rG-*va`D)I7i%-sgj^sO9zX{rCV)lZ((z>IJrAVkld|PPbzOj zX)zd{){zi>`f?K^VtcR-Sb%=HbA)l&qJ1|k>G+~54lr-bbD}Sk>SsI?R zLeoweUaux=v5I16tO&diy`qi}j0V`;I7j6(sE9U^n*OZKn%AxJ6lS?Hm+!_5O-6if ztKU!_*!xK-PB0UJ$8DryVp^Jf5|M7> zGJlH)-O-lqNMrLT?IthiCUpu0Hh4iB|3G6)9qt_KSt^}Yu^+T-0Vyxo{vl3xGDEpyGny`I?QmWov1BViyt1A}#kKd%1`oGh@ zlt+*$d{q93IOfjR<}TEPdvu^mJ}C@IcPt>7#5Nu;0S~hDdicp62OsRp&+Lwk-D&)i zq!C!}rdYkrS9y0Ec#LF^NW;QarQr;|tD16&lK3pPEi9H_M)Hs=@@^x0deY)YI9h!T zImJMl!q6t-FSK0;T2!FYF@G0Q=^}|1MhJO(erP}1G^!=ErCt 
zBDjMdrV&|Q$$XoX#^uKWU#VX>+;z)Lk=oy>dw>ivaU_YXwPD#4`99F~<{>o_+2CW0 zZ#+e#DAxDJ=DLiak)pJN_nY@KSlR7*VRSVrwefl7{4E`RG>-ebQ7bgGFK-b*BJeY> z>??!sr;Njd$X63cG8_CH;DX|fi2c@9F-tsh zoDYM>`67sn1-ue&Jb$jLFrQ7co4tUg*~GVaDp}uYf)dey{R6o<*Y7%zZ<6M)s(V1UkkZm^72Y3G&L+s z`TZbkOFGXB^*eN0qt#T!8omtN*bWNQmK>+3Zzd~Sr_veSZYlUu{M%4ar-vnq3|?xf%4iUSC7$p;M^~3g{yNob|DMZC>uCY*k zk*TvteERKDlX3uRM9$Th<9P94Ms)B!sH@C2;#KJhIbrzwO$GQko@3IsnsOR0Uozk4 z8^b%z;$nM3%5%P3@CvI^)4tY@SZ|f7CNqczNxOm(Ys`sYzb7yuNL#;x;kE1yut7gD z1tCtqq~7v@(Qxd|;PW&&0+qNlQC`IMPI8M`tS z?b>0n%*kpxT2{JbV8)edLlLbX2LY?wEh@#YQJ)kCgb-MhwvLJY93lu-zspGx8Sr>LWyZ=Odt2QK-0CIrPx z(5}%4y5I+W1Z!Eyfum4N&p!C2AU}_AopZJ(_;uAs@mHw2<8w8df91|U@Mo>$HX5sS=PM)maP*Cc?L!&t~I>IU8%=(oK*kQT6)RM8T3g%9AjAZ{ylG zpVsk0k11Cl+X20JV1YdQy=*H5pZ4~#sqdDrzVzI4VZEzCQ)u92{uu{E6d*P(y8*5z zk$4;Fjh0N9jMKXn!FVRi9Y`Pojq;^T5-=^GAI1HCa1kO$9$%= zq>e6gp8BVEgz39ng_^-KZF$Eh_{6&s*rt(^nkze+i$k)R?F`sb+U;yrUE8lEUCL9+ zXj*lj2I{|bPBT;%2Lz4T*-TaV#o4pSF&cI__ApA{I>Q2j4QH7NNoYWD5j%R1$$ zP=p2!L-K({dU51zlA=1)d=xLUH-#0!G@sFlh7su!1qykmfGwrI{poOX-8-?hrc@PI z^_u9D*`Mcx8HoCGegk5gzwOCu%Z!INE)Nt+*N|CI_#RjF*{C&si8|BX>zA+jYN%JF z)4sO5)P6k%&9|``k^a5=@h~m^3o+&ec2gDPK{k=qpnJDCSgOK|vC=B*RQ`q1sdaVD zIwwO5jv+8|4S2HSB|9Z;#X+`{KEq1HfFHC`FOs)%`10)%!i(YHv0Ok^BP9B19k^jQ zB9LAFzILIv$Lm1`t<9TIpiM*>KO3wJL2X|9Hdysy2qvsV$Qzt;W2!uQ2*&f3UPmxFNFC=8)19 zeD_(tz)q9g%rnz2{qYIYHUSXq5-O8)y|VU2NQrdwNv>^veg+{H(Vdbjxort~j=^m- z6C~nD)`*w3yXlskixccTIp*La!}>I3e^-Sm>4cbN3H@Hx0J2LocfYu5ZDhoKSuyNH zqn5h6@>jk}-OiinLGPdA=4ufcBob^5+XoGYlg{!(JYP_apbswxS*%$2Dj`W}y3~=T2-!NGj3fxYF~SPWZQPg{Mr8GKvjK zvv~#`8QywD$CK%>r?6|LudTM~R6pItb7vvJI+wZ(aeGV=)Re$QL=h*HkkmAs5xp@S z4o<#db0@yNCYQ+LaL=@SJkD(1%X+coS&i8_=TvV;R!2m|p!u0dGq2OfZ=G(BHUov{ z!fzex^Bg6mh{>ee;O=XmKEUsMsxHAgifc>{aOgh;z77qUg$XA;s{KZ$7f?1Vpf=1r zgtHt;WqRxv02+TUt<3wuHJ^i%n?8LiHy2BoW1%=_4=|%u7EIZtral^X$qtH?Jird+ z(@_V4WC8!Kypu_Rse=H30LVp$`TSArx%}585I_S6lpXq@zBU51QLXB4!}MHEE{=$AsB}oWT+2gfYMzI zE|XK}`#K|B(AG$ZyerDh6@@_CySl)bpbQsed~&MaWCN6^2O6SfjrMj$Il>sB^gkIW 
zb*xbyt}Y0OmMayGAA<))VFcGNG1s#i=_qPCfCxnLw1O`!cv9kBFxAH_dLG(Q> zJ&-7SOKTKFUJk+oQ5Gb@Y*r_(ULdWNQ%7>>zI|Lr4-)`L#4eg<9dj?e+GB<1aT#rq zV6H0**`qx`9iTxe=_dRW=}>6^W&SHRU20ZVsd%TaI4!IC%P+p|&1|gTKO$qqi{6og z8+^CWXNz3NP5cxRZ;bw^A;;yc>oYs;K~b3W~-q0V{(zBCrv9r7RE#)qbJ?m)5pC5+8?#B!zI-2^j#0hc~aRNtWwoNRnh;Ltj{Pe z95I=YYl2*kZK0VVOy+qKoT=)D*RG;wdqyC#_vzTQDphtmgs+rDu5FH<^^J7V-&vs?+Q;1rTnSzL~v`Ay}rGz3pYj+k-UGX`Y!_H@2V~i75%B| zFb?E(2uw^&4DyTfrwEMVD!HN{2v4+~E6Vn}YKw^xd055}rF}`g!^i5;!QPI>s*e7#N>b(;= z)%2ZNR$ZI6o@+*gUK+7aXE$iz^D7#vBtBX|FCiqE@T^S>5GNNwL3YmV2 zh4TIi9EOyo!>ru}i}Q{EPcDtYnX`!Ws~?)qqzxv=jC|fnx(w%#185uAb5~kzQ~Pte zYcgm1JRg6AJ~GcAnttkYMTTY5L*8-*WwM956?uQP(C-!VzlP7=eF zgDAmjl_Bh--M1p!XTOye<%V_0l+^`2&3+p69NLtZ0=9i zH1#}szGgUry_*fYf4g+LROuSA`==)r`fy92GN;1knG71AJfrc8CSCQYdG+(LNRyiE zp@BC=(-tYA;)m@b42b^qXWgrbGX{m_u%M@Db3>QP8`{zW16)uA>-OX+)=XW=lBd4$ z;5X9R9}XVB9^K6ZnX`hp3qi`k~Xm>RTuURiC1TywyFVAjV zp4oZ=3ADG%)xNjrIE*WI91_5w92vi#^9rtBNb*}wJXHc^wJRYv_og>9)}M( z+*jLK=L6ldx|>xZHn&*z-xJSYj_BGB$9^kHVUeD%K8KU)&b{~UIa9&5%x7pY&JAcV zHfIhN2nYg^h?W8P&Yz~%!_UQTpO~)G{kJ3g&oUZ}hk(JLVt;rDei&3#0QTEM{2#%8 zjFtbtt^d^)`zZtRiv&Y-`#61XZ*chY2DR{{(jXI5Ka`N~u^jVF=vB6aNBLjMIbTIm zha*trUc6ga1MC^`kU)`zmFOA6LLK7ojV3JiQ8#j%m@2o4jtcrN?;eU&znV@`dwO@i zL4hfwNhTKOY>Gu&MVH0FBDDB3JDK-j;tYkc3PAh^#Ug~GIV{d z#EPdy6xtklt<%=Vrnc7ms4cgL2(@jqwA$Feupj0v^lzyKdF($yJChQTEUav<;=d`v>E`jT9e_YkdfI=-iE=|g1us@(0`ow*px1NQ zbJ)UJ{t}BsIsI*~d~QetjK@}h7ejy&T|7m6v2QTO%yFRCFUF)`W+1zc$rYZuKVPS|fmPqDUiwv$zYt;u`d7`W;PfVWZsM~E?G8ZLpq%*rn z88?&MAY#(+R2GEe#UH+imE1RJ9!qsIaI@6h9wlkbQsFk+3U|Z}-jP~lW9Q&rpz>m@ zIbF3(9gCJS=X!c#(m`HtjT{j4lFy!qNH24#NNESiD6t2f3(c_*$-k#C=RfMdzTKe* z{hB`87<4Gk2Du+>x7TTx~-{YC!Ebg0|27t7eX&~KCg5b|n=Ej374*l3# zt^9d&n%uY#k35&qnz+uP&|q9_42pdE3q_cEI^yi_4@v59wx$Wx$LahJ6#2j6ST7Jl z2!jxUzhg+4@Wt>qLiE6Rq1-==hk)%LlR8W+3KJBB2?#(11%>%BcdSt6-$6GXkc!gM z*51k5Ll)`k>x#C@ptY>@NaU*U&$xIze_%UC9eek zn7sb45HhUB>8Ak{1mCB1CQ3CTxOUW2K!>$Vxo}vuh#G&y82ZS^FxuOh%DUBu^|{yc zIV8MDRzeC=lXXpJyR1@*Ee|oHTReprFSfRP!`wurHa<0yJSwaIoam~ZvYA3^v=3>a 
zw!2YUMXS(zc0~IdJHOy0MeK;vG(MNvu{)1}SiKq13QOV$A^HS&%@wOosPQth{BveX z-?0}N)ZI$CsQaqohfH3P@aVbhikj(`qdi-;rUvwG<{Va*MiLX?NwCOwlf6%`ESaoM zD*Oj2vd;4kVeFy0ipncZIJd^G!G?{DWDj(8t3!4d3Fha>LT6Ftt@TRUQKSfk>etIh zsoH-*2-amxDun|jxbS*EfB*yq{N24`=RhGBRJIFuO9iF)1p!##-*=!tyy_$YLn`Is zMj;`|Rl|Yjn{N9QMGZd%kbfDGStuwx7>Mg+g}8t_D|?hR677oe{X2%pYq9Y|*#s0Y*IyM0%TD_w8Az12m~&$ z^>2`kFc%VrAz@GfVKFFDOavh;#xE#>fLhyN@=F9#6o#(5=972KxzA${U-(yUW4+e(!ymqC+#)Q87{7|t!n6bf_umTg*P_ZIZ^q@!;O<@gR zF>eIFBO2+f=!q0?LL1$JLlN>q4wn2-ssHBHe*htM|A;t8z~qW-K${D8H*4lA-Z(=N z-0>ATFQ1PT4ub^ytgF7@F9bnJ&Pz)^hNM;&!+?Ek=u;vB_8ntQ`(pidbm z2e;Ju4Ld){WIr@IIGLj=Q3;b)-QY!Qltxomn4^FsE)`jXXqEoQ?7iNsIq2zBW(uN4PVYQa!8e+UyY;Gs=P7;tbb&!$lqnI9AY00SVK0YE^7 zA^_^DTIz9N3ng=h+Vb1tGn(V$x;f!$ss!aOfWRXeV^UaX@1B%*l&UhZt}}mgJB0*` zuKP|goH9d%1)-%KN6{w9nAFSI?y>|6?kd!~X(9amjmz6Hi_)+%MiuHY9OTLVK}gb| zl-g?#?_jttzG3+|kKd#=(tpNhgg~|@9VYdz6x?vubdvsc7CZnQ00b$c#z#^eKou2z zTH|Ud-A*jpWz_}w*JrbTREhwA?PydFstXqZuwnJ=!nmi}RCCWmxYW3#2<*rW0}9Ax zQwg3aAbKwMaGERbs9F>;3}{OrYzv@$``RCEQtr@Nbnds!@V0;fT8f@O5!qH*__};& z#g?5^UdSd4eQDV5S8zVRYHD9XGHPItIy^%9rppexCgdFyPq)YUC=98XvjN+csMo}g zaIWW9Pd4(|I1@x4F}y-MNLgB>E0JJWn)wvh1PI!J73wuTj)C?-G`ib}sOz3$$8ykn z*KW0wrDlcGsgqJxs#q+p?FcdA*jASl#N_<#A114d8?JmjDVji-gY*|+TG}Mp&5hFP_8k1VP+9M=M@b@q_@aR>dL-LD+1*U(WnJtArxgNQ z7KJfoQbUR^qYEAkffWazh-MYdUO(p}nJeMK`jOo|kbYykXFK)V0bOClp>8-1335Zu zPizkSB!`$t*m!hoc;|t)a{KYJ3u6c*^;^ym>o=!I?2nU@nxpP{PNs1V~iD z$Oi{mJX0sKkM2+OsvotUk4A6~%$LP3zX6llU z5}4TE)KCC%Fr&_LLx@sGr#GZnIvE{sP!~wgM#oKE*l^pCbt${r8H9RYNqtYPT%4BS`iY(nPx#A1bP~64OrW}CqB}|!OXoxF zsH9Hm6M|9br-S6FwI2AQANN{iM*{4ilJ{zyw%%lm&!Ge zAs~#<4*M6#(+avfo1zgAR$R_EL&J4+kH8K+)0{F_lar2q&Heh(Vz4;bXuebcUnJ+Q zvW+_T?zKj#p~MDIma)ar%-3umhy;$~b&kU*y0=w-)npY8UWl8n8nMwK`ry=O)<8H;9&o2)5^nmyd@)bk0S;RPCP3=<9`=}+z7f& zWsvaZzcWHB4N|?G@$=pKH;Waxw3|=Nk@)UO*Pbw@r{Y&LS8L`AONq!5#mj|LV!bY0 z*8JZ2Q9!C1Exhx_ZD>Gs+g&{9dWT^-UqI67>7P?!f-LY*h1cyek)YeHeeOSl0wR`% zJrv4n z#RAU0Fo<&V%UZVILUOH$jfX?AGy3=h16@8wb!I5u~k;@x0VdqnEdh6=pITz4{eIU~FZFAg?K 
z5q00~*Ec>E8?>iihXq=`PlY-zX8Y9cj(5X2mk1KNCM%|!(~=Nup*!68@p9~$G&+IG zoYG`eANgoaw?+#$A9IQc+a6Lt>QG^+BezZSwU5ifm(MCuRpHyU;5gt+F`v%b_tMlO ziv*qV%6d=HXGCnZm63|EnuLi92g~;ws*|pg@DQC&L15-}Vc*vb4F~sCLbb7gr7t{Awmn9%jHI_jN*bYB;bv*iU<3Sx7aS!wWV=p)5c-Nh&&* z;Yx>-x`aErK--)$b$2$!PCmsP3_<{1=u5o>ev}Hu~f15R=kB zZ7$!%aYJKEA8HlPmcl!NGffBuR;K2!aBsP2rJRLPX0&Tu(dl|}5X#G3HXhlIb- zAe`Fw7;7pr#@`pOr6i2=BtA&$HuK4iIPAw6$r`wl8i(l46JV2b^TVvT01%Cc?oN6O z3QLUN95xuhGt#_0njCo#bvNl!s&iRYP$mc75Z?2lqLE+a{Nx*upXQFe=@lRWU709Nm4n%T5mL%wHf(|#b!RbA0 zUL}r5#9~9l2L?u>S&}497GftI7Q-2nVPDTk5t3ft#+HklxE7D{@!-!PxejP*wr6El z1~Dr_yG7J5vZ@^@wqw^KD6M~f(}o7|1`i^gTIg&cm>K7|3@dECo~ z;dD0>J#V$l(njpc^>5RBOQQ6vEkj7E@j}*n@Z81KdF^#VjL?R%2m9yj8U8zL4`$No zxUn7@fYOX3hAbOs#Gzs1A0-hyFmZ%_zo!VXZ^4>Sux4+!^P0~^X~+3J`HuE5g7=sB-BqHl&>8tL+iGwFV0{BG^+7u9#kq5f-0>l6_9H;qG z?9vjwac>L=R^!3fM}w;I)xg2t)WpN!elN=mO6+JJ?1ipogW(|~FqVOLko1h2IYERI zrIlF+ErTRpkI=wYVd^~%Ss$ytnG>{kBUP8Vg^Ruh&qEK=_}(dYDLQNVpT&>C!Q1DO&qfk&TOeOof!_DnZL= z(AWckm_&Q5$Z2B**%}2Izy&r}2=^FOwjT6%P5Z`Zy^gKnXh&6UjM&~e+~6(pg&!*= zoLdp~vNG^G0!8o2e-(oWcz*T+eQsP}C?GLA$2CNUs#%HG&wfk`50Dps_ zMI7BcO!Mi8GTDAtxLQ{+>wmnVhz()OPiXJLgT#>s65)BoOKT}&aH1q`UjCjn$M=v*(5NK>C{TMdcPU38Xi_2J>;O${XR%U9?}V&9H0Is$3}3VTiHBY_lQKU0q!TT z3nRo?e5K(f*aYWKdc&&^qIQ-XdP3s$4N4$PV)U;_!j-mR(3h@t`R{w1M+b;7s6wCu zQX7qmBh;@mcV?Q%bKn`?@HP2Vvm$4_wIgV(C#@ooB-}4GR!u^IBvc_R?O+>i!xj3FsMJk?gE6r*4-sR%MS`O7HNiYf~t!CsE9cLfCt1P zU-k$YHmRTtealvh0Db2&pS<5GG3c4>ZpG;lXhIJI2x*P#VI!^va`m_PHnlyTirH&6 z+G$ZJS@2lA#u_gCfEkWP(O872Gu`{K9tlaJKS(gh;rjgTYNZo}Fi|b#Gj+Ny4%XENX1;V*Eiv@kU%h$01oD$WJLkaha*QC$N!iKW`W-WHO1QPhJ@e$`VyAPu}bFpS1^b?&7qRu;hfD1r3{(}R3?D40R+J`HbfSV@Bw$!&3iT@5Lm z*Kh4);89G!L%-E>`-~93S*UT0v0n@|9epFK4}>RZSfvlvWYkDB2I`r9GQl8|_be-i z>IARO3Br_3OA*n)_Vp$gX>Zmmrz}r%G*niGQ=HX4(LWOA$X-WdAOH;6B=SRSCaQ27 z%;8=r^>XNnAxTQ}ZjGU>w#K`zeadzzt5Ghd9tS zAmWvR4)+U;&qU4#MtOjT88V4w`!;ciQSuH><+pXV-!u8nB7>foPRV(;N`v8BHh5Xyc! 
zL8818meM;^L7?<%QmJKsuMIwFzRky}?3 zrFW9+V^y90?dhtBn^qNe?lRUZW=qc8ekH5s-RvO-l`JP_NToS&T>9COVZicFFmg&!eTz0nWud{67ch_)!SBcao%8 zP~xY%kzXGkELn?yh3Ne$szWO%2tVUaCV*{dVO7%Qr;&s9=AN92_vc{t5`G*uJF4gn zPzFv{TMw}{=CW^;xOak#IC&LzY^Y+S|{3vqggTQrj`J@<6$SaQfW+pP@j#i>aA&I6A2~>u%c(2R3a+ zPg-Eg{gu2qHKH^GL$&7XQbVv6+&Vezpv2+g?B05{sz7T(z^42XDm@%@8&MTXftgT9 zZvy53No8KcJR6NzEP({gs3pYkDGb>i9;PydY&@%`LX!u{c?Ki@cr*A|wD)K^NmjUOPY4ZEi0Vb+)-K93nGf#@oV5JRM z)oxS7u0y%;$06fOn&gnu7Uu`Xvm;XW2dn9;R0aC`b#6WKSTDXC6e84D^Q)~sRktHC z&%k!g2{aGA9012Vxm(`$48we7G>Rjegou8sA)={C$_KyLA?nU7v=r^JR%NeIIn+1AAho@j7pg|B~n# z1RS2hTI?>5LVrC_@p2Z0e^vXoYoO8Q+14qYQ=}#`uU6E(8A-ty->?zcBnGW7TrC6S{pa0(IrBnoQ`*ag9QPYJ@7 zel^N@9a-m7WDsyhmXZe zK9dY>!qTQJKdpcyNl>Rk3-?)sHSp9)#z}06h)b$Ow9@Frx?CAX*?Hj7{8;0w0=Bih zs-vQ9x2dz}aYg6|SMI3yYoN91#>^=2O*?-AYG`RmZu3lf!3(O7d4v7i2guMm#$ z=)l{^I4ABD%d{~>c9e(+phX@L`#W@cX-S0BHN`JK;#5S50hYBHfxJVXs73EadmweO zj8j{P4v?*eXO%$@&pjB^HPb0N7j3N>mx55mM5}u)iFjFO*I7eL!h1DPj?>>dP}Xs-MdBY8phG054tnf6SfYua9;vF?>I-3nxcAgvOf_s_loZ=jlzT-{3XRv9)QmSc zzIe1mxkMvz1QT<}3=vLT$x|NXk*Oh?i@hz_cKT@m#v3n#xaZ!O&+zq*!IR+jM-Z2E(IP8-cznu!xz2AF5B zWg9`wv;B^IWm%Pj*)SZ9eeLP`EVOM|-OUtq1(Ig!M4I3Fd0FR;P=W9yh3!}74fIbM zXL#bTWy+`IF$5xVGYTWU!){tOp_Kmh0CjVgrp&Sf^Dzbhvy5-Z@Y-RicgFYr`A-k{Qr@xNg?- zFOx91RakZncO4<5;DQu%C^`iYS8)I%_J?*4NUSdivl{XOfG#?(ew_bJk6n+;ZC zKU0Ijw0maG9~J>ottAwaTgAx8<=lTMKXI^H%0+twEuGQeU9>-|TCjSh20P)S+^w;L zV{W=HqOk!I4>66g29TAKWcPE4S(GF7zFW{=LQyyIvoSiYd@_0Y!Y05kwKhm0Vg^!* z(GrPuha0v`NTlrx^A!#1cT9|(2{)V=F6l0ayZcVv%d!aq+L)|j$}@Q`({o2RI0_=p zb3!z?W&-#!ty?~a7}&R-A-B-yo2E15 zR@$gIeC4-RzoBez1QDvX5cFM~1($gj(W~opo%NT`_Dj#cShzVJLp3c2gGt%NE89;4 z-`o?=HC!p(2aQ=9r}p9(;-6Zx6YLkWl-~IAEUv!fJLBXXBIejB=dx~F9~sf?kB+~u$~<_V_Uq!N(z_Bzr|1NJoc3w81% zt!bp;R@s^)e#73Bg;LqI>2)gnl+K}UI%c;Nv^S)o2?B-AU!YZT`1$ByO4Z6g`Iuq~ zXeFz60pd`Xxxq|GjW$|v*NU{XwDQ~6%bW7(v15;V9VKi!r`jRf`=B`pJ;B9(xszkx zEU9yv2;1Q&B>08sZrPYz1GrDYqj+opHy!)2$dF3q;Bg)|{g;GZz7l2s?NUi7AO~u+ zKGO~SlvyV&?)y5@Bjz4uo{zXsEDPjlPpZY}qx(bw9ByxzS(9GmZOn`>siAV?7Szjp 
z6yERsgUCXSZ?|@S&_W^F5=rWibof7{TH0$v0~8yRIbR?Nh$UXV5&ds~q{UAn^lEEu zp#jp6YjPH{>2iZ{2+oK-(bTvM$%zyhRJOxBA}ET+NJyF?4~2sv_)Ke3)i9GAvsJ|O z$eVX3;z~P+S-g_Z$KoYL!eg%LW1wtO*zv?6ELn`m^}Atf znkiWH=qYT2TwKVjDS2VBOnGT}dBjcax}@5WO{77%v-GIr8p}FC5-)r`_}&KaD%#WL-ubhg8b|Fslt>Ox$)`j(vmW#-1j$ zNyY8@qK16iS2)0SX`Q=Vx%K@eQZHy z(i${P(AKU4pB7rz=QPn5_f`*A_1ZB{FJ}C{t)7H@bVtoz$ zjaef>IUb7~{v{}ulI%g%v9Zz8VT1 zhu4J6rKJbwe}ytRBeevpz|cl*9?JNkNx_p>8yv?n#Y zBYe&^+_}{7yCIX;-x+JM_gbFtDWaa|%$bahU7tTF%dO)jUa0pw!p7-LKHx4_TXefv z%i9SDiJNKBZxz{ux9^|ujm^5g(j7m(KxUg+#*p~s31J54ZH9uXqx5_mihEU!7A4|d zS=k_4)Zfn{L@*(skbfKoanlR=G@;@(%2nr?0=QAjjWtnSRJkOh;y!8gf;@HCSVAe` zalaozMXLQdBP2UL!mkiEeACsFSjcH;7kZ0#cv`%u>VdCh=ZFYe?-D9$v3>cm*%|6z z4}Wg`QQzn>wXkZnNIzdN$&6WXuKH_rVJQ=O7#iuQLBfd1;O}_|+ab)xY@1C0X@N!>N^um;m^67WadkKTww@SL_!Y-&%qS#~w~ zX2B%-kimEN4WAiG4ha8yoO{a6r=R%6c_wWt&Jvn=QCSn*E>%=#{#yY zAS*Vd@$X|5$zVH6Rc4n|7Q6Tbg5)-4(b_kx0*g zsXDKt+mJ8-qfF*UjBKwq_d0B1&az9lAik2+AW@Z&!b_sW9((IZ%~qDSiDTb_?h_k{M&e{5@9Y}3HAgGtUgBH~nX`fw=c3i2(!XNF8H6A!2E8KvxIf;; z%}aSgFefor3>uW#lvGiQto>faYhpyFkgdur3r+6qF=We8l%;NXan#nTX20>7yuVjU zK(8`L1XOp5QR?Cj31w0$wi_DQRh2NMFTsuhTCbZ(Lu5<4TJK~{gMT_BD6U%TTML5# zzPS%AnZ3S+Z^g~;pHvxI;M-Ge?DR0ptDfgie_xOzM+_L;X(j^bApycE!(3Lq-5PXR zIt|{__xrnGR+;%HDi9~UJxEM(n3x{Xrrr~YvQ=V{9sRnsCQDN(5V$o!`>m@ZigFR8 z!jr*Z)DRM71b6X?*T5AYGg3|RA1Kt$@^%;0s-})??HNGUrp9_|+q1mil?8$s?jtyP zYFOZl);Oqyf{Eqn>vqAmOOSiZ&XZUs4#A-h%DL#WekwAn%8HC@7T>|)*&!!j)|YNS z64&j8mkVMP<{n|*Icsf^td2xtf8S&j41WP*Z6&NY>bA}^u9&W*08O`e9u{G8yg29;)AtjVdho6uF z$lE{S22?RnsxzG4k4GM#z&-+w^cDh374(YbE8{3^->*75-SG2J>$=bq%I0L(`WP4eAdX!B|rM7d)jqri6h--@m+5DBj9M8Cv zIaP#=E79qL0CoEYroYWa@};A4`SN0&Ssmp@aPm@@dW0M6tnwet9mmnwJ{bo%f0PkO z#xVP3Ra8VQRxjz33!ajOU3(xd)1*X4(v`~ty7M6@9#ZO0V6zVWCn|?;rF|Yc_zHX4 ztEOVrez|-YeO`9%MYri(6)47X&lwEAJxEAZQ&z7ne4ZS9YQf0t)-_ux|I(~?^4FJZ z!xPX+U11PBW+T{xsCHkU56)%cPT`vJ>kS{KLoQQ`>I+%A_#9;DCU5AL`A)Y8Td-U* z8FXv&jve+Yi;Fs-HL0R;^6BA}p@qFLc(_#;t5AZv7@G4+4`8e{n@zouj1)FDy>L^G zkK71Zelm=*Nd_N*)T*sQ7dCSJlg2z-D$#KI^6FK4@KZ0X;Kyym&CWN;gjR{YU!Ipr 
zeq%<6Ebc8&3^^XZ9e}#v@Ks&Y?fBWzj$*M9Z>m*POrP6?L4O&4KFax8Do*Xka;D;Wq^OwWFDtYVi;h4RIBv*UQn;WYy6w9{_T?@I5Rryk+GgHxfl;KVPptH0 zX59E8GTr`f!n;K?|?7}LeGMDzGztkNI4f!X51 zrb-q^h|4Fq0D5>bIoQSxYG|4KjXr?S8p~wnZ6stVh^}IJeKsr=G&siVvD~m&DA6Om zJb%aN6DE*#v-D#Kn517X6B2-cdpC3YOZm9`qLdP-KoaW~c0|?=9P`*m#_Wl&T8wO*4=0&$J$!;4d=RqXa)^|@=fc#j^z9a7*%#D*0lb^na}+cJjTzTIVQX$7Ef zNlZ%XlD5YR0iP4Fp^YcP;0+G5Q*r}!P>;~9ZS=c6kB4nIRlr9FoNUAVm35NOfdUTP zu=E58-U~Hk@s7FYsY}0Lr5L$cNatJ;57MC+^Ih*-!ECsz@0(XkSat_mGtLSZmyXmq zpm80o^9?`RfzPKQcjx2w;-4^>4jP6H$S*r~7^|WeDyHagkl~YW+}U>ybI=|&>jR!$ zRkfurk#^3k`PVGGc`kpOU~(SRknAzKZN4X!&lKG=vUA&8`R&I7{o?$A089+3xY3v9 zTUAPRuHuSc8h<8^D@yJQBqxw>U$z@s26lYvoMKlh%r_jk5fRNq=&DilIrIxo=vKHK zD4_kkkS3s&4!qcnQew)g7*hw8I0ybFUaQ8WxnMAqyd4o66WqPs>1WQR_45eE4_QdP zLCDSYE2arpj-Dt*o1&h-`2CXDhq+o{kd(nHhyC{EOJI%{kyb%^4(Q)5bA&$nJvQ>2 zcV6?y*T=}I;KseKFfToFNAHGr?#7@t>uST9?KC&Dp#P0?JY{*+DZj9pJG`AP{6|mL zsP>7K^j=}jGEXLknrXYVGc#9782rbeLZ99cT9p#~BDDS{X z=;L4bt+S*-Xa%mE)*B00fD+_cK?nj*0IIe~9ImYvdtfzN9W^Zyjk{i1*G#K>Lf}?Z z9@T>c&v#=j>;=e{@U?yvM=iq#(pxFcE;{kVjBI0U#9S()b4S6^cDXFnq^>h+rIooK zAHUy??pDt0Da5Ap%A+?9(At=Q%GJE?Du+o4Epe2W7i?CqetHZ|W;YNsUwv26saIAUpFu^gxK!T}( zL!yDIsW9O);{Mx#MMGEdv^BJIF*OE?*gHAcI~lrI+S@S^GUER$hC{>rKVnrUS7#TX zys3-3y^}Q)0VCdj3K)u}PR{mrhCq3H7ofVKjirg9i>V0{2P50RaxgUd|6@SJ#?aXr z$OM$KGqH5DG;uYw0jjtfIU74!8ksr)MTCHKKnYglf1Fi}Yhg+6rdH&ginWaEA0Y&Q zXx!JH^bjm(7Fx>9)7(t$i+(D`Q_5`Z#}6wGVquenm4Xvk$;LfP9bcGtNfTk!%+_o) zv#xu?wc%{6$VEIzQ#9QAhoN3Q>|^D?L;>A=@S(PS`p+s8NK$55rM(*P{}Wd{{c7>yH*6H z#TLm8e@U^qKj>F*LJEB!XX=3pbI1SH;U8P#D$TfoVSX3Kyh#;;sW|?fRrb^L;m*qc zm#U=MF1WrK-J|tr?z}{i4JfmX{`RaK#&~;C0RAo;gsL+}t;5L}Ce rX_5X(wY<}(y~c73>YsU_{?(&r{!jZ~yZ>w{$p3oZ-~ZbG&-VWS`sij+ literal 0 HcmV?d00001 From bbab972a15b9bc3c2bac65078a04b0da499cb5ff Mon Sep 17 00:00:00 2001 From: Bear Giles Date: Sat, 5 Sep 2015 11:42:35 -0600 Subject: [PATCH 182/189] Removed dependency on BouncyCastle to parse ASN.1 values. Initial work to decode headers. 
--- .../archivers/zip/PKWareExtraHeader.java | 496 ++++++++++++++++++ .../archivers/zip/X0014_X509Certificates.java | 89 ++-- .../zip/X0015_CertificateIdForFile.java | 104 +++- ...0016_CertificateIdForCentralDirectory.java | 101 ++-- .../zip/X0017_StrongEncryptionHeader.java | 362 +++++++++++-- ...19_EncryptionRecipientCertificateList.java | 76 +-- 6 files changed, 1075 insertions(+), 153 deletions(-) create mode 100644 src/main/java/org/apache/commons/compress/archivers/zip/PKWareExtraHeader.java diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/PKWareExtraHeader.java b/src/main/java/org/apache/commons/compress/archivers/zip/PKWareExtraHeader.java new file mode 100644 index 00000000000..cb14a0088bc --- /dev/null +++ b/src/main/java/org/apache/commons/compress/archivers/zip/PKWareExtraHeader.java @@ -0,0 +1,496 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.zip; + +import static org.apache.commons.compress.archivers.zip.ZipUtil.signedByteToUnsignedInt; + +import java.io.FileOutputStream; +import java.io.IOException; + +/** + * Strong Encryption Header (0x0017) + * + * Certificate-based encryption: + * + *
    + * Value     Size     Description
    + * -----     ----     -----------
    + * 0x0017    2 bytes  Tag for this "extra" block type
    + * TSize     2 bytes  Size of data that follows
    + * Format    2 bytes  Format definition for this record
    + * AlgID     2 bytes  Encryption algorithm identifier
    + * Bitlen    2 bytes  Bit length of encryption key (32-448 bits)
    + * Flags     2 bytes  Processing flags
    + * RCount    4 bytes  Number of recipients. 
    + * HashAlg   2 bytes  Hash algorithm identifier
    + * HSize     2 bytes  Hash size
    + * SRList    (var)    Simple list of recipients hashed public keys
    + * 
    + * Flags -   This defines the processing flags.
    + * 
    + *           
      + *
    • 0x0007 - reserved for future use + *
    • 0x000F - reserved for future use + *
    • 0x0100 - Indicates non-OAEP key wrapping was used. If this + * field is set, the version needed to extract must + * be at least 61. This means OAEP key wrapping is not + * used when generating a Master Session Key using + * ErdData. + *
    • 0x4000 - ErdData must be decrypted using 3DES-168, otherwise use the + * same algorithm used for encrypting the file contents. + *
    • 0x8000 - reserved for future use + *
    + * + * RCount - This defines the number intended recipients whose + * public keys were used for encryption. This identifies + * the number of elements in the SRList. + * + * see also: reserved1 + * + * HashAlg - This defines the hash algorithm used to calculate + * the public key hash of each public key used + * for encryption. This field currently supports + * only the following value for SHA-1 + * + * 0x8004 - SHA1 + * + * HSize - This defines the size of a hashed public key. + * + * SRList - This is a variable length list of the hashed + * public keys for each intended recipient. Each + * element in this list is HSize. The total size of + * SRList is determined using RCount * HSize. + *
    + * + * Password-based Extra Field 0x0017 in central header only. + * + *
    + * Value     Size     Description
    + * -----     ----     -----------
    + * 0x0017    2 bytes  Tag for this "extra" block type
    + * TSize     2 bytes  Size of data that follows
    + * Format    2 bytes  Format definition for this record
    + * AlgID     2 bytes  Encryption algorithm identifier
    + * Bitlen    2 bytes  Bit length of encryption key (32-448 bits)
    + * Flags     2 bytes  Processing flags
    + * (more?)
    + * 
    + * + * Format - the data format identifier for this record. The only value + * allowed at this time is the integer value 2. + * + * Password-based Extra Field 0x0017 preceding compressed file data. + * + *
    + * Value     Size     Description
    + * -----     ----     -----------
    + * 0x0017    2 bytes  Tag for this "extra" block type
    + * IVSize    2 bytes  Size of initialization vector (IV)
    + * IVData    IVSize   Initialization vector for this file
    + * Size      4 bytes  Size of remaining decryption header data
    + * Format    2 bytes  Format definition for this record
    + * AlgID     2 bytes  Encryption algorithm identifier
    + * Bitlen    2 bytes  Bit length of encryption key (32-448 bits)
    + * Flags     2 bytes  Processing flags
    + * ErdSize   2 bytes  Size of Encrypted Random Data
    + * ErdData   ErdSize  Encrypted Random Data
    + * Reserved1 4 bytes  Reserved certificate processing data
    + * Reserved2 (var)    Reserved for certificate processing data
    + * VSize     2 bytes  Size of password validation data
    + * VData     VSize-4  Password validation data
    + * VCRC32    4 bytes  Standard ZIP CRC32 of password validation data
    + *     
    + * IVData - The size of the IV should match the algorithm block size.
    + *          The IVData can be completely random data.  If the size of
    + *          the randomly generated data does not match the block size
    + *          it should be complemented with zeros or truncated as
    + *          necessary.  If IVSize is 0, then IV = CRC32 + Uncompressed
    + *          File Size (as a 64 bit little-endian, unsigned integer value).
    + * 
    + * Format -  the data format identifier for this record.  The only
    + *           value allowed at this time is the integer value 2.
    + * 
    + * ErdData - Encrypted random data is used to store random data that
    + *           is used to generate a file session key for encrypting
    + *           each file.  SHA1 is used to calculate hash data used to
    + *           derive keys.  File session keys are derived from a master
    + *           session key generated from the user-supplied password.
    + *           If the Flags field in the decryption header contains
    + *           the value 0x4000, then the ErdData field must be
    + *           decrypted using 3DES. If the value 0x4000 is not set,
    + *           then the ErdData field must be decrypted using AlgId.
    + * 
    + * Reserved1 - Reserved for certificate processing, if value is
    + *           zero, then Reserved2 data is absent.  See the explanation
    + *           under the Certificate Processing Method for details on
    + *           this data structure.
    + * 
    + * Reserved2 - If present, the size of the Reserved2 data structure
    + *           is located by skipping the first 4 bytes of this field
    + *           and using the next 2 bytes as the remaining size.  See
    + *           the explanation under the Certificate Processing Method
    + *           for details on this data structure.
    + * 
    + * VSize - This size value will always include the 4 bytes of the
    + *         VCRC32 data and will be greater than 4 bytes.
    + * 
    + * VData - Random data for password validation.  This data is VSize
    + *         in length and VSize must be a multiple of the encryption
    + *         block size.  VCRC32 is a checksum value of VData. 
    + *         VData and VCRC32 are stored encrypted and start the
    + *         stream of encrypted data for a file.
    + * 
    + * + * + * Reserved1 - Certificate Decryption Header Reserved1 Data: + * + *
    + * Value     Size     Description
    + * -----     ----     -----------
    + * RCount    4 bytes  Number of recipients.
    + * 
    + * + * RCount - This defines the number intended recipients whose public keys were + * used for encryption. This defines the number of elements in the REList field + * defined below. + * + * + * Reserved2 - Certificate Decryption Header Reserved2 Data Structures: + * + *
    + * Value     Size     Description
    + * -----     ----     -----------
    + * HashAlg   2 bytes  Hash algorithm identifier
    + * HSize     2 bytes  Hash size
    + * REList    (var)    List of recipient data elements
    + * 
    + * HashAlg - This defines the hash algorithm used to calculate
    + *           the public key hash of each public key used
    + *           for encryption. This field currently supports
    + *           only the following value for SHA-1
    + *    
    + *               0x8004 - SHA1
    + *                
    + * HSize -   This defines the size of a hashed public key
    + *           defined in REHData.
    + * 
    + * REList -  This is a variable-length list of recipient data. 
    + *           Each element in this list consists of a Recipient
    + *           Element data structure as follows:
    + * 
    + * + * Recipient Element (REList) Data Structure: + * + *
    + * Value     Size     Description
    + * -----     ----     -----------
    + * RESize    2 bytes  Size of REHData + REKData
    + * REHData   HSize    Hash of recipients public key
    + * REKData   (var)    Simple key blob
    + * 
    + * 
    + * RESize -  This defines the size of an individual REList
    + *           element.  This value is the combined size of the
    + *           REHData field + REKData field.  REHData is defined by
    + *           HSize.  REKData is variable and can be calculated
    + *           for each REList element using RESize and HSize.
    + * 
    + * REHData - Hashed public key for this recipient.
    + * 
    + * REKData - Simple Key Blob.  The format of this data structure
    + *           is identical to that defined in the Microsoft
    + *           CryptoAPI and generated using the CryptExportKey()
    + *           function.  The version of the Simple Key Blob
    + *           supported at this time is 0x02 as defined by
    + *           Microsoft.
    + * 
    + * + * Algorithm IDs - integer identifier of the encryption algorithm from + * the following range + * + *
      + *
    • 0x6601 - DES
    • + *
    • 0x6602 - RC2 (version needed to extract < 5.2)
    • + *
    • 0x6603 - 3DES 168
    • + *
    • 0x6609 - 3DES 112
    • + *
    • 0x660E - AES 128
    • + *
    • 0x660F - AES 192
    • + *
    • 0x6610 - AES 256
    • + *
    • 0x6702 - RC2 (version needed to extract >= 5.2)
    • + *
    • 0x6720 - Blowfish
    • + *
    • 0x6721 - Twofish
    • + *
    • 0x6801 - RC4
    • + *
    • 0xFFFF - Unknown algorithm
    • + *
    + * + * Hash Algorithms - integer identifier of the hash algorithm from the + * following range + * + *
      + *
    • 0x0000 - none
    • + *
    • 0x0001 - CRC32
    • + *
    • 0x8003 - MD5
    • + *
    • 0x8004 - SHA1
    • + *
    • 0x8007 - RIPEMD160
    • + *
    • 0x800C - SHA256
    • + *
    • 0x800D - SHA384
    • + *
    • 0x800E - SHA512
    • + *
    + * + * Flags - Processing flags needed for decryption + * + *
      + *
    • 0x0001 - Password is required to decrypt
    • + *
    • 0x0002 - Certificates only
    • + *
    • 0x0003 - Password or certificate required to decrypt
    • + *
    • 0x0007 - reserved for future use + *
    • 0x000F - reserved for future use + *
    • 0x0100 - indicates non-OAEP key wrapping was used. If this field is set + * the version needed to extract must be at least 61. This means OAEP key + * wrapping is not used when generating a Master Session Key using ErdData. + *
    • 0x4000 - ErdData must be decrypted using 3DES-168, otherwise use the same + * algorithm used for encrypting the file contents. + *
    • 0x8000 - reserved for future use. + *
    + * + * See the section describing the Strong Encryption Specification for + * details. Refer to the section in this document entitled + * "Incorporating PKWARE Proprietary Technology into Your Product" for more + * information. + * + * @NotThreadSafe + */ +public class PKWareExtraHeader implements ZipExtraField { + private static final ZipShort HEADER_ID = new ZipShort(0x0017); + private static final long serialVersionUID = 1L; + + /** + * Get the header id. + * + * @return the header id + */ + public ZipShort getHeaderId() { + return HEADER_ID; + } + + /** + * Extra field data in local file data - without Header-ID or length + * specifier. + */ + private byte[] localData; + + private int format; + private int algId; + private int bitlen; + private int flags; + private int rcount; + private int hashAlg; + private int hashSize; + + // encryption data + private byte ivData[]; + private byte erdData[]; + + /** + * Convert bytes to unsigned int, LSB. + * + * @param data + * @param off + * @param len + * @return + */ + int bytesToUnsignedInt(byte[] data, int off, int len) { + int x = 0; + for (int i = 0; i < len; i++) { + x += signedByteToUnsignedInt(data[off + i]) << (8 * i); + } + + return x; + } + + /** + * Set the extra field data in the local file data - without Header-ID or + * length specifier. + * + * @param data + * the field data to use + */ + public void setLocalFileDataData(byte[] data) { + try { + FileOutputStream os = new FileOutputStream("/tmp/17.dat"); + os.write(data); + os.close(); + } catch (IOException e) { + System.out.println(e.getMessage()); + } + + localData = ZipUtil.copy(data); + } + + /** + * Get the length of the local data. + * + * @return the length of the local data + */ + public ZipShort getLocalFileDataLength() { + return new ZipShort(localData != null ? localData.length : 0); + } + + /** + * Get the local data. 
+ * + * @return the local data + */ + public byte[] getLocalFileDataData() { + return ZipUtil.copy(localData); + } + + /** + * Extra field data in central directory - without Header-ID or length + * specifier. + */ + private byte[] centralData; + + /** + * Set the extra field data in central directory. + * + * @param data + * the data to use + */ + public void setCentralDirectoryData(byte[] data) { + centralData = ZipUtil.copy(data); + } + + /** + * Get the central data length. If there is no central data, get the local + * file data length. + * + * @return the central data length + */ + public ZipShort getCentralDirectoryLength() { + if (centralData != null) { + return new ZipShort(centralData.length); + } + return getLocalFileDataLength(); + } + + /** + * Get the central data. + * + * @return the central data if present, else return the local file data + */ + public byte[] getCentralDirectoryData() { + if (centralData != null) { + return ZipUtil.copy(centralData); + } + return getLocalFileDataData(); + } + + /** + * Parse central directory format. + * + * @param data + * @param offset + * @param length + */ + public void parseCentralDirectoryFormat(byte[] data, int offset, int length) { + this.format = bytesToUnsignedInt(data, offset, 2); + this.algId = bytesToUnsignedInt(data, offset + 2, 2); + this.bitlen = bytesToUnsignedInt(data, offset + 4, 2); + this.flags = bytesToUnsignedInt(data, offset + 6, 2); + + if (length > 8) { + this.rcount = bytesToUnsignedInt(data, offset + 8, 4); + this.hashAlg = bytesToUnsignedInt(data, offset + 12, 2); + this.hashSize = bytesToUnsignedInt(data, offset + 14, 2); + // srlist... 
hashed public keys + } + + System.out.printf("17: format : %d\n", this.format); + System.out.printf("17: algId : %x\n", this.algId); + System.out.printf("17: bitlen : %d\n", this.bitlen); + System.out.printf("17: flags : %x\n", this.flags); + System.out.printf("17: rcount : %d\n", this.rcount); + System.out.printf("17: hashAlg : %x\n", this.hashAlg); + System.out.printf("17: hashSize: %d\n", this.hashSize); + } + + /** + * Parse file header format. (Password only?) + * + * @param data + * @param offset + * @param length + */ + public void parseFileFormat(byte[] data, int offset, int length) { + int ivSize = bytesToUnsignedInt(data, offset, 2); + this.ivData = new byte[ivSize]; + System.arraycopy(data, offset + 4, this.ivData, 0, ivSize); + + int size = bytesToUnsignedInt(data, offset + ivSize + 2, 4); + this.format = bytesToUnsignedInt(data, offset + ivSize + 6, 2); + this.algId = bytesToUnsignedInt(data, offset + ivSize + 8, 2); + this.bitlen = bytesToUnsignedInt(data, offset + ivSize + 10, 2); + this.flags = bytesToUnsignedInt(data, offset + ivSize + 12, 2); + + int erdSize = bytesToUnsignedInt(data, offset + ivSize + 14, 2); + this.erdData = new byte[erdSize]; + System.arraycopy(data, offset + ivSize + 16, this.erdData, 0, erdSize); + // reserved + // vsize + // vdata + // vcrc32 + + System.out.printf("17: format : %d\n", this.format); + System.out.printf("17: algId : %x\n", this.algId); + System.out.printf("17: bitlen : %d\n", this.bitlen); + System.out.printf("17: flags : %x\n", this.flags); + } + + /** + * @param data + * the array of bytes. + * @param offset + * the source location in the data array. + * @param length + * the number of bytes to use in the data array. 
+ * @see ZipExtraField#parseFromLocalFileData(byte[], int, int) + */ + public void parseFromLocalFileData(byte[] data, int offset, int length) { + byte[] tmp = new byte[length]; + System.arraycopy(data, offset, tmp, 0, length); + parseFileFormat(data, offset, length); + } + + /** + * @param data + * the array of bytes. + * @param offset + * the source location in the data array. + * @param length + * the number of bytes to use in the data array. + * @see ZipExtraField#parseFromCentralDirectoryData(byte[], int, int) + */ + public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { + byte[] tmp = new byte[length]; + System.arraycopy(data, offset, tmp, 0, length); + setCentralDirectoryData(tmp); + parseCentralDirectoryFormat(data, offset, length); + } +} diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java index 1c60fe21608..41f6b2ce06b 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java @@ -18,15 +18,17 @@ */ package org.apache.commons.compress.archivers.zip; +import java.io.FileOutputStream; +import java.io.IOException; + /** * PKCS#7 Store for X.509 Certificates (0x0014): * - * This field MUST contain information about each of the certificates - * files may be signed with. When the Central Directory Encryption - * feature is enabled for a ZIP file, this record will appear in - * the Archive Extra Data Record, otherwise it will appear in the - * first central directory record and will be ignored in any - * other record. + * This field MUST contain information about each of the certificates files may + * be signed with. 
When the Central Directory Encryption feature is enabled for + * a ZIP file, this record will appear in the Archive Extra Data Record, + * otherwise it will appear in the first central directory record and will be + * ignored in any other record. * * Note: all fields stored in Intel low-byte/high-byte order. * @@ -40,12 +42,13 @@ * * @NotThreadSafe */ -public class X0014_X509Certificates implements ZipExtraField { +public class X0014_X509Certificates extends PKWareExtraHeader implements ZipExtraField { private static final ZipShort HEADER_ID = new ZipShort(0x0014); private static final long serialVersionUID = 1L; - + /** * Get the header id. + * * @return the header id */ public ZipShort getHeaderId() { @@ -53,22 +56,38 @@ public ZipShort getHeaderId() { } /** - * Extra field data in local file data - without - * Header-ID or length specifier. + * Extra field data in local file data - without Header-ID or length + * specifier. */ private byte[] localData; + private int rcount; + /** - * Set the extra field data in the local file data - - * without Header-ID or length specifier. - * @param data the field data to use + * Set the extra field data in the local file data - without Header-ID or + * length specifier. + * + * @param data + * the field data to use */ public void setLocalFileDataData(byte[] data) { + byte[] data1 = new byte[data.length - 2]; + System.arraycopy(data1, 0, data, 2, data.length - 2); + + try { + FileOutputStream os = new FileOutputStream("/tmp/14.dat"); + os.write(data); + os.close(); + } catch (IOException e) { + System.out.println(e.getMessage()); + } + localData = ZipUtil.copy(data); } /** * Get the length of the local data. + * * @return the length of the local data */ public ZipShort getLocalFileDataLength() { @@ -77,6 +96,7 @@ public ZipShort getLocalFileDataLength() { /** * Get the local data. 
+ * * @return the local data */ public byte[] getLocalFileDataData() { @@ -84,22 +104,25 @@ public byte[] getLocalFileDataData() { } /** - * Extra field data in central directory - without - * Header-ID or length specifier. + * Extra field data in central directory - without Header-ID or length + * specifier. */ private byte[] centralData; /** * Set the extra field data in central directory. - * @param data the data to use + * + * @param data + * the data to use */ public void setCentralDirectoryData(byte[] data) { centralData = ZipUtil.copy(data); } /** - * Get the central data length. - * If there is no central data, get the local file data length. + * Get the central data length. If there is no central data, get the local + * file data length. + * * @return the central data length */ public ZipShort getCentralDirectoryLength() { @@ -111,6 +134,7 @@ public ZipShort getCentralDirectoryLength() { /** * Get the central data. + * * @return the central data if present, else return the local file data */ public byte[] getCentralDirectoryData() { @@ -121,34 +145,39 @@ public byte[] getCentralDirectoryData() { } /** - * @param data the array of bytes. - * @param offset the source location in the data array. - * @param length the number of bytes to use in the data array. + * @param data + * the array of bytes. + * @param offset + * the source location in the data array. + * @param length + * the number of bytes to use in the data array. * @see ZipExtraField#parseFromLocalFileData(byte[], int, int) */ public void parseFromLocalFileData(byte[] data, int offset, int length) { byte[] tmp = new byte[length]; - - System.out.println("Field: 0x0014"); System.arraycopy(data, offset, tmp, 0, length); setLocalFileDataData(tmp); } /** - * @param data the array of bytes. - * @param offset the source location in the data array. - * @param length the number of bytes to use in the data array. + * @param data + * the array of bytes. 
+ * @param offset + * the source location in the data array. + * @param length + * the number of bytes to use in the data array. * @see ZipExtraField#parseFromCentralDirectoryData(byte[], int, int) */ - public void parseFromCentralDirectoryData(byte[] data, int offset, - int length) { - System.out.println("Field: 0x0014"); + public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { byte[] tmp = new byte[length]; System.arraycopy(data, offset, tmp, 0, length); setCentralDirectoryData(tmp); if (localData == null) { setLocalFileDataData(tmp); } - } + this.rcount = bytesToUnsignedInt(data, offset, 2); + + System.out.printf("14: rcount: %d\n", rcount); + } } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java index 9cdeada5a5a..6694512a7a8 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java @@ -18,13 +18,18 @@ */ package org.apache.commons.compress.archivers.zip; +import static org.apache.commons.compress.archivers.zip.ZipUtil.signedByteToUnsignedInt; + +import java.io.FileOutputStream; +import java.io.IOException; + /** * X.509 Certificate ID and Signature for individual file (0x0015): * - * This field contains the information about which certificate in - * the PKCS#7 store was used to sign a particular file. It also - * contains the signature data. This field can appear multiple - * times, but can only appear once per certificate. + * This field contains the information about which certificate in the PKCS#7 + * store was used to sign a particular file. It also contains the signature + * data. This field can appear multiple times, but can only appear once per + * certificate. * * Note: all fields stored in Intel low-byte/high-byte order. 
* @@ -38,12 +43,13 @@ * * @NotThreadSafe */ -public class X0015_CertificateIdForFile implements ZipExtraField { +public class X0015_CertificateIdForFile extends PKWareExtraHeader implements ZipExtraField { private static final ZipShort HEADER_ID = new ZipShort(0x0015); private static final long serialVersionUID = 1L; /** * Get the header id. + * * @return the header id */ public ZipShort getHeaderId() { @@ -51,22 +57,36 @@ public ZipShort getHeaderId() { } /** - * Extra field data in local file data - without - * Header-ID or length specifier. + * Extra field data in local file data - without Header-ID or length + * specifier. */ private byte[] localData; + private int rcount; + private int hashAlg; + /** - * Set the extra field data in the local file data - - * without Header-ID or length specifier. - * @param data the field data to use + * Set the extra field data in the local file data - without Header-ID or + * length specifier. + * + * @param data + * the field data to use */ public void setLocalFileDataData(byte[] data) { + try { + FileOutputStream os = new FileOutputStream("/tmp/15.dat"); + os.write(data); + os.close(); + } catch (IOException e) { + System.out.println(e.getMessage()); + } + localData = ZipUtil.copy(data); } /** * Get the length of the local data. + * * @return the length of the local data */ public ZipShort getLocalFileDataLength() { @@ -75,6 +95,7 @@ public ZipShort getLocalFileDataLength() { /** * Get the local data. + * * @return the local data */ public byte[] getLocalFileDataData() { @@ -82,22 +103,36 @@ public byte[] getLocalFileDataData() { } /** - * Extra field data in central directory - without - * Header-ID or length specifier. + * Extra field data in central directory - without Header-ID or length + * specifier. */ private byte[] centralData; /** * Set the extra field data in central directory. 
- * @param data the data to use + * + * @param data + * the data to use */ public void setCentralDirectoryData(byte[] data) { + try { + FileOutputStream os = new FileOutputStream("/tmp/15.dat"); + os.write(data); + os.close(); + + os = new FileOutputStream("/tmp/15.2.dat"); + os.write(data, 2, data.length - 2); + os.close(); + } catch (IOException e) { + System.out.println(e.getMessage()); + } centralData = ZipUtil.copy(data); } /** - * Get the central data length. - * If there is no central data, get the local file data length. + * Get the central data length. If there is no central data, get the local + * file data length. + * * @return the central data length */ public ZipShort getCentralDirectoryLength() { @@ -109,6 +144,7 @@ public ZipShort getCentralDirectoryLength() { /** * Get the central data. + * * @return the central data if present, else return the local file data */ public byte[] getCentralDirectoryData() { @@ -119,33 +155,51 @@ public byte[] getCentralDirectoryData() { } /** - * @param data the array of bytes. - * @param offset the source location in the data array. - * @param length the number of bytes to use in the data array. + * @param data + * the array of bytes. + * @param offset + * the source location in the data array. + * @param length + * the number of bytes to use in the data array. * @see ZipExtraField#parseFromLocalFileData(byte[], int, int) */ public void parseFromLocalFileData(byte[] data, int offset, int length) { - System.out.println("Field: 0x0015"); byte[] tmp = new byte[length]; System.arraycopy(data, offset, tmp, 0, length); setLocalFileDataData(tmp); } /** - * @param data the array of bytes. - * @param offset the source location in the data array. - * @param length the number of bytes to use in the data array. + * @param data + * the array of bytes. + * @param offset + * the source location in the data array. + * @param length + * the number of bytes to use in the data array. 
* @see ZipExtraField#parseFromCentralDirectoryData(byte[], int, int) */ - public void parseFromCentralDirectoryData(byte[] data, int offset, - int length) { - System.out.println("Field: 0x0015"); + public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { byte[] tmp = new byte[length]; System.arraycopy(data, offset, tmp, 0, length); setCentralDirectoryData(tmp); if (localData == null) { setLocalFileDataData(tmp); } + + this.rcount = bytesToUnsignedInt(data, offset, 2); + this.hashAlg = bytesToUnsignedInt(data, offset + 2, 2); + + System.out.printf("15: rcount: %d\n", rcount); + System.out.printf("15: hashAlg: %x\n", hashAlg); + + System.out.printf("15: [2] %d %x\n", bytesToUnsignedInt(data, offset + 4, 2), + bytesToUnsignedInt(data, offset + 4, 2)); + System.out.printf("15: [3] %d %x\n", bytesToUnsignedInt(data, offset + 6, 4), + bytesToUnsignedInt(data, offset + 6, 4)); + System.out.printf("15: [4] %d %x\n", bytesToUnsignedInt(data, offset + 10, 4), + bytesToUnsignedInt(data, offset + 10, 4)); + System.out.printf("15: [5] %d %x\n", bytesToUnsignedInt(data, offset + 14, 2), + bytesToUnsignedInt(data, offset + 14, 2)); } } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java index fa2615bf50a..2bc5af7533b 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java @@ -18,14 +18,19 @@ */ package org.apache.commons.compress.archivers.zip; +import static org.apache.commons.compress.archivers.zip.ZipUtil.signedByteToUnsignedInt; + +import java.io.FileOutputStream; +import java.io.IOException; + /** * X.509 Certificate ID and Signature for central directory (0x0016): * - * This field contains the information about which certificate 
in - * the PKCS#7 store was used to sign the central directory structure. - * When the Central Directory Encryption feature is enabled for a - * ZIP file, this record will appear in the Archive Extra Data Record, - * otherwise it will appear in the first central directory record. + * This field contains the information about which certificate in the PKCS#7 + * store was used to sign the central directory structure. When the Central + * Directory Encryption feature is enabled for a ZIP file, this record will + * appear in the Archive Extra Data Record, otherwise it will appear in the + * first central directory record. * * Note: all fields stored in Intel low-byte/high-byte order. * @@ -39,12 +44,13 @@ * * @NotThreadSafe */ -public class X0016_CertificateIdForCentralDirectory implements ZipExtraField { +public class X0016_CertificateIdForCentralDirectory extends PKWareExtraHeader implements ZipExtraField { private static final ZipShort HEADER_ID = new ZipShort(0x0016); private static final long serialVersionUID = 1L; /** * Get the header id. + * * @return the header id */ public ZipShort getHeaderId() { @@ -52,15 +58,20 @@ public ZipShort getHeaderId() { } /** - * Extra field data in local file data - without - * Header-ID or length specifier. + * Extra field data in local file data - without Header-ID or length + * specifier. */ private byte[] localData; + private int rcount; + private int hashAlg; + /** - * Set the extra field data in the local file data - - * without Header-ID or length specifier. - * @param data the field data to use + * Set the extra field data in the local file data - without Header-ID or + * length specifier. + * + * @param data + * the field data to use */ public void setLocalFileDataData(byte[] data) { localData = ZipUtil.copy(data); @@ -68,6 +79,7 @@ public void setLocalFileDataData(byte[] data) { /** * Get the length of the local data. 
+ * * @return the length of the local data */ public ZipShort getLocalFileDataLength() { @@ -76,6 +88,7 @@ public ZipShort getLocalFileDataLength() { /** * Get the local data. + * * @return the local data */ public byte[] getLocalFileDataData() { @@ -83,22 +96,33 @@ public byte[] getLocalFileDataData() { } /** - * Extra field data in central directory - without - * Header-ID or length specifier. + * Extra field data in central directory - without Header-ID or length + * specifier. */ private byte[] centralData; /** * Set the extra field data in central directory. - * @param data the data to use + * + * @param data + * the data to use */ public void setCentralDirectoryData(byte[] data) { + try { + FileOutputStream os = new FileOutputStream("/tmp/16.dat"); + os.write(data); + os.close(); + } catch (IOException e) { + System.out.println(e.getMessage()); + } + centralData = ZipUtil.copy(data); } /** - * Get the central data length. - * If there is no central data, get the local file data length. + * Get the central data length. If there is no central data, get the local + * file data length. + * * @return the central data length */ public ZipShort getCentralDirectoryLength() { @@ -110,6 +134,7 @@ public ZipShort getCentralDirectoryLength() { /** * Get the central data. + * * @return the central data if present, else return the local file data */ public byte[] getCentralDirectoryData() { @@ -120,33 +145,49 @@ public byte[] getCentralDirectoryData() { } /** - * @param data the array of bytes. - * @param offset the source location in the data array. - * @param length the number of bytes to use in the data array. + * This should never be called for this header type. + * + * @param data + * the array of bytes. + * @param offset + * the source location in the data array. + * @param length + * the number of bytes to use in the data array. 
* @see ZipExtraField#parseFromLocalFileData(byte[], int, int) */ public void parseFromLocalFileData(byte[] data, int offset, int length) { - System.out.println("Field: 0x0016"); byte[] tmp = new byte[length]; System.arraycopy(data, offset, tmp, 0, length); setLocalFileDataData(tmp); } /** - * @param data the array of bytes. - * @param offset the source location in the data array. - * @param length the number of bytes to use in the data array. + * @param data + * the array of bytes. + * @param offset + * the source location in the data array. + * @param length + * the number of bytes to use in the data array. * @see ZipExtraField#parseFromCentralDirectoryData(byte[], int, int) */ - public void parseFromCentralDirectoryData(byte[] data, int offset, - int length) { - System.out.println("Field: 0x0016"); + public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { byte[] tmp = new byte[length]; System.arraycopy(data, offset, tmp, 0, length); setCentralDirectoryData(tmp); - if (localData == null) { - setLocalFileDataData(tmp); - } - } + this.rcount = bytesToUnsignedInt(data, offset, 2); + this.hashAlg = bytesToUnsignedInt(data, offset + 2, 2); + + System.out.printf("16: rcount: %d\n", rcount); + System.out.printf("16: hashAlg: %x\n", hashAlg); + + System.out.printf("16: [2] %d %x\n", bytesToUnsignedInt(data, offset + 4, 2), + bytesToUnsignedInt(data, offset + 4, 2)); + System.out.printf("16: [3] %d %x\n", bytesToUnsignedInt(data, offset + 6, 4), + bytesToUnsignedInt(data, offset + 6, 4)); + System.out.printf("16: [4] %d %x\n", bytesToUnsignedInt(data, offset + 10, 4), + bytesToUnsignedInt(data, offset + 10, 4)); + System.out.printf("16: [5] %d %x\n", bytesToUnsignedInt(data, offset + 14, 2), + bytesToUnsignedInt(data, offset + 14, 2)); + } } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java index 
7c288585e19..c89ac806bc0 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java @@ -18,9 +18,68 @@ */ package org.apache.commons.compress.archivers.zip; +import static org.apache.commons.compress.archivers.zip.ZipUtil.signedByteToUnsignedInt; + +import java.io.FileOutputStream; +import java.io.IOException; + /** * Strong Encryption Header (0x0017) * + * Certificate-based encryption: + * + *
    + * Value     Size     Description
    + * -----     ----     -----------
    + * 0x0017    2 bytes  Tag for this "extra" block type
    + * TSize     2 bytes  Size of data that follows
    + * Format    2 bytes  Format definition for this record
    + * AlgID     2 bytes  Encryption algorithm identifier
    + * Bitlen    2 bytes  Bit length of encryption key (32-448 bits)
    + * Flags     2 bytes  Processing flags
    + * RCount    4 bytes  Number of recipients. 
    + * HashAlg   2 bytes  Hash algorithm identifier
    + * HSize     2 bytes  Hash size
    + * SRList    (var)    Simple list of recipients hashed public keys
    + * 
    + * Flags -   This defines the processing flags.
    + * 
    + *           
      + *
    • 0x0007 - reserved for future use + *
    • 0x000F - reserved for future use + *
    • 0x0100 - Indicates non-OAEP key wrapping was used. If this + * this field is set, the version needed to extract must + * be at least 61. This means OAEP key wrapping is not + * used when generating a Master Session Key using + * ErdData. + *
    • 0x4000 - ErdData must be decrypted using 3DES-168, otherwise use the + * same algorithm used for encrypting the file contents. + *
    • 0x8000 - reserved for future use + *
    + * + * RCount - This defines the number intended recipients whose + * public keys were used for encryption. This identifies + * the number of elements in the SRList. + * + * see also: reserved1 + * + * HashAlg - This defines the hash algorithm used to calculate + * the public key hash of each public key used + * for encryption. This field currently supports + * only the following value for SHA-1 + * + * 0x8004 - SHA1 + * + * HSize - This defines the size of a hashed public key. + * + * SRList - This is a variable length list of the hashed + * public keys for each intended recipient. Each + * element in this list is HSize. The total size of + * SRList is determined using RCount * HSize. + *
    + * + * Password-based Extra Field 0x0017 in central header only. + * *
      * Value     Size     Description
      * -----     ----     -----------
    @@ -28,28 +87,170 @@
      * TSize     2 bytes  Size of data that follows
      * Format    2 bytes  Format definition for this record
      * AlgID     2 bytes  Encryption algorithm identifier
    - * Bitlen    2 bytes  Bit length of encryption key
    + * Bitlen    2 bytes  Bit length of encryption key (32-448 bits)
      * Flags     2 bytes  Processing flags
    - * CertData  TSize-8  Certificate decryption extra field data
    - *                    (refer to the explanation for CertData
    - *                     in the section describing the 
    - *                     Certificate Processing Method under 
    - *                     the Strong Encryption Specification)
    + * (more?)
    + * 
    + * + * Format - the data format identifier for this record. The only value + * allowed at this time is the integer value 2. + * + * Password-based Extra Field 0x0017 preceding compressed file data. + * + *
    + * Value     Size     Description
    + * -----     ----     -----------
    + * 0x0017    2 bytes  Tag for this "extra" block type
    + * IVSize    2 bytes  Size of initialization vector (IV)
    + * IVData    IVSize   Initialization vector for this file
    + * Size      4 bytes  Size of remaining decryption header data
    + * Format    2 bytes  Format definition for this record
    + * AlgID     2 bytes  Encryption algorithm identifier
    + * Bitlen    2 bytes  Bit length of encryption key (32-448 bits)
    + * Flags     2 bytes  Processing flags
    + * ErdSize   2 bytes  Size of Encrypted Random Data
    + * ErdData   ErdSize  Encrypted Random Data
    + * Reserved1 4 bytes  Reserved certificate processing data
    + * Reserved2 (var)    Reserved for certificate processing data
    + * VSize     2 bytes  Size of password validation data
    + * VData     VSize-4  Password validation data
    + * VCRC32    4 bytes  Standard ZIP CRC32 of password validation data
    + *     
    + * IVData - The size of the IV should match the algorithm block size.
    + *          The IVData can be completely random data.  If the size of
    + *          the randomly generated data does not match the block size
    + *          it should be complemented with zero's or truncated as
    + *          necessary.  If IVSize is 0,then IV = CRC32 + Uncompressed
    + *          File Size (as a 64 bit little-endian, unsigned integer value).
    + * 
    + * Format -  the data format identifier for this record.  The only
    + *           value allowed at this time is the integer value 2.
    + * 
    + * ErdData - Encrypted random data is used to store random data that
    + *           is used to generate a file session key for encrypting
    + *           each file.  SHA1 is used to calculate hash data used to
    + *           derive keys.  File session keys are derived from a master
    + *           session key generated from the user-supplied password.
    + *           If the Flags field in the decryption header contains
    + *           the value 0x4000, then the ErdData field must be
    + *           decrypted using 3DES. If the value 0x4000 is not set,
    + *           then the ErdData field must be decrypted using AlgId.
    + * 
    + * Reserved1 - Reserved for certificate processing, if value is
    + *           zero, then Reserved2 data is absent.  See the explanation
    + *           under the Certificate Processing Method for details on
    + *           this data structure.
    + * 
    + * Reserved2 - If present, the size of the Reserved2 data structure
    + *           is located by skipping the first 4 bytes of this field
    + *           and using the next 2 bytes as the remaining size.  See
    + *           the explanation under the Certificate Processing Method
    + *           for details on this data structure.
    + * 
    + * VSize - This size value will always include the 4 bytes of the
    + *         VCRC32 data and will be greater than 4 bytes.
    + * 
    + * VData - Random data for password validation.  This data is VSize
    + *         in length and VSize must be a multiple of the encryption
    + *         block size.  VCRC32 is a checksum value of VData. 
    + *         VData and VCRC32 are stored encrypted and start the
    + *         stream of encrypted data for a file.
    + * 
    + * + * + * Reserved1 - Certificate Decryption Header Reserved1 Data: + * + *
    + * Value     Size     Description
    + * -----     ----     -----------
    + * RCount    4 bytes  Number of recipients.
    + * 
    + * + * RCount - This defines the number intended recipients whose public keys were + * used for encryption. This defines the number of elements in the REList field + * defined below. + * + * + * Reserved2 - Certificate Decryption Header Reserved2 Data Structures: + * + *
    + * Value     Size     Description
    + * -----     ----     -----------
    + * HashAlg   2 bytes  Hash algorithm identifier
    + * HSize     2 bytes  Hash size
    + * REList    (var)    List of recipient data elements
    + * 
    + * HashAlg - This defines the hash algorithm used to calculate
    + *           the public key hash of each public key used
    + *           for encryption. This field currently supports
    + *           only the following value for SHA-1
    + *    
    + *               0x8004 - SHA1
    + *                
    + * HSize -   This defines the size of a hashed public key
    + *           defined in REHData.
    + * 
    + * REList -  This is a variable length of list of recipient data. 
    + *           Each element in this list consists of a Recipient
    + *           Element data structure as follows:
    + * 
    + * + * Recipient Element (REList) Data Structure: + * + *
    + * Value     Size     Description
    + * -----     ----     -----------
    + * RESize    2 bytes  Size of REHData + REKData
    + * REHData   HSize    Hash of recipients public key
    + * REKData   (var)    Simple key blob
    + * 
    + * 
    + * RESize -  This defines the size of an individual REList
    + *           element.  This value is the combined size of the
    + *           REHData field + REKData field.  REHData is defined by
    + *           HSize.  REKData is variable and can be calculated
    + *           for each REList element using RESize and HSize.
    + * 
    + * REHData - Hashed public key for this recipient.
    + * 
    + * REKData - Simple Key Blob.  The format of this data structure
    + *           is identical to that defined in the Microsoft
    + *           CryptoAPI and generated using the CryptExportKey()
    + *           function.  The version of the Simple Key Blob
    + *           supported at this time is 0x02 as defined by
    + *           Microsoft.
      * 
    + * + * Flags - Processing flags needed for decryption + * + *
      + *
    • 0x0001 - Password is required to decrypt
    • + *
    • 0x0002 - Certificates only
    • + *
    • 0x0003 - Password or certificate required to decrypt
    • + *
    • 0x0007 - reserved for future use + *
    • 0x000F - reserved for future use + *
    • 0x0100 - indicates non-OAEP key wrapping was used. If this field is set + * the version needed to extract must be at least 61. This means OAEP key + * wrapping is not used when generating a Master Session Key using ErdData. + *
    • 0x4000 - ErdData must be decrypted using 3DES-168, otherwise use the same + * algorithm used for encrypting the file contents. + *
    • 0x8000 - reserved for future use. + *
    * - * See the section describing the Strong Encryption Specification - * for details. Refer to the section in this document entitled - * "Incorporating PKWARE Proprietary Technology into Your Product" - * for more information. + * See the section describing the Strong Encryption Specification for + * details. Refer to the section in this document entitled + * "Incorporating PKWARE Proprietary Technology into Your Product" for more + * information. * * @NotThreadSafe */ -public class X0017_StrongEncryptionHeader implements ZipExtraField { +public class X0017_StrongEncryptionHeader extends PKWareExtraHeader implements ZipExtraField { private static final ZipShort HEADER_ID = new ZipShort(0x0017); private static final long serialVersionUID = 1L; /** * Get the header id. + * * @return the header id */ public ZipShort getHeaderId() { @@ -57,22 +258,45 @@ public ZipShort getHeaderId() { } /** - * Extra field data in local file data - without - * Header-ID or length specifier. + * Extra field data in local file data - without Header-ID or length + * specifier. */ private byte[] localData; + private int format; + private int algId; + private int bitlen; + private int flags; + private int rcount; + private int hashAlg; + private int hashSize; + + // encryption data + private byte ivData[]; + private byte erdData[]; + /** - * Set the extra field data in the local file data - - * without Header-ID or length specifier. - * @param data the field data to use + * Set the extra field data in the local file data - without Header-ID or + * length specifier. + * + * @param data + * the field data to use */ public void setLocalFileDataData(byte[] data) { + try { + FileOutputStream os = new FileOutputStream("/tmp/17.dat"); + os.write(data); + os.close(); + } catch (IOException e) { + System.out.println(e.getMessage()); + } + localData = ZipUtil.copy(data); } /** * Get the length of the local data. 
+ * * @return the length of the local data */ public ZipShort getLocalFileDataLength() { @@ -81,6 +305,7 @@ public ZipShort getLocalFileDataLength() { /** * Get the local data. + * * @return the local data */ public byte[] getLocalFileDataData() { @@ -88,22 +313,25 @@ public byte[] getLocalFileDataData() { } /** - * Extra field data in central directory - without - * Header-ID or length specifier. + * Extra field data in central directory - without Header-ID or length + * specifier. */ private byte[] centralData; /** * Set the extra field data in central directory. - * @param data the data to use + * + * @param data + * the data to use */ public void setCentralDirectoryData(byte[] data) { centralData = ZipUtil.copy(data); } /** - * Get the central data length. - * If there is no central data, get the local file data length. + * Get the central data length. If there is no central data, get the local + * file data length. + * * @return the central data length */ public ZipShort getCentralDirectoryLength() { @@ -115,6 +343,7 @@ public ZipShort getCentralDirectoryLength() { /** * Get the central data. + * * @return the central data if present, else return the local file data */ public byte[] getCentralDirectoryData() { @@ -125,33 +354,94 @@ public byte[] getCentralDirectoryData() { } /** - * @param data the array of bytes. - * @param offset the source location in the data array. - * @param length the number of bytes to use in the data array. + * Parse central directory format. 
+ * + * @param data + * @param offset + * @param length + */ + public void parseCentralDirectoryFormat(byte[] data, int offset, int length) { + this.format = bytesToUnsignedInt(data, offset, 2); + this.algId = bytesToUnsignedInt(data, offset + 2, 2); + this.bitlen = bytesToUnsignedInt(data, offset + 4, 2); + this.flags = bytesToUnsignedInt(data, offset + 6, 2); + + if (length > 8) { + this.rcount = bytesToUnsignedInt(data, offset + 8, 4); + this.hashAlg = bytesToUnsignedInt(data, offset + 12, 2); + this.hashSize = bytesToUnsignedInt(data, offset + 14, 2); + // srlist... hashed public keys + } + + System.out.printf("17: format : %d\n", this.format); + System.out.printf("17: algId : %x\n", this.algId); + System.out.printf("17: bitlen : %d\n", this.bitlen); + System.out.printf("17: flags : %x\n", this.flags); + System.out.printf("17: rcount : %d\n", this.rcount); + System.out.printf("17: hashAlg : %x\n", this.hashAlg); + System.out.printf("17: hashSize: %d\n", this.hashSize); + } + + /** + * Parse file header format. (Password only?) 
+ * + * @param data + * @param offset + * @param length + */ + public void parseFileFormat(byte[] data, int offset, int length) { + int ivSize = bytesToUnsignedInt(data, offset, 2); + this.ivData = new byte[ivSize]; + System.arraycopy(data, offset + 4, this.ivData, 0, ivSize); + + int size = bytesToUnsignedInt(data, offset + ivSize + 2, 4); + this.format = bytesToUnsignedInt(data, offset + ivSize + 6, 2); + this.algId = bytesToUnsignedInt(data, offset + ivSize + 8, 2); + this.bitlen = bytesToUnsignedInt(data, offset + ivSize + 10, 2); + this.flags = bytesToUnsignedInt(data, offset + ivSize + 12, 2); + + int erdSize = bytesToUnsignedInt(data, offset + ivSize + 14, 2); + this.erdData = new byte[erdSize]; + System.arraycopy(data, offset + ivSize + 16, this.erdData, 0, erdSize); + // reserved + // vsize + // vdata + // vcrc32 + + System.out.printf("17: format : %d\n", this.format); + System.out.printf("17: algId : %x\n", this.algId); + System.out.printf("17: bitlen : %d\n", this.bitlen); + System.out.printf("17: flags : %x\n", this.flags); + } + + /** + * @param data + * the array of bytes. + * @param offset + * the source location in the data array. + * @param length + * the number of bytes to use in the data array. * @see ZipExtraField#parseFromLocalFileData(byte[], int, int) */ public void parseFromLocalFileData(byte[] data, int offset, int length) { - System.out.println("Field: 0x0017"); byte[] tmp = new byte[length]; System.arraycopy(data, offset, tmp, 0, length); - setLocalFileDataData(tmp); + parseFileFormat(data, offset, length); } /** - * @param data the array of bytes. - * @param offset the source location in the data array. - * @param length the number of bytes to use in the data array. + * @param data + * the array of bytes. + * @param offset + * the source location in the data array. + * @param length + * the number of bytes to use in the data array. 
* @see ZipExtraField#parseFromCentralDirectoryData(byte[], int, int) */ - public void parseFromCentralDirectoryData(byte[] data, int offset, - int length) { - System.out.println("Field: 0x0017"); + public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { byte[] tmp = new byte[length]; System.arraycopy(data, offset, tmp, 0, length); setCentralDirectoryData(tmp); - if (localData == null) { - setLocalFileDataData(tmp); - } + parseCentralDirectoryFormat(data, offset, length); } - } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0019_EncryptionRecipientCertificateList.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0019_EncryptionRecipientCertificateList.java index f8793a6d05b..e300d757b39 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0019_EncryptionRecipientCertificateList.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0019_EncryptionRecipientCertificateList.java @@ -21,13 +21,13 @@ /** * PKCS#7 Encryption Recipient Certificate List (0x0019) * - * This field MAY contain information about each of the certificates - * used in encryption processing and it can be used to identify who is - * allowed to decrypt encrypted files. This field should only appear - * in the archive extra data record. This field is not required and - * serves only to aid archive modifications by preserving public - * encryption key data. Individual security requirements may dictate - * that this data be omitted to deter information exposure. + * This field MAY contain information about each of the certificates used in + * encryption processing and it can be used to identify who is allowed to + * decrypt encrypted files. This field should only appear in the archive extra + * data record. This field is not required and serves only to aid archive + * modifications by preserving public encryption key data. 
Individual security + * requirements may dictate that this data be omitted to deter information + * exposure. * * Note: all fields stored in Intel low-byte/high-byte order. * @@ -40,19 +40,20 @@ * CStore (var) PKCS#7 data blob * * - * See the section describing the Strong Encryption Specification - * for details. Refer to the section in this document entitled - * "Incorporating PKWARE Proprietary Technology into Your Product" - * for more information. + * See the section describing the Strong Encryption Specification for + * details. Refer to the section in this document entitled + * "Incorporating PKWARE Proprietary Technology into Your Product" for more + * information. * * @NotThreadSafe */ -public class X0019_EncryptionRecipientCertificateList implements ZipExtraField { +public class X0019_EncryptionRecipientCertificateList extends PKWareExtraHeader implements ZipExtraField { private static final ZipShort HEADER_ID = new ZipShort(0x0019); private static final long serialVersionUID = 1L; /** * Get the header id. + * * @return the header id */ public ZipShort getHeaderId() { @@ -60,15 +61,17 @@ public ZipShort getHeaderId() { } /** - * Extra field data in local file data - without - * Header-ID or length specifier. + * Extra field data in local file data - without Header-ID or length + * specifier. */ private byte[] localData; /** - * Set the extra field data in the local file data - - * without Header-ID or length specifier. - * @param data the field data to use + * Set the extra field data in the local file data - without Header-ID or + * length specifier. + * + * @param data + * the field data to use */ public void setLocalFileDataData(byte[] data) { localData = ZipUtil.copy(data); @@ -76,6 +79,7 @@ public void setLocalFileDataData(byte[] data) { /** * Get the length of the local data. + * * @return the length of the local data */ public ZipShort getLocalFileDataLength() { @@ -84,6 +88,7 @@ public ZipShort getLocalFileDataLength() { /** * Get the local data. 
+ * * @return the local data */ public byte[] getLocalFileDataData() { @@ -91,22 +96,25 @@ public byte[] getLocalFileDataData() { } /** - * Extra field data in central directory - without - * Header-ID or length specifier. + * Extra field data in central directory - without Header-ID or length + * specifier. */ private byte[] centralData; /** * Set the extra field data in central directory. - * @param data the data to use + * + * @param data + * the data to use */ public void setCentralDirectoryData(byte[] data) { centralData = ZipUtil.copy(data); } /** - * Get the central data length. - * If there is no central data, get the local file data length. + * Get the central data length. If there is no central data, get the local + * file data length. + * * @return the central data length */ public ZipShort getCentralDirectoryLength() { @@ -118,6 +126,7 @@ public ZipShort getCentralDirectoryLength() { /** * Get the central data. + * * @return the central data if present, else return the local file data */ public byte[] getCentralDirectoryData() { @@ -128,27 +137,30 @@ public byte[] getCentralDirectoryData() { } /** - * @param data the array of bytes. - * @param offset the source location in the data array. - * @param length the number of bytes to use in the data array. + * @param data + * the array of bytes. + * @param offset + * the source location in the data array. + * @param length + * the number of bytes to use in the data array. * @see ZipExtraField#parseFromLocalFileData(byte[], int, int) */ public void parseFromLocalFileData(byte[] data, int offset, int length) { - System.out.println("Field: 0x0019"); byte[] tmp = new byte[length]; System.arraycopy(data, offset, tmp, 0, length); setLocalFileDataData(tmp); } /** - * @param data the array of bytes. - * @param offset the source location in the data array. - * @param length the number of bytes to use in the data array. + * @param data + * the array of bytes. 
+ * @param offset + * the source location in the data array. + * @param length + * the number of bytes to use in the data array. * @see ZipExtraField#parseFromCentralDirectoryData(byte[], int, int) */ - public void parseFromCentralDirectoryData(byte[] data, int offset, - int length) { - System.out.println("Field: 0x0019"); + public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { byte[] tmp = new byte[length]; System.arraycopy(data, offset, tmp, 0, length); setCentralDirectoryData(tmp); From 9670458b97e350fc8f0a779820acefbd0f4c0ab4 Mon Sep 17 00:00:00 2001 From: Bear Giles Date: Sat, 5 Sep 2015 12:12:51 -0600 Subject: [PATCH 183/189] Added test file for PKWare strong password encryption. Fixed bug exposed by this. --- .../zip/X0017_StrongEncryptionHeader.java | 2 +- .../archivers/zip/EncryptedArchiveTest.java | 50 ++++++++++++++++++ .../resources/pkware-password-encrypted.zip | Bin 0 -> 4418 bytes 3 files changed, 51 insertions(+), 1 deletion(-) create mode 100644 src/test/resources/pkware-password-encrypted.zip diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java index c89ac806bc0..8eb0657c5f1 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java @@ -366,7 +366,7 @@ public void parseCentralDirectoryFormat(byte[] data, int offset, int length) { this.bitlen = bytesToUnsignedInt(data, offset + 4, 2); this.flags = bytesToUnsignedInt(data, offset + 6, 2); - if (length > 8) { + if (length > offset + 8) { this.rcount = bytesToUnsignedInt(data, offset + 8, 4); this.hashAlg = bytesToUnsignedInt(data, offset + 12, 2); this.hashSize = bytesToUnsignedInt(data, offset + 14, 2); diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/EncryptedArchiveTest.java 
b/src/test/java/org/apache/commons/compress/archivers/zip/EncryptedArchiveTest.java index b0928bfa62b..e991d6ddec9 100644 --- a/src/test/java/org/apache/commons/compress/archivers/zip/EncryptedArchiveTest.java +++ b/src/test/java/org/apache/commons/compress/archivers/zip/EncryptedArchiveTest.java @@ -175,4 +175,54 @@ public void testReadPkwareFullyEncryptedEntryViaStream() } } } + + public void testReadPkwarePasswordEncryptedEntryViaZipFile() + throws IOException { + System.out.println("E"); + File file = getFile("pkware-password-encrypted.zip"); + ZipFile zf = null; + try { + zf = new ZipFile(file); + ZipArchiveEntry zae = zf.getEntry("LICENSE.txt"); + assertTrue(zae.getGeneralPurposeBit().usesEncryption()); + assertTrue(zae.getGeneralPurposeBit().usesStrongEncryption()); + assertFalse(zf.canReadEntryData(zae)); + try { + zf.getInputStream(zae); + fail("expected an exception"); + } catch (UnsupportedZipFeatureException ex) { + assertSame(UnsupportedZipFeatureException.Feature.ENCRYPTION, + ex.getFeature()); + } + } finally { + ZipFile.closeQuietly(zf); + } + } + + public void testReadPkwarePasswordEncryptedEntryViaStream() + throws IOException { + System.out.println("F"); + File file = getFile("pkware-password-encrypted.zip"); + ZipArchiveInputStream zin = null; + try { + zin = new ZipArchiveInputStream(new FileInputStream(file)); + ZipArchiveEntry zae = zin.getNextZipEntry(); + assertEquals("LICENSE.txt", zae.getName()); + assertTrue(zae.getGeneralPurposeBit().usesEncryption()); + assertTrue(zae.getGeneralPurposeBit().usesStrongEncryption()); + assertFalse(zin.canReadEntryData(zae)); + try { + byte[] buf = new byte[1024]; + zin.read(buf, 0, buf.length); + fail("expected an exception"); + } catch (UnsupportedZipFeatureException ex) { + assertSame(UnsupportedZipFeatureException.Feature.ENCRYPTION, + ex.getFeature()); + } + } finally { + if (zin != null) { + zin.close(); + } + } + } } diff --git a/src/test/resources/pkware-password-encrypted.zip 
b/src/test/resources/pkware-password-encrypted.zip new file mode 100644 index 0000000000000000000000000000000000000000..db3b3013e5e514ff635bd45d1ee0443b68d6e5f0 GIT binary patch literal 4418 zcmZ|TRa6s>+raUSj*-$Jj0UBJ0mA4;2Lck(HKbFzYtk`5rosqC=^7y62t}F!DoA%T zdhinw-rxU!-kbO0J>&-K^XkeK8FKpj8<;8fz*?IN*_D4+)bY{dWoY5)N6 zNMBPMW~wa_6dFVi(3O}*?>#>c#N23IY3=12fPnx2F@PQp00IHo0HU0Gu$~lOG#Jj> zH@vPayjwV|klU8ux#-mmu_tjI6!p&Ab%vvbXF=g@qFpj&aXwbQDDhYxSze{Ofkg?P znw4*FW7D1Fz>b@%tJ8$zevC;ooH5=zA<-)q=#y7YztC;HWZROCxLvypPSr29mUVzA zc79E|>u@&As?hL(w#gxfX~r2p9+qC1`r;qdzuyC50c(2SnGgv^hm9)uZ5|!y8SRgW z95tl7^4k&jD}&8`lD)SWJKa8$ zKUM~l547&ibn>St#yc`OL0aQu8YZkh+>Y)lva<@;!lcZQK#0A|dST9l?&mTKgx( z*1}j-{C88L>MH#!#O_WK2bt~5HM;W$^zON2FRdYE6y9*n-q=9y>xT@Y3a&9?Hmnzz zJknCyx0AiyNX?|gmYLeA&W{7RR-hu)%(Ji8Sr}#))&09~xYjq_^pJBH^ia0{RV{ks6XF%=@ zozin!%)y#ppS#m>KY+b#m`M%2!LcL@qY3ebE6IJH1pd=tZwI*nbzCH*E}m<=3md#dr_QBm^yOQEXpDeLW9*E$ z)qrWPNlD%3U*-YlwYH>+l$t6nUfj0f%lcKe)EDhnx^@q>=4Tf6Ulnsg6QZjK1rtPna zk_;SV-#*IHhIB}cqBYBkc}wJi;otc6mCuj7aqbhD?AsgrAOY`{@x?5e6SH-*ob6}* zQJ$-5^CzE^OzV=n{F}zvIlCgY8|HbTUzp5*x|-zH8{o@cXWbz9p!%oEB(X zGwtXcX1=u<{zW+|#4x*XDygOYJ^iyQC1|elvk)^1z`<>#bHB=mv&%ZAQoDK`$0qLJ zBed0aYnls2(GBg68~#;r$U?{BiHpX%h|!2aKMxSmm0CUu@&R8Twq3ta}3UT=XHv>`>o~i%e%il=TW#iq^3PRO}9qDA$wF#U5BBr3xgDhEXJrd?8wZ9P!GZs zumC)u*jI<;gqHN}F8#P(0fH5|H2E!d{pz|-n-~g!OeE46=ErkW5TcFTycb zH&C@bDR`)`bt;!+z_&QhUdsVkq3TN*$oL+YvFk!oELc+6DPGMv)B4!I);X}AKpk1> zmA(!trP3jFW}VB6A=5!TN*yj~1y;p=#~BrEL>WG|cF}#I_W7Bjr%EkXs9Va)?86JJ zs;)w_ZVL~?ZV+>``(mMmGSwyQ4n19BRNd0fVyEM|&&xPiKPw;i2jS#eX)&zrtn6u_ z&}$^5F##VyS%{@nQ*tDm|2W1wj^D@*G{=RsB9$Xu3Q%Ql3QnE|ga%-E^xogYANxx{ zL_vbYT0bv%A}d07um-cuu6BvjVa2vl+rEHkWf|Ii7gr)?h5T%+Z^yDAz7ch=Vu(LW znZ-Z$4wIZXz{yxdIw_6M!O*YmwKz-wIq^`?v>~t!5+O2#!e0}}YCBpE3f~tijhx__ z*L{-?oC0IE<}-mMX&x+l%j3-i*oFN^tjyPBpg&>^)%AA(7v&*8`pQkk}y(pgdk*86Q#8GGI- z$n6q{lw&>uVr;X^g^&#;9}9JZm8$tEhqA#BzzK_mWmN?T4ml4wsQ1V)_Ryr zTffYe?e_Q`>Bk0Kj%ysXsr>neGgHe=xolzu#-K(1nMZ?ln8&AKbyuiU6r7_=hv5xl 
zPFEg?9(`fNwAKWklMbjF??7&k_$SJ}dF9A#a|eRCh^>${^B0HZCaroY$HuT@-uF3@ z!(N}YH4&$IR`AN~$N7Im3Xp=)Go>fyXv6@~Ch@Yocre<3ykkb>6$7J{mP^gmiQgcm z4%1q7S+r64O`7`6^3R7s^;|F2tVv{_8T@fwOng@Wg)GG$_^ePBx$?v#egyxd+lv#O z#gTBso*54^GFND3Y;0JOD$LtKRw8D#t30zBf^ZVlDURvpg>IH%FF!zwwBB8tQkPPy z2ff>tX>V!)+L?&3l((s;a8Zjs`|OQZttLh?ssC(R7)t5o=U99`Tk&aWv#r77Mq<>* zCiLOAFJ4R);T7bVd>t4wBKe|Q&pq%LvV!bWK;9i95@f{j^;i1F{Lp74Ec|yOI_Yl# zhs)DxbKZ?6C|qtDzdtp54tya=E?umV&RQJ;PN4gvRZocwm%izk90+>N!1&voAV&!Q z8fx5*enRNZFs_R-2r4#PA+OC_IS0IzP~ua z;zVAH*i+8~J$g-tp&2v41l#SxtF|Pz?tIUVf}Jv5H%t5sYe@FFeFfv!de2j zMOK+fl~SgOk~qLou8GH%Mn^NJq7HxIzsL6=AUfAfXUw6sokP3d1?+ zs%7Wh&Uo4W)HW3tsBh5%Vg(=fu$&j%-J-x;A{5sEXBs7fa?XfD{~Q%biBed6H+!A zBDYQTR?9X{o$3xgD_zbkF|0!)%;1*F{F50QSr(RksE$Dbt3RRxt+z)9n{Yb~fZ}dj zEiDVS8&}gl;NEU%3oBX`+j75Bb*{$C{q%$DI;p;;4jm%b+cIpC&pY0eOOs`g53!WU zbiQGJx)IP%stqTwJ_AHg^WST9L!3VLj^b-Lr0Mb3lukPc|DOH(6r09=;)Z#us***e zCNy<_Qg+rY`^cfLO^%Fvj`w2t3Vrnu(Rm)$%!>{WRqnF1c+uJn4f9ZVQXxOzfUdca zLTOj;ip=^1LhVcsH^__P7jNW;hDqVI(uHnT?fR-Ual1nK^{tLMegKYvAFPMquMus49CB#aCpMiJC| zLlZzt=P|yu$y{Hl866q^*K>;gqpg&o*C_j5b{gmP=)B9COTLVR*=W!WKcn=o`vlsE znZLUB0%u)n(t@s`h@HosI8t~l;^1i*$Z7v;j2b8I1 zROp1IN*X7a>id?gkmTP-Vw6)ml@29^Myg>?@=b>?zDbwejraHI)t3xDc{Wn))00w! zYV4R&mgV`2VjZkL(*Mh(acN?AeqT%gJvI1VPk`I9{oop5-xM0!Bv*!JJX2iWHp z@vO?un5uNtBqvS7nN;M#gN}#wImqGE|QK&R4WWB*NTI3*cjV3|Ok<9u|}-nDw-o727t^O+^J~y8b8iIuqt= zYTf=^5@nZN@yUI1P!-=%ABFZCc^d7my3pNaern2;(w*_jhiDi=Bm2uzo0nW=_gneo zs&smQ$lRwlQ)gMx${%x2TUC`byLpYeTTM?|6ux}@mZ`SmZ`w$le*N|+dG{p}%*X&g z<1uktEW5ngswJ)bk|ORy+~hxWyz!ORB;W#)p-bv*1P$c>^_a&2G|i62wFx17g7B7^ zyA--9;W8oG#>ZvVf_Fzbaz!AaEhi~16G!EJtUHB?Bb}b1bxx)F+|AOZssrqi5k}hN zcS@0==YO0pcUobr_q?Tw%X-7(Dt;2G9;d!5GumX_$ohGYJ)#BXNU3-wP)0KOUWfjT z6;I351FE#1mONP8URToFj@vfIte|(SFq885ljuot99MG<&DewC^_~4X_CW)=oJT?p z?xMuTV0ThHDTfbaKJ&&h0sg;$A^V>Q0008O|Mmam z466TK0s(A*ShcY|!>tJ*T)%O+=U?x8`TSQ_00= Date: Sat, 5 Sep 2015 12:31:53 -0600 Subject: [PATCH 184/189] Replaced custom byte-to-int with standard one. Removed duplicate code in PKWareExtraHeader. 
--- .../archivers/zip/PKWareExtraHeader.java | 442 +----------------- .../archivers/zip/X0014_X509Certificates.java | 2 +- .../zip/X0015_CertificateIdForFile.java | 22 +- ...0016_CertificateIdForCentralDirectory.java | 22 +- .../zip/X0017_StrongEncryptionHeader.java | 32 +- 5 files changed, 39 insertions(+), 481 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/PKWareExtraHeader.java b/src/main/java/org/apache/commons/compress/archivers/zip/PKWareExtraHeader.java index cb14a0088bc..babcb766ddf 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/PKWareExtraHeader.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/PKWareExtraHeader.java @@ -18,208 +18,8 @@ */ package org.apache.commons.compress.archivers.zip; -import static org.apache.commons.compress.archivers.zip.ZipUtil.signedByteToUnsignedInt; - -import java.io.FileOutputStream; -import java.io.IOException; - /** - * Strong Encryption Header (0x0017) - * - * Certificate-based encryption: - * - *
    - * Value     Size     Description
    - * -----     ----     -----------
    - * 0x0017    2 bytes  Tag for this "extra" block type
    - * TSize     2 bytes  Size of data that follows
    - * Format    2 bytes  Format definition for this record
    - * AlgID     2 bytes  Encryption algorithm identifier
    - * Bitlen    2 bytes  Bit length of encryption key (32-448 bits)
    - * Flags     2 bytes  Processing flags
    - * RCount    4 bytes  Number of recipients. 
    - * HashAlg   2 bytes  Hash algorithm identifier
    - * HSize     2 bytes  Hash size
    - * SRList    (var)    Simple list of recipients hashed public keys
    - * 
    - * Flags -   This defines the processing flags.
    - * 
    - *           
      - *
    • 0x0007 - reserved for future use - *
    • 0x000F - reserved for future use - *
    • 0x0100 - Indicates non-OAEP key wrapping was used. If this - * this field is set, the version needed to extract must - * be at least 61. This means OAEP key wrapping is not - * used when generating a Master Session Key using - * ErdData. - *
    • 0x4000 - ErdData must be decrypted using 3DES-168, otherwise use the - * same algorithm used for encrypting the file contents. - *
    • 0x8000 - reserved for future use - *
    - * - * RCount - This defines the number intended recipients whose - * public keys were used for encryption. This identifies - * the number of elements in the SRList. - * - * see also: reserved1 - * - * HashAlg - This defines the hash algorithm used to calculate - * the public key hash of each public key used - * for encryption. This field currently supports - * only the following value for SHA-1 - * - * 0x8004 - SHA1 - * - * HSize - This defines the size of a hashed public key. - * - * SRList - This is a variable length list of the hashed - * public keys for each intended recipient. Each - * element in this list is HSize. The total size of - * SRList is determined using RCount * HSize. - *
    - * - * Password-based Extra Field 0x0017 in central header only. - * - *
    - * Value     Size     Description
    - * -----     ----     -----------
    - * 0x0017    2 bytes  Tag for this "extra" block type
    - * TSize     2 bytes  Size of data that follows
    - * Format    2 bytes  Format definition for this record
    - * AlgID     2 bytes  Encryption algorithm identifier
    - * Bitlen    2 bytes  Bit length of encryption key (32-448 bits)
    - * Flags     2 bytes  Processing flags
    - * (more?)
    - * 
    - * - * Format - the data format identifier for this record. The only value - * allowed at this time is the integer value 2. - * - * Password-based Extra Field 0x0017 preceding compressed file data. - * - *
    - * Value     Size     Description
    - * -----     ----     -----------
    - * 0x0017    2 bytes  Tag for this "extra" block type
    - * IVSize    2 bytes  Size of initialization vector (IV)
    - * IVData    IVSize   Initialization vector for this file
    - * Size      4 bytes  Size of remaining decryption header data
    - * Format    2 bytes  Format definition for this record
    - * AlgID     2 bytes  Encryption algorithm identifier
    - * Bitlen    2 bytes  Bit length of encryption key (32-448 bits)
    - * Flags     2 bytes  Processing flags
    - * ErdSize   2 bytes  Size of Encrypted Random Data
    - * ErdData   ErdSize  Encrypted Random Data
    - * Reserved1 4 bytes  Reserved certificate processing data
    - * Reserved2 (var)    Reserved for certificate processing data
    - * VSize     2 bytes  Size of password validation data
    - * VData     VSize-4  Password validation data
    - * VCRC32    4 bytes  Standard ZIP CRC32 of password validation data
    - *     
    - * IVData - The size of the IV should match the algorithm block size.
    - *          The IVData can be completely random data.  If the size of
    - *          the randomly generated data does not match the block size
    - *          it should be complemented with zero's or truncated as
    - *          necessary.  If IVSize is 0,then IV = CRC32 + Uncompressed
    - *          File Size (as a 64 bit little-endian, unsigned integer value).
    - * 
    - * Format -  the data format identifier for this record.  The only
    - *           value allowed at this time is the integer value 2.
    - * 
    - * ErdData - Encrypted random data is used to store random data that
    - *           is used to generate a file session key for encrypting
    - *           each file.  SHA1 is used to calculate hash data used to
    - *           derive keys.  File session keys are derived from a master
    - *           session key generated from the user-supplied password.
    - *           If the Flags field in the decryption header contains
    - *           the value 0x4000, then the ErdData field must be
    - *           decrypted using 3DES. If the value 0x4000 is not set,
    - *           then the ErdData field must be decrypted using AlgId.
    - * 
    - * Reserved1 - Reserved for certificate processing, if value is
    - *           zero, then Reserved2 data is absent.  See the explanation
    - *           under the Certificate Processing Method for details on
    - *           this data structure.
    - * 
    - * Reserved2 - If present, the size of the Reserved2 data structure
    - *           is located by skipping the first 4 bytes of this field
    - *           and using the next 2 bytes as the remaining size.  See
    - *           the explanation under the Certificate Processing Method
    - *           for details on this data structure.
    - * 
    - * VSize - This size value will always include the 4 bytes of the
    - *         VCRC32 data and will be greater than 4 bytes.
    - * 
    - * VData - Random data for password validation.  This data is VSize
    - *         in length and VSize must be a multiple of the encryption
    - *         block size.  VCRC32 is a checksum value of VData. 
    - *         VData and VCRC32 are stored encrypted and start the
    - *         stream of encrypted data for a file.
    - * 
    - * - * - * Reserved1 - Certificate Decryption Header Reserved1 Data: - * - *
    - * Value     Size     Description
    - * -----     ----     -----------
    - * RCount    4 bytes  Number of recipients.
    - * 
    - * - * RCount - This defines the number intended recipients whose public keys were - * used for encryption. This defines the number of elements in the REList field - * defined below. - * - * - * Reserved2 - Certificate Decryption Header Reserved2 Data Structures: - * - *
    - * Value     Size     Description
    - * -----     ----     -----------
    - * HashAlg   2 bytes  Hash algorithm identifier
    - * HSize     2 bytes  Hash size
    - * REList    (var)    List of recipient data elements
    - * 
    - * HashAlg - This defines the hash algorithm used to calculate
    - *           the public key hash of each public key used
    - *           for encryption. This field currently supports
    - *           only the following value for SHA-1
    - *    
    - *               0x8004 - SHA1
    - *                
    - * HSize -   This defines the size of a hashed public key
    - *           defined in REHData.
    - * 
    - * REList -  This is a variable length of list of recipient data. 
    - *           Each element in this list consists of a Recipient
    - *           Element data structure as follows:
    - * 
    - * - * Recipient Element (REList) Data Structure: - * - *
    - * Value     Size     Description
    - * -----     ----     -----------
    - * RESize    2 bytes  Size of REHData + REKData
    - * REHData   HSize    Hash of recipients public key
    - * REKData   (var)    Simple key blob
    - * 
    - * 
    - * RESize -  This defines the size of an individual REList
    - *           element.  This value is the combined size of the
    - *           REHData field + REKData field.  REHData is defined by
    - *           HSize.  REKData is variable and can be calculated
    - *           for each REList element using RESize and HSize.
    - * 
    - * REHData - Hashed public key for this recipient.
    - * 
    - * REKData - Simple Key Blob.  The format of this data structure
    - *           is identical to that defined in the Microsoft
    - *           CryptoAPI and generated using the CryptExportKey()
    - *           function.  The version of the Simple Key Blob
    - *           supported at this time is 0x02 as defined by
    - *           Microsoft.
    - * 
    + * Base class for all PKWare strong crypto extra headers. * * Algorithm IDs - integer identifier of the encryption algorithm from * the following range @@ -252,245 +52,9 @@ *
  • 0x800D - SHA384
  • *
  • 0x800E - SHA512
  • * - * - * Flags - Processing flags needed for decryption - * - *
      - *
    • 0x0001 - Password is required to decrypt
    • - *
    • 0x0002 - Certificates only
    • - *
    • 0x0003 - Password or certificate required to decrypt
    • - *
    • 0x0007 - reserved for future use - *
    • 0x000F - reserved for future use - *
    • 0x0100 - indicates non-OAEP key wrapping was used. If this field is set - * the version needed to extract must be at least 61. This means OAEP key - * wrapping is not used when generating a Master Session Key using ErdData. - *
    • 0x4000 - ErdData must be decrypted using 3DES-168, otherwise use the same - * algorithm used for encrypting the file contents. - *
    • 0x8000 - reserved for future use. - *
    * - * See the section describing the Strong Encryption Specification for - * details. Refer to the section in this document entitled - * "Incorporating PKWARE Proprietary Technology into Your Product" for more - * information. - * - * @NotThreadSafe + * TODO: define enums for crypto and hash algorithms. */ -public class PKWareExtraHeader implements ZipExtraField { - private static final ZipShort HEADER_ID = new ZipShort(0x0017); - private static final long serialVersionUID = 1L; - - /** - * Get the header id. - * - * @return the header id - */ - public ZipShort getHeaderId() { - return HEADER_ID; - } - - /** - * Extra field data in local file data - without Header-ID or length - * specifier. - */ - private byte[] localData; - - private int format; - private int algId; - private int bitlen; - private int flags; - private int rcount; - private int hashAlg; - private int hashSize; - - // encryption data - private byte ivData[]; - private byte erdData[]; - - /** - * Convert bytes to unsigned int, LSB. - * - * @param data - * @param off - * @param len - * @return - */ - int bytesToUnsignedInt(byte[] data, int off, int len) { - int x = 0; - for (int i = 0; i < len; i++) { - x += signedByteToUnsignedInt(data[off + i]) << (8 * i); - } - - return x; - } - - /** - * Set the extra field data in the local file data - without Header-ID or - * length specifier. - * - * @param data - * the field data to use - */ - public void setLocalFileDataData(byte[] data) { - try { - FileOutputStream os = new FileOutputStream("/tmp/17.dat"); - os.write(data); - os.close(); - } catch (IOException e) { - System.out.println(e.getMessage()); - } - - localData = ZipUtil.copy(data); - } - - /** - * Get the length of the local data. - * - * @return the length of the local data - */ - public ZipShort getLocalFileDataLength() { - return new ZipShort(localData != null ? localData.length : 0); - } - - /** - * Get the local data. 
- * - * @return the local data - */ - public byte[] getLocalFileDataData() { - return ZipUtil.copy(localData); - } - - /** - * Extra field data in central directory - without Header-ID or length - * specifier. - */ - private byte[] centralData; - - /** - * Set the extra field data in central directory. - * - * @param data - * the data to use - */ - public void setCentralDirectoryData(byte[] data) { - centralData = ZipUtil.copy(data); - } - - /** - * Get the central data length. If there is no central data, get the local - * file data length. - * - * @return the central data length - */ - public ZipShort getCentralDirectoryLength() { - if (centralData != null) { - return new ZipShort(centralData.length); - } - return getLocalFileDataLength(); - } - - /** - * Get the central data. - * - * @return the central data if present, else return the local file data - */ - public byte[] getCentralDirectoryData() { - if (centralData != null) { - return ZipUtil.copy(centralData); - } - return getLocalFileDataData(); - } - - /** - * Parse central directory format. - * - * @param data - * @param offset - * @param length - */ - public void parseCentralDirectoryFormat(byte[] data, int offset, int length) { - this.format = bytesToUnsignedInt(data, offset, 2); - this.algId = bytesToUnsignedInt(data, offset + 2, 2); - this.bitlen = bytesToUnsignedInt(data, offset + 4, 2); - this.flags = bytesToUnsignedInt(data, offset + 6, 2); - - if (length > 8) { - this.rcount = bytesToUnsignedInt(data, offset + 8, 4); - this.hashAlg = bytesToUnsignedInt(data, offset + 12, 2); - this.hashSize = bytesToUnsignedInt(data, offset + 14, 2); - // srlist... 
hashed public keys - } - - System.out.printf("17: format : %d\n", this.format); - System.out.printf("17: algId : %x\n", this.algId); - System.out.printf("17: bitlen : %d\n", this.bitlen); - System.out.printf("17: flags : %x\n", this.flags); - System.out.printf("17: rcount : %d\n", this.rcount); - System.out.printf("17: hashAlg : %x\n", this.hashAlg); - System.out.printf("17: hashSize: %d\n", this.hashSize); - } - - /** - * Parse file header format. (Password only?) - * - * @param data - * @param offset - * @param length - */ - public void parseFileFormat(byte[] data, int offset, int length) { - int ivSize = bytesToUnsignedInt(data, offset, 2); - this.ivData = new byte[ivSize]; - System.arraycopy(data, offset + 4, this.ivData, 0, ivSize); - - int size = bytesToUnsignedInt(data, offset + ivSize + 2, 4); - this.format = bytesToUnsignedInt(data, offset + ivSize + 6, 2); - this.algId = bytesToUnsignedInt(data, offset + ivSize + 8, 2); - this.bitlen = bytesToUnsignedInt(data, offset + ivSize + 10, 2); - this.flags = bytesToUnsignedInt(data, offset + ivSize + 12, 2); - - int erdSize = bytesToUnsignedInt(data, offset + ivSize + 14, 2); - this.erdData = new byte[erdSize]; - System.arraycopy(data, offset + ivSize + 16, this.erdData, 0, erdSize); - // reserved - // vsize - // vdata - // vcrc32 - - System.out.printf("17: format : %d\n", this.format); - System.out.printf("17: algId : %x\n", this.algId); - System.out.printf("17: bitlen : %d\n", this.bitlen); - System.out.printf("17: flags : %x\n", this.flags); - } - - /** - * @param data - * the array of bytes. - * @param offset - * the source location in the data array. - * @param length - * the number of bytes to use in the data array. 
- * @see ZipExtraField#parseFromLocalFileData(byte[], int, int) - */ - public void parseFromLocalFileData(byte[] data, int offset, int length) { - byte[] tmp = new byte[length]; - System.arraycopy(data, offset, tmp, 0, length); - parseFileFormat(data, offset, length); - } +public abstract class PKWareExtraHeader implements ZipExtraField { - /** - * @param data - * the array of bytes. - * @param offset - * the source location in the data array. - * @param length - * the number of bytes to use in the data array. - * @see ZipExtraField#parseFromCentralDirectoryData(byte[], int, int) - */ - public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { - byte[] tmp = new byte[length]; - System.arraycopy(data, offset, tmp, 0, length); - setCentralDirectoryData(tmp); - parseCentralDirectoryFormat(data, offset, length); - } } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java index 41f6b2ce06b..31fdf174ddd 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java @@ -176,7 +176,7 @@ public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { setLocalFileDataData(tmp); } - this.rcount = bytesToUnsignedInt(data, offset, 2); + this.rcount = ZipShort.getValue(data, offset); System.out.printf("14: rcount: %d\n", rcount); } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java index 6694512a7a8..b988ec198d3 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java @@ -18,8 +18,6 @@ */ package 
org.apache.commons.compress.archivers.zip; -import static org.apache.commons.compress.archivers.zip.ZipUtil.signedByteToUnsignedInt; - import java.io.FileOutputStream; import java.io.IOException; @@ -186,20 +184,20 @@ public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { setLocalFileDataData(tmp); } - this.rcount = bytesToUnsignedInt(data, offset, 2); - this.hashAlg = bytesToUnsignedInt(data, offset + 2, 2); + this.rcount = ZipShort.getValue(data, offset); + this.hashAlg = ZipShort.getValue(data, offset + 2); System.out.printf("15: rcount: %d\n", rcount); System.out.printf("15: hashAlg: %x\n", hashAlg); - System.out.printf("15: [2] %d %x\n", bytesToUnsignedInt(data, offset + 4, 2), - bytesToUnsignedInt(data, offset + 4, 2)); - System.out.printf("15: [3] %d %x\n", bytesToUnsignedInt(data, offset + 6, 4), - bytesToUnsignedInt(data, offset + 6, 4)); - System.out.printf("15: [4] %d %x\n", bytesToUnsignedInt(data, offset + 10, 4), - bytesToUnsignedInt(data, offset + 10, 4)); - System.out.printf("15: [5] %d %x\n", bytesToUnsignedInt(data, offset + 14, 2), - bytesToUnsignedInt(data, offset + 14, 2)); + System.out.printf("15: [2] %d %x\n", ZipShort.getValue(data, offset + 4), + ZipShort.getValue(data, offset + 4)); + System.out.printf("15: [3] %d %x\n", ZipLong.getValue(data, offset + 6), + ZipLong.getValue(data, offset + 6)); + System.out.printf("15: [4] %d %x\n", ZipLong.getValue(data, offset + 10), + ZipLong.getValue(data, offset + 10)); + System.out.printf("15: [5] %d %x\n", ZipShort.getValue(data, offset + 14), + ZipShort.getValue(data, offset + 14)); } } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java index 2bc5af7533b..6431756da6a 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java +++ 
b/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java @@ -18,8 +18,6 @@ */ package org.apache.commons.compress.archivers.zip; -import static org.apache.commons.compress.archivers.zip.ZipUtil.signedByteToUnsignedInt; - import java.io.FileOutputStream; import java.io.IOException; @@ -175,19 +173,19 @@ public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { System.arraycopy(data, offset, tmp, 0, length); setCentralDirectoryData(tmp); - this.rcount = bytesToUnsignedInt(data, offset, 2); - this.hashAlg = bytesToUnsignedInt(data, offset + 2, 2); + this.rcount = ZipShort.getValue(data, offset); + this.hashAlg = ZipShort.getValue(data, offset + 2); System.out.printf("16: rcount: %d\n", rcount); System.out.printf("16: hashAlg: %x\n", hashAlg); - System.out.printf("16: [2] %d %x\n", bytesToUnsignedInt(data, offset + 4, 2), - bytesToUnsignedInt(data, offset + 4, 2)); - System.out.printf("16: [3] %d %x\n", bytesToUnsignedInt(data, offset + 6, 4), - bytesToUnsignedInt(data, offset + 6, 4)); - System.out.printf("16: [4] %d %x\n", bytesToUnsignedInt(data, offset + 10, 4), - bytesToUnsignedInt(data, offset + 10, 4)); - System.out.printf("16: [5] %d %x\n", bytesToUnsignedInt(data, offset + 14, 2), - bytesToUnsignedInt(data, offset + 14, 2)); + System.out.printf("16: [2] %d %x\n", ZipShort.getValue(data, offset + 4), + ZipShort.getValue(data, offset + 4)); + System.out.printf("16: [3] %d %x\n", ZipLong.getValue(data, offset + 6), + ZipLong.getValue(data, offset + 6)); + System.out.printf("16: [4] %d %x\n", ZipLong.getValue(data, offset + 10), + ZipLong.getValue(data, offset + 10)); + System.out.printf("16: [5] %d %x\n", ZipShort.getValue(data, offset + 14), + ZipShort.getValue(data, offset + 14)); } } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java index 
8eb0657c5f1..1c034eee022 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java @@ -18,8 +18,6 @@ */ package org.apache.commons.compress.archivers.zip; -import static org.apache.commons.compress.archivers.zip.ZipUtil.signedByteToUnsignedInt; - import java.io.FileOutputStream; import java.io.IOException; @@ -267,7 +265,7 @@ public ZipShort getHeaderId() { private int algId; private int bitlen; private int flags; - private int rcount; + private long rcount; private int hashAlg; private int hashSize; @@ -361,15 +359,15 @@ public byte[] getCentralDirectoryData() { * @param length */ public void parseCentralDirectoryFormat(byte[] data, int offset, int length) { - this.format = bytesToUnsignedInt(data, offset, 2); - this.algId = bytesToUnsignedInt(data, offset + 2, 2); - this.bitlen = bytesToUnsignedInt(data, offset + 4, 2); - this.flags = bytesToUnsignedInt(data, offset + 6, 2); + this.format = ZipShort.getValue(data, offset); + this.algId = ZipShort.getValue(data, offset + 2); + this.bitlen = ZipShort.getValue(data, offset + 4); + this.flags = ZipShort.getValue(data, offset + 6); if (length > offset + 8) { - this.rcount = bytesToUnsignedInt(data, offset + 8, 4); - this.hashAlg = bytesToUnsignedInt(data, offset + 12, 2); - this.hashSize = bytesToUnsignedInt(data, offset + 14, 2); + this.rcount = ZipLong.getValue(data, offset + 8); + this.hashAlg = ZipShort.getValue(data, offset + 12); + this.hashSize = ZipShort.getValue(data, offset + 14); // srlist... 
hashed public keys } @@ -390,17 +388,17 @@ public void parseCentralDirectoryFormat(byte[] data, int offset, int length) { * @param length */ public void parseFileFormat(byte[] data, int offset, int length) { - int ivSize = bytesToUnsignedInt(data, offset, 2); + int ivSize = ZipShort.getValue(data, offset); this.ivData = new byte[ivSize]; System.arraycopy(data, offset + 4, this.ivData, 0, ivSize); - int size = bytesToUnsignedInt(data, offset + ivSize + 2, 4); - this.format = bytesToUnsignedInt(data, offset + ivSize + 6, 2); - this.algId = bytesToUnsignedInt(data, offset + ivSize + 8, 2); - this.bitlen = bytesToUnsignedInt(data, offset + ivSize + 10, 2); - this.flags = bytesToUnsignedInt(data, offset + ivSize + 12, 2); + long size = ZipLong.getValue(data, offset + ivSize + 2); + this.format =ZipShort.getValue(data, offset + ivSize + 6); + this.algId = ZipShort.getValue(data, offset + ivSize + 8); + this.bitlen = ZipShort.getValue(data, offset + ivSize + 10); + this.flags = ZipShort.getValue(data, offset + ivSize + 12); - int erdSize = bytesToUnsignedInt(data, offset + ivSize + 14, 2); + int erdSize = ZipShort.getValue(data, offset + ivSize + 14); this.erdData = new byte[erdSize]; System.arraycopy(data, offset + ivSize + 16, this.erdData, 0, erdSize); // reserved From d24553fa8eda71eeecf918e19a8d8fa481b54d02 Mon Sep 17 00:00:00 2001 From: Bear Giles Date: Sat, 5 Sep 2015 14:43:06 -0600 Subject: [PATCH 185/189] Created enum for EncryptionAlgorithm and HashAlgorithm. 
--- .../archivers/zip/PKWareExtraHeader.java | 106 ++++++++++++++++++ .../zip/X0015_CertificateIdForFile.java | 26 +++-- ...0016_CertificateIdForCentralDirectory.java | 24 ++-- .../zip/X0017_StrongEncryptionHeader.java | 18 +-- 4 files changed, 141 insertions(+), 33 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/PKWareExtraHeader.java b/src/main/java/org/apache/commons/compress/archivers/zip/PKWareExtraHeader.java index babcb766ddf..fbd1c0920c5 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/PKWareExtraHeader.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/PKWareExtraHeader.java @@ -18,6 +18,11 @@ */ package org.apache.commons.compress.archivers.zip; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.zip.ZipEntry; + /** * Base class for all PKWare strong crypto extra headers. * @@ -57,4 +62,105 @@ */ public abstract class PKWareExtraHeader implements ZipExtraField { + /** + * Encryption algorithm. + */ + public enum EncryptionAlgorithm { + DES(0x6601), + RC2pre52(0x6602), + TripleDES168(0x6603), + TripleDES192(0x6609), + AES128(0x660E), + AES192(0x660F), + AES256(0x6610), + RC2(0x6702), + RC4(0x6801), + UNKNOWN(0xFFFF); + + private final int code; + + private static final Map codeToEnum; + + static { + Map cte = new HashMap(); + for (EncryptionAlgorithm method : values()) { + cte.put(Integer.valueOf(method.getCode()), method); + } + codeToEnum = Collections.unmodifiableMap(cte); + } + + /** + * private constructor for enum style class. + */ + EncryptionAlgorithm(int code) { + this.code = code; + } + + /** + * the algorithm id. + * + * @return the PKWare AlgorithmId + */ + public int getCode() { + return code; + } + + /** + * returns the EncryptionAlgorithm for the given code or null if the + * method is not known. 
+ */ + public static EncryptionAlgorithm getAlgorithmByCode(int code) { + return codeToEnum.get(Integer.valueOf(code)); + } + } + + /** + * Hash Algorithm + */ + public enum HashAlgorithm { + NONE(0), + CRC32(1), + MD5(0x8003), + SHA1(0x8004), + RIPEND160(0x8007), + SHA256(0x800C), + SHA384(0x800D), + SHA512(0x800E); + + private final int code; + + private static final Map codeToEnum; + + static { + Map cte = new HashMap(); + for (HashAlgorithm method : values()) { + cte.put(Integer.valueOf(method.getCode()), method); + } + codeToEnum = Collections.unmodifiableMap(cte); + } + + /** + * private constructor for enum style class. + */ + HashAlgorithm(int code) { + this.code = code; + } + + /** + * the hash algorithm ID. + * + * @return the PKWare hashAlg + */ + public int getCode() { + return code; + } + + /** + * returns the HashAlgorithm for the given code or null if the method is + * not known. + */ + public static HashAlgorithm getAlgorithmByCode(int code) { + return codeToEnum.get(Integer.valueOf(code)); + } + } } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java index b988ec198d3..1a559209d13 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java @@ -61,7 +61,7 @@ public ZipShort getHeaderId() { private byte[] localData; private int rcount; - private int hashAlg; + private HashAlgorithm hashAlg; /** * Set the extra field data in the local file data - without Header-ID or @@ -185,19 +185,21 @@ public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { } this.rcount = ZipShort.getValue(data, offset); - this.hashAlg = ZipShort.getValue(data, offset + 2); + this.hashAlg = HashAlgorithm.getAlgorithmByCode(ZipShort.getValue(data, offset + 2)); System.out.printf("15: 
rcount: %d\n", rcount); - System.out.printf("15: hashAlg: %x\n", hashAlg); - - System.out.printf("15: [2] %d %x\n", ZipShort.getValue(data, offset + 4), - ZipShort.getValue(data, offset + 4)); - System.out.printf("15: [3] %d %x\n", ZipLong.getValue(data, offset + 6), - ZipLong.getValue(data, offset + 6)); - System.out.printf("15: [4] %d %x\n", ZipLong.getValue(data, offset + 10), - ZipLong.getValue(data, offset + 10)); - System.out.printf("15: [5] %d %x\n", ZipShort.getValue(data, offset + 14), - ZipShort.getValue(data, offset + 14)); + System.out.printf("15: hashAlg: %s\n", hashAlg); + + int size = ZipShort.getValue(data, offset + 4); + + //System.out.printf("16: [2] %d %x\n", ZipShort.getValue(data, offset + 4), + // ZipShort.getValue(data, offset + 4)); + //System.out.printf("16: [3] %d %x\n", ZipShort.getValue(data, offset + 6), + // ZipShort.getValue(data, offset + 6)); + System.out.printf("15: len: %d, offset+size: %d\n", length, size + 8); + int size2 = ZipShort.getValue(data, offset + 6 + size); + System.out.printf("15: size2: %d\n", size2); + System.out.printf("15: len: %d, offset+size*: %d\n", length, size + 10 + size2); } } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java index 6431756da6a..cec0777c0b0 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java @@ -62,7 +62,7 @@ public ZipShort getHeaderId() { private byte[] localData; private int rcount; - private int hashAlg; + private HashAlgorithm hashAlg; /** * Set the extra field data in the local file data - without Header-ID or @@ -174,18 +174,18 @@ public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { setCentralDirectoryData(tmp); this.rcount = 
ZipShort.getValue(data, offset); - this.hashAlg = ZipShort.getValue(data, offset + 2); + this.hashAlg = HashAlgorithm.getAlgorithmByCode(ZipShort.getValue(data, offset + 2)); System.out.printf("16: rcount: %d\n", rcount); - System.out.printf("16: hashAlg: %x\n", hashAlg); - - System.out.printf("16: [2] %d %x\n", ZipShort.getValue(data, offset + 4), - ZipShort.getValue(data, offset + 4)); - System.out.printf("16: [3] %d %x\n", ZipLong.getValue(data, offset + 6), - ZipLong.getValue(data, offset + 6)); - System.out.printf("16: [4] %d %x\n", ZipLong.getValue(data, offset + 10), - ZipLong.getValue(data, offset + 10)); - System.out.printf("16: [5] %d %x\n", ZipShort.getValue(data, offset + 14), - ZipShort.getValue(data, offset + 14)); + System.out.printf("16: hashAlg: %s\n", hashAlg); + int size = ZipShort.getValue(data, offset + 4); + int size2 = ZipShort.getValue(data, offset + 6); + System.out.printf("16: diff: %d\n", size - size2); + + //System.out.printf("16: [2] %d %x\n", ZipShort.getValue(data, offset + 4), + // ZipShort.getValue(data, offset + 4)); + //System.out.printf("16: [3] %d %x\n", ZipShort.getValue(data, offset + 6), + // ZipShort.getValue(data, offset + 6)); + System.out.printf("16: len: %d, offset+size: %d\n", length, size + 8); } } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java index 1c034eee022..b2410d994a5 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java @@ -262,11 +262,11 @@ public ZipShort getHeaderId() { private byte[] localData; private int format; - private int algId; + private EncryptionAlgorithm algId; private int bitlen; private int flags; private long rcount; - private int hashAlg; + private HashAlgorithm hashAlg; private int hashSize; // encryption 
data @@ -360,23 +360,23 @@ public byte[] getCentralDirectoryData() { */ public void parseCentralDirectoryFormat(byte[] data, int offset, int length) { this.format = ZipShort.getValue(data, offset); - this.algId = ZipShort.getValue(data, offset + 2); + this.algId = EncryptionAlgorithm.getAlgorithmByCode(ZipShort.getValue(data, offset + 2)); this.bitlen = ZipShort.getValue(data, offset + 4); this.flags = ZipShort.getValue(data, offset + 6); if (length > offset + 8) { this.rcount = ZipLong.getValue(data, offset + 8); - this.hashAlg = ZipShort.getValue(data, offset + 12); + this.hashAlg = HashAlgorithm.getAlgorithmByCode(ZipShort.getValue(data, offset + 12)); this.hashSize = ZipShort.getValue(data, offset + 14); // srlist... hashed public keys } System.out.printf("17: format : %d\n", this.format); - System.out.printf("17: algId : %x\n", this.algId); + System.out.printf("17: algId : %s\n", this.algId); System.out.printf("17: bitlen : %d\n", this.bitlen); System.out.printf("17: flags : %x\n", this.flags); System.out.printf("17: rcount : %d\n", this.rcount); - System.out.printf("17: hashAlg : %x\n", this.hashAlg); + System.out.printf("17: hashAlg : %s\n", this.hashAlg); System.out.printf("17: hashSize: %d\n", this.hashSize); } @@ -393,8 +393,8 @@ public void parseFileFormat(byte[] data, int offset, int length) { System.arraycopy(data, offset + 4, this.ivData, 0, ivSize); long size = ZipLong.getValue(data, offset + ivSize + 2); - this.format =ZipShort.getValue(data, offset + ivSize + 6); - this.algId = ZipShort.getValue(data, offset + ivSize + 8); + this.format = ZipShort.getValue(data, offset + ivSize + 6); + this.algId = EncryptionAlgorithm.getAlgorithmByCode(ZipShort.getValue(data, offset + ivSize + 8)); this.bitlen = ZipShort.getValue(data, offset + ivSize + 10); this.flags = ZipShort.getValue(data, offset + ivSize + 12); @@ -407,7 +407,7 @@ public void parseFileFormat(byte[] data, int offset, int length) { // vcrc32 System.out.printf("17: format : %d\n", this.format); 
- System.out.printf("17: algId : %x\n", this.algId); + System.out.printf("17: algId : %s\n", this.algId); System.out.printf("17: bitlen : %d\n", this.bitlen); System.out.printf("17: flags : %x\n", this.flags); } From 3d647e4f47d6f15413df21a88ff0e90ca4558671 Mon Sep 17 00:00:00 2001 From: Bear Giles Date: Sat, 5 Sep 2015 15:00:59 -0600 Subject: [PATCH 186/189] Cleaned up messages. --- .../archivers/zip/X0014_X509Certificates.java | 2 +- .../zip/X0015_CertificateIdForFile.java | 8 ++++--- ...0016_CertificateIdForCentralDirectory.java | 8 +++++-- .../zip/X0017_StrongEncryptionHeader.java | 21 +++++++------------ 4 files changed, 20 insertions(+), 19 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java index 31fdf174ddd..8f6a2d4e25c 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java @@ -178,6 +178,6 @@ public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { this.rcount = ZipShort.getValue(data, offset); - System.out.printf("14: rcount: %d\n", rcount); + System.out.printf("X509Certificates: rcount?: %d\n", rcount); } } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java index 1a559209d13..6ac580acbdd 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java @@ -36,6 +36,8 @@ * ----- ---- ----------- * (CID) 0x0015 2 bytes Tag for this "extra" block type * TSize 2 bytes Size of data that follows + * RCount 4 bytes Number of recipients. (inferred) + * HashAlg 2 bytes Hash algorithm identifier. 
(inferred) * TData TSize Signature Data * * @@ -187,9 +189,9 @@ public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { this.rcount = ZipShort.getValue(data, offset); this.hashAlg = HashAlgorithm.getAlgorithmByCode(ZipShort.getValue(data, offset + 2)); - System.out.printf("15: rcount: %d\n", rcount); - System.out.printf("15: hashAlg: %s\n", hashAlg); + System.out.printf("CertificateId For File (CD): rcount: %d, hashAlg: %s\n", rcount, hashAlg); + /* int size = ZipShort.getValue(data, offset + 4); //System.out.printf("16: [2] %d %x\n", ZipShort.getValue(data, offset + 4), @@ -200,6 +202,6 @@ public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { int size2 = ZipShort.getValue(data, offset + 6 + size); System.out.printf("15: size2: %d\n", size2); System.out.printf("15: len: %d, offset+size*: %d\n", length, size + 10 + size2); + */ } - } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java index cec0777c0b0..6616d21b218 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java @@ -37,6 +37,8 @@ * ----- ---- ----------- * (CDID) 0x0016 2 bytes Tag for this "extra" block type * TSize 2 bytes Size of data that follows + * RCount 4 bytes Number of recipients. (inferred) + * HashAlg 2 bytes Hash algorithm identifier. 
(inferred) * TData TSize Data * * @@ -176,8 +178,9 @@ public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { this.rcount = ZipShort.getValue(data, offset); this.hashAlg = HashAlgorithm.getAlgorithmByCode(ZipShort.getValue(data, offset + 2)); - System.out.printf("16: rcount: %d\n", rcount); - System.out.printf("16: hashAlg: %s\n", hashAlg); + System.out.printf("CertificateId For CD (CD): rcount: %d, hashAlg: %s\n", rcount, hashAlg); + + /* int size = ZipShort.getValue(data, offset + 4); int size2 = ZipShort.getValue(data, offset + 6); System.out.printf("16: diff: %d\n", size - size2); @@ -187,5 +190,6 @@ public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { //System.out.printf("16: [3] %d %x\n", ZipShort.getValue(data, offset + 6), // ZipShort.getValue(data, offset + 6)); System.out.printf("16: len: %d, offset+size: %d\n", length, size + 8); + */ } } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java index b2410d994a5..46cba696922 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java @@ -363,21 +363,19 @@ public void parseCentralDirectoryFormat(byte[] data, int offset, int length) { this.algId = EncryptionAlgorithm.getAlgorithmByCode(ZipShort.getValue(data, offset + 2)); this.bitlen = ZipShort.getValue(data, offset + 4); this.flags = ZipShort.getValue(data, offset + 6); + this.rcount = ZipLong.getValue(data, offset + 8); - if (length > offset + 8) { - this.rcount = ZipLong.getValue(data, offset + 8); + if (rcount > 0) { this.hashAlg = HashAlgorithm.getAlgorithmByCode(ZipShort.getValue(data, offset + 12)); this.hashSize = ZipShort.getValue(data, offset + 14); // srlist... 
hashed public keys } - System.out.printf("17: format : %d\n", this.format); - System.out.printf("17: algId : %s\n", this.algId); - System.out.printf("17: bitlen : %d\n", this.bitlen); - System.out.printf("17: flags : %x\n", this.flags); - System.out.printf("17: rcount : %d\n", this.rcount); - System.out.printf("17: hashAlg : %s\n", this.hashAlg); - System.out.printf("17: hashSize: %d\n", this.hashSize); + if (rcount > 0) { + System.out.printf("Strong Encryption Header (CD): alg: %s, flags: %x, rcount: %d, hash: %s\n", this.algId, this.flags, this.rcount, this.hashAlg); + } else { + System.out.printf("Strong Encryption Header (CD): alg: %s, flags: %x\n", this.algId, this.flags); + } } /** @@ -406,10 +404,7 @@ public void parseFileFormat(byte[] data, int offset, int length) { // vdata // vcrc32 - System.out.printf("17: format : %d\n", this.format); - System.out.printf("17: algId : %s\n", this.algId); - System.out.printf("17: bitlen : %d\n", this.bitlen); - System.out.printf("17: flags : %x\n", this.flags); + System.out.printf("Strong Encryption Header (file): alg: %s, flags: %x\n", this.algId, this.flags); } /** From a916a718b7589c60ac792d92c99aed354dd04c2f Mon Sep 17 00:00:00 2001 From: Bear Giles Date: Sun, 20 Sep 2015 20:27:42 -0600 Subject: [PATCH 187/189] Removed debugging scaffolding. 
--- .../archivers/zip/X0014_X509Certificates.java | 17 ---- .../zip/X0015_CertificateIdForFile.java | 53 ++++-------- ...0016_CertificateIdForCentralDirectory.java | 41 ++++----- .../zip/X0017_StrongEncryptionHeader.java | 84 ++++++++++++++----- ...19_EncryptionRecipientCertificateList.java | 1 - 5 files changed, 94 insertions(+), 102 deletions(-) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java index 8f6a2d4e25c..669700bb213 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0014_X509Certificates.java @@ -61,8 +61,6 @@ public ZipShort getHeaderId() { */ private byte[] localData; - private int rcount; - /** * Set the extra field data in the local file data - without Header-ID or * length specifier. @@ -71,17 +69,6 @@ public ZipShort getHeaderId() { * the field data to use */ public void setLocalFileDataData(byte[] data) { - byte[] data1 = new byte[data.length - 2]; - System.arraycopy(data1, 0, data, 2, data.length - 2); - - try { - FileOutputStream os = new FileOutputStream("/tmp/14.dat"); - os.write(data); - os.close(); - } catch (IOException e) { - System.out.println(e.getMessage()); - } - localData = ZipUtil.copy(data); } @@ -175,9 +162,5 @@ public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { if (localData == null) { setLocalFileDataData(tmp); } - - this.rcount = ZipShort.getValue(data, offset); - - System.out.printf("X509Certificates: rcount?: %d\n", rcount); } } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java index 6ac580acbdd..44295db3741 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java +++ 
b/src/main/java/org/apache/commons/compress/archivers/zip/X0015_CertificateIdForFile.java @@ -18,9 +18,6 @@ */ package org.apache.commons.compress.archivers.zip; -import java.io.FileOutputStream; -import java.io.IOException; - /** * X.509 Certificate ID and Signature for individual file (0x0015): * @@ -64,6 +61,22 @@ public ZipShort getHeaderId() { private int rcount; private HashAlgorithm hashAlg; + + /** + * Get record count. + * @return + */ + public int getRecordCount() { + return rcount; + } + + /** + * Get hash algorithm. + * @return + */ + public HashAlgorithm getHashAlgorithm() { + return hashAlg; + } /** * Set the extra field data in the local file data - without Header-ID or @@ -73,14 +86,6 @@ public ZipShort getHeaderId() { * the field data to use */ public void setLocalFileDataData(byte[] data) { - try { - FileOutputStream os = new FileOutputStream("/tmp/15.dat"); - os.write(data); - os.close(); - } catch (IOException e) { - System.out.println(e.getMessage()); - } - localData = ZipUtil.copy(data); } @@ -115,17 +120,6 @@ public byte[] getLocalFileDataData() { * the data to use */ public void setCentralDirectoryData(byte[] data) { - try { - FileOutputStream os = new FileOutputStream("/tmp/15.dat"); - os.write(data); - os.close(); - - os = new FileOutputStream("/tmp/15.2.dat"); - os.write(data, 2, data.length - 2); - os.close(); - } catch (IOException e) { - System.out.println(e.getMessage()); - } centralData = ZipUtil.copy(data); } @@ -188,20 +182,5 @@ public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { this.rcount = ZipShort.getValue(data, offset); this.hashAlg = HashAlgorithm.getAlgorithmByCode(ZipShort.getValue(data, offset + 2)); - - System.out.printf("CertificateId For File (CD): rcount: %d, hashAlg: %s\n", rcount, hashAlg); - - /* - int size = ZipShort.getValue(data, offset + 4); - - //System.out.printf("16: [2] %d %x\n", ZipShort.getValue(data, offset + 4), - // ZipShort.getValue(data, offset + 4)); - 
//System.out.printf("16: [3] %d %x\n", ZipShort.getValue(data, offset + 6), - // ZipShort.getValue(data, offset + 6)); - System.out.printf("15: len: %d, offset+size: %d\n", length, size + 8); - int size2 = ZipShort.getValue(data, offset + 6 + size); - System.out.printf("15: size2: %d\n", size2); - System.out.printf("15: len: %d, offset+size*: %d\n", length, size + 10 + size2); - */ } } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java index 6616d21b218..2696c6128f2 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0016_CertificateIdForCentralDirectory.java @@ -18,9 +18,6 @@ */ package org.apache.commons.compress.archivers.zip; -import java.io.FileOutputStream; -import java.io.IOException; - /** * X.509 Certificate ID and Signature for central directory (0x0016): * @@ -66,6 +63,22 @@ public ZipShort getHeaderId() { private int rcount; private HashAlgorithm hashAlg; + /** + * Get record count. + * @return + */ + public int getRecordCount() { + return rcount; + } + + /** + * Get hash algorithm. + * @return + */ + public HashAlgorithm getHashAlgorithm() { + return hashAlg; + } + /** * Set the extra field data in the local file data - without Header-ID or * length specifier. 
@@ -108,14 +121,6 @@ public byte[] getLocalFileDataData() { * the data to use */ public void setCentralDirectoryData(byte[] data) { - try { - FileOutputStream os = new FileOutputStream("/tmp/16.dat"); - os.write(data); - os.close(); - } catch (IOException e) { - System.out.println(e.getMessage()); - } - centralData = ZipUtil.copy(data); } @@ -177,19 +182,5 @@ public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { this.rcount = ZipShort.getValue(data, offset); this.hashAlg = HashAlgorithm.getAlgorithmByCode(ZipShort.getValue(data, offset + 2)); - - System.out.printf("CertificateId For CD (CD): rcount: %d, hashAlg: %s\n", rcount, hashAlg); - - /* - int size = ZipShort.getValue(data, offset + 4); - int size2 = ZipShort.getValue(data, offset + 6); - System.out.printf("16: diff: %d\n", size - size2); - - //System.out.printf("16: [2] %d %x\n", ZipShort.getValue(data, offset + 4), - // ZipShort.getValue(data, offset + 4)); - //System.out.printf("16: [3] %d %x\n", ZipShort.getValue(data, offset + 6), - // ZipShort.getValue(data, offset + 6)); - System.out.printf("16: len: %d, offset+size: %d\n", length, size + 8); - */ } } diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java index 46cba696922..3b31ad2731d 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java @@ -18,9 +18,6 @@ */ package org.apache.commons.compress.archivers.zip; -import java.io.FileOutputStream; -import java.io.IOException; - /** * Strong Encryption Header (0x0017) * @@ -272,7 +269,39 @@ public ZipShort getHeaderId() { // encryption data private byte ivData[]; private byte erdData[]; + + // encryption key + private byte recipientKeyHash[]; + private byte keyBlob[]; + + // password verification data 
+ private byte vData[]; + private byte vCRC32[]; + /** + * Get record count. + * @return + */ + public long getRecordCount() { + return rcount; + } + + /** + * Get hash algorithm. + * @return + */ + public HashAlgorithm getHashAlgorithm() { + return hashAlg; + } + + /** + * Get encryption algorithm. + * @return + */ + public EncryptionAlgorithm getEncryptionAlgorithm() { + return algId; + } + /** * Set the extra field data in the local file data - without Header-ID or * length specifier. @@ -281,14 +310,6 @@ public ZipShort getHeaderId() { * the field data to use */ public void setLocalFileDataData(byte[] data) { - try { - FileOutputStream os = new FileOutputStream("/tmp/17.dat"); - os.write(data); - os.close(); - } catch (IOException e) { - System.out.println(e.getMessage()); - } - localData = ZipUtil.copy(data); } @@ -369,12 +390,11 @@ public void parseCentralDirectoryFormat(byte[] data, int offset, int length) { this.hashAlg = HashAlgorithm.getAlgorithmByCode(ZipShort.getValue(data, offset + 12)); this.hashSize = ZipShort.getValue(data, offset + 14); // srlist... 
hashed public keys - } - - if (rcount > 0) { - System.out.printf("Strong Encryption Header (CD): alg: %s, flags: %x, rcount: %d, hash: %s\n", this.algId, this.flags, this.rcount, this.hashAlg); - } else { - System.out.printf("Strong Encryption Header (CD): alg: %s, flags: %x\n", this.algId, this.flags); + for (int i = 0; i < this.rcount; i++) { + for (int j = 0; j < this.hashSize; j++) { + // ZipUtil.signedByteToUnsignedInt(data[offset + 16 + (i * this.hashSize) + j])); + } + } } } @@ -399,12 +419,32 @@ public void parseFileFormat(byte[] data, int offset, int length) { int erdSize = ZipShort.getValue(data, offset + ivSize + 14); this.erdData = new byte[erdSize]; System.arraycopy(data, offset + ivSize + 16, this.erdData, 0, erdSize); - // reserved - // vsize - // vdata - // vcrc32 + + this.rcount = ZipLong.getValue(data, offset + ivSize + 16 + erdSize); + System.out.println("rcount: " + rcount); + if (rcount == 0) { + int vSize = ZipShort.getValue(data, offset + ivSize + 20 + erdSize); + this.vData = new byte[vSize - 4]; + this.vCRC32 = new byte[4]; + System.arraycopy(data, offset + ivSize + 22 + erdSize , this.vData, 0, vSize - 4); + System.arraycopy(data, offset + ivSize + 22 + erdSize + vSize - 4, vCRC32, 0, 4); + } else { + this.hashAlg = HashAlgorithm.getAlgorithmByCode(ZipShort.getValue(data, offset + ivSize + 20 + erdSize)); + this.hashSize = ZipShort.getValue(data, offset + ivSize + 22 + erdSize); + int resize = ZipShort.getValue(data, offset + ivSize + 24 + erdSize); + this.recipientKeyHash = new byte[this.hashSize]; + this.keyBlob = new byte[resize - this.hashSize]; + System.arraycopy(data, offset + ivSize + 24 + erdSize, this.recipientKeyHash, 0, this.hashSize); + System.arraycopy(data, offset + ivSize + 24 + erdSize + this.hashSize, this.keyBlob, 0, resize - this.hashSize); - System.out.printf("Strong Encryption Header (file): alg: %s, flags: %x\n", this.algId, this.flags); + int vSize = ZipShort.getValue(data, offset + ivSize + 26 + erdSize + resize); + 
this.vData = new byte[vSize - 4]; + this.vCRC32 = new byte[4]; + System.arraycopy(data, offset + ivSize + 22 + erdSize + resize, this.vData, 0, vSize - 4); + System.arraycopy(data, offset + ivSize + 22 + erdSize + resize + vSize - 4, vCRC32, 0, 4); + } + + // validate values? } /** diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0019_EncryptionRecipientCertificateList.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0019_EncryptionRecipientCertificateList.java index e300d757b39..2fad2771ff7 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0019_EncryptionRecipientCertificateList.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0019_EncryptionRecipientCertificateList.java @@ -168,5 +168,4 @@ public void parseFromCentralDirectoryData(byte[] data, int offset, int length) { setLocalFileDataData(tmp); } } - } From 985b43c15edfad71a6b3293fa995d8fd0d1ef2b4 Mon Sep 17 00:00:00 2001 From: Bear Giles Date: Sat, 24 Oct 2015 09:59:17 -0600 Subject: [PATCH 188/189] Added url for keyblob information. --- .../compress/archivers/zip/X0017_StrongEncryptionHeader.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java b/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java index 3b31ad2731d..ca9c8291e4f 100644 --- a/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java +++ b/src/main/java/org/apache/commons/compress/archivers/zip/X0017_StrongEncryptionHeader.java @@ -214,6 +214,8 @@ * function. The version of the Simple Key Blob * supported at this time is 0x02 as defined by * Microsoft. 
+ * + * For more details see https://msdn.microsoft.com/en-us/library/aa920051.aspx * * * Flags - Processing flags needed for decryption From ed4b3326ec487b68e6899627b1a0f8e0dcc2e070 Mon Sep 17 00:00:00 2001 From: Bear Giles Date: Sat, 24 Oct 2015 10:43:50 -0600 Subject: [PATCH 189/189] Merged with upstream. (I think) --- pom.xml | 265 ------------------ .../compressors/lzw/LZWInputStream.java | 14 +- .../compressors/z/ZCompressorInputStream.java | 9 - 3 files changed, 7 insertions(+), 281 deletions(-) diff --git a/pom.xml b/pom.xml index 2b4638ac2cd..bae00b752da 100644 --- a/pom.xml +++ b/pom.xml @@ -20,12 +20,7 @@ org.apache.commons commons-parent -<<<<<<< HEAD -<<<<<<< HEAD - 35 -======= 38 ->>>>>>> origin/master org.apache.commons @@ -321,266 +316,6 @@ jar, tar, zip, dump, 7z, arj. .gitignore .gitattributes .projectile -======= - 34 - - - org.apache.commons - commons-compress2 - 2.0.0-SNAPSHOT - Apache Commons Compress - http://commons.apache.org/proper/commons-compress/ - - -Apache Commons Compress software defines an API for working with compression and archive formats. -These include: bzip2, gzip, pack200, lzma, xz and ar, cpio, jar, tar, zip, dump, 7z, arj. 
- - - - 1.7 - 1.7 - compress - COMPRESS - 12310904 - - 2.0.0 - RC1 - - - - jira - http://issues.apache.org/jira/browse/COMPRESS - - - - - junit - junit - 4.11 - test - - - org.tukaani - xz - 1.4 - - - - - - Torsten Curdt - tcurdt - tcurdt at apache.org - - - Stefan Bodewig - bodewig - bodewig at apache.org - - - Sebastian Bazley - sebb - sebb at apache.org - - - Christian Grobmeier - grobmeier - grobmeier at apache.org - - - Julius Davies - julius - julius at apache.org - - - Damjan Jovanovic - damjan - damjan at apache.org - - - Emmanuel Bourg - ebourg - ebourg at apache.org - - - - - - Wolfgang Glas - wolfgang.glas at ev-i.at - - - Christian Kohlschütte - ck@newsclub.de - - - Bear Giles - bgiles@coyotesong.com - - - Michael Kuss - mail at michael minus kuss.de - - - Lasse Collin - lasse.collin@tukaani.org - - - John Kodis - - - BELUGA BEHR - - - - - scm:svn:http://svn.apache.org/repos/asf/commons/proper/compress/trunk - scm:svn:https://svn.apache.org/repos/asf/commons/proper/compress/trunk - http://svn.apache.org/repos/asf/commons/proper/compress/trunk - - - - - - - maven-assembly-plugin - - - src/main/assembly/bin.xml - src/main/assembly/src.xml - - gnu - - - - maven-jar-plugin - - - - - org.apache.commons.compress2 - - - - - - org.apache.felix - maven-bundle-plugin - - - org.tukaani.xz;resolution:=optional - - - - - org.apache.maven.plugins - maven-scm-publish-plugin - - - javadocs - - - - - - - - - - - org.apache.maven.plugins - maven-changes-plugin - ${commons.changes.version} - - - %URL%/%ISSUE% - - - - - - changes-report - jira-report - - - - - - - org.codehaus.mojo - cobertura-maven-plugin - 2.6 - - - - org.apache.maven.plugins - maven-pmd-plugin - 2.5 - - 200 - ${maven.compiler.source} - - ${basedir}/pmd-ruleset.xml - - - - - - org.apache.maven.plugins - maven-javadoc-plugin - - true - ${maven.compiler.source} - ${commons.encoding} - ${commons.docEncoding} - true - - ${commons.javadoc.java.link} - ${commons.javadoc.javaee.link} - - - - Immutable - a - 
This class is immutable - - - NotThreadSafe - a - This class is not thread-safe - - - ThreadSafe - a - This class is thread-safe - - - - - - org.codehaus.mojo - findbugs-maven-plugin - 2.5.3 - - Normal - Default - ${basedir}/findbugs-exclude-filter.xml - - - - org.apache.rat - apache-rat-plugin - ${commons.rat.version} - - - - src/test/resources/** - - PROPOSAL.txt - .pmd - .gitignore - .gitattributes ->>>>>>> refs/remotes/origin/compress-2.0 diff --git a/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java index 4a99a975014..fa2232eefc3 100644 --- a/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/lzw/LZWInputStream.java @@ -40,13 +40,13 @@ public abstract class LZWInputStream extends CompressorInputStream { private final byte[] oneByte = new byte[1]; protected final BitInputStream in; - private int clearCode = -1; - private int codeSize = DEFAULT_CODE_SIZE; - private byte previousCodeFirstChar; - private int previousCode = UNUSED_PREFIX; - private int tableSize; - private int[] prefixes; - private byte[] characters; + protected int clearCode = -1; + protected int codeSize = DEFAULT_CODE_SIZE; + protected byte previousCodeFirstChar; + protected int previousCode = UNUSED_PREFIX; + protected int tableSize; + protected int[] prefixes; + protected byte[] characters; private byte[] outputStack; private int outputStackLocation; diff --git a/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java index 3b9ab94bfab..7ec38e27c79 100644 --- a/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java +++ b/src/main/java/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java @@ -102,12 +102,8 @@ protected int addEntry(int 
previousCode, byte character) throws IOException { int r = addEntry(previousCode, character, maxTableSize); if (getTableSize() == maxTableSize && getCodeSize() < maxCodeSize) { reAlignReading(); -<<<<<<< HEAD codeSize++; System.err.println("codeSize grew to " + codeSize); -======= - incrementCodeSize(); ->>>>>>> origin/master } return r; } @@ -138,14 +134,9 @@ protected int decompressNextSymbol() throws IOException { } else if (blockMode && code == getClearCode()) { clearEntries(); reAlignReading(); -<<<<<<< HEAD codeSize = 9; System.err.println("codeSize reset to 9 after reading clearCode"); previousCode = -1; -======= - resetCodeSize(); - resetPreviousCode(); ->>>>>>> origin/master return 0; } else { boolean addedUnfinishedEntry = false;