diff --git a/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java b/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java
index 89879e559c2a..c681c652516e 100644
--- a/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java
+++ b/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java
@@ -21,6 +21,7 @@
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
 import java.io.BufferedReader;
+import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
@@ -29,17 +30,22 @@
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
+import java.io.OutputStreamWriter;
 import java.net.InetAddress;
 import java.net.URI;
 import java.net.UnknownHostException;
 import java.nio.charset.Charset;
+import java.nio.charset.CharsetEncoder;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.AccessDeniedException;
 import java.nio.file.FileSystems;
 import java.nio.file.Files;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Enumeration;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -52,7 +58,7 @@
 import java.util.zip.ZipFile;
 import java.util.zip.ZipInputStream;
 
-import org.apache.commons.collections4.map.CaseInsensitiveMap;
+import org.apache.commons.collections.map.CaseInsensitiveMap;
 import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -596,11 +602,10 @@ public static long getDU(File dir) {
       return dir.length();
     } else {
       File[] allFiles = dir.listFiles();
-      if(allFiles != null) {
-        for (int i = 0; i < allFiles.length; i++) {
-          boolean isSymLink = org.apache.commons.io.FileUtils.isSymlink(allFiles[i]);
-          if(!isSymLink) {
-            size += getDU(allFiles[i]);
+      if (allFiles != null) {
+        for (File f : allFiles) {
+          if (!org.apache.commons.io.FileUtils.isSymlink(f)) {
+            size += getDU(f);
           }
         }
       }
@@ -889,10 +894,13 @@ private static void unTarUsingTar(InputStream inputStream, File untarDir,
   private static void unTarUsingTar(File inFile, File untarDir,
       boolean gzipped) throws IOException {
     StringBuffer untarCommand = new StringBuffer();
+    // not using canonical path here; this postpones relative path
+    // resolution until bash is executed.
+    final String source = "'" + FileUtil.makeSecureShellPath(inFile) + "'";
     if (gzipped) {
-      untarCommand.append(" gzip -dc '");
-      untarCommand.append(FileUtil.makeSecureShellPath(inFile));
-      untarCommand.append("' | (");
+      untarCommand.append(" gzip -dc ")
+          .append(source)
+          .append(" | (");
     }
     untarCommand.append("cd '");
     untarCommand.append(FileUtil.makeSecureShellPath(untarDir));
@@ -902,15 +910,17 @@ private static void unTarUsingTar(File inFile, File untarDir,
     if (gzipped) {
       untarCommand.append(" -)");
     } else {
-      untarCommand.append(FileUtil.makeSecureShellPath(inFile));
+      untarCommand.append(source);
    }
+    LOG.debug("executing [{}]", untarCommand);
     String[] shellCmd = { "bash", "-c", untarCommand.toString() };
     ShellCommandExecutor shexec = new ShellCommandExecutor(shellCmd);
     shexec.execute();
     int exitcode = shexec.getExitCode();
     if (exitcode != 0) {
       throw new IOException("Error untarring file " + inFile +
-          ". Tar process exited with exit code " + exitcode);
+          ". Tar process exited with exit code " + exitcode +
+          " from command " + untarCommand);
     }
   }
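The hunks above compose a bash pipeline from single-quoted paths. Single quotes only neutralize the shell if embedded quotes are themselves escaped; the sketch below shows that general technique in isolation. It is a minimal illustration, assuming makeSecureShellPath does something equivalent — its implementation is not part of this diff, and the class and file names here are invented for the demo.

    // Minimal sketch of single-quote shell escaping. Inside single quotes
    // bash interprets nothing, so the only dangerous character is the
    // single quote itself: close the quote, emit \', and reopen.
    public final class ShellQuote {
      private ShellQuote() {}

      /** Quote a path so bash treats it as one literal word. */
      public static String quote(String path) {
        return "'" + path.replace("'", "'\\''") + "'";
      }

      public static void main(String[] args) {
        // A hostile file name that would otherwise inject a command:
        String evil = "a'; rm -rf /tmp/x; echo '";
        System.out.println("gzip -dc " + quote(evil)
            + " | (cd '/dest' && tar -xf -)");
      }
    }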
@@ -969,6 +979,14 @@ private static void unpackEntries(TarArchiveInputStream tis,
           + " would create entry outside of " + outputDir);
     }
 
+    if (entry.isSymbolicLink() || entry.isLink()) {
+      String canonicalTargetPath = getCanonicalPath(entry.getLinkName(), outputDir);
+      if (!canonicalTargetPath.startsWith(targetDirPath)) {
+        throw new IOException(
+            "expanding " + entry.getName() + " would create entry outside of " + outputDir);
+      }
+    }
+
     if (entry.isDirectory()) {
       File subDir = new File(outputDir, entry.getName());
       if (!subDir.mkdirs() && !subDir.isDirectory()) {
@@ -984,10 +1002,12 @@ private static void unpackEntries(TarArchiveInputStream tis,
     }
 
     if (entry.isSymbolicLink()) {
-      // Create symbolic link relative to tar parent dir
-      Files.createSymbolicLink(FileSystems.getDefault()
-              .getPath(outputDir.getPath(), entry.getName()),
-          FileSystems.getDefault().getPath(entry.getLinkName()));
+      // Create symlink with canonical target path to ensure that we don't extract
+      // outside targetDirPath
+      String canonicalTargetPath = getCanonicalPath(entry.getLinkName(), outputDir);
+      Files.createSymbolicLink(
+          FileSystems.getDefault().getPath(outputDir.getPath(), entry.getName()),
+          FileSystems.getDefault().getPath(canonicalTargetPath));
       return;
     }
 
@@ -999,7 +1019,8 @@ private static void unpackEntries(TarArchiveInputStream tis,
     }
 
     if (entry.isLink()) {
-      File src = new File(outputDir, entry.getLinkName());
+      String canonicalTargetPath = getCanonicalPath(entry.getLinkName(), outputDir);
+      File src = new File(canonicalTargetPath);
       HardLink.createHardLink(src, outputFile);
       return;
     }
@@ -1017,6 +1038,20 @@ private static void unpackEntries(TarArchiveInputStream tis,
     }
   }
 
+  /**
+   * Gets the canonical path for the given path.
+   *
+   * @param path The path for which the canonical path needs to be computed.
+   * @param parentDir The parent directory to use if the path is a relative path.
+   * @return The canonical path of the given path.
+   */
+  private static String getCanonicalPath(String path, File parentDir) throws IOException {
+    java.nio.file.Path targetPath = Paths.get(path);
+    return (targetPath.isAbsolute() ?
+        new File(path) :
+        new File(parentDir, path)).getCanonicalPath();
+  }
+
   /**
    * Class for creating hardlinks.
    * Supports Unix, WindXP.
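The hunks above close a tar-slip hole: symlink and hard-link targets from the archive are canonicalized and rejected unless they stay under the extraction root. The standalone sketch below replays that containment rule with plain java.io.File; the class name, paths, and the trailing-separator handling of targetDirPath are illustrative, not the patch's API.

    import java.io.File;
    import java.io.IOException;

    // Resolve a link target against the extraction root, canonicalize it,
    // and reject anything that escapes the root.
    public final class TarSlipCheck {
      private TarSlipCheck() {}

      static String canonicalTarget(String linkName, File outputDir)
          throws IOException {
        File target = new File(linkName).isAbsolute()
            ? new File(linkName)
            : new File(outputDir, linkName);
        // getCanonicalPath() collapses "." and ".." lexically, so it works
        // even if the target does not exist yet.
        return target.getCanonicalPath();
      }

      public static void main(String[] args) throws IOException {
        File outputDir = new File("/tmp/untar");
        String targetDirPath = outputDir.getCanonicalPath() + File.separator;
        for (String link : new String[] {"data/ok.txt", "../../etc/passwd"}) {
          String canonical = canonicalTarget(link, outputDir);
          System.out.println(link + " -> " + canonical + " : "
              + (canonical.startsWith(targetDirPath) ? "allowed" : "rejected"));
        }
      }
    }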
@@ -1349,7 +1384,8 @@ static String execCommand(File f, String... cmd) throws IOException {
     String[] args = new String[cmd.length + 1];
     System.arraycopy(cmd, 0, args, 0, cmd.length);
     args[cmd.length] = f.getCanonicalPath();
-    return Shell.execCommand(args);
+    String output = Shell.execCommand(args);
+    return output;
   }
 
   /**
@@ -1362,9 +1398,9 @@ static String execCommand(File f, String... cmd) throws IOException {
    * @see java.io.File#createTempFile(String, String, File)
    * @see java.io.File#deleteOnExit()
    */
-  public static File createLocalTempFile(final File basefile,
-                                         final String prefix,
-                                         final boolean isDeleteOnExit)
+  public static final File createLocalTempFile(final File basefile,
+                                               final String prefix,
+                                               final boolean isDeleteOnExit)
     throws IOException {
     File tmp = File.createTempFile(prefix + basefile.getName(),
                                    "", basefile.getParentFile());
@@ -1485,7 +1521,8 @@ public static String[] createJarWithClassPath(String inputClassPath, Path pwd,
       Path targetDir, Map<String, String> callerEnv) throws IOException {
     // Replace environment variables, case-insensitive on Windows
-    Map<String, String> env = Shell.WINDOWS ? new CaseInsensitiveMap<>(callerEnv) :
+    @SuppressWarnings("unchecked")
+    Map<String, String> env = Shell.WINDOWS ? new CaseInsensitiveMap(callerEnv) :
         callerEnv;
     String[] classPathEntries = inputClassPath.split(File.pathSeparator);
     for (int i = 0; i < classPathEntries.length; ++i) {
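The hunk above swaps the generic commons-collections4 CaseInsensitiveMap for the raw commons-collections 3.x class, which is why the assignment now needs @SuppressWarnings("unchecked"). The sketch below shows the behavior that matters here — case-insensitive lookup of Windows environment variable names; the class name and values are invented for the demo.

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.commons.collections.map.CaseInsensitiveMap;

    // On Windows, environment variable names are case-insensitive, so
    // "Path", "PATH" and "path" must all resolve to the same entry.
    public final class EnvLookupDemo {
      public static void main(String[] args) {
        Map<String, String> callerEnv = new HashMap<>();
        callerEnv.put("Path", "C:\\Windows\\System32");

        // Raw type in commons-collections 3.x, hence the suppression.
        @SuppressWarnings("unchecked")
        Map<String, String> env = new CaseInsensitiveMap(callerEnv);
        System.out.println(env.get("PATH")); // C:\Windows\System32
        System.out.println(env.get("path")); // same entry
      }
    }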
@@ -1647,4 +1684,235 @@ public static boolean compareFs(FileSystem srcFs, FileSystem destFs) {
     // check for ports
     return srcUri.getPort()==dstUri.getPort();
   }
+
+  /**
+   * Writes bytes to a file. This utility method opens the file for writing,
+   * creating the file if it does not exist, or overwrites an existing file. All
+   * bytes in the byte array are written to the file.
+   *
+   * @param fs the file system with which to create the file
+   * @param path the path to the file
+   * @param bytes the byte array with the bytes to write
+   *
+   * @return the file system
+   *
+   * @throws NullPointerException if any of the arguments are {@code null}
+   * @throws IOException if an I/O error occurs creating or writing to the file
+   */
+  public static FileSystem write(final FileSystem fs, final Path path,
+      final byte[] bytes) throws IOException {
+
+    Objects.requireNonNull(path);
+    Objects.requireNonNull(bytes);
+
+    try (FSDataOutputStream out = fs.createFile(path).overwrite(true).build()) {
+      out.write(bytes);
+    }
+
+    return fs;
+  }
+
+  /**
+   * Writes bytes to a file. This utility method opens the file for writing,
+   * creating the file if it does not exist, or overwrites an existing file. All
+   * bytes in the byte array are written to the file.
+   *
+   * @param fileContext the file context with which to create the file
+   * @param path the path to the file
+   * @param bytes the byte array with the bytes to write
+   *
+   * @return the file context
+   *
+   * @throws NullPointerException if any of the arguments are {@code null}
+   * @throws IOException if an I/O error occurs creating or writing to the file
+   */
+  public static FileContext write(final FileContext fileContext,
+      final Path path, final byte[] bytes) throws IOException {
+
+    Objects.requireNonNull(path);
+    Objects.requireNonNull(bytes);
+
+    try (FSDataOutputStream out =
+        fileContext.create(path).overwrite(true).build()) {
+      out.write(bytes);
+    }
+
+    return fileContext;
+  }
+
+  /**
+   * Write lines of text to a file. Each line is a char sequence and is written
+   * to the file in sequence with each line terminated by the platform's line
+   * separator, as defined by the system property {@code
+   * line.separator}. Characters are encoded into bytes using the specified
+   * charset. This utility method opens the file for writing, creating the file
+   * if it does not exist, or overwrites an existing file.
+   *
+   * @param fs the file system with which to create the file
+   * @param path the path to the file
+   * @param lines a Collection to iterate over the char sequences
+   * @param cs the charset to use for encoding
+   *
+   * @return the file system
+   *
+   * @throws NullPointerException if any of the arguments are {@code null}
+   * @throws IOException if an I/O error occurs creating or writing to the file
+   */
+  public static FileSystem write(final FileSystem fs, final Path path,
+      final Iterable<? extends CharSequence> lines, final Charset cs)
+      throws IOException {
+
+    Objects.requireNonNull(path);
+    Objects.requireNonNull(lines);
+    Objects.requireNonNull(cs);
+
+    CharsetEncoder encoder = cs.newEncoder();
+    try (FSDataOutputStream out = fs.createFile(path).overwrite(true).build();
+         BufferedWriter writer =
+             new BufferedWriter(new OutputStreamWriter(out, encoder))) {
+      for (CharSequence line : lines) {
+        writer.append(line);
+        writer.newLine();
+      }
+    }
+    return fs;
+  }
+
+  /**
+   * Write lines of text to a file. Each line is a char sequence and is written
+   * to the file in sequence with each line terminated by the platform's line
+   * separator, as defined by the system property {@code
+   * line.separator}. Characters are encoded into bytes using the specified
+   * charset. This utility method opens the file for writing, creating the file
+   * if it does not exist, or overwrites an existing file.
+   *
+   * @param fileContext the file context with which to create the file
+   * @param path the path to the file
+   * @param lines a Collection to iterate over the char sequences
+   * @param cs the charset to use for encoding
+   *
+   * @return the file context
+   *
+   * @throws NullPointerException if any of the arguments are {@code null}
+   * @throws IOException if an I/O error occurs creating or writing to the file
+   */
+  public static FileContext write(final FileContext fileContext,
+      final Path path, final Iterable<? extends CharSequence> lines,
+      final Charset cs) throws IOException {
+
+    Objects.requireNonNull(path);
+    Objects.requireNonNull(lines);
+    Objects.requireNonNull(cs);
+
+    CharsetEncoder encoder = cs.newEncoder();
+    try (FSDataOutputStream out = fileContext.create(path).overwrite(true).build();
+         BufferedWriter writer =
+             new BufferedWriter(new OutputStreamWriter(out, encoder))) {
+      for (CharSequence line : lines) {
+        writer.append(line);
+        writer.newLine();
+      }
+    }
+    return fileContext;
+  }
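The line-writing overloads above (and the charseq overloads that follow) build the OutputStreamWriter from cs.newEncoder() rather than from the Charset itself. The difference is deliberate: a Charset-based writer silently substitutes unmappable characters, while a fresh CharsetEncoder in its default REPORT mode makes the write fail. A self-contained demonstration of that standard-library behavior:

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.OutputStreamWriter;
    import java.io.Writer;
    import java.nio.charset.StandardCharsets;

    public final class EncoderDemo {
      public static void main(String[] args) throws IOException {
        String snowman = "\u2603"; // not representable in US-ASCII

        try (Writer lenient = new OutputStreamWriter(
            new ByteArrayOutputStream(), StandardCharsets.US_ASCII)) {
          lenient.write(snowman); // quietly becomes '?'
        }

        try (Writer strict = new OutputStreamWriter(
            new ByteArrayOutputStream(), StandardCharsets.US_ASCII.newEncoder())) {
          strict.write(snowman); // throws UnmappableCharacterException
        } catch (IOException expected) {
          System.out.println("strict encoder rejected it: " + expected);
        }
      }
    }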
+
+  /**
+   * Write a line of text to a file. Characters are encoded into bytes using the
+   * specified charset. This utility method opens the file for writing, creating
+   * the file if it does not exist, or overwrites an existing file.
+   *
+   * @param fs the file system with which to create the file
+   * @param path the path to the file
+   * @param charseq the char sequence to write to the file
+   * @param cs the charset to use for encoding
+   *
+   * @return the file system
+   *
+   * @throws NullPointerException if any of the arguments are {@code null}
+   * @throws IOException if an I/O error occurs creating or writing to the file
+   */
+  public static FileSystem write(final FileSystem fs, final Path path,
+      final CharSequence charseq, final Charset cs) throws IOException {
+
+    Objects.requireNonNull(path);
+    Objects.requireNonNull(charseq);
+    Objects.requireNonNull(cs);
+
+    CharsetEncoder encoder = cs.newEncoder();
+    try (FSDataOutputStream out = fs.createFile(path).overwrite(true).build();
+         BufferedWriter writer =
+             new BufferedWriter(new OutputStreamWriter(out, encoder))) {
+      writer.append(charseq);
+    }
+    return fs;
+  }
+
+  /**
+   * Write a line of text to a file. Characters are encoded into bytes using the
+   * specified charset. This utility method opens the file for writing, creating
+   * the file if it does not exist, or overwrites an existing file.
+   *
+   * @param fs the file context with which to create the file
+   * @param path the path to the file
+   * @param charseq the char sequence to write to the file
+   * @param cs the charset to use for encoding
+   *
+   * @return the file context
+   *
+   * @throws NullPointerException if any of the arguments are {@code null}
+   * @throws IOException if an I/O error occurs creating or writing to the file
+   */
+  public static FileContext write(final FileContext fs, final Path path,
+      final CharSequence charseq, final Charset cs) throws IOException {
+
+    Objects.requireNonNull(path);
+    Objects.requireNonNull(charseq);
+    Objects.requireNonNull(cs);
+
+    CharsetEncoder encoder = cs.newEncoder();
+    try (FSDataOutputStream out = fs.create(path).overwrite(true).build();
+         BufferedWriter writer =
+             new BufferedWriter(new OutputStreamWriter(out, encoder))) {
+      writer.append(charseq);
+    }
+    return fs;
+  }
+
+  /**
+   * Write a line of text to a file. Characters are encoded into bytes using
+   * UTF-8. This utility method opens the file for writing, creating the file if
+   * it does not exist, or overwrites an existing file.
+   *
+   * @param fs the file system with which to create the file
+   * @param path the path to the file
+   * @param charseq the char sequence to write to the file
+   *
+   * @return the file system
+   *
+   * @throws NullPointerException if any of the arguments are {@code null}
+   * @throws IOException if an I/O error occurs creating or writing to the file
+   */
+  public static FileSystem write(final FileSystem fs, final Path path,
+      final CharSequence charseq) throws IOException {
+    return write(fs, path, charseq, StandardCharsets.UTF_8);
+  }
+
+  /**
+   * Write a line of text to a file. Characters are encoded into bytes using
+   * UTF-8. This utility method opens the file for writing, creating the file if
+   * it does not exist, or overwrites an existing file.
+   *
+   * @param fileContext the file context with which to create the file
+   * @param path the path to the file
+   * @param charseq the char sequence to write to the file
+   *
+   * @return the file context
+   *
+   * @throws NullPointerException if any of the arguments are {@code null}
+   * @throws IOException if an I/O error occurs creating or writing to the file
+   */
+  public static FileContext write(final FileContext fileContext,
+      final Path path, final CharSequence charseq) throws IOException {
+    return write(fileContext, path, charseq, StandardCharsets.UTF_8);
+  }
 }
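A minimal usage sketch for the new write() overloads, run against the local file system. The path is illustrative; everything else comes straight from the API added above.

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FileUtil;
    import org.apache.hadoop.fs.Path;

    public final class WriteDemo {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.getLocal(new Configuration());
        Path path = new Path("/tmp/write-demo.txt");

        FileUtil.write(fs, path, "a single line");            // UTF-8 by default
        FileUtil.write(fs, path, new byte[] {0x68, 0x69});    // overwrites the file
        FileUtil.write(fs, path, Arrays.asList("one", "two"), // one line per entry
            StandardCharsets.UTF_8);
      }
    }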
diff --git a/solr/core/src/test/org/apache/hadoop/fs/RawLocalFileSystem.java b/solr/core/src/test/org/apache/hadoop/fs/RawLocalFileSystem.java
index 5373c03bcc07..94748c676266 100644
--- a/solr/core/src/test/org/apache/hadoop/fs/RawLocalFileSystem.java
+++ b/solr/core/src/test/org/apache/hadoop/fs/RawLocalFileSystem.java
@@ -55,6 +55,8 @@
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
 
+import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapabilityArgs;
+
 /****************************************************************
  * Implement the FileSystem API for the raw local filesystem.
  *
@@ -1034,7 +1036,8 @@ private FileStatus getNativeFileLinkStatus(final Path f,
       boolean dereference) throws IOException {
     checkPath(f);
     Stat stat = new Stat(f, getDefaultBlockSize(f), dereference, this);
-    return stat.getFileStatus();
+    FileStatus status = stat.getFileStatus();
+    return status;
   }
 
   @Override
@@ -1043,4 +1046,21 @@ public Path getLinkTarget(Path f) throws IOException {
     // return an unqualified symlink target
     return fi.getSymlink();
   }
+
+  @Override
+  public boolean hasPathCapability(final Path path, final String capability)
+      throws IOException {
+    switch (validatePathCapabilityArgs(makeQualified(path), capability)) {
+    case CommonPathCapabilities.FS_APPEND:
+    case CommonPathCapabilities.FS_CONCAT:
+    case CommonPathCapabilities.FS_PATHHANDLES:
+    case CommonPathCapabilities.FS_PERMISSIONS:
+    case CommonPathCapabilities.FS_TRUNCATE:
+      return true;
+    case CommonPathCapabilities.FS_SYMLINKS:
+      return FileSystem.areSymlinksEnabled();
+    default:
+      return super.hasPathCapability(path, capability);
+    }
+  }
 }
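With the override above, callers can probe a filesystem for a feature before relying on it. A sketch of that probe against the local filesystem; the constants come from CommonPathCapabilities exactly as used in the switch, and the path is illustrative.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonPathCapabilities;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public final class CapabilityProbe {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.getLocal(new Configuration());
        Path p = new Path("/tmp");
        if (fs.hasPathCapability(p, CommonPathCapabilities.FS_APPEND)) {
          System.out.println("append is supported here");
        }
        // FS_SYMLINKS additionally depends on FileSystem.areSymlinksEnabled()
        System.out.println("symlinks: "
            + fs.hasPathCapability(p, CommonPathCapabilities.FS_SYMLINKS));
      }
    }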
diff --git a/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java b/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
index c75ef8df72de..6f2085b43756 100644
--- a/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
+++ b/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
@@ -40,6 +40,7 @@
 import java.util.concurrent.ForkJoinTask;
 import java.util.concurrent.RecursiveAction;
 import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -61,8 +62,8 @@
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.ReplicaInputStreams;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.MultipleIOException;
-import org.apache.hadoop.util.AutoCloseableLock;
 import org.apache.hadoop.util.DataChecksum;
+import org.apache.hadoop.util.DataChecksum.Type;
 import org.apache.hadoop.util.DiskChecker;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
 import org.apache.hadoop.util.ShutdownHookManager;
@@ -176,7 +177,7 @@ public int compare(File f1, File f2) {
 
     if (addReplicaThreadPool == null) {
       // initialize add replica fork join pool
-      initializeAddReplicaPool(conf);
+      initializeAddReplicaPool(conf, (FsDatasetImpl) volume.getDataset());
     }
     // Make the dfs usage to be saved during shutdown.
     shutdownHook = new Runnable() {
@@ -189,9 +190,9 @@ public void run() {
         SHUTDOWN_HOOK_PRIORITY);
   }
 
-  private synchronized void initializeAddReplicaPool(Configuration conf) {
+  private synchronized static void initializeAddReplicaPool(Configuration conf,
+      FsDatasetImpl dataset) {
     if (addReplicaThreadPool == null) {
-      FsDatasetImpl dataset = (FsDatasetImpl) volume.getDataset();
       int numberOfBlockPoolSlice = dataset.getVolumeCount()
           * dataset.getBPServiceCount();
       int poolsize = Math.max(numberOfBlockPoolSlice,
@@ -697,6 +698,10 @@ private long validateIntegrityAndSetLength(File blockFile, long genStamp) {
       // read and handle the common header here. For now just a version
       final DataChecksum checksum = BlockMetadataHeader.readDataChecksum(
          checksumIn, metaFile);
+      if (Type.NULL.equals(checksum.getChecksumType())) {
+        // in case of NULL checksum type consider full file as valid
+        return blockFileLen;
+      }
      int bytesPerChecksum = checksum.getBytesPerChecksum();
      int checksumSize = checksum.getChecksumSize();
      long numChunks = Math.min(
@@ -759,7 +764,7 @@ void shutdown(BlockListAsLongs blocksListToPersist) {
 
   private boolean readReplicasFromCache(ReplicaMap volumeMap,
       final RamDiskReplicaTracker lazyWriteReplicaMap) {
-    ReplicaMap tmpReplicaMap = new ReplicaMap(new AutoCloseableLock());
+    ReplicaMap tmpReplicaMap = new ReplicaMap(new ReentrantReadWriteLock());
     File replicaFile = new File(currentDir, REPLICA_CACHE_FILE);
     // Check whether the file exists or not.
     if (!replicaFile.exists()) {
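The early return for Type.NULL matters because a NULL checksum has a checksum size of zero, and the chunk arithmetic just below the new guard divides by that size. A pure-Java illustration of the failure mode; the exact expression in validateIntegrityAndSetLength is abbreviated here, and the numbers are made up.

    public final class NullChecksumDemo {
      public static void main(String[] args) {
        int checksumSize = 0;  // what a DataChecksum of Type.NULL reports
        long metaFileLen = 7;  // header only, no checksum bytes

        try {
          // roughly what the pre-patch code path would attempt:
          long numChunks = metaFileLen / checksumSize;
          System.out.println(numChunks);
        } catch (ArithmeticException e) {
          System.out.println("without the early return: " + e);
        }
      }
    }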
diff --git a/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java b/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java
index c73a9803d077..0680f265dea7 100644
--- a/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java
+++ b/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java
@@ -145,12 +145,16 @@ public final class HttpServer2 implements FilterContainer {
   // idle timeout in milliseconds
   public static final String HTTP_IDLE_TIMEOUT_MS_KEY =
       "hadoop.http.idle_timeout.ms";
-  public static final int HTTP_IDLE_TIMEOUT_MS_DEFAULT = 10000;
+  public static final int HTTP_IDLE_TIMEOUT_MS_DEFAULT = 60000;
   public static final String HTTP_TEMP_DIR_KEY = "hadoop.http.temp.dir";
 
   public static final String FILTER_INITIALIZER_PROPERTY
       = "hadoop.http.filter.initializers";
 
+  public static final String HTTP_SNI_HOST_CHECK_ENABLED_KEY
+      = "hadoop.http.sni.host.check.enabled";
+  public static final boolean HTTP_SNI_HOST_CHECK_ENABLED_DEFAULT = false;
+
   // The ServletContext attribute where the daemon Configuration
   // gets stored.
   public static final String CONF_CONTEXT_ATTRIBUTE = "hadoop.conf";
@@ -225,6 +229,8 @@ public static class Builder {
     private boolean xFrameEnabled;
     private XFrameOption xFrameOption = XFrameOption.SAMEORIGIN;
 
+    private boolean sniHostCheckEnabled;
+
     public Builder setName(String name){
       this.name = name;
       return this;
@@ -369,6 +375,17 @@ public Builder setXFrameOption(String option) {
       return this;
     }
 
+    /**
+     * Enable or disable sniHostCheck.
+     *
+     * @param sniHostCheckEnabled Enable sniHostCheck if true, else disable it.
+     * @return Builder.
+     */
+    public Builder setSniHostCheckEnabled(boolean sniHostCheckEnabled) {
+      this.sniHostCheckEnabled = sniHostCheckEnabled;
+      return this;
+    }
+
     /**
      * A wrapper of {@link Configuration#getPassword(String)}. It returns
      * String instead of char[].
@@ -461,6 +478,13 @@ public HttpServer2 build() throws IOException {
       int backlogSize = conf.getInt(HTTP_SOCKET_BACKLOG_SIZE_KEY,
          HTTP_SOCKET_BACKLOG_SIZE_DEFAULT);
 
+      // If setSniHostCheckEnabled() is used to enable SNI hostname check,
+      // configuration lookup is skipped.
+      if (!sniHostCheckEnabled) {
+        sniHostCheckEnabled = conf.getBoolean(HTTP_SNI_HOST_CHECK_ENABLED_KEY,
+            HTTP_SNI_HOST_CHECK_ENABLED_DEFAULT);
+      }
+
       for (URI ep : endpoints) {
         final ServerConnector connector;
         String scheme = ep.getScheme();
@@ -504,10 +528,12 @@ private ServerConnector createHttpChannelConnector(
     private ServerConnector createHttpsChannelConnector(
         Server server, HttpConfiguration httpConfig) {
       httpConfig.setSecureScheme(HTTPS_SCHEME);
-      httpConfig.addCustomizer(new SecureRequestCustomizer());
+      httpConfig.addCustomizer(
+          new SecureRequestCustomizer(sniHostCheckEnabled));
      ServerConnector conn = createHttpChannelConnector(server, httpConfig);
 
-      SslContextFactory.Server sslContextFactory = new SslContextFactory.Server();
+      SslContextFactory.Server sslContextFactory =
+          new SslContextFactory.Server();
       sslContextFactory.setNeedClientAuth(needsClientAuth);
       sslContextFactory.setKeyManagerPassword(keyPassword);
       if (keyStore != null) {
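There are now two ways to turn the SNI host check on: through configuration, which build() reads when the builder flag is left at its default, or programmatically, which short-circuits the configuration lookup. A sketch of both, assuming the usual builder wiring (keystore and filter setup are elided, and the endpoint URI is illustrative):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.http.HttpServer2;

    public final class SniConfigDemo {
      static HttpServer2 build() throws Exception {
        Configuration conf = new Configuration();
        // 1) via configuration, consulted in build() when the flag is unset:
        conf.setBoolean(HttpServer2.HTTP_SNI_HOST_CHECK_ENABLED_KEY, true);

        return new HttpServer2.Builder()
            .setName("demo")
            .setConf(conf)
            .addEndpoint(URI.create("https://localhost:0"))
            // 2) or programmatically, which skips the configuration lookup:
            .setSniHostCheckEnabled(true)
            .build();
      }
    }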
@@ -577,9 +603,9 @@ private void initializeWebServer(String name, String hostName,
       threadPool.setMaxThreads(Math.max(maxThreads, 4));
     }
 
-    SessionHandler sessionHandler = webAppContext.getSessionHandler();
-    sessionHandler.setHttpOnly(true);
-    sessionHandler.getSessionCookieConfig().setSecure(true);
+    SessionHandler handler = webAppContext.getSessionHandler();
+    handler.setHttpOnly(true);
+    handler.getSessionCookieConfig().setSecure(true);
 
     ContextHandlerCollection contexts = new ContextHandlerCollection();
     RequestLog requestLog = HttpRequestLog.getRequestLog(name);
@@ -785,6 +811,7 @@ public void addJerseyResourcePackage(final String packageName,
     addJerseyResourcePackage(packageName, pathSpec,
         Collections.emptyMap());
   }
 
+
   /**
    * Add a Jersey resource package.
    * @param packageName The Java package name containing the Jersey resource.
@@ -792,8 +819,7 @@ public void addJerseyResourcePackage(final String packageName,
    * @param params properties and features for ResourceConfig
    */
   public void addJerseyResourcePackage(final String packageName,
-      final String pathSpec, Map<String, String> params) {
-
+      final String pathSpec, Map<String, String> params) {
     LOG.info("addJerseyResourcePackage: packageName={}, pathcpec={}"
         , packageName, pathSpec);
     final ServletHolder sh = new ServletHolder(ServletContainer.class);
@@ -1209,7 +1235,7 @@ private static void bindListener(ServerConnector listener) throws Exception {
    * @return returns the exception
    */
   private static BindException constructBindException(ServerConnector listener,
-      BindException ex) {
+      IOException ex) {
     BindException be = new BindException("Port in use: "
         + listener.getHost() + ":" + listener.getPort());
     if (ex != null) {
@@ -1231,7 +1257,7 @@ private void bindForSinglePort(ServerConnector listener, int port)
       try {
         bindListener(listener);
         break;
-      } catch (BindException ex) {
+      } catch (IOException ex) {
         if (port == 0 || !findPort) {
           throw constructBindException(listener, ex);
         }
@@ -1251,13 +1277,13 @@ private void bindForSinglePort(ServerConnector listener, int port)
    */
   private void bindForPortRange(ServerConnector listener, int startPort)
       throws Exception {
-    BindException bindException = null;
+    IOException ioException = null;
     try {
       bindListener(listener);
       return;
-    } catch (BindException ex) {
+    } catch (IOException ex) {
       // Ignore exception.
-      bindException = ex;
+      ioException = ex;
     }
     for(Integer port : portRanges) {
       if (port == startPort) {
@@ -1268,12 +1294,16 @@ private void bindForPortRange(ServerConnector listener, int startPort)
       try {
         bindListener(listener);
         return;
-      } catch (BindException ex) {
+      } catch (IOException ex) {
+        if (!(ex instanceof BindException)
+            && !(ex.getCause() instanceof BindException)) {
+          throw ex;
+        }
         // Ignore exception. Move to next port.
-        bindException = ex;
+        ioException = ex;
       }
     }
-    throw constructBindException(listener, bindException);
+    throw constructBindException(listener, ioException);
  }
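The catch blocks above widen from BindException to IOException because a port conflict can surface either as a raw BindException or wrapped inside an IOException, yet unrelated I/O failures must still propagate. The retry predicate in isolation, as a self-contained sketch (class and method names are invented for the demo):

    import java.io.IOException;
    import java.net.BindException;

    public final class BindRetryDemo {
      // Only bind failures are retryable; anything else is rethrown.
      static boolean isRetryable(IOException ex) {
        return ex instanceof BindException
            || ex.getCause() instanceof BindException;
      }

      public static void main(String[] args) {
        System.out.println(isRetryable(
            new BindException("Address already in use")));           // true
        System.out.println(isRetryable(
            new IOException(new BindException("in use"))));          // true
        System.out.println(isRetryable(new IOException("disk full"))); // false
      }
    }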

@@ -1371,10 +1401,10 @@ public String toString() {
 
   /**
    * Checks the user has privileges to access to instrumentation servlets.
-   *
+   * <p>
    * If hadoop.security.instrumentation.requires.admin is set to FALSE
    * (default value) it always returns TRUE.
-   *
+   * <p>
    * If hadoop.security.instrumentation.requires.admin is set to TRUE
    * it will check that if the current user is in the admin ACLS. If the user is
    * in the admin ACLs it returns TRUE, otherwise it returns FALSE.
@@ -1504,10 +1534,12 @@ public RequestQuoter(HttpServletRequest rawRequest) {
     /**
      * Return the set of parameter names, quoting each name.
      */
+    @SuppressWarnings("unchecked")
     @Override
     public Enumeration<String> getParameterNames() {
       return new Enumeration<String>() {
-        private Enumeration<String> rawIterator = rawRequest.getParameterNames();
+        private Enumeration<String> rawIterator =
+            rawRequest.getParameterNames();
         @Override
         public boolean hasMoreElements() {
           return rawIterator.hasMoreElements();
diff --git a/solr/core/src/test/org/apache/hadoop/util/DiskChecker.java b/solr/core/src/test/org/apache/hadoop/util/DiskChecker.java
index 6699ce131083..ba01646364d9 100644
--- a/solr/core/src/test/org/apache/hadoop/util/DiskChecker.java
+++ b/solr/core/src/test/org/apache/hadoop/util/DiskChecker.java
@@ -168,6 +168,11 @@ private static void checkAccessByFileMethods(File dir)
       throw new DiskErrorException("Directory is not writable: "
           + dir.toString());
     }
+
+    if (!FileUtil.canExecute(dir)) {
+      throw new DiskErrorException("Directory is not executable: "
+          + dir.toString());
+    }
   }
 
   /**
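The new execute check closes a gap the read/write checks leave open: on POSIX systems a directory's execute bit governs traversal, so a directory can be readable and writable yet its contents unreachable. A sketch of the same three-step check using plain java.io.File (assuming FileUtil.canExecute performs the equivalent platform check; the class name and path are illustrative):

    import java.io.File;
    import java.io.IOException;

    public final class DirCheckDemo {
      static void checkDir(File dir) throws IOException {
        if (!dir.canRead()) {
          throw new IOException("Directory is not readable: " + dir);
        }
        if (!dir.canWrite()) {
          throw new IOException("Directory is not writable: " + dir);
        }
        if (!dir.canExecute()) {
          throw new IOException("Directory is not executable: " + dir);
        }
      }

      public static void main(String[] args) throws IOException {
        checkDir(new File("/tmp"));
        System.out.println("/tmp passed all three checks");
      }
    }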