HBASE-27076. [HBOSS] compile against hadoop 3.3.2+ only. (#34)
Cut all hadoop 3.2 support out of the source tree and build, and move
the s3 client binding code back into hadoop-testutils.

The other hadoop profiles and the java test code to query/act
on different versions are retained, so that if a future version
ever needs to be dynamic again, the code is ready.

* removes the hadoop 3.2 profile; retains the 3.3 and default ones
* hadoop.version is set to 3.3.2.
* version checking only handles HadoopVersion = 3.3
* Hadoop33EmbeddedS3ClientFactory is in hadoop-testutils source tree
* the hadoop3-3-testutils and hadoop3-2-testutils modules are gone
* TestUtils.renameToExistingDestinationSupported() would now always
  return true; the method and its uses in assumeTrue() calls are cut
* cut hbase-oss/src/test/resources/contract/hadoop-3.2/s3a.xml;
  choice of contract profile is still dynamic.
* travis now checks only the 3.3 profile
* README updated

Change-Id: Idc9181537443904004f2e79baefe7a2c0fe67902

Signed-off-by: Andrew Purtell <apurtell@apache.org>
steveloughran committed Jun 15, 2022
1 parent 8ba3a91 commit 03af0bfc7d0884975bdb1cf47e67972f2374acf4
Showing 11 changed files with 28 additions and 398 deletions.
@@ -18,7 +18,6 @@ jdk:
   - openjdk8
   - openjdk11
 env:
-  - HADOOP_PROFILE=3.2
   - HADOOP_PROFILE=3.3
 dist: xenial
 os: linux
@@ -20,12 +20,12 @@
 import java.io.IOException;
 import java.net.URI;
 
-import org.apache.hadoop.fs.s3a.S3ClientFactory;
-import org.apache.hadoop.hbase.oss.EmbeddedS3.EmbeddedAmazonS3;
-
 import com.amazonaws.auth.AWSCredentialsProvider;
 import com.amazonaws.services.s3.AmazonS3;
 
+import org.apache.hadoop.fs.s3a.S3ClientFactory;
+import org.apache.hadoop.hbase.oss.EmbeddedS3.EmbeddedAmazonS3;
+
 /**
  * An S3ClientFactory for Hadoop 3.3 releases which have the change from
  * HADOOP-13551. Builds on top of Hadoop32EmbeddedS3ClientFactory.
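
For context: HADOOP-13551 and its follow-ups in the 3.3 line reworked how
S3A constructs its AWS client, which is why a 3.3-specific embedded factory
exists at all. A hedged sketch of the shape such a test factory takes
against the Hadoop 3.3.2 S3ClientFactory interface; the method body is an
assumption, only the class and interface names come from the diff:

import java.io.IOException;
import java.net.URI;

import com.amazonaws.services.s3.AmazonS3;

import org.apache.hadoop.fs.s3a.S3ClientFactory;
import org.apache.hadoop.hbase.oss.EmbeddedS3.EmbeddedAmazonS3;

public class Hadoop33EmbeddedS3ClientFactory implements S3ClientFactory {
  @Override
  public AmazonS3 createS3Client(URI uri, S3ClientCreationParameters parameters)
      throws IOException {
    // Hand back the in-memory S3 stand-in instead of a real AWS client.
    return new EmbeddedAmazonS3();
  }
}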

(Three deleted files not shown.)

@@ -136,26 +136,23 @@ src/test/resources/auth-keys.xml, which should be ignored by source control.
 
 ### Hadoop Versions
 
-HBoss mainly depends on *org.apache.hadoop.fs.FileSystem* contract, and
-current HBoss version is compatible with Hadoop releases *3.2.2* and *3.3.1*.
+HBoss is compiled against Hadoop 3.3.2; this allows it to implement
+hadoop 3.3-only APIs and provide the behavior which the contract
+tests expect.
+
+The hadoop-3.3 profile exists and can still be enabled, however it does
+not change the build in any way.
 
-There are Maven profiles defined for the above mentioned Hadoop 3 versions.
 Support for Hadoop 2 has been dropped as off Oct 2021.
-These are activated via the property `hadoop.profile`. These profiles choose
-a specific Hadoop release in that major line, defaulting to versions as defined
-in `hadoop32.version` and `hadoop33.version`. By default, Hadoop 3.3 is used by
-the build.
 
 ### HBase Versions
 HBoss testing suite relies on HBase Zookeeper testing utility class, which has
 been changing over different HBase releases. Current HBoss version is guaranteed
 to compile with HBase *2.3.6*. Support for different HBase versions,
-including HBase 1 through profiles has been dropped as off Oct 2021.
+including HBase 1 through profiles has been dropped as of Oct 2021.
 
 ### Examples of different build profile usage/combinations, and related dependency versions picked:
 
-mvn verify # Defaults to Hadoop 3.3.1, HBase 2.3.6
-mvn verify -Dhadoop.profile=3.2 # Activates Hadoop 3.2.2, HBase 2.3.6
-mvn verify -Dhadoop.profile=3.3 # Activates Hadoop 3.3.1, HBase 2.3.6
+mvn verify # Defaults to Hadoop 3.3.2, HBase 2.3.6
+mvn verify -Dhadoop.profile=3.3 # no difference


@@ -56,25 +56,6 @@
         <fs.hboss.sync.impl>org.apache.hadoop.hbase.oss.sync.ZKTreeLockManager</fs.hboss.sync.impl>
       </properties>
     </profile>
-    <profile>
-      <id>hadoop3.2</id>
-      <activation>
-        <property>
-          <name>hadoop.profile</name>
-          <value>3.2</value>
-        </property>
-      </activation>
-      <properties>
-        <HBOSS_HADOOP_VERSION>3.2</HBOSS_HADOOP_VERSION>
-      </properties>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hbase.filesystem</groupId>
-          <artifactId>hadoop3-2-testutils</artifactId>
-          <version>${project.version}</version>
-        </dependency>
-      </dependencies>
-    </profile>
     <profile>
       <id>hadoop3.3</id>
       <activation>
@@ -83,16 +64,6 @@
         <value>3.3</value>
       </property>
     </activation>
-    <properties>
-      <HBOSS_HADOOP_VERSION>3.3</HBOSS_HADOOP_VERSION>
-    </properties>
-    <dependencies>
-      <dependency>
-        <groupId>org.apache.hbase.filesystem</groupId>
-        <artifactId>hadoop3-3-testutils</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-    </dependencies>
     </profile>
     <profile>
       <id>hadoop-default</id>
@@ -101,16 +72,6 @@
         <name>!hadoop.profile</name>
       </property>
     </activation>
-    <properties>
-      <HBOSS_HADOOP_VERSION>3.3</HBOSS_HADOOP_VERSION>
-    </properties>
-    <dependencies>
-      <dependency>
-        <groupId>org.apache.hbase.filesystem</groupId>
-        <artifactId>hadoop3-3-testutils</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-    </dependencies>
     </profile>
   </profiles>

@@ -42,8 +42,11 @@ public class TestUtils {
   public static final Logger LOG =
       LoggerFactory.getLogger(TestUtils.class);
 
-  public static enum HadoopVersion {
-    HADOOP32("3.2"),
+  /**
+   * Hadoop version as determined by the passed in system property.
+   */
+  public enum HadoopVersion {
+
     HADOOP33("3.3");
 
     private final String versionIdentifier;
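
Reduced to a single constant, the trimmed enum plausibly reads as below;
the constructor and accessor are assumptions, only the names visible in
the hunk above come from the source:

public enum HadoopVersion {
  HADOOP33("3.3");

  private final String versionIdentifier;

  HadoopVersion(String versionIdentifier) {
    this.versionIdentifier = versionIdentifier;
  }

  String getVersionIdentifier() {
    return versionIdentifier;
  }
}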
@@ -78,13 +81,15 @@ public static void conditionalStart(Configuration conf) {
     }
   }
 
+  /**
+   * Add the appropriate contract resources for the active hadoop
+   * version.
+   * @param conf configuration to update
+   */
   public static void addContract(Configuration conf) {
     final HadoopVersion version = getDesiredHadoopVersion();
     String contractFile;
     switch (version) {
-      case HADOOP32:
-        contractFile = "contract/hadoop-3.2/s3a.xml";
-        break;
       case HADOOP33:
         contractFile = "contract/hadoop-3.3/s3a.xml";
         break;
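
With the 3.2 branch gone, addContract reduces to the single 3.3 case while
the lookup itself stays dynamic, as the commit message notes. A sketch of
the resulting method assembled from the visible context; the default branch
and the conf.addResource call are assumptions, since the hunk is truncated:

public static void addContract(Configuration conf) {
  final HadoopVersion version = getDesiredHadoopVersion();
  String contractFile;
  switch (version) {
    case HADOOP33:
      contractFile = "contract/hadoop-3.3/s3a.xml";
      break;
    default:
      throw new IllegalStateException("Unsupported Hadoop version " + version);
  }
  // Assumed: merge the chosen contract definition into the test configuration.
  conf.addResource(contractFile);
}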
@@ -106,8 +111,6 @@ public static void addContract(Configuration conf) {
   public static String getEmbeddedS3ClientFactoryClassName() {
     final HadoopVersion version = getDesiredHadoopVersion();
     switch (version) {
-      case HADOOP32:
-        return "org.apache.hadoop.hbase.oss.Hadoop32EmbeddedS3ClientFactory";
       case HADOOP33:
         return "org.apache.hadoop.hbase.oss.Hadoop33EmbeddedS3ClientFactory";
     }
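
The class name returned here is typically wired into the S3A configuration
so the contract tests hit the embedded client rather than real AWS. A hedged
usage sketch; fs.s3a.s3.client.factory.impl is the standard S3A factory key,
the surrounding code is illustrative only:

Configuration conf = new Configuration();
// Point S3A at the in-memory S3 implementation used by these tests.
conf.set("fs.s3a.s3.client.factory.impl",
    TestUtils.getEmbeddedS3ClientFactoryClassName());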
@@ -137,16 +140,6 @@ static HadoopVersion getDesiredHadoopVersion() {
     throw new RuntimeException("Unable to determine S3ClientFactory to instantiate");
   }
 
-  public static boolean renameToExistingDestinationSupported() {
-    HadoopVersion version = getDesiredHadoopVersion();
-    // Hadoop 3.2 and below don't support the additional checks added
-    // by HADOOP-16721 around renames.
-    if (version == HadoopVersion.HADOOP32) {
-      return false;
-    }
-    return true;
-  }
-
   public static String getScheme(Configuration conf) {
     String dataUri = conf.get(Constants.DATA_URI);
     try {
@@ -144,7 +144,6 @@ public void testRenameFileMoveToNonExistentDirectory() throws Exception {
   @Test
   public void testRenameDirectoryAsExistingFile() throws Exception {
     assumeTrue(renameSupported());
-    assumeTrue(TestUtils.renameToExistingDestinationSupported());
 
     Path src = path("testRenameDirectoryAsExistingFile/dir");
     fs.mkdirs(src);
@@ -156,14 +155,12 @@ public void testRenameDirectoryAsExistingFile() throws Exception {
 
   @Test
   public void testRenameFileAsExistingFile() throws Exception {
-    assumeTrue(TestUtils.renameToExistingDestinationSupported());
     intercept(FileAlreadyExistsException.class,
         () -> super.testRenameFileAsExistingFile());
   }
 
   @Test
   public void testRenameNonExistentPath() throws Exception {
-    assumeTrue(TestUtils.renameToExistingDestinationSupported());
     intercept(FileNotFoundException.class,
         () -> super.testRenameNonExistentPath());