OMID-231 Build and test Omid with Hadoop 3 #117

Closed · wants to merge 2 commits
3 changes: 3 additions & 0 deletions .gitignore
@@ -15,6 +15,9 @@ lib/
*.iws
*~
*.swp
/dev-support/artifacts/**
/dev-support/work/**


# Generated website files
generated-website/
2 changes: 2 additions & 0 deletions .travis.yml
@@ -38,5 +38,7 @@ script:
else
  git checkout -b tmp-build-branch &&
  dev-support/rebuild_hbase.sh detect &&
  mvn clean test ;
fi
142 changes: 142 additions & 0 deletions dev-support/cache-apache-project-artifact.sh
@@ -0,0 +1,142 @@
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# This was lovingly copied from Apache HBase
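#
# Illustrative invocation (the paths and version below are examples, not part of this script):
#   ./cache-apache-project-artifact.sh --keys https://downloads.apache.org/hbase/KEYS \
#     /tmp/artifacts/hbase-2.4.13-src.tar.gz hbase/2.4.13/hbase-2.4.13-src.tar.gz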

set -e
function usage {
  echo "Usage: ${0} [options] /path/to/download/file.tar.gz download/fragment/eg/project/subdir/some-artifact-version.tar.gz"
  echo ""
  echo "    --force                        force a redownload even if /path/to/download/file.tar.gz exists."
  echo "    --working-dir /path/to/use     path for writing tempfiles; must already exist."
  echo "                                   defaults to a directory made via mktemp that we clean up."
  echo "    --keys url://to/project/KEYS   where to get KEYS; needed to check the signature on the download."
  echo ""
  exit 1
}
# if no args specified, show usage
if [ $# -lt 2 ]; then
  usage
fi


# Get arguments
declare done_if_cached="true"
declare working_dir
declare cleanup="true"
declare keys
while [ $# -gt 0 ]; do
  case "$1" in
    --force) shift; done_if_cached="false";;
    --working-dir) shift; working_dir=$1; cleanup="false"; shift;;
    --keys) shift; keys=$1; shift;;
    --) shift; break;;
    -*) usage;;
    *) break;;  # terminate while loop
  esac
done

# should still have required args
if [ $# -lt 2 ]; then
  usage
fi

target="$1"
artifact="$2"

if [ -f "${target}" ] && [ "true" = "${done_if_cached}" ]; then
  echo "Reusing existing download of '${artifact}'."
  exit 0
fi

if [ -z "${working_dir}" ]; then
  if ! working_dir="$(mktemp -d -t hbase-download-apache-artifact)" ; then
    echo "Failed to create temporary working directory. Please specify via --working-dir" >&2
    exit 1
  fi
else
  # absolute paths, please
  working_dir="$(cd "$(dirname "${working_dir}")"; pwd)/$(basename "${working_dir}")"
  if [ ! -d "${working_dir}" ]; then
    echo "The passed working directory '${working_dir}' must already exist." >&2
    exit 1
  fi
fi

function cleanup {
  if [ -n "${keys}" ]; then
    echo "Stopping gpg agent daemon"
    gpgconf --homedir "${working_dir}/.gpg" --kill gpg-agent
    echo "Stopped gpg agent daemon"
  fi

  if [ "true" = "${cleanup}" ]; then
    echo "cleaning up temp space."
    rm -rf "${working_dir}"
  fi
}
trap cleanup EXIT SIGQUIT

echo "New download of '${artifact}'"

# N.B. this comes first so that if gpg falls over we skip the expensive download.
if [ -n "${keys}" ]; then
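  # use a throwaway GNUPGHOME under the working dir so the imported project KEYS
  # never touch the user's real keyring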
  if [ ! -d "${working_dir}/.gpg" ]; then
    rm -rf "${working_dir}/.gpg"
    mkdir -p "${working_dir}/.gpg"
    chmod -R 700 "${working_dir}/.gpg"
  fi
  gpgconf --homedir "${working_dir}/.gpg" --create-socketdir || true
  #shellcheck disable=SC2086
  echo "socketdir is $(gpgconf --homedir ${working_dir}/.gpg --list-dirs socketdir)"
  echo "installing project KEYS"
  curl -L --fail -o "${working_dir}/KEYS" "${keys}"
  if ! gpg --homedir "${working_dir}/.gpg" --import "${working_dir}/KEYS" ; then
    echo "ERROR importing the keys via gpg failed. If the output above mentions this error:" >&2
    echo "    gpg: can't connect to the agent: File name too long" >&2
    # we mean to give them the command to run, not to run it.
    #shellcheck disable=SC2016
    echo 'then you probably need to create /var/run/user/$(id -u)' >&2
    echo "see this thread on gnupg-users: https://s.apache.org/uI7x" >&2
    exit 2
  fi

  echo "downloading signature"
  curl -L --fail -o "${working_dir}/artifact.asc" "https://archive.apache.org/dist/${artifact}.asc"
fi

echo "downloading artifact"
if ! curl --dump-header "${working_dir}/artifact_download_headers.txt" -L --fail -o "${working_dir}/artifact" "https://www.apache.org/dyn/closer.lua/${artifact}?action=download" ; then
  echo "Artifact wasn't in the mirror system; falling back to archive.apache.org."
  curl --dump-header "${working_dir}/artifact_fallback_headers.txt" -L --fail -o "${working_dir}/artifact" "https://archive.apache.org/dist/${artifact}"
fi

if [ -n "${keys}" ]; then
  echo "verifying artifact signature"
  gpg --homedir "${working_dir}/.gpg" --verify "${working_dir}/artifact.asc"
  echo "signature good."
fi

echo "moving artifact into place at '${target}'"
# ensure we're on the same filesystem
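# (a cross-filesystem mv is a copy plus a delete and is not atomic, so stage the
# file next to the target first; the rename below then happens within one filesystem)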
mv "${working_dir}/artifact" "${target}.copying"
# attempt atomic move
mv "${target}.copying" "${target}"
echo "all done!"
77 changes: 77 additions & 0 deletions dev-support/rebuild_hbase.sh
@@ -0,0 +1,77 @@
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Rebuilds HBase with -Dhadoop.profile=3.0 locally, to work around PHOENIX-5993
# Intended mainly for CI jobs, but can simplify manual rebuilds as well.
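#
# Illustrative usage (the version numbers are examples):
#   dev-support/rebuild_hbase.sh 2.4.13   # rebuild a specific HBase version
#   dev-support/rebuild_hbase.sh detect   # read hbase.version from the Omid pom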


DEV_SUPPORT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
ARTIFACTS_DIR="$DEV_SUPPORT/artifacts"
WORK_DIR="$DEV_SUPPORT/work"

if [[ -n "$MAVEN_SETTINGS_FILE" ]]; then
  SETTINGS=( "--settings" "$MAVEN_SETTINGS_FILE" )
fi

if [[ -n "$MAVEN_LOCAL_REPO" ]]; then
  LOCALREPO="-Dmaven.repo.local=${MAVEN_LOCAL_REPO}"
fi

if [[ "$1" == "detect" ]]; then
set -e
cd "$DEV_SUPPORT/.."
  HBASE_VERSION=$(mvn ${SETTINGS[@]} help:evaluate -Dexpression=hbase.version -q -DforceStdout $LOCALREPO)
  echo "HBASE_VERSION=$HBASE_VERSION"
  cd "$DEV_SUPPORT"
  set +e
else
  HBASE_VERSION="$1"
fi

# The name of the Apache HBase source file
HBASE_SOURCE_NAME="hbase-$HBASE_VERSION-src.tar.gz"
# The relative path on the ASF mirrors for the HBase source file
HBASE_SOURCE_MIRROR_NAME="hbase/$HBASE_VERSION/$HBASE_SOURCE_NAME"

# Downloads the specified HBase version source, extracts it,
# then rebuilds and installs the Maven artifacts locally with -Dhadoop.profile=3.0

if [ $# -ne 1 ]; then
  echo "Supply the HBase version as a parameter, e.g.: rebuild_hbase.sh 2.2.6"
  exit 1
fi

mkdir "$ARTIFACTS_DIR"
mkdir "$WORK_DIR"

"$DEV_SUPPORT/cache-apache-project-artifact.sh" --keys https://downloads.apache.org/hbase/KEYS \
  --working-dir "$WORK_DIR" "$ARTIFACTS_DIR/$HBASE_SOURCE_NAME" "$HBASE_SOURCE_MIRROR_NAME"

STARTDIR="$PWD"
cd "$ARTIFACTS_DIR"
tar xfz "hbase-$HBASE_VERSION-src.tar.gz"
cd "hbase-$HBASE_VERSION"
echo mvn ${SETTINGS[@]} clean install -Dhadoop.profile=3.0 -DskipTests -B $LOCALREPO
mvn ${SETTINGS[@]} clean install -Dhadoop.profile=3.0 -DskipTests -B $LOCALREPO
cd "${STARTDIR}"

39 changes: 25 additions & 14 deletions pom.xml
@@ -152,8 +152,8 @@
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>

<!-- 3rd-Party Library Versioning -->
<hbase.version>2.4.10</hbase.version>
<hadoop.version>2.10.0</hadoop.version>
<hbase.version>2.4.13</hbase.version>
<hadoop.version>3.1.4</hadoop.version>
<phoenix.thirdparty.version>2.0.0</phoenix.thirdparty.version>
<guice.version>3.0</guice.version>
<testng.version>6.10</testng.version>
@@ -378,6 +378,9 @@

<!-- Exclude config and scripts -->
<exclude>**/dev-utils/*</exclude>
<exclude>dev-support/artifacts/**</exclude>
Review comment (Contributor):
We should also exclude this and the dev-support/work/** folders from apache-rat-plugin. And maybe move those exclusions from a profile to apply globally, but that could be handled as a separate task, as it is not related to the scope of this change.

Reply (Contributor Author):
Yes, that's not strictly in scope, but it makes testing the rat settings easier.

<exclude>dev-support/work/**</exclude>


<!-- Exclude assembly -->
<exclude>**/maven/assembly/*</exclude>
@@ -394,6 +397,7 @@
<exclude>**/src/main/java/org/apache/omid/benchmarks/utils/ScrambledZipfianGenerator.java
</exclude>


<!-- Taken from https://github.com/apache/hbase -->
<exclude>**/src/main/java/org/apache/omid/committable/hbase/RegionSplitter.java</exclude>

@@ -432,6 +436,25 @@
</dependencies>
</plugin>

<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<configuration>
<excludes>
<exclude>**/*.yml</exclude>
<exclude>**/*.properties</exclude>
<exclude>**/hbase-site.xml</exclude>
<exclude>**/test-output/**</exclude>
<exclude>doc/site/site.xml</exclude>
<exclude>doc/images/ModuleDependencies.graffle</exclude>
<exclude>misc/findbugs-exclude.xml</exclude>
<exclude>misc/omid_checks.xml</exclude>
<exclude>dev-support/artifacts/**</exclude>
<exclude>dev-support/work/**</exclude>
</excludes>
</configuration>
</plugin>

</plugins>

<extensions>
@@ -485,18 +508,6 @@
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<configuration>
<excludes>
<exclude>**/*.yml</exclude>
<exclude>**/*.properties</exclude>
<exclude>**/hbase-site.xml</exclude>
<exclude>**/test-output/**</exclude>
<exclude>doc/site/site.xml</exclude>
<exclude>doc/images/ModuleDependencies.graffle</exclude>
<exclude>misc/findbugs-exclude.xml</exclude>
<exclude>misc/omid_checks.xml</exclude>
</excludes>
</configuration>
<executions>
<execution>
<phase>package</phase>