From ea69b8711f012a505aa0b74beb05d8a6e747e1ec Mon Sep 17 00:00:00 2001
From: Nick Dimiduk
Date: Thu, 26 Mar 2020 16:35:28 -0700
Subject: [PATCH] HBASE-24049 use hadoop-2.10.0 for "packaging and integration"
 check

Signed-off-by: stack
Signed-off-by: Jan Hentschel
Signed-off-by: Viraj Jasani
---
 dev-support/Jenkinsfile                       |  5 ++++-
 .../hbase_nightly_pseudo-distributed-test.sh  | 20 ++++++++++++++------
 dev-support/hbase_nightly_source-artifact.sh  |  2 +-
 3 files changed, 19 insertions(+), 8 deletions(-)

diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 9a696e3830c1..5994bbbdaaab 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -121,7 +121,7 @@ pipeline {
       }
       stage ('hadoop 2 cache') {
         environment {
-          HADOOP2_VERSION="2.8.5"
+          HADOOP2_VERSION="2.10.0"
         }
         steps {
           // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
@@ -656,6 +656,7 @@ pipeline {
                     --hbase-client-install "hbase-client" \
                     "hbase-install" \
                     "hadoop-2/bin/hadoop" \
+                    hadoop-2/share/hadoop/yarn/timelineservice \
                     hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                     hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                     hadoop-2/bin/mapred \
@@ -675,6 +676,7 @@ pipeline {
                     --hbase-client-install hbase-client \
                     hbase-install \
                     hadoop-3/bin/hadoop \
+                    hadoop-3/share/hadoop/yarn/timelineservice \
                     hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                     hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                     hadoop-3/bin/mapred \
@@ -690,6 +692,7 @@ pipeline {
                     --hbase-client-install hbase-client \
                     hbase-install \
                     hadoop-3/bin/hadoop \
+                    hadoop-3/share/hadoop/yarn/timelineservice \
                     hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                     hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                     hadoop-3/bin/mapred \
diff --git a/dev-support/hbase_nightly_pseudo-distributed-test.sh b/dev-support/hbase_nightly_pseudo-distributed-test.sh
index 2d77184cedaa..b5fdf998540c 100755
--- a/dev-support/hbase_nightly_pseudo-distributed-test.sh
+++ b/dev-support/hbase_nightly_pseudo-distributed-test.sh
@@ -18,7 +18,7 @@
 set -e

 function usage {
-  echo "Usage: ${0} [options] /path/to/component/bin-install /path/to/hadoop/executable /path/to/hadoop/hadoop-yarn-server-tests-tests.jar /path/to/hadoop/hadoop-mapreduce-client-jobclient-tests.jar /path/to/mapred/executable"
+  echo "Usage: ${0} [options] /path/to/component/bin-install /path/to/hadoop/executable /path/to/share/hadoop/yarn/timelineservice /path/to/hadoop/hadoop-yarn-server-tests-tests.jar /path/to/hadoop/hadoop-mapreduce-client-jobclient-tests.jar /path/to/mapred/executable"
   echo ""
   echo "    --zookeeper-data /path/to/use    Where the embedded zookeeper instance should write its data."
   echo "                                     defaults to 'zk-data' in the working-dir."
@@ -67,9 +67,10 @@ if [ $# -lt 5 ]; then
 fi
 component_install="$(cd "$(dirname "$1")"; pwd)/$(basename "$1")"
 hadoop_exec="$(cd "$(dirname "$2")"; pwd)/$(basename "$2")"
-yarn_server_tests_test_jar="$(cd "$(dirname "$3")"; pwd)/$(basename "$3")"
-mapred_jobclient_test_jar="$(cd "$(dirname "$4")"; pwd)/$(basename "$4")"
-mapred_exec="$(cd "$(dirname "$5")"; pwd)/$(basename "$5")"
+timeline_service_dir="$(cd "$(dirname "$3")"; pwd)/$(basename "$3")"
+yarn_server_tests_test_jar="$(cd "$(dirname "$4")"; pwd)/$(basename "$4")"
+mapred_jobclient_test_jar="$(cd "$(dirname "$5")"; pwd)/$(basename "$5")"
+mapred_exec="$(cd "$(dirname "$6")"; pwd)/$(basename "$6")"

 if [ ! -x "${hadoop_exec}" ]; then
-x "${hadoop_exec}" ]; then echo "hadoop cli does not appear to be executable." >&2 @@ -285,18 +286,25 @@ echo "Starting up Hadoop" if [ "${hadoop_version%.*.*}" -gt 2 ]; then "${mapred_exec}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" & else - HADOOP_CLASSPATH="${yarn_server_tests_test_jar}" "${hadoop_exec}" jar "${mapred_jobclient_test_jar}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" & + HADOOP_CLASSPATH="${timeline_service_dir}/*:${timeline_service_dir}/lib/*:${yarn_server_tests_test_jar}" "${hadoop_exec}" jar "${mapred_jobclient_test_jar}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" & fi echo "$!" > "${working_dir}/hadoop.pid" +# 2 + 4 + 8 + .. + 256 ~= 8.5 minutes. +max_sleep_time=512 sleep_time=2 -until [ -s "${working_dir}/hbase-conf/core-site.xml" ]; do +until [[ -s "${working_dir}/hbase-conf/core-site.xml" || "${sleep_time}" -ge "${max_sleep_time}" ]]; do printf '\twaiting for Hadoop to finish starting up.\n' sleep "${sleep_time}" sleep_time="$((sleep_time*2))" done +if [ "${sleep_time}" -ge "${max_sleep_time}" ] ; then + echo "time out waiting for Hadoop to startup" >&2 + exit 1 +fi + if [ "${hadoop_version%.*.*}" -gt 2 ]; then echo "Verifying configs" "${hadoop_exec}" --config "${working_dir}/hbase-conf/" conftest diff --git a/dev-support/hbase_nightly_source-artifact.sh b/dev-support/hbase_nightly_source-artifact.sh index cd17a32cbff3..bbd92fb1650a 100755 --- a/dev-support/hbase_nightly_source-artifact.sh +++ b/dev-support/hbase_nightly_source-artifact.sh @@ -182,7 +182,7 @@ if mvn -DskipTests -Prelease --batch-mode -Dmaven.repo.local="${m2_tarbuild}" cl fi done fi -echo "Building a binary tarball from the source tarball failed. see srctarball_install.log for details." +echo "Building a binary tarball from the source tarball failed. see ${working_dir}/srctarball_install.log for details." # Copy up the rat.txt to the working dir so available in build archive in case rat complaints. # rat.txt can be under any module target dir... copy them all up renaming them to include parent dir as we go. find ${unpack_dir} -name rat.txt -type f | while IFS= read -r NAME; do cp -v "$NAME" "${working_dir}/${NAME//\//_}"; done