Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
223 changes: 0 additions & 223 deletions dev-support/Jenkinsfile
Original file line number Diff line number Diff line change
Expand Up @@ -899,224 +899,6 @@ pipeline {
} //script
} //steps
} //stage ('yetus jdk17 hadoop3 backwards compatibility checks')

// This is meant to mimic what a release manager will do to create RCs.
// See http://hbase.apache.org/book.html#maven.release
// TODO (HBASE-23870): replace this with invocation of the release tool
stage ('packaging and integration') {
agent {
node {
label 'hbase'
}
}
environment {
// BASEDIR is the checked-out source tree; BRANCH selects whether the Hadoop 2 run happens below.
BASEDIR = "${env.WORKSPACE}/component"
BRANCH = "${env.BRANCH_NAME}"
}
steps {
dir('component') {
checkout scm
}
// Lay out a clean workspace: artifact output dirs, unpack dirs, per-hadoop test dirs, fresh m2 repos.
sh '''#!/bin/bash -e
echo "Setting up directories"
# output-srctarball: source-artifact build results; output-integration: per-hadoop logs and Jira commentfiles
rm -rf "output-srctarball" && mkdir "output-srctarball"
rm -rf "output-integration" && mkdir "output-integration" "output-integration/hadoop-2" "output-integration/hadoop-3" "output-integration/hadoop-3-shaded"
rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
rm -rf "hbase-install" && mkdir "hbase-install"
rm -rf "hbase-client" && mkdir "hbase-client"
# hadoop3-specific install/client dirs are only (re)created later, if hadoop3 artifacts were produced
rm -rf "hbase-hadoop3-install"
rm -rf "hbase-hadoop3-client"
rm -rf "hadoop-2" && mkdir "hadoop-2"
rm -rf "hadoop-3" && mkdir "hadoop-3"
# separate m2 repos so the source-tarball rebuild cannot reuse artifacts from the initial build
rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
rm -rf ".m2-for-src" && mkdir ".m2-for-src"
# remove old hadoop tarballs in workspace
rm -rf hadoop-2*.tar.gz
rm -rf hadoop-3*.tar.gz
rm -f "output-integration/commentfile"
'''
// Capture machine environment stats next to the srctarball output for post-mortem debugging.
sh '''#!/usr/bin/env bash
set -e
rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
echo "got the following saved stats in 'output-srctarball/machine'"
ls -lh "output-srctarball/machine"
'''
// Build the source tarball (and from it the binary tarballs) inside the project docker image,
// mimicking the RM steps from the book (see the comment above this stage).
sh '''#!/bin/bash -e
echo "Checking the steps for an RM to make a source artifact, then a binary artifact."
docker build -t hbase-integration-test -f "${BASEDIR}/dev-support/docker/Dockerfile" .
docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
-u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" --workdir=/hbase hbase-integration-test \
"component/dev-support/hbase_nightly_source-artifact.sh" \
--intermediate-file-dir output-srctarball \
--unpack-temp-dir unpacked_src_tarball \
--maven-m2-initial .m2-for-repo \
--maven-m2-src-build .m2-for-src \
--clean-source-checkout \
component
# NOTE(review): this script runs under 'bash -e', so a failing 'docker run' aborts before
# reaching this check; the else branch below looks unreachable and the failure commentfile
# is instead written by the post/always fallback at the bottom of this stage — confirm intent.
if [ $? -eq 0 ]; then
echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
else
echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
exit 1
fi
'''
echo "unpacking the hbase bin tarball into 'hbase-install' and the client tarball into 'hbase-client'"
sh '''#!/bin/bash -e
# Expect exactly two non-hadoop3 binary tarballs (install + client) out of the source build.
if [ 2 -ne $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | grep -v hadoop3 | wc -l) ]; then
echo '(x) {color:red}-1 testing binary artifact{color}\n-- source tarball did not produce the expected binaries.' >>output-srctarball/commentfile
exit 1
fi
# NOTE(review): '--strip-component=1' relies on GNU tar accepting the unambiguous
# abbreviation of '--strip-components'; other sh blocks below spell it in full — confirm.
install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-bin.tar.gz | grep -v client-bin | grep -v hadoop3)
tar --strip-component=1 -xzf "${install_artifact}" -C "hbase-install"
client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-client-bin.tar.gz | grep -v hadoop3)
tar --strip-component=1 -xzf "${client_artifact}" -C "hbase-client"
# Optional hadoop3-flavored artifacts: when present (exactly two), unpack them so the
# hadoop3 integration runs below test against the hadoop3 binaries instead.
if [ 2 -eq $(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-bin.tar.gz | wc -l) ]; then
echo "hadoop3 artifacts available, unpacking the hbase hadoop3 bin tarball into 'hbase-hadoop3-install' and the client hadoop3 tarball into 'hbase-hadoop3-client'"
mkdir hbase-hadoop3-install
mkdir hbase-hadoop3-client
hadoop3_install_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-bin.tar.gz | grep -v client-bin)
tar --strip-component=1 -xzf "${hadoop3_install_artifact}" -C "hbase-hadoop3-install"
hadoop3_client_artifact=$(ls -1 "${WORKSPACE}"/unpacked_src_tarball/hbase-assembly/target/hbase-*-hadoop3-*-client-bin.tar.gz)
tar --strip-component=1 -xzf "${hadoop3_client_artifact}" -C "hbase-hadoop3-client"
fi
'''
// The hadoop-2 tarball is stashed by an earlier stage (outside this view).
unstash 'hadoop-2'
// Client integration test on Hadoop 2 — only for branch-2 derived branches.
sh '''#!/bin/bash -xe
if [[ "${BRANCH}" == *"branch-2"* ]]; then
echo "Attempting to use run an instance on top of Hadoop 2."
artifact=$(ls -1 "${WORKSPACE}"/hadoop-2*.tar.gz | head -n 1)
tar --strip-components=1 -xzf "${artifact}" -C "hadoop-2"
docker build -t hbase-integration-test -f "${BASEDIR}/dev-support/docker/Dockerfile" .
docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
-u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-8" --workdir=/hbase hbase-integration-test \
component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
--single-process \
--working-dir output-integration/hadoop-2 \
--hbase-client-install "hbase-client" \
hbase-install \
hadoop-2/bin/hadoop \
hadoop-2/share/hadoop/yarn/timelineservice \
hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
hadoop-2/bin/mapred \
>output-integration/hadoop-2.log 2>&1
# NOTE(review): under 'bash -xe' a failing 'docker run' aborts before this check, so the
# failure commentfile below appears unreachable (the post/always fallback covers it) — confirm.
if [ $? -ne 0 ]; then
echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
exit 2
fi
echo "(/) {color:green}+1 client integration test for HBase 2 {color}" >output-integration/commentfile
else
echo "Skipping to run against Hadoop 2 for branch ${BRANCH}"
fi
'''
// Repeat the client integration test for each configured Hadoop 3 version,
// once with the normal classpath and once with Hadoop's shaded client jars.
script {
// hadoop3_versions is defined elsewhere in this Jenkinsfile (outside this view).
for (hadoop3_version in hadoop3_versions) {
env.HADOOP3_VERSION = hadoop3_version;
// NOTE(review): this echoes the literal string "env.HADOOP3_VERSION" concatenated with
// env.hadoop3_version (lowercase) — likely intended a label plus env.HADOOP3_VERSION; confirm.
echo "env.HADOOP3_VERSION" + env.hadoop3_version;
stage ("packaging and integration Hadoop 3 inner stage ") {
unstash "hadoop-" + env.HADOOP3_VERSION
sh '''#!/bin/bash -e
echo "Attempting to use run an instance on top of Hadoop ${HADOOP3_VERSION}."
# Clean up any previous tested Hadoop3 files before unpacking the current one
rm -rf hadoop-3/*
# Create working dir
rm -rf "output-integration/hadoop-${HADOOP3_VERSION}" && mkdir "output-integration/hadoop-${HADOOP3_VERSION}"
rm -rf "output-integration/hadoop-${HADOOP3_VERSION}-shaded" && mkdir "output-integration/hadoop-${HADOOP3_VERSION}-shaded"
artifact=$(ls -1 "${WORKSPACE}"/hadoop-${HADOOP3_VERSION}-bin.tar.gz | head -n 1)
tar --strip-components=1 -xzf "${artifact}" -C "hadoop-3"
# we need to patch some files otherwise minicluster will fail to start, see MAPREDUCE-7471
${BASEDIR}/dev-support/patch-hadoop3.sh hadoop-3
# Prefer the hadoop3-flavored hbase binaries when the earlier unpack step produced them.
hbase_install_dir="hbase-install"
hbase_client_dir="hbase-client"
if [ -d "hbase-hadoop3-install" ]; then
echo "run hadoop3 client integration test against hbase hadoop3 binaries"
hbase_install_dir="hbase-hadoop3-install"
hbase_client_dir="hbase-hadoop3-client"
fi
docker build -t hbase-integration-test -f "${BASEDIR}/dev-support/docker/Dockerfile" .
docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
-u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" \
-e HADOOP_OPTS="--add-opens java.base/java.lang=ALL-UNNAMED" \
--workdir=/hbase hbase-integration-test \
component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
--single-process \
--working-dir output-integration/hadoop-${HADOOP3_VERSION} \
--hbase-client-install ${hbase_client_dir} \
${hbase_install_dir} \
hadoop-3/bin/hadoop \
hadoop-3/share/hadoop/yarn/timelineservice \
hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
hadoop-3/bin/mapred \
>output-integration/hadoop-${HADOOP3_VERSION}.log 2>&1
# NOTE(review): under 'bash -e' a failing 'docker run' aborts before this check — see the
# equivalent note on the Hadoop 2 block above; confirm intent.
if [ $? -ne 0 ]; then
echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop ${HADOOP3_VERSION}. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-${HADOOP3_VERSION}.log]. (note that this means we didn't check the Hadoop ${HADOOP3_VERSION} shaded client)" >> output-integration/commentfile
exit 2
fi
# Second pass: same test but resolving the Hadoop side from the shaded client jars.
echo "Attempting to use run an instance on top of Hadoop ${HADOOP3_VERSION}, relying on the Hadoop client artifacts for the example client program."
docker run --rm -v "${WORKSPACE}":/hbase -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro \
-u `id -u`:`id -g` -e JAVA_HOME="/usr/lib/jvm/java-17" \
-e HADOOP_OPTS="--add-opens java.base/java.lang=ALL-UNNAMED" \
--workdir=/hbase hbase-integration-test \
component/dev-support/hbase_nightly_pseudo-distributed-test.sh \
--single-process \
--hadoop-client-classpath hadoop-3/share/hadoop/client/hadoop-client-api-*.jar:hadoop-3/share/hadoop/client/hadoop-client-runtime-*.jar \
--working-dir output-integration/hadoop-${HADOOP3_VERSION}-shaded \
--hbase-client-install ${hbase_client_dir} \
${hbase_install_dir} \
hadoop-3/bin/hadoop \
hadoop-3/share/hadoop/yarn/timelineservice \
hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
hadoop-3/bin/mapred \
>output-integration/hadoop-${HADOOP3_VERSION}-shaded.log 2>&1
if [ $? -ne 0 ]; then
echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop ${HADOOP3_VERSION} using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-${HADOOP3_VERSION}-shaded.log]." >> output-integration/commentfile
exit 2
fi
echo "(/) {color:green}+1 client integration test for ${HADOOP3_VERSION} {color}" >> output-integration/commentfile
'''
} //stage ("packaging and integration Hadoop 3 inner stage ")
} //for
} // script
} //steps
post {
always {
// Fallback: if the integration commentfile was never written (e.g. an sh step aborted
// under 'set -e' before its own error handling ran), record generic failures for both outputs.
sh '''#!/bin/bash -e
if [ ! -f "output-integration/commentfile" ]; then
echo "(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-srctarball/commentfile
echo "(x) {color:red}-1 client integration test{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >output-integration/commentfile
fi
'''
// Commentfiles are consumed by the report/comment step elsewhere in this Jenkinsfile.
stash name: 'srctarball-result', includes: "output-srctarball/commentfile,output-integration/commentfile"
// Publish the source tarball to the ASF nightlies host.
sshPublisher(publishers: [
sshPublisherDesc(configName: 'Nightlies',
transfers: [
sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
sourceFiles: "output-srctarball/hbase-src.tar.gz"
)
]
)
])
// remove the big src tarball, store the nightlies url in hbase-src.html
sh '''#!/bin/bash -e
SRC_TAR="${WORKSPACE}/output-srctarball/hbase-src.tar.gz"
if [ -f "${SRC_TAR}" ]; then
echo "Remove ${SRC_TAR} for saving space"
rm -rf "${SRC_TAR}"
python3 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/output-srctarball" > "${WORKSPACE}/output-srctarball/hbase-src.html"
else
echo "No hbase-src.tar.gz, skipping"
fi
'''
archiveArtifacts artifacts: 'output-srctarball/*'
archiveArtifacts artifacts: 'output-srctarball/**/*'
archiveArtifacts artifacts: 'output-integration/*'
archiveArtifacts artifacts: 'output-integration/**/*'
} //always
} //post
} //stage packaging
} // parallel
} //stage:_health checks
} //stages
Expand All @@ -1134,15 +916,12 @@ pipeline {
rm -rf ${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}
rm -rf ${OUTPUT_DIR_RELATIVE_JDK17_HADOOP3}
rm -rf ${OUTPUT_DIR_RELATIVE_JDK17_HADOOP3_BACKWARDS}-*
rm -rf output-srctarball
rm -rf output-integration
'''
unstash 'general-result'
unstash 'jdk8-hadoop2-result'
unstash 'jdk8-hadoop3-result'
unstash 'jdk11-hadoop3-result'
unstash 'jdk17-hadoop3-result'
unstash 'srctarball-result'

def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
Expand All @@ -1157,8 +936,6 @@ pipeline {
unstash("jdk17-hadoop3-backwards-result-${hadoop3_version}")
results.add("${env.OUTPUT_DIR_RELATIVE_JDK17_HADOOP3_BACKWARDS}-${hadoop3_version}/commentfile")
}
results.add('output-srctarball/commentfile')
results.add('output-integration/commentfile')
echo env.BRANCH_NAME
echo env.BUILD_URL
echo currentBuild.result
Expand Down
Loading