diff --git a/.gitignore b/.gitignore
index 6b9751b..6f52a5c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -46,3 +46,6 @@ debian/php5-tarantool.substvars
 debian/php5-tarantool/
 build
 .*.sw[a-z]
+
+# Unencrypted private GPG keys for deployment.
+.travis/*.asc
diff --git a/.travis.yml b/.travis.yml
index 1ab75ee..94339ea 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -129,20 +129,61 @@ python:
 
 script:
   - |
+    # Make the shell stricter.
+    #
+    # - Exit with a failure on the first failed command.
+    # - Print each executed command.
+    set -ex
+
     if [ -n "${TARANTOOL_VERSION}" ]; then
         ./test.sh
     elif [ -n "${OS}" ] && [ -n "${DIST}" ]; then
         git clone --depth 1 https://github.com/packpack/packpack.git
         ./packpack/packpack
         if [ "${OS}" = "el" ]; then
-            export OS=centos
+            DOCKER_IMAGE="centos:${DIST}"
+        else
+            DOCKER_IMAGE="${OS}:${DIST}"
         fi
         docker run \
             --volume "$(realpath .):/tarantool-php" \
             --workdir /tarantool-php \
             --rm \
-            "${OS}:${DIST}" \
+            "${DOCKER_IMAGE}" \
             ./test.pkg.sh
     else
         exit 1
     fi
+
+    # Deploy
+    # ------
+
+    # Skip deployment when it is not expected.
+    if [ -z "${OS}" ] || [ -z "${DIST}" ]; then
+        echo "Skip deployment: it is a pure testing job w/o any RPM / Deb artefacts"
+        exit 0
+    fi
+    if [ "${TRAVIS_REPO_SLUG}" != "tarantool/tarantool-php" ]; then
+        echo "Skip deployment: it is a fork, not the base repository"
+        exit 0
+    fi
+    if [ "${TRAVIS_EVENT_TYPE}" != "push" ]; then
+        echo "Skip deployment: event is not 'push', but ${TRAVIS_EVENT_TYPE}"
+        exit 0
+    fi
+
+    # Choose the destination to push packages to.
+    if [ "${TRAVIS_BRANCH}" == "master" ] || [ -n "${TRAVIS_TAG}" ]; then
+        echo "Set production deployment parameters"
+        configuration=production
+    else
+        echo "Set staging deployment parameters"
+        configuration=staging
+    fi
+
+    # Deploy to packagecloud repositories.
+    ./.travis/deploy_packagecloud.sh ${configuration}
+
+    # Deploy to S3 based repositories.
+    ./.travis/deploy_s3_dependencies.sh
+    ./.travis/deploy_s3.sh ${configuration}
diff --git a/.travis/deploy_packagecloud.sh b/.travis/deploy_packagecloud.sh
new file mode 100755
index 0000000..eb93593
--- /dev/null
+++ b/.travis/deploy_packagecloud.sh
@@ -0,0 +1,132 @@
+#!/bin/sh
+
+# Deploy to packagecloud repositories
+# -----------------------------------
+#
+# `deploy_packagecloud.sh` is equivalent to
+# `deploy_packagecloud.sh staging`.
+#
+# `deploy_packagecloud.sh staging` requires the following
+# environment variables:
+#
+# - OS
+# - DIST
+# - DEPLOY_STAGING_PACKAGECLOUD_USER
+# - DEPLOY_STAGING_PACKAGECLOUD_TOKEN
+#
+# `deploy_packagecloud.sh production` requires the following
+# environment variables:
+#
+# - OS
+# - DIST
+# - DEPLOY_PRODUCTION_PACKAGECLOUD_USER
+# - DEPLOY_PRODUCTION_PACKAGECLOUD_TOKEN
+#
+# If one of those variables is not set or is empty, the deployment
+# will be skipped.
+
+# Make the shell stricter.
+#
+# - Exit with a failure on the first failed command.
+# - Exit with a failure on an attempt to use an unset variable.
+# - Print each executed command.
+#
+# Note: The script expects that Travis-CI will filter sensitive
+# information (such as a token): the 'Display value in build log'
+# toggle should be OFF to keep the value secure.
+set -eux
+
+configuration=${1:-staging}
+
+# Choose credentials.
+if [ ${configuration} = staging ]; then
+    DEPLOY_PACKAGECLOUD_USER="${DEPLOY_STAGING_PACKAGECLOUD_USER:-}"
+    DEPLOY_PACKAGECLOUD_TOKEN="${DEPLOY_STAGING_PACKAGECLOUD_TOKEN:-}"
+elif [ ${configuration} = production ]; then
+    DEPLOY_PACKAGECLOUD_USER="${DEPLOY_PRODUCTION_PACKAGECLOUD_USER:-}"
+    DEPLOY_PACKAGECLOUD_TOKEN="${DEPLOY_PRODUCTION_PACKAGECLOUD_TOKEN:-}"
+else
+    echo "Unknown configuration: ${configuration}"
+    exit 1
+fi
+
+# Skip deployment if some required variables are not set or empty.
+if [ -z "${OS:-}" ] || [ -z "${DIST:-}" ] || \
+    [ -z "${DEPLOY_PACKAGECLOUD_USER}" ] || \
+    [ -z "${DEPLOY_PACKAGECLOUD_TOKEN}" ]; then
+    echo "Skip deployment: some of the necessary environment"
+    echo "variables are not set or empty"
+    exit 0
+fi
+
+# Verify that packpack is cloned into the current directory.
+packagecloud_tool=./packpack/tools/packagecloud
+if [ ! -f "${packagecloud_tool}" ]; then
+    echo "Could not find ${packagecloud_tool}"
+    exit 1
+fi
+
+# Staging repository: keep older packages in case of a
+# version clash.
+#
+# It would be better to replace old ones, but there is no
+# such option in the packagecloud tool we use. It may be
+# important if we have some manual or automatic testing
+# on top of a staging repository. But at least CI will not
+# fail because a package already exists.
+push_args=""
+if [ "${configuration}" = staging ]; then
+    push_args="${push_args} --ignore-duplicates"
+fi
+
+# Set up environment variables for the packagecloud tool.
+export PACKAGECLOUD_TOKEN="${DEPLOY_PACKAGECLOUD_TOKEN}"
+
+# We have tarantool repositories on packagecloud.io up to
+# 2_4. The next ones are present only in the S3 based storage.
+for repo in 1_6 1_7 1_9 1_10 2x 2_2 2_3 2_4; do
+    # FIXME: Enable *.ddeb when packagecloud.io supports it.
+    for file in build/*.rpm build/*.deb build/*.dsc; do
+        extension=${file##*.}
+
+        # Skip non-matched globs: say, build/*.rpm on Debian.
+        basename="$(basename "${file}" ".${extension}")"
+        [ "${basename}" = "*" ] && continue
+
+        # Push all source files listed in the .dsc file together
+        # with the file itself.
+        #
+        # FIXME: It seems logical to move this logic to the
+        # packagecloud tool we use.
+        files="${file}"
+        if [ "${extension}" = "dsc" ]; then
+            parse_dsc_file='{
+                if ($0 == "Files:") {
+                    FILES_SECTION = 1;
+                } else if (FILES_SECTION != 0) {
+                    print "build/"$3;
+                }
+            }'
+            files="${files} $(awk "${parse_dsc_file}" "${file}")"
+        fi
+
+        user=${DEPLOY_PACKAGECLOUD_USER}
+
+        # Retry failed attempts to upload a package.
+        #
+        # packagecloud.io sometimes replies with 502 Bad Gateway
+        # to push attempts, so retrying is important here.
+        #
+        # FIXME: This way we don't differentiate network errors
+        # from all other ones. It would be much better to retry
+        # from inside the packagecloud tool (the requests library
+        # supports it).
+        for i in $(seq 1 5); do
+            # FIXME: The tool fetches distributions.json each
+            # time. It could cache the data somewhere and reuse
+            # it for some period until it expires.
+            ${packagecloud_tool} push ${push_args} ${user}/${repo} \
+                ${extension} ${OS} ${DIST} ${files} && break
+        done
+    done
+done
diff --git a/.travis/deploy_production_s3_gpg_private_key.asc.enc b/.travis/deploy_production_s3_gpg_private_key.asc.enc
new file mode 100644
index 0000000..d5af471
Binary files /dev/null and b/.travis/deploy_production_s3_gpg_private_key.asc.enc differ
diff --git a/.travis/deploy_s3.sh b/.travis/deploy_s3.sh
new file mode 100755
index 0000000..cdd8684
--- /dev/null
+++ b/.travis/deploy_s3.sh
@@ -0,0 +1,167 @@
+#!/bin/sh
+
+# Deploy to S3 based repositories
+# -------------------------------
+#
+# `deploy_s3.sh` is equivalent to `deploy_s3.sh staging`.
+#
+# `deploy_s3.sh staging` requires the following environment
+# variables:
+#
+# - OS
+# - DIST
+# - DEPLOY_STAGING_S3_ENDPOINT_URL="https://..."
+# - DEPLOY_STAGING_S3_LIVE_DIR="s3://my_bucket/foo/bar/live"
+# - DEPLOY_STAGING_S3_RELEASE_DIR="s3://my_bucket/foo/bar/release"
+# - DEPLOY_STAGING_S3_ACCESS_KEY_ID
+# - DEPLOY_STAGING_S3_SECRET_ACCESS_KEY
+# - DEPLOY_STAGING_S3_GPG_KEY_FILE_KEY (32 bytes in hex)
+# - DEPLOY_STAGING_S3_GPG_KEY_FILE_IV (16 bytes in hex)
+#
+# `deploy_s3.sh production` requires the following environment
+# variables:
+#
+# - OS
+# - DIST
+# - DEPLOY_PRODUCTION_S3_ENDPOINT_URL="https://..."
+# - DEPLOY_PRODUCTION_S3_LIVE_DIR="s3://my_bucket/foo/bar/live"
+# - DEPLOY_PRODUCTION_S3_RELEASE_DIR="s3://my_bucket/foo/bar/release"
+# - DEPLOY_PRODUCTION_S3_ACCESS_KEY_ID
+# - DEPLOY_PRODUCTION_S3_SECRET_ACCESS_KEY
+# - DEPLOY_PRODUCTION_S3_GPG_KEY_FILE_KEY (32 bytes in hex)
+# - DEPLOY_PRODUCTION_S3_GPG_KEY_FILE_IV (16 bytes in hex)
+#
+# If one of those variables is not set or is empty, the deployment
+# will be skipped.
+
+# Make the shell stricter.
+#
+# - Exit with a failure on the first failed command.
+# - Exit with a failure on an attempt to use an unset variable.
+# - Print each executed command.
+#
+# Note: The script expects that Travis-CI will filter sensitive
+# information (such as a token): the 'Display value in build log'
+# toggle should be OFF to keep the value secure.
+set -eux
+
+configuration=${1:-staging}
+
+# Choose URLs, directories, keys and so on.
+if [ ${configuration} = staging ]; then
+    DEPLOY_S3_ENDPOINT_URL="${DEPLOY_STAGING_S3_ENDPOINT_URL:-}"
+    DEPLOY_S3_LIVE_DIR="${DEPLOY_STAGING_S3_LIVE_DIR:-}"
+    DEPLOY_S3_RELEASE_DIR="${DEPLOY_STAGING_S3_RELEASE_DIR:-}"
+    DEPLOY_S3_ACCESS_KEY_ID="${DEPLOY_STAGING_S3_ACCESS_KEY_ID:-}"
+    DEPLOY_S3_SECRET_ACCESS_KEY="${DEPLOY_STAGING_S3_SECRET_ACCESS_KEY:-}"
+    DEPLOY_S3_GPG_KEY_FILE_KEY="${DEPLOY_STAGING_S3_GPG_KEY_FILE_KEY:-}"
+    DEPLOY_S3_GPG_KEY_FILE_IV="${DEPLOY_STAGING_S3_GPG_KEY_FILE_IV:-}"
+elif [ ${configuration} = production ]; then
+    DEPLOY_S3_ENDPOINT_URL="${DEPLOY_PRODUCTION_S3_ENDPOINT_URL:-}"
+    DEPLOY_S3_LIVE_DIR="${DEPLOY_PRODUCTION_S3_LIVE_DIR:-}"
+    DEPLOY_S3_RELEASE_DIR="${DEPLOY_PRODUCTION_S3_RELEASE_DIR:-}"
+    DEPLOY_S3_ACCESS_KEY_ID="${DEPLOY_PRODUCTION_S3_ACCESS_KEY_ID:-}"
+    DEPLOY_S3_SECRET_ACCESS_KEY="${DEPLOY_PRODUCTION_S3_SECRET_ACCESS_KEY:-}"
+    DEPLOY_S3_GPG_KEY_FILE_KEY="${DEPLOY_PRODUCTION_S3_GPG_KEY_FILE_KEY:-}"
+    DEPLOY_S3_GPG_KEY_FILE_IV="${DEPLOY_PRODUCTION_S3_GPG_KEY_FILE_IV:-}"
+else
+    echo "Unknown configuration: ${configuration}"
+    exit 1
+fi
+
+# Skip deployment if some required variables are not set or empty.
+if [ -z "${OS:-}" ] || [ -z "${DIST:-}" ] || \
+    [ -z "${DEPLOY_S3_ENDPOINT_URL}" ] || \
+    [ -z "${DEPLOY_S3_LIVE_DIR}" ] || \
+    [ -z "${DEPLOY_S3_RELEASE_DIR}" ] || \
+    [ -z "${DEPLOY_S3_ACCESS_KEY_ID}" ] || \
+    [ -z "${DEPLOY_S3_SECRET_ACCESS_KEY}" ] || \
+    [ -z "${DEPLOY_S3_GPG_KEY_FILE_KEY}" ] || \
+    [ -z "${DEPLOY_S3_GPG_KEY_FILE_IV}" ]; then
+    echo "Skip deployment: some of the necessary environment"
+    echo "variables are not set or empty"
+    exit 0
+fi
+
+# Download the tool to deploy to an S3 based repository.
+ref=f84cb1aae3144f5677feacf6be31bd4f15e91c2d
+base_url="https://raw.githubusercontent.com/tarantool/tarantool/${ref}"
+curl -Ssfo update_repo.sh "${base_url}/tools/update_repo.sh"
+chmod a+x update_repo.sh
+
+# FIXME: Upstream the patch.
+patch -p1 -i .travis/update-repo-sh-use-right-gpg-key.patch
+
+# Decrypt a GPG key.
+gpg_key_file=".travis/deploy_${configuration}_s3_gpg_private_key.asc"
+openssl aes-256-cbc -K "${DEPLOY_S3_GPG_KEY_FILE_KEY}" \
+    -iv "${DEPLOY_S3_GPG_KEY_FILE_IV}" -in "${gpg_key_file}.enc" \
+    -out "${gpg_key_file}" -d
+
+# Import the GPG key for signing repository files.
+gpg --import --batch "${gpg_key_file}"
+
+# Extract the GPG key id for signing repository files.
+#
+# This way works for both GnuPG 1 and GnuPG 2. The alternative
+# would be using '--import-options show-only', but it is available
+# only in GnuPG 2. See https://unix.stackexchange.com/a/468889
+mkdir -m 0700 temp-gpg-home
+gpg --homedir temp-gpg-home --import --batch "${gpg_key_file}"
+export GPG_SIGN_KEY="$(gpg --homedir temp-gpg-home --list-secret-keys \
+    --with-colons | grep ^sec: | cut -d: -f5)"
+rm -rf temp-gpg-home
+
+# Set up environment variables for the update_repo.sh tool.
+export AWS_S3_ENDPOINT_URL="${DEPLOY_S3_ENDPOINT_URL}"
+export AWS_ACCESS_KEY_ID="${DEPLOY_S3_ACCESS_KEY_ID}"
+export AWS_SECRET_ACCESS_KEY="${DEPLOY_S3_SECRET_ACCESS_KEY}"
+
+# The ${product} value may affect the location of *.deb, *.rpm and
+# related files relative to a base repository URL. We can provide
+# it or omit it: the script will generate correct repository
+# metainfo anyway.
+#
+# However, providing a meaningful value for this option enables
+# grouping of a related set of packages into a subdirectory named
+# after ${product} (only for Deb repositories at the moment of
+# writing this).
+#
+# It is enabled here for consistency with the locations of other
+# Deb packages in our repositories, but in fact it is an internal
+# detail, which does not lead to any change in the user
+# experience.
+product=php-tarantool
+
+# Set up arguments that are common for all repositories
+# (1.10, 2.1, ...).
+update_repo_args="--os=${OS} --distribution=${DIST} --product=${product}"
+
+# Staging repository: rewrite a package if there is a previous one
+# of the same version.
+#
+# Note: This differs from the logic in deploy_packagecloud.sh.
+if [ "${configuration}" = staging ]; then
+    update_repo_args="${update_repo_args} --force"
+fi
+
+# Deploy to S3 based repositories.
+for repo in 1.10 2.1 2.2 2.3 2.4 2.5; do
+    # Note: The update_repo.sh tool automatically finds
+    # *.{rpm,deb,dsc} within a passed directory, so we just
+    # pass the directory name: 'build'.
+
+    # FIXME: The machine-local locking used in the update_repo.sh
+    # tool is insufficient when we deploy from a freshly created
+    # virtual machine.
+
+    # Deploy to the live repository (per-push).
+    bucket="${DEPLOY_S3_LIVE_DIR}/${repo}"
+    ./update_repo.sh ${update_repo_args} --bucket="${bucket}" build
+
+    # Deploy to the release repository (tagged commits).
+    if [ -n "${TRAVIS_TAG:-}" ]; then
+        bucket="${DEPLOY_S3_RELEASE_DIR}/${repo}"
+        ./update_repo.sh ${update_repo_args} --bucket="${bucket}" build
+    fi
+done
diff --git a/.travis/deploy_s3_dependencies.sh b/.travis/deploy_s3_dependencies.sh
new file mode 100755
index 0000000..b147b18
--- /dev/null
+++ b/.travis/deploy_s3_dependencies.sh
@@ -0,0 +1,21 @@
+#!/bin/sh
+
+# Make the shell stricter.
+#
+# - Exit with a failure on the first failed command.
+# - Exit with a failure on an attempt to use an unset variable.
+# - Print each executed command.
+set -eux
+
+# Prevent the procmail package from asking for configuration
+# parameters interactively.
+# See https://github.com/packpack/packpack/issues/7
+export DEBIAN_FRONTEND=noninteractive
+SUDO="sudo -E"
+
+${SUDO} apt-get update > /dev/null
+
+${SUDO} apt-get install -y procmail # for the lockfile tool
+${SUDO} apt-get install -y awscli
+${SUDO} apt-get install -y reprepro
+${SUDO} apt-get install -y createrepo
diff --git a/.travis/deploy_staging_s3_gpg_private_key.asc.enc b/.travis/deploy_staging_s3_gpg_private_key.asc.enc
new file mode 100644
index 0000000..bf4225e
Binary files /dev/null and b/.travis/deploy_staging_s3_gpg_private_key.asc.enc differ
diff --git a/.travis/update-repo-sh-use-right-gpg-key.patch b/.travis/update-repo-sh-use-right-gpg-key.patch
new file mode 100644
index 0000000..59afdc0
--- /dev/null
+++ b/.travis/update-repo-sh-use-right-gpg-key.patch
@@ -0,0 +1,29 @@
+--- a/update_repo.sh	2020-06-09 17:35:03.332961335 +0300
++++ b/update_repo.sh	2020-06-23 02:26:55.532653673 +0300
+@@ -415,7 +415,7 @@
+     done
+     # resign the selfsigned InRelease file
+     $rm_file InRelease
+-    gpg --clearsign -o InRelease Release
++    gpg -u $GPG_SIGN_KEY --clearsign -o InRelease Release
+     # resign the Release file
+     $rm_file Release.gpg
+     gpg -u $GPG_SIGN_KEY -abs -o Release.gpg Release
+@@ -784,7 +784,7 @@
+ EOF
+     done
+     tail -n 1 repodata.adding/repomd.xml >>repodata/repomd.xml
+-    gpg --detach-sign --armor repodata/repomd.xml
++    gpg -u $GPG_SIGN_KEY --detach-sign --armor repodata/repomd.xml
+
+     # copy the packages to S3
+     for file in $pack_rpms ; do
+@@ -901,7 +901,7 @@
+     tail -n 1 repomd_saved.xml >>repomd.xml
+     rm -f repomd_saved.xml repomd.xml.asc
+     popd
+-    gpg --detach-sign --armor repodata/repomd.xml
++    gpg -u $GPG_SIGN_KEY --detach-sign --armor repodata/repomd.xml
+
+     # update the metadata at the S3
+     $aws_sync_public repodata "$bucket_path/$repopath/repodata"
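
For reference, a minimal sketch (not part of the patch) of how these entry points fit together for a staging deployment outside of Travis-CI, for example when debugging locally on a Debian/Ubuntu host. It mirrors the deployment part of .travis.yml; every credential and location value below is a placeholder assumption, and build/ is expected to already contain the RPM / Deb artefacts produced by packpack.

#!/bin/sh
# Illustrative sketch only: all credential values are placeholders.
set -eux

# Target OS / distribution of the packages in build/.
export OS=debian
export DIST=buster

# Staging packagecloud credentials
# (see the header comment of .travis/deploy_packagecloud.sh).
export DEPLOY_STAGING_PACKAGECLOUD_USER=example-user
export DEPLOY_STAGING_PACKAGECLOUD_TOKEN=0123456789abcdef

# Staging S3 locations, credentials and GPG key file parameters
# (see the header comment of .travis/deploy_s3.sh).
export DEPLOY_STAGING_S3_ENDPOINT_URL=https://s3.example.com
export DEPLOY_STAGING_S3_LIVE_DIR=s3://my_bucket/foo/bar/live
export DEPLOY_STAGING_S3_RELEASE_DIR=s3://my_bucket/foo/bar/release
export DEPLOY_STAGING_S3_ACCESS_KEY_ID=0123456789abcdef
export DEPLOY_STAGING_S3_SECRET_ACCESS_KEY=0123456789abcdef
# 32 bytes and 16 bytes in hex (placeholders).
export DEPLOY_STAGING_S3_GPG_KEY_FILE_KEY=0000000000000000000000000000000000000000000000000000000000000000
export DEPLOY_STAGING_S3_GPG_KEY_FILE_IV=00000000000000000000000000000000

# packpack is expected in ./packpack (see .travis.yml).
git clone --depth 1 https://github.com/packpack/packpack.git

# Push build/* to the staging repositories.
./.travis/deploy_packagecloud.sh staging
./.travis/deploy_s3_dependencies.sh
./.travis/deploy_s3.sh staging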
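
The decryption step in deploy_s3.sh also implies a one-time preparation step that is not part of this patch. A possible sketch of how the committed *.asc.enc files and the KEY / IV values could be produced, assuming a manually generated random key and IV and an example signing key id (the exact procedure used for this repository is not shown here):

#!/bin/sh
set -eux

# Export the ASCII-armored private signing key (the key id is an
# example, not the project's real one).
gpg --export-secret-keys --armor 0xDEADBEEF \
    > .travis/deploy_staging_s3_gpg_private_key.asc

# Generate a random 32-byte key and 16-byte IV, hex-encoded.
key=$(openssl rand -hex 32)
iv=$(openssl rand -hex 16)

# Encrypt the key file: the encrypted copy is committed, while the
# plain one is ignored via .gitignore (.travis/*.asc).
openssl aes-256-cbc -e -K "${key}" -iv "${iv}" \
    -in .travis/deploy_staging_s3_gpg_private_key.asc \
    -out .travis/deploy_staging_s3_gpg_private_key.asc.enc

# Store ${key} and ${iv} as DEPLOY_STAGING_S3_GPG_KEY_FILE_KEY and
# DEPLOY_STAGING_S3_GPG_KEY_FILE_IV in the Travis-CI settings with
# 'Display value in build log' toggled OFF.
echo "${key} ${iv}"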