Add periodic etcd scraping to integration tests #106190

Merged
32 changes: 32 additions & 0 deletions hack/lib/etcd.sh
@@ -91,7 +91,39 @@ kube::etcd::start() {
  curl -fs -X POST "${KUBE_INTEGRATION_ETCD_URL}/v3/kv/put" -d '{"key": "X3Rlc3Q=", "value": ""}'
}

kube::etcd::start_scraping() {
  if [[ -d "${ARTIFACTS:-}" ]]; then
    ETCD_SCRAPE_DIR="${ARTIFACTS}/etcd-scrapes"
  else
    ETCD_SCRAPE_DIR=$(mktemp -d -t test.XXXXXX)/etcd-scrapes
  fi
  kube::log::info "Periodically scraping etcd to ${ETCD_SCRAPE_DIR} ."
  mkdir -p "${ETCD_SCRAPE_DIR}"
  (
    while sleep 30; do
      kube::etcd::scrape
    done
  ) &
  ETCD_SCRAPE_PID=$!
}

kube::etcd::scrape() {
  curl -s -S "${KUBE_INTEGRATION_ETCD_URL}/metrics" > "${ETCD_SCRAPE_DIR}/next" && mv "${ETCD_SCRAPE_DIR}/next" "${ETCD_SCRAPE_DIR}/$(date +%s).scrape"
}

kube::etcd::stop() {
  if [[ -n "${ETCD_SCRAPE_PID:-}" ]] && [[ -n "${ETCD_SCRAPE_DIR:-}" ]] ; then
    kill "${ETCD_SCRAPE_PID}" &>/dev/null || :
    wait "${ETCD_SCRAPE_PID}" &>/dev/null || :
    kube::etcd::scrape || :
    (
      # shellcheck disable=SC2015
      cd "${ETCD_SCRAPE_DIR}"/.. && \
        tar czf etcd-scrapes.tgz etcd-scrapes && \
        rm -rf etcd-scrapes || :
    )
  fi
  if [[ -n "${ETCD_PID-}" ]]; then
    kill "${ETCD_PID}" &>/dev/null || :
    wait "${ETCD_PID}" &>/dev/null || :
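Taken together, a harness drives these helpers roughly as follows (a minimal sketch; sourcing via hack/lib/init.sh matches test-integration.sh below, while run_my_tests is a hypothetical placeholder):

# Sketch of the scrape lifecycle added above.
source hack/lib/init.sh       # pulls in hack/lib/etcd.sh
kube::etcd::start             # launch the etcd instance under test
kube::etcd::start_scraping    # scrape ${KUBE_INTEGRATION_ETCD_URL}/metrics every 30s
run_my_tests                  # hypothetical test run
kube::etcd::stop              # kills the scraper, takes one final scrape, and
                              # packs the results into ${ARTIFACTS}/etcd-scrapes.tgz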
3 changes: 3 additions & 0 deletions hack/make-rules/test-integration.sh
@@ -64,6 +64,9 @@ runTests() {
kube::log::status "Starting etcd instance"
CLEANUP_REQUIRED=1
kube::etcd::start
# shellcheck disable=SC2034
local ETCD_SCRAPE_PID # Set in kube::etcd::start_scraping, used in cleanup
kube::etcd::start_scraping
Contributor:

The idea of doing this scrape using a simple curl for integration tests seems lightweight enough to try. I'm not entirely sure how you will stitch this back together with which tests were running at a particular time, but given that it doesn't really add to our dependency stack, I'm OK with giving it a try.

@aojea (Member) on Nov 8, 2021:

The job stores the etcd logs and the test output with timestamps. It is not great, but it is better than before 🤷; we can correlate those logs with the graphs obtained here.

Member Author:

In the case I was looking at, there was a log with timestamps.
kube::log::status "Running integration test cases"

make -C "${KUBE_ROOT}" test \
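As the discussion notes, stitching a scrape back to what was running at the time is done by timestamp: each scrape file is named for the epoch second at which it was taken, so it can be matched against timestamped test logs, e.g. with GNU date (filename value assumed for illustration):

# Convert a scrape file's epoch-seconds name to a human-readable time.
date -u -d @1636400000
# Mon Nov  8 19:33:20 UTC 2021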
98 changes: 98 additions & 0 deletions hack/run-prometheus-on-etcd-scrapes.sh
@@ -0,0 +1,98 @@
#!/usr/bin/env bash

# Copyright 2021 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Unpacks a tarfile of etcd scrapes and runs a simple web server exposing it
# and a Prometheus server scraping that simple web server.
# The simple web server listens on port 9091.
# The Prometheus server is run in a container and looks for the
# simple web server at the host's first global IPv4 address.

# Usage: $0 scrapes_tar_pathname
#
# Where scrapes_tar_pathname is a gzipped tar archive containing
# files whose name is of the form
# <timestamp>.scrape
# where <timestamp> is seconds since Jan 1, 1970 UTC.
# Each such file is taken to be a scrape that lacks timestamps;
# the timestamp from the filename, multiplied by 1000 (Prometheus
# expects milliseconds), is appended to each sample in that file.
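#
# For example (values assumed for illustration): a file named
#     1636400000.scrape
# containing the sample line
#     etcd_server_has_leader 1
# is replayed to Prometheus as
#     etcd_server_has_leader 1 1636400000000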

# This requires:
# - a `docker run` command
# - an `ip` or `ifconfig` command that this script knows how to wrangle
# - an `nc` command that serve-prom-scrapes.sh knows how to wrangle
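#
# Example invocation (artifact path assumed):
#     hack/run-prometheus-on-etcd-scrapes.sh _artifacts/etcd-scrapes.tgz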

if (( $# != 1 )); then
  echo "Usage: $0 \$scrapes_tar_pathname" >&2
  exit 1
fi

scrapes_file="$1"

if ! [[ -r "$scrapes_file" ]]; then
  echo "$0: $scrapes_file is not a readable file" >&2
  exit 2
fi

SCRIPT_ROOT=$(dirname "${BASH_SOURCE[0]}")

CONFIG="/tmp/$(cd /tmp && mktemp config.XXXXXX)"
UNPACKDIR="/tmp/$(cd /tmp && mktemp -d unpack.XXXXXX)"
SERVER_PID=""

cleanup_prom() {
  rm -f "$CONFIG"
  rm -rf "$UNPACKDIR"
  if [[ -n "$SERVER_PID" ]]; then
    kill "$SERVER_PID"
  fi
}

trap cleanup_prom EXIT

chmod +r "$CONFIG" "$UNPACKDIR"

tar xzf "$scrapes_file" -C "$UNPACKDIR"

# Find the host's first global IPv4 address; the Prometheus container started
# below must reach the scrape server through it, since 127.0.0.1 inside the
# container does not refer to the host.
if which ip > /dev/null; then
  IPADDR=$(ip addr show scope global up |
           grep -w inet | head -1 |
           awk '{ print $2 }' | awk -F/ '{ print $1 }')
else
  IPADDR=$(ifconfig | grep -w inet | grep -Fv 127.0.0. | head -1 |
           awk '{ print $2 }' | awk -F/ '{ print $1 }')
fi

echo
echo "Historic metrics will be at http://\${any_local_address}:9091/\${any_path}"
echo "Prometheus will listen on port 9090 and scrape historic metrics from http://${IPADDR}:9091/metrics"
sleep 1
echo

cat > "$CONFIG" <<EOF
global:
scrape_interval: 30s

scrape_configs:

- job_name: local
static_configs:
- targets: ['${IPADDR}:9091']
EOF

"${SCRIPT_ROOT}/serve-prom-scrapes.sh" 9091 "$UNPACKDIR" &
SERVER_PID=$!
docker run -p 9090:9090 -v "${CONFIG}:/config.yaml" prom/prometheus --config.file=/config.yaml --storage.tsdb.retention.time=3650d
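
Once the container is up, the replayed series can be browsed at http://localhost:9090 or queried through Prometheus's HTTP API; for example (metric name and epoch value assumed; any metric present in the scrapes works, and time= must fall inside the replayed window):

# Instant query evaluated at a point inside the test window.
curl -s 'http://localhost:9090/api/v1/query?query=etcd_mvcc_db_total_size_in_bytes&time=1636400000'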
67 changes: 67 additions & 0 deletions hack/serve-prom-scrapes.sh
@@ -0,0 +1,67 @@
#!/usr/bin/env bash

# Copyright 2021 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Serves a collection of scrape files up to Prometheus scraping.

# Usage: $0 port_num scrapes-dir
#
# Where scrapes-dir has descendant files whose name is of the form
# <timestamp>.scrape
# where <timestamp> is seconds since Jan 1, 1970 UTC.
# Each such file is taken to be a scrape that lacks timestamps;
# the timestamp from the filename, multiplied by 1000 (Prometheus
# expects milliseconds), is appended to each sample in that file.
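#
# Example invocation (port and directory assumed):
#     hack/serve-prom-scrapes.sh 9091 ./etcd-scrapes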

# This requires an `nc` command that this script knows how to wrangle.

if (( $# != 2 )); then
  echo "Usage: $0 port_num scrapes_dir" >&2
  exit 1
fi

port_num="$1"
scrapes_dir="$2"
response_file="/tmp/$(cd /tmp && mktemp response.XXXXXX)"

cleanup_serve() {
  rm -rf "$response_file"
}

trap cleanup_serve EXIT

chmod +r "$response_file"

# Rewrite one scrape file for replay: the file's epoch-seconds name, times
# 1000, is appended to every non-comment line as an explicit millisecond
# timestamp in the Prometheus exposition format.
transform() {
  path="$1"
  base="$(basename "$path")"
  seconds="${base%.scrape}"
  sed 's/^\([^#].*\)$/\1 '"${seconds}000/" "$path"
}

find_and_transform() {
  echo -n $'HTTP/1.0 200 OK\r\nContent-Type: text/plain\r\n\r\n' > "$response_file"
  find "$scrapes_dir" -name "*.scrape" -print0 | sort -z | while read -d '' -r scrapename; do transform "$scrapename" >> "$response_file"; done
}

find_and_transform

# Probe the man page to learn whether this nc variant supports -N
# (shut down the socket on EOF of stdin).
if man nc | grep -wq -e -N
then dashen=-N
else dashen=
fi

# Serve the canned response to one connection at a time, forever, pausing
# 10s between connections (well under Prometheus's 30s scrape interval).
# shellcheck disable=SC2086
while true; do nc -l $dashen 0.0.0.0 "$port_num" < "$response_file" > /dev/null; sleep 10; done