diff --git a/.evergreen/generated_configs/functions.yml b/.evergreen/generated_configs/functions.yml
index a28cd2596e..7d9ab2df3b 100644
--- a/.evergreen/generated_configs/functions.yml
+++ b/.evergreen/generated_configs/functions.yml
@@ -162,9 +162,45 @@ functions:
 
   # Send dashboard data
   send dashboard data:
-    - command: perf.send
+    - command: subprocess.exec
+      params:
+        binary: bash
+        args:
+          - .evergreen/scripts/perf-submission-setup.sh
+        working_dir: src
+        include_expansions_in_env:
+          - requester
+          - revision_order_id
+          - project_id
+          - version_id
+          - build_variant
+          - parsed_order_id
+          - task_name
+          - task_id
+          - execution
+          - is_mainline
+      type: test
+    - command: expansions.update
       params:
-        file: src/results.json
+        file: src/expansion.yml
+    - command: subprocess.exec
+      params:
+        binary: bash
+        args:
+          - .evergreen/scripts/perf-submission.sh
+        working_dir: src
+        include_expansions_in_env:
+          - requester
+          - revision_order_id
+          - project_id
+          - version_id
+          - build_variant
+          - parsed_order_id
+          - task_name
+          - task_id
+          - execution
+          - is_mainline
+      type: test
 
   # Setup system
   setup system:
diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py
index e13976d8c7..c54908f5d7 100644
--- a/.evergreen/scripts/generate_config.py
+++ b/.evergreen/scripts/generate_config.py
@@ -41,7 +41,6 @@
     ec2_assume_role,
     expansions_update,
     git_get_project,
-    perf_send,
 )
 
 from shrub.v3.evg_task import EvgTask, EvgTaskDependency, EvgTaskRef
@@ -1103,8 +1102,28 @@ def create_attach_benchmark_test_results_func():
 
 
 def create_send_dashboard_data_func():
-    cmd = perf_send(file="src/results.json")
-    return "send dashboard data", [cmd]
+    includes = [
+        "requester",
+        "revision_order_id",
+        "project_id",
+        "version_id",
+        "build_variant",
+        "parsed_order_id",
+        "task_name",
+        "task_id",
+        "execution",
+        "is_mainline",
+    ]
+    cmds = [
+        get_subprocess_exec(
+            include_expansions_in_env=includes, args=[".evergreen/scripts/perf-submission-setup.sh"]
+        ),
+        expansions_update(file="src/expansion.yml"),
+        get_subprocess_exec(
+            include_expansions_in_env=includes, args=[".evergreen/scripts/perf-submission.sh"]
+        ),
+    ]
+    return "send dashboard data", cmds
 
 
 mod = sys.modules[__name__]
diff --git a/.evergreen/scripts/perf-submission-setup.sh b/.evergreen/scripts/perf-submission-setup.sh
new file mode 100755
index 0000000000..ecb38751a5
--- /dev/null
+++ b/.evergreen/scripts/perf-submission-setup.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+# We use the requester expansion to determine whether the data is from a mainline Evergreen run or not.
+
+set -eu
+
+# shellcheck disable=SC2154
+if [ "${requester}" == "commit" ]; then
+  echo "is_mainline: true" >> expansion.yml
+else
+  echo "is_mainline: false" >> expansion.yml
+fi
+
+# Patch runs embed the submitter's username in revision_order_id; SPS does not need it, so keep only the trailing order number.
+# shellcheck disable=SC2154
+echo "parsed_order_id: $(echo "${revision_order_id}" | awk -F'_' '{print $NF}')" >> expansion.yml
diff --git a/.evergreen/scripts/perf-submission.sh b/.evergreen/scripts/perf-submission.sh
new file mode 100755
index 0000000000..f7c3ea6664
--- /dev/null
+++ b/.evergreen/scripts/perf-submission.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+# Submit the benchmark results to the SPS endpoint and fail the task if the upload is rejected.
+
+set -eu
+
+# Submit the performance data to the SPS endpoint
+# shellcheck disable=SC2154
+response=$(curl -s -w "\nHTTP_STATUS:%{http_code}" -X 'POST' \
+  "https://performance-monitoring-api.corp.mongodb.com/raw_perf_results/cedar_report?project=${project_id}&version=${version_id}&variant=${build_variant}&order=${parsed_order_id}&task_name=${task_name}&task_id=${task_id}&execution=${execution}&mainline=${is_mainline}" \
+  -H 'accept: application/json' \
+  -H 'Content-Type: application/json' \
+  -d @results.json)
+
+http_status=$(echo "$response" | grep "HTTP_STATUS" | awk -F':' '{print $2}')
+response_body=$(echo "$response" | sed '/HTTP_STATUS/d')
+
+# Fail if the data was not successfully submitted
+if [ "$http_status" -ne 200 ]; then
+  echo "Error: Received HTTP status $http_status"
+  echo "Response Body: $response_body"
+  exit 1
+fi
+
+echo "Response Body: $response_body"
+echo "HTTP Status: $http_status"
diff --git a/test/performance/async_perf_test.py b/test/performance/async_perf_test.py
index 969437f9c9..6eb31ea4fe 100644
--- a/test/performance/async_perf_test.py
+++ b/test/performance/async_perf_test.py
@@ -144,7 +144,15 @@ async def asyncTearDown(self):
                 },
             },
             "metrics": [
-                {"name": "megabytes_per_sec", "type": "MEDIAN", "value": megabytes_per_sec},
+                {
+                    "name": "megabytes_per_sec",
+                    "type": "MEDIAN",
+                    "value": megabytes_per_sec,
+                    "metadata": {
+                        "improvement_direction": "up",
+                        "measurement_unit": "megabytes_per_second",
+                    },
+                },
             ],
         }
     )
diff --git a/test/performance/perf_test.py b/test/performance/perf_test.py
index 39487eff6d..5688d28d2d 100644
--- a/test/performance/perf_test.py
+++ b/test/performance/perf_test.py
@@ -151,7 +151,15 @@ def tearDown(self):
                 },
             },
             "metrics": [
-                {"name": "megabytes_per_sec", "type": "MEDIAN", "value": megabytes_per_sec},
+                {
+                    "name": "megabytes_per_sec",
+                    "type": "MEDIAN",
+                    "value": megabytes_per_sec,
+                    "metadata": {
+                        "improvement_direction": "up",
+                        "measurement_unit": "megabytes_per_second",
+                    },
+                },
             ],
         }
     )