diff --git a/.buildkite/README.md b/.buildkite/README.md new file mode 100644 index 00000000000000..6b9be00898d249 --- /dev/null +++ b/.buildkite/README.md @@ -0,0 +1,10 @@ +# Kibana / Buildkite + +## Directory Structure + +- `hooks` - special directory used by Buildkite agents for [hooks](https://buildkite.com/docs/agent/v3/hooks) +- `pipelines` - contains pipeline definitions +- `scripts/common` - scripts that get `source`d by other scripts to set environment variables or import shared functions +- `scripts/lifecycle` - general scripts for tasks that run before or after individual steps or the entire build +- `scripts/steps` - scripts that define something that will run for a step defined in a pipeline +- `scripts/*` - all other scripts are building blocks that make up the tasks in pipelines. They may be run by other scripts, but should not be `source`d diff --git a/.buildkite/agents.json b/.buildkite/agents.json new file mode 100644 index 00000000000000..797b7e71f2be68 --- /dev/null +++ b/.buildkite/agents.json @@ -0,0 +1,79 @@ +{ + "gcp": { + "project": "elastic-kibana-ci", + "zones": ["us-central1-a", "us-central1-b", "us-central1-c", "us-central1-f"], + "serviceAccount": "elastic-buildkite-agent@elastic-kibana-ci.iam.gserviceaccount.com", + "imageFamily": "kb-ubuntu", + "subnetwork": "buildkite", + "disableExternalIp": true, + "diskType": "pd-ssd", + "diskSizeGb": 75, + "overprovision": 0, + "minimumAgents": 0, + "maximumAgents": 50, + "gracefulStopAfterMins": 360, + "hardStopAfterMins": 540, + "idleTimeoutMins": 10, + "exitAfterOneJob": false, + + "agents": [ + { + "queue": "default", + "name": "kb-default", + "minimumAgents": 1, + "maximumAgents": 100, + "idleTimeoutMins": 60, + "machineType": "e2-small" + }, + { + "queue": "c2-8", + "name": "kb-c2-8", + "machineType": "c2-standard-8", + "localSsds": 1 + }, + { + "queue": "c2-4", + "name": "kb-c2-4", + "machineType": "c2-standard-4", + "localSsds": 1 + }, + { + "queue": "jest", + "name": "kb-jest", + "machineType": "n2-standard-2", + "diskSizeGb": 128 + }, + { + "queue": "ci-group", + "name": "kb-cigroup", + "machineType": "n2-standard-8", + "diskSizeGb": 256 + }, + { + "queue": "ci-group-4", + "name": "kb-cigroup-4", + "machineType": "n2-standard-4", + "diskSizeGb": 128 + }, + { + "queue": "ci-group-4d", + "name": "kb-cigroup-4d", + "machineType": "n2d-standard-4", + "diskSizeGb": 128 + }, + { + "queue": "ci-group-6", + "name": "kb-cigroup-6", + "machineType": "n2-custom-6-16384", + "diskSizeGb": 128 + }, + { + "queue": "packer", + "name": "kb-packer", + "serviceAccount": "buildkite-packer-agent@elastic-kibana-ci.iam.gserviceaccount.com", + "maximumAgents": 10, + "machineType": "e2-small" + } + ] + } +} diff --git a/.buildkite/hooks/post-command b/.buildkite/hooks/post-command new file mode 100644 index 00000000000000..21a4326498fc9d --- /dev/null +++ b/.buildkite/hooks/post-command @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +source .buildkite/scripts/lifecycle/post_command.sh diff --git a/.buildkite/hooks/pre-command b/.buildkite/hooks/pre-command new file mode 100644 index 00000000000000..58a2c5f0b499d4 --- /dev/null +++ b/.buildkite/hooks/pre-command @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +source .buildkite/scripts/lifecycle/pre_command.sh diff --git a/.buildkite/pipelines/on_merge.yml b/.buildkite/pipelines/on_merge.yml new file mode 100644 index 00000000000000..bceb1796479a2d --- /dev/null +++ b/.buildkite/pipelines/on_merge.yml @@ -0,0 +1,24 @@ +env: + GITHUB_COMMIT_STATUS_ENABLED: 'true' + GITHUB_COMMIT_STATUS_CONTEXT: 
'buildkite/on-merge' +steps: + - command: .buildkite/scripts/lifecycle/pre_build.sh + label: Pre-Build + + - wait + + - command: .buildkite/scripts/steps/on_merge_build_and_metrics.sh + label: Default Build and Metrics + agents: + queue: c2-8 + + - command: .buildkite/scripts/steps/on_merge_ts_refs_api_docs.sh + label: Build TS Refs and Check Public API Docs + agents: + queue: c2-4 + + - wait: ~ + continue_on_failure: true + + - command: .buildkite/scripts/lifecycle/post_build.sh + label: Post-Build diff --git a/.buildkite/scripts/bootstrap.sh b/.buildkite/scripts/bootstrap.sh new file mode 100755 index 00000000000000..7ae925b262b9f4 --- /dev/null +++ b/.buildkite/scripts/bootstrap.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash + +set -euo pipefail + +source .buildkite/scripts/common/util.sh + +echo "--- yarn install and bootstrap" +yarn kbn bootstrap --verbose + +### +### upload ts-refs-cache artifacts as quickly as possible so they are available for download +### +if [[ "${BUILD_TS_REFS_CACHE_CAPTURE:-}" == "true" ]]; then + echo "--- Upload ts-refs-cache" + cd "$KIBANA_DIR/target/ts_refs_cache" + gsutil cp "*.zip" 'gs://kibana-ci-ts-refs-cache/' + cd "$KIBANA_DIR" +fi + +if [[ "$DISABLE_BOOTSTRAP_VALIDATION" != "true" ]]; then + verify_no_git_changes 'yarn kbn bootstrap' +fi diff --git a/.buildkite/scripts/build_kibana.sh b/.buildkite/scripts/build_kibana.sh new file mode 100755 index 00000000000000..19dbbcb025fb97 --- /dev/null +++ b/.buildkite/scripts/build_kibana.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +set -euo pipefail + +export KBN_NP_PLUGINS_BUILT=true + +echo "--- Build Kibana Distribution" +node scripts/build --debug --no-oss + +echo "--- Archive Kibana Distribution" +linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')" +installDir="$KIBANA_DIR/install/kibana" +mkdir -p "$installDir" +tar -xzf "$linuxBuild" -C "$installDir" --strip=1 +mkdir -p "$KIBANA_BUILD_LOCATION" +cp -pR install/kibana/. 
"$KIBANA_BUILD_LOCATION/" diff --git a/.buildkite/scripts/build_kibana_plugins.sh b/.buildkite/scripts/build_kibana_plugins.sh new file mode 100644 index 00000000000000..f4d82699ef92d1 --- /dev/null +++ b/.buildkite/scripts/build_kibana_plugins.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash + +set -euo pipefail + +echo "--- Build Platform Plugins" +node scripts/build_kibana_platform_plugins \ + --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \ + --scan-dir "$KIBANA_DIR/test/interpreter_functional/plugins" \ + --scan-dir "$KIBANA_DIR/test/common/fixtures/plugins" \ + --scan-dir "$KIBANA_DIR/examples" \ + --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \ + --scan-dir "$KIBANA_DIR/test/common/fixtures/plugins" \ + --scan-dir "$XPACK_DIR/test/plugin_functional/plugins" \ + --scan-dir "$XPACK_DIR/test/functional_with_es_ssl/fixtures/plugins" \ + --scan-dir "$XPACK_DIR/test/alerting_api_integration/plugins" \ + --scan-dir "$XPACK_DIR/test/plugin_api_integration/plugins" \ + --scan-dir "$XPACK_DIR/test/plugin_api_perf/plugins" \ + --scan-dir "$XPACK_DIR/test/licensing_plugin/plugins" \ + --scan-dir "$XPACK_DIR/test/usage_collection/plugins" \ + --scan-dir "$XPACK_DIR/test/security_functional/fixtures/common" \ + --scan-dir "$XPACK_DIR/examples" \ + --verbose + +echo "--- Archive built plugins" +shopt -s globstar +tar -zcf \ + target/kibana-default-plugins.tar.gz \ + x-pack/plugins/**/target/public \ + x-pack/test/**/target/public \ + examples/**/target/public \ + x-pack/examples/**/target/public \ + test/**/target/public diff --git a/.buildkite/scripts/common/env.sh b/.buildkite/scripts/common/env.sh new file mode 100755 index 00000000000000..ff09126f60b089 --- /dev/null +++ b/.buildkite/scripts/common/env.sh @@ -0,0 +1,72 @@ +#!/usr/bin/env bash + +export CI=true + +KIBANA_DIR=$(pwd) +export KIBANA_DIR +export XPACK_DIR="$KIBANA_DIR/x-pack" + +export CACHE_DIR="$HOME/.kibana" +PARENT_DIR="$(cd "$KIBANA_DIR/.."; pwd)" +export PARENT_DIR +export WORKSPACE="${WORKSPACE:-$PARENT_DIR}" + +KIBANA_PKG_BRANCH="$(jq -r .branch "$KIBANA_DIR/package.json")" +export KIBANA_PKG_BRANCH +export KIBANA_BASE_BRANCH="$KIBANA_PKG_BRANCH" + +export GECKODRIVER_CDNURL="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache" +export CHROMEDRIVER_CDNURL="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache" +export RE2_DOWNLOAD_MIRROR="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache" +export CYPRESS_DOWNLOAD_MIRROR="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/cypress" + +export NODE_OPTIONS="--max-old-space-size=4096" + +export FORCE_COLOR=1 +export TEST_BROWSER_HEADLESS=1 + +export ELASTIC_APM_ENVIRONMENT=ci +export ELASTIC_APM_TRANSACTION_SAMPLE_RATE=0.1 + +CI_REPORTING_ENABLED=false # TODO enable when ready, only controls checks reporter and APM + +if is_pr; then + export ELASTIC_APM_ACTIVE=false + export CHECKS_REPORTER_ACTIVE="${CI_REPORTING_ENABLED-}" + + # These can be removed once we're not supporting Jenkins and Buildkite at the same time + # These are primarily used by github checks reporter and can be configured via /github_checks_api.json + export ghprbGhRepository="elastic/kibana" + export ghprbActualCommit="$BUILDKITE_COMMIT" + export BUILD_URL="$BUILDKITE_BUILD_URL" + + # set_git_merge_base # TODO for PRs +else + export ELASTIC_APM_ACTIVE="${CI_REPORTING_ENABLED-}" + export CHECKS_REPORTER_ACTIVE=false +fi + +export FLEET_PACKAGE_REGISTRY_PORT=6104 +export 
+ +export DETECT_CHROMEDRIVER_VERSION=true +export CHROMEDRIVER_FORCE_DOWNLOAD=true + +export GCS_UPLOAD_PREFIX=FAKE_UPLOAD_PREFIX # TODO remove the need for this + +export KIBANA_BUILD_LOCATION="$WORKSPACE/kibana-build-xpack" + +if [[ "${BUILD_TS_REFS_CACHE_ENABLE:-}" != "true" ]]; then + export BUILD_TS_REFS_CACHE_ENABLE=false +fi + +export BUILD_TS_REFS_DISABLE=true +export DISABLE_BOOTSTRAP_VALIDATION=true + +export TEST_KIBANA_HOST=localhost +export TEST_KIBANA_PORT=6101 +export TEST_KIBANA_URL="http://elastic:changeme@localhost:6101" +export TEST_ES_URL="http://elastic:changeme@localhost:6102" +export TEST_ES_TRANSPORT_PORT=6103 +export TEST_CORS_SERVER_PORT=6106 +export ALERTING_PROXY_PORT=6105 diff --git a/.buildkite/scripts/common/setup_bazel.sh b/.buildkite/scripts/common/setup_bazel.sh new file mode 100644 index 00000000000000..16ea9a39003535 --- /dev/null +++ b/.buildkite/scripts/common/setup_bazel.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash + +KIBANA_BUILDBUDDY_CI_API_KEY=$(vault read -field=value secret/kibana-issues/dev/kibana-buildbuddy-ci-api-key) +export KIBANA_BUILDBUDDY_CI_API_KEY + +cp "$KIBANA_DIR/src/dev/ci_setup/.bazelrc-ci" "$HOME/.bazelrc" + +### +### append the BuildBuddy auth token to "$HOME/.bazelrc" +### +echo "# Appended by .buildkite/scripts/common/setup_bazel.sh" >> "$HOME/.bazelrc" +echo "build --remote_header=x-buildbuddy-api-key=$KIBANA_BUILDBUDDY_CI_API_KEY" >> "$HOME/.bazelrc" + +### +### remove write permissions on the BuildBuddy remote cache for PRs +### +if [[ "${BUILDKITE_PULL_REQUEST:-}" && "$BUILDKITE_PULL_REQUEST" != "false" ]] ; then + { + echo "# Uploads logs & artifacts without writing to cache" + echo "build --noremote_upload_local_results" + } >> "$HOME/.bazelrc" +fi diff --git a/.buildkite/scripts/common/setup_node.sh b/.buildkite/scripts/common/setup_node.sh new file mode 100755 index 00000000000000..6a81862f2b097d --- /dev/null +++ b/.buildkite/scripts/common/setup_node.sh @@ -0,0 +1,46 @@ +#!/usr/bin/env bash + +echo "--- Setup Node" + +NODE_VERSION="$(cat "$KIBANA_DIR/.node-version")" +export NODE_VERSION +export NODE_DIR="$CACHE_DIR/node/$NODE_VERSION" +export NODE_BIN_DIR="$NODE_DIR/bin" +export YARN_OFFLINE_CACHE="$CACHE_DIR/yarn-offline-cache" + +if [[ ! -d "$NODE_DIR" ]]; then + hostArch="$(command uname -m)" + case "${hostArch}" in + x86_64 | amd64) nodeArch="x64" ;; + aarch64) nodeArch="arm64" ;; + *) nodeArch="${hostArch}" ;; + esac + + nodeUrl="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-$nodeArch.tar.gz" + + echo "node.js v$NODE_VERSION not found at $NODE_DIR, downloading from $nodeUrl" + + mkdir -p "$NODE_DIR" + curl --silent -L "$nodeUrl" | tar -xz -C "$NODE_DIR" --strip-components=1 +else + echo "node.js v$NODE_VERSION already installed to $NODE_DIR, re-using" + ls -alh "$NODE_BIN_DIR" +fi + +export PATH="$NODE_BIN_DIR:$PATH" + + +echo "--- Setup Yarn" + +YARN_VERSION=$(node -e "console.log(String(require('./package.json').engines.yarn || '').replace(/^[^\d]+/,''))") +export YARN_VERSION + +if [[ ! 
$(which yarn) || $(yarn --version) != "$YARN_VERSION" ]]; then + npm install -g "yarn@^${YARN_VERSION}" +fi + +yarn config set yarn-offline-mirror "$YARN_OFFLINE_CACHE" + +YARN_GLOBAL_BIN=$(yarn global bin) +export YARN_GLOBAL_BIN +export PATH="$PATH:$YARN_GLOBAL_BIN" diff --git a/.buildkite/scripts/common/util.sh b/.buildkite/scripts/common/util.sh new file mode 100755 index 00000000000000..653b0c4e9cdbb7 --- /dev/null +++ b/.buildkite/scripts/common/util.sh @@ -0,0 +1,51 @@ +#!/usr/bin/env bash + +checks-reporter-with-killswitch() { + if [ "$CHECKS_REPORTER_ACTIVE" == "true" ] ; then + yarn run github-checks-reporter "$@" + else + arguments=("$@"); + "${arguments[@]:1}"; + fi +} + +is_pr() { + [[ "${GITHUB_PR_NUMBER-}" ]] && return + false +} + +verify_no_git_changes() { + RED='\033[0;31m' + C_RESET='\033[0m' # Reset color + + GIT_CHANGES="$(git ls-files --modified)" + if [ "$GIT_CHANGES" ]; then + echo -e "\n${RED}ERROR: '$1' caused changes to the following files:${C_RESET}\n" + echo -e "$GIT_CHANGES\n" + exit 1 + fi +} + +# docker_run can be used in place of `docker run` +# it automatically passes along all of Buildkite's tracked environment variables, and mounts the buildkite-agent in the running container +docker_run() { + args=() + + if [[ -n "${BUILDKITE_ENV_FILE:-}" ]] ; then + # Read in the env file and convert to --env params for docker + # This is because --env-file doesn't support newlines or quotes per https://docs.docker.com/compose/env-file/#syntax-rules + while read -r var; do + args+=( --env "${var%%=*}" ) + done < "$BUILDKITE_ENV_FILE" + fi + + BUILDKITE_AGENT_BINARY_PATH=$(command -v buildkite-agent) + args+=( + "--env" "BUILDKITE_JOB_ID" + "--env" "BUILDKITE_BUILD_ID" + "--env" "BUILDKITE_AGENT_ACCESS_TOKEN" + "--volume" "$BUILDKITE_AGENT_BINARY_PATH:/usr/bin/buildkite-agent" + ) + + docker run "${args[@]}" "$@" +} diff --git a/.buildkite/scripts/download_build_artifacts.sh b/.buildkite/scripts/download_build_artifacts.sh new file mode 100755 index 00000000000000..6a6b7246753f66 --- /dev/null +++ b/.buildkite/scripts/download_build_artifacts.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +set -euo pipefail + +if [[ ! -d "$KIBANA_BUILD_LOCATION/bin" ]]; then + echo '--- Downloading Distribution and Plugin artifacts' + + cd "$WORKSPACE" + + buildkite-agent artifact download kibana-default.tar.gz . + buildkite-agent artifact download kibana-default-plugins.tar.gz . 
+ + mkdir -p "$KIBANA_BUILD_LOCATION" + tar -xzf kibana-default.tar.gz -C "$KIBANA_BUILD_LOCATION" --strip=1 + + cd "$KIBANA_DIR" + + tar -xzf ../kibana-default-plugins.tar.gz +fi diff --git a/.buildkite/scripts/lifecycle/ci_stats.js b/.buildkite/scripts/lifecycle/ci_stats.js new file mode 100644 index 00000000000000..1e7aec3079ee30 --- /dev/null +++ b/.buildkite/scripts/lifecycle/ci_stats.js @@ -0,0 +1,59 @@ +const https = require('https'); +const token = process.env.CI_STATS_TOKEN; +const host = process.env.CI_STATS_HOST; + +const request = (url, options, data = null) => { + const httpOptions = { + ...options, + headers: { + ...(options.headers || {}), + Authorization: `token ${token}`, + }, + }; + + return new Promise((resolve, reject) => { + console.log(`Calling https://${host}${url}`); + + const req = https.request(`https://${host}${url}`, httpOptions, (res) => { + if (res.statusCode < 200 || res.statusCode >= 300) { + return reject(new Error(`Status Code: ${res.statusCode}`)); + } + + const chunks = []; + res.on('data', (d) => { + chunks.push(d); + }); + + res.on('end', () => { + try { + let resp = Buffer.concat(chunks).toString(); + + try { + if (resp.trim()) { + resp = JSON.parse(resp); + } + } catch (ex) { + console.error(ex); + } + + resolve(resp); + } catch (ex) { + reject(ex); + } + }); + }); + + req.on('error', reject); + + if (data) { + req.write(JSON.stringify(data)); + } + + req.end(); + }); +}; + +module.exports = { + get: (url) => request(url, { method: 'GET' }), + post: (url, data) => request(url, { method: 'POST' }, data), +}; diff --git a/.buildkite/scripts/lifecycle/ci_stats_complete.js b/.buildkite/scripts/lifecycle/ci_stats_complete.js new file mode 100644 index 00000000000000..46aa72aed20243 --- /dev/null +++ b/.buildkite/scripts/lifecycle/ci_stats_complete.js @@ -0,0 +1,17 @@ +const ciStats = require('./ci_stats'); + +// TODO - this is okay for now but should really be replaced with an API call, especially once retries are enabled +const BUILD_STATUS = process.env.BUILD_FAILED === 'true' ? 
'FAILURE' : 'SUCCESS'; + +(async () => { + try { + if (process.env.CI_STATS_BUILD_ID) { + await ciStats.post(`/v1/build/_complete?id=${process.env.CI_STATS_BUILD_ID}`, { + result: BUILD_STATUS, + }); + } + } catch (ex) { + console.error(ex); + process.exit(1); + } +})(); diff --git a/.buildkite/scripts/lifecycle/ci_stats_start.js b/.buildkite/scripts/lifecycle/ci_stats_start.js new file mode 100644 index 00000000000000..35a1e7030af5f9 --- /dev/null +++ b/.buildkite/scripts/lifecycle/ci_stats_start.js @@ -0,0 +1,30 @@ +const { execSync } = require('child_process'); +const ciStats = require('./ci_stats'); + +(async () => { + try { + const build = await ciStats.post('/v1/build', { + jenkinsJobName: process.env.BUILDKITE_PIPELINE_NAME, + jenkinsJobId: process.env.BUILDKITE_BUILD_ID, + jenkinsUrl: process.env.BUILDKITE_BUILD_URL, + prId: process.env.GITHUB_PR_NUMBER || null, + }); + + execSync(`buildkite-agent meta-data set ci_stats_build_id "${build.id}"`); + + // TODO Will need to set MERGE_BASE for PRs + + await ciStats.post(`/v1/git_info?buildId=${build.id}`, { + branch: process.env.BUILDKITE_BRANCH.replace(/^(refs\/heads\/|origin\/)/, ''), + commit: process.env.BUILDKITE_COMMIT, + targetBranch: + process.env.GITHUB_PR_TARGET_BRANCH || + process.env.BUILDKITE_PULL_REQUEST_BASE_BRANCH || + null, + mergeBase: process.env.GITHUB_PR_MERGE_BASE || null, // TODO confirm GITHUB_PR_MERGE_BASE or switch to final var + }); + } catch (ex) { + console.error(ex); + process.exit(1); + } +})(); diff --git a/.buildkite/scripts/lifecycle/commit_status_complete.sh b/.buildkite/scripts/lifecycle/commit_status_complete.sh new file mode 100755 index 00000000000000..1766404719632d --- /dev/null +++ b/.buildkite/scripts/lifecycle/commit_status_complete.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash + +set -euo pipefail + +if [[ "${GITHUB_COMMIT_STATUS_ENABLED:-}" == "true" ]]; then + COMMIT_STATUS=success + if [[ "${BUILD_FAILED:-}" == "true" ]]; then + COMMIT_STATUS=failure + fi + + GITHUB_COMMIT_STATUS_CONTEXT=${GITHUB_COMMIT_STATUS_CONTEXT:-"buildkite/$BUILDKITE_PIPELINE_NAME"} + + gh api "repos/elastic/kibana/statuses/$BUILDKITE_COMMIT" -f state="$COMMIT_STATUS" -f target_url="$BUILDKITE_BUILD_URL" -f context="$GITHUB_COMMIT_STATUS_CONTEXT" --silent +fi diff --git a/.buildkite/scripts/lifecycle/commit_status_start.sh b/.buildkite/scripts/lifecycle/commit_status_start.sh new file mode 100755 index 00000000000000..d7e91dd7e0de5b --- /dev/null +++ b/.buildkite/scripts/lifecycle/commit_status_start.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +set -euo pipefail + +if [[ "${GITHUB_COMMIT_STATUS_ENABLED:-}" == "true" ]]; then + GITHUB_COMMIT_STATUS_CONTEXT=${GITHUB_COMMIT_STATUS_CONTEXT:-"buildkite/$BUILDKITE_PIPELINE_NAME"} + + gh api "repos/elastic/kibana/statuses/$BUILDKITE_COMMIT" -f state=pending -f target_url="$BUILDKITE_BUILD_URL" -f context="$GITHUB_COMMIT_STATUS_CONTEXT" --silent +fi diff --git a/.buildkite/scripts/lifecycle/post_build.sh b/.buildkite/scripts/lifecycle/post_build.sh new file mode 100755 index 00000000000000..06b51d78a836a9 --- /dev/null +++ b/.buildkite/scripts/lifecycle/post_build.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +set -euo pipefail + +BUILD_FAILED=$(buildkite-agent meta-data get build_failed --default "false") +export BUILD_FAILED + +"$(dirname "${0}")/commit_status_complete.sh" + +node "$(dirname "${0}")/ci_stats_complete.js" diff --git a/.buildkite/scripts/lifecycle/post_command.sh b/.buildkite/scripts/lifecycle/post_command.sh new file mode 100755 index 00000000000000..89630a874bbd71 
--- /dev/null +++ b/.buildkite/scripts/lifecycle/post_command.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +set -euo pipefail + +if [[ "$BUILDKITE_COMMAND_EXIT_STATUS" != "0" ]]; then + buildkite-agent meta-data set build_failed true +fi diff --git a/.buildkite/scripts/lifecycle/pre_build.sh b/.buildkite/scripts/lifecycle/pre_build.sh new file mode 100755 index 00000000000000..2ddf9a73d0622e --- /dev/null +++ b/.buildkite/scripts/lifecycle/pre_build.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +set -euo pipefail + +"$(dirname "${0}")/commit_status_start.sh" + +export CI_STATS_TOKEN="$(vault read -field=api_token secret/kibana-issues/dev/kibana_ci_stats)" +export CI_STATS_HOST="$(vault read -field=api_host secret/kibana-issues/dev/kibana_ci_stats)" + +node "$(dirname "${0}")/ci_stats_start.js" diff --git a/.buildkite/scripts/lifecycle/pre_command.sh b/.buildkite/scripts/lifecycle/pre_command.sh new file mode 100755 index 00000000000000..d9e79d2d3252b5 --- /dev/null +++ b/.buildkite/scripts/lifecycle/pre_command.sh @@ -0,0 +1,67 @@ +#!/usr/bin/env bash + +set -euo pipefail + +# Set up a custom ES Snapshot Manifest if one has been specified for this build +{ + ES_SNAPSHOT_MANIFEST=${ES_SNAPSHOT_MANIFEST:-$(buildkite-agent meta-data get ES_SNAPSHOT_MANIFEST --default '')} + export ES_SNAPSHOT_MANIFEST + + if [[ "${ES_SNAPSHOT_MANIFEST:-}" ]]; then + cat << EOF | buildkite-agent annotate --style "info" --context es-snapshot-manifest + This build is running using a custom Elasticsearch snapshot. + + ES Snapshot Manifest: $ES_SNAPSHOT_MANIFEST + + To use this locally, simply prefix your commands with: + + \`\`\` + ES_SNAPSHOT_MANIFEST="$ES_SNAPSHOT_MANIFEST" + \`\`\` + + e.g. + + \`\`\` + ES_SNAPSHOT_MANIFEST="$ES_SNAPSHOT_MANIFEST" node scripts/functional_tests_server.js + \`\`\` +EOF + fi +} + +# Setup CI Stats +{ + CI_STATS_BUILD_ID="$(buildkite-agent meta-data get ci_stats_build_id --default '')" + export CI_STATS_BUILD_ID + + if [[ "$CI_STATS_BUILD_ID" ]]; then + echo "CI Stats Build ID: $CI_STATS_BUILD_ID" + + CI_STATS_TOKEN="$(vault read -field=api_token secret/kibana-issues/dev/kibana_ci_stats)" + export CI_STATS_TOKEN + + CI_STATS_HOST="$(vault read -field=api_host secret/kibana-issues/dev/kibana_ci_stats)" + export CI_STATS_HOST + + KIBANA_CI_STATS_CONFIG=$(jq -n \ + --arg buildId "$CI_STATS_BUILD_ID" \ + --arg apiUrl "https://$CI_STATS_HOST" \ + --arg apiToken "$CI_STATS_TOKEN" \ + '{buildId: $buildId, apiUrl: $apiUrl, apiToken: $apiToken}' \ + ) + export KIBANA_CI_STATS_CONFIG + fi +} + +GITHUB_TOKEN=$(vault read -field=github_token secret/kibana-issues/dev/kibanamachine) +export GITHUB_TOKEN + +# By default, all steps should set up these things to get a full environment before running +# It can be skipped for pipeline upload steps though, to make job start time a little faster +if [[ "${SKIP_CI_SETUP:-}" != "true" ]]; then + if [[ -d .buildkite/scripts && "${BUILDKITE_COMMAND:-}" != "buildkite-agent pipeline upload"* ]]; then + source .buildkite/scripts/common/util.sh + source .buildkite/scripts/common/env.sh + source .buildkite/scripts/common/setup_node.sh + source .buildkite/scripts/common/setup_bazel.sh + fi +fi diff --git a/.buildkite/scripts/packer_cache.sh b/.buildkite/scripts/packer_cache.sh new file mode 100755 index 00000000000000..45d3dc439ff4d0 --- /dev/null +++ b/.buildkite/scripts/packer_cache.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -euo pipefail + +source .buildkite/scripts/common/env.sh +source .buildkite/scripts/common/setup_node.sh + +yarn kbn bootstrap diff --git 
a/.buildkite/scripts/post_build_kibana.sh b/.buildkite/scripts/post_build_kibana.sh new file mode 100755 index 00000000000000..a4f8d71b77105b --- /dev/null +++ b/.buildkite/scripts/post_build_kibana.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +set -euo pipefail + +if [[ ! "${DISABLE_CI_STATS_SHIPPING:-}" ]]; then + echo "--- Ship Kibana Distribution Metrics to CI Stats" + node scripts/ship_ci_stats \ + --metrics target/optimizer_bundle_metrics.json \ + --metrics packages/kbn-ui-shared-deps/target/metrics.json +fi + +echo "--- Upload Build Artifacts" +# Moving to `target/` first will keep `buildkite-agent` from including directories in the artifact name +cd "$KIBANA_DIR/target" +mv kibana-*-linux-x86_64.tar.gz kibana-default.tar.gz +buildkite-agent artifact upload kibana-default.tar.gz +buildkite-agent artifact upload kibana-default-plugins.tar.gz +cd - diff --git a/.buildkite/scripts/saved_object_field_metrics.sh b/.buildkite/scripts/saved_object_field_metrics.sh new file mode 100755 index 00000000000000..3b6c63eeff3b0c --- /dev/null +++ b/.buildkite/scripts/saved_object_field_metrics.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash + +set -euo pipefail + +source .buildkite/scripts/common/util.sh + +echo '--- Default Saved Object Field Metrics' +cd "$XPACK_DIR" +checks-reporter-with-killswitch "Capture Kibana Saved Objects field count metrics" \ + node scripts/functional_tests \ + --debug --bail \ + --kibana-install-dir "$KIBANA_BUILD_LOCATION" \ + --config test/saved_objects_field_count/config.ts diff --git a/.buildkite/scripts/steps/on_merge_build_and_metrics.sh b/.buildkite/scripts/steps/on_merge_build_and_metrics.sh new file mode 100755 index 00000000000000..fdf06981ab568e --- /dev/null +++ b/.buildkite/scripts/steps/on_merge_build_and_metrics.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +set -euo pipefail + +export DISABLE_BOOTSTRAP_VALIDATION=true +export BUILD_TS_REFS_DISABLE=true + +.buildkite/scripts/bootstrap.sh +.buildkite/scripts/build_kibana.sh +.buildkite/scripts/post_build_kibana.sh +.buildkite/scripts/saved_object_field_metrics.sh diff --git a/.buildkite/scripts/steps/on_merge_ts_refs_api_docs.sh b/.buildkite/scripts/steps/on_merge_ts_refs_api_docs.sh new file mode 100755 index 00000000000000..35bc137ee4a02f --- /dev/null +++ b/.buildkite/scripts/steps/on_merge_ts_refs_api_docs.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash + +set -euo pipefail + +export BUILD_TS_REFS_CACHE_ENABLE=true +export BUILD_TS_REFS_CACHE_CAPTURE=true +export DISABLE_BOOTSTRAP_VALIDATION=true +export BUILD_TS_REFS_DISABLE=false + +.buildkite/scripts/bootstrap.sh + +echo "--- Build API Docs" +node scripts/build_api_docs diff --git a/.ci/Jenkinsfile_baseline_capture b/.ci/Jenkinsfile_baseline_capture index d074da1cb1926c..9d662f4b39fb9b 100644 --- a/.ci/Jenkinsfile_baseline_capture +++ b/.ci/Jenkinsfile_baseline_capture @@ -37,7 +37,7 @@ kibanaPipeline(timeoutMinutes: 210) { tasks([ kibanaPipeline.functionalTestProcess('oss-baseline', './test/scripts/jenkins_baseline.sh'), kibanaPipeline.functionalTestProcess('xpack-baseline', './test/scripts/jenkins_xpack_baseline.sh'), - kibanaPipeline.scriptTask('Check Public API Docs', 'test/scripts/checks/plugin_public_api_docs.sh'), + kibanaPipeline.scriptTask('Check Public API Docs', 'test/scripts/checks/baseline_plugin_public_api_docs.sh'), ]) } } diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 9c9200ce33e7ae..b071e06f1bc54e 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -174,6 +174,7 @@ /.bazelversion @elastic/kibana-operations /WORKSPACE.bazel 
@elastic/kibana-operations #CC# /packages/kbn-expect/ @elastic/kibana-operations +/.buildkite/ @elastic/kibana-operations # Quality Assurance /src/dev/code_coverage @elastic/kibana-qa @@ -339,7 +340,7 @@ #CC# /x-pack/plugins/security_solution/ @elastic/security-solution # Security Solution sub teams -/x-pack/plugins/case @elastic/security-threat-hunting +/x-pack/plugins/cases @elastic/security-threat-hunting /x-pack/plugins/timelines @elastic/security-threat-hunting /x-pack/test/case_api_integration @elastic/security-threat-hunting /x-pack/plugins/lists @elastic/security-detections-response diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 2a5fc914662b63..336f7e5165d07f 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -2,6 +2,7 @@ Summarize your PR. If it involves visual changes include a screenshot or gif. + ### Checklist Delete any items that are not applicable to this PR. @@ -15,6 +16,25 @@ Delete any items that are not applicable to this PR. - [ ] This renders correctly on smaller devices using a responsive layout. (You can test this [in your browser](https://www.browserstack.com/guide/responsive-testing-on-local-server)) - [ ] This was checked for [cross-browser compatibility](https://www.elastic.co/support/matrix#matrix_browsers) + +### Risk Matrix + +Delete this section if it is not applicable to this PR. + +Before closing this PR, invite QA, stakeholders, and other developers to +identify risks that should be tested prior to the change/feature release. + +When forming the risk matrix, consider some of the following examples and how +they may potentially impact the change: + +| Risk | Probability | Severity | Mitigation/Notes | +|---------------------------|-------------|----------|-------------------------| +| Multiple Spaces—unexpected behavior in non-default Kibana Space. | Low | High | Integration tests will verify that all features are still supported in non-default Kibana Space and when the user switches between spaces. | +| Multiple nodes—Elasticsearch polling might have race conditions when multiple Kibana nodes are polling for the same tasks. | High | Low | Tasks are idempotent, so executing them multiple times will not result in a logical error, but will degrade performance. To test for this case we add plenty of unit tests around this logic and document the manual testing procedure. | +| Code should gracefully handle cases when feature X or plugin Y is disabled. | Medium | High | Unit tests will verify that any feature flag or plugin combination still results in our service remaining operational. 
| +| [See more potential risk examples](../RISK_MATRIX.mdx) | | | | + + ### For maintainers - [ ] This was checked for breaking API changes and was [labeled appropriately](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process) diff --git a/.i18nrc.json b/.i18nrc.json index dc01a10b6a686b..57dffa4147e525 100644 --- a/.i18nrc.json +++ b/.i18nrc.json @@ -3,6 +3,7 @@ "console": "src/plugins/console", "core": "src/core", "discover": "src/plugins/discover", + "bfetch": "src/plugins/bfetch", "dashboard": "src/plugins/dashboard", "data": "src/plugins/data", "embeddableApi": "src/plugins/embeddable", diff --git a/RISK_MATRIX.mdx b/RISK_MATRIX.mdx new file mode 100644 index 00000000000000..71b0198489934a --- /dev/null +++ b/RISK_MATRIX.mdx @@ -0,0 +1,63 @@ +# Risk consideration + +When merging a new feature of considerable size or modifying an existing one, +consider adding a *Risk Matrix* section to your PR in collaboration with other +developers on your team and the QA team. + +Below are some general themes to consider for the *Risk Matrix*. (Feel free to +add to this list.) + + +## General risks + +- What happens when your feature is used in a non-default space or a custom + space? +- What happens when there are multiple Kibana nodes using the same Elasticsearch + cluster? +- What happens when a plugin you depend on is disabled? +- What happens when a feature you depend on is disabled? +- Is your change working correctly regardless of `kibana.yml` configuration or + UI Setting configuration? (For example, does it support both + `state:storeInSessionStorage` UI setting states?) +- What happens when a third party integration you depend on is not responding? +- How is authentication handled with third party services? +- Does the feature work in Elastic Cloud? +- Does the feature create a setting that needs to be exposed, or configured + differently than the default, on Elastic Cloud? +- Is there a significant performance impact that may affect Cloud Kibana + instances? +- Does your feature need to be aware of running in a container? +- Does the feature work with security disabled, or fail gracefully? +- Are there performance risks associated with your feature? Does it potentially + access or create: (1) many fields; (2) many indices; (3) lots of data; + (4) lots of saved objects; (5) large saved objects? +- Could this cause memory to leak in either the browser or server? +- Will your feature still work if Kibana is run behind a reverse proxy? +- Does your feature affect other plugins? +- If you write to the file system, what happens if a Kibana node goes down? What + happens if there are multiple Kibana nodes? +- Are migrations handled gracefully? Does the feature affect old indices or + saved objects? +- Are you using any technologies, protocols, techniques, conventions, libraries, + NPM modules, etc. that may be new or unprecedented in Kibana? + + +## Security risks + +Check to ensure that best practices are used to mitigate common vulnerabilities: + +- Cross-site scripting (XSS) +- Cross-site request forgery (CSRF) +- Remote code execution (RCE) +- Server-side request forgery (SSRF) +- Prototype pollution +- Information disclosure +- Tabnabbing + +In addition to these risks, in general, server-side input validation should be +implemented as strictly as possible. Extra care should be taken when user input +is used to construct URLs or data structures; this is a common source of +injection attacks and other vulnerabilities. 
For more information on all of +these topics, see [Security best practices][security-best-practices]. + +[security-best-practices]: https://www.elastic.co/guide/en/kibana/master/security-best-practices.html diff --git a/dev_docs/tutorials/expressions.mdx b/dev_docs/tutorials/expressions.mdx new file mode 100644 index 00000000000000..f0fc1dc595cfa4 --- /dev/null +++ b/dev_docs/tutorials/expressions.mdx @@ -0,0 +1,129 @@ +--- +id: kibDevTutorialExpressions +slug: /kibana-dev-docs/tutorials/expressions +title: Kibana Expressions Service +summary: Kibana Expressions Service +date: 2021-06-01 +tags: ['kibana', 'onboarding', 'dev', 'architecture'] +--- + +## Expressions service + +The expressions service exposes a registry of reusable functions, primarily used for fetching and transforming data, and a registry of renderer functions that can render data into a DOM element. +Adding functions is easy, and so is reusing them. An expression is a chain of functions with provided arguments, which given a single input translates to a single output. +Each expression is representable by a human-friendly string which a user can type. + +### Creating expressions + +Here is a very simple expression string: + + essql 'select column1, column2 from myindex' | mapColumn name=column3 fn='{ column1 + 3 }' | table + + +It consists of 3 functions: + + - `essql`, which runs the given SQL query against Elasticsearch and returns the results; + - `mapColumn`, which computes a new column from existing ones; + - `table`, which prepares the data for rendering in a tabular format. + +The same expression could also be constructed in code: + +```ts +import { buildExpression, buildExpressionFunction } from 'src/plugins/expressions'; + +const expression = buildExpression([ + buildExpressionFunction('essql', { q: 'select column1, column2 from myindex' }), + buildExpressionFunction('mapColumn', { name: 'column3', expression: 'column1 + 3' }), + buildExpressionFunction('table', {}), +]); +``` + +Note: Consumers need to be aware of which plugin registers a given function with the expressions function registry, and should import the correct type definitions from there. + + + The `expressions` service is available on both server and client, with similar APIs. + + +### Running expressions + +The expressions service exposes an `execute` method which allows you to execute an expression: + +```ts +const executionContract = expressions.execute(expression, input); +const result = await executionContract.getData(); +``` + + + Check the full spec of the execute function [here](https://github.com/elastic/kibana/blob/master/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.execution.md) + + +In addition, on the browser side, there are two additional ways to run expressions and render the results. + +#### React expression renderer component + +This is the easiest way to get expressions rendered inside your application.
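+For example, a minimal usage sketch (assuming an `expression` string like the one built above is in scope; the full set of supported props is in the spec linked below): + +```ts +<ReactExpressionRenderer expression={expression} /> +```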
+ + Check the full spec of ReactExpressionRenderer component props [here](https://github.com/elastic/kibana/blob/master/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.reactexpressionrendererprops.md) + + +#### Expression loader + +If you are not using React, you can use the expression loader that the service provides to achieve the same: + +```ts +const handler = loader(domElement, expression, params); +``` + + + Check the full spec of expression loader params [here](https://github.com/elastic/kibana/blob/master/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iexpressionloaderparams.md) + + +### Creating new expression functions + +Creating a new expression function is easy: just call the `registerFunction` method on the expressions service setup contract with your function definition: + +```ts +const functionDefinition = { + name: 'clog', + args: {}, + help: 'Outputs the context to the console', + fn: (input: unknown) => { + // eslint-disable-next-line no-console + console.log(input); + return input; + }, +}; + +expressions.registerFunction(functionDefinition); +``` + + + Check the full interface of ExpressionFunctionDefinition [here](https://github.com/elastic/kibana/blob/master/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.expressionfunctiondefinition.md) + + +### Creating new expression renderers + +Adding new renderers is just as easy as adding functions: + +```ts +const rendererDefinition = { + name: 'debug', + help: 'Outputs the context to the dom element', + render: (domElement, input, handlers) => { + // write the input into the target DOM element + domElement.innerText = JSON.stringify(input); + handlers.done(); + }, +}; + +expressions.registerRenderer(rendererDefinition); +``` + + + Check the full interface of ExpressionRendererDefinition [here](https://github.com/elastic/kibana/blob/master/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.expressionrenderdefinition.md) + diff --git a/docs/developer/getting-started/index.asciidoc b/docs/developer/getting-started/index.asciidoc index 5ab05812019591..ac8eff132fcfe8 100644 --- a/docs/developer/getting-started/index.asciidoc +++ b/docs/developer/getting-started/index.asciidoc @@ -92,6 +92,10 @@ may need to run: yarn kbn clean ---- +NOTE: Running this command is only necessary in rare circumstances where you need to recover +a consistent state when problems arise. 
If you need to run this command often, complete +this form to provide feedback: https://ela.st/yarn-kbn-clean + If you have failures during `yarn kbn bootstrap` you may have some corrupted packages in your yarn cache which you can clean with: diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.correctiveactions.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.correctiveactions.md index e362bc4e0329c4..447823a5c34910 100644 --- a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.correctiveactions.md +++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.correctiveactions.md @@ -15,6 +15,6 @@ correctiveActions: { [key: string]: any; }; }; - manualSteps?: string[]; + manualSteps: string[]; }; ``` diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.deprecationtype.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.deprecationtype.md new file mode 100644 index 00000000000000..3a76bc60ee6306 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.deprecationtype.md @@ -0,0 +1,15 @@ + + +[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [DeprecationsDetails](./kibana-plugin-core-server.deprecationsdetails.md) > [deprecationType](./kibana-plugin-core-server.deprecationsdetails.deprecationtype.md) + +## DeprecationsDetails.deprecationType property + +(optional) Used to identify between different deprecation types. Example use case: in Upgrade Assistant, we may want to allow the user to sort by deprecation type or show each type in a separate tab. + +Feel free to add new types if necessary. Predefined types are necessary to reduce having similar definitions with different keywords across kibana deprecations. + +Signature: + +```typescript +deprecationType?: 'config' | 'feature'; +``` diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md index bb77e4247711f0..7592b8486d950f 100644 --- a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md +++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md @@ -14,7 +14,8 @@ export interface DeprecationsDetails | Property | Type | Description | | --- | --- | --- | -| [correctiveActions](./kibana-plugin-core-server.deprecationsdetails.correctiveactions.md) | {
api?: {
path: string;
method: 'POST' | 'PUT';
body?: {
[key: string]: any;
};
};
manualSteps?: string[];
} | | +| [correctiveActions](./kibana-plugin-core-server.deprecationsdetails.correctiveactions.md) | {
api?: {
path: string;
method: 'POST' | 'PUT';
body?: {
[key: string]: any;
};
};
manualSteps: string[];
} | | +| [deprecationType](./kibana-plugin-core-server.deprecationsdetails.deprecationtype.md) | 'config' | 'feature' | (optional) Used to identify between different deprecation types. Example use case: in Upgrade Assistant, we may want to allow the user to sort by deprecation type or show each type in a separate tab.Feel free to add new types if necessary. Predefined types are necessary to reduce having similar definitions with different keywords across kibana deprecations. | | [documentationUrl](./kibana-plugin-core-server.deprecationsdetails.documentationurl.md) | string | | | [level](./kibana-plugin-core-server.deprecationsdetails.level.md) | 'warning' | 'critical' | 'fetch_error' | levels: - warning: will not break deployment upon upgrade - critical: needs to be addressed before upgrade. - fetch\_error: Deprecations service failed to grab the deprecation details for the domain. | | [message](./kibana-plugin-core-server.deprecationsdetails.message.md) | string | | diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfig.gettimeshift.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfig.gettimeshift.md new file mode 100644 index 00000000000000..de0d41286c0bbb --- /dev/null +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfig.gettimeshift.md @@ -0,0 +1,15 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [AggConfig](./kibana-plugin-plugins-data-public.aggconfig.md) > [getTimeShift](./kibana-plugin-plugins-data-public.aggconfig.gettimeshift.md) + +## AggConfig.getTimeShift() method + +Signature: + +```typescript +getTimeShift(): undefined | moment.Duration; +``` +Returns: + +`undefined | moment.Duration` + diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfig.hastimeshift.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfig.hastimeshift.md new file mode 100644 index 00000000000000..024b0766ffd7b0 --- /dev/null +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfig.hastimeshift.md @@ -0,0 +1,15 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [AggConfig](./kibana-plugin-plugins-data-public.aggconfig.md) > [hasTimeShift](./kibana-plugin-plugins-data-public.aggconfig.hastimeshift.md) + +## AggConfig.hasTimeShift() method + +Signature: + +```typescript +hasTimeShift(): boolean; +``` +Returns: + +`boolean` + diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfig.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfig.md index d4a8eddf51cfcd..a96626d1a485d7 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfig.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfig.md @@ -46,8 +46,10 @@ export declare class AggConfig | [getRequestAggs()](./kibana-plugin-plugins-data-public.aggconfig.getrequestaggs.md) | | | | [getResponseAggs()](./kibana-plugin-plugins-data-public.aggconfig.getresponseaggs.md) | | | | [getTimeRange()](./kibana-plugin-plugins-data-public.aggconfig.gettimerange.md) | | | +| [getTimeShift()](./kibana-plugin-plugins-data-public.aggconfig.gettimeshift.md) | | | | [getValue(bucket)](./kibana-plugin-plugins-data-public.aggconfig.getvalue.md) | | | | 
[getValueBucketPath()](./kibana-plugin-plugins-data-public.aggconfig.getvaluebucketpath.md) | | Returns the bucket path containing the main value the agg will produce (e.g. for sum of bytes it will point to the sum, for median it will point to the 50 percentile in the percentile multi value bucket) | +| [hasTimeShift()](./kibana-plugin-plugins-data-public.aggconfig.hastimeshift.md) | | | | [isFilterable()](./kibana-plugin-plugins-data-public.aggconfig.isfilterable.md) | | | | [makeLabel(percentageMode)](./kibana-plugin-plugins-data-public.aggconfig.makelabel.md) | | | | [nextId(list)](./kibana-plugin-plugins-data-public.aggconfig.nextid.md) | static | Calculate the next id based on the ids in this list {array} list - a list of objects with id properties | diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.forcenow.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.forcenow.md new file mode 100644 index 00000000000000..8040c2939e2e45 --- /dev/null +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.forcenow.md @@ -0,0 +1,11 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [AggConfigs](./kibana-plugin-plugins-data-public.aggconfigs.md) > [forceNow](./kibana-plugin-plugins-data-public.aggconfigs.forcenow.md) + +## AggConfigs.forceNow property + +Signature: + +```typescript +forceNow?: Date; +``` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.getsearchsourcetimefilter.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.getsearchsourcetimefilter.md new file mode 100644 index 00000000000000..1f8bc1300a0a86 --- /dev/null +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.getsearchsourcetimefilter.md @@ -0,0 +1,72 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [AggConfigs](./kibana-plugin-plugins-data-public.aggconfigs.md) > [getSearchSourceTimeFilter](./kibana-plugin-plugins-data-public.aggconfigs.getsearchsourcetimefilter.md) + +## AggConfigs.getSearchSourceTimeFilter() method + +Signature: + +```typescript +getSearchSourceTimeFilter(forceNow?: Date): RangeFilter[] | { + meta: { + index: string | undefined; + params: {}; + alias: string; + disabled: boolean; + negate: boolean; + }; + query: { + bool: { + should: { + bool: { + filter: { + range: { + [x: string]: { + gte: string; + lte: string; + }; + }; + }[]; + }; + }[]; + minimum_should_match: number; + }; + }; + }[]; +``` + +## Parameters + +| Parameter | Type | Description | +| --- | --- | --- | +| forceNow | Date | | + +Returns: + +`RangeFilter[] | { + meta: { + index: string | undefined; + params: {}; + alias: string; + disabled: boolean; + negate: boolean; + }; + query: { + bool: { + should: { + bool: { + filter: { + range: { + [x: string]: { + gte: string; + lte: string; + }; + }; + }[]; + }; + }[]; + minimum_should_match: number; + }; + }; + }[]` + diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.gettimeshiftinterval.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.gettimeshiftinterval.md new file mode 100644 index 00000000000000..d15ccbc5dc0a1c --- /dev/null +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.gettimeshiftinterval.md @@ -0,0 +1,15 @@ + + 
+[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [AggConfigs](./kibana-plugin-plugins-data-public.aggconfigs.md) > [getTimeShiftInterval](./kibana-plugin-plugins-data-public.aggconfigs.gettimeshiftinterval.md) + +## AggConfigs.getTimeShiftInterval() method + +Signature: + +```typescript +getTimeShiftInterval(): moment.Duration | undefined; +``` +Returns: + +`moment.Duration | undefined` + diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.gettimeshifts.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.gettimeshifts.md new file mode 100644 index 00000000000000..44ab25cf30eb2b --- /dev/null +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.gettimeshifts.md @@ -0,0 +1,15 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [AggConfigs](./kibana-plugin-plugins-data-public.aggconfigs.md) > [getTimeShifts](./kibana-plugin-plugins-data-public.aggconfigs.gettimeshifts.md) + +## AggConfigs.getTimeShifts() method + +Signature: + +```typescript +getTimeShifts(): Record; +``` +Returns: + +`Record` + diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.hastimeshifts.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.hastimeshifts.md new file mode 100644 index 00000000000000..db31e549666b45 --- /dev/null +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.hastimeshifts.md @@ -0,0 +1,15 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [AggConfigs](./kibana-plugin-plugins-data-public.aggconfigs.md) > [hasTimeShifts](./kibana-plugin-plugins-data-public.aggconfigs.hastimeshifts.md) + +## AggConfigs.hasTimeShifts() method + +Signature: + +```typescript +hasTimeShifts(): boolean; +``` +Returns: + +`boolean` + diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.md index 02e9a63d95ba37..45333b6767cace 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.md @@ -22,6 +22,7 @@ export declare class AggConfigs | --- | --- | --- | --- | | [aggs](./kibana-plugin-plugins-data-public.aggconfigs.aggs.md) | | IAggConfig[] | | | [createAggConfig](./kibana-plugin-plugins-data-public.aggconfigs.createaggconfig.md) | | <T extends AggConfig = AggConfig>(params: CreateAggConfigParams, { addToAggConfigs }?: {
addToAggConfigs?: boolean | undefined;
}) => T | | +| [forceNow](./kibana-plugin-plugins-data-public.aggconfigs.forcenow.md) | | Date | | | [hierarchical](./kibana-plugin-plugins-data-public.aggconfigs.hierarchical.md) | | boolean | | | [indexPattern](./kibana-plugin-plugins-data-public.aggconfigs.indexpattern.md) | | IndexPattern | | | [timeFields](./kibana-plugin-plugins-data-public.aggconfigs.timefields.md) | | string[] | | @@ -43,8 +44,14 @@ export declare class AggConfigs | [getRequestAggs()](./kibana-plugin-plugins-data-public.aggconfigs.getrequestaggs.md) | | | | [getResponseAggById(id)](./kibana-plugin-plugins-data-public.aggconfigs.getresponseaggbyid.md) | | Find a response agg by it's id. This may be an agg in the aggConfigs, or one created specifically for a response value | | [getResponseAggs()](./kibana-plugin-plugins-data-public.aggconfigs.getresponseaggs.md) | | Gets the AggConfigs (and possibly ResponseAggConfigs) that represent the values that will be produced when all aggs are run.With multi-value metric aggs it is possible for a single agg request to result in multiple agg values, which is why the length of a vis' responseValuesAggs may be different than the vis' aggs {array\[AggConfig\]} | +| [getSearchSourceTimeFilter(forceNow)](./kibana-plugin-plugins-data-public.aggconfigs.getsearchsourcetimefilter.md) | | | +| [getTimeShiftInterval()](./kibana-plugin-plugins-data-public.aggconfigs.gettimeshiftinterval.md) | | | +| [getTimeShifts()](./kibana-plugin-plugins-data-public.aggconfigs.gettimeshifts.md) | | | +| [hasTimeShifts()](./kibana-plugin-plugins-data-public.aggconfigs.hastimeshifts.md) | | | | [jsonDataEquals(aggConfigs)](./kibana-plugin-plugins-data-public.aggconfigs.jsondataequals.md) | | Data-by-data comparison of this Aggregation Ignores the non-array indexes | | [onSearchRequestStart(searchSource, options)](./kibana-plugin-plugins-data-public.aggconfigs.onsearchrequeststart.md) | | | +| [postFlightTransform(response)](./kibana-plugin-plugins-data-public.aggconfigs.postflighttransform.md) | | | +| [setForceNow(now)](./kibana-plugin-plugins-data-public.aggconfigs.setforcenow.md) | | | | [setTimeFields(timeFields)](./kibana-plugin-plugins-data-public.aggconfigs.settimefields.md) | | | | [setTimeRange(timeRange)](./kibana-plugin-plugins-data-public.aggconfigs.settimerange.md) | | | | [toDsl()](./kibana-plugin-plugins-data-public.aggconfigs.todsl.md) | | | diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.postflighttransform.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.postflighttransform.md new file mode 100644 index 00000000000000..b34fda40a30895 --- /dev/null +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.postflighttransform.md @@ -0,0 +1,22 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [AggConfigs](./kibana-plugin-plugins-data-public.aggconfigs.md) > [postFlightTransform](./kibana-plugin-plugins-data-public.aggconfigs.postflighttransform.md) + +## AggConfigs.postFlightTransform() method + +Signature: + +```typescript +postFlightTransform(response: IEsSearchResponse): IEsSearchResponse; +``` + +## Parameters + +| Parameter | Type | Description | +| --- | --- | --- | +| response | IEsSearchResponse<any> | | + +Returns: + +`IEsSearchResponse` + diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.setforcenow.md 
b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.setforcenow.md new file mode 100644 index 00000000000000..60a1bfe0872faf --- /dev/null +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.aggconfigs.setforcenow.md @@ -0,0 +1,22 @@ + + +[Home](./index.md) > [kibana-plugin-plugins-data-public](./kibana-plugin-plugins-data-public.md) > [AggConfigs](./kibana-plugin-plugins-data-public.aggconfigs.md) > [setForceNow](./kibana-plugin-plugins-data-public.aggconfigs.setforcenow.md) + +## AggConfigs.setForceNow() method + +Signature: + +```typescript +setForceNow(now: Date | undefined): void; +``` + +## Parameters + +| Parameter | Type | Description | +| --- | --- | --- | +| now | Date | undefined | | + +Returns: + +`void` + diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.addscriptedfield.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.addscriptedfield.md index 99d2fc00a6b7b6..812f014b15a6c9 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.addscriptedfield.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.addscriptedfield.md @@ -4,6 +4,10 @@ ## IndexPattern.addScriptedField() method +> Warning: This API is now obsolete. +> +> + Add scripted field to field list Signature: diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getassavedobjectbody.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getassavedobjectbody.md index 48d94b84497bd9..cc40ab8bb1173f 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getassavedobjectbody.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getassavedobjectbody.md @@ -9,33 +9,9 @@ Returns index pattern as saved object body for saving Signature: ```typescript -getAsSavedObjectBody(): { - fieldAttrs: string | undefined; - title: string; - timeFieldName: string | undefined; - intervalName: string | undefined; - sourceFilters: string | undefined; - fields: string | undefined; - fieldFormatMap: string | undefined; - type: string | undefined; - typeMeta: string | undefined; - allowNoIndex: true | undefined; - runtimeFieldMap: string | undefined; - }; +getAsSavedObjectBody(): IndexPatternAttributes; ``` Returns: -`{ - fieldAttrs: string | undefined; - title: string; - timeFieldName: string | undefined; - intervalName: string | undefined; - sourceFilters: string | undefined; - fields: string | undefined; - fieldFormatMap: string | undefined; - type: string | undefined; - typeMeta: string | undefined; - allowNoIndex: true | undefined; - runtimeFieldMap: string | undefined; - }` +`IndexPatternAttributes` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getnonscriptedfields.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getnonscriptedfields.md index 77ce6f6f23a671..1792a979bf7490 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getnonscriptedfields.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getnonscriptedfields.md @@ -4,6 +4,10 @@ ## IndexPattern.getNonScriptedFields() method +> Warning: This API is now obsolete. 
+> +> + Signature: ```typescript diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getscriptedfields.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getscriptedfields.md index 055f07367c96e0..b6b3dcb19bac17 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getscriptedfields.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getscriptedfields.md @@ -4,6 +4,10 @@ ## IndexPattern.getScriptedFields() method +> Warning: This API is now obsolete. +> +> + Signature: ```typescript diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.removescriptedfield.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.removescriptedfield.md index aaaebdaccca5d7..91f25c09ab197a 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.removescriptedfield.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.removescriptedfield.md @@ -4,6 +4,10 @@ ## IndexPattern.removeScriptedField() method +> Warning: This API is now obsolete. +> +> + Remove scripted field from field list Signature: diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.addscriptedfield.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.addscriptedfield.md index a86fea3106225a..981f28a51ae09d 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.addscriptedfield.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.addscriptedfield.md @@ -4,6 +4,10 @@ ## IndexPattern.addScriptedField() method +> Warning: This API is now obsolete. 
+> +> + Add scripted field to field list Signature: diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getassavedobjectbody.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getassavedobjectbody.md index 668d563ff04c0e..f5e87638e2f1ca 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getassavedobjectbody.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getassavedobjectbody.md @@ -9,33 +9,9 @@ Returns index pattern as saved object body for saving Signature: ```typescript -getAsSavedObjectBody(): { - fieldAttrs: string | undefined; - title: string; - timeFieldName: string | undefined; - intervalName: string | undefined; - sourceFilters: string | undefined; - fields: string | undefined; - fieldFormatMap: string | undefined; - type: string | undefined; - typeMeta: string | undefined; - allowNoIndex: true | undefined; - runtimeFieldMap: string | undefined; - }; +getAsSavedObjectBody(): IndexPatternAttributes; ``` Returns: -`{ - fieldAttrs: string | undefined; - title: string; - timeFieldName: string | undefined; - intervalName: string | undefined; - sourceFilters: string | undefined; - fields: string | undefined; - fieldFormatMap: string | undefined; - type: string | undefined; - typeMeta: string | undefined; - allowNoIndex: true | undefined; - runtimeFieldMap: string | undefined; - }` +`IndexPatternAttributes` diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getnonscriptedfields.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getnonscriptedfields.md index 89d79d9b750faf..cff2c5de98de61 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getnonscriptedfields.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getnonscriptedfields.md @@ -4,6 +4,10 @@ ## IndexPattern.getNonScriptedFields() method +> Warning: This API is now obsolete. +> +> + Signature: ```typescript diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getscriptedfields.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getscriptedfields.md index edfff8ec5efacc..62b8f1b62ac787 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getscriptedfields.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getscriptedfields.md @@ -4,6 +4,10 @@ ## IndexPattern.getScriptedFields() method +> Warning: This API is now obsolete. +> +> + Signature: ```typescript diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.removescriptedfield.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.removescriptedfield.md index 3162a7f42dd121..f6beed7389e438 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.removescriptedfield.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.removescriptedfield.md @@ -4,6 +4,10 @@ ## IndexPattern.removeScriptedField() method +> Warning: This API is now obsolete. 
+> +> + Remove scripted field from field list Signature: diff --git a/docs/discover/images/add-field-to-pattern.png b/docs/discover/images/add-field-to-pattern.png index 84dfcb0745c691..04f564ab89857e 100644 Binary files a/docs/discover/images/add-field-to-pattern.png and b/docs/discover/images/add-field-to-pattern.png differ diff --git a/docs/discover/images/customer.png b/docs/discover/images/customer.png new file mode 100644 index 00000000000000..f2042080f0a378 Binary files /dev/null and b/docs/discover/images/customer.png differ diff --git a/docs/discover/images/discover-from-visualize.png b/docs/discover/images/discover-from-visualize.png index cbf64dff18b15e..07d34b8b49018f 100644 Binary files a/docs/discover/images/discover-from-visualize.png and b/docs/discover/images/discover-from-visualize.png differ diff --git a/docs/discover/images/discover-visualize.png b/docs/discover/images/discover-visualize.png index f4bcaf8aca028e..242aa16709a9fd 100644 Binary files a/docs/discover/images/discover-visualize.png and b/docs/discover/images/discover-visualize.png differ diff --git a/docs/discover/images/document-table-expanded.png b/docs/discover/images/document-table-expanded.png index 87c8e2047a0cbc..38ea0987b120af 100644 Binary files a/docs/discover/images/document-table-expanded.png and b/docs/discover/images/document-table-expanded.png differ diff --git a/docs/discover/images/document-table.png b/docs/discover/images/document-table.png index f47ca5353124ed..097feb4dc90341 100644 Binary files a/docs/discover/images/document-table.png and b/docs/discover/images/document-table.png differ diff --git a/docs/discover/images/hello-field.png b/docs/discover/images/hello-field.png index 07d97e054d7ecb..fe4cfbb41fdc2d 100644 Binary files a/docs/discover/images/hello-field.png and b/docs/discover/images/hello-field.png differ diff --git a/docs/index-extra-title-page.html b/docs/index-extra-title-page.html new file mode 100644 index 00000000000000..06d8c0b2e827b9 --- /dev/null +++ b/docs/index-extra-title-page.html @@ -0,0 +1,36 @@ +
+<p>From creating beautiful visualizations to managing the Elastic Stack, learn how Kibana helps you get the most of your data.</p>
+<h3>New to Kibana?</h3>
+<h3>Popular topics</h3>
+<h3>All topics</h3>
+<!-- remaining markup of this title page is unrecoverable from the extracted text; only the intro sentence and section headings above survive -->
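The time-shift additions to `AggConfigs` documented earlier (`setForceNow`, `hasTimeShifts`, `postFlightTransform`) are easiest to read as a usage sketch. The following is illustrative only and not code from this PR; the `aggConfigs` instance and `rawResponse` value are assumed to come from the data plugin, and only the documented method signatures are relied on:

```ts
import type { IEsSearchResponse } from 'src/plugins/data/public';

// Assumed inputs: an AggConfigs instance (typed structurally here to avoid
// asserting the real import path) and a raw ES search response.
declare const aggConfigs: {
  setForceNow(now: Date | undefined): void;
  hasTimeShifts(): boolean;
  postFlightTransform(response: IEsSearchResponse): IEsSearchResponse;
};
declare const rawResponse: IEsSearchResponse;

// Pin "now" so the original query and its time-shifted copies resolve
// relative time ranges to the same instant.
aggConfigs.setForceNow(new Date());

// If any agg carries a time shift, fold the shifted buckets back into the
// unshifted response shape before the visualization consumes it.
const response = aggConfigs.hasTimeShifts()
  ? aggConfigs.postFlightTransform(rawResponse)
  : rawResponse;
```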
diff --git a/docs/setup/settings.asciidoc b/docs/setup/settings.asciidoc index bac2b0ebdf15fa..ddb906f390a2d9 100644 --- a/docs/setup/settings.asciidoc +++ b/docs/setup/settings.asciidoc @@ -262,11 +262,11 @@ on the {kib} index at startup. {kib} users still need to authenticate with {es} {ref}/indices-create-index.html[index name limitations]. *Default: `".kibana"`* -| `kibana.autocompleteTimeout:` {ess-icon} +| `data.autocomplete.valueSuggestions.timeout:` {ess-icon} | Time in milliseconds to wait for autocomplete suggestions from {es}. This value must be a whole number greater than zero. *Default: `"1000"`* -| `kibana.autocompleteTerminateAfter:` {ess-icon} +| `data.autocomplete.valueSuggestions.terminateAfter:` {ess-icon} | Maximum number of documents loaded by each shard to generate autocomplete suggestions. This value must be a whole number greater than zero. *Default: `"100000"`* @@ -596,6 +596,10 @@ inactive socket. *Default: `"120000"`* | Paths to a PEM-encoded X.509 server certificate and its corresponding private key. These are used by {kib} to establish trust when receiving inbound SSL/TLS connections from users. +|[[server-uuid]] `server.uuid:` + | The unique identifier for this {kib} instance. + + |=== [NOTE] diff --git a/docs/user/dashboard/images/lens_missing_values_strategy.png b/docs/user/dashboard/images/lens_missing_values_strategy.png new file mode 100644 index 00000000000000..d77c230b533f58 Binary files /dev/null and b/docs/user/dashboard/images/lens_missing_values_strategy.png differ diff --git a/docs/user/dashboard/lens.asciidoc b/docs/user/dashboard/lens.asciidoc index 1f3b4c429f7045..c5718b2a089bfc 100644 --- a/docs/user/dashboard/lens.asciidoc +++ b/docs/user/dashboard/lens.asciidoc @@ -190,6 +190,8 @@ A subset of *Lens* visualizations support value labels. [role="screenshot"] image::images/lens_value_labels_xychart_toggle.png[Lens Bar chart value labels menu] +NOTE: In bar charts, you cannot change the position of the value labels. + * *Pie*, *Donut*, and *Treemap* + [role="screenshot"] @@ -243,6 +245,9 @@ refer to <>. Sorting dimensions in visualizations is unsupported in *Lens*. +You can sort the dimensions for a single column in data tables: click the column header, then select the sorting criteria you want to use. +If you use the dimension as `Columns`, then all the columns that belong to the same dimension are sorted in the table. + [float] [[is-it-possible-to-use-saved-serches-in-lens]] ===== How do I visualize saved searches? @@ -254,3 +259,47 @@ Visualizing saved searches is unsupported in *Lens*. ===== How do I change the number of suggestions? Configuring the *Suggestions* that *Lens* automatically populates is unsupported. + +[float] +[[is-it-possible-to-use-different-indexpatterns-in-lens]] +===== Can I visualize multiple index patterns in a single visualization? + +You can create *Bar*, *Line*, and *Area* charts from multiple index patterns. + +Each *Layer* in a visualization is associated with an index pattern, and multiple *Layers* can be combined within the same visualization. Each *Layer* also has a chart switcher button to select the best visualization type for the dataset. +You can also change the index pattern for a single *Layer*. + +[float] +[[why-my-field-x-is-missing-from-the-fields-list]] +===== Why is my field X missing from the fields list? + +*Lens* does not support the visualization of full-text fields, and therefore does not show them in the data summary. 
+ +[float] +[[how-to-handle-gaps-in-time-series-visualizations]] +===== How do I handle gaps in time series visualizations? + +*Lens* provides a set of features to handle missing values for *Area* and *Line* charts, which is useful for sparse time series data. + +To select a different way to represent missing values, open the *Visual options* menu, then select how to handle missing values. The default is to hide the missing values. + +[role="screenshot"] +image::images/lens_missing_values_strategy.png[Lens Missing values strategies menu] + +[float] +[[is-it-possible-to-change-the-scale-of-Y-axis]] +===== Is it possible to statically define the scale of the y-axis in a visualization? + +Starting the y-axis at a value other than 0, or using a logarithmic scale, is unsupported in *Lens*. + +[float] +[[is-it-possible-to-have-pagination-for-datatable]] +===== Is it possible to have pagination in a data table? + +Pagination in a data table is unsupported in *Lens*. However, the <> supports pagination. + +[float] +[[is-it-possible-to-have-more-than-one-Y-axis-scale]] +===== Is it possible to have more than one y-axis scale in visualizations? + +*Lens* lets you assign each Y dimension to one of two distinct axes: *left* and *right*. Each axis can have a different scale. diff --git a/docs/user/discover.asciidoc b/docs/user/discover.asciidoc index ea413747a2aadf..82a7dd300f028b 100644 --- a/docs/user/discover.asciidoc +++ b/docs/user/discover.asciidoc @@ -101,22 +101,24 @@ image:images/find-manufacturer-field.png[Fields list that displays the top five . Click image:images/add-icon.png[Add icon] to toggle the field into the document table. -. Find the `day of week` field and add it to your document table. Your table should look like this: +. Find the `customer_first_name` and `customer_last_name` fields and add +them to your document table. Your table should look similar to this: + [role="screenshot"] -image:images/document-table.png[Document table with fields for manufacturer, geo.country_iso_code, and day_of_week] - +image:images/document-table.png[Document table with fields for manufacturer, customer_first_name, and customer_last_name] . To rearrange the table columns, hover the mouse over a -column header, and then use the move and sort controls. +column header, and then use the move controls. + +. To view more of the document table, click *Hide chart*. [float] [[add-field-in-discover]] -=== Add a field +=== Add a field to your index pattern What happens if you forgot to define an important value as a separate field? Or, what if you -want to combine two fields and treat them as one? +want to combine two fields and treat them as one? This is where {ref}/runtime.html[runtime fields] come into play. +You can add a runtime field to your index pattern from inside of **Discover**, and then use that field for analysis and visualizations, the same way you do with other fields. @@ -129,7 +131,7 @@ image:images/add-field-to-pattern.png[Dropdown menu located next to index patter . Turn on *Set value*. -. Use the Painless scripting language to define the field: +. Define the script using the Painless scripting language. Runtime fields require an `emit()` statement. + ```ts emit("Hello World!"); @@ -137,15 +139,25 @@ emit("Hello World!"); . Click *Save*. -. In the fields list, search for the *hello* field, and then click it. -+ -You'll see the top values for the field. 
The pop-up also includes actions for filtering, -editing, and deleting the field. +. In the fields list, search for the *hello* field, and then add it to the table to view its value. + [role="screenshot"] -image:images/hello-field.png[Top values for the hello field, width=50%] +image:images/hello-field.png[hello field in the document table] -For more information on adding fields and Painless scripting language examples, refer to <>. +. Create a second field named `customer` that combines customer last name and first initial. ++ +```ts +String str = doc['customer_first_name.keyword'].value; +char ch1 = str.charAt(0); +emit(doc['customer_last_name.keyword'].value + ", " + ch1); +``` +. Simplify the document table by removing `customer_first_name` and `customer_last_name` and adding `customer` in their place. ++ +[role="screenshot"] +image:images/customer.png[Customer last name, first initial in the document table] ++ +For more information on adding fields and Painless scripting language examples, +refer to <>. [float] @@ -202,7 +214,7 @@ click the close icon (x) next to its name in the filter bar. Dive into an individual document to view its fields and the documents that occurred before and after it. -. In the document table, expand any document. To view more of the document table, click *Hide chart*. +. In the document table, expand any document. + [role="screenshot"] image:images/document-table-expanded.png[Table view with document expanded] @@ -238,7 +250,7 @@ image:images/discover-save-saved-search.png[Save saved search in Discover, width If a field can be {ref}/search-aggregations.html[aggregated], you can quickly visualize it from **Discover**. -. From the **Selected fields** list, click `day_of_week`, and then click **Visualize**. +. From the **Available fields** list, click `day_of_week`, and then click **Visualize**. + [role="screenshot"] image:images/discover-visualize.png[Discover sidebar field popover with visualize button, width=75%] diff --git a/docs/user/production-considerations/production.asciidoc b/docs/user/production-considerations/production.asciidoc index 726747d5d69d0e..1ffca4b6ae6ab1 100644 --- a/docs/user/production-considerations/production.asciidoc +++ b/docs/user/production-considerations/production.asciidoc @@ -8,7 +8,6 @@ * <> * <> * <> -* <> * <> * <> * <> @@ -22,9 +21,8 @@ Kibana instances that are all connected to the same Elasticsearch instance. While Kibana isn't terribly resource intensive, we still recommend running Kibana separate from your Elasticsearch data or master nodes. To distribute Kibana -traffic across the nodes in your Elasticsearch cluster, you can run Kibana -and an Elasticsearch client node on the same machine. For more information, see -<>. +traffic across the nodes in your Elasticsearch cluster, +you can configure Kibana to use a list of Elasticsearch hosts. [float] [[configuring-kibana-shield]] @@ -69,58 +67,6 @@ csp.strict: true See <>. -[float] -[[load-balancing-es]] -=== Load Balancing across multiple {es} nodes -If you have multiple nodes in your Elasticsearch cluster, the easiest way to distribute Kibana requests -across the nodes is to run an Elasticsearch _Coordinating only_ node on the same machine as Kibana. -Elasticsearch Coordinating only nodes are essentially smart load balancers that are part of the cluster. They -process incoming HTTP requests, redirect operations to the other nodes in the cluster as needed, and -gather and return the results. 
For more information, see -{ref}/modules-node.html[Node] in the Elasticsearch reference. - -To use a local client node to load balance Kibana requests: - -. Install Elasticsearch on the same machine as Kibana. -. Configure the node as a Coordinating only node. In `elasticsearch.yml`, set `node.data`, `node.master` and `node.ingest` to `false`: -+ -[source,js] --------- -# 3. You want this node to be neither master nor data node nor ingest node, but -# to act as a "search load balancer" (fetching data from nodes, -# aggregating results, etc.) -# -node.master: false -node.data: false -node.ingest: false --------- -. Configure the client node to join your Elasticsearch cluster. In `elasticsearch.yml`, set the `cluster.name` to the -name of your cluster. -+ -[source,js] --------- -cluster.name: "my_cluster" --------- -. Check your transport and HTTP host configs in `elasticsearch.yml` under `network.host` and `transport.host`. The `transport.host` needs to be on the network reachable to the cluster members, the `network.host` is the network for the HTTP connection for Kibana (localhost:9200 by default). -+ -[source,js] --------- -network.host: localhost -http.port: 9200 - -# by default transport.host refers to network.host -transport.host: -transport.tcp.port: 9300 - 9400 --------- -. Make sure Kibana is configured to point to your local client node. In `kibana.yml`, the `elasticsearch.hosts` setting should be set to -`["localhost:9200"]`. -+ -[source,js] --------- -# The Elasticsearch instance to use for all your queries. -elasticsearch.hosts: ["http://localhost:9200"] --------- - [float] [[load-balancing-kibana]] === Load balancing across multiple Kibana instances diff --git a/docs/user/security/api-keys/images/create-api-key.png b/docs/user/security/api-keys/images/create-api-key.png index c763dcbfa53f86..b0041f69b05b69 100644 Binary files a/docs/user/security/api-keys/images/create-api-key.png and b/docs/user/security/api-keys/images/create-api-key.png differ diff --git a/docs/user/security/securing-communications/elasticsearch-mutual-tls.asciidoc b/docs/user/security/securing-communications/elasticsearch-mutual-tls.asciidoc index f5192f4641d4df..63fd2799c90cf8 100644 --- a/docs/user/security/securing-communications/elasticsearch-mutual-tls.asciidoc +++ b/docs/user/security/securing-communications/elasticsearch-mutual-tls.asciidoc @@ -39,7 +39,7 @@ This entails generating a "server certificate" for {es} to use on the HTTP layer . Obtain a client certificate and private key for {kib}. + -- -{kib} must this "client certificate" and corresponding private key when connecting to {es}. +{kib} uses the client certificate and corresponding private key when connecting to {es}. NOTE: This is not the same as the <> that {kib} will present to web browsers. 
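For intuition about the corrected sentence above, here is a minimal, hypothetical Node.js sketch of mutual TLS. This is not Kibana's connection code (in {kib} the equivalent values come from the `elasticsearch.ssl.*` settings), and the file paths are placeholders:

```ts
import { readFileSync } from 'fs';
import https from 'https';

// Mutual TLS: the client presents its own certificate and key in addition
// to verifying the server's certificate against a trusted CA.
const agent = new https.Agent({
  cert: readFileSync('kibana-client.crt'), // client certificate presented to {es} (placeholder path)
  key: readFileSync('kibana-client.key'), // corresponding private key (placeholder path)
  ca: readFileSync('elasticsearch-ca.pem'), // CA used to verify the {es} server certificate
});

https.get('https://localhost:9200/', { agent }, (res) => {
  console.log(`mutual TLS handshake succeeded: HTTP ${res.statusCode}`);
});
```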
diff --git a/package.json b/package.json index f41c85c4c7b805..e0bebcaacd7eaa 100644 --- a/package.json +++ b/package.json @@ -86,6 +86,7 @@ "**/prismjs": "1.23.0", "**/react-syntax-highlighter": "^15.3.1", "**/react-syntax-highlighter/**/highlight.js": "^10.4.1", + "**/refractor": "^3.3.1", "**/request": "^2.88.2", "**/trim": "1.0.1", "**/typescript": "4.1.3", @@ -102,7 +103,7 @@ "@elastic/datemath": "link:bazel-bin/packages/elastic-datemath/npm_module", "@elastic/elasticsearch": "npm:@elastic/elasticsearch-canary@^8.0.0-canary.4", "@elastic/ems-client": "7.13.0", - "@elastic/eui": "32.1.0", + "@elastic/eui": "33.0.0", "@elastic/filesaver": "1.1.2", "@elastic/good": "^9.0.1-kibana3", "@elastic/maki": "6.3.0", @@ -130,20 +131,21 @@ "@kbn/config": "link:bazel-bin/packages/kbn-config/npm_module", "@kbn/config-schema": "link:bazel-bin/packages/kbn-config-schema/npm_module", "@kbn/crypto": "link:bazel-bin/packages/kbn-crypto/npm_module", - "@kbn/mapbox-gl": "link:bazel-bin/packages/kbn-mapbox-gl/npm_module", "@kbn/i18n": "link:bazel-bin/packages/kbn-i18n/npm_module", "@kbn/interpreter": "link:packages/kbn-interpreter", "@kbn/io-ts-utils": "link:bazel-bin/packages/kbn-io-ts-utils/npm_module", "@kbn/legacy-logging": "link:bazel-bin/packages/kbn-legacy-logging/npm_module", "@kbn/logging": "link:bazel-bin/packages/kbn-logging/npm_module", + "@kbn/mapbox-gl": "link:bazel-bin/packages/kbn-mapbox-gl/npm_module", "@kbn/monaco": "link:bazel-bin/packages/kbn-monaco/npm_module", - "@kbn/securitysolution-list-constants": "link:bazel-bin/packages/kbn-securitysolution-list-constants/npm_module", + "@kbn/rule-data-utils": "link:packages/kbn-rule-data-utils", "@kbn/securitysolution-es-utils": "link:bazel-bin/packages/kbn-securitysolution-es-utils/npm_module", - "@kbn/securitysolution-io-ts-types": "link:bazel-bin/packages/kbn-securitysolution-io-ts-types/npm_module", "@kbn/securitysolution-io-ts-alerting-types": "link:bazel-bin/packages/kbn-securitysolution-io-ts-alerting-types/npm_module", "@kbn/securitysolution-io-ts-list-types": "link:bazel-bin/packages/kbn-securitysolution-io-ts-list-types/npm_module", + "@kbn/securitysolution-io-ts-types": "link:bazel-bin/packages/kbn-securitysolution-io-ts-types/npm_module", "@kbn/securitysolution-io-ts-utils": "link:bazel-bin/packages/kbn-securitysolution-io-ts-utils/npm_module", "@kbn/securitysolution-list-api": "link:bazel-bin/packages/kbn-securitysolution-list-api/npm_module", + "@kbn/securitysolution-list-constants": "link:bazel-bin/packages/kbn-securitysolution-list-constants/npm_module", "@kbn/securitysolution-list-hooks": "link:bazel-bin/packages/kbn-securitysolution-list-hooks/npm_module", "@kbn/securitysolution-list-utils": "link:bazel-bin/packages/kbn-securitysolution-list-utils/npm_module", "@kbn/securitysolution-utils": "link:bazel-bin/packages/kbn-securitysolution-utils/npm_module", @@ -162,7 +164,6 @@ "@mapbox/mapbox-gl-rtl-text": "0.2.3", "@mapbox/vector-tile": "1.3.1", "@reduxjs/toolkit": "^1.5.1", - "@scant/router": "^0.1.1", "@slack/webhook": "^5.0.4", "@turf/along": "6.0.1", "@turf/area": "6.0.1", @@ -229,6 +230,7 @@ "expiry-js": "0.1.7", "extract-zip": "^2.0.1", "fast-deep-equal": "^3.1.1", + "fflate": "^0.6.9", "file-saver": "^1.3.8", "file-type": "^10.9.0", "focus-trap-react": "^3.1.1", @@ -274,7 +276,6 @@ "json-stringify-safe": "5.0.1", "jsonwebtoken": "^8.5.1", "jsts": "^1.6.2", - "@kbn/rule-data-utils": "link:packages/kbn-rule-data-utils", "kea": "^2.4.2", "leaflet": "1.5.1", "leaflet-draw": "0.4.14", diff --git 
a/packages/kbn-config/src/config_service.test.ts b/packages/kbn-config/src/config_service.test.ts index c2d4f15b6d9153..b1b622381abb1c 100644 --- a/packages/kbn-config/src/config_service.test.ts +++ b/packages/kbn-config/src/config_service.test.ts @@ -418,8 +418,14 @@ test('logs deprecation warning during validation', async () => { const configService = new ConfigService(rawConfig, defaultEnv, logger); mockApplyDeprecations.mockImplementationOnce((config, deprecations, createAddDeprecation) => { const addDeprecation = createAddDeprecation!(''); - addDeprecation({ message: 'some deprecation message' }); - addDeprecation({ message: 'another deprecation message' }); + addDeprecation({ + message: 'some deprecation message', + correctiveActions: { manualSteps: ['do X'] }, + }); + addDeprecation({ + message: 'another deprecation message', + correctiveActions: { manualSteps: ['do Y'] }, + }); return { config, changedPaths: mockedChangedPaths }; }); @@ -444,13 +450,24 @@ test('does not log warnings for silent deprecations during validation', async () mockApplyDeprecations .mockImplementationOnce((config, deprecations, createAddDeprecation) => { const addDeprecation = createAddDeprecation!(''); - addDeprecation({ message: 'some deprecation message', silent: true }); - addDeprecation({ message: 'another deprecation message' }); + addDeprecation({ + message: 'some deprecation message', + correctiveActions: { manualSteps: ['do X'] }, + silent: true, + }); + addDeprecation({ + message: 'another deprecation message', + correctiveActions: { manualSteps: ['do Y'] }, + }); return { config, changedPaths: mockedChangedPaths }; }) .mockImplementationOnce((config, deprecations, createAddDeprecation) => { const addDeprecation = createAddDeprecation!(''); - addDeprecation({ message: 'I am silent', silent: true }); + addDeprecation({ + message: 'I am silent', + silent: true, + correctiveActions: { manualSteps: ['do Z'] }, + }); return { config, changedPaths: mockedChangedPaths }; }); @@ -519,7 +536,11 @@ describe('getHandledDeprecatedConfigs', () => { mockApplyDeprecations.mockImplementationOnce((config, deprecations, createAddDeprecation) => { deprecations.forEach((deprecation) => { const addDeprecation = createAddDeprecation!(deprecation.path); - addDeprecation({ message: `some deprecation message`, documentationUrl: 'some-url' }); + addDeprecation({ + message: `some deprecation message`, + documentationUrl: 'some-url', + correctiveActions: { manualSteps: ['do X'] }, + }); }); return { config, changedPaths: mockedChangedPaths }; }); @@ -532,6 +553,11 @@ describe('getHandledDeprecatedConfigs', () => { "base", Array [ Object { + "correctiveActions": Object { + "manualSteps": Array [ + "do X", + ], + }, "documentationUrl": "some-url", "message": "some deprecation message", }, diff --git a/packages/kbn-config/src/deprecation/types.ts b/packages/kbn-config/src/deprecation/types.ts index 0522365ad76c11..1791dac060e2bf 100644 --- a/packages/kbn-config/src/deprecation/types.ts +++ b/packages/kbn-config/src/deprecation/types.ts @@ -25,8 +25,8 @@ export interface DeprecatedConfigDetails { silent?: boolean; /* (optional) link to the documentation for more details on the deprecation. */ documentationUrl?: string; - /* (optional) corrective action needed to fix this deprecation. */ - correctiveActions?: { + /* corrective action needed to fix this deprecation. */ + correctiveActions: { /** * Specify a list of manual steps our users need to follow * to fix the deprecation before upgrade. 
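With `correctiveActions` now required on `DeprecatedConfigDetails`, every handler must supply at least `manualSteps`, as the updated tests above do. Below is a minimal sketch of a handler under the new contract, using the same `ConfigDeprecation` pattern that `core_deprecations.ts` uses later in this diff; the `myPlugin.oldSetting` key is made up for illustration:

```ts
import type { ConfigDeprecation } from '@kbn/config';

// Hypothetical deprecation handler: correctiveActions.manualSteps can no
// longer be omitted when calling addDeprecation().
const oldSettingDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => {
  if (settings.myPlugin?.oldSetting !== undefined) {
    addDeprecation({
      message: '"myPlugin.oldSetting" is deprecated and will be removed in 8.0.',
      correctiveActions: {
        manualSteps: [`Remove "myPlugin.oldSetting" from your kibana.yml.`],
      },
    });
  }
};
```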
diff --git a/packages/kbn-crypto/BUILD.bazel b/packages/kbn-crypto/BUILD.bazel index b1723e4120e797..20793e27de6298 100644 --- a/packages/kbn-crypto/BUILD.bazel +++ b/packages/kbn-crypto/BUILD.bazel @@ -38,6 +38,7 @@ TYPES_DEPS = [ "@npm//@types/node", "@npm//@types/node-forge", "@npm//@types/testing-library__jest-dom", + "@npm//resize-observer-polyfill" ] DEPS = SRC_DEPS + TYPES_DEPS diff --git a/packages/kbn-optimizer/limits.yml b/packages/kbn-optimizer/limits.yml index c28fd835919602..6ccf6269751b1e 100644 --- a/packages/kbn-optimizer/limits.yml +++ b/packages/kbn-optimizer/limits.yml @@ -3,7 +3,7 @@ pageLoadAssetSize: alerting: 106936 apm: 64385 apmOss: 18996 - bfetch: 41874 + bfetch: 51874 canvas: 1066647 charts: 195358 cloud: 21076 diff --git a/packages/kbn-optimizer/src/optimizer/cache_keys.ts b/packages/kbn-optimizer/src/optimizer/cache_keys.ts index 9275cab1fdff0c..e622b3b8f593e3 100644 --- a/packages/kbn-optimizer/src/optimizer/cache_keys.ts +++ b/packages/kbn-optimizer/src/optimizer/cache_keys.ts @@ -121,6 +121,10 @@ export interface OptimizerCacheKey { } async function getLastCommit() { + if (!Fs.existsSync(Path.join(REPO_ROOT, '.git'))) { + return undefined; + } + const { stdout } = await execa( 'git', ['log', '-n', '1', '--pretty=format:%H', '--', RELATIVE_DIR], diff --git a/packages/kbn-optimizer/src/optimizer/get_changes.test.ts b/packages/kbn-optimizer/src/optimizer/get_changes.test.ts index 4d4a902bb186a6..d8be1917c101bd 100644 --- a/packages/kbn-optimizer/src/optimizer/get_changes.test.ts +++ b/packages/kbn-optimizer/src/optimizer/get_changes.test.ts @@ -7,6 +7,7 @@ */ jest.mock('execa'); +jest.mock('fs'); import { getChanges } from './get_changes'; @@ -15,6 +16,8 @@ const execa: jest.Mock = jest.requireMock('execa'); it('parses git ls-files output', async () => { expect.assertions(4); + jest.requireMock('fs').existsSync.mockImplementation(() => true); + execa.mockImplementation((cmd, args, options) => { expect(cmd).toBe('git'); expect(args).toEqual(['ls-files', '-dmt', '--', '/foo/bar/x']); diff --git a/packages/kbn-optimizer/src/optimizer/get_changes.ts b/packages/kbn-optimizer/src/optimizer/get_changes.ts index e5574d9b1a2e47..73fa5e9d931e7b 100644 --- a/packages/kbn-optimizer/src/optimizer/get_changes.ts +++ b/packages/kbn-optimizer/src/optimizer/get_changes.ts @@ -9,6 +9,7 @@ import Path from 'path'; import execa from 'execa'; +import fs from 'fs'; export type Changes = Map<string, 'modified' | 'deleted'>; @@ -16,11 +17,16 @@ export type Changes = Map<string, 'modified' | 'deleted'>; * get the changes in all the context directories (plugin public paths) */ export async function getChanges(dir: string) { + const changes: Changes = new Map(); + + if (!fs.existsSync(Path.join(dir, '.git'))) { + return changes; + } + const { stdout } = await execa('git', ['ls-files', '-dmt', '--', dir], { cwd: dir, }); - const changes: Changes = new Map(); const output = stdout.trim(); if (output) { diff --git a/packages/kbn-telemetry-tools/BUILD.bazel b/packages/kbn-telemetry-tools/BUILD.bazel index 9a6b4a10bd1905..d394b0c93d45fb 100644 --- a/packages/kbn-telemetry-tools/BUILD.bazel +++ b/packages/kbn-telemetry-tools/BUILD.bazel @@ -47,6 +47,7 @@ TYPES_DEPS = [ "@npm//@types/node", "@npm//@types/normalize-path", "@npm//@types/testing-library__jest-dom", + "@npm//resize-observer-polyfill" ] DEPS = SRC_DEPS + TYPES_DEPS diff --git a/packages/kbn-test/src/functional_test_runner/functional_test_runner.ts b/packages/kbn-test/src/functional_test_runner/functional_test_runner.ts index b5d379d3426e72..0a2dba60742adf --- 
a/packages/kbn-test/src/functional_test_runner/functional_test_runner.ts +++ b/packages/kbn-test/src/functional_test_runner/functional_test_runner.ts @@ -5,6 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ +import { relative } from 'path'; import { ToolingLog } from '@kbn/dev-utils'; @@ -120,6 +121,9 @@ export class FunctionalTestRunner { throw new Error('No tests defined.'); } + // eslint-disable-next-line + console.log(`--- Running ${relative(process.cwd(), this.configFile)}`); + const dockerServers = new DockerServersService( config.get('dockerServers'), this.log, diff --git a/packages/kbn-test/src/functional_tests/tasks.js b/packages/kbn-test/src/functional_tests/tasks.js index 02c55b6af91dcc..122a5a23842d6f 100644 --- a/packages/kbn-test/src/functional_tests/tasks.js +++ b/packages/kbn-test/src/functional_tests/tasks.js @@ -88,6 +88,8 @@ export async function runTests(options) { continue; } + console.log(`--- Running ${relative(process.cwd(), configPath)}`); + await withProcRunner(log, async (procs) => { const config = await readConfigFile(log, configPath); diff --git a/packages/kbn-test/src/kbn_client/kbn_client.ts b/packages/kbn-test/src/kbn_client/kbn_client.ts index 3fa74412c1a8bf..ac14a399918cb9 100644 --- a/packages/kbn-test/src/kbn_client/kbn_client.ts +++ b/packages/kbn-test/src/kbn_client/kbn_client.ts @@ -8,13 +8,14 @@ import { ToolingLog } from '@kbn/dev-utils'; -import { KbnClientRequester, ReqOptions } from './kbn_client_requester'; -import { KbnClientStatus } from './kbn_client_status'; +import { KbnClientImportExport } from './kbn_client_import_export'; import { KbnClientPlugins } from './kbn_client_plugins'; -import { KbnClientVersion } from './kbn_client_version'; +import { KbnClientRequester, ReqOptions } from './kbn_client_requester'; import { KbnClientSavedObjects } from './kbn_client_saved_objects'; +import { KbnClientSpaces } from './kbn_client_spaces'; +import { KbnClientStatus } from './kbn_client_status'; import { KbnClientUiSettings, UiSettingValues } from './kbn_client_ui_settings'; -import { KbnClientImportExport } from './kbn_client_import_export'; +import { KbnClientVersion } from './kbn_client_version'; export interface KbnClientOptions { url: string; @@ -29,6 +30,7 @@ export class KbnClient { readonly plugins: KbnClientPlugins; readonly version: KbnClientVersion; readonly savedObjects: KbnClientSavedObjects; + readonly spaces: KbnClientSpaces; readonly uiSettings: KbnClientUiSettings; readonly importExport: KbnClientImportExport; @@ -59,6 +61,7 @@ export class KbnClient { this.plugins = new KbnClientPlugins(this.status); this.version = new KbnClientVersion(this.status); this.savedObjects = new KbnClientSavedObjects(this.log, this.requester); + this.spaces = new KbnClientSpaces(this.requester); this.uiSettings = new KbnClientUiSettings(this.log, this.requester, this.uiSettingDefaults); this.importExport = new KbnClientImportExport( this.log, diff --git a/packages/kbn-test/src/kbn_client/kbn_client_requester.ts b/packages/kbn-test/src/kbn_client/kbn_client_requester.ts index af75137d148e97..a194b593b3863a 100644 --- a/packages/kbn-test/src/kbn_client/kbn_client_requester.ts +++ b/packages/kbn-test/src/kbn_client/kbn_client_requester.ts @@ -121,6 +121,8 @@ export class KbnClientRequester { responseType: options.responseType, // work around https://github.com/axios/axios/issues/2791 transformResponse: options.responseType === 'text' ? 
[(x) => x] : undefined, + maxContentLength: 30000000, + maxBodyLength: 30000000, paramsSerializer: (params) => Qs.stringify(params), }); diff --git a/packages/kbn-test/src/kbn_client/kbn_client_spaces.ts b/packages/kbn-test/src/kbn_client/kbn_client_spaces.ts new file mode 100644 index 00000000000000..e88531606e9178 --- /dev/null +++ b/packages/kbn-test/src/kbn_client/kbn_client_spaces.ts @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import { KbnClientRequester, uriencode } from './kbn_client_requester'; + +interface UpdateBody { + name: string; + description?: string; + disabledFeatures?: string | string[]; + initials?: string; + color?: string; + imageUrl?: string; +} + +interface CreateBody extends UpdateBody { + id: string; +} + +export class KbnClientSpaces { + constructor(private readonly requester: KbnClientRequester) {} + + async create(body: CreateBody) { + await this.requester.request({ + method: 'POST', + path: '/api/spaces/space', + body, + }); + } + + async update(id: string, body: UpdateBody) { + await this.requester.request({ + method: 'PUT', + path: uriencode`/api/spaces/space/${id}`, + body, + }); + } + + async get(id: string) { + const { data } = await this.requester.request({ + method: 'GET', + path: uriencode`/api/spaces/space/${id}`, + }); + + return data; + } + + async list() { + const { data } = await this.requester.request({ + method: 'GET', + path: '/api/spaces/space', + }); + + return data; + } + + async delete(id: string) { + await this.requester.request({ + method: 'DELETE', + path: uriencode`/api/spaces/space/${id}`, + }); + } +} diff --git a/packages/kbn-test/src/report_path.ts b/packages/kbn-test/src/report_path.ts index 9f41fdd41c0616..dcda0f5eb2d3cf 100644 --- a/packages/kbn-test/src/report_path.ts +++ b/packages/kbn-test/src/report_path.ts @@ -16,11 +16,17 @@ export function getUniqueJunitReportPath( reportName: string, counter?: number ): string { + const BUILDKITE_ID_SUFFIX = process.env.BUILDKITE_JOB_ID + ? `-${process.env.BUILDKITE_JOB_ID}` + : ''; + const path = Path.resolve( rootDirectory, 'target/junit', process.env.JOB || '.', - `TEST-${CI_PARALLEL_PROCESS_PREFIX}${reportName}${counter ? `-${counter}` : ''}.xml` + `TEST-${CI_PARALLEL_PROCESS_PREFIX}${reportName}${ + counter ? 
`-${counter}` : '' + }${BUILDKITE_ID_SUFFIX}.xml` ); return Fs.existsSync(path) diff --git a/packages/kbn-ui-shared-deps/entry.js b/packages/kbn-ui-shared-deps/entry.js index 4029ce28faf5bf..d3755ed7c5f293 100644 --- a/packages/kbn-ui-shared-deps/entry.js +++ b/packages/kbn-ui-shared-deps/entry.js @@ -44,6 +44,8 @@ export const Theme = require('./theme.ts'); export const Lodash = require('lodash'); export const LodashFp = require('lodash/fp'); +export const Fflate = require('fflate/esm/browser'); + // runtime deps which don't need to be copied across all bundles export const TsLib = require('tslib'); export const KbnAnalytics = require('@kbn/analytics'); diff --git a/packages/kbn-ui-shared-deps/index.js b/packages/kbn-ui-shared-deps/index.js index 62ddb09d25add7..877bf3df6c039d 100644 --- a/packages/kbn-ui-shared-deps/index.js +++ b/packages/kbn-ui-shared-deps/index.js @@ -52,6 +52,7 @@ exports.externals = { '@elastic/eui/dist/eui_theme_dark.json': '__kbnSharedDeps__.Theme.euiDarkVars', lodash: '__kbnSharedDeps__.Lodash', 'lodash/fp': '__kbnSharedDeps__.LodashFp', + fflate: '__kbnSharedDeps__.Fflate', /** * runtime deps which don't need to be copied across all bundles diff --git a/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap b/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap index 575a247ffeccb5..0f5efe667ec2fc 100644 --- a/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap +++ b/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap [hunks at lines 587, 1921, 1999, 3084, 3162, 5469, and 5547 update the rendered markup in the `CollapsibleNav renders links grouped by category 1`, `CollapsibleNav renders the default nav 3`, and `Header renders 1` snapshots; the HTML bodies of these hunks were lost in extraction] diff --git a/src/core/public/deprecations/deprecations_client.test.ts b/src/core/public/deprecations/deprecations_client.test.ts [file header reconstructed; the index hashes and first hunk header were lost in extraction] { path: 'some-path', method: 'POST', }, + manualSteps: ['manual-step'], }, }; @@ -104,7 +105,9 @@ describe('DeprecationsClient', () => { domainId: 'testPluginId-1', message: 'some-message', level: 'warning', - correctiveActions: {}, + correctiveActions: { + manualSteps: ['manual-step'], + }, }; const isResolvable = deprecationsClient.isDeprecationResolvable(mockDeprecationDetails); @@ -120,7 +123,9 @@ describe('DeprecationsClient', () => { domainId: 'testPluginId-1', message: 'some-message', level: 'warning', - correctiveActions: {}, + correctiveActions: { + manualSteps: ['manual-step'], + }, }; const result = await deprecationsClient.resolveDeprecation(mockDeprecationDetails); @@ -144,6 +149,7 @@ describe('DeprecationsClient', () => { extra_param: 123, }, }, + manualSteps: ['manual-step'], }, }; const result = await deprecationsClient.resolveDeprecation(mockDeprecationDetails); @@ -176,6 +182,7 @@ describe('DeprecationsClient', () => { extra_param: 123, }, }, + manualSteps: ['manual-step'], }, }; http.fetch.mockRejectedValue({ body: { message: mockResponse } }); diff --git a/src/core/public/overlays/flyout/__snapshots__/flyout_service.test.tsx.snap b/src/core/public/overlays/flyout/__snapshots__/flyout_service.test.tsx.snap index 0d10ac47d0b75c..f5a1c51ccbe158 100644 --- a/src/core/public/overlays/flyout/__snapshots__/flyout_service.test.tsx.snap +++ b/src/core/public/overlays/flyout/__snapshots__/flyout_service.test.tsx.snap 
@@ -26,7 +26,7 @@ Array [ ] `; [the HTML inside the snapshot strings below was lost in extraction; placeholders keep the visible text] -exports[`FlyoutService openFlyout() renders a flyout to the DOM 2`] = `"<flyout markup: Flyout content>"`; +exports[`FlyoutService openFlyout() renders a flyout to the DOM 2`] = `"<updated flyout markup: Flyout content>"`; exports[`FlyoutService openFlyout() with a currently active flyout replaces the current flyout with a new one 1`] = ` Array [ @@ -59,4 +59,4 @@ Array [ ] `; -exports[`FlyoutService openFlyout() with a currently active flyout replaces the current flyout with a new one 2`] = `"<flyout markup: Flyout content 2>"`; +exports[`FlyoutService openFlyout() with a currently active flyout replaces the current flyout with a new one 2`] = `"<updated flyout markup: Flyout content 2>"`; diff --git a/src/core/public/overlays/modal/__snapshots__/modal_service.test.tsx.snap b/src/core/public/overlays/modal/__snapshots__/modal_service.test.tsx.snap index 19ebb5a9113c3d..9c39776fcea5c0 100644 --- a/src/core/public/overlays/modal/__snapshots__/modal_service.test.tsx.snap +++ b/src/core/public/overlays/modal/__snapshots__/modal_service.test.tsx.snap @@ -29,7 +29,7 @@ Array [ ] `; -exports[`ModalService openConfirm() renders a mountpoint confirm message 2`] = `"<modal markup: Modal content>"`; +exports[`ModalService openConfirm() renders a mountpoint confirm message 2`] = `"<updated modal markup: Modal content>"`; exports[`ModalService openConfirm() renders a string confirm message 1`] = ` Array [ @@ -49,7 +49,7 @@ Array [ ] `; -exports[`ModalService openConfirm() renders a string confirm message 2`] = `"<modal markup: Some message>"`; +exports[`ModalService openConfirm() renders a string confirm message 2`] = `"<updated modal markup: Some message>"`; exports[`ModalService openConfirm() with a currently active confirm replaces the current confirm with the new one 1`] = ` Array [ @@ -131,7 +131,7 @@ Array [ ] `; -exports[`ModalService openModal() renders a modal to the DOM 2`] = `"<modal markup: Modal content>"`; +exports[`ModalService openModal() renders a modal to the DOM 2`] = `"<updated modal markup: Modal content>
"`; exports[`ModalService openModal() with a currently active confirm replaces the current confirm with the new one 1`] = ` Array [ diff --git a/src/core/server/config/deprecation/core_deprecations.test.ts b/src/core/server/config/deprecation/core_deprecations.test.ts index a8063c317b3c50..06c7116c8bebb0 100644 --- a/src/core/server/config/deprecation/core_deprecations.test.ts +++ b/src/core/server/config/deprecation/core_deprecations.test.ts @@ -24,7 +24,7 @@ describe('core deprecations', () => { const { messages } = applyCoreDeprecations(); expect(messages).toMatchInlineSnapshot(` Array [ - "Environment variable CONFIG_PATH is deprecated. It has been replaced with KBN_PATH_CONF pointing to a config folder", + "Environment variable \\"CONFIG_PATH\\" is deprecated. It has been replaced with \\"KBN_PATH_CONF\\" pointing to a config folder", ] `); }); @@ -405,7 +405,7 @@ describe('core deprecations', () => { }); expect(messages).toMatchInlineSnapshot(` Array [ - "\\"logging.events.log\\" has been deprecated and will be removed in 8.0. Moving forward, log levels can be customized on a per-logger basis using the new logging configuration. ", + "\\"logging.events.log\\" has been deprecated and will be removed in 8.0. Moving forward, log levels can be customized on a per-logger basis using the new logging configuration.", ] `); }); @@ -418,7 +418,7 @@ describe('core deprecations', () => { }); expect(messages).toMatchInlineSnapshot(` Array [ - "\\"logging.events.error\\" has been deprecated and will be removed in 8.0. Moving forward, you can use \\"logging.root.level: error\\" in your logging configuration. ", + "\\"logging.events.error\\" has been deprecated and will be removed in 8.0. Moving forward, you can use \\"logging.root.level: error\\" in your logging configuration.", ] `); }); diff --git a/src/core/server/config/deprecation/core_deprecations.ts b/src/core/server/config/deprecation/core_deprecations.ts index 0722bbe0a71adc..222f92321d917c 100644 --- a/src/core/server/config/deprecation/core_deprecations.ts +++ b/src/core/server/config/deprecation/core_deprecations.ts @@ -11,7 +11,10 @@ import { ConfigDeprecationProvider, ConfigDeprecation } from '@kbn/config'; const configPathDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => { if (process.env?.CONFIG_PATH) { addDeprecation({ - message: `Environment variable CONFIG_PATH is deprecated. It has been replaced with KBN_PATH_CONF pointing to a config folder`, + message: `Environment variable "CONFIG_PATH" is deprecated. It has been replaced with "KBN_PATH_CONF" pointing to a config folder`, + correctiveActions: { + manualSteps: ['Use "KBN_PATH_CONF" instead of "CONFIG_PATH" to point to a config folder.'], + }, }); } }; @@ -20,6 +23,11 @@ const dataPathDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecati if (process.env?.DATA_PATH) { addDeprecation({ message: `Environment variable "DATA_PATH" will be removed. It has been replaced with kibana.yml setting "path.data"`, + correctiveActions: { + manualSteps: [ + `Set 'path.data' in the config file or CLI flag with the value of the environment variable "DATA_PATH".`, + ], + }, }); } }; @@ -32,6 +40,12 @@ const rewriteBasePathDeprecation: ConfigDeprecation = (settings, fromPath, addDe 'will expect that all requests start with server.basePath rather than expecting you to rewrite ' + 'the requests in your reverse proxy. 
Set server.rewriteBasePath to false to preserve the ' + 'current behavior and silence this warning.', + correctiveActions: { + manualSteps: [ + `Set 'server.rewriteBasePath' in the config file, CLI flag, or environment variable (in Docker only).`, + `Set to false to preserve the current behavior and silence this warning.`, + ], + }, }); } }; @@ -41,6 +55,11 @@ const rewriteCorsSettings: ConfigDeprecation = (settings, fromPath, addDeprecati if (typeof corsSettings === 'boolean') { addDeprecation({ message: '"server.cors" is deprecated and has been replaced by "server.cors.enabled"', + correctiveActions: { + manualSteps: [ + `Replace "server.cors: ${corsSettings}" with "server.cors.enabled: ${corsSettings}"`, + ], + }, }); return { @@ -72,6 +91,9 @@ const cspRulesDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecati if (sourceList.find((source) => source.includes(NONCE_STRING))) { addDeprecation({ message: `csp.rules no longer supports the {nonce} syntax. Replacing with 'self' in ${policy}`, + correctiveActions: { + manualSteps: [`Replace {nonce} syntax with 'self' in ${policy}`], + }, }); sourceList = sourceList.filter((source) => !source.includes(NONCE_STRING)); @@ -87,6 +109,9 @@ const cspRulesDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecati ) { addDeprecation({ message: `csp.rules must contain the 'self' source. Automatically adding to ${policy}.`, + correctiveActions: { + manualSteps: [`Add 'self' source to ${policy}.`], + }, }); sourceList.push(SELF_STRING); } @@ -111,6 +136,12 @@ const mapManifestServiceUrlDeprecation: ConfigDeprecation = ( 'of the Elastic Maps Service settings. These settings have moved to the "map.emsTileApiUrl" and ' + '"map.emsFileApiUrl" settings instead. These settings are for development use only and should not be ' + 'modified for use in production environments.', + correctiveActions: { + manualSteps: [ + `Use "map.emsTileApiUrl" and "map.emsFileApiUrl" config instead of "map.manifestServiceUrl".`, + `These settings are for development use only and should not be modified for use in production environments.`, + ], + }, }); } }; @@ -125,12 +156,28 @@ const opsLoggingEventDeprecation: ConfigDeprecation = (settings, fromPath, addDe 'in 8.0. To access ops data moving forward, please enable debug logs for the ' + '"metrics.ops" context in your logging configuration. For more details, see ' + 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx', + correctiveActions: { + manualSteps: [ + `Remove "logging.events.ops" from your kibana settings.`, + `Enable debug logs for the "metrics.ops" context in your logging configuration`, + ], + }, }); } }; const requestLoggingEventDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecation) => { if (settings.logging?.events?.request || settings.logging?.events?.response) { + const removeConfigsSteps = []; + + if (settings.logging?.events?.request) { + removeConfigsSteps.push(`Remove "logging.events.request" from your kibana configs.`); + } + + if (settings.logging?.events?.response) { + removeConfigsSteps.push(`Remove "logging.events.response" from your kibana configs.`); + } + addDeprecation({ documentationUrl: 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingevents', @@ -139,6 +186,12 @@ const requestLoggingEventDeprecation: ConfigDeprecation = (settings, fromPath, a 'in 8.0. 
To access request and/or response data moving forward, please enable debug logs for the ' + '"http.server.response" context in your logging configuration. For more details, see ' + 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx', + correctiveActions: { + manualSteps: [ + ...removeConfigsSteps, + `enable debug logs for the "http.server.response" context in your logging configuration.`, + ], + }, }); } }; @@ -153,6 +206,12 @@ const timezoneLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDe 'in 8.0. To set the timezone moving forward, please add a timezone date modifier to the log pattern ' + 'in your logging configuration. For more details, see ' + 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx', + correctiveActions: { + manualSteps: [ + `Remove "logging.timezone" from your kibana configs.`, + `To set the timezone add a timezone date modifier to the log pattern in your logging configuration.`, + ], + }, }); } }; @@ -167,6 +226,12 @@ const destLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDeprec 'in 8.0. To set the destination moving forward, you can use the "console" appender ' + 'in your logging configuration or define a custom one. For more details, see ' + 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx', + correctiveActions: { + manualSteps: [ + `Remove "logging.dest" from your kibana configs.`, + `To set the destination use the "console" appender in your logging configuration or define a custom one.`, + ], + }, }); } }; @@ -179,6 +244,12 @@ const quietLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDepre message: '"logging.quiet" has been deprecated and will be removed ' + 'in 8.0. Moving forward, you can use "logging.root.level:error" in your logging configuration. ', + correctiveActions: { + manualSteps: [ + `Remove "logging.quiet" from your kibana configs.`, + `Use "logging.root.level:error" in your logging configuration.`, + ], + }, }); } }; @@ -191,6 +262,12 @@ const silentLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDepr message: '"logging.silent" has been deprecated and will be removed ' + 'in 8.0. Moving forward, you can use "logging.root.level:off" in your logging configuration. ', + correctiveActions: { + manualSteps: [ + `Remove "logging.silent" from your kibana configs.`, + `Use "logging.root.level:off" in your logging configuration.`, + ], + }, }); } }; @@ -203,6 +280,12 @@ const verboseLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDep message: '"logging.verbose" has been deprecated and will be removed ' + 'in 8.0. Moving forward, you can use "logging.root.level:all" in your logging configuration. ', + correctiveActions: { + manualSteps: [ + `Remove "logging.verbose" from your kibana configs.`, + `Use "logging.root.level:all" in your logging configuration.`, + ], + }, }); } }; @@ -223,6 +306,12 @@ const jsonLoggingDeprecation: ConfigDeprecation = (settings, fromPath, addDeprec 'There is currently no default layout for custom appenders and each one must be declared explicitly. 
' + 'For more details, see ' + 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx', + correctiveActions: { + manualSteps: [ + `Remove "logging.json" from your kibana configs.`, + `Configure the "appender.layout" property for every custom appender in your logging configuration.`, + ], + }, }); } }; @@ -237,6 +326,12 @@ const logRotateDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecat 'Moving forward, you can enable log rotation using the "rolling-file" appender for a logger ' + 'in your logging configuration. For more details, see ' + 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#rolling-file-appender', + correctiveActions: { + manualSteps: [ + `Remove "logging.rotate" from your kibana configs.`, + `Enable log rotation using the "rolling-file" appender for a logger in your logging configuration.`, + ], + }, }); } }; @@ -248,7 +343,13 @@ const logEventsLogDeprecation: ConfigDeprecation = (settings, fromPath, addDepre 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingevents', message: '"logging.events.log" has been deprecated and will be removed ' + - 'in 8.0. Moving forward, log levels can be customized on a per-logger basis using the new logging configuration. ', + 'in 8.0. Moving forward, log levels can be customized on a per-logger basis using the new logging configuration.', + correctiveActions: { + manualSteps: [ + `Remove "logging.events.log" from your kibana configs.`, + `Customize log levels on a per-logger basis using the new logging configuration.`, + ], + }, }); } }; @@ -260,7 +361,13 @@ const logEventsErrorDeprecation: ConfigDeprecation = (settings, fromPath, addDep 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingevents', message: '"logging.events.error" has been deprecated and will be removed ' + - 'in 8.0. Moving forward, you can use "logging.root.level: error" in your logging configuration. ', + 'in 8.0. Moving forward, you can use "logging.root.level: error" in your logging configuration.', + correctiveActions: { + manualSteps: [ + `Remove "logging.events.error" from your kibana configs.`, + `Use "logging.root.level: error" in your logging configuration.`, + ], + }, }); } }; @@ -271,6 +378,9 @@ const logFilterDeprecation: ConfigDeprecation = (settings, fromPath, addDeprecat documentationUrl: 'https://github.com/elastic/kibana/blob/master/src/core/server/logging/README.mdx#loggingfilter', message: '"logging.filter" has been deprecated and will be removed in 8.0.', + correctiveActions: { + manualSteps: [`Remove "logging.filter" from your kibana configs.`], + }, }); } }; diff --git a/src/core/server/deprecations/deprecations_factory.mock.ts b/src/core/server/deprecations/deprecations_factory.mock.ts new file mode 100644 index 00000000000000..91ae4e6fa9af93 --- /dev/null +++ b/src/core/server/deprecations/deprecations_factory.mock.ts @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +import type { PublicMethodsOf } from '@kbn/utility-types'; +import type { DeprecationsFactory } from './deprecations_factory'; +type DeprecationsFactoryContract = PublicMethodsOf<DeprecationsFactory>; + +const createDeprecationsFactoryMock = () => { + const mocked: jest.Mocked<DeprecationsFactoryContract> = { + getRegistry: jest.fn(), + getDeprecations: jest.fn(), + getAllDeprecations: jest.fn(), + }; + + mocked.getDeprecations.mockResolvedValue([]); + mocked.getAllDeprecations.mockResolvedValue([]); + return mocked as jest.Mocked<DeprecationsFactoryContract>; +}; + +export const mockDeprecationsFactory = { + create: createDeprecationsFactoryMock, +}; diff --git a/src/core/server/deprecations/deprecations_factory.test.ts b/src/core/server/deprecations/deprecations_factory.test.ts index 469451b0020c02..187f3880f9998a 100644 --- a/src/core/server/deprecations/deprecations_factory.test.ts +++ b/src/core/server/deprecations/deprecations_factory.test.ts @@ -6,7 +6,7 @@ * Side Public License, v 1. */ -import { GetDeprecationsContext } from './types'; +import type { GetDeprecationsContext } from './types'; import { DeprecationsFactory } from './deprecations_factory'; import { loggerMock } from '../logging/logger.mock'; diff --git a/src/core/server/deprecations/deprecations_registry.mock.ts b/src/core/server/deprecations/deprecations_registry.mock.ts new file mode 100644 index 00000000000000..bb178c3935cdcf --- /dev/null +++ b/src/core/server/deprecations/deprecations_registry.mock.ts @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +import type { PublicMethodsOf } from '@kbn/utility-types'; +import type { DeprecationsRegistry } from './deprecations_registry'; +import type { GetDeprecationsContext } from './types'; +import { elasticsearchClientMock } from '../elasticsearch/client/mocks'; +import { savedObjectsClientMock } from '../saved_objects/service/saved_objects_client.mock'; +type DeprecationsRegistryContract = PublicMethodsOf<DeprecationsRegistry>; + +const createDeprecationsRegistryMock = () => { + const mocked: jest.Mocked<DeprecationsRegistryContract> = { + registerDeprecations: jest.fn(), + getDeprecations: jest.fn(), + }; + + return mocked as jest.Mocked<DeprecationsRegistryContract>; +}; + +const createGetDeprecationsContextMock = () => { + const mocked: jest.Mocked<GetDeprecationsContext> = { + esClient: elasticsearchClientMock.createScopedClusterClient(), + savedObjectsClient: savedObjectsClientMock.create(), + }; + + return mocked; +}; + +export const mockDeprecationsRegistry = { + create: createDeprecationsRegistryMock, + createGetDeprecationsContext: createGetDeprecationsContextMock, +}; diff --git a/src/core/server/deprecations/deprecations_registry.test.ts b/src/core/server/deprecations/deprecations_registry.test.ts index 507677a5318618..82b09beaa5123e 100644 --- a/src/core/server/deprecations/deprecations_registry.test.ts +++ b/src/core/server/deprecations/deprecations_registry.test.ts @@ -7,7 +7,7 @@ */ /* eslint-disable dot-notation */ -import { RegisterDeprecationsConfig, GetDeprecationsContext } from './types'; +import type { RegisterDeprecationsConfig, GetDeprecationsContext } from './types'; import { DeprecationsRegistry } from './deprecations_registry'; describe('DeprecationsRegistry', () => { diff --git a/src/core/server/deprecations/deprecations_registry.ts b/src/core/server/deprecations/deprecations_registry.ts index f92d807514b821..cc05473923ac88 100644 --- a/src/core/server/deprecations/deprecations_registry.ts +++ b/src/core/server/deprecations/deprecations_registry.ts @@ -6,7 +6,11 @@ * Side Public License, v 1. */ -import { DeprecationsDetails, RegisterDeprecationsConfig, GetDeprecationsContext } from './types'; +import type { + DeprecationsDetails, + RegisterDeprecationsConfig, + GetDeprecationsContext, +} from './types'; export class DeprecationsRegistry { private readonly deprecationContexts: RegisterDeprecationsConfig[] = [];
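// A sketch of how these two mocks are meant to be combined in a unit test (assumed
// usage, not code from this change):
//
//   const registry = mockDeprecationsRegistry.create();
//   const context = mockDeprecationsRegistry.createGetDeprecationsContext();
//   registry.registerDeprecations({ getDeprecations: async () => [] });
//   expect(registry.registerDeprecations).toHaveBeenCalledTimes(1);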
diff --git a/src/core/server/deprecations/deprecations_service.test.ts b/src/core/server/deprecations/deprecations_service.test.ts new file mode 100644 index 00000000000000..d1ed7a83402cb3 --- /dev/null +++ b/src/core/server/deprecations/deprecations_service.test.ts @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +/* eslint-disable dot-notation */ +import { DeprecationsService } from './deprecations_service'; +import { httpServiceMock } from '../http/http_service.mock'; +import { mockRouter } from '../http/router/router.mock'; +import { mockCoreContext } from '../core_context.mock'; +import { mockDeprecationsFactory } from './deprecations_factory.mock'; +import { mockDeprecationsRegistry } from './deprecations_registry.mock'; + +describe('DeprecationsService', () => { + const coreContext = mockCoreContext.create(); + beforeEach(() => jest.clearAllMocks()); + + describe('#setup', () => { + const http = httpServiceMock.createInternalSetupContract(); + const router = mockRouter.create(); + http.createRouter.mockReturnValue(router); + const deprecationsCoreSetupDeps = { http }; + + it('registers routes', () => { + const deprecationsService = new DeprecationsService(coreContext); + deprecationsService.setup(deprecationsCoreSetupDeps); + // Registers correct base api path + expect(http.createRouter).toBeCalledWith('/api/deprecations'); + // registers get route '/' + expect(router.get).toHaveBeenCalledTimes(1); + expect(router.get).toHaveBeenCalledWith({ path: '/', validate: false }, expect.any(Function)); + }); + + it('calls registerConfigDeprecationsInfo', () => { + const deprecationsService = new DeprecationsService(coreContext); + const mockRegisterConfigDeprecationsInfo = jest.fn(); + deprecationsService['registerConfigDeprecationsInfo'] = mockRegisterConfigDeprecationsInfo; + deprecationsService.setup(deprecationsCoreSetupDeps); + expect(mockRegisterConfigDeprecationsInfo).toBeCalledTimes(1); + }); + }); + + describe('#registerConfigDeprecationsInfo', () => { + const deprecationsFactory = mockDeprecationsFactory.create(); + const deprecationsRegistry = mockDeprecationsRegistry.create(); + const getDeprecationsContext = mockDeprecationsRegistry.createGetDeprecationsContext(); + + it('registers config deprecations', () => { + const deprecationsService = new DeprecationsService(coreContext); + coreContext.configService.getHandledDeprecatedConfigs.mockReturnValue([ + [ + 'testDomain', + [ + { + message: 'testMessage', + documentationUrl: 'testDocUrl', + correctiveActions: { + manualSteps: [ + 'Using Kibana user management, change all users using the kibana_user role to the kibana_admin role.', + 'Using Kibana role-mapping management, change all role-mappings which assign the kibana_user role to the kibana_admin role.', + ], + }, + }, + ], + ], + ]); + + deprecationsFactory.getRegistry.mockReturnValue(deprecationsRegistry); + deprecationsService['registerConfigDeprecationsInfo'](deprecationsFactory); + + expect(coreContext.configService.getHandledDeprecatedConfigs).toBeCalledTimes(1); + expect(deprecationsFactory.getRegistry).toBeCalledTimes(1); + expect(deprecationsFactory.getRegistry).toBeCalledWith('testDomain'); + expect(deprecationsRegistry.registerDeprecations).toBeCalledTimes(1); + const configDeprecations = deprecationsRegistry.registerDeprecations.mock.calls[0][0].getDeprecations( + getDeprecationsContext + ); + expect(configDeprecations).toMatchInlineSnapshot(` + Array [ + Object { + "correctiveActions": Object { + "manualSteps": Array [ + "Using Kibana user management, change all users using the kibana_user role to the kibana_admin role.", + "Using Kibana role-mapping management, change all role-mappings which assign the kibana_user role to the kibana_admin role.", + ], + }, + "deprecationType": "config", + "documentationUrl": "testDocUrl", + "level": "critical", + "message":
"testMessage", + }, + ] + `); + }); + }); +}); diff --git a/src/core/server/deprecations/deprecations_service.ts b/src/core/server/deprecations/deprecations_service.ts index 8eca1ba5790c55..ede7f859ffd0d4 100644 --- a/src/core/server/deprecations/deprecations_service.ts +++ b/src/core/server/deprecations/deprecations_service.ts @@ -11,8 +11,6 @@ import { RegisterDeprecationsConfig } from './types'; import { registerRoutes } from './routes'; import { CoreContext } from '../core_context'; -import { CoreUsageDataSetup } from '../core_usage_data'; -import { InternalElasticsearchServiceSetup } from '../elasticsearch'; import { CoreService } from '../../types'; import { InternalHttpServiceSetup } from '../http'; import { Logger } from '../logging'; @@ -112,15 +110,13 @@ export interface InternalDeprecationsServiceSetup { /** @internal */ export interface DeprecationsSetupDeps { http: InternalHttpServiceSetup; - elasticsearch: InternalElasticsearchServiceSetup; - coreUsageData: CoreUsageDataSetup; } /** @internal */ export class DeprecationsService implements CoreService { private readonly logger: Logger; - constructor(private readonly coreContext: CoreContext) { + constructor(private readonly coreContext: Pick) { this.logger = coreContext.logger.get('deprecations-service'); } @@ -156,8 +152,9 @@ export class DeprecationsService implements CoreService { return { level: 'critical', + deprecationType: 'config', message, - correctiveActions: correctiveActions ?? {}, + correctiveActions, documentationUrl, }; }); diff --git a/src/core/server/deprecations/types.ts b/src/core/server/deprecations/types.ts index 31734b51b46bd2..4c4f84f313102c 100644 --- a/src/core/server/deprecations/types.ts +++ b/src/core/server/deprecations/types.ts @@ -25,6 +25,16 @@ export interface DeprecationsDetails { * - fetch_error: Deprecations service failed to grab the deprecation details for the domain. */ level: 'warning' | 'critical' | 'fetch_error'; + /** + * (optional) Used to identify between different deprecation types. + * Example use case: in Upgrade Assistant, we may want to allow the user to sort by + * deprecation type or show each type in a separate tab. + * + * Feel free to add new types if necessary. + * Predefined types are necessary to reduce having similar definitions with different keywords + * across kibana deprecations. + */ + deprecationType?: 'config' | 'feature'; /* (optional) link to the documentation for more details on the deprecation. */ documentationUrl?: string; /* corrective action needed to fix this deprecation. */ @@ -45,11 +55,11 @@ export interface DeprecationsDetails { }; }; /** - * (optional) If this deprecation cannot be automtically fixed - * via an API corrective action. Specify a list of manual steps - * users need to follow to fix the deprecation before upgrade. + * Specify a list of manual steps users need to follow to + * fix the deprecation before upgrade. Required even if an API + * corrective action is set in case the API fails. */ - manualSteps?: string[]; + manualSteps: string[]; }; } diff --git a/src/core/server/elasticsearch/elasticsearch_config.ts b/src/core/server/elasticsearch/elasticsearch_config.ts index b5ea0ec8c34563..b2b25cda3ac2a5 100644 --- a/src/core/server/elasticsearch/elasticsearch_config.ts +++ b/src/core/server/elasticsearch/elasticsearch_config.ts @@ -152,23 +152,45 @@ const deprecations: ConfigDeprecationProvider = () => [ if (es.username === 'elastic') { addDeprecation({ message: `Setting [${fromPath}.username] to "elastic" is deprecated. 
diff --git a/src/core/server/elasticsearch/elasticsearch_config.ts b/src/core/server/elasticsearch/elasticsearch_config.ts index b5ea0ec8c34563..b2b25cda3ac2a5 100644 --- a/src/core/server/elasticsearch/elasticsearch_config.ts +++ b/src/core/server/elasticsearch/elasticsearch_config.ts @@ -152,23 +152,45 @@ const deprecations: ConfigDeprecationProvider = () => [ if (es.username === 'elastic') { addDeprecation({ message: `Setting [${fromPath}.username] to "elastic" is deprecated. You should use the "kibana_system" user instead.`, + correctiveActions: { + manualSteps: [`Change [${fromPath}.username] from "elastic" to "kibana_system".`], + }, }); } else if (es.username === 'kibana') { addDeprecation({ message: `Setting [${fromPath}.username] to "kibana" is deprecated. You should use the "kibana_system" user instead.`, + correctiveActions: { + manualSteps: [`Change [${fromPath}.username] from "kibana" to "kibana_system".`], + }, }); } if (es.ssl?.key !== undefined && es.ssl?.certificate === undefined) { addDeprecation({ message: `Setting [${fromPath}.ssl.key] without [${fromPath}.ssl.certificate] is deprecated. This has no effect, you should use both settings to enable TLS client authentication to Elasticsearch.`, + correctiveActions: { + manualSteps: [ + `Set [${fromPath}.ssl.certificate] in your kibana configs to enable TLS client authentication to Elasticsearch.`, + ], + }, }); } else if (es.ssl?.certificate !== undefined && es.ssl?.key === undefined) { addDeprecation({ message: `Setting [${fromPath}.ssl.certificate] without [${fromPath}.ssl.key] is deprecated. This has no effect, you should use both settings to enable TLS client authentication to Elasticsearch.`, + correctiveActions: { + manualSteps: [ + `Set [${fromPath}.ssl.key] in your kibana configs to enable TLS client authentication to Elasticsearch.`, + ], + }, }); } else if (es.logQueries === true) { addDeprecation({ message: `Setting [${fromPath}.logQueries] is deprecated and no longer used. You should set the log level to "debug" for the "elasticsearch.queries" context in "logging.loggers" or use "logging.verbose: true".`, + correctiveActions: { + manualSteps: [ + `Remove the setting [${fromPath}.logQueries] from your kibana configs.`, + `Set the log level to "debug" for the "elasticsearch.queries" context in "logging.loggers".`, + ], + }, }); } return; diff --git a/src/core/server/kibana_config.test.ts b/src/core/server/kibana_config.test.ts index 47bb6cf2a064c8..72ddb3b65081b5 100644 --- a/src/core/server/kibana_config.test.ts +++ b/src/core/server/kibana_config.test.ts @@ -22,8 +22,6 @@ it('set correct defaults ', () => { const configValue = config.schema.validate({}); expect(configValue).toMatchInlineSnapshot(` Object { - "autocompleteTerminateAfter": "PT1M40S", - "autocompleteTimeout": "PT1S", "enabled": true, "index": ".kibana", } diff --git a/src/core/server/kibana_config.ts b/src/core/server/kibana_config.ts index 848c51dcb69f36..77ee3197b988db 100644 --- a/src/core/server/kibana_config.ts +++ b/src/core/server/kibana_config.ts @@ -18,6 +18,12 @@ const deprecations: ConfigDeprecationProvider = () => [ addDeprecation({ message: `"kibana.index" is deprecated. Multitenancy by changing "kibana.index" will not be supported starting in 8.0.
See https://ela.st/kbn-remove-legacy-multitenancy for more details`, documentationUrl: 'https://ela.st/kbn-remove-legacy-multitenancy', + correctiveActions: { + manualSteps: [ + `If you rely on this setting to achieve multitenancy, you should use Spaces, cross-cluster replication, or cross-cluster search instead.`, + `To migrate to Spaces, we encourage using saved object management to export your saved objects from a tenant into the default tenant in a space.`, + ], + }, }); } return settings; @@ -29,12 +35,6 @@ export const config = { schema: schema.object({ enabled: schema.boolean({ defaultValue: true }), index: schema.string({ defaultValue: '.kibana' }), - autocompleteTerminateAfter: schema.duration({ defaultValue: 100000 }), - autocompleteTimeout: schema.duration({ defaultValue: 1000 }), }), deprecations, - exposeToUsage: { - autocompleteTerminateAfter: true, - autocompleteTimeout: true, - }, }; diff --git a/src/core/server/mocks.ts b/src/core/server/mocks.ts index 6f1b9dc5bf8203..0d52ff64499c1c 100644 --- a/src/core/server/mocks.ts +++ b/src/core/server/mocks.ts @@ -58,8 +58,6 @@ export function pluginInitializerContextConfigMock<T>(config: T) { const globalConfig: SharedGlobalConfig = { kibana: { index: '.kibana-tests', - autocompleteTerminateAfter: duration(100000), - autocompleteTimeout: duration(1000), }, elasticsearch: { shardTimeout: duration('30s'), diff --git a/src/core/server/plugins/legacy_config.test.ts b/src/core/server/plugins/legacy_config.test.ts index 0ea26f2e0333e0..2a980e38a4e191 100644 --- a/src/core/server/plugins/legacy_config.test.ts +++ b/src/core/server/plugins/legacy_config.test.ts @@ -43,8 +43,6 @@ describe('Legacy config', () => { expect(legacyConfig).toStrictEqual({ kibana: { index: '.kibana', - autocompleteTerminateAfter: duration(100000), - autocompleteTimeout: duration(1000), }, elasticsearch: { shardTimeout: duration(30, 's'), @@ -66,8 +64,6 @@ describe('Legacy config', () => { expect(legacyConfig).toStrictEqual({ kibana: { index: '.kibana', - autocompleteTerminateAfter: duration(100000), - autocompleteTimeout: duration(1000), }, elasticsearch: { shardTimeout: duration(30, 's'), diff --git a/src/core/server/plugins/plugin_context.test.ts b/src/core/server/plugins/plugin_context.test.ts index e37d985d423212..4ba34ccb2149f0 100644 --- a/src/core/server/plugins/plugin_context.test.ts +++ b/src/core/server/plugins/plugin_context.test.ts @@ -115,8 +115,6 @@ describe('createPluginInitializerContext', () => { expect(configObject).toStrictEqual({ kibana: { index: '.kibana', - autocompleteTerminateAfter: duration(100000), - autocompleteTimeout: duration(1000), }, elasticsearch: { shardTimeout: duration(30, 's'), diff --git a/src/core/server/plugins/types.ts b/src/core/server/plugins/types.ts index 2b5acd8ed5e940..6b50b5e820665e 100644 --- a/src/core/server/plugins/types.ts +++ b/src/core/server/plugins/types.ts @@ -313,7 +313,7 @@ export interface AsyncPlugin< export const SharedGlobalConfigKeys = { // We can add more if really needed - kibana: ['index', 'autocompleteTerminateAfter', 'autocompleteTimeout'] as const, + kibana: ['index'] as const, elasticsearch: ['shardTimeout', 'requestTimeout', 'pingTimeout'] as const, path: ['data'] as const, savedObjects: ['maxImportPayloadBytes'] as const, diff --git a/src/core/server/saved_objects/migrationsv2/actions/index.test.ts b/src/core/server/saved_objects/migrationsv2/actions/index.test.ts index df74a4e1282e43..05da335d708840 100644 --- a/src/core/server/saved_objects/migrationsv2/actions/index.test.ts +++ 
b/src/core/server/saved_objects/migrationsv2/actions/index.test.ts @@ -37,7 +37,7 @@ describe('actions', () => { describe('fetchIndices', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.fetchIndices(client, ['my_index']); + const task = Actions.fetchIndices({ client, indices: ['my_index'] }); try { await task(); } catch (e) { @@ -49,7 +49,7 @@ describe('actions', () => { describe('setWriteBlock', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.setWriteBlock(client, 'my_index'); + const task = Actions.setWriteBlock({ client, index: 'my_index' }); try { await task(); } catch (e) { @@ -58,7 +58,10 @@ describe('actions', () => { expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(retryableError); }); it('re-throws non retry-able errors', async () => { - const task = Actions.setWriteBlock(clientWithNonRetryableError, 'my_index'); + const task = Actions.setWriteBlock({ + client: clientWithNonRetryableError, + index: 'my_index', + }); await task(); expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(nonRetryableError); }); @@ -66,7 +69,11 @@ describe('actions', () => { describe('cloneIndex', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.cloneIndex(client, 'my_source_index', 'my_target_index'); + const task = Actions.cloneIndex({ + client, + source: 'my_source_index', + target: 'my_target_index', + }); try { await task(); } catch (e) { @@ -75,7 +82,10 @@ describe('actions', () => { expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(retryableError); }); it('re-throws non retry-able errors', async () => { - const task = Actions.setWriteBlock(clientWithNonRetryableError, 'my_index'); + const task = Actions.setWriteBlock({ + client: clientWithNonRetryableError, + index: 'my_index', + }); await task(); expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(nonRetryableError); }); @@ -95,7 +105,7 @@ describe('actions', () => { describe('openPit', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.openPit(client, 'my_index'); + const task = Actions.openPit({ client, index: 'my_index' }); try { await task(); } catch (e) { @@ -107,7 +117,12 @@ describe('actions', () => { describe('readWithPit', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.readWithPit(client, 'pitId', { match_all: {} }, 10_000); + const task = Actions.readWithPit({ + client, + pitId: 'pitId', + query: { match_all: {} }, + batchSize: 10_000, + }); try { await task(); } catch (e) { @@ -119,7 +134,7 @@ describe('actions', () => { describe('closePit', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.closePit(client, 'pitId'); + const task = Actions.closePit({ client, pitId: 'pitId' }); try { await task(); } catch (e) { @@ -131,14 +146,14 @@ describe('actions', () => { describe('reindex', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.reindex( + const task = Actions.reindex({ client, - 'my_source_index', - 'my_target_index', - Option.none, - false, - {} - ); + sourceIndex: 'my_source_index', + targetIndex: 'my_target_index', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: {}, + }); try { await task(); } catch (e) { @@ -150,7 +165,7 @@ describe('actions', () => { 
describe('waitForReindexTask', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.waitForReindexTask(client, 'my task id', '60s'); + const task = Actions.waitForReindexTask({ client, taskId: 'my task id', timeout: '60s' }); try { await task(); } catch (e) { @@ -160,7 +175,10 @@ describe('actions', () => { expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(retryableError); }); it('re-throws non retry-able errors', async () => { - const task = Actions.setWriteBlock(clientWithNonRetryableError, 'my_index'); + const task = Actions.setWriteBlock({ + client: clientWithNonRetryableError, + index: 'my_index', + }); await task(); expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(nonRetryableError); }); @@ -168,7 +186,11 @@ describe('actions', () => { describe('waitForPickupUpdatedMappingsTask', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.waitForPickupUpdatedMappingsTask(client, 'my task id', '60s'); + const task = Actions.waitForPickupUpdatedMappingsTask({ + client, + taskId: 'my task id', + timeout: '60s', + }); try { await task(); } catch (e) { @@ -178,7 +200,10 @@ describe('actions', () => { expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(retryableError); }); it('re-throws non retry-able errors', async () => { - const task = Actions.setWriteBlock(clientWithNonRetryableError, 'my_index'); + const task = Actions.setWriteBlock({ + client: clientWithNonRetryableError, + index: 'my_index', + }); await task(); expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(nonRetryableError); }); @@ -186,7 +211,7 @@ describe('actions', () => { describe('updateAliases', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.updateAliases(client, []); + const task = Actions.updateAliases({ client, aliasActions: [] }); try { await task(); } catch (e) { @@ -196,7 +221,10 @@ describe('actions', () => { expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(retryableError); }); it('re-throws non retry-able errors', async () => { - const task = Actions.setWriteBlock(clientWithNonRetryableError, 'my_index'); + const task = Actions.setWriteBlock({ + client: clientWithNonRetryableError, + index: 'my_index', + }); await task(); expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(nonRetryableError); }); @@ -204,7 +232,11 @@ describe('actions', () => { describe('createIndex', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.createIndex(client, 'new_index', { properties: {} }); + const task = Actions.createIndex({ + client, + indexName: 'new_index', + mappings: { properties: {} }, + }); try { await task(); } catch (e) { @@ -214,7 +246,10 @@ describe('actions', () => { expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(retryableError); }); it('re-throws non retry-able errors', async () => { - const task = Actions.setWriteBlock(clientWithNonRetryableError, 'my_index'); + const task = Actions.setWriteBlock({ + client: clientWithNonRetryableError, + index: 'my_index', + }); await task(); expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(nonRetryableError); }); @@ -222,7 +257,11 @@ describe('actions', () => { describe('updateAndPickupMappings', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.updateAndPickupMappings(client, 'new_index', { properties: {} }); + const task = 
Actions.updateAndPickupMappings({ + client, + index: 'new_index', + mappings: { properties: {} }, + }); try { await task(); } catch (e) { @@ -276,7 +315,12 @@ describe('actions', () => { describe('bulkOverwriteTransformedDocuments', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.bulkOverwriteTransformedDocuments(client, 'new_index', [], 'wait_for'); + const task = Actions.bulkOverwriteTransformedDocuments({ + client, + index: 'new_index', + transformedDocs: [], + refresh: 'wait_for', + }); try { await task(); } catch (e) { @@ -289,7 +333,7 @@ describe('actions', () => { describe('refreshIndex', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.refreshIndex(client, 'target_index'); + const task = Actions.refreshIndex({ client, targetIndex: 'target_index' }); try { await task(); } catch (e) { diff --git a/src/core/server/saved_objects/migrationsv2/actions/index.ts b/src/core/server/saved_objects/migrationsv2/actions/index.ts index c2e0476960c3b3..905d64947298ec 100644 --- a/src/core/server/saved_objects/migrationsv2/actions/index.ts +++ b/src/core/server/saved_objects/migrationsv2/actions/index.ts @@ -68,20 +68,26 @@ export type FetchIndexResponse = Record< string, { aliases: Record<string, unknown>; mappings: IndexMapping; settings: unknown } >; +/** @internal */ +export interface FetchIndicesParams { + client: ElasticsearchClient; + indices: string[]; +} + /** * Fetches information about the given indices including aliases, mappings and * settings. */ -export const fetchIndices = ( - client: ElasticsearchClient, - indicesToFetch: string[] -): TaskEither.TaskEither<RetryableEsClientError, FetchIndexResponse> => +export const fetchIndices = ({ + client, + indices, +}: FetchIndicesParams): TaskEither.TaskEither<RetryableEsClientError, FetchIndexResponse> => // @ts-expect-error @elastic/elasticsearch IndexState.alias and IndexState.mappings should be required () => { return client.indices .get( { - index: indicesToFetch, + index: indices, ignore_unavailable: true, // Don't return an error for missing indices. Note this *will* include closed indices, the docs are misleading https://github.com/elastic/elasticsearch/issues/63607 }, { ignore: [404], maxRetries: 0 } @@ -96,6 +102,12 @@ export interface IndexNotFound { type: 'index_not_found_exception'; index: string; } + +/** @internal */ +export interface SetWriteBlockParams { + client: ElasticsearchClient; + index: string; +}
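// The params-object refactor in this file keeps every action curried: calling the
// action returns a TaskEither thunk, and awaiting the thunk yields an Either. A
// usage sketch (index name invented; Either/TaskEither come from fp-ts as elsewhere
// in this file):
//
//   const task = fetchIndices({ client, indices: ['.kibana_1'] });
//   const result = await task();
//   if (Either.isRight(result)) {
//     // result.right is the FetchIndexResponse record, keyed by index name
//   }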
/** * Sets a write block in place for the given index. If the response includes * `acknowledged: true` all in-progress writes have drained and no further @@ -105,10 +117,10 @@ export interface IndexNotFound { * include `shards_acknowledged: true` but once the block is in place, * subsequent calls return `shards_acknowledged: false` */ -export const setWriteBlock = ( - client: ElasticsearchClient, - index: string -): TaskEither.TaskEither< +export const setWriteBlock = ({ + client, + index, +}: SetWriteBlockParams): TaskEither.TaskEither< IndexNotFound | RetryableEsClientError, 'set_write_block_succeeded' > => () => { @@ -145,13 +157,21 @@ export const setWriteBlock = ( ); }; +/** @internal */ +export interface RemoveWriteBlockParams { + client: ElasticsearchClient; + index: string; +} /** * Removes a write block from an index */ -export const removeWriteBlock = ( - client: ElasticsearchClient, - index: string -): TaskEither.TaskEither<RetryableEsClientError, 'remove_write_block_succeeded'> => () => { +export const removeWriteBlock = ({ + client, + index, +}: RemoveWriteBlockParams): TaskEither.TaskEither< + RetryableEsClientError, + 'remove_write_block_succeeded' +> => () => { return client.indices .putSettings<{ acknowledged: boolean; @@ -182,6 +202,12 @@ export const removeWriteBlock = ( .catch(catchRetryableEsClientErrors); }; +/** @internal */ +export interface WaitForIndexStatusYellowParams { + client: ElasticsearchClient; + index: string; + timeout?: string; +} /** * A yellow index status means the index's primary shard is allocated and the * index is ready for searching/indexing documents, but ES wasn't able to * @@ -193,11 +219,11 @@ export const removeWriteBlock = ( * yellow at any point in the future. So ultimately data-redundancy is up to * users to maintain. */ -export const waitForIndexStatusYellow = ( - client: ElasticsearchClient, - index: string, - timeout = DEFAULT_TIMEOUT -): TaskEither.TaskEither<RetryableEsClientError, {}> => () => { +export const waitForIndexStatusYellow = ({ + client, + index, + timeout = DEFAULT_TIMEOUT, +}: WaitForIndexStatusYellowParams): TaskEither.TaskEither<RetryableEsClientError, {}> => () => { return client.cluster .health({ index, wait_for_status: 'yellow', timeout }) .then(() => { @@ -208,6 +234,14 @@ export const removeWriteBlock = ( export type CloneIndexResponse = AcknowledgeResponse; +/** @internal */ +export interface CloneIndexParams { + client: ElasticsearchClient; + source: string; + target: string; + /** only used for testing */ + timeout?: string; +} /** * Makes a clone of the source index into the target. * * - the first call will wait up to 120s for the cluster state and all shards * to be updated. */ -export const cloneIndex = ( - client: ElasticsearchClient, - source: string, - target: string, - /** only used for testing */ - timeout = DEFAULT_TIMEOUT -): TaskEither.TaskEither<RetryableEsClientError | IndexNotFound, CloneIndexResponse> => { +export const cloneIndex = ({ + client, + source, + target, + timeout = DEFAULT_TIMEOUT, +}: CloneIndexParams): TaskEither.TaskEither< + RetryableEsClientError | IndexNotFound, + CloneIndexResponse +> => { const cloneTask: TaskEither.TaskEither< RetryableEsClientError | IndexNotFound, AcknowledgeResponse > = () => { @@ -302,7 +338,7 @@ export const cloneIndex = ( } else { // Otherwise, wait until the target index has a 'green' status. return pipe( - waitForIndexStatusYellow(client, target, timeout), + waitForIndexStatusYellow({ client, index: target, timeout }), TaskEither.map((value) => { /** When the index status is 'green' we know that all shards were started */ return { acknowledged: true, shardsAcknowledged: true }; }) );
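// Per the comments above, cloneIndex (like createIndex later in this file) can
// resolve with acknowledged=true while shardsAcknowledged=false once the 120s wait
// lapses. A hedged sketch of how a caller can react (index names invented):
//
//   const res = await cloneIndex({ client, source: '.kibana_1', target: '.kibana_2' })();
//   if (Either.isRight(res) && !res.right.shardsAcknowledged) {
//     // Shards have not all started yet; keep polling the target index status.
//     await waitForIndexStatusYellow({ client, index: '.kibana_2' })();
//   }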
@@ -352,16 +388,22 @@ const catchWaitForTaskCompletionTimeout = ( } }; +/** @internal */ +export interface WaitForTaskParams { + client: ElasticsearchClient; + taskId: string; + timeout: string; +} /** * Blocks for up to 60s or until a task completes. * * TODO: delete completed tasks */ -const waitForTask = ( - client: ElasticsearchClient, - taskId: string, - timeout: string -): TaskEither.TaskEither< +const waitForTask = ({ + client, + taskId, + timeout, +}: WaitForTaskParams): TaskEither.TaskEither< RetryableEsClientError | WaitForTaskCompletionTimeout, WaitForTaskResponse > = () => { @@ -433,16 +475,21 @@ export interface OpenPitResponse { pitId: string; } +/** @internal */ +export interface OpenPitParams { + client: ElasticsearchClient; + index: string; +} // how long ES should keep PIT alive const pitKeepAlive = '10m'; /* * Creates a lightweight view of data when the request has been initiated. * See https://www.elastic.co/guide/en/elasticsearch/reference/current/point-in-time-api.html * */ -export const openPit = ( - client: ElasticsearchClient, - index: string -): TaskEither.TaskEither<RetryableEsClientError, OpenPitResponse> => () => { +export const openPit = ({ + client, + index, +}: OpenPitParams): TaskEither.TaskEither<RetryableEsClientError, OpenPitResponse> => () => { return client .openPointInTime({ index, @@ -459,17 +506,28 @@ export interface ReadWithPit { readonly totalHits: number | undefined; } +/** @internal */ + +export interface ReadWithPitParams { + client: ElasticsearchClient; + pitId: string; + query: estypes.QueryContainer; + batchSize: number; + searchAfter?: number[]; + seqNoPrimaryTerm?: boolean; +} + /* * Requests documents from the index using PIT mechanism. * */ -export const readWithPit = ( - client: ElasticsearchClient, - pitId: string, - query: estypes.QueryContainer, - batchSize: number, - searchAfter?: number[], - seqNoPrimaryTerm?: boolean -): TaskEither.TaskEither<RetryableEsClientError, ReadWithPit> => () => { +export const readWithPit = ({ + client, + pitId, + query, + batchSize, + searchAfter, + seqNoPrimaryTerm, +}: ReadWithPitParams): TaskEither.TaskEither<RetryableEsClientError, ReadWithPit> => () => { return client .search({ seq_no_primary_term: seqNoPrimaryTerm, @@ -516,14 +574,19 @@ .catch(catchRetryableEsClientErrors); }; +/** @internal */ +export interface ClosePitParams { + client: ElasticsearchClient; + pitId: string; +}
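// searchAfter is what makes readWithPit resumable: every batch is requested against
// the same pitId, passing the sort values where the previous batch stopped. A
// single-batch sketch (index name invented; the casts mirror the tests further
// down):
//
//   const pit = (await openPit({ client, index: '.kibana_1' })()) as Either.Right<OpenPitResponse>;
//   const batch = await readWithPit({
//     client,
//     pitId: pit.right.pitId,
//     query: { match_all: {} },
//     batchSize: 1000,
//   })();
//   await closePit({ client, pitId: pit.right.pitId })();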
/* * Closes PIT. * See https://www.elastic.co/guide/en/elasticsearch/reference/current/point-in-time-api.html * */ -export const closePit = ( - client: ElasticsearchClient, - pitId: string -): TaskEither.TaskEither<RetryableEsClientError, {}> => () => { +export const closePit = ({ + client, + pitId, +}: ClosePitParams): TaskEither.TaskEither<RetryableEsClientError, {}> => () => { return client .closePointInTime({ body: { id: pitId }, @@ -537,27 +600,42 @@ .catch(catchRetryableEsClientErrors); }; +/** @internal */ +export interface TransformDocsParams { + transformRawDocs: TransformRawDocs; + outdatedDocuments: SavedObjectsRawDoc[]; +} /* * Transform outdated docs * */ -export const transformDocs = ( - transformRawDocs: TransformRawDocs, - outdatedDocuments: SavedObjectsRawDoc[] -): TaskEither.TaskEither<DocumentsTransformFailed, DocumentsTransformSuccess> => - transformRawDocs(outdatedDocuments); +export const transformDocs = ({ + transformRawDocs, + outdatedDocuments, +}: TransformDocsParams): TaskEither.TaskEither< + DocumentsTransformFailed, + DocumentsTransformSuccess +> => transformRawDocs(outdatedDocuments); /** @internal */ export interface ReindexResponse { taskId: string; } +/** @internal */ +export interface RefreshIndexParams { + client: ElasticsearchClient; + targetIndex: string; +} /** * Wait for Elasticsearch to reindex all the changes. */ -export const refreshIndex = ( - client: ElasticsearchClient, - targetIndex: string -): TaskEither.TaskEither<RetryableEsClientError, { refreshed: boolean }> => () => { +export const refreshIndex = ({ + client, + targetIndex, +}: RefreshIndexParams): TaskEither.TaskEither< + RetryableEsClientError, + { refreshed: boolean } +> => () => { return client.indices .refresh({ index: targetIndex, @@ -567,6 +645,19 @@ }) .catch(catchRetryableEsClientErrors); };
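// The reindex action defined next only starts the work: it resolves with a task ID,
// and completion is observed separately by polling that task. A sketch of the full
// sequence (index names invented, timeout as used in the integration tests):
//
//   const started = (await reindex({
//     client,
//     sourceIndex: '.kibana_1',
//     targetIndex: '.kibana_2',
//     reindexScript: Option.none,
//     requireAlias: false,
//     unusedTypesQuery: { match_all: {} },
//   })()) as Either.Right<ReindexResponse>;
//   await waitForReindexTask({ client, taskId: started.right.taskId, timeout: '60s' })();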
+/** @internal */ +export interface ReindexParams { + client: ElasticsearchClient; + sourceIndex: string; + targetIndex: string; + reindexScript: Option.Option<string>; + requireAlias: boolean; + /* When reindexing we use a source query to exclude saved objects types which + * are no longer used. These saved objects will still be kept in the outdated + * index for backup purposes, but won't be available in the upgraded index. + */ + unusedTypesQuery: estypes.QueryContainer; +} /** * Reindex documents from the `sourceIndex` into the `targetIndex`. Returns a * task ID which can be tracked for progress. @@ -575,18 +666,14 @@ * this in parallel. By using `op_type: 'create', conflicts: 'proceed'` there * will be only one write per reindexed document. */ -export const reindex = ( - client: ElasticsearchClient, - sourceIndex: string, - targetIndex: string, - reindexScript: Option.Option<string>, - requireAlias: boolean, - /* When reindexing we use a source query to exclude saved objects types which - * are no longer used. These saved objects will still be kept in the outdated - * index for backup purposes, but won't be available in the upgraded index. - */ - unusedTypesQuery: estypes.QueryContainer -): TaskEither.TaskEither<RetryableEsClientError, ReindexResponse> => () => { +export const reindex = ({ + client, + sourceIndex, + targetIndex, + reindexScript, + requireAlias, + unusedTypesQuery, +}: ReindexParams): TaskEither.TaskEither<RetryableEsClientError, ReindexResponse> => () => { return client .reindex({ // Require targetIndex to be an alias. Prevents a new index from being @@ -688,11 +775,18 @@ export const waitForReindexTask = flow( ) ); -export const verifyReindex = ( - client: ElasticsearchClient, - sourceIndex: string, - targetIndex: string -): TaskEither.TaskEither< +/** @internal */ +export interface VerifyReindexParams { + client: ElasticsearchClient; + sourceIndex: string; + targetIndex: string; +} + +export const verifyReindex = ({ + client, + sourceIndex, + targetIndex, +}: VerifyReindexParams): TaskEither.TaskEither< RetryableEsClientError | { type: 'verify_reindex_failed' }, 'verify_reindex_succeeded' > => () => { @@ -762,13 +856,18 @@ export type AliasAction = | { remove: { index: string; alias: string; must_exist: boolean } } | { add: { index: string; alias: string } }; +/** @internal */ +export interface UpdateAliasesParams { + client: ElasticsearchClient; + aliasActions: AliasAction[]; +} /** * Calls the Update index alias API `_alias` with the provided alias actions. */ -export const updateAliases = ( - client: ElasticsearchClient, - aliasActions: AliasAction[] -): TaskEither.TaskEither< +export const updateAliases = ({ + client, + aliasActions, +}: UpdateAliasesParams): TaskEither.TaskEither< IndexNotFound | AliasNotFound | RemoveIndexNotAConcreteIndex | RetryableEsClientError, 'update_aliases_succeeded' > => () => { @@ -836,6 +935,14 @@ function aliasArrayToRecord(aliases: string[]): Record { } return result; } + +/** @internal */ +export interface CreateIndexParams { + client: ElasticsearchClient; + indexName: string; + mappings: IndexMapping; + aliases?: string[]; +} /** * Creates an index with the given mappings * * - it is idempotent, so it can be called multiple times * - the first call will wait up to 120s for the cluster state and all shards * to be updated. */ -export const createIndex = ( - client: ElasticsearchClient, - indexName: string, - mappings: IndexMapping, - aliases: string[] = [] -): TaskEither.TaskEither<RetryableEsClientError, 'create_index_succeeded'> => { +export const createIndex = ({ + client, + indexName, + mappings, + aliases = [], +}: CreateIndexParams): TaskEither.TaskEither<RetryableEsClientError, 'create_index_succeeded'> => { const createIndexTask: TaskEither.TaskEither< RetryableEsClientError, AcknowledgeResponse > = () => { @@ -930,7 +1037,7 @@ export const createIndex = ( } else { // Otherwise, wait until the target index has a 'yellow' status. return pipe( - waitForIndexStatusYellow(client, indexName, DEFAULT_TIMEOUT), + waitForIndexStatusYellow({ client, index: indexName, timeout: DEFAULT_TIMEOUT }), TaskEither.map(() => { /** When the index status is 'yellow' we know that all shards were started */ return 'create_index_succeeded'; @@ -946,15 +1053,24 @@ export interface UpdateAndPickupMappingsResponse { taskId: string; } +/** @internal */ +export interface UpdateAndPickupMappingsParams { + client: ElasticsearchClient; + index: string; + mappings: IndexMapping; +} /** * Updates an index's mappings and runs an pickupUpdatedMappings task so that the mapping * changes are "picked up". Returns a taskId to track progress. 
*/ -export const updateAndPickupMappings = ( - client: ElasticsearchClient, - index: string, - mappings: IndexMapping -): TaskEither.TaskEither<RetryableEsClientError, UpdateAndPickupMappingsResponse> => { +export const updateAndPickupMappings = ({ + client, + index, + mappings, +}: UpdateAndPickupMappingsParams): TaskEither.TaskEither< + RetryableEsClientError, + UpdateAndPickupMappingsResponse +> => { const putMappingTask: TaskEither.TaskEither< RetryableEsClientError, 'update_mappings_succeeded' > = () => { @@ -1053,16 +1169,26 @@ export const searchForOutdatedDocuments = ( .catch(catchRetryableEsClientErrors); }; +/** @internal */ +export interface BulkOverwriteTransformedDocumentsParams { + client: ElasticsearchClient; + index: string; + transformedDocs: SavedObjectsRawDoc[]; + refresh?: estypes.Refresh; +} /** * Write the up-to-date transformed documents to the index, overwriting any * documents that are still on their outdated version. */ -export const bulkOverwriteTransformedDocuments = ( - client: ElasticsearchClient, - index: string, - transformedDocs: SavedObjectsRawDoc[], - refresh: estypes.Refresh -): TaskEither.TaskEither<RetryableEsClientError, 'bulk_index_succeeded'> => () => { +export const bulkOverwriteTransformedDocuments = ({ + client, + index, + transformedDocs, + refresh = false, +}: BulkOverwriteTransformedDocumentsParams): TaskEither.TaskEither< + RetryableEsClientError, + 'bulk_index_succeeded' +> => () => { return client .bulk({ // Because we only add aliases in the MARK_VERSION_INDEX_READY step we
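// Since refresh now defaults to false in BulkOverwriteTransformedDocumentsParams,
// callers that need the writes to be immediately searchable have to opt in, as the
// integration tests below do. Sketch (index name invented):
//
//   await bulkOverwriteTransformedDocuments({
//     client,
//     index: 'my_index',
//     transformedDocs: [],
//     refresh: 'wait_for',
//   })();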
diff --git a/src/core/server/saved_objects/migrationsv2/integration_tests/actions.test.ts b/src/core/server/saved_objects/migrationsv2/integration_tests/actions.test.ts index d0158a4c68f246..67a2685caf3d61 100644 --- a/src/core/server/saved_objects/migrationsv2/integration_tests/actions.test.ts +++ b/src/core/server/saved_objects/migrationsv2/integration_tests/actions.test.ts @@ -67,9 +67,13 @@ describe('migration actions', () => { client = start.elasticsearch.client.asInternalUser; // Create test fixture data: - await createIndex(client, 'existing_index_with_docs', { - dynamic: true, - properties: {}, + await createIndex({ + client, + indexName: 'existing_index_with_docs', + mappings: { + dynamic: true, + properties: {}, + }, })(); const sourceDocs = ([ { _source: { title: 'doc 1' } }, { _source: { title: 'saved object 4', type: 'another_unused_type' } }, { _source: { title: 'f-agent-event 5', type: 'f_agent_event' } }, ] as unknown) as SavedObjectsRawDoc[]; - await bulkOverwriteTransformedDocuments( + await bulkOverwriteTransformedDocuments({ + client, + index: 'existing_index_with_docs', + transformedDocs: sourceDocs, + refresh: 'wait_for', + })(); + + await createIndex({ client, indexName: 'existing_index_2', mappings: { properties: {} } })(); + await createIndex({ client, - 'existing_index_with_docs', - sourceDocs, - 'wait_for' - )(); - - await createIndex(client, 'existing_index_2', { properties: {} })(); - await createIndex(client, 'existing_index_with_write_block', { properties: {} })(); - await bulkOverwriteTransformedDocuments( + indexName: 'existing_index_with_write_block', + mappings: { properties: {} }, + })(); + await bulkOverwriteTransformedDocuments({ client, - 'existing_index_with_write_block', - sourceDocs, - 'wait_for' - )(); + index: 'existing_index_with_write_block', + transformedDocs: sourceDocs, + refresh: 'wait_for', + })(); + await setWriteBlock({ client, index: 'existing_index_with_write_block' })(); + await updateAliases({ + client, + aliasActions: [{ add: { index: 'existing_index_2', alias: 'existing_index_2_alias' } }], + })(); }); afterAll(async () => { @@ -107,7 +116,7 @@ describe('migration actions', () => { describe('fetchIndices', () => { it('resolves right empty record if no indices were found', async () => { expect.assertions(1); - const task = fetchIndices(client, ['no_such_index']); + const task = fetchIndices({ client, indices: ['no_such_index'] }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -117,10 +126,10 @@ describe('migration actions', () => { }); it('resolves right record with found indices', async () => { expect.assertions(1); - const res = (await fetchIndices(client, [ - 'no_such_index', - 'existing_index_with_docs', - ])()) as Either.Right<FetchIndexResponse>; + const res = (await fetchIndices({ + client, + indices: ['no_such_index', 'existing_index_with_docs'], + })()) as Either.Right<FetchIndexResponse>; expect(res.right).toEqual( expect.objectContaining({ @@ -136,11 +145,15 @@ describe('migration actions', () => { describe('setWriteBlock', () => { beforeAll(async () => { - await createIndex(client, 'new_index_without_write_block', { properties: {} })(); + await createIndex({ + client, + indexName: 'new_index_without_write_block', + mappings: { properties: {} }, + })(); }); it('resolves right when setting the write block succeeds', async () => { expect.assertions(1); - const task = setWriteBlock(client, 'new_index_without_write_block'); + const task = setWriteBlock({ client, index: 'new_index_without_write_block' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -150,7 +163,7 @@ describe('migration actions', () => { }); it('resolves right when setting a write block on an index that already has one', async () => { expect.assertions(1); - const task = setWriteBlock(client, 'existing_index_with_write_block'); + const task = setWriteBlock({ client, index: 'existing_index_with_write_block' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -160,7 +173,7 @@ describe('migration actions', () => { }); it('once resolved, prevents further writes to the index', async () => { expect.assertions(1); - const task = setWriteBlock(client, 'new_index_without_write_block'); + const task = setWriteBlock({ client, index: 'new_index_without_write_block' }); await task(); const sourceDocs = ([ { _source: { title: 'doc 1' } }, { _source: { title: 'doc 2' } }, { _source: { title: 'doc 3' } }, { _source: { title: 'doc 4' } }, ] as unknown) as SavedObjectsRawDoc[]; await expect( - bulkOverwriteTransformedDocuments( + bulkOverwriteTransformedDocuments({ client, - 'new_index_without_write_block', - sourceDocs, - 'wait_for' - )() + index: 'new_index_without_write_block', + transformedDocs: sourceDocs, + refresh: 'wait_for', + })() ).rejects.toMatchObject(expect.anything()); }); it('resolves left index_not_found_exception when the index does not exist', async () => { expect.assertions(1); - const task = setWriteBlock(client, 'no_such_index'); + const task = setWriteBlock({ client, index: 'no_such_index' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -194,13 +207,21 @@ describe('migration actions', () => { describe('removeWriteBlock', () => { beforeAll(async () => { - await createIndex(client, 'existing_index_without_write_block_2', { properties: {} })(); - await createIndex(client, 
'existing_index_with_write_block_2', { properties: {} })(); - await setWriteBlock(client, 'existing_index_with_write_block_2')(); + await createIndex({ + client, + indexName: 'existing_index_without_write_block_2', + mappings: { properties: {} }, + })(); + await createIndex({ + client, + indexName: 'existing_index_with_write_block_2', + mappings: { properties: {} }, + })(); + await setWriteBlock({ client, index: 'existing_index_with_write_block_2' })(); }); it('resolves right if successful when an index already has a write block', async () => { expect.assertions(1); - const task = removeWriteBlock(client, 'existing_index_with_write_block_2'); + const task = removeWriteBlock({ client, index: 'existing_index_with_write_block_2' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -210,7 +231,7 @@ describe('migration actions', () => { }); it('resolves right if successful when an index does not have a write block', async () => { expect.assertions(1); - const task = removeWriteBlock(client, 'existing_index_without_write_block_2'); + const task = removeWriteBlock({ client, index: 'existing_index_without_write_block_2' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -220,7 +241,7 @@ describe('migration actions', () => { }); it('rejects if there is a non-retryable error', async () => { expect.assertions(1); - const task = removeWriteBlock(client, 'no_such_index'); + const task = removeWriteBlock({ client, index: 'no_such_index' }); await expect(task()).rejects.toMatchInlineSnapshot( `[ResponseError: index_not_found_exception]` ); @@ -251,7 +272,10 @@ describe('migration actions', () => { ); // Start tracking the index status - const indexStatusPromise = waitForIndexStatusYellow(client, 'red_then_yellow_index')(); + const indexStatusPromise = waitForIndexStatusYellow({ + client, + index: 'red_then_yellow_index', + })(); const redStatusResponse = await client.cluster.health({ index: 'red_then_yellow_index' }); expect(redStatusResponse.body.status).toBe('red'); @@ -281,7 +305,11 @@ describe('migration actions', () => { } }); it('resolves right if cloning into a new target index', async () => { - const task = cloneIndex(client, 'existing_index_with_write_block', 'clone_target_1'); + const task = cloneIndex({ + client, + source: 'existing_index_with_write_block', + target: 'clone_target_1', + }); expect.assertions(1); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -314,11 +342,11 @@ describe('migration actions', () => { .catch((e) => {}); // Call clone even though the index already exists - const cloneIndexPromise = cloneIndex( + const cloneIndexPromise = cloneIndex({ client, - 'existing_index_with_write_block', - 'clone_red_then_yellow_index' - )(); + source: 'existing_index_with_write_block', + target: 'clone_red_then_yellow_index', + })(); let indexYellow = false; setTimeout(() => { @@ -348,7 +376,7 @@ describe('migration actions', () => { }); it('resolves left index_not_found_exception if the source index does not exist', async () => { expect.assertions(1); - const task = cloneIndex(client, 'no_such_index', 'clone_target_3'); + const task = cloneIndex({ client, source: 'no_such_index', target: 'clone_target_3' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -378,12 +406,12 @@ describe('migration actions', () => { .catch((e) => {}); // Call clone even though the index already exists - const cloneIndexPromise = cloneIndex( + const cloneIndexPromise = cloneIndex({ client, - 
'existing_index_with_write_block', - 'clone_red_index', - '0s' - )(); + source: 'existing_index_with_write_block', + target: 'clone_red_index', + timeout: '0s', + })(); await cloneIndexPromise.then((res) => { expect(res).toMatchInlineSnapshot(` @@ -404,15 +432,15 @@ describe('migration actions', () => { // together with waitForReindexTask describe('reindex & waitForReindexTask', () => { it('resolves right when reindex succeeds without reindex script', async () => { - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target', - Option.none, - false, - { match_all: {} } - )()) as Either.Right<ReindexResponse>; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right<ReindexResponse>; + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -436,21 +464,21 @@ describe('migration actions', () => { `); }); it('resolves right and excludes all documents not matching the unusedTypesQuery', async () => { - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_excluded_docs', - Option.none, - false, - { + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_excluded_docs', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { bool: { must_not: ['f_agent_event', 'another_unused_type'].map((type) => ({ term: { type }, })), }, - } - )()) as Either.Right<ReindexResponse>; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + }, + })()) as Either.Right<ReindexResponse>; + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -473,15 +501,15 @@ describe('migration actions', () => { }); it('resolves right when reindex succeeds with reindex script', async () => { expect.assertions(2); - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_2', - Option.some(`ctx._source.title = ctx._source.title + '_updated'`), - false, - { match_all: {} } - )()) as Either.Right<ReindexResponse>; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_2', + reindexScript: Option.some(`ctx._source.title = ctx._source.title + '_updated'`), + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right<ReindexResponse>; + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -506,15 +534,15 @@ describe('migration actions', () => { it('resolves right, ignores version conflicts and does not update existing docs when reindex multiple times', async () => { expect.assertions(3); // Reindex with a script - let res = (await reindex( + let res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_3', - Option.some(`ctx._source.title = ctx._source.title + '_updated'`), - false, - { match_all: {} } - )()) as Either.Right<ReindexResponse>; - let task = waitForReindexTask(client, res.right.taskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_3', + reindexScript: Option.some(`ctx._source.title = ctx._source.title + 
'_updated'`), + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right<ReindexResponse>; + let task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -523,15 +551,15 @@ describe('migration actions', () => { `); // reindex without a script - res = (await reindex( + res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_3', - Option.none, - false, - { match_all: {} } - )()) as Either.Right<ReindexResponse>; - task = waitForReindexTask(client, res.right.taskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_3', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right<ReindexResponse>; + task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -559,7 +587,7 @@ describe('migration actions', () => { expect.assertions(2); // Simulate a reindex that only adds some of the documents from the // source index into the target index - await createIndex(client, 'reindex_target_4', { properties: {} })(); + await createIndex({ client, indexName: 'reindex_target_4', mappings: { properties: {} } })(); const sourceDocs = ((await searchForOutdatedDocuments(client, { batchSize: 1000, targetIndex: 'existing_index_with_docs', @@ -570,18 +598,23 @@ describe('migration actions', () => { _id, _source, })); - await bulkOverwriteTransformedDocuments(client, 'reindex_target_4', sourceDocs, 'wait_for')(); + await bulkOverwriteTransformedDocuments({ + client, + index: 'reindex_target_4', + transformedDocs: sourceDocs, + refresh: 'wait_for', + })(); // Now do a real reindex - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_4', - Option.some(`ctx._source.title = ctx._source.title + '_updated'`), - false, - { match_all: {} } - )()) as Either.Right<ReindexResponse>; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_4', + reindexScript: Option.some(`ctx._source.title = ctx._source.title + '_updated'`), + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right<ReindexResponse>; + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -614,24 +647,28 @@ describe('migration actions', () => { // and should ignore this error.
// Create an index with incompatible mappings - await createIndex(client, 'reindex_target_5', { - dynamic: 'strict', - properties: { - /** no title field */ + await createIndex({ + client, + indexName: 'reindex_target_5', + mappings: { + dynamic: 'strict', + properties: { + /** no title field */ + }, }, })(); const { right: { taskId: reindexTaskId }, - } = (await reindex( + } = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_5', - Option.none, - false, - { match_all: {} } - )()) as Either.Right<ReindexResponse>; - const task = waitForReindexTask(client, reindexTaskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_5', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right<ReindexResponse>; + const task = waitForReindexTask({ client, taskId: reindexTaskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -651,22 +688,26 @@ describe('migration actions', () => { // and should ignore this error. // Create an index with incompatible mappings - await createIndex(client, 'reindex_target_6', { - dynamic: false, - properties: { title: { type: 'integer' } }, // integer is incompatible with string title + await createIndex({ + client, + indexName: 'reindex_target_6', + mappings: { + dynamic: false, + properties: { title: { type: 'integer' } }, // integer is incompatible with string title + }, })(); const { right: { taskId: reindexTaskId }, - } = (await reindex( + } = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_6', - Option.none, - false, - { match_all: {} } - )()) as Either.Right<ReindexResponse>; - const task = waitForReindexTask(client, reindexTaskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_6', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right<ReindexResponse>; + const task = waitForReindexTask({ client, taskId: reindexTaskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -679,10 +720,17 @@ describe('migration actions', () => { }); it('resolves left index_not_found_exception if source index does not exist', async () => { expect.assertions(1); - const res = (await reindex(client, 'no_such_index', 'reindex_target', Option.none, false, { - match_all: {}, + const res = (await reindex({ + client, + sourceIndex: 'no_such_index', + targetIndex: 'reindex_target', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { + match_all: {}, + }, })()) as Either.Right<ReindexResponse>; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -695,16 +743,16 @@ describe('migration actions', () => { }); it('resolves left target_index_had_write_block if all failures are due to a write block', async () => { expect.assertions(1); - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'existing_index_with_write_block', - Option.none, - false, - { match_all: {} } - )()) as Either.Right<ReindexResponse>; + sourceIndex: 'existing_index_with_docs', + targetIndex: 'existing_index_with_write_block', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right<ReindexResponse>; - const task = waitForReindexTask(client, res.right.taskId, 
timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -717,16 +765,16 @@ describe('migration actions', () => { }); it('resolves left if requireAlias=true and the target is not an alias', async () => { expect.assertions(1); - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'existing_index_with_write_block', - Option.none, - true, - { match_all: {} } - )()) as Either.Right; + sourceIndex: 'existing_index_with_docs', + targetIndex: 'existing_index_with_write_block', + reindexScript: Option.none, + requireAlias: true, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -739,16 +787,16 @@ describe('migration actions', () => { `); }); it('resolves left wait_for_task_completion_timeout when the task does not finish within the timeout', async () => { - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target', - Option.none, - false, - { match_all: {} } - )()) as Either.Right; + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; - const task = waitForReindexTask(client, res.right.taskId, '0s'); + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '0s' }); await expect(task()).resolves.toMatchObject({ _tag: 'Left', @@ -766,17 +814,21 @@ describe('migration actions', () => { describe('verifyReindex', () => { it('resolves right if source and target indices have the same amount of documents', async () => { expect.assertions(1); - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_7', - Option.none, - false, - { match_all: {} } - )()) as Either.Right; - await waitForReindexTask(client, res.right.taskId, '10s')(); - - const task = verifyReindex(client, 'existing_index_with_docs', 'reindex_target_7'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_7', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; + await waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' })(); + + const task = verifyReindex({ + client, + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_7', + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -786,7 +838,11 @@ describe('migration actions', () => { }); it('resolves left if source and target indices have different amount of documents', async () => { expect.assertions(1); - const task = verifyReindex(client, 'existing_index_with_docs', 'existing_index_2'); + const task = verifyReindex({ + client, + sourceIndex: 'existing_index_with_docs', + targetIndex: 'existing_index_2', + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -798,19 +854,27 @@ describe('migration actions', () => { }); it('rejects if source or target index does not exist', async () => { expect.assertions(2); - let task = verifyReindex(client, 'no_such_index', 'existing_index_2'); + let task = verifyReindex({ + client, + sourceIndex: 'no_such_index', + targetIndex: 'existing_index_2', + }); await 
expect(task()).rejects.toMatchInlineSnapshot( `[ResponseError: index_not_found_exception]` ); - task = verifyReindex(client, 'existing_index_2', 'no_such_index'); + task = verifyReindex({ + client, + sourceIndex: 'existing_index_2', + targetIndex: 'no_such_index', + }); await expect(task()).rejects.toThrow('index_not_found_exception'); }); }); describe('openPit', () => { it('opens PointInTime for an index', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; expect(pitResponse.right.pitId).toEqual(expect.any(String)); @@ -824,52 +888,52 @@ describe('migration actions', () => { await expect(searchResponse.body.hits.hits.length).toBeGreaterThan(0); }); it('rejects if index does not exist', async () => { - const openPitTask = openPit(client, 'no_such_index'); + const openPitTask = openPit({ client, index: 'no_such_index' }); await expect(openPitTask()).rejects.toThrow('index_not_found_exception'); }); }); describe('readWithPit', () => { it('requests documents from an index using given PIT', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; - const readWithPitTask = readWithPit( + const readWithPitTask = readWithPit({ client, - pitResponse.right.pitId, - { match_all: {} }, - 1000, - undefined - ); + pitId: pitResponse.right.pitId, + query: { match_all: {} }, + batchSize: 1000, + searchAfter: undefined, + }); const docsResponse = (await readWithPitTask()) as Either.Right; await expect(docsResponse.right.outdatedDocuments.length).toBe(5); }); it('requests the batchSize of documents from an index', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; - const readWithPitTask = readWithPit( + const readWithPitTask = readWithPit({ client, - pitResponse.right.pitId, - { match_all: {} }, - 3, - undefined - ); + pitId: pitResponse.right.pitId, + query: { match_all: {} }, + batchSize: 3, + searchAfter: undefined, + }); const docsResponse = (await readWithPitTask()) as Either.Right; await expect(docsResponse.right.outdatedDocuments.length).toBe(3); }); it('it excludes documents not matching the provided "query"', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; - const readWithPitTask = readWithPit( + const readWithPitTask = readWithPit({ client, - pitResponse.right.pitId, - { + pitId: pitResponse.right.pitId, + query: { bool: { must_not: [ { @@ -885,9 +949,9 @@ describe('migration actions', () => { ], }, }, - 1000, - undefined - ); + batchSize: 1000, + searchAfter: undefined, + }); const docsResponse = (await readWithPitTask()) as Either.Right; @@ -902,18 +966,18 @@ describe('migration actions', () => { }); it('only returns documents that match the provided "query"', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; - const readWithPitTask = readWithPit( + const readWithPitTask = 
readWithPit({ client, - pitResponse.right.pitId, - { + pitId: pitResponse.right.pitId, + query: { match: { title: { query: 'doc' } }, }, - 1000, - undefined - ); + batchSize: 1000, + searchAfter: undefined, + }); const docsResponse = (await readWithPitTask()) as Either.Right; @@ -928,19 +992,19 @@ describe('migration actions', () => { }); it('returns docs with _seq_no and _primary_term when specified', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; - const readWithPitTask = readWithPit( + const readWithPitTask = readWithPit({ client, - pitResponse.right.pitId, - { + pitId: pitResponse.right.pitId, + query: { match: { title: { query: 'doc' } }, }, - 1000, - undefined, - true - ); + batchSize: 1000, + searchAfter: undefined, + seqNoPrimaryTerm: true, + }); const docsResponse = (await readWithPitTask()) as Either.Right; @@ -955,18 +1019,18 @@ describe('migration actions', () => { }); it('does not return docs with _seq_no and _primary_term if not specified', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; - const readWithPitTask = readWithPit( + const readWithPitTask = readWithPit({ client, - pitResponse.right.pitId, - { + pitId: pitResponse.right.pitId, + query: { match: { title: { query: 'doc' } }, }, - 1000, - undefined - ); + batchSize: 1000, + searchAfter: undefined, + }); const docsResponse = (await readWithPitTask()) as Either.Right; @@ -981,24 +1045,24 @@ describe('migration actions', () => { }); it('rejects if PIT does not exist', async () => { - const readWithPitTask = readWithPit( + const readWithPitTask = readWithPit({ client, - 'no_such_pit', - { match_all: {} }, - 1000, - undefined - ); + pitId: 'no_such_pit', + query: { match_all: {} }, + batchSize: 1000, + searchAfter: undefined, + }); await expect(readWithPitTask()).rejects.toThrow('illegal_argument_exception'); }); }); describe('closePit', () => { it('closes PointInTime', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; const pitId = pitResponse.right.pitId; - await closePit(client, pitId)(); + await closePit({ client, pitId })(); const searchTask = client.search({ body: { @@ -1010,7 +1074,7 @@ describe('migration actions', () => { }); it('rejects if PIT does not exist', async () => { - const closePitTask = closePit(client, 'no_such_pit'); + const closePitTask = closePit({ client, pitId: 'no_such_pit' }); await expect(closePitTask()).rejects.toThrow('illegal_argument_exception'); }); }); @@ -1034,7 +1098,10 @@ describe('migration actions', () => { return Either.right({ processedDocs }); }; } - const transformTask = transformDocs(innerTransformRawDocs, originalDocs); + const transformTask = transformDocs({ + transformRawDocs: innerTransformRawDocs, + outdatedDocuments: originalDocs, + }); const resultsWithProcessDocs = ((await transformTask()) as Either.Right) .right.processedDocs; @@ -1051,7 +1118,11 @@ describe('migration actions', () => { 'existing_index_with_write_block' )()) as Either.Right; - const task = waitForPickupUpdatedMappingsTask(client, res.right.taskId, '10s'); + const task = waitForPickupUpdatedMappingsTask({ 
+ client, + taskId: res.right.taskId, + timeout: '10s', + }); // We can't do a snapshot match because the response includes an index // id which ES assigns dynamically @@ -1065,7 +1136,11 @@ describe('migration actions', () => { 'no_such_index' )()) as Either.Right; - const task = waitForPickupUpdatedMappingsTask(client, res.right.taskId, '10s'); + const task = waitForPickupUpdatedMappingsTask({ + client, + taskId: res.right.taskId, + timeout: '10s', + }); await expect(task()).rejects.toMatchInlineSnapshot(` [Error: pickupUpdatedMappings task failed with the following error: @@ -1078,7 +1153,11 @@ describe('migration actions', () => { 'existing_index_with_docs' )()) as Either.Right; - const task = waitForPickupUpdatedMappingsTask(client, res.right.taskId, '0s'); + const task = waitForPickupUpdatedMappingsTask({ + client, + taskId: res.right.taskId, + timeout: '0s', + }); await expect(task()).resolves.toMatchObject({ _tag: 'Left', @@ -1097,7 +1176,11 @@ describe('migration actions', () => { 'existing_index_with_docs' )()) as Either.Right; - const task = waitForPickupUpdatedMappingsTask(client, res.right.taskId, '10s'); + const task = waitForPickupUpdatedMappingsTask({ + client, + taskId: res.right.taskId, + timeout: '10s', + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -1111,9 +1194,13 @@ describe('migration actions', () => { describe('updateAndPickupMappings', () => { it('resolves right when mappings were updated and picked up', async () => { // Create an index without any mappings and insert documents into it - await createIndex(client, 'existing_index_without_mappings', { - dynamic: false, - properties: {}, + await createIndex({ + client, + indexName: 'existing_index_without_mappings', + mappings: { + dynamic: false, + properties: {}, + }, })(); const sourceDocs = ([ { _source: { title: 'doc 1' } }, @@ -1121,12 +1208,12 @@ describe('migration actions', () => { { _source: { title: 'doc 3' } }, { _source: { title: 'doc 4' } }, ] as unknown) as SavedObjectsRawDoc[]; - await bulkOverwriteTransformedDocuments( + await bulkOverwriteTransformedDocuments({ client, - 'existing_index_without_mappings', - sourceDocs, - 'wait_for' - )(); + index: 'existing_index_without_mappings', + transformedDocs: sourceDocs, + refresh: 'wait_for', + })(); // Assert that we can't search over the unmapped fields of the document const originalSearchResults = ((await searchForOutdatedDocuments(client, { @@ -1139,14 +1226,18 @@ describe('migration actions', () => { expect(originalSearchResults.length).toBe(0); // Update and pickup mappings so that the title field is searchable - const res = await updateAndPickupMappings(client, 'existing_index_without_mappings', { - properties: { - title: { type: 'text' }, + const res = await updateAndPickupMappings({ + client, + index: 'existing_index_without_mappings', + mappings: { + properties: { + title: { type: 'text' }, + }, }, })(); expect(Either.isRight(res)).toBe(true); const taskId = (res as Either.Right).right.taskId; - await waitForPickupUpdatedMappingsTask(client, taskId, '60s')(); + await waitForPickupUpdatedMappingsTask({ client, taskId, timeout: '60s' })(); // Repeat the search expecting to be able to find the existing documents const pickedUpSearchResults = ((await searchForOutdatedDocuments(client, { @@ -1163,15 +1254,18 @@ describe('migration actions', () => { describe('updateAliases', () => { describe('remove', () => { it('resolves left index_not_found_exception when the index does not exist', async () => { - const task = 
updateAliases(client, [ - { - remove: { - alias: 'no_such_alias', - index: 'no_such_index', - must_exist: false, + const task = updateAliases({ + client, + aliasActions: [ + { + remove: { + alias: 'no_such_alias', + index: 'no_such_index', + must_exist: false, + }, }, - }, - ]); + ], + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -1184,15 +1278,18 @@ describe('migration actions', () => { }); describe('with must_exist=false', () => { it('resolves left alias_not_found_exception when alias does not exist', async () => { - const task = updateAliases(client, [ - { - remove: { - alias: 'no_such_alias', - index: 'existing_index_with_docs', - must_exist: false, + const task = updateAliases({ + client, + aliasActions: [ + { + remove: { + alias: 'no_such_alias', + index: 'existing_index_with_docs', + must_exist: false, + }, }, - }, - ]); + ], + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -1205,15 +1302,18 @@ describe('migration actions', () => { }); describe('with must_exist=true', () => { it('resolves left alias_not_found_exception when alias does not exist on specified index', async () => { - const task = updateAliases(client, [ - { - remove: { - alias: 'existing_index_2_alias', - index: 'existing_index_with_docs', - must_exist: true, + const task = updateAliases({ + client, + aliasActions: [ + { + remove: { + alias: 'existing_index_2_alias', + index: 'existing_index_with_docs', + must_exist: true, + }, }, - }, - ]); + ], + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -1224,15 +1324,18 @@ describe('migration actions', () => { `); }); it('resolves left alias_not_found_exception when alias does not exist', async () => { - const task = updateAliases(client, [ - { - remove: { - alias: 'no_such_alias', - index: 'existing_index_with_docs', - must_exist: true, + const task = updateAliases({ + client, + aliasActions: [ + { + remove: { + alias: 'no_such_alias', + index: 'existing_index_with_docs', + must_exist: true, + }, }, - }, - ]); + ], + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -1246,13 +1349,16 @@ describe('migration actions', () => { }); describe('remove_index', () => { it('left index_not_found_exception if index does not exist', async () => { - const task = updateAliases(client, [ - { - remove_index: { - index: 'no_such_index', + const task = updateAliases({ + client, + aliasActions: [ + { + remove_index: { + index: 'no_such_index', + }, }, - }, - ]); + ], + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -1264,13 +1370,16 @@ describe('migration actions', () => { `); }); it('left remove_index_not_a_concrete_index when remove_index targets an alias', async () => { - const task = updateAliases(client, [ - { - remove_index: { - index: 'existing_index_2_alias', + const task = updateAliases({ + client, + aliasActions: [ + { + remove_index: { + index: 'existing_index_2_alias', + }, }, - }, - ]); + ], + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -1312,7 +1421,11 @@ describe('migration actions', () => { }); // Call createIndex even though the index already exists - const createIndexPromise = createIndex(client, 'red_then_yellow_index', undefined as any)(); + const createIndexPromise = createIndex({ + client, + indexName: 'red_then_yellow_index', + mappings: undefined as any, + })(); let indexYellow = false; setTimeout(() => { @@ -1341,7 +1454,7 @@ 
describe('migration actions', () => { // Creating an index with the same name as an existing alias to induce // failure await expect( - createIndex(client, 'existing_index_2_alias', undefined as any)() + createIndex({ client, indexName: 'existing_index_2_alias', mappings: undefined as any })() ).rejects.toMatchInlineSnapshot(`[ResponseError: invalid_index_name_exception]`); }); }); @@ -1353,12 +1466,12 @@ describe('migration actions', () => { { _source: { title: 'doc 6' } }, { _source: { title: 'doc 7' } }, ] as unknown) as SavedObjectsRawDoc[]; - const task = bulkOverwriteTransformedDocuments( + const task = bulkOverwriteTransformedDocuments({ client, - 'existing_index_with_docs', - newDocs, - 'wait_for' - ); + index: 'existing_index_with_docs', + transformedDocs: newDocs, + refresh: 'wait_for', + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -1374,12 +1487,15 @@ describe('migration actions', () => { outdatedDocumentsQuery: undefined, })()) as Either.Right).right.outdatedDocuments; - const task = bulkOverwriteTransformedDocuments( + const task = bulkOverwriteTransformedDocuments({ client, - 'existing_index_with_docs', - [...existingDocs, ({ _source: { title: 'doc 8' } } as unknown) as SavedObjectsRawDoc], - 'wait_for' - ); + index: 'existing_index_with_docs', + transformedDocs: [ + ...existingDocs, + ({ _source: { title: 'doc 8' } } as unknown) as SavedObjectsRawDoc, + ], + refresh: 'wait_for', + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -1394,12 +1510,12 @@ describe('migration actions', () => { { _source: { title: 'doc 7' } }, ] as unknown) as SavedObjectsRawDoc[]; await expect( - bulkOverwriteTransformedDocuments( + bulkOverwriteTransformedDocuments({ client, - 'existing_index_with_write_block', - newDocs, - 'wait_for' - )() + index: 'existing_index_with_write_block', + transformedDocs: newDocs, + refresh: 'wait_for', + })() ).rejects.toMatchObject(expect.anything()); }); }); diff --git a/src/core/server/saved_objects/migrationsv2/migrations_state_machine_cleanup.ts b/src/core/server/saved_objects/migrationsv2/migrations_state_machine_cleanup.ts index 1881f9a712c293..e9cb33c0aa54a2 100644 --- a/src/core/server/saved_objects/migrationsv2/migrations_state_machine_cleanup.ts +++ b/src/core/server/saved_objects/migrationsv2/migrations_state_machine_cleanup.ts @@ -19,7 +19,7 @@ export async function cleanup( if (!state) return; if ('sourceIndexPitId' in state) { try { - await Actions.closePit(client, state.sourceIndexPitId)(); + await Actions.closePit({ client, pitId: state.sourceIndexPitId })(); } catch (e) { executionLog.push({ type: 'cleanup', diff --git a/src/core/server/saved_objects/migrationsv2/next.ts b/src/core/server/saved_objects/migrationsv2/next.ts index 07ebf80271d48c..3c3e3c46a8d68f 100644 --- a/src/core/server/saved_objects/migrationsv2/next.ts +++ b/src/core/server/saved_objects/migrationsv2/next.ts @@ -58,38 +58,46 @@ export type ResponseType = UnwrapPromise< export const nextActionMap = (client: ElasticsearchClient, transformRawDocs: TransformRawDocs) => { return { INIT: (state: InitState) => - Actions.fetchIndices(client, [state.currentAlias, state.versionAlias]), + Actions.fetchIndices({ client, indices: [state.currentAlias, state.versionAlias] }), WAIT_FOR_YELLOW_SOURCE: (state: WaitForYellowSourceState) => - Actions.waitForIndexStatusYellow(client, state.sourceIndex.value), + Actions.waitForIndexStatusYellow({ client, index: state.sourceIndex.value }), SET_SOURCE_WRITE_BLOCK: (state: 
SetSourceWriteBlockState) => - Actions.setWriteBlock(client, state.sourceIndex.value), + Actions.setWriteBlock({ client, index: state.sourceIndex.value }), CREATE_NEW_TARGET: (state: CreateNewTargetState) => - Actions.createIndex(client, state.targetIndex, state.targetIndexMappings), + Actions.createIndex({ + client, + indexName: state.targetIndex, + mappings: state.targetIndexMappings, + }), CREATE_REINDEX_TEMP: (state: CreateReindexTempState) => - Actions.createIndex(client, state.tempIndex, state.tempIndexMappings), + Actions.createIndex({ + client, + indexName: state.tempIndex, + mappings: state.tempIndexMappings, + }), REINDEX_SOURCE_TO_TEMP_OPEN_PIT: (state: ReindexSourceToTempOpenPit) => - Actions.openPit(client, state.sourceIndex.value), + Actions.openPit({ client, index: state.sourceIndex.value }), REINDEX_SOURCE_TO_TEMP_READ: (state: ReindexSourceToTempRead) => - Actions.readWithPit( + Actions.readWithPit({ client, - state.sourceIndexPitId, + pitId: state.sourceIndexPitId, /* When reading we use a source query to exclude saved objects types which * are no longer used. These saved objects will still be kept in the outdated * index for backup purposes, but won't be available in the upgraded index. */ - state.unusedTypesQuery, - state.batchSize, - state.lastHitSortValue - ), + query: state.unusedTypesQuery, + batchSize: state.batchSize, + searchAfter: state.lastHitSortValue, + }), REINDEX_SOURCE_TO_TEMP_CLOSE_PIT: (state: ReindexSourceToTempClosePit) => - Actions.closePit(client, state.sourceIndexPitId), + Actions.closePit({ client, pitId: state.sourceIndexPitId }), REINDEX_SOURCE_TO_TEMP_INDEX: (state: ReindexSourceToTempIndex) => - Actions.transformDocs(transformRawDocs, state.outdatedDocuments), + Actions.transformDocs({ transformRawDocs, outdatedDocuments: state.outdatedDocuments }), REINDEX_SOURCE_TO_TEMP_INDEX_BULK: (state: ReindexSourceToTempIndexBulk) => - Actions.bulkOverwriteTransformedDocuments( + Actions.bulkOverwriteTransformedDocuments({ client, - state.tempIndex, - state.transformedDocs, + index: state.tempIndex, + transformedDocs: state.transformedDocs, /** * Since we don't run a search against the target index, we disable "refresh" to speed up * the migration process. @@ -97,39 +105,48 @@ export const nextActionMap = (client: ElasticsearchClient, transformRawDocs: Tra * before we reach out to the OUTDATED_DOCUMENTS_SEARCH_OPEN_PIT step. * Right now, it's performed during REFRESH_TARGET step. 
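 * For illustration only (index names from this diff; the raw client calls
 * are assumptions): with the Elasticsearch JS client the two halves of this
 * trade-off look roughly like
 *
 *   await client.bulk({ index, body, refresh: false }); // every batch, fast
 *   await client.indices.refresh({ index });            // once, at REFRESH_TARGET
 *
 * so documents become searchable after a single refresh at the end instead
 * of after every bulk request.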
*/ - false - ), + refresh: false, + }), SET_TEMP_WRITE_BLOCK: (state: SetTempWriteBlock) => - Actions.setWriteBlock(client, state.tempIndex), + Actions.setWriteBlock({ client, index: state.tempIndex }), CLONE_TEMP_TO_TARGET: (state: CloneTempToSource) => - Actions.cloneIndex(client, state.tempIndex, state.targetIndex), - REFRESH_TARGET: (state: RefreshTarget) => Actions.refreshIndex(client, state.targetIndex), + Actions.cloneIndex({ client, source: state.tempIndex, target: state.targetIndex }), + REFRESH_TARGET: (state: RefreshTarget) => + Actions.refreshIndex({ client, targetIndex: state.targetIndex }), UPDATE_TARGET_MAPPINGS: (state: UpdateTargetMappingsState) => - Actions.updateAndPickupMappings(client, state.targetIndex, state.targetIndexMappings), + Actions.updateAndPickupMappings({ + client, + index: state.targetIndex, + mappings: state.targetIndexMappings, + }), UPDATE_TARGET_MAPPINGS_WAIT_FOR_TASK: (state: UpdateTargetMappingsWaitForTaskState) => - Actions.waitForPickupUpdatedMappingsTask(client, state.updateTargetMappingsTaskId, '60s'), + Actions.waitForPickupUpdatedMappingsTask({ + client, + taskId: state.updateTargetMappingsTaskId, + timeout: '60s', + }), OUTDATED_DOCUMENTS_SEARCH_OPEN_PIT: (state: OutdatedDocumentsSearchOpenPit) => - Actions.openPit(client, state.targetIndex), + Actions.openPit({ client, index: state.targetIndex }), OUTDATED_DOCUMENTS_SEARCH_READ: (state: OutdatedDocumentsSearchRead) => - Actions.readWithPit( + Actions.readWithPit({ client, - state.pitId, + pitId: state.pitId, // search for outdated documents only - state.outdatedDocumentsQuery, - state.batchSize, - state.lastHitSortValue - ), + query: state.outdatedDocumentsQuery, + batchSize: state.batchSize, + searchAfter: state.lastHitSortValue, + }), OUTDATED_DOCUMENTS_SEARCH_CLOSE_PIT: (state: OutdatedDocumentsSearchClosePit) => - Actions.closePit(client, state.pitId), + Actions.closePit({ client, pitId: state.pitId }), OUTDATED_DOCUMENTS_REFRESH: (state: OutdatedDocumentsRefresh) => - Actions.refreshIndex(client, state.targetIndex), + Actions.refreshIndex({ client, targetIndex: state.targetIndex }), OUTDATED_DOCUMENTS_TRANSFORM: (state: OutdatedDocumentsTransform) => - Actions.transformDocs(transformRawDocs, state.outdatedDocuments), + Actions.transformDocs({ transformRawDocs, outdatedDocuments: state.outdatedDocuments }), TRANSFORMED_DOCUMENTS_BULK_INDEX: (state: TransformedDocumentsBulkIndex) => - Actions.bulkOverwriteTransformedDocuments( + Actions.bulkOverwriteTransformedDocuments({ client, - state.targetIndex, - state.transformedDocs, + index: state.targetIndex, + transformedDocs: state.transformedDocs, /** * Since we don't run a search against the target index, we disable "refresh" to speed up * the migration process. @@ -137,29 +154,32 @@ export const nextActionMap = (client: ElasticsearchClient, transformRawDocs: Tra * before we reach out to the MARK_VERSION_INDEX_READY step. * Right now, it's performed during OUTDATED_DOCUMENTS_REFRESH step. 
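 * The read side that feeds these bulk steps pages through the source with a
 * point-in-time reader. A rough sketch of that loop, using the names from
 * this diff (the Either unwrapping is elided and the response field name is
 * an assumption based on `state.lastHitSortValue` above):
 *
 *   let searchAfter;
 *   do {
 *     const res = await readWithPit({ client, pitId, query, batchSize: 1000, searchAfter })();
 *     // ...transform and bulk-index res.right.outdatedDocuments here...
 *     searchAfter = res.right.lastHitSortValue;
 *   } while (searchAfter !== undefined);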
*/ - false - ), + }), MARK_VERSION_INDEX_READY: (state: MarkVersionIndexReady) => - Actions.updateAliases(client, state.versionIndexReadyActions.value), + Actions.updateAliases({ client, aliasActions: state.versionIndexReadyActions.value }), MARK_VERSION_INDEX_READY_CONFLICT: (state: MarkVersionIndexReadyConflict) => - Actions.fetchIndices(client, [state.currentAlias, state.versionAlias]), + Actions.fetchIndices({ client, indices: [state.currentAlias, state.versionAlias] }), LEGACY_SET_WRITE_BLOCK: (state: LegacySetWriteBlockState) => - Actions.setWriteBlock(client, state.legacyIndex), + Actions.setWriteBlock({ client, index: state.legacyIndex }), LEGACY_CREATE_REINDEX_TARGET: (state: LegacyCreateReindexTargetState) => - Actions.createIndex(client, state.sourceIndex.value, state.legacyReindexTargetMappings), + Actions.createIndex({ + client, + indexName: state.sourceIndex.value, + mappings: state.legacyReindexTargetMappings, + }), LEGACY_REINDEX: (state: LegacyReindexState) => - Actions.reindex( + Actions.reindex({ client, - state.legacyIndex, - state.sourceIndex.value, - state.preMigrationScript, - false, - state.unusedTypesQuery - ), + sourceIndex: state.legacyIndex, + targetIndex: state.sourceIndex.value, + reindexScript: state.preMigrationScript, + requireAlias: false, + unusedTypesQuery: state.unusedTypesQuery, + }), LEGACY_REINDEX_WAIT_FOR_TASK: (state: LegacyReindexWaitForTaskState) => - Actions.waitForReindexTask(client, state.legacyReindexTaskId, '60s'), + Actions.waitForReindexTask({ client, taskId: state.legacyReindexTaskId, timeout: '60s' }), LEGACY_DELETE: (state: LegacyDeleteState) => - Actions.updateAliases(client, state.legacyPreMigrationDoneActions), + Actions.updateAliases({ client, aliasActions: state.legacyPreMigrationDoneActions }), }; }; diff --git a/src/core/server/saved_objects/saved_objects_config.ts b/src/core/server/saved_objects/saved_objects_config.ts index 7182df74c597fa..c62d322f0bf8d9 100644 --- a/src/core/server/saved_objects/saved_objects_config.ts +++ b/src/core/server/saved_objects/saved_objects_config.ts @@ -29,6 +29,9 @@ const migrationDeprecations: ConfigDeprecationProvider = () => [ message: '"migrations.enableV2" is deprecated and will be removed in an upcoming release without any further notice.', documentationUrl: 'https://ela.st/kbn-so-migration-v2', + correctiveActions: { + manualSteps: [`Remove "migrations.enableV2" from your kibana configs.`], + }, }); } return settings; diff --git a/src/core/server/server.api.md b/src/core/server/server.api.md index 7f108dbeb0086f..379e4147ae024c 100644 --- a/src/core/server/server.api.md +++ b/src/core/server/server.api.md @@ -872,8 +872,9 @@ export interface DeprecationsDetails { [key: string]: any; }; }; - manualSteps?: string[]; + manualSteps: string[]; }; + deprecationType?: 'config' | 'feature'; // (undocumented) documentationUrl?: string; level: 'warning' | 'critical' | 'fetch_error'; diff --git a/src/core/server/server.ts b/src/core/server/server.ts index 4d99368f9bf708..a31b9a061ac5d5 100644 --- a/src/core/server/server.ts +++ b/src/core/server/server.ts @@ -193,8 +193,6 @@ export class Server { const deprecationsSetup = this.deprecations.setup({ http: httpSetup, - elasticsearch: elasticsearchServiceSetup, - coreUsageData: coreUsageDataSetup, }); const coreSetup: InternalCoreSetup = { diff --git a/src/dev/build/lib/version_info.ts b/src/dev/build/lib/version_info.ts index 01066cdeffc4cf..6be34a54e24f8c 100644 --- a/src/dev/build/lib/version_info.ts +++ b/src/dev/build/lib/version_info.ts @@ -28,7 
+28,10 @@ export async function getVersionInfo({ isRelease, versionQualifier, pkg }: Optio ); return { - buildSha: (await execa('git', ['rev-parse', 'HEAD'])).stdout, + buildSha: + process.env.GIT_COMMIT || + process.env.BUILDKITE_COMMIT || + (await execa('git', ['rev-parse', 'HEAD'])).stdout, buildVersion, buildNumber: await getBuildNumber(), }; diff --git a/src/dev/precommit_hook/casing_check_config.js b/src/dev/precommit_hook/casing_check_config.js index d523f78a9f5896..ba18c085b649d0 100644 --- a/src/dev/precommit_hook/casing_check_config.js +++ b/src/dev/precommit_hook/casing_check_config.js @@ -41,6 +41,7 @@ export const IGNORE_FILE_GLOBS = [ '.ci/pipeline-library/**/*', 'packages/kbn-test/jest-preset.js', 'test/package/Vagrantfile', + '**/test/**/fixtures/**/*', // filename must match language code which requires capital letters '**/translations/*.json', @@ -61,12 +62,13 @@ export const IGNORE_FILE_GLOBS = [ 'x-pack/plugins/apm/e2e/**/*', 'x-pack/plugins/maps/server/fonts/**/*', - // packages for the ingest manager's api integration tests could be valid semver which has dashes - 'x-pack/test/fleet_api_integration/apis/fixtures/test_packages/**/*', // Bazel default files '**/WORKSPACE.bazel', '**/BUILD.bazel', + + // Buildkite + '.buildkite/hooks/*', ]; /** @@ -95,7 +97,6 @@ export const IGNORE_DIRECTORY_GLOBS = [ ...KEBAB_CASE_DIRECTORY_GLOBS, 'src/babel-*', 'packages/*', - 'test/functional/fixtures/es_archiver/visualize_source-filters', 'packages/kbn-pm/src/utils/__fixtures__/*', 'x-pack/dev-tools', 'packages/kbn-optimizer/src/__fixtures__/mock_repo/x-pack', diff --git a/src/plugins/apm_oss/public/index.ts b/src/plugins/apm_oss/public/index.ts index d5fcabbe146a96..fea8ac4a8a1e42 100644 --- a/src/plugins/apm_oss/public/index.ts +++ b/src/plugins/apm_oss/public/index.ts @@ -14,5 +14,3 @@ export function plugin() { return new ApmOssPlugin(); } export { ApmOssPluginSetup, ApmOssPluginStart } from './types'; - -export { APM_STATIC_INDEX_PATTERN_ID } from '../common/index_pattern_constants'; diff --git a/src/plugins/apm_oss/server/index.ts b/src/plugins/apm_oss/server/index.ts index a02e28201a1b90..1424cb1c7126fb 100644 --- a/src/plugins/apm_oss/server/index.ts +++ b/src/plugins/apm_oss/server/index.ts @@ -7,7 +7,6 @@ */ import { schema, TypeOf } from '@kbn/config-schema'; -import apmIndexPattern from './tutorial/index_pattern.json'; import { PluginInitializerContext } from '../../../core/server'; import { APMOSSPlugin } from './plugin'; @@ -32,20 +31,3 @@ export function plugin(initializerContext: PluginInitializerContext) { export type APMOSSConfig = TypeOf; export { APMOSSPluginSetup } from './plugin'; - -export { apmIndexPattern }; - -export { APM_STATIC_INDEX_PATTERN_ID } from '../common/index_pattern_constants'; - -export { - createNodeAgentInstructions, - createDjangoAgentInstructions, - createFlaskAgentInstructions, - createRailsAgentInstructions, - createRackAgentInstructions, - createJsAgentInstructions, - createGoAgentInstructions, - createJavaAgentInstructions, - createDotNetAgentInstructions, - createPhpAgentInstructions, -} from './tutorial/instructions/apm_agent_instructions'; diff --git a/src/plugins/apm_oss/server/plugin.ts b/src/plugins/apm_oss/server/plugin.ts index e504d5f0b9a9fa..02a8ac38be2a39 100644 --- a/src/plugins/apm_oss/server/plugin.ts +++ b/src/plugins/apm_oss/server/plugin.ts @@ -6,38 +6,18 @@ * Side Public License, v 1. 
*/ -import { Plugin, CoreSetup, PluginInitializerContext } from 'src/core/server'; import { Observable } from 'rxjs'; +import { Plugin, PluginInitializerContext } from 'src/core/server'; import { APMOSSConfig } from './'; -import { HomeServerPluginSetup, TutorialProvider } from '../../home/server'; -import { tutorialProvider } from './tutorial'; export class APMOSSPlugin implements Plugin { constructor(private readonly initContext: PluginInitializerContext) { this.initContext = initContext; } - public setup(core: CoreSetup, plugins: { home: HomeServerPluginSetup }) { + public setup() { const config$ = this.initContext.config.create(); - const config = this.initContext.config.get(); - - const apmTutorialProvider = tutorialProvider({ - indexPatternTitle: config.indexPattern, - indices: { - errorIndices: config.errorIndices, - metricsIndices: config.metricsIndices, - onboardingIndices: config.onboardingIndices, - sourcemapIndices: config.sourcemapIndices, - transactionIndices: config.transactionIndices, - }, - }); - plugins.home.tutorials.registerTutorial(apmTutorialProvider); - - return { - config, - config$, - getRegisteredTutorialProvider: () => apmTutorialProvider, - }; + return { config, config$ }; } start() {} @@ -47,5 +27,4 @@ export class APMOSSPlugin implements Plugin { export interface APMOSSPluginSetup { config: APMOSSConfig; config$: Observable; - getRegisteredTutorialProvider(): TutorialProvider; } diff --git a/src/plugins/apm_oss/server/tutorial/index.ts b/src/plugins/apm_oss/server/tutorial/index.ts deleted file mode 100644 index ce7fec406e7ac0..00000000000000 --- a/src/plugins/apm_oss/server/tutorial/index.ts +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -import { i18n } from '@kbn/i18n'; -import { onPremInstructions } from './envs/on_prem'; -import apmIndexPattern from './index_pattern.json'; -import { ArtifactsSchema, TutorialsCategory } from '../../../../../src/plugins/home/server'; -import { APM_STATIC_INDEX_PATTERN_ID } from '../../common/index_pattern_constants'; - -const apmIntro = i18n.translate('apmOss.tutorial.introduction', { - defaultMessage: 'Collect in-depth performance metrics and errors from inside your applications.', -}); -const moduleName = 'apm'; - -export const tutorialProvider = ({ - indexPatternTitle, - indices, -}: { - indexPatternTitle: string; - indices: { - errorIndices: string; - transactionIndices: string; - metricsIndices: string; - sourcemapIndices: string; - onboardingIndices: string; - }; -}) => () => { - const savedObjects = [ - { - ...apmIndexPattern, - id: APM_STATIC_INDEX_PATTERN_ID, - attributes: { - ...apmIndexPattern.attributes, - title: indexPatternTitle, - }, - }, - ]; - - const artifacts: ArtifactsSchema = { - dashboards: [ - { - id: '8d3ed660-7828-11e7-8c47-65b845b5cfb3', - linkLabel: i18n.translate('apmOss.tutorial.specProvider.artifacts.dashboards.linkLabel', { - defaultMessage: 'APM dashboard', - }), - isOverview: true, - }, - ], - }; - - return { - id: 'apm', - name: i18n.translate('apmOss.tutorial.specProvider.name', { - defaultMessage: 'APM', - }), - moduleName, - category: TutorialsCategory.OTHER, - shortDescription: apmIntro, - longDescription: i18n.translate('apmOss.tutorial.specProvider.longDescription', { - defaultMessage: - 'Application Performance Monitoring (APM) collects in-depth \ -performance metrics and errors from inside your application. \ -It allows you to monitor the performance of thousands of applications in real time. \ -[Learn more]({learnMoreLink}).', - values: { - learnMoreLink: - '{config.docs.base_url}guide/en/apm/get-started/{config.docs.version}/index.html', - }, - }), - euiIconType: 'apmApp', - artifacts, - onPrem: onPremInstructions(indices), - previewImagePath: '/plugins/apmOss/assets/apm.png', - savedObjects, - savedObjectsInstallMsg: i18n.translate('apmOss.tutorial.specProvider.savedObjectsInstallMsg', { - defaultMessage: 'An APM index pattern is required for some features in the APM UI.', - }), - }; -}; diff --git a/src/plugins/apm_oss/server/tutorial/instructions/apm_agent_instructions.ts b/src/plugins/apm_oss/server/tutorial/instructions/apm_agent_instructions.ts deleted file mode 100644 index ba2b062870cf68..00000000000000 --- a/src/plugins/apm_oss/server/tutorial/instructions/apm_agent_instructions.ts +++ /dev/null @@ -1,754 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -import { i18n } from '@kbn/i18n'; - -export const createNodeAgentInstructions = (apmServerUrl = '', secretToken = '') => [ - { - title: i18n.translate('apmOss.tutorial.nodeClient.install.title', { - defaultMessage: 'Install the APM agent', - }), - textPre: i18n.translate('apmOss.tutorial.nodeClient.install.textPre', { - defaultMessage: 'Install the APM agent for Node.js as a dependency to your application.', - }), - commands: ['npm install elastic-apm-node --save'], - }, - { - title: i18n.translate('apmOss.tutorial.nodeClient.configure.title', { - defaultMessage: 'Configure the agent', - }), - textPre: i18n.translate('apmOss.tutorial.nodeClient.configure.textPre', { - defaultMessage: - 'Agents are libraries that run inside of your application process. \ -APM services are created programmatically based on the `serviceName`. \ -This agent supports a variety of frameworks but can also be used with your custom stack.', - }), - commands: `// ${i18n.translate( - 'apmOss.tutorial.nodeClient.configure.commands.addThisToTheFileTopComment', - { - defaultMessage: 'Add this to the VERY top of the first file loaded in your app', - } - )} -var apm = require('elastic-apm-node').start({curlyOpen} - - // ${i18n.translate( - 'apmOss.tutorial.nodeClient.configure.commands.setRequiredServiceNameComment', - { - defaultMessage: 'Override the service name from package.json', - } - )} - // ${i18n.translate('apmOss.tutorial.nodeClient.configure.commands.allowedCharactersComment', { - defaultMessage: 'Allowed characters: a-z, A-Z, 0-9, -, _, and space', - })} - serviceName: '', - - // ${i18n.translate( - 'apmOss.tutorial.nodeClient.configure.commands.useIfApmRequiresTokenComment', - { - defaultMessage: 'Use if APM Server requires a secret token', - } - )} - secretToken: '${secretToken}', - - // ${i18n.translate( - 'apmOss.tutorial.nodeClient.configure.commands.setCustomApmServerUrlComment', - { - defaultMessage: 'Set the custom APM Server URL (default: {defaultApmServerUrl})', - values: { defaultApmServerUrl: 'http://localhost:8200' }, - } - )} - serverUrl: '${apmServerUrl}', - - // ${i18n.translate( - 'apmOss.tutorial.nodeClient.configure.commands.setCustomServiceEnvironmentComment', - { - defaultMessage: 'Set the service environment', - } - )} - environment: 'production' -{curlyClose})`.split('\n'), - textPost: i18n.translate('apmOss.tutorial.nodeClient.configure.textPost', { - defaultMessage: - 'See [the documentation]({documentationLink}) for advanced usage, including how to use with \ -[Babel/ES Modules]({babelEsModulesLink}).', - values: { - documentationLink: '{config.docs.base_url}guide/en/apm/agent/nodejs/current/index.html', - babelEsModulesLink: - '{config.docs.base_url}guide/en/apm/agent/nodejs/current/advanced-setup.html#es-modules', - }, - }), - }, -]; - -export const createDjangoAgentInstructions = (apmServerUrl = '', secretToken = '') => [ - { - title: i18n.translate('apmOss.tutorial.djangoClient.install.title', { - defaultMessage: 'Install the APM agent', - }), - textPre: i18n.translate('apmOss.tutorial.djangoClient.install.textPre', { - defaultMessage: 'Install the APM agent for Python as a dependency.', - }), - commands: ['$ pip install elastic-apm'], - }, - { - title: i18n.translate('apmOss.tutorial.djangoClient.configure.title', { - defaultMessage: 'Configure the agent', - }), - textPre: i18n.translate('apmOss.tutorial.djangoClient.configure.textPre', { - defaultMessage: - 'Agents are libraries that run inside of your application process. 
\ -APM services are created programmatically based on the `SERVICE_NAME`.', - }), - commands: `# ${i18n.translate( - 'apmOss.tutorial.djangoClient.configure.commands.addAgentComment', - { - defaultMessage: 'Add the agent to the installed apps', - } - )} -INSTALLED_APPS = ( - 'elasticapm.contrib.django', - # ... -) - -ELASTIC_APM = {curlyOpen} - # ${i18n.translate( - 'apmOss.tutorial.djangoClient.configure.commands.setRequiredServiceNameComment', - { - defaultMessage: 'Set the required service name. Allowed characters:', - } - )} - # ${i18n.translate('apmOss.tutorial.djangoClient.configure.commands.allowedCharactersComment', { - defaultMessage: 'a-z, A-Z, 0-9, -, _, and space', - })} - 'SERVICE_NAME': '', - - # ${i18n.translate( - 'apmOss.tutorial.djangoClient.configure.commands.useIfApmServerRequiresTokenComment', - { - defaultMessage: 'Use if APM Server requires a secret token', - } - )} - 'SECRET_TOKEN': '${secretToken}', - - # ${i18n.translate( - 'apmOss.tutorial.djangoClient.configure.commands.setCustomApmServerUrlComment', - { - defaultMessage: 'Set the custom APM Server URL (default: {defaultApmServerUrl})', - values: { defaultApmServerUrl: 'http://localhost:8200' }, - } - )} - 'SERVER_URL': '${apmServerUrl}', - - # ${i18n.translate( - 'apmOss.tutorial.djangoClient.configure.commands.setServiceEnvironmentComment', - { - defaultMessage: 'Set the service environment', - } - )} - 'ENVIRONMENT': 'production', -{curlyClose} - -# ${i18n.translate('apmOss.tutorial.djangoClient.configure.commands.addTracingMiddlewareComment', { - defaultMessage: 'To send performance metrics, add our tracing middleware:', - })} -MIDDLEWARE = ( - 'elasticapm.contrib.django.middleware.TracingMiddleware', - #... -)`.split('\n'), - textPost: i18n.translate('apmOss.tutorial.djangoClient.configure.textPost', { - defaultMessage: 'See the [documentation]({documentationLink}) for advanced usage.', - values: { - documentationLink: - '{config.docs.base_url}guide/en/apm/agent/python/current/django-support.html', - }, - }), - }, -]; - -export const createFlaskAgentInstructions = (apmServerUrl = '', secretToken = '') => [ - { - title: i18n.translate('apmOss.tutorial.flaskClient.install.title', { - defaultMessage: 'Install the APM agent', - }), - textPre: i18n.translate('apmOss.tutorial.flaskClient.install.textPre', { - defaultMessage: 'Install the APM agent for Python as a dependency.', - }), - commands: ['$ pip install elastic-apm[flask]'], - }, - { - title: i18n.translate('apmOss.tutorial.flaskClient.configure.title', { - defaultMessage: 'Configure the agent', - }), - textPre: i18n.translate('apmOss.tutorial.flaskClient.configure.textPre', { - defaultMessage: - 'Agents are libraries that run inside of your application process. 
\ -APM services are created programmatically based on the `SERVICE_NAME`.', - }), - commands: `# ${i18n.translate( - 'apmOss.tutorial.flaskClient.configure.commands.initializeUsingEnvironmentVariablesComment', - { - defaultMessage: 'initialize using environment variables', - } - )} -from elasticapm.contrib.flask import ElasticAPM -app = Flask(__name__) -apm = ElasticAPM(app) - -# ${i18n.translate('apmOss.tutorial.flaskClient.configure.commands.configureElasticApmComment', { - defaultMessage: "or configure to use ELASTIC_APM in your application's settings", - })} -from elasticapm.contrib.flask import ElasticAPM -app.config['ELASTIC_APM'] = {curlyOpen} - # ${i18n.translate( - 'apmOss.tutorial.flaskClient.configure.commands.setRequiredServiceNameComment', - { - defaultMessage: 'Set the required service name. Allowed characters:', - } - )} - # ${i18n.translate('apmOss.tutorial.flaskClient.configure.commands.allowedCharactersComment', { - defaultMessage: 'a-z, A-Z, 0-9, -, _, and space', - })} - 'SERVICE_NAME': '', - - # ${i18n.translate( - 'apmOss.tutorial.flaskClient.configure.commands.useIfApmServerRequiresTokenComment', - { - defaultMessage: 'Use if APM Server requires a secret token', - } - )} - 'SECRET_TOKEN': '${secretToken}', - - # ${i18n.translate( - 'apmOss.tutorial.flaskClient.configure.commands.setCustomApmServerUrlComment', - { - defaultMessage: 'Set the custom APM Server URL (default: {defaultApmServerUrl})', - values: { defaultApmServerUrl: 'http://localhost:8200' }, - } - )} - 'SERVER_URL': '${apmServerUrl}', - - # ${i18n.translate( - 'apmOss.tutorial.flaskClient.configure.commands.setServiceEnvironmentComment', - { - defaultMessage: 'Set the service environment', - } - )} - 'ENVIRONMENT': 'production', -{curlyClose} - -apm = ElasticAPM(app)`.split('\n'), - textPost: i18n.translate('apmOss.tutorial.flaskClient.configure.textPost', { - defaultMessage: 'See the [documentation]({documentationLink}) for advanced usage.', - values: { - documentationLink: - '{config.docs.base_url}guide/en/apm/agent/python/current/flask-support.html', - }, - }), - }, -]; - -export const createRailsAgentInstructions = (apmServerUrl = '', secretToken = '') => [ - { - title: i18n.translate('apmOss.tutorial.railsClient.install.title', { - defaultMessage: 'Install the APM agent', - }), - textPre: i18n.translate('apmOss.tutorial.railsClient.install.textPre', { - defaultMessage: 'Add the agent to your Gemfile.', - }), - commands: [`gem 'elastic-apm'`], - }, - { - title: i18n.translate('apmOss.tutorial.railsClient.configure.title', { - defaultMessage: 'Configure the agent', - }), - textPre: i18n.translate('apmOss.tutorial.railsClient.configure.textPre', { - defaultMessage: - 'APM is automatically started when your app boots. 
Configure the agent, by creating the config file {configFile}', - values: { configFile: '`config/elastic_apm.yml`' }, - }), - commands: `# config/elastic_apm.yml: - -# Set the service name - allowed characters: a-z, A-Z, 0-9, -, _ and space -# Defaults to the name of your Rails app -service_name: 'my-service' - -# Use if APM Server requires a secret token -secret_token: '${secretToken}' - -# Set the custom APM Server URL (default: http://localhost:8200) -server_url: '${apmServerUrl || 'http://localhost:8200'}' - -# Set the service environment -environment: 'production'`.split('\n'), - textPost: i18n.translate('apmOss.tutorial.railsClient.configure.textPost', { - defaultMessage: - 'See the [documentation]({documentationLink}) for configuration options and advanced usage.\n\n', - values: { - documentationLink: '{config.docs.base_url}guide/en/apm/agent/ruby/current/index.html', - }, - }), - }, -]; - -export const createRackAgentInstructions = (apmServerUrl = '', secretToken = '') => [ - { - title: i18n.translate('apmOss.tutorial.rackClient.install.title', { - defaultMessage: 'Install the APM agent', - }), - textPre: i18n.translate('apmOss.tutorial.rackClient.install.textPre', { - defaultMessage: 'Add the agent to your Gemfile.', - }), - commands: [`gem 'elastic-apm'`], - }, - { - title: i18n.translate('apmOss.tutorial.rackClient.configure.title', { - defaultMessage: 'Configure the agent', - }), - textPre: i18n.translate('apmOss.tutorial.rackClient.configure.textPre', { - defaultMessage: - 'For Rack or a compatible framework (e.g. Sinatra), include the middleware in your app and start the agent.', - }), - commands: `# config.ru - require 'sinatra/base' - - class MySinatraApp < Sinatra::Base - use ElasticAPM::Middleware - - # ... - end - - ElasticAPM.start( - app: MySinatraApp, # ${i18n.translate( - 'apmOss.tutorial.rackClient.configure.commands.requiredComment', - { - defaultMessage: 'required', - } - )} - config_file: '' # ${i18n.translate( - 'apmOss.tutorial.rackClient.configure.commands.optionalComment', - { - defaultMessage: 'optional, defaults to config/elastic_apm.yml', - } - )} - ) - - run MySinatraApp - - at_exit {curlyOpen} ElasticAPM.stop {curlyClose}`.split('\n'), - }, - { - title: i18n.translate('apmOss.tutorial.rackClient.createConfig.title', { - defaultMessage: 'Create config file', - }), - textPre: i18n.translate('apmOss.tutorial.rackClient.createConfig.textPre', { - defaultMessage: 'Create a config file {configFile}:', - values: { configFile: '`config/elastic_apm.yml`' }, - }), - commands: `# config/elastic_apm.yml: - -# ${i18n.translate('apmOss.tutorial.rackClient.createConfig.commands.setServiceNameComment', { - defaultMessage: 'Set the service name - allowed characters: a-z, A-Z, 0-9, -, _ and space', - })} -# ${i18n.translate( - 'apmOss.tutorial.rackClient.createConfig.commands.defaultsToTheNameOfRackAppClassComment', - { - defaultMessage: "Defaults to the name of your Rack app's class.", - } - )} -service_name: 'my-service' - -# ${i18n.translate( - 'apmOss.tutorial.rackClient.createConfig.commands.useIfApmServerRequiresTokenComment', - { - defaultMessage: 'Use if APM Server requires a token', - } - )} -secret_token: '${secretToken}' - -# ${i18n.translate('apmOss.tutorial.rackClient.createConfig.commands.setCustomApmServerComment', { - defaultMessage: 'Set custom APM Server URL (default: {defaultServerUrl})', - values: { defaultServerUrl: 'http://localhost:8200' }, - })} -server_url: '${apmServerUrl || 'http://localhost:8200'}', - -# 
${i18n.translate('apmOss.tutorial.rackClient.createConfig.commands.setServiceEnvironment', { - defaultMessage: 'Set the service environment', - })} -environment: 'production'`.split('\n'), - textPost: i18n.translate('apmOss.tutorial.rackClient.createConfig.textPost', { - defaultMessage: - 'See the [documentation]({documentationLink}) for configuration options and advanced usage.\n\n', - values: { - documentationLink: '{config.docs.base_url}guide/en/apm/agent/ruby/current/index.html', - }, - }), - }, -]; - -export const createJsAgentInstructions = (apmServerUrl = '') => [ - { - title: i18n.translate('apmOss.tutorial.jsClient.enableRealUserMonitoring.title', { - defaultMessage: 'Enable Real User Monitoring support in APM Server', - }), - textPre: i18n.translate('apmOss.tutorial.jsClient.enableRealUserMonitoring.textPre', { - defaultMessage: - 'APM Server disables RUM support by default. See the [documentation]({documentationLink}) \ -for details on how to enable RUM support.', - values: { - documentationLink: - '{config.docs.base_url}guide/en/apm/server/{config.docs.version}/configuration-rum.html', - }, - }), - }, - { - title: i18n.translate('apmOss.tutorial.jsClient.installDependency.title', { - defaultMessage: 'Set up the Agent as a dependency', - }), - textPre: i18n.translate('apmOss.tutorial.jsClient.installDependency.textPre', { - defaultMessage: - 'You can install the Agent as a dependency to your application with \ -`npm install @elastic/apm-rum --save`.\n\n\ -The Agent can then be initialized and configured in your application like this:', - }), - commands: `import {curlyOpen} init as initApm {curlyClose} from '@elastic/apm-rum' -var apm = initApm({curlyOpen} - - // ${i18n.translate( - 'apmOss.tutorial.jsClient.installDependency.commands.setRequiredServiceNameComment', - { - defaultMessage: - 'Set required service name (allowed characters: a-z, A-Z, 0-9, -, _, and space)', - } - )} - serviceName: 'your-app-name', - - // ${i18n.translate( - 'apmOss.tutorial.jsClient.installDependency.commands.setCustomApmServerUrlComment', - { - defaultMessage: 'Set custom APM Server URL (default: {defaultApmServerUrl})', - values: { defaultApmServerUrl: 'http://localhost:8200' }, - } - )} - serverUrl: '${apmServerUrl}', - - // ${i18n.translate( - 'apmOss.tutorial.jsClient.installDependency.commands.setServiceVersionComment', - { - defaultMessage: 'Set the service version (required for source map feature)', - } - )} - serviceVersion: '', - - // ${i18n.translate( - 'apmOss.tutorial.jsClient.installDependency.commands.setServiceEnvironmentComment', - { - defaultMessage: 'Set the service environment', - } - )} - environment: 'production' -{curlyClose})`.split('\n'), - textPost: i18n.translate('apmOss.tutorial.jsClient.installDependency.textPost', { - defaultMessage: - 'Framework integrations, like React or Angular, have custom dependencies. \ -See the [integration documentation]({docLink}) for more information.', - values: { - docLink: - '{config.docs.base_url}guide/en/apm/agent/rum-js/current/framework-integrations.html', - }, - }), - }, - { - title: i18n.translate('apmOss.tutorial.jsClient.scriptTags.title', { - defaultMessage: 'Set up the Agent with Script Tags', - }), - textPre: i18n.translate('apmOss.tutorial.jsClient.scriptTags.textPre', { - defaultMessage: - "Alternatively, you can use Script tags to set up and configure the Agent. 
\ -Add a ` - -`.split('\n'), - }, -]; - -export const createGoAgentInstructions = (apmServerUrl = '', secretToken = '') => [ - { - title: i18n.translate('apmOss.tutorial.goClient.install.title', { - defaultMessage: 'Install the APM agent', - }), - textPre: i18n.translate('apmOss.tutorial.goClient.install.textPre', { - defaultMessage: 'Install the APM agent packages for Go.', - }), - commands: ['go get go.elastic.co/apm'], - }, - { - title: i18n.translate('apmOss.tutorial.goClient.configure.title', { - defaultMessage: 'Configure the agent', - }), - textPre: i18n.translate('apmOss.tutorial.goClient.configure.textPre', { - defaultMessage: - 'Agents are libraries that run inside of your application process. \ -APM services are created programmatically based on the executable \ -file name, or the `ELASTIC_APM_SERVICE_NAME` environment variable.', - }), - commands: `# ${i18n.translate( - 'apmOss.tutorial.goClient.configure.commands.initializeUsingEnvironmentVariablesComment', - { - defaultMessage: 'Initialize using environment variables:', - } - )} - -# ${i18n.translate('apmOss.tutorial.goClient.configure.commands.setServiceNameComment', { - defaultMessage: 'Set the service name. Allowed characters: # a-z, A-Z, 0-9, -, _, and space.', - })} -# ${i18n.translate('apmOss.tutorial.goClient.configure.commands.usedExecutableNameComment', { - defaultMessage: - 'If ELASTIC_APM_SERVICE_NAME is not specified, the executable name will be used.', - })} -export ELASTIC_APM_SERVICE_NAME= - -# ${i18n.translate('apmOss.tutorial.goClient.configure.commands.setCustomApmServerUrlComment', { - defaultMessage: 'Set custom APM Server URL (default: {defaultApmServerUrl})', - values: { defaultApmServerUrl: 'http://localhost:8200' }, - })} -export ELASTIC_APM_SERVER_URL=${apmServerUrl} - -# ${i18n.translate('apmOss.tutorial.goClient.configure.commands.useIfApmRequiresTokenComment', { - defaultMessage: 'Use if APM Server requires a secret token', - })} -export ELASTIC_APM_SECRET_TOKEN=${secretToken} - -# ${i18n.translate('apmOss.tutorial.goClient.configure.commands.setServiceEnvironment', { - defaultMessage: 'Set the service environment', - })} -export ELASTIC_APM_ENVIRONMENT= -`.split('\n'), - textPost: i18n.translate('apmOss.tutorial.goClient.configure.textPost', { - defaultMessage: 'See the [documentation]({documentationLink}) for advanced configuration.', - values: { - documentationLink: '{config.docs.base_url}guide/en/apm/agent/go/current/configuration.html', - }, - }), - }, - { - title: i18n.translate('apmOss.tutorial.goClient.instrument.title', { - defaultMessage: 'Instrument your application', - }), - textPre: i18n.translate('apmOss.tutorial.goClient.instrument.textPre', { - defaultMessage: - 'Instrument your Go application by using one of the provided instrumentation modules or \ -by using the tracer API directly.', - }), - commands: `\ -import ( - "net/http" - - "go.elastic.co/apm/module/apmhttp" -) - -func main() {curlyOpen} - mux := http.NewServeMux() - ... 
- http.ListenAndServe(":8080", apmhttp.Wrap(mux)) -{curlyClose} -`.split('\n'), - textPost: i18n.translate('apmOss.tutorial.goClient.instrument.textPost', { - defaultMessage: - 'See the [documentation]({documentationLink}) for a detailed \ -guide to instrumenting Go source code.', - values: { - documentationLink: - '{config.docs.base_url}guide/en/apm/agent/go/current/instrumenting-source.html', - }, - }), - }, -]; - -export const createJavaAgentInstructions = (apmServerUrl = '', secretToken = '') => [ - { - title: i18n.translate('apmOss.tutorial.javaClient.download.title', { - defaultMessage: 'Download the APM agent', - }), - textPre: i18n.translate('apmOss.tutorial.javaClient.download.textPre', { - defaultMessage: - 'Download the agent jar from [Maven Central]({mavenCentralLink}). \ -Do **not** add the agent as a dependency to your application.', - values: { - mavenCentralLink: 'http://search.maven.org/#search%7Cga%7C1%7Ca%3Aelastic-apm-agent', - }, - }), - }, - { - title: i18n.translate('apmOss.tutorial.javaClient.startApplication.title', { - defaultMessage: 'Start your application with the javaagent flag', - }), - textPre: i18n.translate('apmOss.tutorial.javaClient.startApplication.textPre', { - defaultMessage: - 'Add the `-javaagent` flag and configure the agent with system properties.\n\n \ -* Set the required service name (allowed characters: a-z, A-Z, 0-9, -, _, and space)\n \ -* Set the custom APM Server URL (default: {customApmServerUrl})\n \ -* Set the APM Server secret token\n \ -* Set the service environment\n \ -* Set the base package of your application', - values: { customApmServerUrl: 'http://localhost:8200' }, - }), - commands: `java -javaagent:/path/to/elastic-apm-agent-.jar \\ - -Delastic.apm.service_name=my-application \\ - -Delastic.apm.server_urls=${apmServerUrl || 'http://localhost:8200'} \\ - -Delastic.apm.secret_token=${secretToken} \\ - -Delastic.apm.environment=production \\ - -Delastic.apm.application_packages=org.example \\ - -jar my-application.jar`.split('\n'), - textPost: i18n.translate('apmOss.tutorial.javaClient.startApplication.textPost', { - defaultMessage: - 'See the [documentation]({documentationLink}) for configuration options and advanced \ -usage.', - values: { - documentationLink: '{config.docs.base_url}guide/en/apm/agent/java/current/index.html', - }, - }), - }, -]; - -export const createDotNetAgentInstructions = (apmServerUrl = '', secretToken = '') => [ - { - title: i18n.translate('apmOss.tutorial.dotNetClient.download.title', { - defaultMessage: 'Download the APM agent', - }), - textPre: i18n.translate('apmOss.tutorial.dotNetClient.download.textPre', { - defaultMessage: - 'Add the the agent package(s) from [NuGet]({allNuGetPackagesLink}) to your .NET application. There are multiple \ - NuGet packages available for different use cases. \n\nFor an ASP.NET Core application with Entity Framework \ - Core download the [Elastic.Apm.NetCoreAll]({netCoreAllApmPackageLink}) package. This package will automatically add every \ - agent component to your application. \n\n In case you would like to minimize the dependencies, you can use the \ - [Elastic.Apm.AspNetCore]({aspNetCorePackageLink}) package for just \ - ASP.NET Core monitoring or the [Elastic.Apm.EfCore]({efCorePackageLink}) package for just Entity Framework Core monitoring. 
\n\n \ - In case you only want to use the public Agent API for manual instrumentation use the [Elastic.Apm]({elasticApmPackageLink}) package.', - values: { - allNuGetPackagesLink: 'https://www.nuget.org/packages?q=Elastic.apm', - netCoreAllApmPackageLink: 'https://www.nuget.org/packages/Elastic.Apm.NetCoreAll', - aspNetCorePackageLink: 'https://www.nuget.org/packages/Elastic.Apm.AspNetCore', - efCorePackageLink: 'https://www.nuget.org/packages/Elastic.Apm.EntityFrameworkCore', - elasticApmPackageLink: 'https://www.nuget.org/packages/Elastic.Apm', - }, - }), - }, - { - title: i18n.translate('apmOss.tutorial.dotNetClient.configureApplication.title', { - defaultMessage: 'Add the agent to the application', - }), - textPre: i18n.translate('apmOss.tutorial.dotNetClient.configureApplication.textPre', { - defaultMessage: - 'In case of ASP.NET Core with the `Elastic.Apm.NetCoreAll` package, call the `UseAllElasticApm` \ - method in the `Configure` method within the `Startup.cs` file.', - }), - commands: `public class Startup -{curlyOpen} - public void Configure(IApplicationBuilder app, IHostingEnvironment env) - {curlyOpen} - app.UseAllElasticApm(Configuration); - //…rest of the method - {curlyClose} - //…rest of the class -{curlyClose}`.split('\n'), - textPost: i18n.translate('apmOss.tutorial.dotNetClient.configureApplication.textPost', { - defaultMessage: - 'Passing an `IConfiguration` instance is optional and by doing so, the agent will read config settings through this \ - `IConfiguration` instance (e.g. from the `appsettings.json` file).', - }), - }, - { - title: i18n.translate('apmOss.tutorial.dotNetClient.configureAgent.title', { - defaultMessage: 'Sample appsettings.json file:', - }), - commands: `{curlyOpen} - "ElasticApm": {curlyOpen} - "SecretToken": "${secretToken}", - "ServerUrls": "${ - apmServerUrl || 'http://localhost:8200' - }", //Set custom APM Server URL (default: http://localhost:8200) - "ServiceName": "MyApp", //allowed characters: a-z, A-Z, 0-9, -, _, and space. Default is the entry assembly of the application - "Environment": "production", // Set the service environment - {curlyClose} -{curlyClose}`.split('\n'), - textPost: i18n.translate('apmOss.tutorial.dotNetClient.configureAgent.textPost', { - defaultMessage: - 'In case you don’t pass an `IConfiguration` instance to the agent (e.g. in case of non ASP.NET Core applications) \ - you can also configure the agent through environment variables. 
\n \ - See [the documentation]({documentationLink}) for advanced usage.', - values: { - documentationLink: - '{config.docs.base_url}guide/en/apm/agent/dotnet/current/configuration.html', - }, - }), - }, -]; - -export const createPhpAgentInstructions = (apmServerUrl = '', secretToken = '') => [ - { - title: i18n.translate('apmOss.tutorial.phpClient.download.title', { - defaultMessage: 'Download the APM agent', - }), - textPre: i18n.translate('apmOss.tutorial.phpClient.download.textPre', { - defaultMessage: - 'Download the package corresponding to your platform from [GitHub releases]({githubReleasesLink}).', - values: { - githubReleasesLink: 'https://github.com/elastic/apm-agent-php/releases', - }, - }), - }, - { - title: i18n.translate('apmOss.tutorial.phpClient.installPackage.title', { - defaultMessage: 'Install the downloaded package', - }), - textPre: i18n.translate('apmOss.tutorial.phpClient.installPackage.textPre', { - defaultMessage: 'For example on Alpine Linux using APK package:', - }), - commands: ['apk add --allow-untrusted .apk'], - textPost: i18n.translate('apmOss.tutorial.phpClient.installPackage.textPost', { - defaultMessage: - 'See the [documentation]({documentationLink}) for installation commands on other supported platforms and advanced installation.', - values: { - documentationLink: '{config.docs.base_url}guide/en/apm/agent/php/current/setup.html', - }, - }), - }, - { - title: i18n.translate('apmOss.tutorial.phpClient.configureAgent.title', { - defaultMessage: 'Configure the agent', - }), - textPre: i18n.translate('apmOss.tutorial.phpClient.configureAgent.textPre', { - defaultMessage: - 'APM is automatically started when your app boots. Configure the agent either via `php.ini` file:', - }), - commands: `elastic_apm.server_url=http://localhost:8200 -elastic_apm.service_name="My service" -`.split('\n'), - textPost: i18n.translate('apmOss.tutorial.phpClient.configure.textPost', { - defaultMessage: - 'See the [documentation]({documentationLink}) for configuration options and advanced usage.\n\n', - values: { - documentationLink: - '{config.docs.base_url}guide/en/apm/agent/php/current/configuration.html', - }, - }), - }, -]; diff --git a/src/plugins/bfetch/common/batch.ts b/src/plugins/bfetch/common/batch.ts index a84d94b541ae52..59b012751c66db 100644 --- a/src/plugins/bfetch/common/batch.ts +++ b/src/plugins/bfetch/common/batch.ts @@ -19,3 +19,8 @@ export interface BatchResponseItem new Promise((resolve) => setImmediate(resolve)); const getPromiseState = (promise: Promise): Promise<'resolved' | 'rejected' | 'pending'> => Promise.race<'resolved' | 'rejected' | 'pending'>([ @@ -46,12 +48,14 @@ const setup = () => { }; }; -describe('createStreamingBatchedFunction()', () => { +// FLAKY: https://github.com/elastic/kibana/issues/101126 +describe.skip('createStreamingBatchedFunction()', () => { test('returns a function', () => { const { fetchStreaming } = setup(); const fn = createStreamingBatchedFunction({ url: '/test', fetchStreaming, + compressionDisabled$: rxof(true), }); expect(typeof fn).toBe('function'); }); @@ -61,6 +65,7 @@ describe('createStreamingBatchedFunction()', () => { const fn = createStreamingBatchedFunction({ url: '/test', fetchStreaming, + compressionDisabled$: rxof(true), }); const res = fn({}); expect(typeof res.then).toBe('function'); @@ -74,6 +79,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); expect(fetchStreaming).toHaveBeenCalledTimes(0); @@ -93,6 +99,7 @@ 
describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); expect(fetchStreaming).toHaveBeenCalledTimes(0); @@ -107,6 +114,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); fn({ foo: 'bar' }); @@ -125,6 +133,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); fn({ foo: 'bar' }); @@ -146,14 +155,18 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); expect(fetchStreaming).toHaveBeenCalledTimes(0); fn({ foo: 'bar' }); + await flushPromises(); expect(fetchStreaming).toHaveBeenCalledTimes(0); fn({ baz: 'quix' }); + await flushPromises(); expect(fetchStreaming).toHaveBeenCalledTimes(0); fn({ full: 'yep' }); + await flushPromises(); expect(fetchStreaming).toHaveBeenCalledTimes(1); }); @@ -164,6 +177,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const abortController = new AbortController(); @@ -186,11 +200,13 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); fn({ a: '1' }); fn({ b: '2' }); fn({ c: '3' }); + await flushPromises(); expect(fetchStreaming.mock.calls[0][0]).toMatchObject({ url: '/test', @@ -209,13 +225,16 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); fn({ a: '1' }); fn({ b: '2' }); fn({ c: '3' }); + await flushPromises(); expect(fetchStreaming).toHaveBeenCalledTimes(1); fn({ d: '4' }); + await flushPromises(); await new Promise((r) => setTimeout(r, 6)); expect(fetchStreaming).toHaveBeenCalledTimes(2); }); @@ -229,6 +248,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = fn({ a: '1' }); @@ -246,8 +266,11 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); + await flushPromises(); + const promise1 = fn({ a: '1' }); const promise2 = fn({ b: '2' }); const promise3 = fn({ c: '3' }); @@ -287,6 +310,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = fn({ a: '1' }); @@ -314,6 +338,20 @@ describe('createStreamingBatchedFunction()', () => { expect(await promise3).toEqual({ foo: 'bar 2' }); }); + test('compression is false by default', async () => { + const { fetchStreaming } = setup(); + const fn = createStreamingBatchedFunction({ + url: '/test', + flushOnMaxItems: 1, + fetchStreaming, + }); + + fn({ a: '1' }); + + const dontCompress = await fetchStreaming.mock.calls[0][0].compressionDisabled$.toPromise(); + expect(dontCompress).toBe(false); + }); + test('resolves falsy results', async () => { const { fetchStreaming, stream } = setup(); const fn = createStreamingBatchedFunction({ @@ -321,6 +359,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = fn({ a: '1' }); @@ -362,6 +401,7 @@ 
describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise = fn({ a: '1' }); @@ -390,6 +430,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = of(fn({ a: '1' })); @@ -442,6 +483,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const abortController = new AbortController(); @@ -471,6 +513,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const abortController = new AbortController(); @@ -509,6 +552,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = of(fn({ a: '1' })); @@ -539,6 +583,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = of(fn({ a: '1' })); @@ -576,6 +621,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = of(fn({ a: '1' })); @@ -608,6 +654,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = of(fn({ a: '1' })); @@ -644,7 +691,9 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); + await flushPromises(); const promise1 = of(fn({ a: '1' })); const promise2 = of(fn({ a: '2' })); diff --git a/src/plugins/bfetch/public/batching/create_streaming_batched_function.ts b/src/plugins/bfetch/public/batching/create_streaming_batched_function.ts index 2d81331f10a88e..d5f955f517d135 100644 --- a/src/plugins/bfetch/public/batching/create_streaming_batched_function.ts +++ b/src/plugins/bfetch/public/batching/create_streaming_batched_function.ts @@ -6,16 +6,16 @@ * Side Public License, v 1. */ +import { Observable, of } from 'rxjs'; import { AbortError, abortSignalToPromise, defer } from '../../../kibana_utils/public'; import { ItemBufferParams, TimedItemBufferParams, createBatchedFunction, - BatchResponseItem, ErrorLike, + normalizeError, } from '../../common'; -import { fetchStreaming, split } from '../streaming'; -import { normalizeError } from '../../common'; +import { fetchStreaming } from '../streaming'; import { BatchedFunc, BatchItem } from './types'; export interface BatchedFunctionProtocolError extends ErrorLike { @@ -47,6 +47,11 @@ export interface StreamingBatchedFunctionParams<Payload, Result extends object> { * before sending the batch request. */ maxItemAge?: TimedItemBufferParams['maxItemAge']; + + /** + * Disables zlib compression of response chunks.
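+ *
+ * A rough usage sketch (illustrative only; `rxof` is the alias for the rxjs
+ * `of` used in the tests above) — emitting `true` opts the batched function
+ * out of compression:
+ *
+ *   createStreamingBatchedFunction({
+ *     url: '/test',
+ *     fetchStreaming,
+ *     compressionDisabled$: rxof(true),
+ *   });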
+ */ + compressionDisabled$?: Observable<boolean>; } /** @@ -64,6 +69,7 @@ export const createStreamingBatchedFunction = <Payload, Result extends object>( fetchStreaming: fetchStreamingInjected = fetchStreaming, flushOnMaxItems = 25, maxItemAge = 10, + compressionDisabled$ = of(false), } = params; const [fn] = createBatchedFunction({ onCall: (payload: Payload, signal?: AbortSignal) => { @@ -119,6 +125,7 @@ export const createStreamingBatchedFunction = <Payload, Result extends object>( body: JSON.stringify({ batch }), method: 'POST', signal: abortController.signal, + compressionDisabled$, }); const handleStreamError = (error: any) => { @@ -127,10 +134,10 @@ export const createStreamingBatchedFunction = <Payload, Result extends object>( for (const { future } of items) future.reject(normalizedError); }; - stream.pipe(split('\n')).subscribe({ + stream.subscribe({ next: (json: string) => { try { - const response = JSON.parse(json) as BatchResponseItem; + const response = JSON.parse(json); if (response.error) { items[response.id].future.reject(response.error); } else if (response.result !== undefined) { diff --git a/src/plugins/bfetch/public/batching/index.ts b/src/plugins/bfetch/public/batching/index.ts new file mode 100644 index 00000000000000..115fd84cbe979f --- /dev/null +++ b/src/plugins/bfetch/public/batching/index.ts @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +export { + createStreamingBatchedFunction, + StreamingBatchedFunctionParams, +} from './create_streaming_batched_function'; diff --git a/src/plugins/bfetch/public/plugin.ts b/src/plugins/bfetch/public/plugin.ts index ed97d468eec0b1..f97a91a0e70d37 100644 --- a/src/plugins/bfetch/public/plugin.ts +++ b/src/plugins/bfetch/public/plugin.ts @@ -7,12 +7,11 @@ */ import { CoreStart, PluginInitializerContext, CoreSetup, Plugin } from 'src/core/public'; +import { from, Observable, of } from 'rxjs'; +import { switchMap } from 'rxjs/operators'; import { fetchStreaming as fetchStreamingStatic, FetchStreamingParams } from './streaming'; -import { removeLeadingSlash } from '../common'; -import { - createStreamingBatchedFunction, - StreamingBatchedFunctionParams, -} from './batching/create_streaming_batched_function'; +import { DISABLE_BFETCH_COMPRESSION, removeLeadingSlash } from '../common'; +import { createStreamingBatchedFunction, StreamingBatchedFunctionParams } from './batching'; import { BatchedFunc } from './batching/types'; // eslint-disable-next-line @@ -43,12 +42,23 @@ export class BfetchPublicPlugin constructor(private readonly initializerContext: PluginInitializerContext) {} - public setup(core: CoreSetup, plugins: BfetchPublicSetupDependencies): BfetchPublicSetup { + public setup( + core: CoreSetup, + plugins: BfetchPublicSetupDependencies + ): BfetchPublicSetup { const { version } = this.initializerContext.env.packageInfo; const basePath = core.http.basePath.get(); - const fetchStreaming = this.fetchStreaming(version, basePath); - const batchedFunction = this.batchedFunction(fetchStreaming); + const compressionDisabled$ = from(core.getStartServices()).pipe( + switchMap((deps) => { + return of(deps[0]); + }), + switchMap((coreStart) => { + return coreStart.uiSettings.get$(DISABLE_BFETCH_COMPRESSION); + }) + ); + const fetchStreaming = this.fetchStreaming(version, basePath, compressionDisabled$);
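+ // Note: the two switchMaps above can be collapsed, since
+ // `switchMap((deps) => of(deps[0]))` only destructures the started core.
+ // A terser equivalent (sketch):
+ //
+ //   from(core.getStartServices()).pipe(
+ //     switchMap(([coreStart]) => coreStart.uiSettings.get$(DISABLE_BFETCH_COMPRESSION))
+ //   );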
+ const batchedFunction = this.batchedFunction(fetchStreaming, compressionDisabled$); this.contract = { fetchStreaming, @@ -66,7 +76,8 @@ export class BfetchPublicPlugin private fetchStreaming = ( version: string, - basePath: string + basePath: string, + compressionDisabled$: Observable<boolean> ): BfetchPublicSetup['fetchStreaming'] => (params) => fetchStreamingStatic({ ...params, @@ -76,13 +87,16 @@ export class BfetchPublicPlugin 'kbn-version': version, ...(params.headers || {}), }, + compressionDisabled$, }); private batchedFunction = ( - fetchStreaming: BfetchPublicContract['fetchStreaming'] + fetchStreaming: BfetchPublicContract['fetchStreaming'], + compressionDisabled$: Observable<boolean> ): BfetchPublicContract['batchedFunction'] => (params) => createStreamingBatchedFunction({ ...params, + compressionDisabled$, fetchStreaming: params.fetchStreaming || fetchStreaming, }); } diff --git a/src/plugins/bfetch/public/streaming/fetch_streaming.test.ts b/src/plugins/bfetch/public/streaming/fetch_streaming.test.ts index e804b3ea942275..a5d066f6d9a24d 100644 --- a/src/plugins/bfetch/public/streaming/fetch_streaming.test.ts +++ b/src/plugins/bfetch/public/streaming/fetch_streaming.test.ts @@ -8,6 +8,15 @@ import { fetchStreaming } from './fetch_streaming'; import { mockXMLHttpRequest } from '../test_helpers/xhr'; +import { of } from 'rxjs'; +import { promisify } from 'util'; +import { deflate } from 'zlib'; +const pDeflate = promisify(deflate); + +const compressResponse = async (resp: any) => { + const gzipped = await pDeflate(JSON.stringify(resp)); + return gzipped.toString('base64'); +}; const tick = () => new Promise((resolve) => setTimeout(resolve, 1)); @@ -21,6 +30,7 @@ test('returns XHR request', () => { setup(); const { xhr } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); expect(typeof xhr.readyState).toBe('number'); }); @@ -29,6 +39,7 @@ test('returns stream', () => { setup(); const { stream } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); expect(typeof stream.subscribe).toBe('function'); }); @@ -37,6 +48,7 @@ test('promise resolves when request completes', async () => { const env = setup(); const { stream } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); let resolved = false; @@ -65,10 +77,90 @@ test('promise resolves when request completes', async () => { expect(resolved).toBe(true); }); -test('streams incoming text as it comes through', async () => { +test('promise resolves when compressed request completes', async () => { + const env = setup(); + const { stream } = fetchStreaming({ + url: 'http://example.com', + compressionDisabled$: of(false), + }); + + let resolved = false; + let result; + stream.toPromise().then((r) => { + resolved = true; + result = r; + }); + + await tick(); + expect(resolved).toBe(false); + + const msg = { foo: 'bar' }; + + // Whole message in a response + (env.xhr as any).responseText = `${await compressResponse(msg)}\n`; + env.xhr.onprogress!({} as any); + + await tick(); + expect(resolved).toBe(false); + + (env.xhr as any).readyState = 4; + (env.xhr as any).status = 200; + env.xhr.onreadystatechange!({} as any); + + await tick(); + expect(resolved).toBe(true); + expect(result).toStrictEqual(JSON.stringify(msg)); +}); + +test('promise resolves when compressed chunked request completes', async () => { const env = setup(); const { stream } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(false), + }); + + let resolved = false; + let result; +
stream.toPromise().then((r) => { + resolved = true; + result = r; + }); + + await tick(); + expect(resolved).toBe(false); + + const msg = { veg: 'tomato' }; + const msgToCut = await compressResponse(msg); + const part1 = msgToCut.substr(0, 3); + + // Message and a half in a response + (env.xhr as any).responseText = part1; + env.xhr.onprogress!({} as any); + + await tick(); + expect(resolved).toBe(false); + + // Half a message in a response + (env.xhr as any).responseText = `${msgToCut}\n`; + env.xhr.onprogress!({} as any); + + await tick(); + expect(resolved).toBe(false); + + (env.xhr as any).readyState = 4; + (env.xhr as any).status = 200; + env.xhr.onreadystatechange!({} as any); + + await tick(); + expect(resolved).toBe(true); + expect(result).toStrictEqual(JSON.stringify(msg)); +}); + +test('streams incoming text as it comes through, according to separators', async () => { + const env = setup(); + const { stream } = fetchStreaming({ + url: 'http://example.com', + compressionDisabled$: of(true), }); const spy = jest.fn(); @@ -80,16 +172,22 @@ test('streams incoming text as it comes through', async () => { (env.xhr as any).responseText = 'foo'; env.xhr.onprogress!({} as any); + await tick(); + expect(spy).toHaveBeenCalledTimes(0); + + (env.xhr as any).responseText = 'foo\nbar'; + env.xhr.onprogress!({} as any); + await tick(); expect(spy).toHaveBeenCalledTimes(1); expect(spy).toHaveBeenCalledWith('foo'); - (env.xhr as any).responseText = 'foo\nbar'; + (env.xhr as any).responseText = 'foo\nbar\n'; env.xhr.onprogress!({} as any); await tick(); expect(spy).toHaveBeenCalledTimes(2); - expect(spy).toHaveBeenCalledWith('\nbar'); + expect(spy).toHaveBeenCalledWith('bar'); (env.xhr as any).readyState = 4; (env.xhr as any).status = 200; @@ -103,6 +201,7 @@ test('completes stream observable when request finishes', async () => { const env = setup(); const { stream } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); const spy = jest.fn(); @@ -127,6 +226,7 @@ test('completes stream observable when aborted', async () => { const { stream } = fetchStreaming({ url: 'http://example.com', signal: abort.signal, + compressionDisabled$: of(true), }); const spy = jest.fn(); @@ -152,6 +252,7 @@ test('promise throws when request errors', async () => { const env = setup(); const { stream } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); const spy = jest.fn(); @@ -178,6 +279,7 @@ test('stream observable errors when request errors', async () => { const env = setup(); const { stream } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); const spy = jest.fn(); @@ -210,6 +312,7 @@ test('sets custom headers', async () => { 'Content-Type': 'text/plain', Authorization: 'Bearer 123', }, + compressionDisabled$: of(true), }); expect(env.xhr.setRequestHeader).toHaveBeenCalledWith('Content-Type', 'text/plain'); @@ -223,6 +326,7 @@ test('uses credentials', async () => { fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); expect(env.xhr.withCredentials).toBe(true); @@ -238,6 +342,7 @@ test('opens XHR request and sends specified body', async () => { url: 'http://elastic.co', method: 'GET', body: 'foobar', + compressionDisabled$: of(true), }); expect(env.xhr.open).toHaveBeenCalledTimes(1); @@ -250,6 +355,7 @@ test('uses POST request method by default', async () => { const env = setup(); fetchStreaming({ url: 'http://elastic.co', + compressionDisabled$: of(true), }); 
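+ // For reference: `compressResponse` above mirrors the server-side framing, where
+ // each JSON message is deflated, base64-encoded, and terminated with '\n'. A
+ // matching decoder sketch using the same zlib primitives (here `inflate` would
+ // come from 'zlib', alongside the `deflate` already imported in this file):
+ //
+ //   const decode = async (chunk: string) =>
+ //     (await promisify(inflate)(Buffer.from(chunk, 'base64'))).toString();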
expect(env.xhr.open).toHaveBeenCalledWith('POST', 'http://elastic.co'); }); diff --git a/src/plugins/bfetch/public/streaming/fetch_streaming.ts b/src/plugins/bfetch/public/streaming/fetch_streaming.ts index d68e4d01b44f5f..1af35ef68fb85c 100644 --- a/src/plugins/bfetch/public/streaming/fetch_streaming.ts +++ b/src/plugins/bfetch/public/streaming/fetch_streaming.ts @@ -6,7 +6,11 @@ * Side Public License, v 1. */ +import { Observable, of } from 'rxjs'; +import { map, share, switchMap } from 'rxjs/operators'; +import { inflateResponse } from '.'; import { fromStreamingXhr } from './from_streaming_xhr'; +import { split } from './split'; export interface FetchStreamingParams { url: string; @@ -14,6 +18,7 @@ export interface FetchStreamingParams { method?: 'GET' | 'POST'; body?: string; signal?: AbortSignal; + compressionDisabled$?: Observable<boolean>; } /** @@ -26,23 +31,49 @@ export function fetchStreaming({ method = 'POST', body = '', signal, + compressionDisabled$ = of(false), }: FetchStreamingParams) { const xhr = new window.XMLHttpRequest(); - // Begin the request - xhr.open(method, url); - xhr.withCredentials = true; + const msgStream = compressionDisabled$.pipe( + switchMap((compressionDisabled) => { + // Begin the request + xhr.open(method, url); + xhr.withCredentials = true; - // Set the HTTP headers - Object.entries(headers).forEach(([k, v]) => xhr.setRequestHeader(k, v)); + if (!compressionDisabled) { + headers['X-Chunk-Encoding'] = 'deflate'; + } - const stream = fromStreamingXhr(xhr, signal); + // Set the HTTP headers + Object.entries(headers).forEach(([k, v]) => xhr.setRequestHeader(k, v)); - // Send the payload to the server - xhr.send(body); + const stream = fromStreamingXhr(xhr, signal); + + // Send the payload to the server + xhr.send(body); + + // Return a stream of chunked decompressed messages + return stream.pipe( + split('\n'), + map((msg) => { + return compressionDisabled ? msg : inflateResponse(msg); + }) + ); + }), + share() + ); + + // start execution + const msgStreamSub = msgStream.subscribe({ error: (e) => {}, + complete: () => { + msgStreamSub.unsubscribe(); + }, + }); return { xhr, - stream, + stream: msgStream, }; } diff --git a/src/plugins/bfetch/public/streaming/index.ts b/src/plugins/bfetch/public/streaming/index.ts index afb442feffb294..545cae87aa3d6a 100644 --- a/src/plugins/bfetch/public/streaming/index.ts +++ b/src/plugins/bfetch/public/streaming/index.ts @@ -9,3 +9,4 @@ export * from './split'; export * from './from_streaming_xhr'; export * from './fetch_streaming'; +export { inflateResponse } from './inflate_response'; diff --git a/src/plugins/bfetch/public/streaming/inflate_response.ts b/src/plugins/bfetch/public/streaming/inflate_response.ts new file mode 100644 index 00000000000000..73cb52285987ca --- /dev/null +++ b/src/plugins/bfetch/public/streaming/inflate_response.ts @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1.
+ */ + +import { unzlibSync, strFromU8 } from 'fflate'; + +export function inflateResponse(response: string) { + const buff = Buffer.from(response, 'base64'); + const unzip = unzlibSync(buff); + return strFromU8(unzip); +} diff --git a/src/plugins/bfetch/server/plugin.ts b/src/plugins/bfetch/server/plugin.ts index 18f0813260f036..7fd46e2f6cc44e 100644 --- a/src/plugins/bfetch/server/plugin.ts +++ b/src/plugins/bfetch/server/plugin.ts @@ -16,6 +16,7 @@ import type { RouteMethod, RequestHandler, RequestHandlerContext, + StartServicesAccessor, } from 'src/core/server'; import { schema } from '@kbn/config-schema'; import { Subject } from 'rxjs'; @@ -28,7 +29,8 @@ import { normalizeError, } from '../common'; import { StreamingRequestHandler } from './types'; -import { createNDJSONStream } from './streaming'; +import { createStream } from './streaming'; +import { getUiSettings } from './ui_settings'; // eslint-disable-next-line export interface BfetchServerSetupDependencies {} @@ -112,9 +114,19 @@ export class BfetchServerPlugin public setup(core: CoreSetup, plugins: BfetchServerSetupDependencies): BfetchServerSetup { const logger = this.initializerContext.logger.get(); const router = core.http.createRouter(); - const addStreamingResponseRoute = this.addStreamingResponseRoute({ router, logger }); + + core.uiSettings.register(getUiSettings()); + + const addStreamingResponseRoute = this.addStreamingResponseRoute({ + getStartServices: core.getStartServices, + router, + logger, + }); const addBatchProcessingRoute = this.addBatchProcessingRoute(addStreamingResponseRoute); - const createStreamingRequestHandler = this.createStreamingRequestHandler({ logger }); + const createStreamingRequestHandler = this.createStreamingRequestHandler({ + getStartServices: core.getStartServices, + logger, + }); return { addBatchProcessingRoute, @@ -129,10 +141,16 @@ export class BfetchServerPlugin public stop() {} + private getCompressionDisabled(request: KibanaRequest) { + return request.headers['x-chunk-encoding'] !== 'deflate'; + } + private addStreamingResponseRoute = ({ + getStartServices, router, logger, }: { + getStartServices: StartServicesAccessor; router: ReturnType; logger: Logger; }): BfetchServerSetup['addStreamingResponseRoute'] => (path, handler) => { @@ -146,9 +164,10 @@ export class BfetchServerPlugin async (context, request, response) => { const handlerInstance = handler(request); const data = request.body; + const compressionDisabled = this.getCompressionDisabled(request); return response.ok({ headers: streamingHeaders, - body: createNDJSONStream(handlerInstance.getResponseStream(data), logger), + body: createStream(handlerInstance.getResponseStream(data), logger, compressionDisabled), }); } ); @@ -156,17 +175,20 @@ export class BfetchServerPlugin private createStreamingRequestHandler = ({ logger, + getStartServices, }: { logger: Logger; + getStartServices: StartServicesAccessor; }): BfetchServerSetup['createStreamingRequestHandler'] => (streamHandler) => async ( context, request, response ) => { const response$ = await streamHandler(context, request); + const compressionDisabled = this.getCompressionDisabled(request); return response.ok({ headers: streamingHeaders, - body: createNDJSONStream(response$, logger), + body: createStream(response$, logger, compressionDisabled), }); }; diff --git a/src/plugins/bfetch/server/streaming/create_compressed_stream.ts b/src/plugins/bfetch/server/streaming/create_compressed_stream.ts new file mode 100644 index 00000000000000..6814ed1dd79550 --- /dev/null +++ 
b/src/plugins/bfetch/server/streaming/create_compressed_stream.ts @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import { promisify } from 'util'; +import { Observable } from 'rxjs'; +import { catchError, concatMap, finalize } from 'rxjs/operators'; +import { Logger } from 'src/core/server'; +import { Stream, PassThrough } from 'stream'; +import { constants, deflate } from 'zlib'; + +const delimiter = '\n'; +const pDeflate = promisify(deflate); + +async function zipMessageToStream(output: PassThrough, message: string) { + return new Promise(async (resolve, reject) => { + try { + const gzipped = await pDeflate(message, { + flush: constants.Z_SYNC_FLUSH, + }); + output.write(gzipped.toString('base64')); + output.write(delimiter); + resolve(undefined); + } catch (err) { + reject(err); + } + }); } + +export const createCompressedStream = <Response>( + results: Observable<Response>, + logger: Logger +): Stream => { + const output = new PassThrough(); + + const sub = results + .pipe( + concatMap((message: Response) => { + const strMessage = JSON.stringify(message); + return zipMessageToStream(output, strMessage); + }), + catchError((e) => { + logger.error('Could not serialize or stream a message.'); + logger.error(e); + throw e; + }), + finalize(() => { + output.end(); + sub.unsubscribe(); + }) + ) + .subscribe(); + + return output; }; diff --git a/src/plugins/bfetch/server/streaming/create_stream.ts b/src/plugins/bfetch/server/streaming/create_stream.ts new file mode 100644 index 00000000000000..7d6981294341b3 --- /dev/null +++ b/src/plugins/bfetch/server/streaming/create_stream.ts @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import { Logger } from 'kibana/server'; +import { Stream } from 'stream'; +import { Observable } from 'rxjs'; +import { createCompressedStream } from './create_compressed_stream'; +import { createNDJSONStream } from './create_ndjson_stream'; + +export function createStream<Response>( + response$: Observable<Response>, + logger: Logger, + compressionDisabled: boolean +): Stream { + return compressionDisabled + ? createNDJSONStream(response$, logger) + : createCompressedStream(response$, logger); } diff --git a/src/plugins/bfetch/server/streaming/index.ts b/src/plugins/bfetch/server/streaming/index.ts index 2c31cc329295d0..dfd472b5034a1b 100644 --- a/src/plugins/bfetch/server/streaming/index.ts +++ b/src/plugins/bfetch/server/streaming/index.ts @@ -7,3 +7,5 @@ */ export * from './create_ndjson_stream'; +export * from './create_compressed_stream'; +export * from './create_stream';
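For reference, a minimal sketch of the chunk format produced by createCompressedStream above, using Node's zlib on both ends (the browser client uses fflate's unzlibSync instead, as shown in inflate_response.ts; the message value here is hypothetical):

import { deflateSync, inflateSync } from 'zlib';

const message = { id: 0, result: { took: 5 } }; // hypothetical batch response item
// wire format: base64(deflate(JSON)) followed by '\n'
const chunk = deflateSync(JSON.stringify(message)).toString('base64') + '\n';
const decoded = JSON.parse(inflateSync(Buffer.from(chunk.trim(), 'base64')).toString());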
diff --git a/src/plugins/bfetch/server/ui_settings.ts b/src/plugins/bfetch/server/ui_settings.ts new file mode 100644 index 00000000000000..cf7b13a9af1820 --- /dev/null +++ b/src/plugins/bfetch/server/ui_settings.ts @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import { i18n } from '@kbn/i18n'; +import { UiSettingsParams } from 'src/core/server'; +import { schema } from '@kbn/config-schema'; +import { DISABLE_BFETCH_COMPRESSION } from '../common'; + +export function getUiSettings(): Record<string, UiSettingsParams<boolean>> { + return { + [DISABLE_BFETCH_COMPRESSION]: { + name: i18n.translate('bfetch.disableBfetchCompression', { + defaultMessage: 'Disable Batch Compression', + }), + value: false, + description: i18n.translate('bfetch.disableBfetchCompressionDesc', { + defaultMessage: + 'Disable batch compression. This allows you to debug individual requests, but increases response size.', + }), + schema: schema.boolean(), + category: [], + }, + }; } diff --git a/src/plugins/dashboard/public/application/dashboard_app.tsx b/src/plugins/dashboard/public/application/dashboard_app.tsx index fa86fb81bd4073..93310bb8213615 100644 --- a/src/plugins/dashboard/public/application/dashboard_app.tsx +++ b/src/plugins/dashboard/public/application/dashboard_app.tsx @@ -295,13 +295,6 @@ export function DashboardApp({ }; }, [dashboardStateManager, dashboardContainer, onAppLeave, embeddable]); - // clear search session when leaving dashboard route - useEffect(() => { - return () => { - data.search.session.clear(); - }; - }, [data.search.session]); - return ( <> {savedDashboard && dashboardStateManager && dashboardContainer && viewMode && ( diff --git a/src/plugins/dashboard/public/application/dashboard_router.tsx b/src/plugins/dashboard/public/application/dashboard_router.tsx index d5eddf6bb48647..be279ed98492e7 100644 --- a/src/plugins/dashboard/public/application/dashboard_router.tsx +++ b/src/plugins/dashboard/public/application/dashboard_router.tsx @@ -198,8 +198,14 @@ export async function mountApp({ return ; }; - // make sure the index pattern list is up to date - await dataStart.indexPatterns.clearCache(); + const hasEmbeddableIncoming = Boolean( + dashboardServices.embeddable + .getStateTransfer() + .getIncomingEmbeddablePackage(DashboardConstants.DASHBOARDS_ID, false) + ); + if (!hasEmbeddableIncoming) { + dataStart.indexPatterns.clearCache(); + } // dispatch synthetic hash change event to update hash history objects // this is necessary because hash updates triggered by using popState won't trigger this event naturally.
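// The synthetic dispatch referred to above can be as small as this one-liner
// (a sketch; a browser environment is assumed):
//   window.dispatchEvent(new HashChangeEvent('hashchange'));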
@@ -242,7 +248,6 @@ export async function mountApp({ } render(app, element); return () => { - dataStart.search.session.clear(); unlistenParentHistory(); unmountComponentAtNode(element); appUnMounted(); }; diff --git a/src/plugins/dashboard/public/application/hooks/use_dashboard_container.ts b/src/plugins/dashboard/public/application/hooks/use_dashboard_container.ts index 0be29f67a94922..d715fb70ec91a8 100644 --- a/src/plugins/dashboard/public/application/hooks/use_dashboard_container.ts +++ b/src/plugins/dashboard/public/application/hooks/use_dashboard_container.ts @@ -85,6 +85,7 @@ export const useDashboardContainer = ({ let canceled = false; let pendingContainer: DashboardContainer | ErrorEmbeddable | null | undefined; (async function createContainer() { + const existingSession = searchSession.getSessionId(); pendingContainer = await dashboardFactory.create( getDashboardContainerInput({ isEmbeddedExternally: Boolean(isEmbeddedExternally), @@ -92,7 +93,9 @@ dashboardStateManager, incomingEmbeddable, query, - searchSessionId: searchSessionIdFromURL ?? searchSession.start(), + searchSessionId: + searchSessionIdFromURL ?? + (existingSession && incomingEmbeddable ? existingSession : searchSession.start()), }) ); diff --git a/src/plugins/dashboard/public/application/listing/dashboard_listing.tsx b/src/plugins/dashboard/public/application/listing/dashboard_listing.tsx index db0404595af6c8..e2c11d614d7970 100644 --- a/src/plugins/dashboard/public/application/listing/dashboard_listing.tsx +++ b/src/plugins/dashboard/public/application/listing/dashboard_listing.tsx @@ -83,6 +83,11 @@ export const DashboardListing = ({ }; }, [title, savedObjectsClient, redirectTo, data.query, kbnUrlStateStorage]); + // clear dangling sessions because they are not required here + useEffect(() => { + data.search.session.clear(); + }, [data.search.session]); + const hideWriteControls = dashboardCapabilities.hideWriteControls; const listingLimit = savedObjects.settings.getListingLimit(); const defaultFilter = title ?
`"${title}"` : ''; diff --git a/src/plugins/data/common/es_query/es_query/build_es_query.test.ts b/src/plugins/data/common/es_query/es_query/build_es_query.test.ts index c6d923f4505f01..fa9a2c85aaef54 100644 --- a/src/plugins/data/common/es_query/es_query/build_es_query.test.ts +++ b/src/plugins/data/common/es_query/es_query/build_es_query.test.ts @@ -39,12 +39,16 @@ describe('build query', () => { { query: 'extension:jpg', language: 'kuery' }, { query: 'bar:baz', language: 'lucene' }, ] as Query[]; - const filters = [ - { - match_all: {}, - meta: { type: 'match_all' }, - } as MatchAllFilter, - ]; + const filters = { + match: { + a: 'b', + }, + meta: { + alias: '', + disabled: false, + negate: false, + }, + }; const config = { allowLeadingWildcards: true, queryStringOptions: {}, @@ -56,7 +60,11 @@ describe('build query', () => { must: [decorateQuery(luceneStringToDsl('bar:baz'), config.queryStringOptions)], filter: [ toElasticsearchQuery(fromKueryExpression('extension:jpg'), indexPattern), - { match_all: {} }, + { + match: { + a: 'b', + }, + }, ], should: [], must_not: [], @@ -71,9 +79,15 @@ describe('build query', () => { it('should accept queries and filters as either single objects or arrays', () => { const queries = { query: 'extension:jpg', language: 'lucene' } as Query; const filters = { - match_all: {}, - meta: { type: 'match_all' }, - } as MatchAllFilter; + match: { + a: 'b', + }, + meta: { + alias: '', + disabled: false, + negate: false, + }, + }; const config = { allowLeadingWildcards: true, queryStringOptions: {}, @@ -83,7 +97,13 @@ describe('build query', () => { const expectedResult = { bool: { must: [decorateQuery(luceneStringToDsl('extension:jpg'), config.queryStringOptions)], - filter: [{ match_all: {} }], + filter: [ + { + match: { + a: 'b', + }, + }, + ], should: [], must_not: [], }, @@ -94,6 +114,49 @@ describe('build query', () => { expect(result).toEqual(expectedResult); }); + it('should remove match_all clauses', () => { + const filters = [ + { + match_all: {}, + meta: { type: 'match_all' }, + } as MatchAllFilter, + { + match: { + a: 'b', + }, + meta: { + alias: '', + disabled: false, + negate: false, + }, + }, + ]; + const config = { + allowLeadingWildcards: true, + queryStringOptions: {}, + ignoreFilterIfFieldNotInIndex: false, + }; + + const expectedResult = { + bool: { + must: [], + filter: [ + { + match: { + a: 'b', + }, + }, + ], + should: [], + must_not: [], + }, + }; + + const result = buildEsQuery(indexPattern, [], filters, config); + + expect(result).toEqual(expectedResult); + }); + it('should use the default time zone set in the Advanced Settings in queries and filters', () => { const queries = [ { query: '@timestamp:"2019-03-23T13:18:00"', language: 'kuery' }, @@ -122,7 +185,6 @@ describe('build query', () => { indexPattern, config ), - { match_all: {} }, ], should: [], must_not: [], diff --git a/src/plugins/data/common/es_query/es_query/build_es_query.ts b/src/plugins/data/common/es_query/es_query/build_es_query.ts index 18b360de9aaa64..45724796c3518c 100644 --- a/src/plugins/data/common/es_query/es_query/build_es_query.ts +++ b/src/plugins/data/common/es_query/es_query/build_es_query.ts @@ -6,7 +6,7 @@ * Side Public License, v 1. 
*/ -import { groupBy, has } from 'lodash'; +import { groupBy, has, isEqual } from 'lodash'; import { buildQueryFromKuery } from './from_kuery'; import { buildQueryFromFilters } from './from_filters'; import { buildQueryFromLucene } from './from_lucene'; @@ -21,6 +21,12 @@ export interface EsQueryConfig { dateFormatTZ?: string; } +function removeMatchAll<T>(filters: T[]) { + return filters.filter( + (filter) => !filter || typeof filter !== 'object' || !isEqual(filter, { match_all: {} }) + ); +} + /** * @param indexPattern * @param queries - a query object or array of query objects. Each query has a language property and a query property. @@ -63,9 +69,9 @@ export function buildEsQuery( return { bool: { - must: [...kueryQuery.must, ...luceneQuery.must, ...filterQuery.must], - filter: [...kueryQuery.filter, ...luceneQuery.filter, ...filterQuery.filter], - should: [...kueryQuery.should, ...luceneQuery.should, ...filterQuery.should], + must: removeMatchAll([...kueryQuery.must, ...luceneQuery.must, ...filterQuery.must]), + filter: removeMatchAll([...kueryQuery.filter, ...luceneQuery.filter, ...filterQuery.filter]), + should: removeMatchAll([...kueryQuery.should, ...luceneQuery.should, ...filterQuery.should]), must_not: [...kueryQuery.must_not, ...luceneQuery.must_not, ...filterQuery.must_not], }, }; diff --git a/src/plugins/data/common/field_formats/converters/duration.test.ts b/src/plugins/data/common/field_formats/converters/duration.test.ts index fc019720425dfd..72551f4b7b236f 100644 --- a/src/plugins/data/common/field_formats/converters/duration.test.ts +++ b/src/plugins/data/common/field_formats/converters/duration.test.ts @@ -139,17 +139,182 @@ describe('Duration Format', () => { ], }); + testCase({ + inputFormat: 'nanoseconds', + outputFormat: 'humanizePrecise', + outputPrecision: 2, + showSuffix: true, + fixtures: [ + { + input: 1988, + output: '0.00 Milliseconds', + }, + { + input: 658, + output: '0.00 Milliseconds', + }, + { + input: 3857, + output: '0.00 Milliseconds', + }, + ], + }); + + testCase({ + inputFormat: 'microseconds', + outputFormat: 'humanizePrecise', + outputPrecision: 2, + showSuffix: true, + fixtures: [ + { + input: 1988, + output: '1.99 Milliseconds', + }, + { + input: 658, + output: '0.66 Milliseconds', + }, + { + input: 3857, + output: '3.86 Milliseconds', + }, + ], + }); + + testCase({ + inputFormat: 'microseconds', + outputFormat: 'humanizePrecise', + outputPrecision: 1, + showSuffix: true, + fixtures: [ + { + input: 1988, + output: '2.0 Milliseconds', + }, + { + input: 0, + output: '0.0 Milliseconds', + }, + { + input: 658, + output: '0.7 Milliseconds', + }, + { + input: 3857, + output: '3.9 Milliseconds', + }, + ], + }); + + testCase({ + inputFormat: 'seconds', + outputFormat: 'humanizePrecise', + outputPrecision: 0, + showSuffix: true, + fixtures: [ + { + input: 600, + output: '10 Minutes', + }, + { + input: 30, + output: '30 Seconds', + }, + { + input: 3000, + output: '50 Minutes', + }, + ], + }); + + testCase({ + inputFormat: 'milliseconds', + outputFormat: 'humanizePrecise', + outputPrecision: 0, + showSuffix: true, + useShortSuffix: true, + fixtures: [ + { + input: -123, + output: '-123 ms', + }, + { + input: 1, + output: '1 ms', + }, + { + input: 600, + output: '600 ms', + }, + { + input: 30, + output: '30 ms', + }, + { + input: 3000, + output: '3 s', + }, + { + input: 300000, + output: '5 min', + }, + { + input: 30000000, + output: '8 h', + }, + { + input: 90000000, + output: '1 d', + }, + { + input: 9000000000, + output: '3 mon', + }, + { + input:
99999999999, + output: '3 y', + }, + ], + }); + + testCase({ + inputFormat: 'milliseconds', + outputFormat: 'humanizePrecise', + outputPrecision: 0, + showSuffix: true, + useShortSuffix: true, + includeSpaceWithSuffix: false, + fixtures: [ + { + input: -123, + output: '-123ms', + }, + { + input: 1, + output: '1ms', + }, + { + input: 600, + output: '600ms', + }, + ], + }); + function testCase({ inputFormat, outputFormat, outputPrecision, showSuffix, + useShortSuffix, + includeSpaceWithSuffix, fixtures, }: { inputFormat: string; outputFormat: string; outputPrecision: number | undefined; showSuffix: boolean | undefined; + useShortSuffix?: boolean; + includeSpaceWithSuffix?: boolean; fixtures: any[]; }) { fixtures.forEach((fixture: Record) => { @@ -160,7 +325,14 @@ describe('Duration Format', () => { outputPrecision ? `, ${outputPrecision} decimals` : '' }`, () => { const duration = new DurationFormat( - { inputFormat, outputFormat, outputPrecision, showSuffix }, + { + inputFormat, + outputFormat, + outputPrecision, + showSuffix, + useShortSuffix, + includeSpaceWithSuffix, + }, jest.fn() ); expect(duration.convert(input)).toBe(output); diff --git a/src/plugins/data/common/field_formats/converters/duration.ts b/src/plugins/data/common/field_formats/converters/duration.ts index ef8c1df3704a83..c9a7091db84716 100644 --- a/src/plugins/data/common/field_formats/converters/duration.ts +++ b/src/plugins/data/common/field_formats/converters/duration.ts @@ -18,6 +18,7 @@ const ratioToSeconds: Record = { microseconds: 0.000001, }; const HUMAN_FRIENDLY = 'humanize'; +const HUMAN_FRIENDLY_PRECISE = 'humanizePrecise'; const DEFAULT_OUTPUT_PRECISION = 2; const DEFAULT_INPUT_FORMAT = { text: i18n.translate('data.fieldFormats.duration.inputFormats.seconds', { @@ -89,59 +90,89 @@ const inputFormats = [ }, ]; const DEFAULT_OUTPUT_FORMAT = { - text: i18n.translate('data.fieldFormats.duration.outputFormats.humanize', { - defaultMessage: 'Human Readable', + text: i18n.translate('data.fieldFormats.duration.outputFormats.humanize.approximate', { + defaultMessage: 'Human-readable (approximate)', }), method: 'humanize', }; const outputFormats = [ { ...DEFAULT_OUTPUT_FORMAT }, + { + text: i18n.translate('data.fieldFormats.duration.outputFormats.humanize.precise', { + defaultMessage: 'Human-readable (precise)', + }), + method: 'humanizePrecise', + }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asMilliseconds', { defaultMessage: 'Milliseconds', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asMilliseconds.short', { + defaultMessage: 'ms', + }), method: 'asMilliseconds', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asSeconds', { defaultMessage: 'Seconds', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asSeconds.short', { + defaultMessage: 's', + }), method: 'asSeconds', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asMinutes', { defaultMessage: 'Minutes', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asMinutes.short', { + defaultMessage: 'min', + }), method: 'asMinutes', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asHours', { defaultMessage: 'Hours', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asHours.short', { + defaultMessage: 'h', + }), method: 'asHours', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asDays', { defaultMessage: 'Days', }), + shortText: 
i18n.translate('data.fieldFormats.duration.outputFormats.asDays.short', { + defaultMessage: 'd', + }), method: 'asDays', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asWeeks', { defaultMessage: 'Weeks', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asWeeks.short', { + defaultMessage: 'w', + }), method: 'asWeeks', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asMonths', { defaultMessage: 'Months', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asMonths.short', { + defaultMessage: 'mon', + }), method: 'asMonths', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asYears', { defaultMessage: 'Years', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asYears.short', { + defaultMessage: 'y', + }), method: 'asYears', }, ]; @@ -154,6 +185,29 @@ function parseInputAsDuration(val: number, inputFormat: string) { return moment.duration(val * ratio, kind); } +function formatInputHumanPrecise( + val: number, + inputFormat: string, + outputPrecision: number, + useShortSuffix: boolean, + includeSpace: string +) { + const ratio = ratioToSeconds[inputFormat] || 1; + const kind = (inputFormat in ratioToSeconds + ? 'seconds' + : inputFormat) as unitOfTime.DurationConstructor; + const valueInDuration = moment.duration(val * ratio, kind); + + return formatDuration( + val, + valueInDuration, + inputFormat, + outputPrecision, + useShortSuffix, + includeSpace + ); +} + export class DurationFormat extends FieldFormat { static id = FIELD_FORMAT_IDS.DURATION; static title = i18n.translate('data.fieldFormats.duration.title', { @@ -167,11 +221,17 @@ export class DurationFormat extends FieldFormat { isHuman() { return this.param('outputFormat') === HUMAN_FRIENDLY; } + + isHumanPrecise() { + return this.param('outputFormat') === HUMAN_FRIENDLY_PRECISE; + } + getParamDefaults() { return { inputFormat: DEFAULT_INPUT_FORMAT.kind, outputFormat: DEFAULT_OUTPUT_FORMAT.method, outputPrecision: DEFAULT_OUTPUT_PRECISION, + includeSpaceWithSuffix: true, }; } @@ -180,19 +240,84 @@ export class DurationFormat extends FieldFormat { const outputFormat = this.param('outputFormat') as keyof Duration; const outputPrecision = this.param('outputPrecision'); const showSuffix = Boolean(this.param('showSuffix')); + const useShortSuffix = Boolean(this.param('useShortSuffix')); + const includeSpaceWithSuffix = this.param('includeSpaceWithSuffix'); + + const includeSpace = includeSpaceWithSuffix ? ' ' : ''; + const human = this.isHuman(); + const humanPrecise = this.isHumanPrecise(); + const prefix = val < 0 && human ? i18n.translate('data.fieldFormats.duration.negativeLabel', { defaultMessage: 'minus', }) + ' ' : ''; + const duration = parseInputAsDuration(val, inputFormat) as Record; - const formatted = duration[outputFormat](); - const precise = human ? formatted : formatted.toFixed(outputPrecision); + const formatted = humanPrecise + ? formatInputHumanPrecise(val, inputFormat, outputPrecision, useShortSuffix, includeSpace) + : duration[outputFormat](); + + const precise = human || humanPrecise ? formatted : formatted.toFixed(outputPrecision); const type = outputFormats.find(({ method }) => method === outputFormat); - const suffix = showSuffix && type ? ` ${type.text}` : ''; - return prefix + precise + suffix; + const unitText = useShortSuffix ? type?.shortText : type?.text; + + const suffix = showSuffix && unitText && !human ? `${includeSpace}${unitText}` : ''; + + return humanPrecise ? 
precise : prefix + precise + suffix; }; } + +function formatDuration( + val: number, + duration: moment.Duration, + inputFormat: string, + outputPrecision: number, + useShortSuffix: boolean, + includeSpace: string +) { + // return nothing when the duration is falsy or not correctly parsed (P0D) + if (!duration || !duration.isValid()) return; + const units = [ + { unit: duration.years(), nextUnitRate: 12, method: 'asYears' }, + { unit: duration.months(), nextUnitRate: 4, method: 'asMonths' }, + { unit: duration.weeks(), nextUnitRate: 7, method: 'asWeeks' }, + { unit: duration.days(), nextUnitRate: 24, method: 'asDays' }, + { unit: duration.hours(), nextUnitRate: 60, method: 'asHours' }, + { unit: duration.minutes(), nextUnitRate: 60, method: 'asMinutes' }, + { unit: duration.seconds(), nextUnitRate: 1000, method: 'asSeconds' }, + { unit: duration.milliseconds(), nextUnitRate: 1000, method: 'asMilliseconds' }, + ]; + + const getUnitText = (method: string) => { + const type = outputFormats.find(({ method: methodT }) => method === methodT); + return useShortSuffix ? type?.shortText : type?.text; + }; + + for (let i = 0; i < units.length; i++) { + const unitValue = units[i].unit; + if (unitValue >= 1) { + const unitText = getUnitText(units[i].method); + + const value = Math.floor(unitValue); + if (units?.[i + 1]) { + const decimalPointValue = Math.floor(units[i + 1].unit); + return ( + (value + decimalPointValue / units[i].nextUnitRate).toFixed(outputPrecision) + + includeSpace + + unitText + ); + } else { + return unitValue.toFixed(outputPrecision) + includeSpace + unitText; + } + } + } + + const unitValue = units[units.length - 1].unit; + const unitText = getUnitText(units[units.length - 1].method); + + return unitValue.toFixed(outputPrecision) + includeSpace + unitText; +} diff --git a/src/plugins/data/common/index_patterns/index_patterns/index_pattern.ts b/src/plugins/data/common/index_patterns/index_patterns/index_pattern.ts index 41ce7ba4bab4a1..c897cdbce2309a 100644 --- a/src/plugins/data/common/index_patterns/index_patterns/index_pattern.ts +++ b/src/plugins/data/common/index_patterns/index_patterns/index_pattern.ts @@ -7,7 +7,7 @@ */ import _, { each, reject } from 'lodash'; -import { FieldAttrs, FieldAttrSet } from '../..'; +import { FieldAttrs, FieldAttrSet, IndexPatternAttributes } from '../..'; import type { RuntimeField } from '../types'; import { DuplicateField } from '../../../../kibana_utils/common'; @@ -240,6 +240,7 @@ export class IndexPattern implements IIndexPattern { * @param script script code * @param fieldType * @param lang + * @deprecated use runtime field instead */ async addScriptedField(name: string, script: string, fieldType: string = 'string') { const scriptedFields = this.getScriptedFields(); @@ -265,6 +266,7 @@ export class IndexPattern implements IIndexPattern { /** * Remove scripted field from field list * @param fieldName + * @deprecated use runtime field instead */ removeScriptedField(fieldName: string) { @@ -274,10 +276,18 @@ export class IndexPattern implements IIndexPattern { } } + /** + * + * @deprecated use runtime field instead + */ getNonScriptedFields() { return [...this.fields.getAll().filter((field) => !field.scripted)]; } + /** + * + * @deprecated use runtime field instead + */ getScriptedFields() { return [...this.fields.getAll().filter((field) => field.scripted)]; } @@ -308,7 +318,7 @@ export class IndexPattern implements IIndexPattern { /** * Returns index pattern as saved object body for saving */ - getAsSavedObjectBody() { + 
getAsSavedObjectBody(): IndexPatternAttributes {
     const fieldFormatMap = _.isEmpty(this.fieldFormatMap)
       ? undefined
       : JSON.stringify(this.fieldFormatMap);
@@ -321,12 +331,10 @@ export class IndexPattern implements IIndexPattern {
       timeFieldName: this.timeFieldName,
       intervalName: this.intervalName,
       sourceFilters: this.sourceFilters ? JSON.stringify(this.sourceFilters) : undefined,
-      fields: this.fields
-        ? JSON.stringify(this.fields.filter((field) => field.scripted))
-        : undefined,
+      fields: JSON.stringify(this.fields?.filter((field) => field.scripted) ?? []),
       fieldFormatMap,
-      type: this.type,
-      typeMeta: this.typeMeta ? JSON.stringify(this.typeMeta) : undefined,
+      type: this.type!,
+      typeMeta: JSON.stringify(this.typeMeta ?? {}),
       allowNoIndex: this.allowNoIndex ? this.allowNoIndex : undefined,
       runtimeFieldMap: runtimeFieldMap ? JSON.stringify(runtimeFieldMap) : undefined,
     };
diff --git a/src/plugins/data/common/index_patterns/index_patterns/index_patterns.test.ts b/src/plugins/data/common/index_patterns/index_patterns/index_patterns.test.ts
index a4f37334c212ee..8715f8feb067a0 100644
--- a/src/plugins/data/common/index_patterns/index_patterns/index_patterns.test.ts
+++ b/src/plugins/data/common/index_patterns/index_patterns/index_patterns.test.ts
@@ -230,7 +230,12 @@ describe('IndexPatterns', () => {
 
   test('createAndSave', async () => {
     const title = 'kibana-*';
-    indexPatterns.createSavedObject = jest.fn();
+
+    indexPatterns.createSavedObject = jest.fn(() =>
+      Promise.resolve(({
+        id: 'id',
+      } as unknown) as IndexPattern)
+    );
     indexPatterns.setDefault = jest.fn();
     await indexPatterns.createAndSave({ title });
     expect(indexPatterns.createSavedObject).toBeCalled();
diff --git a/src/plugins/data/common/index_patterns/index_patterns/index_patterns.ts b/src/plugins/data/common/index_patterns/index_patterns/index_patterns.ts
index 66e66051a6370e..e67e72f295b8ec 100644
--- a/src/plugins/data/common/index_patterns/index_patterns/index_patterns.ts
+++ b/src/plugins/data/common/index_patterns/index_patterns/index_patterns.ts
@@ -403,6 +403,12 @@ export class IndexPatternsService {
       throw new SavedObjectNotFound(savedObjectType, id, 'management/kibana/indexPatterns');
     }
 
+    return this.initFromSavedObject(savedObject);
+  };
+
+  private initFromSavedObject = async (
+    savedObject: SavedObject<IndexPatternAttributes>
+  ): Promise<IndexPattern> => {
     const spec = this.savedObjectToSpec(savedObject);
     const { title, type, typeMeta, runtimeFieldMap } = spec;
     spec.fieldAttrs = savedObject.attributes.fieldAttrs
@@ -412,7 +418,7 @@ export class IndexPatternsService {
     try {
       spec.fields = await this.refreshFieldSpecMap(
         spec.fields || {},
-        id,
+        savedObject.id,
         spec.title as string,
         {
           pattern: title as string,
@@ -423,6 +429,7 @@ export class IndexPatternsService {
         },
         spec.fieldAttrs
       );
+
       // CREATE RUNTIME FIELDS
       for (const [key, value] of Object.entries(runtimeFieldMap || {})) {
         // do not create runtime field if mapped field exists
@@ -450,7 +457,7 @@ export class IndexPatternsService {
         this.onError(err, {
           title: i18n.translate('data.indexPatterns.fetchFieldErrorTitle', {
             defaultMessage: 'Error fetching fields for index pattern {title} (ID: {id})',
-            values: { id, title },
+            values: { id: savedObject.id, title },
           }),
         });
       }
@@ -516,9 +523,9 @@ export class IndexPatternsService {
 
   async createAndSave(spec: IndexPatternSpec, override = false, skipFetchFields = false) {
     const indexPattern = await this.create(spec, skipFetchFields);
-    await this.createSavedObject(indexPattern, override);
-    await this.setDefault(indexPattern.id!);
-    return indexPattern;
+
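One consequence of the `initFromSavedObject` extraction and the `createAndSave` rewrite that continues below is worth spelling out: callers must keep the value returned by `createAndSave`, because it is a fresh `IndexPattern` initialized from the persisted saved object rather than the draft that was passed in. A sketch under that assumption (the minimal service shape here is illustrative, not the real interface):

```ts
// Illustrative shape only; the real IndexPatternsService has a richer API.
interface MinimalIndexPatternsService {
  createAndSave(spec: { title: string }): Promise<{ id?: string; title?: string }>;
}

async function ensurePattern(service: MinimalIndexPatternsService) {
  const created = await service.createAndSave({ title: 'kibana-*' });
  // `created.id` is the server-assigned id from the created saved object;
  // the pre-save draft may not have had one at all.
  console.log(created.id);
}
```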
const createdIndexPattern = await this.createSavedObject(indexPattern, override); + await this.setDefault(createdIndexPattern.id!); + return createdIndexPattern; } /** @@ -538,15 +545,20 @@ export class IndexPatternsService { } const body = indexPattern.getAsSavedObjectBody(); - const response = await this.savedObjectsClient.create(savedObjectType, body, { - id: indexPattern.id, - }); - indexPattern.id = response.id; - this.indexPatternCache.set(indexPattern.id, Promise.resolve(indexPattern)); + const response: SavedObject = (await this.savedObjectsClient.create( + savedObjectType, + body, + { + id: indexPattern.id, + } + )) as SavedObject; + + const createdIndexPattern = await this.initFromSavedObject(response); + this.indexPatternCache.set(createdIndexPattern.id!, Promise.resolve(createdIndexPattern)); if (this.savedObjectsCache) { this.savedObjectsCache.push(response as SavedObject); } - return indexPattern; + return createdIndexPattern; } /** diff --git a/src/plugins/data/common/search/aggs/agg_config.ts b/src/plugins/data/common/search/aggs/agg_config.ts index 283d276a22904b..3c83b5bdf6084b 100644 --- a/src/plugins/data/common/search/aggs/agg_config.ts +++ b/src/plugins/data/common/search/aggs/agg_config.ts @@ -6,6 +6,7 @@ * Side Public License, v 1. */ +import moment from 'moment'; import _ from 'lodash'; import { i18n } from '@kbn/i18n'; import { Assign, Ensure } from '@kbn/utility-types'; @@ -20,6 +21,7 @@ import { import { IAggType } from './agg_type'; import { writeParams } from './agg_params'; import { IAggConfigs } from './agg_configs'; +import { parseTimeShift } from './utils'; type State = string | number | boolean | null | undefined | SerializableState; @@ -172,6 +174,31 @@ export class AggConfig { return _.get(this.params, key); } + hasTimeShift(): boolean { + return Boolean(this.getParam('timeShift')); + } + + getTimeShift(): undefined | moment.Duration { + const rawTimeShift = this.getParam('timeShift'); + if (!rawTimeShift) return undefined; + const parsedTimeShift = parseTimeShift(rawTimeShift); + if (parsedTimeShift === 'invalid') { + throw new Error(`could not parse time shift ${rawTimeShift}`); + } + if (parsedTimeShift === 'previous') { + const timeShiftInterval = this.aggConfigs.getTimeShiftInterval(); + if (timeShiftInterval) { + return timeShiftInterval; + } else if (!this.aggConfigs.timeRange) { + return; + } + return moment.duration( + moment(this.aggConfigs.timeRange.to).diff(this.aggConfigs.timeRange.from) + ); + } + return parsedTimeShift; + } + write(aggs?: IAggConfigs) { return writeParams(this.type.params, this, aggs); } diff --git a/src/plugins/data/common/search/aggs/agg_configs.test.ts b/src/plugins/data/common/search/aggs/agg_configs.test.ts index 28102544ae0553..72ea64791fa5b3 100644 --- a/src/plugins/data/common/search/aggs/agg_configs.test.ts +++ b/src/plugins/data/common/search/aggs/agg_configs.test.ts @@ -14,6 +14,7 @@ import { mockAggTypesRegistry } from './test_helpers'; import type { IndexPatternField } from '../../index_patterns'; import { IndexPattern } from '../../index_patterns/index_patterns/index_pattern'; import { stubIndexPattern, stubIndexPatternWithFields } from '../../../common/stubs'; +import { IEsSearchResponse } from '..'; describe('AggConfigs', () => { let indexPattern: IndexPattern; @@ -332,6 +333,109 @@ describe('AggConfigs', () => { }); }); + it('inserts a time split filters agg if there are multiple time shifts', () => { + const configStates = [ + { + enabled: true, + type: 'terms', + schema: 'segment', + params: { field: 
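`getTimeShift` above leans on a small grammar: an amount plus a moment unit (`'1d'`, `'12h'`), the keyword `'previous'` (resolved later against the split interval or the whole time range), or invalid input. A standalone sketch of that contract, assuming a simplified pattern (the real `parseTimeShift` lives in `./utils` and may accept more forms):

```ts
import moment from 'moment';

// Mirrors the contract getTimeShift() relies on: a Duration, 'previous',
// or 'invalid' (which getTimeShift turns into a thrown error).
type ParsedShift = moment.Duration | 'previous' | 'invalid';

function parseShift(val: string): ParsedShift {
  const trimmed = val.trim();
  if (trimmed === 'previous') return 'previous';
  const match = trimmed.match(/^(\d+)\s*(ms|s|m|h|d|w|M|y)$/);
  if (!match) return 'invalid';
  return moment.duration(Number(match[1]), match[2] as moment.unitOfTime.DurationConstructor);
}

console.log(String(parseShift('1d'))); // "P1D", a concrete one-day duration
console.log(parseShift('previous'));   // resolved later against the interval or time range
console.log(parseShift('soon'));       // 'invalid', so getTimeShift() throws
```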
'clientip', size: 10 }, + }, + { enabled: true, type: 'avg', schema: 'metric', params: { field: 'bytes' } }, + { + enabled: true, + type: 'sum', + schema: 'metric', + params: { field: 'bytes', timeShift: '1d' }, + }, + ]; + indexPattern.fields.push({ + name: 'timestamp', + type: 'date', + esTypes: ['date'], + aggregatable: true, + filterable: true, + searchable: true, + } as IndexPatternField); + + const ac = new AggConfigs(indexPattern, configStates, { typesRegistry }); + ac.timeFields = ['timestamp']; + ac.timeRange = { + from: '2021-05-05T00:00:00.000Z', + to: '2021-05-10T00:00:00.000Z', + }; + const dsl = ac.toDsl(); + + const terms = ac.byName('terms')[0]; + const avg = ac.byName('avg')[0]; + const sum = ac.byName('sum')[0]; + + expect(dsl[terms.id].aggs.time_offset_split.filters.filters).toMatchInlineSnapshot(` + Object { + "0": Object { + "range": Object { + "timestamp": Object { + "gte": "2021-05-05T00:00:00.000Z", + "lte": "2021-05-10T00:00:00.000Z", + }, + }, + }, + "86400000": Object { + "range": Object { + "timestamp": Object { + "gte": "2021-05-04T00:00:00.000Z", + "lte": "2021-05-09T00:00:00.000Z", + }, + }, + }, + } + `); + expect(dsl[terms.id].aggs.time_offset_split.aggs).toHaveProperty(avg.id); + expect(dsl[terms.id].aggs.time_offset_split.aggs).toHaveProperty(sum.id); + }); + + it('does not insert a time split if there is a single time shift', () => { + const configStates = [ + { + enabled: true, + type: 'terms', + schema: 'segment', + params: { field: 'clientip', size: 10 }, + }, + { + enabled: true, + type: 'avg', + schema: 'metric', + params: { + field: 'bytes', + timeShift: '1d', + }, + }, + { + enabled: true, + type: 'sum', + schema: 'metric', + params: { field: 'bytes', timeShift: '1d' }, + }, + ]; + + const ac = new AggConfigs(indexPattern, configStates, { typesRegistry }); + ac.timeFields = ['timestamp']; + ac.timeRange = { + from: '2021-05-05T00:00:00.000Z', + to: '2021-05-10T00:00:00.000Z', + }; + const dsl = ac.toDsl(); + + const terms = ac.byName('terms')[0]; + const avg = ac.byName('avg')[0]; + const sum = ac.byName('sum')[0]; + + expect(dsl[terms.id].aggs).not.toHaveProperty('time_offset_split'); + expect(dsl[terms.id].aggs).toHaveProperty(avg.id); + expect(dsl[terms.id].aggs).toHaveProperty(sum.id); + }); + it('writes multiple metric aggregations at every level if the vis is hierarchical', () => { const configStates = [ { enabled: true, type: 'terms', schema: 'segment', params: { field: 'bytes', orderBy: 1 } }, @@ -426,4 +530,246 @@ describe('AggConfigs', () => { ); }); }); + + describe('#postFlightTransform', () => { + it('merges together splitted responses for multiple shifts', () => { + indexPattern = stubIndexPattern as IndexPattern; + indexPattern.fields.getByName = (name) => (({ name } as unknown) as IndexPatternField); + const configStates = [ + { + enabled: true, + type: 'terms', + schema: 'segment', + params: { field: 'clientip', size: 10 }, + }, + { + enabled: true, + type: 'date_histogram', + schema: 'segment', + params: { field: '@timestamp', interval: '1d' }, + }, + { + enabled: true, + type: 'avg', + schema: 'metric', + params: { + field: 'bytes', + timeShift: '1d', + }, + }, + { + enabled: true, + type: 'sum', + schema: 'metric', + params: { field: 'bytes' }, + }, + ]; + + const ac = new AggConfigs(indexPattern, configStates, { typesRegistry }); + ac.timeFields = ['@timestamp']; + ac.timeRange = { + from: '2021-05-05T00:00:00.000Z', + to: '2021-05-10T00:00:00.000Z', + }; + // 1 terms bucket (A), with 2 date buckets (7th and 8th of May) + // 
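For orientation while reading the snapshot above: `time_offset_split` is an ordinary `filters` aggregation keyed by the shift in milliseconds, one shifted copy of the queried window per key. A hand-written sketch of the generated shape (values copied from the test, not produced by running the code):

```ts
// Shape sketch only: keys are shifts in ms, each filter is the time window
// moved back by that shift; the metric aggs are nested once per key.
const timeOffsetSplit = {
  filters: {
    filters: {
      '0': {
        range: { timestamp: { gte: '2021-05-05T00:00:00.000Z', lte: '2021-05-10T00:00:00.000Z' } },
      },
      '86400000': {
        range: { timestamp: { gte: '2021-05-04T00:00:00.000Z', lte: '2021-05-09T00:00:00.000Z' } },
      },
    },
  },
  aggs: {}, // the avg/sum bodies from the test go here, evaluated per shift
};
```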
the bucket keys of the shifted time range will be shifted forward + const response = { + rawResponse: { + aggregations: { + '1': { + buckets: [ + { + key: 'A', + time_offset_split: { + buckets: { + '0': { + 2: { + buckets: [ + { + // 2021-05-07 + key: 1620345600000, + 3: { + value: 1.1, + }, + 4: { + value: 2.2, + }, + }, + { + // 2021-05-08 + key: 1620432000000, + doc_count: 26, + 3: { + value: 3.3, + }, + 4: { + value: 4.4, + }, + }, + ], + }, + }, + '86400000': { + 2: { + buckets: [ + { + // 2021-05-07 + key: 1620345600000, + doc_count: 13, + 3: { + value: 5.5, + }, + 4: { + value: 6.6, + }, + }, + { + // 2021-05-08 + key: 1620432000000, + 3: { + value: 7.7, + }, + 4: { + value: 8.8, + }, + }, + ], + }, + }, + }, + }, + }, + ], + }, + }, + }, + }; + const mergedResponse = ac.postFlightTransform( + (response as unknown) as IEsSearchResponse + ); + expect(mergedResponse.rawResponse).toEqual({ + aggregations: { + '1': { + buckets: [ + { + '2': { + buckets: [ + { + '4': { + value: 2.2, + }, + // 2021-05-07 + key: 1620345600000, + }, + { + '3': { + value: 5.5, + }, + '4': { + value: 4.4, + }, + doc_count: 26, + doc_count_86400000: 13, + // 2021-05-08 + key: 1620432000000, + }, + { + '3': { + value: 7.7, + }, + // 2021-05-09 + key: 1620518400000, + }, + ], + }, + key: 'A', + }, + ], + }, + }, + }); + }); + + it('shifts date histogram keys and renames doc_count properties for single shift', () => { + indexPattern = stubIndexPattern as IndexPattern; + indexPattern.fields.getByName = (name) => (({ name } as unknown) as IndexPatternField); + const configStates = [ + { + enabled: true, + type: 'date_histogram', + schema: 'segment', + params: { field: '@timestamp', interval: '1d' }, + }, + { + enabled: true, + type: 'avg', + schema: 'metric', + params: { + field: 'bytes', + timeShift: '1d', + }, + }, + ]; + + const ac = new AggConfigs(indexPattern, configStates, { typesRegistry }); + ac.timeFields = ['@timestamp']; + ac.timeRange = { + from: '2021-05-05T00:00:00.000Z', + to: '2021-05-10T00:00:00.000Z', + }; + const response = { + rawResponse: { + aggregations: { + '1': { + buckets: [ + { + // 2021-05-07 + key: 1620345600000, + doc_count: 26, + 2: { + value: 1.1, + }, + }, + { + // 2021-05-08 + key: 1620432000000, + doc_count: 27, + 2: { + value: 2.2, + }, + }, + ], + }, + }, + }, + }; + const mergedResponse = ac.postFlightTransform( + (response as unknown) as IEsSearchResponse + ); + expect(mergedResponse.rawResponse).toEqual({ + aggregations: { + '1': { + buckets: [ + { + '2': { + value: 1.1, + }, + doc_count_86400000: 26, + // 2021-05-08 + key: 1620432000000, + }, + { + '2': { + value: 2.2, + }, + doc_count_86400000: 27, + // 2021-05-09 + key: 1620518400000, + }, + ], + }, + }, + }); + }); + }); }); diff --git a/src/plugins/data/common/search/aggs/agg_configs.ts b/src/plugins/data/common/search/aggs/agg_configs.ts index 2932ef7325aed8..6f8a8d38a4a286 100644 --- a/src/plugins/data/common/search/aggs/agg_configs.ts +++ b/src/plugins/data/common/search/aggs/agg_configs.ts @@ -6,17 +6,26 @@ * Side Public License, v 1. 
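The two `postFlightTransform` tests above pin the merge behavior down; as a compact mental model, the transform re-keys each shifted bucket forward by its shift and folds it into the unshifted series, renaming `doc_count` so shifted metrics stay addressable. A standalone sketch of just that step (hypothetical types; the real merge also carries nested metric values and sub-buckets):

```ts
import moment from 'moment';

// Sketch of the merge: shifted buckets move forward by shiftMs and land in
// the unshifted series; their doc_count becomes doc_count_<shiftMs>.
interface RawBucket {
  key: number;
  doc_count: number;
  [prop: string]: unknown;
}

function mergeShifted(base: RawBucket[], shifted: RawBucket[], shiftMs: number): RawBucket[] {
  const byKey = new Map<number, RawBucket>(
    base.map((b) => [b.key, { ...b }] as [number, RawBucket])
  );
  for (const bucket of shifted) {
    const newKey = moment(bucket.key).add(shiftMs, 'ms').valueOf();
    const target = byKey.get(newKey) ?? { key: newKey, doc_count: 0 };
    target[`doc_count_${shiftMs}`] = bucket.doc_count;
    byKey.set(newKey, target);
  }
  return [...byKey.values()].sort((a, b) => a.key - b.key);
}
```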
*/ -import _ from 'lodash'; +import moment from 'moment'; +import _, { cloneDeep } from 'lodash'; import { i18n } from '@kbn/i18n'; import { Assign } from '@kbn/utility-types'; - -import { ISearchOptions, ISearchSource } from 'src/plugins/data/public'; +import { Aggregate, Bucket } from '@elastic/elasticsearch/api/types'; + +import { + IEsSearchResponse, + ISearchOptions, + ISearchSource, + RangeFilter, +} from 'src/plugins/data/public'; import { AggConfig, AggConfigSerialized, IAggConfig } from './agg_config'; import { IAggType } from './agg_type'; import { AggTypesRegistryStart } from './agg_types_registry'; import { AggGroupNames } from './agg_groups'; import { IndexPattern } from '../../index_patterns/index_patterns/index_pattern'; -import { TimeRange } from '../../../common'; +import { TimeRange, getTime, isRangeFilter } from '../../../common'; +import { IBucketAggConfig } from './buckets'; +import { insertTimeShiftSplit, mergeTimeShifts } from './utils/time_splits'; function removeParentAggs(obj: any) { for (const prop in obj) { @@ -48,6 +57,8 @@ export interface AggConfigsOptions { export type CreateAggConfigParams = Assign; +export type GenericBucket = Bucket & { [property: string]: Aggregate }; + /** * @name AggConfigs * @@ -66,6 +77,7 @@ export class AggConfigs { public indexPattern: IndexPattern; public timeRange?: TimeRange; public timeFields?: string[]; + public forceNow?: Date; public hierarchical?: boolean = false; private readonly typesRegistry: AggTypesRegistryStart; @@ -92,6 +104,10 @@ export class AggConfigs { this.timeFields = timeFields; } + setForceNow(now: Date | undefined) { + this.forceNow = now; + } + setTimeRange(timeRange: TimeRange) { this.timeRange = timeRange; @@ -183,7 +199,13 @@ export class AggConfigs { let dslLvlCursor: Record; let nestedMetrics: Array<{ config: AggConfig; dsl: Record }> | []; + const timeShifts = this.getTimeShifts(); + const hasMultipleTimeShifts = Object.keys(timeShifts).length > 1; + if (this.hierarchical) { + if (hasMultipleTimeShifts) { + throw new Error('Multiple time shifts not supported for hierarchical metrics'); + } // collect all metrics, and filter out the ones that we won't be copying nestedMetrics = this.aggs .filter(function (agg) { @@ -196,52 +218,67 @@ export class AggConfigs { }; }); } - this.getRequestAggs() - .filter((config: AggConfig) => !config.type.hasNoDsl) - .forEach((config: AggConfig, i: number, list) => { - if (!dslLvlCursor) { - // start at the top level - dslLvlCursor = dslTopLvl; - } else { - const prevConfig: AggConfig = list[i - 1]; - const prevDsl = dslLvlCursor[prevConfig.id]; + const requestAggs = this.getRequestAggs(); + const aggsWithDsl = requestAggs.filter((agg) => !agg.type.hasNoDsl).length; + const timeSplitIndex = this.getAll().findIndex( + (config) => 'splitForTimeShift' in config.type && config.type.splitForTimeShift(config, this) + ); - // advance the cursor and nest under the previous agg, or - // put it on the same level if the previous agg doesn't accept - // sub aggs - dslLvlCursor = prevDsl?.aggs || dslLvlCursor; - } + requestAggs.forEach((config: AggConfig, i: number, list) => { + if (!dslLvlCursor) { + // start at the top level + dslLvlCursor = dslTopLvl; + } else { + const prevConfig: AggConfig = list[i - 1]; + const prevDsl = dslLvlCursor[prevConfig.id]; + + // advance the cursor and nest under the previous agg, or + // put it on the same level if the previous agg doesn't accept + // sub aggs + dslLvlCursor = prevDsl?.aggs || dslLvlCursor; + } + + if (hasMultipleTimeShifts) { + 
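Note how `hasMultipleTimeShifts` is computed here: `getTimeShifts()` registers a zero-duration entry for every unshifted metric, so a single shifted metric next to a plain one already counts as two shifts. A small sketch of that counting rule (the helper name is illustrative):

```ts
import moment from 'moment';

// Mirrors getTimeShifts(): shifts are keyed by length in ms, and unshifted
// metrics file under 0, so one plain metric plus one 1d-shifted metric
// already yields two distinct entries.
function distinctShiftCount(shifts: Array<moment.Duration | undefined>): number {
  const keys = new Set<string>();
  for (const shift of shifts) {
    keys.add(String((shift ?? moment.duration(0)).asMilliseconds()));
  }
  return keys.size;
}

console.log(distinctShiftCount([undefined, moment.duration(1, 'd')])); // 2, triggers the split
```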
dslLvlCursor = insertTimeShiftSplit(this, config, timeShifts, dslLvlCursor); + } - const dsl = config.type.hasNoDslParams - ? config.toDsl(this) - : (dslLvlCursor[config.id] = config.toDsl(this)); - let subAggs: any; + if (config.type.hasNoDsl) { + return; + } - parseParentAggs(dslLvlCursor, dsl); + const dsl = config.type.hasNoDslParams + ? config.toDsl(this) + : (dslLvlCursor[config.id] = config.toDsl(this)); + let subAggs: any; - if (config.type.type === AggGroupNames.Buckets && i < list.length - 1) { - // buckets that are not the last item in the list accept sub-aggs - subAggs = dsl.aggs || (dsl.aggs = {}); - } + parseParentAggs(dslLvlCursor, dsl); - if (subAggs) { - _.each(subAggs, (agg) => { - parseParentAggs(subAggs, agg); - }); - } - if (subAggs && nestedMetrics) { - nestedMetrics.forEach((agg: any) => { - subAggs[agg.config.id] = agg.dsl; - // if a nested metric agg has parent aggs, we have to add them to every level of the tree - // to make sure "bucket_path" references in the nested metric agg itself are still working - if (agg.dsl.parentAggs) { - Object.entries(agg.dsl.parentAggs).forEach(([parentAggId, parentAgg]) => { - subAggs[parentAggId] = parentAgg; - }); - } - }); - } - }); + if ( + config.type.type === AggGroupNames.Buckets && + (i < aggsWithDsl - 1 || timeSplitIndex > i) + ) { + // buckets that are not the last item in the list of dsl producing aggs or have a time split coming up accept sub-aggs + subAggs = dsl.aggs || (dsl.aggs = {}); + } + + if (subAggs) { + _.each(subAggs, (agg) => { + parseParentAggs(subAggs, agg); + }); + } + if (subAggs && nestedMetrics) { + nestedMetrics.forEach((agg: any) => { + subAggs[agg.config.id] = agg.dsl; + // if a nested metric agg has parent aggs, we have to add them to every level of the tree + // to make sure "bucket_path" references in the nested metric agg itself are still working + if (agg.dsl.parentAggs) { + Object.entries(agg.dsl.parentAggs).forEach(([parentAggId, parentAgg]) => { + subAggs[parentAggId] = parentAgg; + }); + } + }); + } + }); removeParentAggs(dslTopLvl); return dslTopLvl; @@ -289,6 +326,104 @@ export class AggConfigs { ); } + getTimeShifts(): Record { + const timeShifts: Record = {}; + this.getAll() + .filter((agg) => agg.schema === 'metric') + .map((agg) => agg.getTimeShift()) + .forEach((timeShift) => { + if (timeShift) { + timeShifts[String(timeShift.asMilliseconds())] = timeShift; + } else { + timeShifts[0] = moment.duration(0); + } + }); + return timeShifts; + } + + getTimeShiftInterval(): moment.Duration | undefined { + const splitAgg = (this.getAll().filter( + (agg) => agg.type.type === AggGroupNames.Buckets + ) as IBucketAggConfig[]).find((agg) => agg.type.splitForTimeShift(agg, this)); + return splitAgg?.type.getTimeShiftInterval(splitAgg); + } + + hasTimeShifts(): boolean { + return this.getAll().some((agg) => agg.hasTimeShift()); + } + + getSearchSourceTimeFilter(forceNow?: Date) { + if (!this.timeFields || !this.timeRange) { + return []; + } + const timeRange = this.timeRange; + const timeFields = this.timeFields; + const timeShifts = this.getTimeShifts(); + if (!this.hasTimeShifts()) { + return this.timeFields + .map((fieldName) => getTime(this.indexPattern, timeRange, { fieldName, forceNow })) + .filter(isRangeFilter); + } + return [ + { + meta: { + index: this.indexPattern?.id, + params: {}, + alias: '', + disabled: false, + negate: false, + }, + query: { + bool: { + should: Object.entries(timeShifts).map(([, shift]) => { + return { + bool: { + filter: timeFields + .map( + (fieldName) => + [ + 
getTime(this.indexPattern, timeRange, { fieldName, forceNow }), + fieldName, + ] as [RangeFilter | undefined, string] + ) + .filter(([filter]) => isRangeFilter(filter)) + .map(([filter, field]) => ({ + range: { + [field]: { + gte: moment(filter?.range[field].gte).subtract(shift).toISOString(), + lte: moment(filter?.range[field].lte).subtract(shift).toISOString(), + }, + }, + })), + }, + }; + }), + minimum_should_match: 1, + }, + }, + }, + ]; + } + + postFlightTransform(response: IEsSearchResponse) { + if (!this.hasTimeShifts()) { + return response; + } + const transformedRawResponse = cloneDeep(response.rawResponse); + if (!transformedRawResponse.aggregations) { + transformedRawResponse.aggregations = { + doc_count: response.rawResponse.hits?.total as Aggregate, + }; + } + const aggCursor = transformedRawResponse.aggregations!; + + mergeTimeShifts(this, aggCursor); + return { + ...response, + rawResponse: transformedRawResponse, + }; + } + getRequestAggById(id: string) { return this.aggs.find((agg: AggConfig) => agg.id === id); } diff --git a/src/plugins/data/common/search/aggs/agg_type.ts b/src/plugins/data/common/search/aggs/agg_type.ts index f0f3912bf64fea..48ce54bbd61bdb 100644 --- a/src/plugins/data/common/search/aggs/agg_type.ts +++ b/src/plugins/data/common/search/aggs/agg_type.ts @@ -215,6 +215,10 @@ export class AggType< return agg.id; }; + splitForTimeShift(agg: TAggConfig, aggs: IAggConfigs) { + return false; + } + /** * Generic AggType Constructor * diff --git a/src/plugins/data/common/search/aggs/buckets/_terms_other_bucket_helper.ts b/src/plugins/data/common/search/aggs/buckets/_terms_other_bucket_helper.ts index 6230ae897b1702..372d487bcf7a39 100644 --- a/src/plugins/data/common/search/aggs/buckets/_terms_other_bucket_helper.ts +++ b/src/plugins/data/common/search/aggs/buckets/_terms_other_bucket_helper.ts @@ -166,7 +166,7 @@ export const buildOtherBucketAgg = ( key: string ) => { // make sure there are actually results for the buckets - if (aggregations[aggId].buckets.length < 1) { + if (aggregations[aggId]?.buckets.length < 1) { noAggBucketResults = true; return; } diff --git a/src/plugins/data/common/search/aggs/buckets/bucket_agg_type.ts b/src/plugins/data/common/search/aggs/buckets/bucket_agg_type.ts index e9ed3799b90cf2..d44e634a00fe6b 100644 --- a/src/plugins/data/common/search/aggs/buckets/bucket_agg_type.ts +++ b/src/plugins/data/common/search/aggs/buckets/bucket_agg_type.ts @@ -6,8 +6,9 @@ * Side Public License, v 1. 
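The range arithmetic in `getSearchSourceTimeFilter` above is the same for every shift: subtract the shift from both edges of the window, then OR all shifted windows together with `minimum_should_match: 1`. A standalone sketch of the per-shift piece:

```ts
import moment from 'moment';

// Per-shift window computation: both edges move back by the shift. The
// resulting ranges are combined with bool.should / minimum_should_match: 1.
function shiftedRange(from: string, to: string, shift: moment.Duration) {
  return {
    gte: moment(from).subtract(shift).toISOString(),
    lte: moment(to).subtract(shift).toISOString(),
  };
}

// A 1d shift against May 5..10 queries May 4..9.
console.log(
  shiftedRange('2021-05-05T00:00:00.000Z', '2021-05-10T00:00:00.000Z', moment.duration(1, 'd'))
);
```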
*/
+import moment from 'moment';
 import { IAggConfig } from '../agg_config';
-import { KBN_FIELD_TYPES } from '../../../../common';
+import { GenericBucket, IAggConfigs, KBN_FIELD_TYPES } from '../../../../common';
 import { AggType, AggTypeConfig } from '../agg_type';
 import { AggParamType } from '../param_types/agg';
@@ -26,6 +27,14 @@ const bucketType = 'buckets';
 interface BucketAggTypeConfig<TBucketAggConfig extends IAggConfig>
   extends AggTypeConfig<TBucketAggConfig, BucketAggParam<TBucketAggConfig>> {
   getKey?: (bucket: any, key: any, agg: IAggConfig) => any;
+  getShiftedKey?: (
+    agg: TBucketAggConfig,
+    key: string | number,
+    timeShift: moment.Duration
+  ) => string | number;
+  orderBuckets?(agg: TBucketAggConfig, a: GenericBucket, b: GenericBucket): number;
+  splitForTimeShift?(agg: TBucketAggConfig, aggs: IAggConfigs): boolean;
+  getTimeShiftInterval?(agg: TBucketAggConfig): undefined | moment.Duration;
 }
 
 export class BucketAggType<TBucketAggConfig extends IAggConfig = IBucketAggConfig> extends AggType<
@@ -35,6 +44,22 @@ export class BucketAggType
   getKey: (bucket: any, key: any, agg: TBucketAggConfig) => any;
   type = bucketType;
 
+  getShiftedKey(
+    agg: TBucketAggConfig,
+    key: string | number,
+    timeShift: moment.Duration
+  ): string | number {
+    return key;
+  }
+
+  getTimeShiftInterval(agg: TBucketAggConfig): undefined | moment.Duration {
+    return undefined;
+  }
+
+  orderBuckets(agg: TBucketAggConfig, a: GenericBucket, b: GenericBucket): number {
+    return Number(a.key) - Number(b.key);
+  }
+
   constructor(config: BucketAggTypeConfig<TBucketAggConfig>) {
     super(config);
 
@@ -43,6 +68,22 @@ export class BucketAggType
       return key || bucket.key;
     });
+
+    if (config.getShiftedKey) {
+      this.getShiftedKey = config.getShiftedKey;
+    }
+
+    if (config.orderBuckets) {
+      this.orderBuckets = config.orderBuckets;
+    }
+
+    if (config.getTimeShiftInterval) {
+      this.getTimeShiftInterval = config.getTimeShiftInterval;
+    }
+
+    if (config.splitForTimeShift) {
+      this.splitForTimeShift = config.splitForTimeShift;
+    }
   }
 }
diff --git a/src/plugins/data/common/search/aggs/buckets/date_histogram.ts b/src/plugins/data/common/search/aggs/buckets/date_histogram.ts
index 4a83ae38d34db9..4cbf6562487b2c 100644
--- a/src/plugins/data/common/search/aggs/buckets/date_histogram.ts
+++ b/src/plugins/data/common/search/aggs/buckets/date_histogram.ts
@@ -135,6 +135,17 @@ export const getDateHistogramBucketAgg = ({
         },
       };
     },
+    getShiftedKey(agg, key, timeShift) {
+      return moment(key).add(timeShift).valueOf();
+    },
+    splitForTimeShift(agg, aggs) {
+      return aggs.hasTimeShifts() && Boolean(aggs.timeFields?.includes(agg.fieldName()));
+    },
+    getTimeShiftInterval(agg) {
+      const { useNormalizedEsInterval } = agg.params;
+      const interval = agg.buckets.getInterval(useNormalizedEsInterval);
+      return interval;
+    },
     params: [
       {
         name: 'field',
diff --git a/src/plugins/data/common/search/aggs/buckets/terms.ts b/src/plugins/data/common/search/aggs/buckets/terms.ts
index 1b876051d009b7..b9329bcb25af37 100644
--- a/src/plugins/data/common/search/aggs/buckets/terms.ts
+++ b/src/plugins/data/common/search/aggs/buckets/terms.ts
@@ -9,6 +9,7 @@
 
 import { noop } from 'lodash';
 import { i18n } from '@kbn/i18n';
+import moment from 'moment';
 import { BucketAggType, IBucketAggConfig } from './bucket_agg_type';
 import { BUCKET_TYPES } from './bucket_agg_types';
 import { createFilterTerms } from './create_filter/terms';
@@ -179,6 +180,54 @@ export const getTermsBucketAgg = () =>
           return;
         }
 
+        if (
+          aggs?.hasTimeShifts() &&
+          Object.keys(aggs?.getTimeShifts()).length > 1 &&
+          aggs.timeRange
+        ) {
+          const shift = orderAgg.getTimeShift();
+          orderAgg = aggs.createAggConfig(
+            {
+              type: 'filtered_metric',
+              id: orderAgg.id,
+              params: {
+                customBucket: aggs
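The `getShiftedKey` override on the date histogram above is what later lets `postFlightTransform` line shifted buckets up with current ones: bucket keys travel forward by the shift. In miniature:

```ts
import moment from 'moment';

// The date_histogram override in miniature: shifted buckets are re-keyed by
// adding the shift back, so yesterday's bucket lines up with today's.
const shiftedKey = (key: number, timeShift: moment.Duration) =>
  moment(key).add(timeShift).valueOf();

console.log(shiftedKey(1620345600000, moment.duration(1, 'd'))); // 1620432000000 (2021-05-07 -> 2021-05-08)
```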
.createAggConfig( + { + type: 'filter', + id: 'shift', + params: { + filter: { + language: 'lucene', + query: { + range: { + [aggs.timeFields![0]]: { + gte: moment(aggs.timeRange.from) + .subtract(shift || 0) + .toISOString(), + lte: moment(aggs.timeRange.to) + .subtract(shift || 0) + .toISOString(), + }, + }, + }, + }, + }, + }, + { + addToAggConfigs: false, + } + ) + .serialize(), + customMetric: orderAgg.serialize(), + }, + enabled: false, + }, + { + addToAggConfigs: false, + } + ); + } if (orderAgg.type.name === 'count') { order._count = dir; return; diff --git a/src/plugins/data/common/search/aggs/metrics/avg_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/avg_fn.test.ts index 05a6e9eeff7d74..0b794617fb96ed 100644 --- a/src/plugins/data/common/search/aggs/metrics/avg_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/avg_fn.test.ts @@ -27,6 +27,7 @@ describe('agg_expression_functions', () => { "customLabel": undefined, "field": "machine.os.keyword", "json": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "avg", diff --git a/src/plugins/data/common/search/aggs/metrics/avg_fn.ts b/src/plugins/data/common/search/aggs/metrics/avg_fn.ts index 253013238d10e4..e32de6cd0a83f3 100644 --- a/src/plugins/data/common/search/aggs/metrics/avg_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/avg_fn.ts @@ -62,6 +62,13 @@ export const aggAvg = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. "previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/bucket_avg_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/bucket_avg_fn.test.ts index 33f20b9a40dc26..ac214c1a1591ce 100644 --- a/src/plugins/data/common/search/aggs/metrics/bucket_avg_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/bucket_avg_fn.test.ts @@ -29,6 +29,7 @@ describe('agg_expression_functions', () => { "customLabel": undefined, "customMetric": undefined, "json": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "avg_bucket", @@ -42,11 +43,13 @@ describe('agg_expression_functions', () => { "customLabel": undefined, "customMetric": undefined, "json": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "avg_bucket", }, "json": undefined, + "timeShift": undefined, } `); }); diff --git a/src/plugins/data/common/search/aggs/metrics/bucket_avg_fn.ts b/src/plugins/data/common/search/aggs/metrics/bucket_avg_fn.ts index b79e57207ebd81..a980f6ac555a24 100644 --- a/src/plugins/data/common/search/aggs/metrics/bucket_avg_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/bucket_avg_fn.ts @@ -77,6 +77,13 @@ export const aggBucketAvg = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. 
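The wrap of the terms order metric just above deserves a note: when several shifts are in play, a bare order metric would rank terms on a mix of shifted and unshifted documents, so it is rebuilt as a `filtered_metric` pinned to the metric's own (possibly shifted) copy of the time window. A sketch of the window computation under that reading:

```ts
import moment from 'moment';

// Window used by the wrapped order metric: the configured time range moved
// back by the metric's shift (or left as-is when the metric is unshifted).
function orderMetricWindow(timeRange: { from: string; to: string }, shift?: moment.Duration) {
  const effectiveShift = shift ?? moment.duration(0);
  return {
    gte: moment(timeRange.from).subtract(effectiveShift).toISOString(),
    lte: moment(timeRange.to).subtract(effectiveShift).toISOString(),
  };
}
```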
"previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/bucket_max_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/bucket_max_fn.test.ts index 35b765ec0e075c..e6db7665a68ddc 100644 --- a/src/plugins/data/common/search/aggs/metrics/bucket_max_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/bucket_max_fn.test.ts @@ -29,6 +29,7 @@ describe('agg_expression_functions', () => { "customLabel": undefined, "customMetric": undefined, "json": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "max_bucket", @@ -42,11 +43,13 @@ describe('agg_expression_functions', () => { "customLabel": undefined, "customMetric": undefined, "json": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "max_bucket", }, "json": undefined, + "timeShift": undefined, } `); }); diff --git a/src/plugins/data/common/search/aggs/metrics/bucket_max_fn.ts b/src/plugins/data/common/search/aggs/metrics/bucket_max_fn.ts index e12a592448334d..0d3e8a5e7f878e 100644 --- a/src/plugins/data/common/search/aggs/metrics/bucket_max_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/bucket_max_fn.ts @@ -77,6 +77,13 @@ export const aggBucketMax = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. "previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/bucket_min_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/bucket_min_fn.test.ts index 49346036ce6492..22ec55506fe901 100644 --- a/src/plugins/data/common/search/aggs/metrics/bucket_min_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/bucket_min_fn.test.ts @@ -29,6 +29,7 @@ describe('agg_expression_functions', () => { "customLabel": undefined, "customMetric": undefined, "json": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "min_bucket", @@ -42,11 +43,13 @@ describe('agg_expression_functions', () => { "customLabel": undefined, "customMetric": undefined, "json": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "min_bucket", }, "json": undefined, + "timeShift": undefined, } `); }); diff --git a/src/plugins/data/common/search/aggs/metrics/bucket_min_fn.ts b/src/plugins/data/common/search/aggs/metrics/bucket_min_fn.ts index ece5c07c6e5f86..3b6c32595909a9 100644 --- a/src/plugins/data/common/search/aggs/metrics/bucket_min_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/bucket_min_fn.ts @@ -77,6 +77,13 @@ export const aggBucketMin = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. 
"previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/bucket_sum_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/bucket_sum_fn.test.ts index 0f5c84a477b06b..0e3370cec14e5d 100644 --- a/src/plugins/data/common/search/aggs/metrics/bucket_sum_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/bucket_sum_fn.test.ts @@ -29,6 +29,7 @@ describe('agg_expression_functions', () => { "customLabel": undefined, "customMetric": undefined, "json": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "sum_bucket", @@ -42,11 +43,13 @@ describe('agg_expression_functions', () => { "customLabel": undefined, "customMetric": undefined, "json": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "sum_bucket", }, "json": undefined, + "timeShift": undefined, } `); }); diff --git a/src/plugins/data/common/search/aggs/metrics/bucket_sum_fn.ts b/src/plugins/data/common/search/aggs/metrics/bucket_sum_fn.ts index 5fe0ee75bfe38a..ae3502bbc25883 100644 --- a/src/plugins/data/common/search/aggs/metrics/bucket_sum_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/bucket_sum_fn.ts @@ -77,6 +77,13 @@ export const aggBucketSum = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. "previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/cardinality_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/cardinality_fn.test.ts index 8b235edacb59a0..08d64e599d8a9e 100644 --- a/src/plugins/data/common/search/aggs/metrics/cardinality_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/cardinality_fn.test.ts @@ -27,6 +27,7 @@ describe('agg_expression_functions', () => { "customLabel": undefined, "field": "machine.os.keyword", "json": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "cardinality", diff --git a/src/plugins/data/common/search/aggs/metrics/cardinality_fn.ts b/src/plugins/data/common/search/aggs/metrics/cardinality_fn.ts index ee0f72e01e1def..89006761407f74 100644 --- a/src/plugins/data/common/search/aggs/metrics/cardinality_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/cardinality_fn.ts @@ -67,6 +67,13 @@ export const aggCardinality = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. 
"previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/count.ts b/src/plugins/data/common/search/aggs/metrics/count.ts index 8a10d7edb3f835..fac1751290f70d 100644 --- a/src/plugins/data/common/search/aggs/metrics/count.ts +++ b/src/plugins/data/common/search/aggs/metrics/count.ts @@ -31,7 +31,12 @@ export const getCountMetricAgg = () => }; }, getValue(agg, bucket) { - return bucket.doc_count; + const timeShift = agg.getTimeShift(); + if (!timeShift) { + return bucket.doc_count; + } else { + return bucket[`doc_count_${timeShift.asMilliseconds()}`]; + } }, isScalable() { return true; diff --git a/src/plugins/data/common/search/aggs/metrics/count_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/count_fn.test.ts index 047b9bbd8517f2..c6736c5b69f7d4 100644 --- a/src/plugins/data/common/search/aggs/metrics/count_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/count_fn.test.ts @@ -23,6 +23,7 @@ describe('agg_expression_functions', () => { "id": undefined, "params": Object { "customLabel": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "count", diff --git a/src/plugins/data/common/search/aggs/metrics/count_fn.ts b/src/plugins/data/common/search/aggs/metrics/count_fn.ts index 40c87db57eedca..a3a4bcc16a3913 100644 --- a/src/plugins/data/common/search/aggs/metrics/count_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/count_fn.ts @@ -54,6 +54,13 @@ export const aggCount = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. 
"previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/cumulative_sum_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/cumulative_sum_fn.test.ts index 5eb6d2b7804420..f311ab35a8d0df 100644 --- a/src/plugins/data/common/search/aggs/metrics/cumulative_sum_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/cumulative_sum_fn.test.ts @@ -29,6 +29,7 @@ describe('agg_expression_functions', () => { "customMetric": undefined, "json": undefined, "metricAgg": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "cumulative_sum", @@ -54,6 +55,7 @@ describe('agg_expression_functions', () => { "customMetric": undefined, "json": undefined, "metricAgg": "sum", + "timeShift": undefined, }, "schema": undefined, "type": "cumulative_sum", @@ -81,12 +83,14 @@ describe('agg_expression_functions', () => { "customMetric": undefined, "json": undefined, "metricAgg": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "cumulative_sum", }, "json": undefined, "metricAgg": undefined, + "timeShift": undefined, } `); }); diff --git a/src/plugins/data/common/search/aggs/metrics/cumulative_sum_fn.ts b/src/plugins/data/common/search/aggs/metrics/cumulative_sum_fn.ts index cba4de1ad11aec..5cdbcfe8575853 100644 --- a/src/plugins/data/common/search/aggs/metrics/cumulative_sum_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/cumulative_sum_fn.ts @@ -81,6 +81,13 @@ export const aggCumulativeSum = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. 
"previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/derivative_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/derivative_fn.test.ts index 1eaca811a2481f..3e4fc838dd398a 100644 --- a/src/plugins/data/common/search/aggs/metrics/derivative_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/derivative_fn.test.ts @@ -29,6 +29,7 @@ describe('agg_expression_functions', () => { "customMetric": undefined, "json": undefined, "metricAgg": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "derivative", @@ -54,6 +55,7 @@ describe('agg_expression_functions', () => { "customMetric": undefined, "json": undefined, "metricAgg": "sum", + "timeShift": undefined, }, "schema": undefined, "type": "derivative", @@ -81,12 +83,14 @@ describe('agg_expression_functions', () => { "customMetric": undefined, "json": undefined, "metricAgg": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "derivative", }, "json": undefined, "metricAgg": undefined, + "timeShift": undefined, } `); }); diff --git a/src/plugins/data/common/search/aggs/metrics/derivative_fn.ts b/src/plugins/data/common/search/aggs/metrics/derivative_fn.ts index e27179c7209ade..8bfe808aede8e9 100644 --- a/src/plugins/data/common/search/aggs/metrics/derivative_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/derivative_fn.ts @@ -81,6 +81,13 @@ export const aggDerivative = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. 
"previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/filtered_metric.ts b/src/plugins/data/common/search/aggs/metrics/filtered_metric.ts index aa2417bbf84156..00f47d31b0398d 100644 --- a/src/plugins/data/common/search/aggs/metrics/filtered_metric.ts +++ b/src/plugins/data/common/search/aggs/metrics/filtered_metric.ts @@ -42,7 +42,7 @@ export const getFilteredMetricAgg = () => { getValue(agg, bucket) { const customMetric = agg.getParam('customMetric'); const customBucket = agg.getParam('customBucket'); - return customMetric.getValue(bucket[customBucket.id]); + return bucket && bucket[customBucket.id] && customMetric.getValue(bucket[customBucket.id]); }, getValueBucketPath(agg) { const customBucket = agg.getParam('customBucket'); diff --git a/src/plugins/data/common/search/aggs/metrics/filtered_metric_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/filtered_metric_fn.test.ts index 22e97fe18b604c..d1ce6ff4639035 100644 --- a/src/plugins/data/common/search/aggs/metrics/filtered_metric_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/filtered_metric_fn.test.ts @@ -28,6 +28,7 @@ describe('agg_expression_functions', () => { "customBucket": undefined, "customLabel": undefined, "customMetric": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "filtered_metric", @@ -40,10 +41,12 @@ describe('agg_expression_functions', () => { "customBucket": undefined, "customLabel": undefined, "customMetric": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "filtered_metric", }, + "timeShift": undefined, } `); }); diff --git a/src/plugins/data/common/search/aggs/metrics/filtered_metric_fn.ts b/src/plugins/data/common/search/aggs/metrics/filtered_metric_fn.ts index 6a7ff5fa5fd40e..0b3d3acd3a603f 100644 --- a/src/plugins/data/common/search/aggs/metrics/filtered_metric_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/filtered_metric_fn.ts @@ -72,6 +72,13 @@ export const aggFilteredMetric = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. 
"previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/geo_bounds_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/geo_bounds_fn.test.ts index c48233e84404c6..50b5f5b60376b6 100644 --- a/src/plugins/data/common/search/aggs/metrics/geo_bounds_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/geo_bounds_fn.test.ts @@ -27,6 +27,7 @@ describe('agg_expression_functions', () => { "customLabel": undefined, "field": "machine.os.keyword", "json": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "geo_bounds", diff --git a/src/plugins/data/common/search/aggs/metrics/geo_bounds_fn.ts b/src/plugins/data/common/search/aggs/metrics/geo_bounds_fn.ts index 19d2dabc843dd9..b2cfad1805b9f5 100644 --- a/src/plugins/data/common/search/aggs/metrics/geo_bounds_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/geo_bounds_fn.ts @@ -67,6 +67,13 @@ export const aggGeoBounds = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. "previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/geo_centroid_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/geo_centroid_fn.test.ts index e984df13527ca9..889ed29c63ee14 100644 --- a/src/plugins/data/common/search/aggs/metrics/geo_centroid_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/geo_centroid_fn.test.ts @@ -27,6 +27,7 @@ describe('agg_expression_functions', () => { "customLabel": undefined, "field": "machine.os.keyword", "json": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "geo_centroid", diff --git a/src/plugins/data/common/search/aggs/metrics/geo_centroid_fn.ts b/src/plugins/data/common/search/aggs/metrics/geo_centroid_fn.ts index 1cc11c345e9ba0..9215f7afb4c6d1 100644 --- a/src/plugins/data/common/search/aggs/metrics/geo_centroid_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/geo_centroid_fn.ts @@ -67,6 +67,13 @@ export const aggGeoCentroid = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. 
"previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/max_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/max_fn.test.ts index d94e01927c851b..021c5aac69e102 100644 --- a/src/plugins/data/common/search/aggs/metrics/max_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/max_fn.test.ts @@ -27,6 +27,7 @@ describe('agg_expression_functions', () => { "customLabel": undefined, "field": "machine.os.keyword", "json": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "max", diff --git a/src/plugins/data/common/search/aggs/metrics/max_fn.ts b/src/plugins/data/common/search/aggs/metrics/max_fn.ts index 7eac992680737d..7a1d8ad22fb7ed 100644 --- a/src/plugins/data/common/search/aggs/metrics/max_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/max_fn.ts @@ -62,6 +62,13 @@ export const aggMax = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. "previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/median.ts b/src/plugins/data/common/search/aggs/metrics/median.ts index bad4c7baf173f6..4fdb1ce6b7d81c 100644 --- a/src/plugins/data/common/search/aggs/metrics/median.ts +++ b/src/plugins/data/common/search/aggs/metrics/median.ts @@ -46,7 +46,7 @@ export const getMedianMetricAgg = () => { { name: 'percents', default: [50], shouldShow: () => false, serialize: () => undefined }, ], getValue(agg, bucket) { - return bucket[agg.id].values['50.0']; + return bucket[agg.id]?.values['50.0']; }, }); }; diff --git a/src/plugins/data/common/search/aggs/metrics/median_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/median_fn.test.ts index e70520b743e179..7ff7f18cdbc02c 100644 --- a/src/plugins/data/common/search/aggs/metrics/median_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/median_fn.test.ts @@ -27,6 +27,7 @@ describe('agg_expression_functions', () => { "customLabel": undefined, "field": "machine.os.keyword", "json": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "median", diff --git a/src/plugins/data/common/search/aggs/metrics/median_fn.ts b/src/plugins/data/common/search/aggs/metrics/median_fn.ts index 1c0afd81a63c4b..a9537e1f99ca41 100644 --- a/src/plugins/data/common/search/aggs/metrics/median_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/median_fn.ts @@ -67,6 +67,13 @@ export const aggMedian = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. 
"previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/metric_agg_type.ts b/src/plugins/data/common/search/aggs/metrics/metric_agg_type.ts index 3ebb771413665d..6ddb0fdd9410d4 100644 --- a/src/plugins/data/common/search/aggs/metrics/metric_agg_type.ts +++ b/src/plugins/data/common/search/aggs/metrics/metric_agg_type.ts @@ -11,7 +11,8 @@ import { AggType, AggTypeConfig } from '../agg_type'; import { AggParamType } from '../param_types/agg'; import { AggConfig } from '../agg_config'; import { METRIC_TYPES } from './metric_agg_types'; -import { FieldTypes } from '../param_types'; +import { BaseParamType, FieldTypes } from '../param_types'; +import { AggGroupNames } from '../agg_groups'; export interface IMetricAggConfig extends AggConfig { type: InstanceType; @@ -47,6 +48,14 @@ export class MetricAggType) { super(config); + this.params.push( + new BaseParamType({ + name: 'timeShift', + type: 'string', + write: () => {}, + }) as MetricAggParam + ); + this.getValue = config.getValue || ((agg, bucket) => { @@ -69,6 +78,14 @@ export class MetricAggType false); + + // split at this point if there are time shifts and this is the first metric + this.splitForTimeShift = (agg, aggs) => + aggs.hasTimeShifts() && + aggs.byType(AggGroupNames.Metrics)[0] === agg && + !aggs + .byType(AggGroupNames.Buckets) + .some((bucketAgg) => bucketAgg.type.splitForTimeShift(bucketAgg, aggs)); } } diff --git a/src/plugins/data/common/search/aggs/metrics/min_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/min_fn.test.ts index ea2d2cd23edaea..fee4b28882408d 100644 --- a/src/plugins/data/common/search/aggs/metrics/min_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/min_fn.test.ts @@ -27,6 +27,7 @@ describe('agg_expression_functions', () => { "customLabel": undefined, "field": "machine.os.keyword", "json": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "min", diff --git a/src/plugins/data/common/search/aggs/metrics/min_fn.ts b/src/plugins/data/common/search/aggs/metrics/min_fn.ts index 6dfbac1ecb8b4d..a97834f310a49e 100644 --- a/src/plugins/data/common/search/aggs/metrics/min_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/min_fn.ts @@ -62,6 +62,13 @@ export const aggMin = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. 
"previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/moving_avg_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/moving_avg_fn.test.ts index bde90c563afc11..645519a6683761 100644 --- a/src/plugins/data/common/search/aggs/metrics/moving_avg_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/moving_avg_fn.test.ts @@ -30,6 +30,7 @@ describe('agg_expression_functions', () => { "json": undefined, "metricAgg": undefined, "script": undefined, + "timeShift": undefined, "window": undefined, }, "schema": undefined, @@ -59,6 +60,7 @@ describe('agg_expression_functions', () => { "json": undefined, "metricAgg": "sum", "script": "test", + "timeShift": undefined, "window": 10, }, "schema": undefined, @@ -88,6 +90,7 @@ describe('agg_expression_functions', () => { "json": undefined, "metricAgg": undefined, "script": undefined, + "timeShift": undefined, "window": undefined, }, "schema": undefined, @@ -96,6 +99,7 @@ describe('agg_expression_functions', () => { "json": undefined, "metricAgg": undefined, "script": undefined, + "timeShift": undefined, "window": undefined, } `); diff --git a/src/plugins/data/common/search/aggs/metrics/moving_avg_fn.ts b/src/plugins/data/common/search/aggs/metrics/moving_avg_fn.ts index 667c585226a528..1637dad561c375 100644 --- a/src/plugins/data/common/search/aggs/metrics/moving_avg_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/moving_avg_fn.ts @@ -94,6 +94,13 @@ export const aggMovingAvg = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. 
"previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/percentile_ranks_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/percentile_ranks_fn.test.ts index 9328597b24cfaa..873765374c80a6 100644 --- a/src/plugins/data/common/search/aggs/metrics/percentile_ranks_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/percentile_ranks_fn.test.ts @@ -27,6 +27,7 @@ describe('agg_expression_functions', () => { "customLabel": undefined, "field": "machine.os.keyword", "json": undefined, + "timeShift": undefined, "values": undefined, }, "schema": undefined, @@ -51,6 +52,7 @@ describe('agg_expression_functions', () => { "customLabel": undefined, "field": "machine.os.keyword", "json": undefined, + "timeShift": undefined, "values": Array [ 1, 2, diff --git a/src/plugins/data/common/search/aggs/metrics/percentile_ranks_fn.ts b/src/plugins/data/common/search/aggs/metrics/percentile_ranks_fn.ts index 7929a01c0b5893..60a2882fcec581 100644 --- a/src/plugins/data/common/search/aggs/metrics/percentile_ranks_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/percentile_ranks_fn.ts @@ -74,6 +74,13 @@ export const aggPercentileRanks = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. "previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/percentiles_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/percentiles_fn.test.ts index 0d71df240d1226..468da036cea88f 100644 --- a/src/plugins/data/common/search/aggs/metrics/percentiles_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/percentiles_fn.test.ts @@ -28,6 +28,7 @@ describe('agg_expression_functions', () => { "field": "machine.os.keyword", "json": undefined, "percents": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "percentiles", @@ -56,6 +57,7 @@ describe('agg_expression_functions', () => { 2, 3, ], + "timeShift": undefined, }, "schema": undefined, "type": "percentiles", diff --git a/src/plugins/data/common/search/aggs/metrics/percentiles_fn.ts b/src/plugins/data/common/search/aggs/metrics/percentiles_fn.ts index fa5120dfc3b97f..1a746a86cbcd57 100644 --- a/src/plugins/data/common/search/aggs/metrics/percentiles_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/percentiles_fn.ts @@ -74,6 +74,13 @@ export const aggPercentiles = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. 
"previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/serial_diff_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/serial_diff_fn.test.ts index 065ef8021cbda2..aa73d5c44dd7fc 100644 --- a/src/plugins/data/common/search/aggs/metrics/serial_diff_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/serial_diff_fn.test.ts @@ -29,6 +29,7 @@ describe('agg_expression_functions', () => { "customMetric": undefined, "json": undefined, "metricAgg": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "serial_diff", @@ -54,6 +55,7 @@ describe('agg_expression_functions', () => { "customMetric": undefined, "json": undefined, "metricAgg": "sum", + "timeShift": undefined, }, "schema": undefined, "type": "serial_diff", @@ -81,12 +83,14 @@ describe('agg_expression_functions', () => { "customMetric": undefined, "json": undefined, "metricAgg": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "serial_diff", }, "json": undefined, "metricAgg": undefined, + "timeShift": undefined, } `); }); diff --git a/src/plugins/data/common/search/aggs/metrics/serial_diff_fn.ts b/src/plugins/data/common/search/aggs/metrics/serial_diff_fn.ts index 925d85774c7ad6..8460cb891f1e4c 100644 --- a/src/plugins/data/common/search/aggs/metrics/serial_diff_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/serial_diff_fn.ts @@ -81,6 +81,13 @@ export const aggSerialDiff = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. "previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/single_percentile_fn.ts b/src/plugins/data/common/search/aggs/metrics/single_percentile_fn.ts index e7ef22c6faeee6..edf69031c31ace 100644 --- a/src/plugins/data/common/search/aggs/metrics/single_percentile_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/single_percentile_fn.ts @@ -74,6 +74,13 @@ export const aggSinglePercentile = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. 
"previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/std_deviation_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/std_deviation_fn.test.ts index 9aaf82e65812b8..849987695dc7c4 100644 --- a/src/plugins/data/common/search/aggs/metrics/std_deviation_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/std_deviation_fn.test.ts @@ -27,6 +27,7 @@ describe('agg_expression_functions', () => { "customLabel": undefined, "field": "machine.os.keyword", "json": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "std_dev", diff --git a/src/plugins/data/common/search/aggs/metrics/std_deviation_fn.ts b/src/plugins/data/common/search/aggs/metrics/std_deviation_fn.ts index 80787a3383c6b5..c181065d2416e7 100644 --- a/src/plugins/data/common/search/aggs/metrics/std_deviation_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/std_deviation_fn.ts @@ -67,6 +67,13 @@ export const aggStdDeviation = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. "previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/sum_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/sum_fn.test.ts index e19fc072e1cd98..f4d4fb5451dcda 100644 --- a/src/plugins/data/common/search/aggs/metrics/sum_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/sum_fn.test.ts @@ -27,6 +27,7 @@ describe('agg_expression_functions', () => { "customLabel": undefined, "field": "machine.os.keyword", "json": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "sum", diff --git a/src/plugins/data/common/search/aggs/metrics/sum_fn.ts b/src/plugins/data/common/search/aggs/metrics/sum_fn.ts index d0175e0c8fafe4..d8e03d28bb12a7 100644 --- a/src/plugins/data/common/search/aggs/metrics/sum_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/sum_fn.ts @@ -62,6 +62,13 @@ export const aggSum = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. 
"previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/metrics/top_hit_fn.test.ts b/src/plugins/data/common/search/aggs/metrics/top_hit_fn.test.ts index e9d6a619a9cd6b..2f8ef74b5c2f0c 100644 --- a/src/plugins/data/common/search/aggs/metrics/top_hit_fn.test.ts +++ b/src/plugins/data/common/search/aggs/metrics/top_hit_fn.test.ts @@ -32,6 +32,7 @@ describe('agg_expression_functions', () => { "size": undefined, "sortField": undefined, "sortOrder": undefined, + "timeShift": undefined, }, "schema": undefined, "type": "top_hits", @@ -64,6 +65,7 @@ describe('agg_expression_functions', () => { "size": 6, "sortField": "_score", "sortOrder": "asc", + "timeShift": undefined, }, "schema": "whatever", "type": "top_hits", diff --git a/src/plugins/data/common/search/aggs/metrics/top_hit_fn.ts b/src/plugins/data/common/search/aggs/metrics/top_hit_fn.ts index 85b54f16954937..bc20f19253eec7 100644 --- a/src/plugins/data/common/search/aggs/metrics/top_hit_fn.ts +++ b/src/plugins/data/common/search/aggs/metrics/top_hit_fn.ts @@ -94,6 +94,13 @@ export const aggTopHit = (): FunctionDefinition => ({ defaultMessage: 'Represents a custom label for this aggregation', }), }, + timeShift: { + types: ['string'], + help: i18n.translate('data.search.aggs.metrics.timeShift.help', { + defaultMessage: + 'Shift the time range for the metric by a set time, for example 1h or 7d. "previous" will use the closest time range from the date histogram or time range filter.', + }), + }, }, fn: (input, args) => { const { id, enabled, schema, ...rest } = args; diff --git a/src/plugins/data/common/search/aggs/types.ts b/src/plugins/data/common/search/aggs/types.ts index 675be2323b93e8..c0eb0c6c241a94 100644 --- a/src/plugins/data/common/search/aggs/types.ts +++ b/src/plugins/data/common/search/aggs/types.ts @@ -132,6 +132,7 @@ export type AggsStart = Assign { + const trimmedVal = val.trim(); + if (trimmedVal === 'previous') { + return 'previous'; + } + const [, amount, unit] = trimmedVal.match(/^(\d+)(\w)$/) || []; + const parsedAmount = Number(amount); + if (Number.isNaN(parsedAmount) || !allowedUnits.includes(unit as AllowedUnit)) { + return 'invalid'; + } + return moment.duration(Number(amount), unit as AllowedUnit); +}; diff --git a/src/plugins/data/common/search/aggs/utils/time_splits.ts b/src/plugins/data/common/search/aggs/utils/time_splits.ts new file mode 100644 index 00000000000000..4ac47efaea3476 --- /dev/null +++ b/src/plugins/data/common/search/aggs/utils/time_splits.ts @@ -0,0 +1,447 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +import moment from 'moment'; +import _, { isArray } from 'lodash'; +import { + Aggregate, + FiltersAggregate, + FiltersBucketItem, + MultiBucketAggregate, +} from '@elastic/elasticsearch/api/types'; + +import { AggGroupNames } from '../agg_groups'; +import { GenericBucket, AggConfigs, getTime, AggConfig } from '../../../../common'; +import { IBucketAggConfig } from '../buckets'; + +/** + * This function will transform an ES response containg a time split (using a filters aggregation before the metrics or date histogram aggregation), + * merging together all branches for the different time ranges into a single response structure which can be tabified into a single table. + * + * If there is just a single time shift, there are no separate branches per time range - in this case only the date histogram keys are shifted by the + * configured amount of time. + * + * To do this, the following steps are taken: + * * Traverse the response tree, tracking the current agg config + * * Once the node which would contain the time split object is found, merge all separate time range buckets into a single layer of buckets of the parent agg + * * Recursively repeat this process for all nested sub-buckets + * + * Example input: + * ``` + * "aggregations" : { + "product" : { + "buckets" : [ + { + "key" : "Product A", + "doc_count" : 512, + "first_year" : { + "doc_count" : 418, + "overall_revenue" : { + "value" : 2163634.0 + } + }, + "time_offset_split" : { + "buckets" : { + "-1y" : { + "doc_count" : 420, + "year" : { + "buckets" : [ + { + "key_as_string" : "2018", + "doc_count" : 81, + "revenue" : { + "value" : 505124.0 + } + }, + { + "key_as_string" : "2019", + "doc_count" : 65, + "revenue" : { + "value" : 363058.0 + } + } + ] + } + }, + "regular" : { + "doc_count" : 418, + "year" : { + "buckets" : [ + { + "key_as_string" : "2019", + "doc_count" : 65, + "revenue" : { + "value" : 363058.0 + } + }, + { + "key_as_string" : "2020", + "doc_count" : 84, + "revenue" : { + "value" : 392924.0 + } + } + ] + } + } + } + } + }, + { + "key" : "Product B", + "doc_count" : 248, + "first_year" : { + "doc_count" : 215, + "overall_revenue" : { + "value" : 1315547.0 + } + }, + "time_offset_split" : { + "buckets" : { + "-1y" : { + "doc_count" : 211, + "year" : { + "buckets" : [ + { + "key_as_string" : "2018", + "key" : 1618963200000, + "doc_count" : 28, + "revenue" : { + "value" : 156543.0 + } + }, + // ... + * ``` + * + * Example output: + * ``` + * "aggregations" : { + "product" : { + "buckets" : [ + { + "key" : "Product A", + "doc_count" : 512, + "first_year" : { + "doc_count" : 418, + "overall_revenue" : { + "value" : 2163634.0 + } + }, + "year" : { + "buckets" : [ + { + "key_as_string" : "2019", + "doc_count" : 81, + "revenue_regular" : { + "value" : 505124.0 + }, + "revenue_-1y" : { + "value" : 302736.0 + } + }, + { + "key_as_string" : "2020", + "doc_count" : 78, + "revenue_regular" : { + "value" : 392924.0 + }, + "revenue_-1y" : { + "value" : 363058.0 + }, + } + // ... 
+ * ``` + * + * + * @param aggConfigs The agg configs instance + * @param aggCursor The root aggregations object from the response which will be mutated in place + */ +export function mergeTimeShifts(aggConfigs: AggConfigs, aggCursor: Record) { + const timeShifts = aggConfigs.getTimeShifts(); + const hasMultipleTimeShifts = Object.keys(timeShifts).length > 1; + const requestAggs = aggConfigs.getRequestAggs(); + const bucketAggs = aggConfigs.aggs.filter( + (agg) => agg.type.type === AggGroupNames.Buckets + ) as IBucketAggConfig[]; + const mergeAggLevel = ( + target: GenericBucket, + source: GenericBucket, + shift: moment.Duration, + aggIndex: number + ) => { + Object.entries(source).forEach(([key, val]) => { + // copy over doc count into special key + if (typeof val === 'number' && key === 'doc_count') { + if (shift.asMilliseconds() === 0) { + target.doc_count = val; + } else { + target[`doc_count_${shift.asMilliseconds()}`] = val; + } + } else if (typeof val !== 'object') { + // other meta keys not of interest + return; + } else { + // a sub-agg + const agg = requestAggs.find((requestAgg) => key.indexOf(requestAgg.id) === 0); + if (agg && agg.type.type === AggGroupNames.Metrics) { + const timeShift = agg.getTimeShift(); + if ( + (timeShift && timeShift.asMilliseconds() === shift.asMilliseconds()) || + (shift.asMilliseconds() === 0 && !timeShift) + ) { + // this is a metric from the current time shift, copy it over + target[key] = source[key]; + } + } else if (agg && agg === bucketAggs[aggIndex]) { + const bucketAgg = agg as IBucketAggConfig; + // expected next bucket sub agg + const subAggregate = val as Aggregate; + const buckets = ('buckets' in subAggregate ? subAggregate.buckets : undefined) as + | GenericBucket[] + | Record + | undefined; + if (!target[key]) { + // sub aggregate only exists in shifted branch, not in base branch - create dummy aggregate + // which will be filled with shifted data + target[key] = { + buckets: isArray(buckets) ? [] : {}, + }; + } + const baseSubAggregate = target[key] as Aggregate; + // only supported bucket formats in agg configs are array of buckets and record of buckets for filters + const baseBuckets = ('buckets' in baseSubAggregate + ? 
baseSubAggregate.buckets + : undefined) as GenericBucket[] | Record | undefined; + // merge + if (isArray(buckets) && isArray(baseBuckets)) { + const baseBucketMap: Record = {}; + baseBuckets.forEach((bucket) => { + baseBucketMap[String(bucket.key)] = bucket; + }); + buckets.forEach((bucket) => { + const bucketKey = bucketAgg.type.getShiftedKey(bucketAgg, bucket.key, shift); + // if a bucket is missing in the map, create an empty one + if (!baseBucketMap[bucketKey]) { + baseBucketMap[String(bucketKey)] = { + key: bucketKey, + } as GenericBucket; + } + mergeAggLevel(baseBucketMap[bucketKey], bucket, shift, aggIndex + 1); + }); + (baseSubAggregate as MultiBucketAggregate).buckets = Object.values( + baseBucketMap + ).sort((a, b) => bucketAgg.type.orderBuckets(bucketAgg, a, b)); + } else if (baseBuckets && buckets && !isArray(baseBuckets)) { + Object.entries(buckets).forEach(([bucketKey, bucket]) => { + // if a bucket is missing in the base response, create an empty one + if (!baseBuckets[bucketKey]) { + baseBuckets[bucketKey] = {} as GenericBucket; + } + mergeAggLevel(baseBuckets[bucketKey], bucket, shift, aggIndex + 1); + }); + } + } + } + }); + }; + const transformTimeShift = (cursor: Record, aggIndex: number): undefined => { + const shouldSplit = aggConfigs.aggs[aggIndex].type.splitForTimeShift( + aggConfigs.aggs[aggIndex], + aggConfigs + ); + if (shouldSplit) { + // multiple time shifts caused a filters agg in the tree we have to merge + if (hasMultipleTimeShifts && cursor.time_offset_split) { + const timeShiftedBuckets = (cursor.time_offset_split as FiltersAggregate).buckets as Record< + string, + FiltersBucketItem + >; + const subTree = {}; + Object.entries(timeShifts).forEach(([key, shift]) => { + mergeAggLevel( + subTree as GenericBucket, + timeShiftedBuckets[key] as GenericBucket, + shift, + aggIndex + ); + }); + + delete cursor.time_offset_split; + Object.assign(cursor, subTree); + } else { + // otherwise we have to "merge" a single level to shift all keys + const [[, shift]] = Object.entries(timeShifts); + const subTree = {}; + mergeAggLevel(subTree, cursor, shift, aggIndex); + Object.assign(cursor, subTree); + } + return; + } + // recurse deeper into the response object + Object.keys(cursor).forEach((subAggId) => { + const subAgg = cursor[subAggId]; + if (typeof subAgg !== 'object' || !('buckets' in subAgg)) { + return; + } + if (isArray(subAgg.buckets)) { + subAgg.buckets.forEach((bucket) => transformTimeShift(bucket, aggIndex + 1)); + } else { + Object.values(subAgg.buckets).forEach((bucket) => transformTimeShift(bucket, aggIndex + 1)); + } + }); + }; + transformTimeShift(aggCursor, 0); +} + +/** + * Inserts a filters aggregation into the aggregation tree which splits buckets to fetch data for all time ranges + * configured in metric aggregations. + * + * The current agg config can implement `splitForTimeShift` to force insertion of the time split filters aggregation + * before the dsl of the agg config (date histogram and metrics aggregations do this) + * + * Example aggregation tree without time split: + * ``` + * "aggs": { + "product": { + "terms": { + "field": "product", + "size": 3, + "order": { "overall_revenue": "desc" } + }, + "aggs": { + "overall_revenue": { + "sum": { + "field": "sales" + } + }, + "year": { + "date_histogram": { + "field": "timestamp", + "interval": "year" + }, + "aggs": { + "revenue": { + "sum": { + "field": "sales" + } + } + } + // ... 
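Two details of the merge logic above are easy to miss: shifted branches keep their document counts under a millisecond-suffixed key, and buckets from a shifted branch are re-keyed forward (the real code delegates to the bucket agg's `getShiftedKey`) so they align with the unshifted buckets. A minimal sketch with illustrative values:

```ts
import moment from 'moment';

const shift = moment.duration(1, 'y');

// 1) doc_count bookkeeping: the unshifted branch keeps `doc_count`, each shifted
//    branch lands under `doc_count_<shift in ms>` in the same merged bucket.
const merged: Record<string, unknown> = { doc_count: 418 }; // "regular" branch
merged[`doc_count_${shift.asMilliseconds()}`] = 420;        // "-1y" branch

// 2) Re-keying: a "-1y" bucket keyed 2018-01-01 is moved forward by the shift so
//    it merges into the same row as the unshifted 2019-01-01 bucket.
const shiftedKey = moment.utc(Date.UTC(2018, 0, 1)).add(shift).valueOf();
console.log(new Date(shiftedKey).toISOString()); // 2019-01-01T00:00:00.000Z
```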
+ * ``` + * + * Same aggregation tree with inserted time split: + * ``` + * "aggs": { + "product": { + "terms": { + "field": "product", + "size": 3, + "order": { "first_year>overall_revenue": "desc" } + }, + "aggs": { + "first_year": { + "filter": { + "range": { + "timestamp": { + "gte": "2019", + "lte": "2020" + } + } + }, + "aggs": { + "overall_revenue": { + "sum": { + "field": "sales" + } + } + } + }, + "time_offset_split": { + "filters": { + "filters": { + "regular": { + "range": { + "timestamp": { + "gte": "2019", + "lte": "2020" + } + } + }, + "-1y": { + "range": { + "timestamp": { + "gte": "2018", + "lte": "2019" + } + } + } + } + }, + "aggs": { + "year": { + "date_histogram": { + "field": "timestamp", + "interval": "year" + }, + "aggs": { + "revenue": { + "sum": { + "field": "sales" + } + } + } + } + } + } + } + * ``` + */ +export function insertTimeShiftSplit( + aggConfigs: AggConfigs, + config: AggConfig, + timeShifts: Record, + dslLvlCursor: Record +) { + if ('splitForTimeShift' in config.type && !config.type.splitForTimeShift(config, aggConfigs)) { + return dslLvlCursor; + } + if (!aggConfigs.timeFields || aggConfigs.timeFields.length < 1) { + throw new Error('Time shift can only be used with configured time field'); + } + if (!aggConfigs.timeRange) { + throw new Error('Time shift can only be used with configured time range'); + } + const timeRange = aggConfigs.timeRange; + const filters: Record = {}; + const timeField = aggConfigs.timeFields[0]; + Object.entries(timeShifts).forEach(([key, shift]) => { + const timeFilter = getTime(aggConfigs.indexPattern, timeRange, { + fieldName: timeField, + forceNow: aggConfigs.forceNow, + }); + if (timeFilter) { + filters[key] = { + range: { + [timeField]: { + gte: moment(timeFilter.range[timeField].gte).subtract(shift).toISOString(), + lte: moment(timeFilter.range[timeField].lte).subtract(shift).toISOString(), + }, + }, + }; + } + }); + dslLvlCursor.time_offset_split = { + filters: { + filters, + }, + aggs: {}, + }; + + return dslLvlCursor.time_offset_split.aggs; +} diff --git a/src/plugins/data/common/search/expressions/esaggs/request_handler.test.ts b/src/plugins/data/common/search/expressions/esaggs/request_handler.test.ts index 32775464d055f0..4f255cf4c244c6 100644 --- a/src/plugins/data/common/search/expressions/esaggs/request_handler.test.ts +++ b/src/plugins/data/common/search/expressions/esaggs/request_handler.test.ts @@ -42,6 +42,7 @@ describe('esaggs expression function - public', () => { toDsl: jest.fn().mockReturnValue({ aggs: {} }), onSearchRequestStart: jest.fn(), setTimeFields: jest.fn(), + setForceNow: jest.fn(), } as unknown) as jest.Mocked, filters: undefined, indexPattern: ({ id: 'logstash-*' } as unknown) as jest.Mocked, diff --git a/src/plugins/data/common/search/expressions/esaggs/request_handler.ts b/src/plugins/data/common/search/expressions/esaggs/request_handler.ts index d152ebf159a8ee..61193c52a5e74b 100644 --- a/src/plugins/data/common/search/expressions/esaggs/request_handler.ts +++ b/src/plugins/data/common/search/expressions/esaggs/request_handler.ts @@ -9,15 +9,7 @@ import { i18n } from '@kbn/i18n'; import { Adapters } from 'src/plugins/inspector/common'; -import { - calculateBounds, - Filter, - getTime, - IndexPattern, - isRangeFilter, - Query, - TimeRange, -} from '../../../../common'; +import { calculateBounds, Filter, IndexPattern, Query, TimeRange } from '../../../../common'; import { IAggConfigs } from '../../aggs'; import { ISearchStartSearchSource } from '../../search_source'; @@ -70,8 +62,15 @@ 
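The `filters` entries built by `insertTimeShiftSplit` above are ordinary range filters whose bounds are both moved back by the configured shift (the real code resolves the base bounds through `getTime` against the index pattern). A minimal sketch of that arithmetic, with assumed bounds:

```ts
import moment from 'moment';

// Assumed base bounds; in the plugin they come from the resolved time filter.
const base = { gte: '2019-01-01T00:00:00.000Z', lte: '2020-01-01T00:00:00.000Z' };
const shift = moment.duration(1, 'y');

// The "-1y" branch of the filters agg queries the same window, one year earlier.
const shifted = {
  gte: moment.utc(base.gte).subtract(shift).toISOString(), // 2018-01-01T00:00:00.000Z
  lte: moment.utc(base.lte).subtract(shift).toISOString(), // 2019-01-01T00:00:00.000Z
};
```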
export const handleRequest = async ({ const timeFilterSearchSource = searchSource.createChild({ callParentStartHandlers: true }); const requestSearchSource = timeFilterSearchSource.createChild({ callParentStartHandlers: true }); + // If timeFields have been specified, use the specified ones, otherwise use primary time field of index + // pattern if it's available. + const defaultTimeField = indexPattern?.getTimeField?.(); + const defaultTimeFields = defaultTimeField ? [defaultTimeField.name] : []; + const allTimeFields = timeFields && timeFields.length > 0 ? timeFields : defaultTimeFields; + aggs.setTimeRange(timeRange as TimeRange); - aggs.setTimeFields(timeFields); + aggs.setForceNow(forceNow); + aggs.setTimeFields(allTimeFields); // For now we need to mirror the history of the passed search source, since // the request inspector wouldn't work otherwise. @@ -90,19 +89,11 @@ export const handleRequest = async ({ return aggs.onSearchRequestStart(paramSearchSource, options); }); - // If timeFields have been specified, use the specified ones, otherwise use primary time field of index - // pattern if it's available. - const defaultTimeField = indexPattern?.getTimeField?.(); - const defaultTimeFields = defaultTimeField ? [defaultTimeField.name] : []; - const allTimeFields = timeFields && timeFields.length > 0 ? timeFields : defaultTimeFields; - // If a timeRange has been specified and we had at least one timeField available, create range // filters for that those time fields if (timeRange && allTimeFields.length > 0) { timeFilterSearchSource.setField('filter', () => { - return allTimeFields - .map((fieldName) => getTime(indexPattern, timeRange, { fieldName, forceNow })) - .filter(isRangeFilter); + return aggs.getSearchSourceTimeFilter(forceNow); }); } diff --git a/src/plugins/data/common/search/search_source/search_source.ts b/src/plugins/data/common/search/search_source/search_source.ts index f35d2d47f1bf47..7633382bb87631 100644 --- a/src/plugins/data/common/search/search_source/search_source.ts +++ b/src/plugins/data/common/search/search_source/search_source.ts @@ -75,7 +75,13 @@ import { estypes } from '@elastic/elasticsearch'; import { normalizeSortRequest } from './normalize_sort_request'; import { fieldWildcardFilter } from '../../../../kibana_utils/common'; import { IIndexPattern, IndexPattern, IndexPatternField } from '../../index_patterns'; -import { AggConfigs, ES_SEARCH_STRATEGY, ISearchGeneric, ISearchOptions } from '../..'; +import { + AggConfigs, + ES_SEARCH_STRATEGY, + IEsSearchResponse, + ISearchGeneric, + ISearchOptions, +} from '../..'; import type { ISearchSource, SearchFieldValue, @@ -414,6 +420,15 @@ export class SearchSource { } } + private postFlightTransform(response: IEsSearchResponse) { + const aggs = this.getField('aggs'); + if (aggs instanceof AggConfigs) { + return aggs.postFlightTransform(response); + } else { + return response; + } + } + private async fetchOthers(response: estypes.SearchResponse, options: ISearchOptions) { const aggs = this.getField('aggs'); if (aggs instanceof AggConfigs) { @@ -451,24 +466,26 @@ export class SearchSource { if (isErrorResponse(response)) { obs.error(response); } else if (isPartialResponse(response)) { - obs.next(response); + obs.next(this.postFlightTransform(response)); } else { if (!this.hasPostFlightRequests()) { - obs.next(response); + obs.next(this.postFlightTransform(response)); obs.complete(); } else { // Treat the complete response as partial, then run the postFlightRequests. 
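// Note: every branch below funnels the raw response through postFlightTransform
// first, so time-shifted sibling branches are merged before subscribers see a
// result. This holds for partial emissions, for the synthetic partial emitted
// while post-flight requests run, and for the final merged response alike.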
obs.next({ - ...response, + ...this.postFlightTransform(response), isPartial: true, isRunning: true, }); const sub = from(this.fetchOthers(response.rawResponse, options)).subscribe({ next: (responseWithOther) => { - obs.next({ - ...response, - rawResponse: responseWithOther, - }); + obs.next( + this.postFlightTransform({ + ...response, + rawResponse: responseWithOther!, + }) + ); }, error: (e) => { obs.error(e); diff --git a/src/plugins/data/common/search/tabify/tabify.ts b/src/plugins/data/common/search/tabify/tabify.ts index 4a8972d4384c23..a4d9551da75d50 100644 --- a/src/plugins/data/common/search/tabify/tabify.ts +++ b/src/plugins/data/common/search/tabify/tabify.ts @@ -139,7 +139,7 @@ export function tabifyAggResponse( const write = new TabbedAggResponseWriter(aggConfigs, respOpts || {}); const topLevelBucket: AggResponseBucket = { ...esResponse.aggregations, - doc_count: esResponse.hits?.total, + doc_count: esResponse.aggregations?.doc_count || esResponse.hits?.total, }; collectBucket(aggConfigs, write, topLevelBucket, '', 1); diff --git a/src/plugins/data/config.ts b/src/plugins/data/config.ts index 72fa547f44a77b..9306b64019bbc7 100644 --- a/src/plugins/data/config.ts +++ b/src/plugins/data/config.ts @@ -15,6 +15,8 @@ export const configSchema = schema.object({ }), valueSuggestions: schema.object({ enabled: schema.boolean({ defaultValue: true }), + terminateAfter: schema.duration({ defaultValue: 100000 }), + timeout: schema.duration({ defaultValue: 1000 }), }), }), search: schema.object({ diff --git a/src/plugins/data/public/autocomplete/autocomplete_service.ts b/src/plugins/data/public/autocomplete/autocomplete_service.ts index eb9d859664c4d4..67efbe2af29ce8 100644 --- a/src/plugins/data/public/autocomplete/autocomplete_service.ts +++ b/src/plugins/data/public/autocomplete/autocomplete_service.ts @@ -7,6 +7,7 @@ */ import { CoreSetup, PluginInitializerContext } from 'src/core/public'; +import moment from 'moment'; import { TimefilterSetup } from '../query'; import { QuerySuggestionGetFn } from './providers/query_suggestion_provider'; import { @@ -27,7 +28,7 @@ import { DataPublicPluginStart, DataStartDependencies } from '../types'; export class AutocompleteService { autocompleteConfig: ConfigSchema['autocomplete']; - constructor(initializerContext: PluginInitializerContext) { + constructor(private initializerContext: PluginInitializerContext) { const { autocomplete } = initializerContext.config.get(); this.autocompleteConfig = autocomplete; @@ -55,6 +56,8 @@ export class AutocompleteService { usageCollection, }: { timefilter: TimefilterSetup; usageCollection?: UsageCollectionSetup } ) { + const { autocomplete } = this.initializerContext.config.get(); + const { terminateAfter, timeout } = autocomplete.valueSuggestions; const usageCollector = createUsageCollector(core.getStartServices, usageCollection); this.getValueSuggestions = this.autocompleteConfig.valueSuggestions.enabled @@ -71,6 +74,10 @@ export class AutocompleteService { * please use "getQuerySuggestions" from the start contract */ getQuerySuggestions: this.getQuerySuggestions, + getAutocompleteSettings: () => ({ + terminateAfter: moment.duration(terminateAfter).asMilliseconds(), + timeout: moment.duration(timeout).asMilliseconds(), + }), }; } diff --git a/src/plugins/data/public/mocks.ts b/src/plugins/data/public/mocks.ts index f5def327b5473b..1277e31be2103c 100644 --- a/src/plugins/data/public/mocks.ts +++ b/src/plugins/data/public/mocks.ts @@ -16,8 +16,9 @@ import { createNowProviderMock } from './now_provider/mocks'; 
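On the autocomplete changes above: the two new `valueSuggestions` settings are declared with `schema.duration` in the config schema, and the new `getAutocompleteSettings` on the setup contract exposes them to consumers as plain milliseconds. A sketch of that conversion using the defaults from the diff:

```ts
import moment from 'moment';

// Defaults from config.ts above; schema.duration yields duration values.
const terminateAfter = moment.duration(100000);
const timeout = moment.duration(1000);

const getAutocompleteSettings = () => ({
  terminateAfter: terminateAfter.asMilliseconds(), // 100000
  timeout: timeout.asMilliseconds(),               // 1000
});
```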
export type Setup = jest.Mocked>; export type Start = jest.Mocked>; -const automcompleteSetupMock: jest.Mocked = { +const autocompleteSetupMock: jest.Mocked = { getQuerySuggestions: jest.fn(), + getAutocompleteSettings: jest.fn(), }; const autocompleteStartMock: jest.Mocked = { @@ -29,7 +30,7 @@ const autocompleteStartMock: jest.Mocked = { const createSetupContract = (): Setup => { const querySetupMock = queryServiceMock.createSetupContract(); return { - autocomplete: automcompleteSetupMock, + autocomplete: autocompleteSetupMock, search: searchServiceMock.createSetupContract(), fieldFormats: fieldFormatsServiceMock.createSetupContract(), query: querySetupMock, diff --git a/src/plugins/data/public/public.api.md b/src/plugins/data/public/public.api.md index fde7075d9e760e..9f5c2ef5fad3df 100644 --- a/src/plugins/data/public/public.api.md +++ b/src/plugins/data/public/public.api.md @@ -7,11 +7,13 @@ import { $Values } from '@kbn/utility-types'; import { Action } from 'history'; import { Adapters as Adapters_2 } from 'src/plugins/inspector/common'; +import { Aggregate } from '@elastic/elasticsearch/api/types'; import { ApiResponse } from '@elastic/elasticsearch/lib/Transport'; import { ApplicationStart } from 'kibana/public'; import { Assign } from '@kbn/utility-types'; import { BfetchPublicSetup } from 'src/plugins/bfetch/public'; import Boom from '@hapi/boom'; +import { Bucket } from '@elastic/elasticsearch/api/types'; import { ConfigDeprecationProvider } from '@kbn/config'; import { CoreSetup } from 'src/core/public'; import { CoreSetup as CoreSetup_2 } from 'kibana/public'; @@ -46,6 +48,7 @@ import { Href } from 'history'; import { HttpSetup } from 'kibana/public'; import { IAggConfigs as IAggConfigs_2 } from 'src/plugins/data/public'; import { IconType } from '@elastic/eui'; +import { IEsSearchResponse as IEsSearchResponse_2 } from 'src/plugins/data/public'; import { IncomingHttpHeaders } from 'http'; import { InjectedIntl } from '@kbn/i18n/react'; import { ISearchOptions as ISearchOptions_2 } from 'src/plugins/data/public'; @@ -74,6 +77,7 @@ import * as PropTypes from 'prop-types'; import { PublicContract } from '@kbn/utility-types'; import { PublicMethodsOf } from '@kbn/utility-types'; import { PublicUiSettingsParams } from 'src/core/server/types'; +import { RangeFilter as RangeFilter_2 } from 'src/plugins/data/public'; import React from 'react'; import * as React_3 from 'react'; import { RecursiveReadonly } from '@kbn/utility-types'; @@ -152,9 +156,13 @@ export class AggConfig { // (undocumented) getTimeRange(): import("../../../public").TimeRange | undefined; // (undocumented) + getTimeShift(): undefined | moment.Duration; + // (undocumented) getValue(bucket: any): any; getValueBucketPath(): string; // (undocumented) + hasTimeShift(): boolean; + // (undocumented) id: string; // (undocumented) isFilterable(): boolean; @@ -245,6 +253,8 @@ export class AggConfigs { addToAggConfigs?: boolean | undefined; }) => T; // (undocumented) + forceNow?: Date; + // (undocumented) getAll(): AggConfig[]; // (undocumented) getRequestAggById(id: string): AggConfig | undefined; @@ -253,6 +263,39 @@ export class AggConfigs { getResponseAggById(id: string): AggConfig | undefined; getResponseAggs(): AggConfig[]; // (undocumented) + getSearchSourceTimeFilter(forceNow?: Date): RangeFilter_2[] | { + meta: { + index: string | undefined; + params: {}; + alias: string; + disabled: boolean; + negate: boolean; + }; + query: { + bool: { + should: { + bool: { + filter: { + range: { + [x: string]: { + gte: string; + 
lte: string; + }; + }; + }[]; + }; + }[]; + minimum_should_match: number; + }; + }; + }[]; + // (undocumented) + getTimeShiftInterval(): moment.Duration | undefined; + // (undocumented) + getTimeShifts(): Record; + // (undocumented) + hasTimeShifts(): boolean; + // (undocumented) hierarchical?: boolean; // (undocumented) indexPattern: IndexPattern; @@ -260,6 +303,10 @@ export class AggConfigs { // (undocumented) onSearchRequestStart(searchSource: ISearchSource_2, options?: ISearchOptions_2): Promise<[unknown, unknown, unknown, unknown, unknown, unknown, unknown, unknown, unknown, unknown]>; // (undocumented) + postFlightTransform(response: IEsSearchResponse_2): IEsSearchResponse_2; + // (undocumented) + setForceNow(now: Date | undefined): void; + // (undocumented) setTimeFields(timeFields: string[] | undefined): void; // (undocumented) setTimeRange(timeRange: TimeRange): void; @@ -1307,6 +1354,7 @@ export class IndexPattern implements IIndexPattern { // Warning: (ae-forgotten-export) The symbol "IndexPatternDeps" needs to be exported by the entry point index.d.ts constructor({ spec, fieldFormats, shortDotsEnable, metaFields, }: IndexPatternDeps); addRuntimeField(name: string, runtimeField: RuntimeField): void; + // @deprecated addScriptedField(name: string, script: string, fieldType?: string): Promise; readonly allowNoIndex: boolean; // (undocumented) @@ -1335,19 +1383,7 @@ export class IndexPattern implements IIndexPattern { delay?: string | undefined; time_zone?: string | undefined; }>> | undefined; - getAsSavedObjectBody(): { - fieldAttrs: string | undefined; - title: string; - timeFieldName: string | undefined; - intervalName: string | undefined; - sourceFilters: string | undefined; - fields: string | undefined; - fieldFormatMap: string | undefined; - type: string | undefined; - typeMeta: string | undefined; - allowNoIndex: true | undefined; - runtimeFieldMap: string | undefined; - }; + getAsSavedObjectBody(): IndexPatternAttributes; // (undocumented) getComputedFields(): { storedFields: string[]; @@ -1366,7 +1402,7 @@ export class IndexPattern implements IIndexPattern { getFieldByName(name: string): IndexPatternField | undefined; getFormatterForField(field: IndexPatternField | IndexPatternField['spec'] | IFieldType): FieldFormat; getFormatterForFieldNoDefault(fieldname: string): FieldFormat | undefined; - // (undocumented) + // @deprecated (undocumented) getNonScriptedFields(): IndexPatternField[]; getOriginalSavedObjectBody: () => { fieldAttrs?: string | undefined; @@ -1379,7 +1415,7 @@ export class IndexPattern implements IIndexPattern { typeMeta?: string | undefined; type?: string | undefined; }; - // (undocumented) + // @deprecated (undocumented) getScriptedFields(): IndexPatternField[]; getSourceFiltering(): { excludes: any[]; @@ -1397,6 +1433,7 @@ export class IndexPattern implements IIndexPattern { // (undocumented) metaFields: string[]; removeRuntimeField(name: string): void; + // @deprecated removeScriptedField(fieldName: string): void; resetOriginalSavedObjectBody: () => void; // (undocumented) diff --git a/src/plugins/data/public/query/filter_manager/lib/mappers/map_spatial_filter.test.ts b/src/plugins/data/public/query/filter_manager/lib/mappers/map_spatial_filter.test.ts index 479cbb140fc709..0ae1513ae5d1b7 100644 --- a/src/plugins/data/public/query/filter_manager/lib/mappers/map_spatial_filter.test.ts +++ b/src/plugins/data/public/query/filter_manager/lib/mappers/map_spatial_filter.test.ts @@ -54,6 +54,61 @@ describe('mapSpatialFilter()', () => { 
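The test added below exercises the new multi-index branch of `mapSpatialFilter` (the mapper change itself follows the test file): it matches on `meta.isMultiIndex` together with a `query.bool.should` clause. Reduced to essentials, the accepted shape looks like this; field names are illustrative and `FILTERS.SPATIAL_FILTER` is assumed to resolve to `'spatial_filter'`:

```ts
// Minimal shape accepted by the new branch (illustrative field names).
const multiIndexSpatialFilter = {
  meta: { type: 'spatial_filter', isMultiIndex: true, alias: 'my spatial filter' },
  query: {
    bool: {
      should: [
        { bool: { must: [{ exists: { field: 'geo.coordinates' } }] } },
        { bool: { must: [{ exists: { field: 'location' } }] } },
      ],
    },
  },
};
// mapSpatialFilter(multiIndexSpatialFilter)
//   -> { key: 'query', type: 'spatial_filter', value: '' }
```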
expect(result).toHaveProperty('type', FILTERS.SPATIAL_FILTER); }); + test('should return the key for matching multi field filter', async () => { + const filter = { + meta: { + alias: 'my spatial filter', + isMultiIndex: true, + type: FILTERS.SPATIAL_FILTER, + } as FilterMeta, + query: { + bool: { + should: [ + { + bool: { + must: [ + { + exists: { + field: 'geo.coordinates', + }, + }, + { + geo_distance: { + distance: '1000km', + 'geo.coordinates': [120, 30], + }, + }, + ], + }, + }, + { + bool: { + must: [ + { + exists: { + field: 'location', + }, + }, + { + geo_distance: { + distance: '1000km', + location: [120, 30], + }, + }, + ], + }, + }, + ], + }, + }, + } as Filter; + const result = mapSpatialFilter(filter); + + expect(result).toHaveProperty('key', 'query'); + expect(result).toHaveProperty('value', ''); + expect(result).toHaveProperty('type', FILTERS.SPATIAL_FILTER); + }); + test('should return undefined for none matching', async (done) => { const filter = { meta: { diff --git a/src/plugins/data/public/query/filter_manager/lib/mappers/map_spatial_filter.ts b/src/plugins/data/public/query/filter_manager/lib/mappers/map_spatial_filter.ts index 0703bc055a39b9..229257c1a7d81f 100644 --- a/src/plugins/data/public/query/filter_manager/lib/mappers/map_spatial_filter.ts +++ b/src/plugins/data/public/query/filter_manager/lib/mappers/map_spatial_filter.ts @@ -22,5 +22,18 @@ export const mapSpatialFilter = (filter: Filter) => { value: '', }; } + + if ( + filter.meta && + filter.meta.type === FILTERS.SPATIAL_FILTER && + filter.meta.isMultiIndex && + filter.query?.bool?.should + ) { + return { + key: 'query', + type: filter.meta.type, + value: '', + }; + } throw filter; }; diff --git a/src/plugins/data/public/search/session/session_service.test.ts b/src/plugins/data/public/search/session/session_service.test.ts index 13a1a1bd388ba6..39680c49483667 100644 --- a/src/plugins/data/public/search/session/session_service.test.ts +++ b/src/plugins/data/public/search/session/session_service.test.ts @@ -98,14 +98,6 @@ describe('Session service', () => { expect(nowProvider.reset).toHaveBeenCalled(); }); - it("Can't clear other apps' session", async () => { - sessionService.start(); - expect(sessionService.getSessionId()).not.toBeUndefined(); - currentAppId$.next('change'); - sessionService.clear(); - expect(sessionService.getSessionId()).not.toBeUndefined(); - }); - it("Can start a new session in case there is other apps' stale session", async () => { const s1 = sessionService.start(); expect(sessionService.getSessionId()).not.toBeUndefined(); diff --git a/src/plugins/data/public/search/session/session_service.ts b/src/plugins/data/public/search/session/session_service.ts index 71f51b4bc8d83c..629d76b07d7caf 100644 --- a/src/plugins/data/public/search/session/session_service.ts +++ b/src/plugins/data/public/search/session/session_service.ts @@ -128,21 +128,6 @@ export class SessionService { this.subscription.add( coreStart.application.currentAppId$.subscribe((newAppName) => { this.currentApp = newAppName; - if (!this.getSessionId()) return; - - // Apps required to clean up their sessions before unmounting - // Make sure that apps don't leave sessions open by throwing an error in DEV mode - const message = `Application '${ - this.state.get().appName - }' had an open session while navigating`; - if (initializerContext.env.mode.dev) { - coreStart.fatalErrors.add(message); - } else { - // this should never happen in prod because should be caught in dev mode - // in case this happen we don't want to throw fatal 
error, as most likely possible bugs are not that critical - // eslint-disable-next-line no-console - console.warn(message); - } }) ); }); @@ -230,18 +215,6 @@ export class SessionService { * Cleans up current state */ public clear() { - // make sure apps can't clear other apps' sessions - const currentSessionApp = this.state.get().appName; - if (currentSessionApp && currentSessionApp !== this.currentApp) { - // eslint-disable-next-line no-console - console.warn( - `Skip clearing session "${this.getSessionId()}" because it belongs to a different app. current: "${ - this.currentApp - }", owner: "${currentSessionApp}"` - ); - return; - } - this.state.transitions.clear(); this.searchSessionInfoProvider = undefined; this.searchSessionIndicatorUiConfig = undefined; diff --git a/src/plugins/data/public/utils/table_inspector_view/components/__snapshots__/data_view.test.tsx.snap b/src/plugins/data/public/utils/table_inspector_view/components/__snapshots__/data_view.test.tsx.snap index 9896a6dbdc7b7f..a0a7e54d275322 100644 --- a/src/plugins/data/public/utils/table_inspector_view/components/__snapshots__/data_view.test.tsx.snap +++ b/src/plugins/data/public/utils/table_inspector_view/components/__snapshots__/data_view.test.tsx.snap @@ -718,11 +718,13 @@ exports[`Inspector Data View component should render single table without select > @@ -996,6 +998,7 @@ exports[`Inspector Data View component should render single table without select - - - - - column1 - - - - - - - Click to sort in ascending order - + + + + column1 + + + - + @@ -1322,6 +1320,7 @@ exports[`Inspector Data View component should render single table without select
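One behavioral note on the `SessionService` changes above: with the dev-mode fatal error and the ownership guard both removed, `clear()` now unconditionally resets session state, regardless of which app started the session. Condensed from the hunk into a self-contained sketch:

```ts
// Sketch of the resulting method after the guard removal (types simplified).
class SessionServiceSketch {
  private state!: { transitions: { clear(): void } };
  private searchSessionInfoProvider?: unknown;
  private searchSessionIndicatorUiConfig?: unknown;

  public clear() {
    // No ownership check anymore: any caller clears the current session.
    this.state.transitions.clear();
    this.searchSessionInfoProvider = undefined;
    this.searchSessionIndicatorUiConfig = undefined;
  }
}
```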