diff --git a/.buildkite/README.md b/.buildkite/README.md new file mode 100644 index 0000000000000..6b9be00898d24 --- /dev/null +++ b/.buildkite/README.md @@ -0,0 +1,10 @@ +# Kibana / Buildkite + +## Directory Structure + +- `hooks` - special directory used by Buildkite agents for [hooks](https://buildkite.com/docs/agent/v3/hooks) +- `pipelines` - contains pipeline definitions +- `scripts/common` - scripts that get `source`d by other scripts to set environment variables or import shared functions +- `scripts/lifecycle` - general scripts for tasks that run before or after individual steps or the entire build +- `scripts/steps` - scripts that define something that will run for a step defined in a pipeline +- `scripts/*` - all other scripts are building blocks that make up the tasks in pipelines. They may be run by other scripts, but should not be `source`d diff --git a/.buildkite/agents.json b/.buildkite/agents.json new file mode 100644 index 0000000000000..797b7e71f2be6 --- /dev/null +++ b/.buildkite/agents.json @@ -0,0 +1,79 @@ +{ + "gcp": { + "project": "elastic-kibana-ci", + "zones": ["us-central1-a", "us-central1-b", "us-central1-c", "us-central1-f"], + "serviceAccount": "elastic-buildkite-agent@elastic-kibana-ci.iam.gserviceaccount.com", + "imageFamily": "kb-ubuntu", + "subnetwork": "buildkite", + "disableExternalIp": true, + "diskType": "pd-ssd", + "diskSizeGb": 75, + "overprovision": 0, + "minimumAgents": 0, + "maximumAgents": 50, + "gracefulStopAfterMins": 360, + "hardStopAfterMins": 540, + "idleTimeoutMins": 10, + "exitAfterOneJob": false, + + "agents": [ + { + "queue": "default", + "name": "kb-default", + "minimumAgents": 1, + "maximumAgents": 100, + "idleTimeoutMins": 60, + "machineType": "e2-small" + }, + { + "queue": "c2-8", + "name": "kb-c2-8", + "machineType": "c2-standard-8", + "localSsds": 1 + }, + { + "queue": "c2-4", + "name": "kb-c2-4", + "machineType": "c2-standard-4", + "localSsds": 1 + }, + { + "queue": "jest", + "name": "kb-jest", + "machineType": "n2-standard-2", + "diskSizeGb": 128 + }, + { + "queue": "ci-group", + "name": "kb-cigroup", + "machineType": "n2-standard-8", + "diskSizeGb": 256 + }, + { + "queue": "ci-group-4", + "name": "kb-cigroup-4", + "machineType": "n2-standard-4", + "diskSizeGb": 128 + }, + { + "queue": "ci-group-4d", + "name": "kb-cigroup-4d", + "machineType": "n2d-standard-4", + "diskSizeGb": 128 + }, + { + "queue": "ci-group-6", + "name": "kb-cigroup-6", + "machineType": "n2-custom-6-16384", + "diskSizeGb": 128 + }, + { + "queue": "packer", + "name": "kb-packer", + "serviceAccount": "buildkite-packer-agent@elastic-kibana-ci.iam.gserviceaccount.com", + "maximumAgents": 10, + "machineType": "e2-small" + } + ] + } +} diff --git a/.buildkite/hooks/post-command b/.buildkite/hooks/post-command new file mode 100644 index 0000000000000..21a4326498fc9 --- /dev/null +++ b/.buildkite/hooks/post-command @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +source .buildkite/scripts/lifecycle/post_command.sh diff --git a/.buildkite/hooks/pre-command b/.buildkite/hooks/pre-command new file mode 100644 index 0000000000000..58a2c5f0b499d --- /dev/null +++ b/.buildkite/hooks/pre-command @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +source .buildkite/scripts/lifecycle/pre_command.sh diff --git a/.buildkite/pipelines/on_merge.yml b/.buildkite/pipelines/on_merge.yml new file mode 100644 index 0000000000000..bceb1796479a2 --- /dev/null +++ b/.buildkite/pipelines/on_merge.yml @@ -0,0 +1,24 @@ +env: + GITHUB_COMMIT_STATUS_ENABLED: 'true' + GITHUB_COMMIT_STATUS_CONTEXT: 
'buildkite/on-merge' +steps: + - command: .buildkite/scripts/lifecycle/pre_build.sh + label: Pre-Build + + - wait + + - command: .buildkite/scripts/steps/on_merge_build_and_metrics.sh + label: Default Build and Metrics + agents: + queue: c2-8 + + - command: .buildkite/scripts/steps/on_merge_ts_refs_api_docs.sh + label: Build TS Refs and Check Public API Docs + agents: + queue: c2-4 + + - wait: ~ + continue_on_failure: true + + - command: .buildkite/scripts/lifecycle/post_build.sh + label: Post-Build diff --git a/.buildkite/scripts/bootstrap.sh b/.buildkite/scripts/bootstrap.sh new file mode 100755 index 0000000000000..7ae925b262b9f --- /dev/null +++ b/.buildkite/scripts/bootstrap.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash + +set -euo pipefail + +source .buildkite/scripts/common/util.sh + +echo "--- yarn install and bootstrap" +yarn kbn bootstrap --verbose + +### +### upload ts-refs-cache artifacts as quickly as possible so they are available for download +### +if [[ "${BUILD_TS_REFS_CACHE_CAPTURE:-}" == "true" ]]; then + echo "--- Upload ts-refs-cache" + cd "$KIBANA_DIR/target/ts_refs_cache" + gsutil cp "*.zip" 'gs://kibana-ci-ts-refs-cache/' + cd "$KIBANA_DIR" +fi + +if [[ "$DISABLE_BOOTSTRAP_VALIDATION" != "true" ]]; then + verify_no_git_changes 'yarn kbn bootstrap' +fi diff --git a/.buildkite/scripts/build_kibana.sh b/.buildkite/scripts/build_kibana.sh new file mode 100755 index 0000000000000..19dbbcb025fb9 --- /dev/null +++ b/.buildkite/scripts/build_kibana.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +set -euo pipefail + +export KBN_NP_PLUGINS_BUILT=true + +echo "--- Build Kibana Distribution" +node scripts/build --debug --no-oss + +echo "--- Archive Kibana Distribution" +linuxBuild="$(find "$KIBANA_DIR/target" -name 'kibana-*-linux-x86_64.tar.gz')" +installDir="$KIBANA_DIR/install/kibana" +mkdir -p "$installDir" +tar -xzf "$linuxBuild" -C "$installDir" --strip=1 +mkdir -p "$KIBANA_BUILD_LOCATION" +cp -pR install/kibana/. 
"$KIBANA_BUILD_LOCATION/" diff --git a/.buildkite/scripts/build_kibana_plugins.sh b/.buildkite/scripts/build_kibana_plugins.sh new file mode 100644 index 0000000000000..f4d82699ef92d --- /dev/null +++ b/.buildkite/scripts/build_kibana_plugins.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash + +set -euo pipefail + +echo "--- Build Platform Plugins" +node scripts/build_kibana_platform_plugins \ + --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \ + --scan-dir "$KIBANA_DIR/test/interpreter_functional/plugins" \ + --scan-dir "$KIBANA_DIR/test/common/fixtures/plugins" \ + --scan-dir "$KIBANA_DIR/examples" \ + --scan-dir "$KIBANA_DIR/test/plugin_functional/plugins" \ + --scan-dir "$KIBANA_DIR/test/common/fixtures/plugins" \ + --scan-dir "$XPACK_DIR/test/plugin_functional/plugins" \ + --scan-dir "$XPACK_DIR/test/functional_with_es_ssl/fixtures/plugins" \ + --scan-dir "$XPACK_DIR/test/alerting_api_integration/plugins" \ + --scan-dir "$XPACK_DIR/test/plugin_api_integration/plugins" \ + --scan-dir "$XPACK_DIR/test/plugin_api_perf/plugins" \ + --scan-dir "$XPACK_DIR/test/licensing_plugin/plugins" \ + --scan-dir "$XPACK_DIR/test/usage_collection/plugins" \ + --scan-dir "$XPACK_DIR/test/security_functional/fixtures/common" \ + --scan-dir "$XPACK_DIR/examples" \ + --verbose + +echo "--- Archive built plugins" +shopt -s globstar +tar -zcf \ + target/kibana-default-plugins.tar.gz \ + x-pack/plugins/**/target/public \ + x-pack/test/**/target/public \ + examples/**/target/public \ + x-pack/examples/**/target/public \ + test/**/target/public diff --git a/.buildkite/scripts/common/env.sh b/.buildkite/scripts/common/env.sh new file mode 100755 index 0000000000000..ff09126f60b08 --- /dev/null +++ b/.buildkite/scripts/common/env.sh @@ -0,0 +1,72 @@ +#!/usr/bin/env bash + +export CI=true + +KIBANA_DIR=$(pwd) +export KIBANA_DIR +export XPACK_DIR="$KIBANA_DIR/x-pack" + +export CACHE_DIR="$HOME/.kibana" +PARENT_DIR="$(cd "$KIBANA_DIR/.."; pwd)" +export PARENT_DIR +export WORKSPACE="${WORKSPACE:-$PARENT_DIR}" + +KIBANA_PKG_BRANCH="$(jq -r .branch "$KIBANA_DIR/package.json")" +export KIBANA_PKG_BRANCH +export KIBANA_BASE_BRANCH="$KIBANA_PKG_BRANCH" + +export GECKODRIVER_CDNURL="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache" +export CHROMEDRIVER_CDNURL="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache" +export RE2_DOWNLOAD_MIRROR="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache" +export CYPRESS_DOWNLOAD_MIRROR="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/cypress" + +export NODE_OPTIONS="--max-old-space-size=4096" + +export FORCE_COLOR=1 +export TEST_BROWSER_HEADLESS=1 + +export ELASTIC_APM_ENVIRONMENT=ci +export ELASTIC_APM_TRANSACTION_SAMPLE_RATE=0.1 + +CI_REPORTING_ENABLED=false # TODO enable when ready, only controls checks reporter and APM + +if is_pr; then + export ELASTIC_APM_ACTIVE=false + export CHECKS_REPORTER_ACTIVE="${CI_REPORTING_ENABLED-}" + + # These can be removed once we're not supporting Jenkins and Buildkite at the same time + # These are primarily used by github checks reporter and can be configured via /github_checks_api.json + export ghprbGhRepository="elastic/kibana" + export ghprbActualCommit="$BUILDKITE_COMMIT" + export BUILD_URL="$BUILDKITE_BUILD_URL" + + # set_git_merge_base # TODO for PRs +else + export ELASTIC_APM_ACTIVE="${CI_REPORTING_ENABLED-}" + export CHECKS_REPORTER_ACTIVE=false +fi + +export FLEET_PACKAGE_REGISTRY_PORT=6104 +export 
TEST_CORS_SERVER_PORT=6105 + +export DETECT_CHROMEDRIVER_VERSION=true +export CHROMEDRIVER_FORCE_DOWNLOAD=true + +export GCS_UPLOAD_PREFIX=FAKE_UPLOAD_PREFIX # TODO remove the need for this + +export KIBANA_BUILD_LOCATION="$WORKSPACE/kibana-build-xpack" + +if [[ "${BUILD_TS_REFS_CACHE_ENABLE:-}" != "true" ]]; then + export BUILD_TS_REFS_CACHE_ENABLE=false +fi + +export BUILD_TS_REFS_DISABLE=true +export DISABLE_BOOTSTRAP_VALIDATION=true + +export TEST_KIBANA_HOST=localhost +export TEST_KIBANA_PORT=6101 +export TEST_KIBANA_URL="http://elastic:changeme@localhost:6101" +export TEST_ES_URL="http://elastic:changeme@localhost:6102" +export TEST_ES_TRANSPORT_PORT=6103 +export TEST_CORS_SERVER_PORT=6106 +export ALERTING_PROXY_PORT=6105 diff --git a/.buildkite/scripts/common/setup_bazel.sh b/.buildkite/scripts/common/setup_bazel.sh new file mode 100644 index 0000000000000..16ea9a3900353 --- /dev/null +++ b/.buildkite/scripts/common/setup_bazel.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash + +KIBANA_BUILDBUDDY_CI_API_KEY=$(vault read -field=value secret/kibana-issues/dev/kibana-buildbuddy-ci-api-key) +export KIBANA_BUILDBUDDY_CI_API_KEY + +cp "$KIBANA_DIR/src/dev/ci_setup/.bazelrc-ci" "$HOME/.bazelrc" + +### +### append auth token to buildbuddy into "$HOME/.bazelrc"; +### +echo "# Appended by .buildkite/scripts/setup_bazel.sh" >> "$HOME/.bazelrc" +echo "build --remote_header=x-buildbuddy-api-key=$KIBANA_BUILDBUDDY_CI_API_KEY" >> "$HOME/.bazelrc" + +### +### remove write permissions on buildbuddy remote cache for prs +### +if [[ "${BUILDKITE_PULL_REQUEST:-}" && "$BUILDKITE_PULL_REQUEST" != "false" ]] ; then + { + echo "# Uploads logs & artifacts without writing to cache" + echo "build --noremote_upload_local_results" + } >> "$HOME/.bazelrc" +fi diff --git a/.buildkite/scripts/common/setup_node.sh b/.buildkite/scripts/common/setup_node.sh new file mode 100755 index 0000000000000..6a81862f2b097 --- /dev/null +++ b/.buildkite/scripts/common/setup_node.sh @@ -0,0 +1,46 @@ +#!/usr/bin/env bash + +echo "--- Setup Node" + +NODE_VERSION="$(cat "$KIBANA_DIR/.node-version")" +export NODE_VERSION +export NODE_DIR="$CACHE_DIR/node/$NODE_VERSION" +export NODE_BIN_DIR="$NODE_DIR/bin" +export YARN_OFFLINE_CACHE="$CACHE_DIR/yarn-offline-cache" + +if [[ ! -d "$NODE_DIR" ]]; then + hostArch="$(command uname -m)" + case "${hostArch}" in + x86_64 | amd64) nodeArch="x64" ;; + aarch64) nodeArch="arm64" ;; + *) nodeArch="${hostArch}" ;; + esac + + nodeUrl="https://us-central1-elastic-kibana-184716.cloudfunctions.net/kibana-ci-proxy-cache/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-$nodeArch.tar.gz" + + echo "node.js v$NODE_VERSION not found at $NODE_DIR, downloading from $nodeUrl" + + mkdir -p "$NODE_DIR" + curl --silent -L "$nodeUrl" | tar -xz -C "$NODE_DIR" --strip-components=1 +else + echo "node.js v$NODE_VERSION already installed to $NODE_DIR, re-using" + ls -alh "$NODE_BIN_DIR" +fi + +export PATH="$NODE_BIN_DIR:$PATH" + + +echo "--- Setup Yarn" + +YARN_VERSION=$(node -e "console.log(String(require('./package.json').engines.yarn || '').replace(/^[^\d]+/,''))") +export YARN_VERSION + +if [[ ! 
$(which yarn) || $(yarn --version) != "$YARN_VERSION" ]]; then + npm install -g "yarn@^${YARN_VERSION}" +fi + +yarn config set yarn-offline-mirror "$YARN_OFFLINE_CACHE" + +YARN_GLOBAL_BIN=$(yarn global bin) +export YARN_GLOBAL_BIN +export PATH="$PATH:$YARN_GLOBAL_BIN" diff --git a/.buildkite/scripts/common/util.sh b/.buildkite/scripts/common/util.sh new file mode 100755 index 0000000000000..653b0c4e9cdbb --- /dev/null +++ b/.buildkite/scripts/common/util.sh @@ -0,0 +1,51 @@ +#!/usr/bin/env bash + +checks-reporter-with-killswitch() { + if [ "$CHECKS_REPORTER_ACTIVE" == "true" ] ; then + yarn run github-checks-reporter "$@" + else + arguments=("$@"); + "${arguments[@]:1}"; + fi +} + +is_pr() { + [[ "${GITHUB_PR_NUMBER-}" ]] && return + false +} + +verify_no_git_changes() { + RED='\033[0;31m' + C_RESET='\033[0m' # Reset color + + GIT_CHANGES="$(git ls-files --modified)" + if [ "$GIT_CHANGES" ]; then + echo -e "\n${RED}ERROR: '$1' caused changes to the following files:${C_RESET}\n" + echo -e "$GIT_CHANGES\n" + exit 1 + fi +} + +# docker_run can be used in place of `docker run` +# it automatically passes along all of Buildkite's tracked environment variables, and mounts the buildkite-agent in the running container +docker_run() { + args=() + + if [[ -n "${BUILDKITE_ENV_FILE:-}" ]] ; then + # Read in the env file and convert to --env params for docker + # This is because --env-file doesn't support newlines or quotes per https://docs.docker.com/compose/env-file/#syntax-rules + while read -r var; do + args+=( --env "${var%%=*}" ) + done < "$BUILDKITE_ENV_FILE" + fi + + BUILDKITE_AGENT_BINARY_PATH=$(command -v buildkite-agent) + args+=( + "--env" "BUILDKITE_JOB_ID" + "--env" "BUILDKITE_BUILD_ID" + "--env" "BUILDKITE_AGENT_ACCESS_TOKEN" + "--volume" "$BUILDKITE_AGENT_BINARY_PATH:/usr/bin/buildkite-agent" + ) + + docker run "${args[@]}" "$@" +} diff --git a/.buildkite/scripts/download_build_artifacts.sh b/.buildkite/scripts/download_build_artifacts.sh new file mode 100755 index 0000000000000..6a6b7246753f6 --- /dev/null +++ b/.buildkite/scripts/download_build_artifacts.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +set -euo pipefail + +if [[ ! -d "$KIBANA_BUILD_LOCATION/bin" ]]; then + echo '--- Downloading Distribution and Plugin artifacts' + + cd "$WORKSPACE" + + buildkite-agent artifact download kibana-default.tar.gz . + buildkite-agent artifact download kibana-default-plugins.tar.gz . 
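+  # Extract the distribution and plugins built earlier and uploaded by post_build_kibana.sh, rather than rebuilding them in this step.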
+ + mkdir -p "$KIBANA_BUILD_LOCATION" + tar -xzf kibana-default.tar.gz -C "$KIBANA_BUILD_LOCATION" --strip=1 + + cd "$KIBANA_DIR" + + tar -xzf ../kibana-default-plugins.tar.gz +fi diff --git a/.buildkite/scripts/lifecycle/ci_stats.js b/.buildkite/scripts/lifecycle/ci_stats.js new file mode 100644 index 0000000000000..1e7aec3079ee3 --- /dev/null +++ b/.buildkite/scripts/lifecycle/ci_stats.js @@ -0,0 +1,59 @@ +const https = require('https'); +const token = process.env.CI_STATS_TOKEN; +const host = process.env.CI_STATS_HOST; + +const request = (url, options, data = null) => { + const httpOptions = { + ...options, + headers: { + ...(options.headers || {}), + Authorization: `token ${token}`, + }, + }; + + return new Promise((resolve, reject) => { + console.log(`Calling https://${host}${url}`); + + const req = https.request(`https://${host}${url}`, httpOptions, (res) => { + if (res.statusCode < 200 || res.statusCode >= 300) { + return reject(new Error(`Status Code: ${res.statusCode}`)); + } + + const data = []; + res.on('data', (d) => { + data.push(d); + }); + + res.on('end', () => { + try { + let resp = Buffer.concat(data).toString(); + + try { + if (resp.trim()) { + resp = JSON.parse(resp); + } + } catch (ex) { + console.error(ex); + } + + resolve(resp); + } catch (ex) { + reject(ex); + } + }); + }); + + req.on('error', reject); + + if (data) { + req.write(JSON.stringify(data)); + } + + req.end(); + }); +}; + +module.exports = { + get: (url) => request(url, { method: 'GET' }), + post: (url, data) => request(url, { method: 'POST' }, data), +}; diff --git a/.buildkite/scripts/lifecycle/ci_stats_complete.js b/.buildkite/scripts/lifecycle/ci_stats_complete.js new file mode 100644 index 0000000000000..46aa72aed2024 --- /dev/null +++ b/.buildkite/scripts/lifecycle/ci_stats_complete.js @@ -0,0 +1,17 @@ +const ciStats = require('./ci_stats'); + +// TODO - this is okay for now but should really be replaced with an API call, especially once retries are enabled +const BUILD_STATUS = process.env.BUILD_FAILED === 'true' ? 
'FAILURE' : 'SUCCESS'; + +(async () => { + try { + if (process.env.CI_STATS_BUILD_ID) { + await ciStats.post(`/v1/build/_complete?id=${process.env.CI_STATS_BUILD_ID}`, { + result: BUILD_STATUS, + }); + } + } catch (ex) { + console.error(ex); + process.exit(1); + } +})(); diff --git a/.buildkite/scripts/lifecycle/ci_stats_start.js b/.buildkite/scripts/lifecycle/ci_stats_start.js new file mode 100644 index 0000000000000..35a1e7030af5f --- /dev/null +++ b/.buildkite/scripts/lifecycle/ci_stats_start.js @@ -0,0 +1,30 @@ +const { execSync } = require('child_process'); +const ciStats = require('./ci_stats'); + +(async () => { + try { + const build = await ciStats.post('/v1/build', { + jenkinsJobName: process.env.BUILDKITE_PIPELINE_NAME, + jenkinsJobId: process.env.BUILDKITE_BUILD_ID, + jenkinsUrl: process.env.BUILDKITE_BUILD_URL, + prId: process.env.GITHUB_PR_NUMBER || null, + }); + + execSync(`buildkite-agent meta-data set ci_stats_build_id "${build.id}"`); + + // TODO Will need to set MERGE_BASE for PRs + + await ciStats.post(`/v1/git_info?buildId=${build.id}`, { + branch: process.env.BUILDKITE_BRANCH.replace(/^(refs\/heads\/|origin\/)/, ''), + commit: process.env.BUILDKITE_COMMIT, + targetBranch: + process.env.GITHUB_PR_TARGET_BRANCH || + process.env.BUILDKITE_PULL_REQUEST_BASE_BRANCH || + null, + mergeBase: process.env.GITHUB_PR_MERGE_BASE || null, // TODO confirm GITHUB_PR_MERGE_BASE or switch to final var + }); + } catch (ex) { + console.error(ex); + process.exit(1); + } +})(); diff --git a/.buildkite/scripts/lifecycle/commit_status_complete.sh b/.buildkite/scripts/lifecycle/commit_status_complete.sh new file mode 100755 index 0000000000000..1766404719632 --- /dev/null +++ b/.buildkite/scripts/lifecycle/commit_status_complete.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash + +set -euo pipefail + +if [[ "${GITHUB_COMMIT_STATUS_ENABLED:-}" == "true" ]]; then + COMMIT_STATUS=success + if [[ "${BUILD_FAILED:-}" == "true" ]]; then + COMMIT_STATUS=failure + fi + + GITHUB_COMMIT_STATUS_CONTEXT=${GITHUB_COMMIT_STATUS_CONTEXT:-"buildkite/$BUILDKITE_PIPELINE_NAME"} + + gh api "repos/elastic/kibana/statuses/$BUILDKITE_COMMIT" -f state="$COMMIT_STATUS" -f target_url="$BUILDKITE_BUILD_URL" -f context="$GITHUB_COMMIT_STATUS_CONTEXT" --silent +fi diff --git a/.buildkite/scripts/lifecycle/commit_status_start.sh b/.buildkite/scripts/lifecycle/commit_status_start.sh new file mode 100755 index 0000000000000..d7e91dd7e0de5 --- /dev/null +++ b/.buildkite/scripts/lifecycle/commit_status_start.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +set -euo pipefail + +if [[ "${GITHUB_COMMIT_STATUS_ENABLED:-}" == "true" ]]; then + GITHUB_COMMIT_STATUS_CONTEXT=${GITHUB_COMMIT_STATUS_CONTEXT:-"buildkite/$BUILDKITE_PIPELINE_NAME"} + + gh api "repos/elastic/kibana/statuses/$BUILDKITE_COMMIT" -f state=pending -f target_url="$BUILDKITE_BUILD_URL" -f context="$GITHUB_COMMIT_STATUS_CONTEXT" --silent +fi diff --git a/.buildkite/scripts/lifecycle/post_build.sh b/.buildkite/scripts/lifecycle/post_build.sh new file mode 100755 index 0000000000000..06b51d78a836a --- /dev/null +++ b/.buildkite/scripts/lifecycle/post_build.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +set -euo pipefail + +BUILD_FAILED=$(buildkite-agent meta-data get build_failed --default "false") +export BUILD_FAILED + +"$(dirname "${0}")/commit_status_complete.sh" + +node "$(dirname "${0}")/ci_stats_complete.js" diff --git a/.buildkite/scripts/lifecycle/post_command.sh b/.buildkite/scripts/lifecycle/post_command.sh new file mode 100755 index 0000000000000..89630a874bbd7 --- 
/dev/null +++ b/.buildkite/scripts/lifecycle/post_command.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +set -euo pipefail + +if [[ "$BUILDKITE_COMMAND_EXIT_STATUS" != "0" ]]; then + buildkite-agent meta-data set build_failed true +fi diff --git a/.buildkite/scripts/lifecycle/pre_build.sh b/.buildkite/scripts/lifecycle/pre_build.sh new file mode 100755 index 0000000000000..2ddf9a73d0622 --- /dev/null +++ b/.buildkite/scripts/lifecycle/pre_build.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +set -euo pipefail + +"$(dirname "${0}")/commit_status_start.sh" + +export CI_STATS_TOKEN="$(vault read -field=api_token secret/kibana-issues/dev/kibana_ci_stats)" +export CI_STATS_HOST="$(vault read -field=api_host secret/kibana-issues/dev/kibana_ci_stats)" + +node "$(dirname "${0}")/ci_stats_start.js" diff --git a/.buildkite/scripts/lifecycle/pre_command.sh b/.buildkite/scripts/lifecycle/pre_command.sh new file mode 100755 index 0000000000000..d9e79d2d3252b --- /dev/null +++ b/.buildkite/scripts/lifecycle/pre_command.sh @@ -0,0 +1,67 @@ +#!/usr/bin/env bash + +set -euo pipefail + +# Set up a custom ES Snapshot Manifest if one has been specified for this build +{ + ES_SNAPSHOT_MANIFEST=${ES_SNAPSHOT_MANIFEST:-$(buildkite-agent meta-data get ES_SNAPSHOT_MANIFEST --default '')} + export ES_SNAPSHOT_MANIFEST + + if [[ "${ES_SNAPSHOT_MANIFEST:-}" ]]; then + cat << EOF | buildkite-agent annotate --style "info" --context es-snapshot-manifest + This build is running using a custom Elasticsearch snapshot. + + ES Snapshot Manifest: $ES_SNAPSHOT_MANIFEST + + To use this locally, simply prefix your commands with: + + \`\`\` + ES_SNAPSHOT_MANIFEST="$ES_SNAPSHOT_MANIFEST" + \`\`\` + + e.g. + + \`\`\` + ES_SNAPSHOT_MANIFEST="$ES_SNAPSHOT_MANIFEST" node scripts/functional_tests_server.js + \`\`\` +EOF + fi +} + +# Setup CI Stats +{ + CI_STATS_BUILD_ID="$(buildkite-agent meta-data get ci_stats_build_id --default '')" + export CI_STATS_BUILD_ID + + if [[ "$CI_STATS_BUILD_ID" ]]; then + echo "CI Stats Build ID: $CI_STATS_BUILD_ID" + + CI_STATS_TOKEN="$(vault read -field=api_token secret/kibana-issues/dev/kibana_ci_stats)" + export CI_STATS_TOKEN + + CI_STATS_HOST="$(vault read -field=api_host secret/kibana-issues/dev/kibana_ci_stats)" + export CI_STATS_HOST + + KIBANA_CI_STATS_CONFIG=$(jq -n \ + --arg buildId "$CI_STATS_BUILD_ID" \ + --arg apiUrl "https://$CI_STATS_HOST" \ + --arg apiToken "$CI_STATS_TOKEN" \ + '{buildId: $buildId, apiUrl: $apiUrl, apiToken: $apiToken}' \ + ) + export KIBANA_CI_STATS_CONFIG + fi +} + +GITHUB_TOKEN=$(vault read -field=github_token secret/kibana-issues/dev/kibanamachine) +export GITHUB_TOKEN + +# By default, all steps should set up these things to get a full environment before running +# It can be skipped for pipeline upload steps though, to make job start time a little faster +if [[ "${SKIP_CI_SETUP:-}" != "true" ]]; then + if [[ -d .buildkite/scripts && "${BUILDKITE_COMMAND:-}" != "buildkite-agent pipeline upload"* ]]; then + source .buildkite/scripts/common/util.sh + source .buildkite/scripts/common/env.sh + source .buildkite/scripts/common/setup_node.sh + source .buildkite/scripts/common/setup_bazel.sh + fi +fi diff --git a/.buildkite/scripts/packer_cache.sh b/.buildkite/scripts/packer_cache.sh new file mode 100755 index 0000000000000..45d3dc439ff4d --- /dev/null +++ b/.buildkite/scripts/packer_cache.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -euo pipefail + +source .buildkite/scripts/common/env.sh +source .buildkite/scripts/common/setup_node.sh + +yarn kbn bootstrap diff --git 
a/.buildkite/scripts/post_build_kibana.sh b/.buildkite/scripts/post_build_kibana.sh new file mode 100755 index 0000000000000..a4f8d71b77105 --- /dev/null +++ b/.buildkite/scripts/post_build_kibana.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +set -euo pipefail + +if [[ ! "${DISABLE_CI_STATS_SHIPPING:-}" ]]; then + echo "--- Ship Kibana Distribution Metrics to CI Stats" + node scripts/ship_ci_stats \ + --metrics target/optimizer_bundle_metrics.json \ + --metrics packages/kbn-ui-shared-deps/target/metrics.json +fi + +echo "--- Upload Build Artifacts" +# Moving to `target/` first will keep `buildkite-agent` from including directories in the artifact name +cd "$KIBANA_DIR/target" +mv kibana-*-linux-x86_64.tar.gz kibana-default.tar.gz +buildkite-agent artifact upload kibana-default.tar.gz +buildkite-agent artifact upload kibana-default-plugins.tar.gz +cd - diff --git a/.buildkite/scripts/saved_object_field_metrics.sh b/.buildkite/scripts/saved_object_field_metrics.sh new file mode 100755 index 0000000000000..3b6c63eeff3b0 --- /dev/null +++ b/.buildkite/scripts/saved_object_field_metrics.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash + +set -euo pipefail + +source .buildkite/scripts/common/util.sh + +echo '--- Default Saved Object Field Metrics' +cd "$XPACK_DIR" +checks-reporter-with-killswitch "Capture Kibana Saved Objects field count metrics" \ + node scripts/functional_tests \ + --debug --bail \ + --kibana-install-dir "$KIBANA_BUILD_LOCATION" \ + --config test/saved_objects_field_count/config.ts diff --git a/.buildkite/scripts/steps/on_merge_build_and_metrics.sh b/.buildkite/scripts/steps/on_merge_build_and_metrics.sh new file mode 100755 index 0000000000000..fdf06981ab568 --- /dev/null +++ b/.buildkite/scripts/steps/on_merge_build_and_metrics.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +set -euo pipefail + +export DISABLE_BOOTSTRAP_VALIDATION=true +export BUILD_TS_REFS_DISABLE=true + +.buildkite/scripts/bootstrap.sh +.buildkite/scripts/build_kibana.sh +.buildkite/scripts/post_build_kibana.sh +.buildkite/scripts/saved_object_field_metrics.sh diff --git a/.buildkite/scripts/steps/on_merge_ts_refs_api_docs.sh b/.buildkite/scripts/steps/on_merge_ts_refs_api_docs.sh new file mode 100755 index 0000000000000..35bc137ee4a02 --- /dev/null +++ b/.buildkite/scripts/steps/on_merge_ts_refs_api_docs.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash + +set -euo pipefail + +export BUILD_TS_REFS_CACHE_ENABLE=true +export BUILD_TS_REFS_CACHE_CAPTURE=true +export DISABLE_BOOTSTRAP_VALIDATION=true +export BUILD_TS_REFS_DISABLE=false + +.buildkite/scripts/bootstrap.sh + +echo "--- Build API Docs" +node scripts/build_api_docs diff --git a/.ci/Jenkinsfile_baseline_capture b/.ci/Jenkinsfile_baseline_capture index d074da1cb1926..9d662f4b39fb9 100644 --- a/.ci/Jenkinsfile_baseline_capture +++ b/.ci/Jenkinsfile_baseline_capture @@ -37,7 +37,7 @@ kibanaPipeline(timeoutMinutes: 210) { tasks([ kibanaPipeline.functionalTestProcess('oss-baseline', './test/scripts/jenkins_baseline.sh'), kibanaPipeline.functionalTestProcess('xpack-baseline', './test/scripts/jenkins_xpack_baseline.sh'), - kibanaPipeline.scriptTask('Check Public API Docs', 'test/scripts/checks/plugin_public_api_docs.sh'), + kibanaPipeline.scriptTask('Check Public API Docs', 'test/scripts/checks/baseline_plugin_public_api_docs.sh'), ]) } } diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 9c9200ce33e7a..2c2f6fa11841a 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -174,6 +174,7 @@ /.bazelversion @elastic/kibana-operations /WORKSPACE.bazel 
@elastic/kibana-operations #CC# /packages/kbn-expect/ @elastic/kibana-operations +/.buildkite/ @elastic/kibana-operations # Quality Assurance /src/dev/code_coverage @elastic/kibana-qa diff --git a/.i18nrc.json b/.i18nrc.json index dc01a10b6a686..57dffa4147e52 100644 --- a/.i18nrc.json +++ b/.i18nrc.json @@ -3,6 +3,7 @@ "console": "src/plugins/console", "core": "src/core", "discover": "src/plugins/discover", + "bfetch": "src/plugins/bfetch", "dashboard": "src/plugins/dashboard", "data": "src/plugins/data", "embeddableApi": "src/plugins/embeddable", diff --git a/dev_docs/tutorials/expressions.mdx b/dev_docs/tutorials/expressions.mdx new file mode 100644 index 0000000000000..f0fc1dc595cfa --- /dev/null +++ b/dev_docs/tutorials/expressions.mdx @@ -0,0 +1,129 @@ +--- +id: kibDevTutorialExpressions +slug: /kibana-dev-docs/tutorials/expressions +title: Kibana Expressions Service +summary: Kibana Expressions Service +date: 2021-06-01 +tags: ['kibana', 'onboarding', 'dev', 'architecture'] +--- + +## Expressions service + +Expression service exposes a registry of reusable functions primary used for fetching and transposing data and a registry of renderer functions that can render data into a DOM element. +Adding functions is easy and so is reusing them. An expression is a chain of functions with provided arguments, which given a single input translates to a single output. +Each expression is representable by a human friendly string which a user can type. + +### creating expressions + +Here is a very simple expression string: + + essql 'select column1, column2 from myindex' | mapColumn name=column3 fn='{ column1 + 3 }' | table + + +It consists of 3 functions: + + - essql which runs given sql query against elasticsearch and returns the results + - `mapColumn`, which computes a new column from existing ones; + - `table`, which prepares the data for rendering in a tabular format. + +The same expression could also be constructed in the code: + +```ts +import { buildExpression, buildExpressionFunction } from 'src/plugins/expressions'; + +const expression = buildExpression([ + buildExpressionFunction('essql', [ q: 'select column1, column2 from myindex' ]), + buildExpressionFunction('mapColumn', [ name: 'column3', expression: 'column1 + 3' ]), + buildExpressionFunction('table'), +] +``` + +Note: Consumers need to be aware which plugin registers specific functions with expressions function registry and import correct type definitions from there. + + + The `expressions` service is available on both server and client, with similar APIs. + + +### Running expressions + +Expression service exposes `execute` method which allows you to execute an expression: + +```ts +const executionContract = expressions.execute(expression, input); +const result = await executionContract.getData(); +``` + + + Check the full spec of execute function [here](https://github.com/elastic/kibana/blob/master/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.execution.md) + + +In addition, on the browser side, there are two additional ways to run expressions and render the results. + +#### React expression renderer component + +This is the easiest way to get expressions rendered inside your application. 
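+For example, a minimal sketch might look like this (it assumes `ReactExpressionRenderer` is exported from the expressions plugin's public entry point and takes the expression as a string via the `expression` prop; the full props are in the reference linked below):
+
+```ts
+import React from 'react';
+// Import path assumed; see the expressions plugin's public exports.
+import { ReactExpressionRenderer } from 'src/plugins/expressions/public';
+
+// Renders the output of the expression into this component's DOM node.
+export const MyVis = () => (
+  <ReactExpressionRenderer expression="essql 'select column1, column2 from myindex' | table" />
+);
+```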
+ +```ts + +``` + + + Check the full spec of ReactExpressionRenderer component props [here](https://github.com/elastic/kibana/blob/master/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.reactexpressionrendererprops.md) + + +#### Expression loader + +If you are not using React, you can use the loader expression service provides to achieve the same: + +```ts +const handler = loader(domElement, expression, params); +``` + + + Check the full spec of expression loader params [here](https://github.com/elastic/kibana/blob/master/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iexpressionloaderparams.md) + + +### Creating new expression functions + +Creating a new expression function is easy, just call `registerFunction` method on expressions service setup contract with your function definition: + +```ts +const functionDefinition = { + name: 'clog', + args: {}, + help: 'Outputs the context to the console', + fn: (input: unknown) => { + // eslint-disable-next-line no-console + console.log(input); + return input; + }, +}; + +expressions.registerFunction(functionDefinition); +``` + + + Check the full interface of ExpressionFuntionDefinition [here](https://github.com/elastic/kibana/blob/master/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.expressionfunctiondefinition.md) + + +### Creating new expression renderers + +Adding new renderers is just as easy as adding functions: + +```ts +const rendererDefinition = { + name: 'debug', + help: 'Outputs the context to the dom element', + render: (domElement, input, handlers) => { + // eslint-disable-next-line no-console + domElement.innerText = JSON.strinfigy(input); + handlers.done(); + }, +}; + +expressions.registerRenderer(rendererDefinition); +``` + + + Check the full interface of ExpressionRendererDefinition [here](https://github.com/elastic/kibana/blob/master/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.expressionrenderdefinition.md) + diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.deprecationtype.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.deprecationtype.md new file mode 100644 index 0000000000000..3a76bc60ee630 --- /dev/null +++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.deprecationtype.md @@ -0,0 +1,15 @@ + + +[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [DeprecationsDetails](./kibana-plugin-core-server.deprecationsdetails.md) > [deprecationType](./kibana-plugin-core-server.deprecationsdetails.deprecationtype.md) + +## DeprecationsDetails.deprecationType property + +(optional) Used to identify between different deprecation types. Example use case: in Upgrade Assistant, we may want to allow the user to sort by deprecation type or show each type in a separate tab. + +Feel free to add new types if necessary. Predefined types are necessary to reduce having similar definitions with different keywords across kibana deprecations. 
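+For illustration, a deprecation entry using this property could look like the sketch below (the shape follows the `DeprecationsDetails` fields documented in this package; the setting name and URL are made up):
+
+```ts
+const deprecation = {
+  message: 'Setting "some.legacy.setting" is deprecated and will be removed in a future version.',
+  level: 'warning', // 'warning' | 'critical' | 'fetch_error'
+  deprecationType: 'config', // 'config' | 'feature'
+  documentationUrl: 'https://example.org/deprecation-docs', // placeholder URL
+  correctiveActions: {
+    manualSteps: ['Remove "some.legacy.setting" from kibana.yml'],
+  },
+};
+```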
+ +Signature: + +```typescript +deprecationType?: 'config' | 'feature'; +``` diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md index bb77e4247711f..6e46ce0b8611f 100644 --- a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md +++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md @@ -15,6 +15,7 @@ export interface DeprecationsDetails | Property | Type | Description | | --- | --- | --- | | [correctiveActions](./kibana-plugin-core-server.deprecationsdetails.correctiveactions.md) | {
api?: { path: string; method: 'POST' | 'PUT'; body?: { [key: string]: any; }; }; manualSteps?: string[];
} | | +| [deprecationType](./kibana-plugin-core-server.deprecationsdetails.deprecationtype.md) | 'config' | 'feature' | (optional) Used to identify between different deprecation types. Example use case: in Upgrade Assistant, we may want to allow the user to sort by deprecation type or show each type in a separate tab.Feel free to add new types if necessary. Predefined types are necessary to reduce having similar definitions with different keywords across kibana deprecations. | | [documentationUrl](./kibana-plugin-core-server.deprecationsdetails.documentationurl.md) | string | | | [level](./kibana-plugin-core-server.deprecationsdetails.level.md) | 'warning' | 'critical' | 'fetch_error' | levels: - warning: will not break deployment upon upgrade - critical: needs to be addressed before upgrade. - fetch\_error: Deprecations service failed to grab the deprecation details for the domain. | | [message](./kibana-plugin-core-server.deprecationsdetails.message.md) | string | | diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.addscriptedfield.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.addscriptedfield.md index 99d2fc00a6b7b..812f014b15a6c 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.addscriptedfield.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.addscriptedfield.md @@ -4,6 +4,10 @@ ## IndexPattern.addScriptedField() method +> Warning: This API is now obsolete. +> +> + Add scripted field to field list Signature: diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getassavedobjectbody.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getassavedobjectbody.md index 48d94b84497bd..cc40ab8bb1173 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getassavedobjectbody.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getassavedobjectbody.md @@ -9,33 +9,9 @@ Returns index pattern as saved object body for saving Signature: ```typescript -getAsSavedObjectBody(): { - fieldAttrs: string | undefined; - title: string; - timeFieldName: string | undefined; - intervalName: string | undefined; - sourceFilters: string | undefined; - fields: string | undefined; - fieldFormatMap: string | undefined; - type: string | undefined; - typeMeta: string | undefined; - allowNoIndex: true | undefined; - runtimeFieldMap: string | undefined; - }; +getAsSavedObjectBody(): IndexPatternAttributes; ``` Returns: -`{ - fieldAttrs: string | undefined; - title: string; - timeFieldName: string | undefined; - intervalName: string | undefined; - sourceFilters: string | undefined; - fields: string | undefined; - fieldFormatMap: string | undefined; - type: string | undefined; - typeMeta: string | undefined; - allowNoIndex: true | undefined; - runtimeFieldMap: string | undefined; - }` +`IndexPatternAttributes` diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getnonscriptedfields.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getnonscriptedfields.md index 77ce6f6f23a67..1792a979bf749 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getnonscriptedfields.md +++ 
b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getnonscriptedfields.md @@ -4,6 +4,10 @@ ## IndexPattern.getNonScriptedFields() method +> Warning: This API is now obsolete. +> +> + Signature: ```typescript diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getscriptedfields.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getscriptedfields.md index 055f07367c96e..b6b3dcb19bac1 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getscriptedfields.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.getscriptedfields.md @@ -4,6 +4,10 @@ ## IndexPattern.getScriptedFields() method +> Warning: This API is now obsolete. +> +> + Signature: ```typescript diff --git a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.removescriptedfield.md b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.removescriptedfield.md index aaaebdaccca5d..91f25c09ab197 100644 --- a/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.removescriptedfield.md +++ b/docs/development/plugins/data/public/kibana-plugin-plugins-data-public.indexpattern.removescriptedfield.md @@ -4,6 +4,10 @@ ## IndexPattern.removeScriptedField() method +> Warning: This API is now obsolete. +> +> + Remove scripted field from field list Signature: diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.addscriptedfield.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.addscriptedfield.md index a86fea3106225..981f28a51ae09 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.addscriptedfield.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.addscriptedfield.md @@ -4,6 +4,10 @@ ## IndexPattern.addScriptedField() method +> Warning: This API is now obsolete. 
+> +> + Add scripted field to field list Signature: diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getassavedobjectbody.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getassavedobjectbody.md index 668d563ff04c0..f5e87638e2f1c 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getassavedobjectbody.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getassavedobjectbody.md @@ -9,33 +9,9 @@ Returns index pattern as saved object body for saving Signature: ```typescript -getAsSavedObjectBody(): { - fieldAttrs: string | undefined; - title: string; - timeFieldName: string | undefined; - intervalName: string | undefined; - sourceFilters: string | undefined; - fields: string | undefined; - fieldFormatMap: string | undefined; - type: string | undefined; - typeMeta: string | undefined; - allowNoIndex: true | undefined; - runtimeFieldMap: string | undefined; - }; +getAsSavedObjectBody(): IndexPatternAttributes; ``` Returns: -`{ - fieldAttrs: string | undefined; - title: string; - timeFieldName: string | undefined; - intervalName: string | undefined; - sourceFilters: string | undefined; - fields: string | undefined; - fieldFormatMap: string | undefined; - type: string | undefined; - typeMeta: string | undefined; - allowNoIndex: true | undefined; - runtimeFieldMap: string | undefined; - }` +`IndexPatternAttributes` diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getnonscriptedfields.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getnonscriptedfields.md index 89d79d9b750fa..cff2c5de98de6 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getnonscriptedfields.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getnonscriptedfields.md @@ -4,6 +4,10 @@ ## IndexPattern.getNonScriptedFields() method +> Warning: This API is now obsolete. +> +> + Signature: ```typescript diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getscriptedfields.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getscriptedfields.md index edfff8ec5efac..62b8f1b62ac78 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getscriptedfields.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.getscriptedfields.md @@ -4,6 +4,10 @@ ## IndexPattern.getScriptedFields() method +> Warning: This API is now obsolete. +> +> + Signature: ```typescript diff --git a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.removescriptedfield.md b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.removescriptedfield.md index 3162a7f42dd12..f6beed7389e43 100644 --- a/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.removescriptedfield.md +++ b/docs/development/plugins/data/server/kibana-plugin-plugins-data-server.indexpattern.removescriptedfield.md @@ -4,6 +4,10 @@ ## IndexPattern.removeScriptedField() method +> Warning: This API is now obsolete. 
+> +> + Remove scripted field from field list Signature: diff --git a/docs/discover/images/add-field-to-pattern.png b/docs/discover/images/add-field-to-pattern.png index 84dfcb0745c69..04f564ab89857 100644 Binary files a/docs/discover/images/add-field-to-pattern.png and b/docs/discover/images/add-field-to-pattern.png differ diff --git a/docs/discover/images/customer.png b/docs/discover/images/customer.png new file mode 100644 index 0000000000000..f2042080f0a37 Binary files /dev/null and b/docs/discover/images/customer.png differ diff --git a/docs/discover/images/discover-from-visualize.png b/docs/discover/images/discover-from-visualize.png index cbf64dff18b15..07d34b8b49018 100644 Binary files a/docs/discover/images/discover-from-visualize.png and b/docs/discover/images/discover-from-visualize.png differ diff --git a/docs/discover/images/discover-visualize.png b/docs/discover/images/discover-visualize.png index f4bcaf8aca028..242aa16709a9f 100644 Binary files a/docs/discover/images/discover-visualize.png and b/docs/discover/images/discover-visualize.png differ diff --git a/docs/discover/images/document-table-expanded.png b/docs/discover/images/document-table-expanded.png index 87c8e2047a0cb..38ea0987b120a 100644 Binary files a/docs/discover/images/document-table-expanded.png and b/docs/discover/images/document-table-expanded.png differ diff --git a/docs/discover/images/document-table.png b/docs/discover/images/document-table.png index f47ca5353124e..097feb4dc9034 100644 Binary files a/docs/discover/images/document-table.png and b/docs/discover/images/document-table.png differ diff --git a/docs/discover/images/hello-field.png b/docs/discover/images/hello-field.png index 07d97e054d7ec..fe4cfbb41fdc2 100644 Binary files a/docs/discover/images/hello-field.png and b/docs/discover/images/hello-field.png differ diff --git a/docs/index-extra-title-page.html b/docs/index-extra-title-page.html new file mode 100644 index 0000000000000..06d8c0b2e827b --- /dev/null +++ b/docs/index-extra-title-page.html @@ -0,0 +1,36 @@ +
+From creating beautiful visualizations to managing the Elastic Stack, learn how Kibana helps you get the most of your data.
+New to Kibana?
+Popular topics
+All topics
diff --git a/docs/setup/settings.asciidoc b/docs/setup/settings.asciidoc index bac2b0ebdf15f..6e8026c4a747d 100644 --- a/docs/setup/settings.asciidoc +++ b/docs/setup/settings.asciidoc @@ -262,11 +262,11 @@ on the {kib} index at startup. {kib} users still need to authenticate with {es} {ref}/indices-create-index.html[index name limitations]. *Default: `".kibana"`* -| `kibana.autocompleteTimeout:` {ess-icon} +| `data.autocomplete.valueSuggestions.timeout:` {ess-icon} | Time in milliseconds to wait for autocomplete suggestions from {es}. This value must be a whole number greater than zero. *Default: `"1000"`* -| `kibana.autocompleteTerminateAfter:` {ess-icon} +| `data.autocomplete.valueSuggestions.terminateAfter:` {ess-icon} | Maximum number of documents loaded by each shard to generate autocomplete suggestions. This value must be a whole number greater than zero. *Default: `"100000"`* diff --git a/docs/user/dashboard/images/lens_missing_values_strategy.png b/docs/user/dashboard/images/lens_missing_values_strategy.png new file mode 100644 index 0000000000000..d77c230b533f5 Binary files /dev/null and b/docs/user/dashboard/images/lens_missing_values_strategy.png differ diff --git a/docs/user/dashboard/lens.asciidoc b/docs/user/dashboard/lens.asciidoc index 1f3b4c429f704..c5718b2a089bf 100644 --- a/docs/user/dashboard/lens.asciidoc +++ b/docs/user/dashboard/lens.asciidoc @@ -190,6 +190,8 @@ A subset of *Lens* visualizations support value labels. [role="screenshot"] image::images/lens_value_labels_xychart_toggle.png[Lens Bar chart value labels menu] +NOTE: In bar charts, you are unable to move the label positions. + * *Pie*, *Donut*, and *Treemap* + [role="screenshot"] @@ -243,6 +245,9 @@ refer to <>. Sorting dimensions in visualizations is unsupported in *Lens*. +You can sort the dimensions for a single column in data tables: click the column header, then select the sorting criteria you want to use. +If you use the dimension as `Columns`, then all the columns that belong to the same dimension are sorted in the table. + [float] [[is-it-possible-to-use-saved-serches-in-lens]] ===== How do I visualize saved searches? @@ -254,3 +259,47 @@ Visualizing saved searches in unsupported in *Lens*. ===== How do I change the number of suggestions? Configuring the *Suggestions* that *Lens* automatically populates is unsupported. + +[float] +[[is-it-possible-to-use-different-indexpatterns-in-lens]] +===== Can I visualize multiple index patterns in a single visualization? + +You can create *Bar*, *Line* and *Area* charts from multiple index patterns. + +Each *Layer* in a visualization is associated with an index pattern and mutiple *Layers* can be combined together within the same visualization. Each *Layer* also has a chart switcher button in order to select the best type of visualization for the specific dataset. +You can also change the index pattern for a single *Layer*. + +[float] +[[why-my-field-x-is-missing-from-the-fields-list]] +===== Why is my field X missing from the fields list? + +*Lens* does not support the visualization of full-text fields, therefore it is not showing them in the data summary. + +[float] +[[how-to-handle-gaps-in-time-series-visualizations]] +===== How do I handle gaps in time series visualizations? + +*Lens* provides a set of features to handle missing values for *Area* and *Line* charts, which is useful for sparse data in time series data. + +To select a different way to represent missing values, open the *Visual options* menu, then select how to handle missing values. 
The default is to hide the missing values. ++ +[role="screenshot"] +image::images/lens_missing_values_strategy.png[Lens Missing values strategies menu] + +[float] +[[is-it-possible-to-change-the-scale-of-Y-axis]] +===== Is it possible to statically define the scale of the y-axis in a visualization? + +The ability to start the y-axis from another value than 0, or use a logarithmic scale, is unsupported in *Lens*. + +[float] +[[is-it-possible-to-have-pagination-for-datatable]] +===== Is it possible to have pagination in a data table? + +Pagination in a data table is unsupported in *Lens*. However, the <> supports pagination. + +[float] +[[is-it-possible-to-have-more-than-one-Y-axis-scale]] +===== Is it possible to have more than one y-axis scale in visualizations? + +*Lens* lets you pick, for each Y dimension, up to two distinct axis: *left* and *right*. Each axis can have a different scale. diff --git a/docs/user/discover.asciidoc b/docs/user/discover.asciidoc index ea413747a2aad..82a7dd300f028 100644 --- a/docs/user/discover.asciidoc +++ b/docs/user/discover.asciidoc @@ -101,22 +101,24 @@ image:images/find-manufacturer-field.png[Fields list that displays the top five . Click image:images/add-icon.png[Add icon] to toggle the field into the document table. -. Find the `day of week` field and add it to your document table. Your table should look like this: +. Find the `customer_first_name` and `customer_last_name` last name fields and add +them to your document table. Your table should look similar to this: + [role="screenshot"] -image:images/document-table.png[Document table with fields for manufacturer, geo.country_iso_code, and day_of_week] - +image:images/document-table.png[Document table with fields for manufacturer, customer_first_name, and customer_last_name] . To rearrange the table columns, hover the mouse over a -column header, and then use the move and sort controls. +column header, and then use the move controls. + +. To view more of the document table, click *Hide chart*. [float] [[add-field-in-discover]] -=== Add a field +=== Add a field to your index pattern What happens if you forgot to define an important value as a separate field? Or, what if you -want to combine two fields and treat them as one? -You can add a field to your index pattern from inside of **Discover**, +want to combine two fields and treat them as one? This is where {ref}/runtime.html[runtime fields] come into play. +You can add a runtime field to your index pattern from inside of **Discover**, and then use that field for analysis and visualizations, the same way you do with other fields. @@ -129,7 +131,7 @@ image:images/add-field-to-pattern.png[Dropdown menu located next to index patter . Turn on *Set value*. -. Use the Painless scripting language to define the field: +. Define the script using the Painless scripting language. Runtime fields require an `emit()`. + ```ts emit("Hello World!"); @@ -137,15 +139,25 @@ emit("Hello World!"); . Click *Save*. -. In the fields list, search for the *hello* field, and then click it. -+ -You'll see the top values for the field. The pop-up also includes actions for filtering, -editing, and deleting the field. +. In the fields list, search for the *hello* field, and then add it to the table to view it's value. + [role="screenshot"] -image:images/hello-field.png[Top values for the hello field, width=50%] +image:images/hello-field.png[hello field in the document table] -For more information on adding fields and Painless scripting language examples, refer to <>. +. 
Create a second field named `customer` that combines customer last name and first initial. ++ +```ts +String str = doc['customer_first_name.keyword'].value; +char ch1 = str.charAt(0); +emit(doc['customer_last_name.keyword'].value + ", " + ch1); +``` +. Simplify the the document table by removing `customer_first_name` and `customer_last_name` and adding `customer` in their place. ++ +[role="screenshot"] +image:images/customer.png[Customer last name, first initial in the document table] ++ +For more information on adding fields and Painless scripting language examples, +refer to <>. [float] @@ -202,7 +214,7 @@ click the close icon (x) next to its name in the filter bar. Dive into an individual document to view its fields and the documents that occurred before and after it. -. In the document table, expand any document. To view more of the document table, click *Hide chart*. +. In the document table, expand any document. + [role="screenshot"] image:images/document-table-expanded.png[Table view with document expanded] @@ -238,7 +250,7 @@ image:images/discover-save-saved-search.png[Save saved search in Discover, width If a field can be {ref}/search-aggregations.html[aggregated], you can quickly visualize it from **Discover**. -. From the **Selected fields** list, click `day_of_week`, and then click **Visualize**. +. From the **Available fields** list, click `day_of_week`, and then click **Visualize**. + [role="screenshot"] image:images/discover-visualize.png[Discover sidebar field popover with visualize button, width=75%] diff --git a/docs/user/production-considerations/production.asciidoc b/docs/user/production-considerations/production.asciidoc index 726747d5d69d0..1ffca4b6ae6ab 100644 --- a/docs/user/production-considerations/production.asciidoc +++ b/docs/user/production-considerations/production.asciidoc @@ -8,7 +8,6 @@ * <> * <> * <> -* <> * <> * <> * <> @@ -22,9 +21,8 @@ Kibana instances that are all connected to the same Elasticsearch instance. While Kibana isn't terribly resource intensive, we still recommend running Kibana separate from your Elasticsearch data or master nodes. To distribute Kibana -traffic across the nodes in your Elasticsearch cluster, you can run Kibana -and an Elasticsearch client node on the same machine. For more information, see -<>. +traffic across the nodes in your Elasticsearch cluster, +you can configure Kibana to use a list of Elasticsearch hosts. [float] [[configuring-kibana-shield]] @@ -69,58 +67,6 @@ csp.strict: true See <>. -[float] -[[load-balancing-es]] -=== Load Balancing across multiple {es} nodes -If you have multiple nodes in your Elasticsearch cluster, the easiest way to distribute Kibana requests -across the nodes is to run an Elasticsearch _Coordinating only_ node on the same machine as Kibana. -Elasticsearch Coordinating only nodes are essentially smart load balancers that are part of the cluster. They -process incoming HTTP requests, redirect operations to the other nodes in the cluster as needed, and -gather and return the results. For more information, see -{ref}/modules-node.html[Node] in the Elasticsearch reference. - -To use a local client node to load balance Kibana requests: - -. Install Elasticsearch on the same machine as Kibana. -. Configure the node as a Coordinating only node. In `elasticsearch.yml`, set `node.data`, `node.master` and `node.ingest` to `false`: -+ -[source,js] --------- -# 3. 
You want this node to be neither master nor data node nor ingest node, but -# to act as a "search load balancer" (fetching data from nodes, -# aggregating results, etc.) -# -node.master: false -node.data: false -node.ingest: false --------- -. Configure the client node to join your Elasticsearch cluster. In `elasticsearch.yml`, set the `cluster.name` to the -name of your cluster. -+ -[source,js] --------- -cluster.name: "my_cluster" --------- -. Check your transport and HTTP host configs in `elasticsearch.yml` under `network.host` and `transport.host`. The `transport.host` needs to be on the network reachable to the cluster members, the `network.host` is the network for the HTTP connection for Kibana (localhost:9200 by default). -+ -[source,js] --------- -network.host: localhost -http.port: 9200 - -# by default transport.host refers to network.host -transport.host: -transport.tcp.port: 9300 - 9400 --------- -. Make sure Kibana is configured to point to your local client node. In `kibana.yml`, the `elasticsearch.hosts` setting should be set to -`["localhost:9200"]`. -+ -[source,js] --------- -# The Elasticsearch instance to use for all your queries. -elasticsearch.hosts: ["http://localhost:9200"] --------- - [float] [[load-balancing-kibana]] === Load balancing across multiple Kibana instances diff --git a/docs/user/security/api-keys/images/create-api-key.png b/docs/user/security/api-keys/images/create-api-key.png index c763dcbfa53f8..b0041f69b05b6 100644 Binary files a/docs/user/security/api-keys/images/create-api-key.png and b/docs/user/security/api-keys/images/create-api-key.png differ diff --git a/docs/user/security/securing-communications/elasticsearch-mutual-tls.asciidoc b/docs/user/security/securing-communications/elasticsearch-mutual-tls.asciidoc index f5192f4641d4d..63fd2799c90cf 100644 --- a/docs/user/security/securing-communications/elasticsearch-mutual-tls.asciidoc +++ b/docs/user/security/securing-communications/elasticsearch-mutual-tls.asciidoc @@ -39,7 +39,7 @@ This entails generating a "server certificate" for {es} to use on the HTTP layer . Obtain a client certificate and private key for {kib}. + -- -{kib} must this "client certificate" and corresponding private key when connecting to {es}. +{kib} uses the client certificate and corresponding private key when connecting to {es}. NOTE: This is not the same as the <> that {kib} will present to web browsers. 
diff --git a/package.json b/package.json index f41c85c4c7b80..e0bebcaacd7ea 100644 --- a/package.json +++ b/package.json @@ -86,6 +86,7 @@ "**/prismjs": "1.23.0", "**/react-syntax-highlighter": "^15.3.1", "**/react-syntax-highlighter/**/highlight.js": "^10.4.1", + "**/refractor": "^3.3.1", "**/request": "^2.88.2", "**/trim": "1.0.1", "**/typescript": "4.1.3", @@ -102,7 +103,7 @@ "@elastic/datemath": "link:bazel-bin/packages/elastic-datemath/npm_module", "@elastic/elasticsearch": "npm:@elastic/elasticsearch-canary@^8.0.0-canary.4", "@elastic/ems-client": "7.13.0", - "@elastic/eui": "32.1.0", + "@elastic/eui": "33.0.0", "@elastic/filesaver": "1.1.2", "@elastic/good": "^9.0.1-kibana3", "@elastic/maki": "6.3.0", @@ -130,20 +131,21 @@ "@kbn/config": "link:bazel-bin/packages/kbn-config/npm_module", "@kbn/config-schema": "link:bazel-bin/packages/kbn-config-schema/npm_module", "@kbn/crypto": "link:bazel-bin/packages/kbn-crypto/npm_module", - "@kbn/mapbox-gl": "link:bazel-bin/packages/kbn-mapbox-gl/npm_module", "@kbn/i18n": "link:bazel-bin/packages/kbn-i18n/npm_module", "@kbn/interpreter": "link:packages/kbn-interpreter", "@kbn/io-ts-utils": "link:bazel-bin/packages/kbn-io-ts-utils/npm_module", "@kbn/legacy-logging": "link:bazel-bin/packages/kbn-legacy-logging/npm_module", "@kbn/logging": "link:bazel-bin/packages/kbn-logging/npm_module", + "@kbn/mapbox-gl": "link:bazel-bin/packages/kbn-mapbox-gl/npm_module", "@kbn/monaco": "link:bazel-bin/packages/kbn-monaco/npm_module", - "@kbn/securitysolution-list-constants": "link:bazel-bin/packages/kbn-securitysolution-list-constants/npm_module", + "@kbn/rule-data-utils": "link:packages/kbn-rule-data-utils", "@kbn/securitysolution-es-utils": "link:bazel-bin/packages/kbn-securitysolution-es-utils/npm_module", - "@kbn/securitysolution-io-ts-types": "link:bazel-bin/packages/kbn-securitysolution-io-ts-types/npm_module", "@kbn/securitysolution-io-ts-alerting-types": "link:bazel-bin/packages/kbn-securitysolution-io-ts-alerting-types/npm_module", "@kbn/securitysolution-io-ts-list-types": "link:bazel-bin/packages/kbn-securitysolution-io-ts-list-types/npm_module", + "@kbn/securitysolution-io-ts-types": "link:bazel-bin/packages/kbn-securitysolution-io-ts-types/npm_module", "@kbn/securitysolution-io-ts-utils": "link:bazel-bin/packages/kbn-securitysolution-io-ts-utils/npm_module", "@kbn/securitysolution-list-api": "link:bazel-bin/packages/kbn-securitysolution-list-api/npm_module", + "@kbn/securitysolution-list-constants": "link:bazel-bin/packages/kbn-securitysolution-list-constants/npm_module", "@kbn/securitysolution-list-hooks": "link:bazel-bin/packages/kbn-securitysolution-list-hooks/npm_module", "@kbn/securitysolution-list-utils": "link:bazel-bin/packages/kbn-securitysolution-list-utils/npm_module", "@kbn/securitysolution-utils": "link:bazel-bin/packages/kbn-securitysolution-utils/npm_module", @@ -162,7 +164,6 @@ "@mapbox/mapbox-gl-rtl-text": "0.2.3", "@mapbox/vector-tile": "1.3.1", "@reduxjs/toolkit": "^1.5.1", - "@scant/router": "^0.1.1", "@slack/webhook": "^5.0.4", "@turf/along": "6.0.1", "@turf/area": "6.0.1", @@ -229,6 +230,7 @@ "expiry-js": "0.1.7", "extract-zip": "^2.0.1", "fast-deep-equal": "^3.1.1", + "fflate": "^0.6.9", "file-saver": "^1.3.8", "file-type": "^10.9.0", "focus-trap-react": "^3.1.1", @@ -274,7 +276,6 @@ "json-stringify-safe": "5.0.1", "jsonwebtoken": "^8.5.1", "jsts": "^1.6.2", - "@kbn/rule-data-utils": "link:packages/kbn-rule-data-utils", "kea": "^2.4.2", "leaflet": "1.5.1", "leaflet-draw": "0.4.14", diff --git a/packages/kbn-crypto/BUILD.bazel 
b/packages/kbn-crypto/BUILD.bazel index b1723e4120e79..20793e27de629 100644 --- a/packages/kbn-crypto/BUILD.bazel +++ b/packages/kbn-crypto/BUILD.bazel @@ -38,6 +38,7 @@ TYPES_DEPS = [ "@npm//@types/node", "@npm//@types/node-forge", "@npm//@types/testing-library__jest-dom", + "@npm//resize-observer-polyfill" ] DEPS = SRC_DEPS + TYPES_DEPS diff --git a/packages/kbn-optimizer/limits.yml b/packages/kbn-optimizer/limits.yml index c28fd83591960..6ccf6269751b1 100644 --- a/packages/kbn-optimizer/limits.yml +++ b/packages/kbn-optimizer/limits.yml @@ -3,7 +3,7 @@ pageLoadAssetSize: alerting: 106936 apm: 64385 apmOss: 18996 - bfetch: 41874 + bfetch: 51874 canvas: 1066647 charts: 195358 cloud: 21076 diff --git a/packages/kbn-optimizer/src/optimizer/cache_keys.ts b/packages/kbn-optimizer/src/optimizer/cache_keys.ts index 9275cab1fdff0..e622b3b8f593e 100644 --- a/packages/kbn-optimizer/src/optimizer/cache_keys.ts +++ b/packages/kbn-optimizer/src/optimizer/cache_keys.ts @@ -121,6 +121,10 @@ export interface OptimizerCacheKey { } async function getLastCommit() { + if (!Fs.existsSync(Path.join(REPO_ROOT, '.git'))) { + return undefined; + } + const { stdout } = await execa( 'git', ['log', '-n', '1', '--pretty=format:%H', '--', RELATIVE_DIR], diff --git a/packages/kbn-optimizer/src/optimizer/get_changes.test.ts b/packages/kbn-optimizer/src/optimizer/get_changes.test.ts index 4d4a902bb186a..d8be1917c101b 100644 --- a/packages/kbn-optimizer/src/optimizer/get_changes.test.ts +++ b/packages/kbn-optimizer/src/optimizer/get_changes.test.ts @@ -7,6 +7,7 @@ */ jest.mock('execa'); +jest.mock('fs'); import { getChanges } from './get_changes'; @@ -15,6 +16,8 @@ const execa: jest.Mock = jest.requireMock('execa'); it('parses git ls-files output', async () => { expect.assertions(4); + jest.requireMock('fs').existsSync.mockImplementation(() => true); + execa.mockImplementation((cmd, args, options) => { expect(cmd).toBe('git'); expect(args).toEqual(['ls-files', '-dmt', '--', '/foo/bar/x']); diff --git a/packages/kbn-optimizer/src/optimizer/get_changes.ts b/packages/kbn-optimizer/src/optimizer/get_changes.ts index e5574d9b1a2e4..73fa5e9d931e7 100644 --- a/packages/kbn-optimizer/src/optimizer/get_changes.ts +++ b/packages/kbn-optimizer/src/optimizer/get_changes.ts @@ -9,6 +9,7 @@ import Path from 'path'; import execa from 'execa'; +import fs from 'fs'; export type Changes = Map; @@ -16,11 +17,16 @@ export type Changes = Map; * get the changes in all the context directories (plugin public paths) */ export async function getChanges(dir: string) { + const changes: Changes = new Map(); + + if (!fs.existsSync(Path.join(dir, '.git'))) { + return changes; + } + const { stdout } = await execa('git', ['ls-files', '-dmt', '--', dir], { cwd: dir, }); - const changes: Changes = new Map(); const output = stdout.trim(); if (output) { diff --git a/packages/kbn-telemetry-tools/BUILD.bazel b/packages/kbn-telemetry-tools/BUILD.bazel index 9a6b4a10bd190..d394b0c93d45f 100644 --- a/packages/kbn-telemetry-tools/BUILD.bazel +++ b/packages/kbn-telemetry-tools/BUILD.bazel @@ -47,6 +47,7 @@ TYPES_DEPS = [ "@npm//@types/node", "@npm//@types/normalize-path", "@npm//@types/testing-library__jest-dom", + "@npm//resize-observer-polyfill" ] DEPS = SRC_DEPS + TYPES_DEPS diff --git a/packages/kbn-test/src/functional_test_runner/functional_test_runner.ts b/packages/kbn-test/src/functional_test_runner/functional_test_runner.ts index b5d379d3426e7..0a2dba60742ad 100644 --- a/packages/kbn-test/src/functional_test_runner/functional_test_runner.ts +++ 
b/packages/kbn-test/src/functional_test_runner/functional_test_runner.ts @@ -5,6 +5,7 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ +import { relative } from 'path'; import { ToolingLog } from '@kbn/dev-utils'; @@ -120,6 +121,9 @@ export class FunctionalTestRunner { throw new Error('No tests defined.'); } + // eslint-disable-next-line + console.log(`--- Running ${relative(process.cwd(), this.configFile)}`); + const dockerServers = new DockerServersService( config.get('dockerServers'), this.log, diff --git a/packages/kbn-test/src/functional_tests/tasks.js b/packages/kbn-test/src/functional_tests/tasks.js index 02c55b6af91dc..122a5a23842d6 100644 --- a/packages/kbn-test/src/functional_tests/tasks.js +++ b/packages/kbn-test/src/functional_tests/tasks.js @@ -88,6 +88,8 @@ export async function runTests(options) { continue; } + console.log(`--- Running ${relative(process.cwd(), configPath)}`); + await withProcRunner(log, async (procs) => { const config = await readConfigFile(log, configPath); diff --git a/packages/kbn-test/src/kbn_client/kbn_client.ts b/packages/kbn-test/src/kbn_client/kbn_client.ts index 3fa74412c1a8b..ac14a399918cb 100644 --- a/packages/kbn-test/src/kbn_client/kbn_client.ts +++ b/packages/kbn-test/src/kbn_client/kbn_client.ts @@ -8,13 +8,14 @@ import { ToolingLog } from '@kbn/dev-utils'; -import { KbnClientRequester, ReqOptions } from './kbn_client_requester'; -import { KbnClientStatus } from './kbn_client_status'; +import { KbnClientImportExport } from './kbn_client_import_export'; import { KbnClientPlugins } from './kbn_client_plugins'; -import { KbnClientVersion } from './kbn_client_version'; +import { KbnClientRequester, ReqOptions } from './kbn_client_requester'; import { KbnClientSavedObjects } from './kbn_client_saved_objects'; +import { KbnClientSpaces } from './kbn_client_spaces'; +import { KbnClientStatus } from './kbn_client_status'; import { KbnClientUiSettings, UiSettingValues } from './kbn_client_ui_settings'; -import { KbnClientImportExport } from './kbn_client_import_export'; +import { KbnClientVersion } from './kbn_client_version'; export interface KbnClientOptions { url: string; @@ -29,6 +30,7 @@ export class KbnClient { readonly plugins: KbnClientPlugins; readonly version: KbnClientVersion; readonly savedObjects: KbnClientSavedObjects; + readonly spaces: KbnClientSpaces; readonly uiSettings: KbnClientUiSettings; readonly importExport: KbnClientImportExport; @@ -59,6 +61,7 @@ export class KbnClient { this.plugins = new KbnClientPlugins(this.status); this.version = new KbnClientVersion(this.status); this.savedObjects = new KbnClientSavedObjects(this.log, this.requester); + this.spaces = new KbnClientSpaces(this.requester); this.uiSettings = new KbnClientUiSettings(this.log, this.requester, this.uiSettingDefaults); this.importExport = new KbnClientImportExport( this.log, diff --git a/packages/kbn-test/src/kbn_client/kbn_client_requester.ts b/packages/kbn-test/src/kbn_client/kbn_client_requester.ts index af75137d148e9..a194b593b3863 100644 --- a/packages/kbn-test/src/kbn_client/kbn_client_requester.ts +++ b/packages/kbn-test/src/kbn_client/kbn_client_requester.ts @@ -121,6 +121,8 @@ export class KbnClientRequester { responseType: options.responseType, // work around https://github.com/axios/axios/issues/2791 transformResponse: options.responseType === 'text' ? 
[(x) => x] : undefined, + maxContentLength: 30000000, + maxBodyLength: 30000000, paramsSerializer: (params) => Qs.stringify(params), }); diff --git a/packages/kbn-test/src/kbn_client/kbn_client_spaces.ts b/packages/kbn-test/src/kbn_client/kbn_client_spaces.ts new file mode 100644 index 0000000000000..e88531606e917 --- /dev/null +++ b/packages/kbn-test/src/kbn_client/kbn_client_spaces.ts @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import { KbnClientRequester, uriencode } from './kbn_client_requester'; + +interface UpdateBody { + name: string; + description?: string; + disabledFeatures?: string | string[]; + initials?: string; + color?: string; + imageUrl?: string; +} + +interface CreateBody extends UpdateBody { + id: string; +} + +export class KbnClientSpaces { + constructor(private readonly requester: KbnClientRequester) {} + + async create(body: CreateBody) { + await this.requester.request({ + method: 'POST', + path: '/api/spaces/space', + body, + }); + } + + async update(id: string, body: UpdateBody) { + await this.requester.request({ + method: 'PUT', + path: uriencode`/api/spaces/space/${id}`, + body, + }); + } + + async get(id: string) { + const { data } = await this.requester.request({ + method: 'GET', + path: uriencode`/api/spaces/space/${id}`, + }); + + return data; + } + + async list() { + const { data } = await this.requester.request({ + method: 'GET', + path: '/api/spaces/space', + }); + + return data; + } + + async delete(id: string) { + await this.requester.request({ + method: 'DELETE', + path: uriencode`/api/spaces/space/${id}`, + }); + } +} diff --git a/packages/kbn-test/src/report_path.ts b/packages/kbn-test/src/report_path.ts index 9f41fdd41c061..dcda0f5eb2d3c 100644 --- a/packages/kbn-test/src/report_path.ts +++ b/packages/kbn-test/src/report_path.ts @@ -16,11 +16,17 @@ export function getUniqueJunitReportPath( reportName: string, counter?: number ): string { + const BUILDKITE_ID_SUFFIX = process.env.BUILDKITE_JOB_ID + ? `-${process.env.BUILDKITE_JOB_ID}` + : ''; + const path = Path.resolve( rootDirectory, 'target/junit', process.env.JOB || '.', - `TEST-${CI_PARALLEL_PROCESS_PREFIX}${reportName}${counter ? `-${counter}` : ''}.xml` + `TEST-${CI_PARALLEL_PROCESS_PREFIX}${reportName}${ + counter ? 
`-${counter}` : '' + }${BUILDKITE_ID_SUFFIX}.xml` ); return Fs.existsSync(path) diff --git a/packages/kbn-ui-shared-deps/entry.js b/packages/kbn-ui-shared-deps/entry.js index 4029ce28faf5b..d3755ed7c5f29 100644 --- a/packages/kbn-ui-shared-deps/entry.js +++ b/packages/kbn-ui-shared-deps/entry.js @@ -44,6 +44,8 @@ export const Theme = require('./theme.ts'); export const Lodash = require('lodash'); export const LodashFp = require('lodash/fp'); +export const Fflate = require('fflate/esm/browser'); + // runtime deps which don't need to be copied across all bundles export const TsLib = require('tslib'); export const KbnAnalytics = require('@kbn/analytics'); diff --git a/packages/kbn-ui-shared-deps/index.js b/packages/kbn-ui-shared-deps/index.js index 62ddb09d25add..877bf3df6c039 100644 --- a/packages/kbn-ui-shared-deps/index.js +++ b/packages/kbn-ui-shared-deps/index.js @@ -52,6 +52,7 @@ exports.externals = { '@elastic/eui/dist/eui_theme_dark.json': '__kbnSharedDeps__.Theme.euiDarkVars', lodash: '__kbnSharedDeps__.Lodash', 'lodash/fp': '__kbnSharedDeps__.LodashFp', + fflate: '__kbnSharedDeps__.Fflate', /** * runtime deps which don't need to be copied across all bundles diff --git a/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap b/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap index 575a247ffeccb..0f5efe667ec2f 100644 --- a/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap +++ b/src/core/public/chrome/ui/header/__snapshots__/collapsible_nav.test.tsx.snap @@ -587,10 +587,12 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` > @@ -1921,6 +1923,7 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` > @@ -1999,6 +2004,7 @@ exports[`CollapsibleNav renders links grouped by category 1`] = ` @@ -3084,6 +3094,7 @@ exports[`CollapsibleNav renders the default nav 3`] = ` > @@ -3162,6 +3175,7 @@ exports[`CollapsibleNav renders the default nav 3`] = ` @@ -5469,6 +5473,7 @@ exports[`Header renders 1`] = ` > @@ -5547,6 +5554,7 @@ exports[`Header renders 1`] = `
Flyout content
"`; +exports[`FlyoutService openFlyout() renders a flyout to the DOM 2`] = `"
Flyout content
"`; exports[`FlyoutService openFlyout() with a currently active flyout replaces the current flyout with a new one 1`] = ` Array [ @@ -59,4 +59,4 @@ Array [ ] `; -exports[`FlyoutService openFlyout() with a currently active flyout replaces the current flyout with a new one 2`] = `"
Flyout content 2
"`; +exports[`FlyoutService openFlyout() with a currently active flyout replaces the current flyout with a new one 2`] = `"
Flyout content 2
"`; diff --git a/src/core/public/overlays/modal/__snapshots__/modal_service.test.tsx.snap b/src/core/public/overlays/modal/__snapshots__/modal_service.test.tsx.snap index 19ebb5a9113c3..9c39776fcea5c 100644 --- a/src/core/public/overlays/modal/__snapshots__/modal_service.test.tsx.snap +++ b/src/core/public/overlays/modal/__snapshots__/modal_service.test.tsx.snap @@ -29,7 +29,7 @@ Array [ ] `; -exports[`ModalService openConfirm() renders a mountpoint confirm message 2`] = `"
Modal content
"`; +exports[`ModalService openConfirm() renders a mountpoint confirm message 2`] = `"
Modal content
"`; exports[`ModalService openConfirm() renders a string confirm message 1`] = ` Array [ @@ -49,7 +49,7 @@ Array [ ] `; -exports[`ModalService openConfirm() renders a string confirm message 2`] = `"

Some message

"`; +exports[`ModalService openConfirm() renders a string confirm message 2`] = `"

Some message

"`; exports[`ModalService openConfirm() with a currently active confirm replaces the current confirm with the new one 1`] = ` Array [ @@ -131,7 +131,7 @@ Array [ ] `; -exports[`ModalService openModal() renders a modal to the DOM 2`] = `"
Modal content
"`; +exports[`ModalService openModal() renders a modal to the DOM 2`] = `"
Modal content
"`; exports[`ModalService openModal() with a currently active confirm replaces the current confirm with the new one 1`] = ` Array [ diff --git a/src/core/server/deprecations/deprecations_factory.mock.ts b/src/core/server/deprecations/deprecations_factory.mock.ts new file mode 100644 index 0000000000000..91ae4e6fa9af9 --- /dev/null +++ b/src/core/server/deprecations/deprecations_factory.mock.ts @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import type { PublicMethodsOf } from '@kbn/utility-types'; +import type { DeprecationsFactory } from './deprecations_factory'; +type DeprecationsFactoryContract = PublicMethodsOf; + +const createDeprecationsFactoryMock = () => { + const mocked: jest.Mocked = { + getRegistry: jest.fn(), + getDeprecations: jest.fn(), + getAllDeprecations: jest.fn(), + }; + + mocked.getDeprecations.mockResolvedValue([]); + mocked.getAllDeprecations.mockResolvedValue([]); + return mocked as jest.Mocked; +}; + +export const mockDeprecationsFactory = { + create: createDeprecationsFactoryMock, +}; diff --git a/src/core/server/deprecations/deprecations_factory.test.ts b/src/core/server/deprecations/deprecations_factory.test.ts index 469451b0020c0..187f3880f9998 100644 --- a/src/core/server/deprecations/deprecations_factory.test.ts +++ b/src/core/server/deprecations/deprecations_factory.test.ts @@ -6,7 +6,7 @@ * Side Public License, v 1. */ -import { GetDeprecationsContext } from './types'; +import type { GetDeprecationsContext } from './types'; import { DeprecationsFactory } from './deprecations_factory'; import { loggerMock } from '../logging/logger.mock'; diff --git a/src/core/server/deprecations/deprecations_registry.mock.ts b/src/core/server/deprecations/deprecations_registry.mock.ts new file mode 100644 index 0000000000000..bb178c3935cdc --- /dev/null +++ b/src/core/server/deprecations/deprecations_registry.mock.ts @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +import type { PublicMethodsOf } from '@kbn/utility-types'; +import type { DeprecationsRegistry } from './deprecations_registry'; +import type { GetDeprecationsContext } from './types'; +import { elasticsearchClientMock } from '../elasticsearch/client/mocks'; +import { savedObjectsClientMock } from '../saved_objects/service/saved_objects_client.mock'; +type DeprecationsRegistryContract = PublicMethodsOf; + +const createDeprecationsRegistryMock = () => { + const mocked: jest.Mocked = { + registerDeprecations: jest.fn(), + getDeprecations: jest.fn(), + }; + + return mocked as jest.Mocked; +}; + +const createGetDeprecationsContextMock = () => { + const mocked: jest.Mocked = { + esClient: elasticsearchClientMock.createScopedClusterClient(), + savedObjectsClient: savedObjectsClientMock.create(), + }; + + return mocked; +}; + +export const mockDeprecationsRegistry = { + create: createDeprecationsRegistryMock, + createGetDeprecationsContext: createGetDeprecationsContextMock, +}; diff --git a/src/core/server/deprecations/deprecations_registry.test.ts b/src/core/server/deprecations/deprecations_registry.test.ts index 507677a531861..82b09beaa5123 100644 --- a/src/core/server/deprecations/deprecations_registry.test.ts +++ b/src/core/server/deprecations/deprecations_registry.test.ts @@ -7,7 +7,7 @@ */ /* eslint-disable dot-notation */ -import { RegisterDeprecationsConfig, GetDeprecationsContext } from './types'; +import type { RegisterDeprecationsConfig, GetDeprecationsContext } from './types'; import { DeprecationsRegistry } from './deprecations_registry'; describe('DeprecationsRegistry', () => { diff --git a/src/core/server/deprecations/deprecations_registry.ts b/src/core/server/deprecations/deprecations_registry.ts index f92d807514b82..cc05473923ac8 100644 --- a/src/core/server/deprecations/deprecations_registry.ts +++ b/src/core/server/deprecations/deprecations_registry.ts @@ -6,7 +6,11 @@ * Side Public License, v 1. */ -import { DeprecationsDetails, RegisterDeprecationsConfig, GetDeprecationsContext } from './types'; +import type { + DeprecationsDetails, + RegisterDeprecationsConfig, + GetDeprecationsContext, +} from './types'; export class DeprecationsRegistry { private readonly deprecationContexts: RegisterDeprecationsConfig[] = []; diff --git a/src/core/server/deprecations/deprecations_service.test.ts b/src/core/server/deprecations/deprecations_service.test.ts new file mode 100644 index 0000000000000..d1ed7a83402cb --- /dev/null +++ b/src/core/server/deprecations/deprecations_service.test.ts @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +/* eslint-disable dot-notation */ +import { DeprecationsService } from './deprecations_service'; +import { httpServiceMock } from '../http/http_service.mock'; +import { mockRouter } from '../http/router/router.mock'; +import { mockCoreContext } from '../core_context.mock'; +import { mockDeprecationsFactory } from './deprecations_factory.mock'; +import { mockDeprecationsRegistry } from './deprecations_registry.mock'; + +describe('DeprecationsService', () => { + const coreContext = mockCoreContext.create(); + beforeEach(() => jest.clearAllMocks()); + + describe('#setup', () => { + const http = httpServiceMock.createInternalSetupContract(); + const router = mockRouter.create(); + http.createRouter.mockReturnValue(router); + const deprecationsCoreSetupDeps = { http }; + + it('registers routes', () => { + const deprecationsService = new DeprecationsService(coreContext); + deprecationsService.setup(deprecationsCoreSetupDeps); + // Registers correct base api path + expect(http.createRouter).toBeCalledWith('/api/deprecations'); + // registers get route '/' + expect(router.get).toHaveBeenCalledTimes(1); + expect(router.get).toHaveBeenCalledWith({ path: '/', validate: false }, expect.any(Function)); + }); + + it('calls registerConfigDeprecationsInfo', () => { + const deprecationsService = new DeprecationsService(coreContext); + const mockRegisterConfigDeprecationsInfo = jest.fn(); + deprecationsService['registerConfigDeprecationsInfo'] = mockRegisterConfigDeprecationsInfo; + deprecationsService.setup(deprecationsCoreSetupDeps); + expect(mockRegisterConfigDeprecationsInfo).toBeCalledTimes(1); + }); + }); + + describe('#registerConfigDeprecationsInfo', () => { + const deprecationsFactory = mockDeprecationsFactory.create(); + const deprecationsRegistry = mockDeprecationsRegistry.create(); + const getDeprecationsContext = mockDeprecationsRegistry.createGetDeprecationsContext(); + + it('registers config deprecations', () => { + const deprecationsService = new DeprecationsService(coreContext); + coreContext.configService.getHandledDeprecatedConfigs.mockReturnValue([ + [ + 'testDomain', + [ + { + message: 'testMessage', + documentationUrl: 'testDocUrl', + correctiveActions: { + manualSteps: [ + 'Using Kibana user management, change all users using the kibana_user role to the kibana_admin role.', + 'Using Kibana role-mapping management, change all role-mappings which assing the kibana_user role to the kibana_admin role.', + ], + }, + }, + ], + ], + ]); + + deprecationsFactory.getRegistry.mockReturnValue(deprecationsRegistry); + deprecationsService['registerConfigDeprecationsInfo'](deprecationsFactory); + + expect(coreContext.configService.getHandledDeprecatedConfigs).toBeCalledTimes(1); + expect(deprecationsFactory.getRegistry).toBeCalledTimes(1); + expect(deprecationsFactory.getRegistry).toBeCalledWith('testDomain'); + expect(deprecationsRegistry.registerDeprecations).toBeCalledTimes(1); + const configDeprecations = deprecationsRegistry.registerDeprecations.mock.calls[0][0].getDeprecations( + getDeprecationsContext + ); + expect(configDeprecations).toMatchInlineSnapshot(` + Array [ + Object { + "correctiveActions": Object { + "manualSteps": Array [ + "Using Kibana user management, change all users using the kibana_user role to the kibana_admin role.", + "Using Kibana role-mapping management, change all role-mappings which assing the kibana_user role to the kibana_admin role.", + ], + }, + "deprecationType": "config", + "documentationUrl": "testDocUrl", + "level": "critical", + "message": 
"testMessage", + }, + ] + `); + }); + }); +}); diff --git a/src/core/server/deprecations/deprecations_service.ts b/src/core/server/deprecations/deprecations_service.ts index 8eca1ba5790c5..205dd964468c1 100644 --- a/src/core/server/deprecations/deprecations_service.ts +++ b/src/core/server/deprecations/deprecations_service.ts @@ -11,8 +11,6 @@ import { RegisterDeprecationsConfig } from './types'; import { registerRoutes } from './routes'; import { CoreContext } from '../core_context'; -import { CoreUsageDataSetup } from '../core_usage_data'; -import { InternalElasticsearchServiceSetup } from '../elasticsearch'; import { CoreService } from '../../types'; import { InternalHttpServiceSetup } from '../http'; import { Logger } from '../logging'; @@ -112,8 +110,6 @@ export interface InternalDeprecationsServiceSetup { /** @internal */ export interface DeprecationsSetupDeps { http: InternalHttpServiceSetup; - elasticsearch: InternalElasticsearchServiceSetup; - coreUsageData: CoreUsageDataSetup; } /** @internal */ @@ -156,6 +152,7 @@ export class DeprecationsService implements CoreService { return { level: 'critical', + deprecationType: 'config', message, correctiveActions: correctiveActions ?? {}, documentationUrl, diff --git a/src/core/server/deprecations/types.ts b/src/core/server/deprecations/types.ts index 31734b51b46bd..50c947591fdf4 100644 --- a/src/core/server/deprecations/types.ts +++ b/src/core/server/deprecations/types.ts @@ -25,6 +25,16 @@ export interface DeprecationsDetails { * - fetch_error: Deprecations service failed to grab the deprecation details for the domain. */ level: 'warning' | 'critical' | 'fetch_error'; + /** + * (optional) Used to identify between different deprecation types. + * Example use case: in Upgrade Assistant, we may want to allow the user to sort by + * deprecation type or show each type in a separate tab. + * + * Feel free to add new types if necessary. + * Predefined types are necessary to reduce having similar definitions with different keywords + * across kibana deprecations. + */ + deprecationType?: 'config' | 'feature'; /* (optional) link to the documentation for more details on the deprecation. */ documentationUrl?: string; /* corrective action needed to fix this deprecation. 
*/ diff --git a/src/core/server/kibana_config.test.ts b/src/core/server/kibana_config.test.ts index 47bb6cf2a064c..72ddb3b65081b 100644 --- a/src/core/server/kibana_config.test.ts +++ b/src/core/server/kibana_config.test.ts @@ -22,8 +22,6 @@ it('set correct defaults ', () => { const configValue = config.schema.validate({}); expect(configValue).toMatchInlineSnapshot(` Object { - "autocompleteTerminateAfter": "PT1M40S", - "autocompleteTimeout": "PT1S", "enabled": true, "index": ".kibana", } diff --git a/src/core/server/kibana_config.ts b/src/core/server/kibana_config.ts index 848c51dcb69f3..623ffa86c0e8d 100644 --- a/src/core/server/kibana_config.ts +++ b/src/core/server/kibana_config.ts @@ -29,12 +29,6 @@ export const config = { schema: schema.object({ enabled: schema.boolean({ defaultValue: true }), index: schema.string({ defaultValue: '.kibana' }), - autocompleteTerminateAfter: schema.duration({ defaultValue: 100000 }), - autocompleteTimeout: schema.duration({ defaultValue: 1000 }), }), deprecations, - exposeToUsage: { - autocompleteTerminateAfter: true, - autocompleteTimeout: true, - }, }; diff --git a/src/core/server/mocks.ts b/src/core/server/mocks.ts index 6f1b9dc5bf820..0d52ff64499c1 100644 --- a/src/core/server/mocks.ts +++ b/src/core/server/mocks.ts @@ -58,8 +58,6 @@ export function pluginInitializerContextConfigMock(config: T) { const globalConfig: SharedGlobalConfig = { kibana: { index: '.kibana-tests', - autocompleteTerminateAfter: duration(100000), - autocompleteTimeout: duration(1000), }, elasticsearch: { shardTimeout: duration('30s'), diff --git a/src/core/server/plugins/legacy_config.test.ts b/src/core/server/plugins/legacy_config.test.ts index 0ea26f2e0333e..2a980e38a4e19 100644 --- a/src/core/server/plugins/legacy_config.test.ts +++ b/src/core/server/plugins/legacy_config.test.ts @@ -43,8 +43,6 @@ describe('Legacy config', () => { expect(legacyConfig).toStrictEqual({ kibana: { index: '.kibana', - autocompleteTerminateAfter: duration(100000), - autocompleteTimeout: duration(1000), }, elasticsearch: { shardTimeout: duration(30, 's'), @@ -66,8 +64,6 @@ describe('Legacy config', () => { expect(legacyConfig).toStrictEqual({ kibana: { index: '.kibana', - autocompleteTerminateAfter: duration(100000), - autocompleteTimeout: duration(1000), }, elasticsearch: { shardTimeout: duration(30, 's'), diff --git a/src/core/server/plugins/plugin_context.test.ts b/src/core/server/plugins/plugin_context.test.ts index e37d985d42321..4ba34ccb2149f 100644 --- a/src/core/server/plugins/plugin_context.test.ts +++ b/src/core/server/plugins/plugin_context.test.ts @@ -115,8 +115,6 @@ describe('createPluginInitializerContext', () => { expect(configObject).toStrictEqual({ kibana: { index: '.kibana', - autocompleteTerminateAfter: duration(100000), - autocompleteTimeout: duration(1000), }, elasticsearch: { shardTimeout: duration(30, 's'), diff --git a/src/core/server/plugins/types.ts b/src/core/server/plugins/types.ts index 2b5acd8ed5e94..6b50b5e820665 100644 --- a/src/core/server/plugins/types.ts +++ b/src/core/server/plugins/types.ts @@ -313,7 +313,7 @@ export interface AsyncPlugin< export const SharedGlobalConfigKeys = { // We can add more if really needed - kibana: ['index', 'autocompleteTerminateAfter', 'autocompleteTimeout'] as const, + kibana: ['index'] as const, elasticsearch: ['shardTimeout', 'requestTimeout', 'pingTimeout'] as const, path: ['data'] as const, savedObjects: ['maxImportPayloadBytes'] as const, diff --git a/src/core/server/saved_objects/migrationsv2/actions/index.test.ts 
b/src/core/server/saved_objects/migrationsv2/actions/index.test.ts index df74a4e1282e4..05da335d70884 100644 --- a/src/core/server/saved_objects/migrationsv2/actions/index.test.ts +++ b/src/core/server/saved_objects/migrationsv2/actions/index.test.ts @@ -37,7 +37,7 @@ describe('actions', () => { describe('fetchIndices', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.fetchIndices(client, ['my_index']); + const task = Actions.fetchIndices({ client, indices: ['my_index'] }); try { await task(); } catch (e) { @@ -49,7 +49,7 @@ describe('actions', () => { describe('setWriteBlock', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.setWriteBlock(client, 'my_index'); + const task = Actions.setWriteBlock({ client, index: 'my_index' }); try { await task(); } catch (e) { @@ -58,7 +58,10 @@ describe('actions', () => { expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(retryableError); }); it('re-throws non retry-able errors', async () => { - const task = Actions.setWriteBlock(clientWithNonRetryableError, 'my_index'); + const task = Actions.setWriteBlock({ + client: clientWithNonRetryableError, + index: 'my_index', + }); await task(); expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(nonRetryableError); }); @@ -66,7 +69,11 @@ describe('actions', () => { describe('cloneIndex', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.cloneIndex(client, 'my_source_index', 'my_target_index'); + const task = Actions.cloneIndex({ + client, + source: 'my_source_index', + target: 'my_target_index', + }); try { await task(); } catch (e) { @@ -75,7 +82,10 @@ describe('actions', () => { expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(retryableError); }); it('re-throws non retry-able errors', async () => { - const task = Actions.setWriteBlock(clientWithNonRetryableError, 'my_index'); + const task = Actions.setWriteBlock({ + client: clientWithNonRetryableError, + index: 'my_index', + }); await task(); expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(nonRetryableError); }); @@ -95,7 +105,7 @@ describe('actions', () => { describe('openPit', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.openPit(client, 'my_index'); + const task = Actions.openPit({ client, index: 'my_index' }); try { await task(); } catch (e) { @@ -107,7 +117,12 @@ describe('actions', () => { describe('readWithPit', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.readWithPit(client, 'pitId', { match_all: {} }, 10_000); + const task = Actions.readWithPit({ + client, + pitId: 'pitId', + query: { match_all: {} }, + batchSize: 10_000, + }); try { await task(); } catch (e) { @@ -119,7 +134,7 @@ describe('actions', () => { describe('closePit', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.closePit(client, 'pitId'); + const task = Actions.closePit({ client, pitId: 'pitId' }); try { await task(); } catch (e) { @@ -131,14 +146,14 @@ describe('actions', () => { describe('reindex', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.reindex( + const task = Actions.reindex({ client, - 'my_source_index', - 'my_target_index', - Option.none, - false, - {} - ); + sourceIndex: 'my_source_index', + targetIndex: 
'my_target_index', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: {}, + }); try { await task(); } catch (e) { @@ -150,7 +165,7 @@ describe('actions', () => { describe('waitForReindexTask', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.waitForReindexTask(client, 'my task id', '60s'); + const task = Actions.waitForReindexTask({ client, taskId: 'my task id', timeout: '60s' }); try { await task(); } catch (e) { @@ -160,7 +175,10 @@ describe('actions', () => { expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(retryableError); }); it('re-throws non retry-able errors', async () => { - const task = Actions.setWriteBlock(clientWithNonRetryableError, 'my_index'); + const task = Actions.setWriteBlock({ + client: clientWithNonRetryableError, + index: 'my_index', + }); await task(); expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(nonRetryableError); }); @@ -168,7 +186,11 @@ describe('actions', () => { describe('waitForPickupUpdatedMappingsTask', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.waitForPickupUpdatedMappingsTask(client, 'my task id', '60s'); + const task = Actions.waitForPickupUpdatedMappingsTask({ + client, + taskId: 'my task id', + timeout: '60s', + }); try { await task(); } catch (e) { @@ -178,7 +200,10 @@ describe('actions', () => { expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(retryableError); }); it('re-throws non retry-able errors', async () => { - const task = Actions.setWriteBlock(clientWithNonRetryableError, 'my_index'); + const task = Actions.setWriteBlock({ + client: clientWithNonRetryableError, + index: 'my_index', + }); await task(); expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(nonRetryableError); }); @@ -186,7 +211,7 @@ describe('actions', () => { describe('updateAliases', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.updateAliases(client, []); + const task = Actions.updateAliases({ client, aliasActions: [] }); try { await task(); } catch (e) { @@ -196,7 +221,10 @@ describe('actions', () => { expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(retryableError); }); it('re-throws non retry-able errors', async () => { - const task = Actions.setWriteBlock(clientWithNonRetryableError, 'my_index'); + const task = Actions.setWriteBlock({ + client: clientWithNonRetryableError, + index: 'my_index', + }); await task(); expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(nonRetryableError); }); @@ -204,7 +232,11 @@ describe('actions', () => { describe('createIndex', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.createIndex(client, 'new_index', { properties: {} }); + const task = Actions.createIndex({ + client, + indexName: 'new_index', + mappings: { properties: {} }, + }); try { await task(); } catch (e) { @@ -214,7 +246,10 @@ describe('actions', () => { expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(retryableError); }); it('re-throws non retry-able errors', async () => { - const task = Actions.setWriteBlock(clientWithNonRetryableError, 'my_index'); + const task = Actions.setWriteBlock({ + client: clientWithNonRetryableError, + index: 'my_index', + }); await task(); expect(catchRetryableEsClientErrors).toHaveBeenCalledWith(nonRetryableError); }); @@ -222,7 +257,11 @@ describe('actions', () => { describe('updateAndPickupMappings', () => { 
it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.updateAndPickupMappings(client, 'new_index', { properties: {} }); + const task = Actions.updateAndPickupMappings({ + client, + index: 'new_index', + mappings: { properties: {} }, + }); try { await task(); } catch (e) { @@ -276,7 +315,12 @@ describe('actions', () => { describe('bulkOverwriteTransformedDocuments', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.bulkOverwriteTransformedDocuments(client, 'new_index', [], 'wait_for'); + const task = Actions.bulkOverwriteTransformedDocuments({ + client, + index: 'new_index', + transformedDocs: [], + refresh: 'wait_for', + }); try { await task(); } catch (e) { @@ -289,7 +333,7 @@ describe('actions', () => { describe('refreshIndex', () => { it('calls catchRetryableEsClientErrors when the promise rejects', async () => { - const task = Actions.refreshIndex(client, 'target_index'); + const task = Actions.refreshIndex({ client, targetIndex: 'target_index' }); try { await task(); } catch (e) { diff --git a/src/core/server/saved_objects/migrationsv2/actions/index.ts b/src/core/server/saved_objects/migrationsv2/actions/index.ts index c2e0476960c3b..905d64947298e 100644 --- a/src/core/server/saved_objects/migrationsv2/actions/index.ts +++ b/src/core/server/saved_objects/migrationsv2/actions/index.ts @@ -68,20 +68,26 @@ export type FetchIndexResponse = Record< { aliases: Record; mappings: IndexMapping; settings: unknown } >; +/** @internal */ +export interface FetchIndicesParams { + client: ElasticsearchClient; + indices: string[]; +} + /** * Fetches information about the given indices including aliases, mappings and * settings. */ -export const fetchIndices = ( - client: ElasticsearchClient, - indicesToFetch: string[] -): TaskEither.TaskEither => +export const fetchIndices = ({ + client, + indices, +}: FetchIndicesParams): TaskEither.TaskEither => // @ts-expect-error @elastic/elasticsearch IndexState.alias and IndexState.mappings should be required () => { return client.indices .get( { - index: indicesToFetch, + index: indices, ignore_unavailable: true, // Don't return an error for missing indices. Note this *will* include closed indices, the docs are misleading https://github.com/elastic/elasticsearch/issues/63607 }, { ignore: [404], maxRetries: 0 } @@ -96,6 +102,12 @@ export interface IndexNotFound { type: 'index_not_found_exception'; index: string; } + +/** @internal */ +export interface SetWriteBlockParams { + client: ElasticsearchClient; + index: string; +} /** * Sets a write block in place for the given index. 
If the response includes * `acknowledged: true` all in-progress writes have drained and no further @@ -105,10 +117,10 @@ export interface IndexNotFound { * include `shards_acknowledged: true` but once the block is in place, * subsequent calls return `shards_acknowledged: false` */ -export const setWriteBlock = ( - client: ElasticsearchClient, - index: string -): TaskEither.TaskEither< +export const setWriteBlock = ({ + client, + index, +}: SetWriteBlockParams): TaskEither.TaskEither< IndexNotFound | RetryableEsClientError, 'set_write_block_succeeded' > => () => { @@ -145,13 +157,21 @@ export const setWriteBlock = ( ); }; +/** @internal */ +export interface RemoveWriteBlockParams { + client: ElasticsearchClient; + index: string; +} /** * Removes a write block from an index */ -export const removeWriteBlock = ( - client: ElasticsearchClient, - index: string -): TaskEither.TaskEither => () => { +export const removeWriteBlock = ({ + client, + index, +}: RemoveWriteBlockParams): TaskEither.TaskEither< + RetryableEsClientError, + 'remove_write_block_succeeded' +> => () => { return client.indices .putSettings<{ acknowledged: boolean; @@ -182,6 +202,12 @@ export const removeWriteBlock = ( .catch(catchRetryableEsClientErrors); }; +/** @internal */ +export interface WaitForIndexStatusYellowParams { + client: ElasticsearchClient; + index: string; + timeout?: string; +} /** * A yellow index status means the index's primary shard is allocated and the * index is ready for searching/indexing documents, but ES wasn't able to @@ -193,11 +219,11 @@ export const removeWriteBlock = ( * yellow at any point in the future. So ultimately data-redundancy is up to * users to maintain. */ -export const waitForIndexStatusYellow = ( - client: ElasticsearchClient, - index: string, - timeout = DEFAULT_TIMEOUT -): TaskEither.TaskEither => () => { +export const waitForIndexStatusYellow = ({ + client, + index, + timeout = DEFAULT_TIMEOUT, +}: WaitForIndexStatusYellowParams): TaskEither.TaskEither => () => { return client.cluster .health({ index, wait_for_status: 'yellow', timeout }) .then(() => { @@ -208,6 +234,14 @@ export const waitForIndexStatusYellow = ( export type CloneIndexResponse = AcknowledgeResponse; +/** @internal */ +export interface CloneIndexParams { + client: ElasticsearchClient; + source: string; + target: string; + /** only used for testing */ + timeout?: string; +} /** * Makes a clone of the source index into the target. * @@ -218,13 +252,15 @@ export type CloneIndexResponse = AcknowledgeResponse; * - the first call will wait up to 120s for the cluster state and all shards * to be updated. */ -export const cloneIndex = ( - client: ElasticsearchClient, - source: string, - target: string, - /** only used for testing */ - timeout = DEFAULT_TIMEOUT -): TaskEither.TaskEither => { +export const cloneIndex = ({ + client, + source, + target, + timeout = DEFAULT_TIMEOUT, +}: CloneIndexParams): TaskEither.TaskEither< + RetryableEsClientError | IndexNotFound, + CloneIndexResponse +> => { const cloneTask: TaskEither.TaskEither< RetryableEsClientError | IndexNotFound, AcknowledgeResponse @@ -302,7 +338,7 @@ export const cloneIndex = ( } else { // Otherwise, wait until the target index has a 'green' status. 
return pipe( - waitForIndexStatusYellow(client, target, timeout), + waitForIndexStatusYellow({ client, index: target, timeout }), TaskEither.map((value) => { /** When the index status is 'green' we know that all shards were started */ return { acknowledged: true, shardsAcknowledged: true }; @@ -352,16 +388,22 @@ const catchWaitForTaskCompletionTimeout = ( } }; +/** @internal */ +export interface WaitForTaskParams { + client: ElasticsearchClient; + taskId: string; + timeout: string; +} /** * Blocks for up to 60s or until a task completes. * * TODO: delete completed tasks */ -const waitForTask = ( - client: ElasticsearchClient, - taskId: string, - timeout: string -): TaskEither.TaskEither< +const waitForTask = ({ + client, + taskId, + timeout, +}: WaitForTaskParams): TaskEither.TaskEither< RetryableEsClientError | WaitForTaskCompletionTimeout, WaitForTaskResponse > => () => { @@ -433,16 +475,21 @@ export interface OpenPitResponse { pitId: string; } +/** @internal */ +export interface OpenPitParams { + client: ElasticsearchClient; + index: string; +} // how long ES should keep PIT alive const pitKeepAlive = '10m'; /* * Creates a lightweight view of data when the request has been initiated. * See https://www.elastic.co/guide/en/elasticsearch/reference/current/point-in-time-api.html * */ -export const openPit = ( - client: ElasticsearchClient, - index: string -): TaskEither.TaskEither => () => { +export const openPit = ({ + client, + index, +}: OpenPitParams): TaskEither.TaskEither => () => { return client .openPointInTime({ index, @@ -459,17 +506,28 @@ export interface ReadWithPit { readonly totalHits: number | undefined; } +/** @internal */ + +export interface ReadWithPitParams { + client: ElasticsearchClient; + pitId: string; + query: estypes.QueryContainer; + batchSize: number; + searchAfter?: number[]; + seqNoPrimaryTerm?: boolean; +} + /* * Requests documents from the index using PIT mechanism. * */ -export const readWithPit = ( - client: ElasticsearchClient, - pitId: string, - query: estypes.QueryContainer, - batchSize: number, - searchAfter?: number[], - seqNoPrimaryTerm?: boolean -): TaskEither.TaskEither => () => { +export const readWithPit = ({ + client, + pitId, + query, + batchSize, + searchAfter, + seqNoPrimaryTerm, +}: ReadWithPitParams): TaskEither.TaskEither => () => { return client .search({ seq_no_primary_term: seqNoPrimaryTerm, @@ -516,14 +574,19 @@ export const readWithPit = ( .catch(catchRetryableEsClientErrors); }; +/** @internal */ +export interface ClosePitParams { + client: ElasticsearchClient; + pitId: string; +} /* * Closes PIT. 
* See https://www.elastic.co/guide/en/elasticsearch/reference/current/point-in-time-api.html * */ -export const closePit = ( - client: ElasticsearchClient, - pitId: string -): TaskEither.TaskEither => () => { +export const closePit = ({ + client, + pitId, +}: ClosePitParams): TaskEither.TaskEither => () => { return client .closePointInTime({ body: { id: pitId }, @@ -537,27 +600,42 @@ export const closePit = ( .catch(catchRetryableEsClientErrors); }; +/** @internal */ +export interface TransformDocsParams { + transformRawDocs: TransformRawDocs; + outdatedDocuments: SavedObjectsRawDoc[]; +} /* * Transform outdated docs * */ -export const transformDocs = ( - transformRawDocs: TransformRawDocs, - outdatedDocuments: SavedObjectsRawDoc[] -): TaskEither.TaskEither => - transformRawDocs(outdatedDocuments); +export const transformDocs = ({ + transformRawDocs, + outdatedDocuments, +}: TransformDocsParams): TaskEither.TaskEither< + DocumentsTransformFailed, + DocumentsTransformSuccess +> => transformRawDocs(outdatedDocuments); /** @internal */ export interface ReindexResponse { taskId: string; } +/** @internal */ +export interface RefreshIndexParams { + client: ElasticsearchClient; + targetIndex: string; +} /** * Wait for Elasticsearch to reindex all the changes. */ -export const refreshIndex = ( - client: ElasticsearchClient, - targetIndex: string -): TaskEither.TaskEither => () => { +export const refreshIndex = ({ + client, + targetIndex, +}: RefreshIndexParams): TaskEither.TaskEither< + RetryableEsClientError, + { refreshed: boolean } +> => () => { return client.indices .refresh({ index: targetIndex, @@ -567,6 +645,19 @@ export const refreshIndex = ( }) .catch(catchRetryableEsClientErrors); }; +/** @internal */ +export interface ReindexParams { + client: ElasticsearchClient; + sourceIndex: string; + targetIndex: string; + reindexScript: Option.Option; + requireAlias: boolean; + /* When reindexing we use a source query to exclude saved objects types which + * are no longer used. These saved objects will still be kept in the outdated + * index for backup purposes, but won't be available in the upgraded index. + */ + unusedTypesQuery: estypes.QueryContainer; +} /** * Reindex documents from the `sourceIndex` into the `targetIndex`. Returns a * task ID which can be tracked for progress. @@ -575,18 +666,14 @@ export const refreshIndex = ( * this in parallel. By using `op_type: 'create', conflicts: 'proceed'` there * will be only one write per reindexed document. */ -export const reindex = ( - client: ElasticsearchClient, - sourceIndex: string, - targetIndex: string, - reindexScript: Option.Option, - requireAlias: boolean, - /* When reindexing we use a source query to exclude saved objects types which - * are no longer used. These saved objects will still be kept in the outdated - * index for backup purposes, but won't be available in the upgraded index. - */ - unusedTypesQuery: estypes.QueryContainer -): TaskEither.TaskEither => () => { +export const reindex = ({ + client, + sourceIndex, + targetIndex, + reindexScript, + requireAlias, + unusedTypesQuery, +}: ReindexParams): TaskEither.TaskEither => () => { return client .reindex({ // Require targetIndex to be an alias. 
Prevents a new index from being @@ -688,11 +775,18 @@ export const waitForReindexTask = flow( ) ); -export const verifyReindex = ( - client: ElasticsearchClient, - sourceIndex: string, - targetIndex: string -): TaskEither.TaskEither< +/** @internal */ +export interface VerifyReindexParams { + client: ElasticsearchClient; + sourceIndex: string; + targetIndex: string; +} + +export const verifyReindex = ({ + client, + sourceIndex, + targetIndex, +}: VerifyReindexParams): TaskEither.TaskEither< RetryableEsClientError | { type: 'verify_reindex_failed' }, 'verify_reindex_succeeded' > => () => { @@ -762,13 +856,18 @@ export type AliasAction = | { remove: { index: string; alias: string; must_exist: boolean } } | { add: { index: string; alias: string } }; +/** @internal */ +export interface UpdateAliasesParams { + client: ElasticsearchClient; + aliasActions: AliasAction[]; +} /** * Calls the Update index alias API `_alias` with the provided alias actions. */ -export const updateAliases = ( - client: ElasticsearchClient, - aliasActions: AliasAction[] -): TaskEither.TaskEither< +export const updateAliases = ({ + client, + aliasActions, +}: UpdateAliasesParams): TaskEither.TaskEither< IndexNotFound | AliasNotFound | RemoveIndexNotAConcreteIndex | RetryableEsClientError, 'update_aliases_succeeded' > => () => { @@ -836,6 +935,14 @@ function aliasArrayToRecord(aliases: string[]): Record { } return result; } + +/** @internal */ +export interface CreateIndexParams { + client: ElasticsearchClient; + indexName: string; + mappings: IndexMapping; + aliases?: string[]; +} /** * Creates an index with the given mappings * @@ -846,12 +953,12 @@ function aliasArrayToRecord(aliases: string[]): Record { * - the first call will wait up to 120s for the cluster state and all shards * to be updated. */ -export const createIndex = ( - client: ElasticsearchClient, - indexName: string, - mappings: IndexMapping, - aliases: string[] = [] -): TaskEither.TaskEither => { +export const createIndex = ({ + client, + indexName, + mappings, + aliases = [], +}: CreateIndexParams): TaskEither.TaskEither => { const createIndexTask: TaskEither.TaskEither< RetryableEsClientError, AcknowledgeResponse @@ -930,7 +1037,7 @@ export const createIndex = ( } else { // Otherwise, wait until the target index has a 'yellow' status. return pipe( - waitForIndexStatusYellow(client, indexName, DEFAULT_TIMEOUT), + waitForIndexStatusYellow({ client, index: indexName, timeout: DEFAULT_TIMEOUT }), TaskEither.map(() => { /** When the index status is 'yellow' we know that all shards were started */ return 'create_index_succeeded'; @@ -946,15 +1053,24 @@ export interface UpdateAndPickupMappingsResponse { taskId: string; } +/** @internal */ +export interface UpdateAndPickupMappingsParams { + client: ElasticsearchClient; + index: string; + mappings: IndexMapping; +} /** * Updates an index's mappings and runs an pickupUpdatedMappings task so that the mapping * changes are "picked up". Returns a taskId to track progress. 
*/ -export const updateAndPickupMappings = ( - client: ElasticsearchClient, - index: string, - mappings: IndexMapping -): TaskEither.TaskEither => { +export const updateAndPickupMappings = ({ + client, + index, + mappings, +}: UpdateAndPickupMappingsParams): TaskEither.TaskEither< + RetryableEsClientError, + UpdateAndPickupMappingsResponse +> => { const putMappingTask: TaskEither.TaskEither< RetryableEsClientError, 'update_mappings_succeeded' @@ -1053,16 +1169,26 @@ export const searchForOutdatedDocuments = ( .catch(catchRetryableEsClientErrors); }; +/** @internal */ +export interface BulkOverwriteTransformedDocumentsParams { + client: ElasticsearchClient; + index: string; + transformedDocs: SavedObjectsRawDoc[]; + refresh?: estypes.Refresh; +} /** * Write the up-to-date transformed documents to the index, overwriting any * documents that are still on their outdated version. */ -export const bulkOverwriteTransformedDocuments = ( - client: ElasticsearchClient, - index: string, - transformedDocs: SavedObjectsRawDoc[], - refresh: estypes.Refresh -): TaskEither.TaskEither => () => { +export const bulkOverwriteTransformedDocuments = ({ + client, + index, + transformedDocs, + refresh = false, +}: BulkOverwriteTransformedDocumentsParams): TaskEither.TaskEither< + RetryableEsClientError, + 'bulk_index_succeeded' +> => () => { return client .bulk({ // Because we only add aliases in the MARK_VERSION_INDEX_READY step we diff --git a/src/core/server/saved_objects/migrationsv2/integration_tests/actions.test.ts b/src/core/server/saved_objects/migrationsv2/integration_tests/actions.test.ts index d0158a4c68f24..67a2685caf3d6 100644 --- a/src/core/server/saved_objects/migrationsv2/integration_tests/actions.test.ts +++ b/src/core/server/saved_objects/migrationsv2/integration_tests/actions.test.ts @@ -67,9 +67,13 @@ describe('migration actions', () => { client = start.elasticsearch.client.asInternalUser; // Create test fixture data: - await createIndex(client, 'existing_index_with_docs', { - dynamic: true, - properties: {}, + await createIndex({ + client, + indexName: 'existing_index_with_docs', + mappings: { + dynamic: true, + properties: {}, + }, })(); const sourceDocs = ([ { _source: { title: 'doc 1' } }, @@ -78,25 +82,30 @@ describe('migration actions', () => { { _source: { title: 'saved object 4', type: 'another_unused_type' } }, { _source: { title: 'f-agent-event 5', type: 'f_agent_event' } }, ] as unknown) as SavedObjectsRawDoc[]; - await bulkOverwriteTransformedDocuments( + await bulkOverwriteTransformedDocuments({ + client, + index: 'existing_index_with_docs', + transformedDocs: sourceDocs, + refresh: 'wait_for', + })(); + + await createIndex({ client, indexName: 'existing_index_2', mappings: { properties: {} } })(); + await createIndex({ client, - 'existing_index_with_docs', - sourceDocs, - 'wait_for' - )(); - - await createIndex(client, 'existing_index_2', { properties: {} })(); - await createIndex(client, 'existing_index_with_write_block', { properties: {} })(); - await bulkOverwriteTransformedDocuments( + indexName: 'existing_index_with_write_block', + mappings: { properties: {} }, + })(); + await bulkOverwriteTransformedDocuments({ client, - 'existing_index_with_write_block', - sourceDocs, - 'wait_for' - )(); - await setWriteBlock(client, 'existing_index_with_write_block')(); - await updateAliases(client, [ - { add: { index: 'existing_index_2', alias: 'existing_index_2_alias' } }, - ])(); + index: 'existing_index_with_write_block', + transformedDocs: sourceDocs, + refresh: 'wait_for', + })(); + 
await setWriteBlock({ client, index: 'existing_index_with_write_block' })(); + await updateAliases({ + client, + aliasActions: [{ add: { index: 'existing_index_2', alias: 'existing_index_2_alias' } }], + })(); }); afterAll(async () => { @@ -107,7 +116,7 @@ describe('migration actions', () => { describe('fetchIndices', () => { it('resolves right empty record if no indices were found', async () => { expect.assertions(1); - const task = fetchIndices(client, ['no_such_index']); + const task = fetchIndices({ client, indices: ['no_such_index'] }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -117,10 +126,10 @@ describe('migration actions', () => { }); it('resolves right record with found indices', async () => { expect.assertions(1); - const res = (await fetchIndices(client, [ - 'no_such_index', - 'existing_index_with_docs', - ])()) as Either.Right; + const res = (await fetchIndices({ + client, + indices: ['no_such_index', 'existing_index_with_docs'], + })()) as Either.Right; expect(res.right).toEqual( expect.objectContaining({ @@ -136,11 +145,15 @@ describe('migration actions', () => { describe('setWriteBlock', () => { beforeAll(async () => { - await createIndex(client, 'new_index_without_write_block', { properties: {} })(); + await createIndex({ + client, + indexName: 'new_index_without_write_block', + mappings: { properties: {} }, + })(); }); it('resolves right when setting the write block succeeds', async () => { expect.assertions(1); - const task = setWriteBlock(client, 'new_index_without_write_block'); + const task = setWriteBlock({ client, index: 'new_index_without_write_block' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -150,7 +163,7 @@ describe('migration actions', () => { }); it('resolves right when setting a write block on an index that already has one', async () => { expect.assertions(1); - const task = setWriteBlock(client, 'existing_index_with_write_block'); + const task = setWriteBlock({ client, index: 'existing_index_with_write_block' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -160,7 +173,7 @@ describe('migration actions', () => { }); it('once resolved, prevents further writes to the index', async () => { expect.assertions(1); - const task = setWriteBlock(client, 'new_index_without_write_block'); + const task = setWriteBlock({ client, index: 'new_index_without_write_block' }); await task(); const sourceDocs = ([ { _source: { title: 'doc 1' } }, @@ -169,17 +182,17 @@ describe('migration actions', () => { { _source: { title: 'doc 4' } }, ] as unknown) as SavedObjectsRawDoc[]; await expect( - bulkOverwriteTransformedDocuments( + bulkOverwriteTransformedDocuments({ client, - 'new_index_without_write_block', - sourceDocs, - 'wait_for' - )() + index: 'new_index_without_write_block', + transformedDocs: sourceDocs, + refresh: 'wait_for', + })() ).rejects.toMatchObject(expect.anything()); }); it('resolves left index_not_found_exception when the index does not exist', async () => { expect.assertions(1); - const task = setWriteBlock(client, 'no_such_index'); + const task = setWriteBlock({ client, index: 'no_such_index' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -194,13 +207,21 @@ describe('migration actions', () => { describe('removeWriteBlock', () => { beforeAll(async () => { - await createIndex(client, 'existing_index_without_write_block_2', { properties: {} })(); - await createIndex(client, 
'existing_index_with_write_block_2', { properties: {} })(); - await setWriteBlock(client, 'existing_index_with_write_block_2')(); + await createIndex({ + client, + indexName: 'existing_index_without_write_block_2', + mappings: { properties: {} }, + })(); + await createIndex({ + client, + indexName: 'existing_index_with_write_block_2', + mappings: { properties: {} }, + })(); + await setWriteBlock({ client, index: 'existing_index_with_write_block_2' })(); }); it('resolves right if successful when an index already has a write block', async () => { expect.assertions(1); - const task = removeWriteBlock(client, 'existing_index_with_write_block_2'); + const task = removeWriteBlock({ client, index: 'existing_index_with_write_block_2' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -210,7 +231,7 @@ describe('migration actions', () => { }); it('resolves right if successful when an index does not have a write block', async () => { expect.assertions(1); - const task = removeWriteBlock(client, 'existing_index_without_write_block_2'); + const task = removeWriteBlock({ client, index: 'existing_index_without_write_block_2' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -220,7 +241,7 @@ describe('migration actions', () => { }); it('rejects if there is a non-retryable error', async () => { expect.assertions(1); - const task = removeWriteBlock(client, 'no_such_index'); + const task = removeWriteBlock({ client, index: 'no_such_index' }); await expect(task()).rejects.toMatchInlineSnapshot( `[ResponseError: index_not_found_exception]` ); @@ -251,7 +272,10 @@ describe('migration actions', () => { ); // Start tracking the index status - const indexStatusPromise = waitForIndexStatusYellow(client, 'red_then_yellow_index')(); + const indexStatusPromise = waitForIndexStatusYellow({ + client, + index: 'red_then_yellow_index', + })(); const redStatusResponse = await client.cluster.health({ index: 'red_then_yellow_index' }); expect(redStatusResponse.body.status).toBe('red'); @@ -281,7 +305,11 @@ describe('migration actions', () => { } }); it('resolves right if cloning into a new target index', async () => { - const task = cloneIndex(client, 'existing_index_with_write_block', 'clone_target_1'); + const task = cloneIndex({ + client, + source: 'existing_index_with_write_block', + target: 'clone_target_1', + }); expect.assertions(1); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -314,11 +342,11 @@ describe('migration actions', () => { .catch((e) => {}); // Call clone even though the index already exists - const cloneIndexPromise = cloneIndex( + const cloneIndexPromise = cloneIndex({ client, - 'existing_index_with_write_block', - 'clone_red_then_yellow_index' - )(); + source: 'existing_index_with_write_block', + target: 'clone_red_then_yellow_index', + })(); let indexYellow = false; setTimeout(() => { @@ -348,7 +376,7 @@ describe('migration actions', () => { }); it('resolves left index_not_found_exception if the source index does not exist', async () => { expect.assertions(1); - const task = cloneIndex(client, 'no_such_index', 'clone_target_3'); + const task = cloneIndex({ client, source: 'no_such_index', target: 'clone_target_3' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -378,12 +406,12 @@ describe('migration actions', () => { .catch((e) => {}); // Call clone even though the index already exists - const cloneIndexPromise = cloneIndex( + const cloneIndexPromise = cloneIndex({ client, - 
'existing_index_with_write_block', - 'clone_red_index', - '0s' - )(); + source: 'existing_index_with_write_block', + target: 'clone_red_index', + timeout: '0s', + })(); await cloneIndexPromise.then((res) => { expect(res).toMatchInlineSnapshot(` @@ -404,15 +432,15 @@ describe('migration actions', () => { // together with waitForReindexTask describe('reindex & waitForReindexTask', () => { it('resolves right when reindex succeeds without reindex script', async () => { - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target', - Option.none, - false, - { match_all: {} } - )()) as Either.Right; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -436,21 +464,21 @@ describe('migration actions', () => { `); }); it('resolves right and excludes all documents not matching the unusedTypesQuery', async () => { - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_excluded_docs', - Option.none, - false, - { + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_excluded_docs', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { bool: { must_not: ['f_agent_event', 'another_unused_type'].map((type) => ({ term: { type }, })), }, - } - )()) as Either.Right; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + }, + })()) as Either.Right; + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -473,15 +501,15 @@ describe('migration actions', () => { }); it('resolves right when reindex succeeds with reindex script', async () => { expect.assertions(2); - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_2', - Option.some(`ctx._source.title = ctx._source.title + '_updated'`), - false, - { match_all: {} } - )()) as Either.Right; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_2', + reindexScript: Option.some(`ctx._source.title = ctx._source.title + '_updated'`), + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -506,15 +534,15 @@ describe('migration actions', () => { it('resolves right, ignores version conflicts and does not update existing docs when reindex multiple times', async () => { expect.assertions(3); // Reindex with a script - let res = (await reindex( + let res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_3', - Option.some(`ctx._source.title = ctx._source.title + '_updated'`), - false, - { match_all: {} } - )()) as Either.Right; - let task = waitForReindexTask(client, res.right.taskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_3', + reindexScript: Option.some(`ctx._source.title = ctx._source.title + 
'_updated'`), + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; + let task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -523,15 +551,15 @@ describe('migration actions', () => { `); // reindex without a script - res = (await reindex( + res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_3', - Option.none, - false, - { match_all: {} } - )()) as Either.Right; - task = waitForReindexTask(client, res.right.taskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_3', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; + task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -559,7 +587,7 @@ describe('migration actions', () => { expect.assertions(2); // Simulate a reindex that only adds some of the documents from the // source index into the target index - await createIndex(client, 'reindex_target_4', { properties: {} })(); + await createIndex({ client, indexName: 'reindex_target_4', mappings: { properties: {} } })(); const sourceDocs = ((await searchForOutdatedDocuments(client, { batchSize: 1000, targetIndex: 'existing_index_with_docs', @@ -570,18 +598,23 @@ describe('migration actions', () => { _id, _source, })); - await bulkOverwriteTransformedDocuments(client, 'reindex_target_4', sourceDocs, 'wait_for')(); + await bulkOverwriteTransformedDocuments({ + client, + index: 'reindex_target_4', + transformedDocs: sourceDocs, + refresh: 'wait_for', + })(); // Now do a real reindex - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_4', - Option.some(`ctx._source.title = ctx._source.title + '_updated'`), - false, - { match_all: {} } - )()) as Either.Right; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_4', + reindexScript: Option.some(`ctx._source.title = ctx._source.title + '_updated'`), + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -614,24 +647,28 @@ describe('migration actions', () => { // and should ignore this error. 
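The same pattern applies to reindexing: `reindex` now receives a named-parameter object and only starts the task, while `waitForReindexTask` polls it to completion. A short sketch, assuming `client` is in scope and using a hypothetical target index name:

import * as Option from 'fp-ts/lib/Option';
import * as Either from 'fp-ts/lib/Either';

// Start the reindex task (the returned TaskEither is invoked immediately here).
const started = await reindex({
  client,
  sourceIndex: 'existing_index_with_docs',
  targetIndex: 'reindex_target_sketch', // hypothetical index name
  reindexScript: Option.none, // or Option.some(`ctx._source.title = ctx._source.title + '_updated'`)
  requireAlias: false,
  unusedTypesQuery: { match_all: {} },
})();

// Poll the task until it completes or the timeout elapses.
if (Either.isRight(started)) {
  await waitForReindexTask({ client, taskId: started.right.taskId, timeout: '10s' })();
}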
// Create an index with incompatible mappings - await createIndex(client, 'reindex_target_5', { - dynamic: 'strict', - properties: { - /** no title field */ + await createIndex({ + client, + indexName: 'reindex_target_5', + mappings: { + dynamic: 'strict', + properties: { + /** no title field */ + }, }, })(); const { right: { taskId: reindexTaskId }, - } = (await reindex( + } = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_5', - Option.none, - false, - { match_all: {} } - )()) as Either.Right; - const task = waitForReindexTask(client, reindexTaskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_5', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; + const task = waitForReindexTask({ client, taskId: reindexTaskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -651,22 +688,26 @@ describe('migration actions', () => { // and should ignore this error. // Create an index with incompatible mappings - await createIndex(client, 'reindex_target_6', { - dynamic: false, - properties: { title: { type: 'integer' } }, // integer is incompatible with string title + await createIndex({ + client, + indexName: 'reindex_target_6', + mappings: { + dynamic: false, + properties: { title: { type: 'integer' } }, // integer is incompatible with string title + }, })(); const { right: { taskId: reindexTaskId }, - } = (await reindex( + } = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_6', - Option.none, - false, - { match_all: {} } - )()) as Either.Right; - const task = waitForReindexTask(client, reindexTaskId, '10s'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_6', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; + const task = waitForReindexTask({ client, taskId: reindexTaskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -679,10 +720,17 @@ describe('migration actions', () => { }); it('resolves left index_not_found_exception if source index does not exist', async () => { expect.assertions(1); - const res = (await reindex(client, 'no_such_index', 'reindex_target', Option.none, false, { - match_all: {}, + const res = (await reindex({ + client, + sourceIndex: 'no_such_index', + targetIndex: 'reindex_target', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { + match_all: {}, + }, })()) as Either.Right; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -695,16 +743,16 @@ describe('migration actions', () => { }); it('resolves left target_index_had_write_block if all failures are due to a write block', async () => { expect.assertions(1); - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'existing_index_with_write_block', - Option.none, - false, - { match_all: {} } - )()) as Either.Right; + sourceIndex: 'existing_index_with_docs', + targetIndex: 'existing_index_with_write_block', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + const task = waitForReindexTask({ client, taskId: res.right.taskId, 
timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -717,16 +765,16 @@ describe('migration actions', () => { }); it('resolves left if requireAlias=true and the target is not an alias', async () => { expect.assertions(1); - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'existing_index_with_write_block', - Option.none, - true, - { match_all: {} } - )()) as Either.Right; + sourceIndex: 'existing_index_with_docs', + targetIndex: 'existing_index_with_write_block', + reindexScript: Option.none, + requireAlias: true, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; - const task = waitForReindexTask(client, res.right.taskId, '10s'); + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -739,16 +787,16 @@ describe('migration actions', () => { `); }); it('resolves left wait_for_task_completion_timeout when the task does not finish within the timeout', async () => { - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target', - Option.none, - false, - { match_all: {} } - )()) as Either.Right; + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; - const task = waitForReindexTask(client, res.right.taskId, '0s'); + const task = waitForReindexTask({ client, taskId: res.right.taskId, timeout: '0s' }); await expect(task()).resolves.toMatchObject({ _tag: 'Left', @@ -766,17 +814,21 @@ describe('migration actions', () => { describe('verifyReindex', () => { it('resolves right if source and target indices have the same amount of documents', async () => { expect.assertions(1); - const res = (await reindex( + const res = (await reindex({ client, - 'existing_index_with_docs', - 'reindex_target_7', - Option.none, - false, - { match_all: {} } - )()) as Either.Right; - await waitForReindexTask(client, res.right.taskId, '10s')(); - - const task = verifyReindex(client, 'existing_index_with_docs', 'reindex_target_7'); + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_7', + reindexScript: Option.none, + requireAlias: false, + unusedTypesQuery: { match_all: {} }, + })()) as Either.Right; + await waitForReindexTask({ client, taskId: res.right.taskId, timeout: '10s' })(); + + const task = verifyReindex({ + client, + sourceIndex: 'existing_index_with_docs', + targetIndex: 'reindex_target_7', + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -786,7 +838,11 @@ describe('migration actions', () => { }); it('resolves left if source and target indices have different amount of documents', async () => { expect.assertions(1); - const task = verifyReindex(client, 'existing_index_with_docs', 'existing_index_2'); + const task = verifyReindex({ + client, + sourceIndex: 'existing_index_with_docs', + targetIndex: 'existing_index_2', + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -798,19 +854,27 @@ describe('migration actions', () => { }); it('rejects if source or target index does not exist', async () => { expect.assertions(2); - let task = verifyReindex(client, 'no_such_index', 'existing_index_2'); + let task = verifyReindex({ + client, + sourceIndex: 'no_such_index', + targetIndex: 'existing_index_2', + }); await 
expect(task()).rejects.toMatchInlineSnapshot( `[ResponseError: index_not_found_exception]` ); - task = verifyReindex(client, 'existing_index_2', 'no_such_index'); + task = verifyReindex({ + client, + sourceIndex: 'existing_index_2', + targetIndex: 'no_such_index', + }); await expect(task()).rejects.toThrow('index_not_found_exception'); }); }); describe('openPit', () => { it('opens PointInTime for an index', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; expect(pitResponse.right.pitId).toEqual(expect.any(String)); @@ -824,52 +888,52 @@ describe('migration actions', () => { await expect(searchResponse.body.hits.hits.length).toBeGreaterThan(0); }); it('rejects if index does not exist', async () => { - const openPitTask = openPit(client, 'no_such_index'); + const openPitTask = openPit({ client, index: 'no_such_index' }); await expect(openPitTask()).rejects.toThrow('index_not_found_exception'); }); }); describe('readWithPit', () => { it('requests documents from an index using given PIT', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; - const readWithPitTask = readWithPit( + const readWithPitTask = readWithPit({ client, - pitResponse.right.pitId, - { match_all: {} }, - 1000, - undefined - ); + pitId: pitResponse.right.pitId, + query: { match_all: {} }, + batchSize: 1000, + searchAfter: undefined, + }); const docsResponse = (await readWithPitTask()) as Either.Right; await expect(docsResponse.right.outdatedDocuments.length).toBe(5); }); it('requests the batchSize of documents from an index', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; - const readWithPitTask = readWithPit( + const readWithPitTask = readWithPit({ client, - pitResponse.right.pitId, - { match_all: {} }, - 3, - undefined - ); + pitId: pitResponse.right.pitId, + query: { match_all: {} }, + batchSize: 3, + searchAfter: undefined, + }); const docsResponse = (await readWithPitTask()) as Either.Right; await expect(docsResponse.right.outdatedDocuments.length).toBe(3); }); it('it excludes documents not matching the provided "query"', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; - const readWithPitTask = readWithPit( + const readWithPitTask = readWithPit({ client, - pitResponse.right.pitId, - { + pitId: pitResponse.right.pitId, + query: { bool: { must_not: [ { @@ -885,9 +949,9 @@ describe('migration actions', () => { ], }, }, - 1000, - undefined - ); + batchSize: 1000, + searchAfter: undefined, + }); const docsResponse = (await readWithPitTask()) as Either.Right; @@ -902,18 +966,18 @@ describe('migration actions', () => { }); it('only returns documents that match the provided "query"', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; - const readWithPitTask = readWithPit( + const readWithPitTask = 
readWithPit({ client, - pitResponse.right.pitId, - { + pitId: pitResponse.right.pitId, + query: { match: { title: { query: 'doc' } }, }, - 1000, - undefined - ); + batchSize: 1000, + searchAfter: undefined, + }); const docsResponse = (await readWithPitTask()) as Either.Right; @@ -928,19 +992,19 @@ describe('migration actions', () => { }); it('returns docs with _seq_no and _primary_term when specified', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; - const readWithPitTask = readWithPit( + const readWithPitTask = readWithPit({ client, - pitResponse.right.pitId, - { + pitId: pitResponse.right.pitId, + query: { match: { title: { query: 'doc' } }, }, - 1000, - undefined, - true - ); + batchSize: 1000, + searchAfter: undefined, + seqNoPrimaryTerm: true, + }); const docsResponse = (await readWithPitTask()) as Either.Right; @@ -955,18 +1019,18 @@ describe('migration actions', () => { }); it('does not return docs with _seq_no and _primary_term if not specified', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; - const readWithPitTask = readWithPit( + const readWithPitTask = readWithPit({ client, - pitResponse.right.pitId, - { + pitId: pitResponse.right.pitId, + query: { match: { title: { query: 'doc' } }, }, - 1000, - undefined - ); + batchSize: 1000, + searchAfter: undefined, + }); const docsResponse = (await readWithPitTask()) as Either.Right; @@ -981,24 +1045,24 @@ describe('migration actions', () => { }); it('rejects if PIT does not exist', async () => { - const readWithPitTask = readWithPit( + const readWithPitTask = readWithPit({ client, - 'no_such_pit', - { match_all: {} }, - 1000, - undefined - ); + pitId: 'no_such_pit', + query: { match_all: {} }, + batchSize: 1000, + searchAfter: undefined, + }); await expect(readWithPitTask()).rejects.toThrow('illegal_argument_exception'); }); }); describe('closePit', () => { it('closes PointInTime', async () => { - const openPitTask = openPit(client, 'existing_index_with_docs'); + const openPitTask = openPit({ client, index: 'existing_index_with_docs' }); const pitResponse = (await openPitTask()) as Either.Right; const pitId = pitResponse.right.pitId; - await closePit(client, pitId)(); + await closePit({ client, pitId })(); const searchTask = client.search({ body: { @@ -1010,7 +1074,7 @@ describe('migration actions', () => { }); it('rejects if PIT does not exist', async () => { - const closePitTask = closePit(client, 'no_such_pit'); + const closePitTask = closePit({ client, pitId: 'no_such_pit' }); await expect(closePitTask()).rejects.toThrow('illegal_argument_exception'); }); }); @@ -1034,7 +1098,10 @@ describe('migration actions', () => { return Either.right({ processedDocs }); }; } - const transformTask = transformDocs(innerTransformRawDocs, originalDocs); + const transformTask = transformDocs({ + transformRawDocs: innerTransformRawDocs, + outdatedDocuments: originalDocs, + }); const resultsWithProcessDocs = ((await transformTask()) as Either.Right) .right.processedDocs; @@ -1051,7 +1118,11 @@ describe('migration actions', () => { 'existing_index_with_write_block' )()) as Either.Right; - const task = waitForPickupUpdatedMappingsTask(client, res.right.taskId, '10s'); + const task = waitForPickupUpdatedMappingsTask({ 
+ client, + taskId: res.right.taskId, + timeout: '10s', + }); // We can't do a snapshot match because the response includes an index // id which ES assigns dynamically @@ -1065,7 +1136,11 @@ describe('migration actions', () => { 'no_such_index' )()) as Either.Right; - const task = waitForPickupUpdatedMappingsTask(client, res.right.taskId, '10s'); + const task = waitForPickupUpdatedMappingsTask({ + client, + taskId: res.right.taskId, + timeout: '10s', + }); await expect(task()).rejects.toMatchInlineSnapshot(` [Error: pickupUpdatedMappings task failed with the following error: @@ -1078,7 +1153,11 @@ describe('migration actions', () => { 'existing_index_with_docs' )()) as Either.Right; - const task = waitForPickupUpdatedMappingsTask(client, res.right.taskId, '0s'); + const task = waitForPickupUpdatedMappingsTask({ + client, + taskId: res.right.taskId, + timeout: '0s', + }); await expect(task()).resolves.toMatchObject({ _tag: 'Left', @@ -1097,7 +1176,11 @@ describe('migration actions', () => { 'existing_index_with_docs' )()) as Either.Right; - const task = waitForPickupUpdatedMappingsTask(client, res.right.taskId, '10s'); + const task = waitForPickupUpdatedMappingsTask({ + client, + taskId: res.right.taskId, + timeout: '10s', + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -1111,9 +1194,13 @@ describe('migration actions', () => { describe('updateAndPickupMappings', () => { it('resolves right when mappings were updated and picked up', async () => { // Create an index without any mappings and insert documents into it - await createIndex(client, 'existing_index_without_mappings', { - dynamic: false, - properties: {}, + await createIndex({ + client, + indexName: 'existing_index_without_mappings', + mappings: { + dynamic: false, + properties: {}, + }, })(); const sourceDocs = ([ { _source: { title: 'doc 1' } }, @@ -1121,12 +1208,12 @@ describe('migration actions', () => { { _source: { title: 'doc 3' } }, { _source: { title: 'doc 4' } }, ] as unknown) as SavedObjectsRawDoc[]; - await bulkOverwriteTransformedDocuments( + await bulkOverwriteTransformedDocuments({ client, - 'existing_index_without_mappings', - sourceDocs, - 'wait_for' - )(); + index: 'existing_index_without_mappings', + transformedDocs: sourceDocs, + refresh: 'wait_for', + })(); // Assert that we can't search over the unmapped fields of the document const originalSearchResults = ((await searchForOutdatedDocuments(client, { @@ -1139,14 +1226,18 @@ describe('migration actions', () => { expect(originalSearchResults.length).toBe(0); // Update and pickup mappings so that the title field is searchable - const res = await updateAndPickupMappings(client, 'existing_index_without_mappings', { - properties: { - title: { type: 'text' }, + const res = await updateAndPickupMappings({ + client, + index: 'existing_index_without_mappings', + mappings: { + properties: { + title: { type: 'text' }, + }, }, })(); expect(Either.isRight(res)).toBe(true); const taskId = (res as Either.Right).right.taskId; - await waitForPickupUpdatedMappingsTask(client, taskId, '60s')(); + await waitForPickupUpdatedMappingsTask({ client, taskId, timeout: '60s' })(); // Repeat the search expecting to be able to find the existing documents const pickedUpSearchResults = ((await searchForOutdatedDocuments(client, { @@ -1163,15 +1254,18 @@ describe('migration actions', () => { describe('updateAliases', () => { describe('remove', () => { it('resolves left index_not_found_exception when the index does not exist', async () => { - const task = 
updateAliases(client, [ - { - remove: { - alias: 'no_such_alias', - index: 'no_such_index', - must_exist: false, + const task = updateAliases({ + client, + aliasActions: [ + { + remove: { + alias: 'no_such_alias', + index: 'no_such_index', + must_exist: false, + }, }, - }, - ]); + ], + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -1184,15 +1278,18 @@ describe('migration actions', () => { }); describe('with must_exist=false', () => { it('resolves left alias_not_found_exception when alias does not exist', async () => { - const task = updateAliases(client, [ - { - remove: { - alias: 'no_such_alias', - index: 'existing_index_with_docs', - must_exist: false, + const task = updateAliases({ + client, + aliasActions: [ + { + remove: { + alias: 'no_such_alias', + index: 'existing_index_with_docs', + must_exist: false, + }, }, - }, - ]); + ], + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -1205,15 +1302,18 @@ describe('migration actions', () => { }); describe('with must_exist=true', () => { it('resolves left alias_not_found_exception when alias does not exist on specified index', async () => { - const task = updateAliases(client, [ - { - remove: { - alias: 'existing_index_2_alias', - index: 'existing_index_with_docs', - must_exist: true, + const task = updateAliases({ + client, + aliasActions: [ + { + remove: { + alias: 'existing_index_2_alias', + index: 'existing_index_with_docs', + must_exist: true, + }, }, - }, - ]); + ], + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -1224,15 +1324,18 @@ describe('migration actions', () => { `); }); it('resolves left alias_not_found_exception when alias does not exist', async () => { - const task = updateAliases(client, [ - { - remove: { - alias: 'no_such_alias', - index: 'existing_index_with_docs', - must_exist: true, + const task = updateAliases({ + client, + aliasActions: [ + { + remove: { + alias: 'no_such_alias', + index: 'existing_index_with_docs', + must_exist: true, + }, }, - }, - ]); + ], + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -1246,13 +1349,16 @@ describe('migration actions', () => { }); describe('remove_index', () => { it('left index_not_found_exception if index does not exist', async () => { - const task = updateAliases(client, [ - { - remove_index: { - index: 'no_such_index', + const task = updateAliases({ + client, + aliasActions: [ + { + remove_index: { + index: 'no_such_index', + }, }, - }, - ]); + ], + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -1264,13 +1370,16 @@ describe('migration actions', () => { `); }); it('left remove_index_not_a_concrete_index when remove_index targets an alias', async () => { - const task = updateAliases(client, [ - { - remove_index: { - index: 'existing_index_2_alias', + const task = updateAliases({ + client, + aliasActions: [ + { + remove_index: { + index: 'existing_index_2_alias', + }, }, - }, - ]); + ], + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Left", @@ -1312,7 +1421,11 @@ describe('migration actions', () => { }); // Call createIndex even though the index already exists - const createIndexPromise = createIndex(client, 'red_then_yellow_index', undefined as any)(); + const createIndexPromise = createIndex({ + client, + indexName: 'red_then_yellow_index', + mappings: undefined as any, + })(); let indexYellow = false; setTimeout(() => { @@ -1341,7 +1454,7 @@ 
describe('migration actions', () => { // Creating an index with the same name as an existing alias to induce // failure await expect( - createIndex(client, 'existing_index_2_alias', undefined as any)() + createIndex({ client, indexName: 'existing_index_2_alias', mappings: undefined as any })() ).rejects.toMatchInlineSnapshot(`[ResponseError: invalid_index_name_exception]`); }); }); @@ -1353,12 +1466,12 @@ describe('migration actions', () => { { _source: { title: 'doc 6' } }, { _source: { title: 'doc 7' } }, ] as unknown) as SavedObjectsRawDoc[]; - const task = bulkOverwriteTransformedDocuments( + const task = bulkOverwriteTransformedDocuments({ client, - 'existing_index_with_docs', - newDocs, - 'wait_for' - ); + index: 'existing_index_with_docs', + transformedDocs: newDocs, + refresh: 'wait_for', + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { @@ -1374,12 +1487,15 @@ describe('migration actions', () => { outdatedDocumentsQuery: undefined, })()) as Either.Right).right.outdatedDocuments; - const task = bulkOverwriteTransformedDocuments( + const task = bulkOverwriteTransformedDocuments({ client, - 'existing_index_with_docs', - [...existingDocs, ({ _source: { title: 'doc 8' } } as unknown) as SavedObjectsRawDoc], - 'wait_for' - ); + index: 'existing_index_with_docs', + transformedDocs: [ + ...existingDocs, + ({ _source: { title: 'doc 8' } } as unknown) as SavedObjectsRawDoc, + ], + refresh: 'wait_for', + }); await expect(task()).resolves.toMatchInlineSnapshot(` Object { "_tag": "Right", @@ -1394,12 +1510,12 @@ describe('migration actions', () => { { _source: { title: 'doc 7' } }, ] as unknown) as SavedObjectsRawDoc[]; await expect( - bulkOverwriteTransformedDocuments( + bulkOverwriteTransformedDocuments({ client, - 'existing_index_with_write_block', - newDocs, - 'wait_for' - )() + index: 'existing_index_with_write_block', + transformedDocs: newDocs, + refresh: 'wait_for', + })() ).rejects.toMatchObject(expect.anything()); }); }); diff --git a/src/core/server/saved_objects/migrationsv2/migrations_state_machine_cleanup.ts b/src/core/server/saved_objects/migrationsv2/migrations_state_machine_cleanup.ts index 1881f9a712c29..e9cb33c0aa54a 100644 --- a/src/core/server/saved_objects/migrationsv2/migrations_state_machine_cleanup.ts +++ b/src/core/server/saved_objects/migrationsv2/migrations_state_machine_cleanup.ts @@ -19,7 +19,7 @@ export async function cleanup( if (!state) return; if ('sourceIndexPitId' in state) { try { - await Actions.closePit(client, state.sourceIndexPitId)(); + await Actions.closePit({ client, pitId: state.sourceIndexPitId })(); } catch (e) { executionLog.push({ type: 'cleanup', diff --git a/src/core/server/saved_objects/migrationsv2/next.ts b/src/core/server/saved_objects/migrationsv2/next.ts index 07ebf80271d48..3c3e3c46a8d68 100644 --- a/src/core/server/saved_objects/migrationsv2/next.ts +++ b/src/core/server/saved_objects/migrationsv2/next.ts @@ -58,38 +58,46 @@ export type ResponseType = UnwrapPromise< export const nextActionMap = (client: ElasticsearchClient, transformRawDocs: TransformRawDocs) => { return { INIT: (state: InitState) => - Actions.fetchIndices(client, [state.currentAlias, state.versionAlias]), + Actions.fetchIndices({ client, indices: [state.currentAlias, state.versionAlias] }), WAIT_FOR_YELLOW_SOURCE: (state: WaitForYellowSourceState) => - Actions.waitForIndexStatusYellow(client, state.sourceIndex.value), + Actions.waitForIndexStatusYellow({ client, index: state.sourceIndex.value }), SET_SOURCE_WRITE_BLOCK: (state: 
SetSourceWriteBlockState) => - Actions.setWriteBlock(client, state.sourceIndex.value), + Actions.setWriteBlock({ client, index: state.sourceIndex.value }), CREATE_NEW_TARGET: (state: CreateNewTargetState) => - Actions.createIndex(client, state.targetIndex, state.targetIndexMappings), + Actions.createIndex({ + client, + indexName: state.targetIndex, + mappings: state.targetIndexMappings, + }), CREATE_REINDEX_TEMP: (state: CreateReindexTempState) => - Actions.createIndex(client, state.tempIndex, state.tempIndexMappings), + Actions.createIndex({ + client, + indexName: state.tempIndex, + mappings: state.tempIndexMappings, + }), REINDEX_SOURCE_TO_TEMP_OPEN_PIT: (state: ReindexSourceToTempOpenPit) => - Actions.openPit(client, state.sourceIndex.value), + Actions.openPit({ client, index: state.sourceIndex.value }), REINDEX_SOURCE_TO_TEMP_READ: (state: ReindexSourceToTempRead) => - Actions.readWithPit( + Actions.readWithPit({ client, - state.sourceIndexPitId, + pitId: state.sourceIndexPitId, /* When reading we use a source query to exclude saved objects types which * are no longer used. These saved objects will still be kept in the outdated * index for backup purposes, but won't be available in the upgraded index. */ - state.unusedTypesQuery, - state.batchSize, - state.lastHitSortValue - ), + query: state.unusedTypesQuery, + batchSize: state.batchSize, + searchAfter: state.lastHitSortValue, + }), REINDEX_SOURCE_TO_TEMP_CLOSE_PIT: (state: ReindexSourceToTempClosePit) => - Actions.closePit(client, state.sourceIndexPitId), + Actions.closePit({ client, pitId: state.sourceIndexPitId }), REINDEX_SOURCE_TO_TEMP_INDEX: (state: ReindexSourceToTempIndex) => - Actions.transformDocs(transformRawDocs, state.outdatedDocuments), + Actions.transformDocs({ transformRawDocs, outdatedDocuments: state.outdatedDocuments }), REINDEX_SOURCE_TO_TEMP_INDEX_BULK: (state: ReindexSourceToTempIndexBulk) => - Actions.bulkOverwriteTransformedDocuments( + Actions.bulkOverwriteTransformedDocuments({ client, - state.tempIndex, - state.transformedDocs, + index: state.tempIndex, + transformedDocs: state.transformedDocs, /** * Since we don't run a search against the target index, we disable "refresh" to speed up * the migration process. @@ -97,39 +105,48 @@ export const nextActionMap = (client: ElasticsearchClient, transformRawDocs: Tra * before we reach out to the OUTDATED_DOCUMENTS_SEARCH_OPEN_PIT step. * Right now, it's performed during REFRESH_TARGET step. 
*/ - false - ), + refresh: false, + }), SET_TEMP_WRITE_BLOCK: (state: SetTempWriteBlock) => - Actions.setWriteBlock(client, state.tempIndex), + Actions.setWriteBlock({ client, index: state.tempIndex }), CLONE_TEMP_TO_TARGET: (state: CloneTempToSource) => - Actions.cloneIndex(client, state.tempIndex, state.targetIndex), - REFRESH_TARGET: (state: RefreshTarget) => Actions.refreshIndex(client, state.targetIndex), + Actions.cloneIndex({ client, source: state.tempIndex, target: state.targetIndex }), + REFRESH_TARGET: (state: RefreshTarget) => + Actions.refreshIndex({ client, targetIndex: state.targetIndex }), UPDATE_TARGET_MAPPINGS: (state: UpdateTargetMappingsState) => - Actions.updateAndPickupMappings(client, state.targetIndex, state.targetIndexMappings), + Actions.updateAndPickupMappings({ + client, + index: state.targetIndex, + mappings: state.targetIndexMappings, + }), UPDATE_TARGET_MAPPINGS_WAIT_FOR_TASK: (state: UpdateTargetMappingsWaitForTaskState) => - Actions.waitForPickupUpdatedMappingsTask(client, state.updateTargetMappingsTaskId, '60s'), + Actions.waitForPickupUpdatedMappingsTask({ + client, + taskId: state.updateTargetMappingsTaskId, + timeout: '60s', + }), OUTDATED_DOCUMENTS_SEARCH_OPEN_PIT: (state: OutdatedDocumentsSearchOpenPit) => - Actions.openPit(client, state.targetIndex), + Actions.openPit({ client, index: state.targetIndex }), OUTDATED_DOCUMENTS_SEARCH_READ: (state: OutdatedDocumentsSearchRead) => - Actions.readWithPit( + Actions.readWithPit({ client, - state.pitId, + pitId: state.pitId, // search for outdated documents only - state.outdatedDocumentsQuery, - state.batchSize, - state.lastHitSortValue - ), + query: state.outdatedDocumentsQuery, + batchSize: state.batchSize, + searchAfter: state.lastHitSortValue, + }), OUTDATED_DOCUMENTS_SEARCH_CLOSE_PIT: (state: OutdatedDocumentsSearchClosePit) => - Actions.closePit(client, state.pitId), + Actions.closePit({ client, pitId: state.pitId }), OUTDATED_DOCUMENTS_REFRESH: (state: OutdatedDocumentsRefresh) => - Actions.refreshIndex(client, state.targetIndex), + Actions.refreshIndex({ client, targetIndex: state.targetIndex }), OUTDATED_DOCUMENTS_TRANSFORM: (state: OutdatedDocumentsTransform) => - Actions.transformDocs(transformRawDocs, state.outdatedDocuments), + Actions.transformDocs({ transformRawDocs, outdatedDocuments: state.outdatedDocuments }), TRANSFORMED_DOCUMENTS_BULK_INDEX: (state: TransformedDocumentsBulkIndex) => - Actions.bulkOverwriteTransformedDocuments( + Actions.bulkOverwriteTransformedDocuments({ client, - state.targetIndex, - state.transformedDocs, + index: state.targetIndex, + transformedDocs: state.transformedDocs, /** * Since we don't run a search against the target index, we disable "refresh" to speed up * the migration process. @@ -137,29 +154,32 @@ export const nextActionMap = (client: ElasticsearchClient, transformRawDocs: Tra * before we reach out to the MARK_VERSION_INDEX_READY step. * Right now, it's performed during OUTDATED_DOCUMENTS_REFRESH step. 
*/ - false - ), + }), MARK_VERSION_INDEX_READY: (state: MarkVersionIndexReady) => - Actions.updateAliases(client, state.versionIndexReadyActions.value), + Actions.updateAliases({ client, aliasActions: state.versionIndexReadyActions.value }), MARK_VERSION_INDEX_READY_CONFLICT: (state: MarkVersionIndexReadyConflict) => - Actions.fetchIndices(client, [state.currentAlias, state.versionAlias]), + Actions.fetchIndices({ client, indices: [state.currentAlias, state.versionAlias] }), LEGACY_SET_WRITE_BLOCK: (state: LegacySetWriteBlockState) => - Actions.setWriteBlock(client, state.legacyIndex), + Actions.setWriteBlock({ client, index: state.legacyIndex }), LEGACY_CREATE_REINDEX_TARGET: (state: LegacyCreateReindexTargetState) => - Actions.createIndex(client, state.sourceIndex.value, state.legacyReindexTargetMappings), + Actions.createIndex({ + client, + indexName: state.sourceIndex.value, + mappings: state.legacyReindexTargetMappings, + }), LEGACY_REINDEX: (state: LegacyReindexState) => - Actions.reindex( + Actions.reindex({ client, - state.legacyIndex, - state.sourceIndex.value, - state.preMigrationScript, - false, - state.unusedTypesQuery - ), + sourceIndex: state.legacyIndex, + targetIndex: state.sourceIndex.value, + reindexScript: state.preMigrationScript, + requireAlias: false, + unusedTypesQuery: state.unusedTypesQuery, + }), LEGACY_REINDEX_WAIT_FOR_TASK: (state: LegacyReindexWaitForTaskState) => - Actions.waitForReindexTask(client, state.legacyReindexTaskId, '60s'), + Actions.waitForReindexTask({ client, taskId: state.legacyReindexTaskId, timeout: '60s' }), LEGACY_DELETE: (state: LegacyDeleteState) => - Actions.updateAliases(client, state.legacyPreMigrationDoneActions), + Actions.updateAliases({ client, aliasActions: state.legacyPreMigrationDoneActions }), }; }; diff --git a/src/core/server/server.api.md b/src/core/server/server.api.md index 7f108dbeb0086..0c35177f51f99 100644 --- a/src/core/server/server.api.md +++ b/src/core/server/server.api.md @@ -874,6 +874,7 @@ export interface DeprecationsDetails { }; manualSteps?: string[]; }; + deprecationType?: 'config' | 'feature'; // (undocumented) documentationUrl?: string; level: 'warning' | 'critical' | 'fetch_error'; diff --git a/src/core/server/server.ts b/src/core/server/server.ts index 4d99368f9bf70..a31b9a061ac5d 100644 --- a/src/core/server/server.ts +++ b/src/core/server/server.ts @@ -193,8 +193,6 @@ export class Server { const deprecationsSetup = this.deprecations.setup({ http: httpSetup, - elasticsearch: elasticsearchServiceSetup, - coreUsageData: coreUsageDataSetup, }); const coreSetup: InternalCoreSetup = { diff --git a/src/dev/build/lib/version_info.ts b/src/dev/build/lib/version_info.ts index 01066cdeffc4c..6be34a54e24f8 100644 --- a/src/dev/build/lib/version_info.ts +++ b/src/dev/build/lib/version_info.ts @@ -28,7 +28,10 @@ export async function getVersionInfo({ isRelease, versionQualifier, pkg }: Optio ); return { - buildSha: (await execa('git', ['rev-parse', 'HEAD'])).stdout, + buildSha: + process.env.GIT_COMMIT || + process.env.BUILDKITE_COMMIT || + (await execa('git', ['rev-parse', 'HEAD'])).stdout, buildVersion, buildNumber: await getBuildNumber(), }; diff --git a/src/dev/precommit_hook/casing_check_config.js b/src/dev/precommit_hook/casing_check_config.js index d523f78a9f589..ba18c085b649d 100644 --- a/src/dev/precommit_hook/casing_check_config.js +++ b/src/dev/precommit_hook/casing_check_config.js @@ -41,6 +41,7 @@ export const IGNORE_FILE_GLOBS = [ '.ci/pipeline-library/**/*', 'packages/kbn-test/jest-preset.js', 
'test/package/Vagrantfile', + '**/test/**/fixtures/**/*', // filename must match language code which requires capital letters '**/translations/*.json', @@ -61,12 +62,13 @@ export const IGNORE_FILE_GLOBS = [ 'x-pack/plugins/apm/e2e/**/*', 'x-pack/plugins/maps/server/fonts/**/*', - // packages for the ingest manager's api integration tests could be valid semver which has dashes - 'x-pack/test/fleet_api_integration/apis/fixtures/test_packages/**/*', // Bazel default files '**/WORKSPACE.bazel', '**/BUILD.bazel', + + // Buildkite + '.buildkite/hooks/*', ]; /** @@ -95,7 +97,6 @@ export const IGNORE_DIRECTORY_GLOBS = [ ...KEBAB_CASE_DIRECTORY_GLOBS, 'src/babel-*', 'packages/*', - 'test/functional/fixtures/es_archiver/visualize_source-filters', 'packages/kbn-pm/src/utils/__fixtures__/*', 'x-pack/dev-tools', 'packages/kbn-optimizer/src/__fixtures__/mock_repo/x-pack', diff --git a/src/plugins/bfetch/common/batch.ts b/src/plugins/bfetch/common/batch.ts index a84d94b541ae5..59b012751c66d 100644 --- a/src/plugins/bfetch/common/batch.ts +++ b/src/plugins/bfetch/common/batch.ts @@ -19,3 +19,8 @@ export interface BatchResponseItem new Promise((resolve) => setImmediate(resolve)); const getPromiseState = (promise: Promise): Promise<'resolved' | 'rejected' | 'pending'> => Promise.race<'resolved' | 'rejected' | 'pending'>([ @@ -52,6 +54,7 @@ describe('createStreamingBatchedFunction()', () => { const fn = createStreamingBatchedFunction({ url: '/test', fetchStreaming, + compressionDisabled$: rxof(true), }); expect(typeof fn).toBe('function'); }); @@ -61,6 +64,7 @@ describe('createStreamingBatchedFunction()', () => { const fn = createStreamingBatchedFunction({ url: '/test', fetchStreaming, + compressionDisabled$: rxof(true), }); const res = fn({}); expect(typeof res.then).toBe('function'); @@ -74,6 +78,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); expect(fetchStreaming).toHaveBeenCalledTimes(0); @@ -93,6 +98,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); expect(fetchStreaming).toHaveBeenCalledTimes(0); @@ -107,6 +113,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); fn({ foo: 'bar' }); @@ -125,6 +132,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); fn({ foo: 'bar' }); @@ -146,14 +154,18 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); expect(fetchStreaming).toHaveBeenCalledTimes(0); fn({ foo: 'bar' }); + await flushPromises(); expect(fetchStreaming).toHaveBeenCalledTimes(0); fn({ baz: 'quix' }); + await flushPromises(); expect(fetchStreaming).toHaveBeenCalledTimes(0); fn({ full: 'yep' }); + await flushPromises(); expect(fetchStreaming).toHaveBeenCalledTimes(1); }); @@ -164,6 +176,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const abortController = new AbortController(); @@ -186,11 +199,13 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); fn({ a: '1' }); fn({ b: '2' }); fn({ c: '3' }); + await flushPromises(); 
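For context on the parameter threaded through these tests: `compressionDisabled$` is an Observable of booleans that the batched function forwards to `fetchStreaming`, defaulting to `of(false)` when omitted, and in `plugin.ts` it is derived from `uiSettings.get$(DISABLE_BFETCH_COMPRESSION)`. A brief sketch, assuming the test's `fetchStreaming` mock is in scope:

import { of } from 'rxjs';

// Opt out of response compression for this batched function by emitting true.
const fn = createStreamingBatchedFunction({
  url: '/test',
  fetchStreaming,
  maxItemAge: 5,
  flushOnMaxItems: 3,
  compressionDisabled$: of(true), // omitted => of(false), i.e. compression stays enabled
});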
expect(fetchStreaming.mock.calls[0][0]).toMatchObject({ url: '/test', @@ -209,13 +224,16 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); fn({ a: '1' }); fn({ b: '2' }); fn({ c: '3' }); + await flushPromises(); expect(fetchStreaming).toHaveBeenCalledTimes(1); fn({ d: '4' }); + await flushPromises(); await new Promise((r) => setTimeout(r, 6)); expect(fetchStreaming).toHaveBeenCalledTimes(2); }); @@ -229,6 +247,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = fn({ a: '1' }); @@ -246,8 +265,11 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); + await flushPromises(); + const promise1 = fn({ a: '1' }); const promise2 = fn({ b: '2' }); const promise3 = fn({ c: '3' }); @@ -287,6 +309,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = fn({ a: '1' }); @@ -314,6 +337,20 @@ describe('createStreamingBatchedFunction()', () => { expect(await promise3).toEqual({ foo: 'bar 2' }); }); + test('compression is false by default', async () => { + const { fetchStreaming } = setup(); + const fn = createStreamingBatchedFunction({ + url: '/test', + flushOnMaxItems: 1, + fetchStreaming, + }); + + fn({ a: '1' }); + + const dontCompress = await fetchStreaming.mock.calls[0][0].compressionDisabled$.toPromise(); + expect(dontCompress).toBe(false); + }); + test('resolves falsy results', async () => { const { fetchStreaming, stream } = setup(); const fn = createStreamingBatchedFunction({ @@ -321,6 +358,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = fn({ a: '1' }); @@ -362,6 +400,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise = fn({ a: '1' }); @@ -390,6 +429,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = of(fn({ a: '1' })); @@ -442,6 +482,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const abortController = new AbortController(); @@ -471,6 +512,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const abortController = new AbortController(); @@ -509,6 +551,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = of(fn({ a: '1' })); @@ -539,6 +582,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = of(fn({ a: '1' })); @@ -576,6 +620,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); const promise1 = of(fn({ a: '1' })); @@ -608,6 +653,7 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + 
compressionDisabled$: rxof(true), }); const promise1 = of(fn({ a: '1' })); @@ -644,7 +690,9 @@ describe('createStreamingBatchedFunction()', () => { fetchStreaming, maxItemAge: 5, flushOnMaxItems: 3, + compressionDisabled$: rxof(true), }); + await flushPromises(); const promise1 = of(fn({ a: '1' })); const promise2 = of(fn({ a: '2' })); diff --git a/src/plugins/bfetch/public/batching/create_streaming_batched_function.ts b/src/plugins/bfetch/public/batching/create_streaming_batched_function.ts index 2d81331f10a88..d5f955f517d13 100644 --- a/src/plugins/bfetch/public/batching/create_streaming_batched_function.ts +++ b/src/plugins/bfetch/public/batching/create_streaming_batched_function.ts @@ -6,16 +6,16 @@ * Side Public License, v 1. */ +import { Observable, of } from 'rxjs'; import { AbortError, abortSignalToPromise, defer } from '../../../kibana_utils/public'; import { ItemBufferParams, TimedItemBufferParams, createBatchedFunction, - BatchResponseItem, ErrorLike, + normalizeError, } from '../../common'; -import { fetchStreaming, split } from '../streaming'; -import { normalizeError } from '../../common'; +import { fetchStreaming } from '../streaming'; import { BatchedFunc, BatchItem } from './types'; export interface BatchedFunctionProtocolError extends ErrorLike { @@ -47,6 +47,11 @@ export interface StreamingBatchedFunctionParams { * before sending the batch request. */ maxItemAge?: TimedItemBufferParams['maxItemAge']; + + /** + * Disabled zlib compression of response chunks. + */ + compressionDisabled$?: Observable; } /** @@ -64,6 +69,7 @@ export const createStreamingBatchedFunction = ( fetchStreaming: fetchStreamingInjected = fetchStreaming, flushOnMaxItems = 25, maxItemAge = 10, + compressionDisabled$ = of(false), } = params; const [fn] = createBatchedFunction({ onCall: (payload: Payload, signal?: AbortSignal) => { @@ -119,6 +125,7 @@ export const createStreamingBatchedFunction = ( body: JSON.stringify({ batch }), method: 'POST', signal: abortController.signal, + compressionDisabled$, }); const handleStreamError = (error: any) => { @@ -127,10 +134,10 @@ export const createStreamingBatchedFunction = ( for (const { future } of items) future.reject(normalizedError); }; - stream.pipe(split('\n')).subscribe({ + stream.subscribe({ next: (json: string) => { try { - const response = JSON.parse(json) as BatchResponseItem; + const response = JSON.parse(json); if (response.error) { items[response.id].future.reject(response.error); } else if (response.result !== undefined) { diff --git a/src/plugins/bfetch/public/batching/index.ts b/src/plugins/bfetch/public/batching/index.ts new file mode 100644 index 0000000000000..115fd84cbe979 --- /dev/null +++ b/src/plugins/bfetch/public/batching/index.ts @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +export { + createStreamingBatchedFunction, + StreamingBatchedFunctionParams, +} from './create_streaming_batched_function'; diff --git a/src/plugins/bfetch/public/plugin.ts b/src/plugins/bfetch/public/plugin.ts index ed97d468eec0b..f97a91a0e70d3 100644 --- a/src/plugins/bfetch/public/plugin.ts +++ b/src/plugins/bfetch/public/plugin.ts @@ -7,12 +7,11 @@ */ import { CoreStart, PluginInitializerContext, CoreSetup, Plugin } from 'src/core/public'; +import { from, Observable, of } from 'rxjs'; +import { switchMap } from 'rxjs/operators'; import { fetchStreaming as fetchStreamingStatic, FetchStreamingParams } from './streaming'; -import { removeLeadingSlash } from '../common'; -import { - createStreamingBatchedFunction, - StreamingBatchedFunctionParams, -} from './batching/create_streaming_batched_function'; +import { DISABLE_BFETCH_COMPRESSION, removeLeadingSlash } from '../common'; +import { createStreamingBatchedFunction, StreamingBatchedFunctionParams } from './batching'; import { BatchedFunc } from './batching/types'; // eslint-disable-next-line @@ -43,12 +42,23 @@ export class BfetchPublicPlugin constructor(private readonly initializerContext: PluginInitializerContext) {} - public setup(core: CoreSetup, plugins: BfetchPublicSetupDependencies): BfetchPublicSetup { + public setup( + core: CoreSetup, + plugins: BfetchPublicSetupDependencies + ): BfetchPublicSetup { const { version } = this.initializerContext.env.packageInfo; const basePath = core.http.basePath.get(); - const fetchStreaming = this.fetchStreaming(version, basePath); - const batchedFunction = this.batchedFunction(fetchStreaming); + const compressionDisabled$ = from(core.getStartServices()).pipe( + switchMap((deps) => { + return of(deps[0]); + }), + switchMap((coreStart) => { + return coreStart.uiSettings.get$(DISABLE_BFETCH_COMPRESSION); + }) + ); + const fetchStreaming = this.fetchStreaming(version, basePath, compressionDisabled$); + const batchedFunction = this.batchedFunction(fetchStreaming, compressionDisabled$); this.contract = { fetchStreaming, @@ -66,7 +76,8 @@ export class BfetchPublicPlugin private fetchStreaming = ( version: string, - basePath: string + basePath: string, + compressionDisabled$: Observable ): BfetchPublicSetup['fetchStreaming'] => (params) => fetchStreamingStatic({ ...params, @@ -76,13 +87,16 @@ export class BfetchPublicPlugin 'kbn-version': version, ...(params.headers || {}), }, + compressionDisabled$, }); private batchedFunction = ( - fetchStreaming: BfetchPublicContract['fetchStreaming'] + fetchStreaming: BfetchPublicContract['fetchStreaming'], + compressionDisabled$: Observable ): BfetchPublicContract['batchedFunction'] => (params) => createStreamingBatchedFunction({ ...params, + compressionDisabled$, fetchStreaming: params.fetchStreaming || fetchStreaming, }); } diff --git a/src/plugins/bfetch/public/streaming/fetch_streaming.test.ts b/src/plugins/bfetch/public/streaming/fetch_streaming.test.ts index e804b3ea94227..a5d066f6d9a24 100644 --- a/src/plugins/bfetch/public/streaming/fetch_streaming.test.ts +++ b/src/plugins/bfetch/public/streaming/fetch_streaming.test.ts @@ -8,6 +8,15 @@ import { fetchStreaming } from './fetch_streaming'; import { mockXMLHttpRequest } from '../test_helpers/xhr'; +import { of } from 'rxjs'; +import { promisify } from 'util'; +import { deflate } from 'zlib'; +const pDeflate = promisify(deflate); + +const compressResponse = async (resp: any) => { + const gzipped = await pDeflate(JSON.stringify(resp)); + return gzipped.toString('base64'); +}; const tick = () => new 
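The plugin setup above derives the compression toggle from an advanced setting before start services exist. A sketch of that wiring in isolation, assuming `CoreSetup` from 'src/core/public' and the `DISABLE_BFETCH_COMPRESSION` key from '../common' as used in the patch:

import { from, Observable } from 'rxjs';
import { switchMap } from 'rxjs/operators';
import { CoreSetup } from 'src/core/public';
import { DISABLE_BFETCH_COMPRESSION } from '../common';

function getCompressionDisabled$(core: CoreSetup): Observable<boolean> {
  return from(core.getStartServices()).pipe(
    // getStartServices() resolves once start services are available; the first tuple
    // element is CoreStart, whose uiSettings client exposes the advanced setting.
    switchMap(([coreStart]) => coreStart.uiSettings.get$<boolean>(DISABLE_BFETCH_COMPRESSION))
  );
}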
Promise((resolve) => setTimeout(resolve, 1)); @@ -21,6 +30,7 @@ test('returns XHR request', () => { setup(); const { xhr } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); expect(typeof xhr.readyState).toBe('number'); }); @@ -29,6 +39,7 @@ test('returns stream', () => { setup(); const { stream } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); expect(typeof stream.subscribe).toBe('function'); }); @@ -37,6 +48,7 @@ test('promise resolves when request completes', async () => { const env = setup(); const { stream } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); let resolved = false; @@ -65,10 +77,90 @@ test('promise resolves when request completes', async () => { expect(resolved).toBe(true); }); -test('streams incoming text as it comes through', async () => { +test('promise resolves when compressed request completes', async () => { + const env = setup(); + const { stream } = fetchStreaming({ + url: 'http://example.com', + compressionDisabled$: of(false), + }); + + let resolved = false; + let result; + stream.toPromise().then((r) => { + resolved = true; + result = r; + }); + + await tick(); + expect(resolved).toBe(false); + + const msg = { foo: 'bar' }; + + // Whole message in a response + (env.xhr as any).responseText = `${await compressResponse(msg)}\n`; + env.xhr.onprogress!({} as any); + + await tick(); + expect(resolved).toBe(false); + + (env.xhr as any).readyState = 4; + (env.xhr as any).status = 200; + env.xhr.onreadystatechange!({} as any); + + await tick(); + expect(resolved).toBe(true); + expect(result).toStrictEqual(JSON.stringify(msg)); +}); + +test('promise resolves when compressed chunked request completes', async () => { const env = setup(); const { stream } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(false), + }); + + let resolved = false; + let result; + stream.toPromise().then((r) => { + resolved = true; + result = r; + }); + + await tick(); + expect(resolved).toBe(false); + + const msg = { veg: 'tomato' }; + const msgToCut = await compressResponse(msg); + const part1 = msgToCut.substr(0, 3); + + // Message and a half in a response + (env.xhr as any).responseText = part1; + env.xhr.onprogress!({} as any); + + await tick(); + expect(resolved).toBe(false); + + // Half a message in a response + (env.xhr as any).responseText = `${msgToCut}\n`; + env.xhr.onprogress!({} as any); + + await tick(); + expect(resolved).toBe(false); + + (env.xhr as any).readyState = 4; + (env.xhr as any).status = 200; + env.xhr.onreadystatechange!({} as any); + + await tick(); + expect(resolved).toBe(true); + expect(result).toStrictEqual(JSON.stringify(msg)); +}); + +test('streams incoming text as it comes through, according to separators', async () => { + const env = setup(); + const { stream } = fetchStreaming({ + url: 'http://example.com', + compressionDisabled$: of(true), }); const spy = jest.fn(); @@ -80,16 +172,22 @@ test('streams incoming text as it comes through', async () => { (env.xhr as any).responseText = 'foo'; env.xhr.onprogress!({} as any); + await tick(); + expect(spy).toHaveBeenCalledTimes(0); + + (env.xhr as any).responseText = 'foo\nbar'; + env.xhr.onprogress!({} as any); + await tick(); expect(spy).toHaveBeenCalledTimes(1); expect(spy).toHaveBeenCalledWith('foo'); - (env.xhr as any).responseText = 'foo\nbar'; + (env.xhr as any).responseText = 'foo\nbar\n'; env.xhr.onprogress!({} as any); await tick(); expect(spy).toHaveBeenCalledTimes(2); - 
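The new tests simulate the wire format used for compressed responses: each message is zlib-deflated, base64-encoded, and newline-delimited. A self-contained round-trip sketch of that encoding using only Node's zlib (an illustration of the format, not the plugin code itself):

import { promisify } from 'util';
import { deflate, inflate } from 'zlib';

const pDeflate = promisify(deflate);
const pInflate = promisify(inflate);

async function encodeChunk(message: object): Promise<string> {
  const compressed = await pDeflate(JSON.stringify(message));
  return `${compressed.toString('base64')}\n`;
}

async function decodeChunk(chunk: string): Promise<object> {
  const decompressed = await pInflate(Buffer.from(chunk.trim(), 'base64'));
  return JSON.parse(decompressed.toString());
}

// encodeChunk({ foo: 'bar' }) yields base64 text ending in '\n';
// decodeChunk(...) recovers { foo: 'bar' } again.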
expect(spy).toHaveBeenCalledWith('\nbar'); + expect(spy).toHaveBeenCalledWith('bar'); (env.xhr as any).readyState = 4; (env.xhr as any).status = 200; @@ -103,6 +201,7 @@ test('completes stream observable when request finishes', async () => { const env = setup(); const { stream } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); const spy = jest.fn(); @@ -127,6 +226,7 @@ test('completes stream observable when aborted', async () => { const { stream } = fetchStreaming({ url: 'http://example.com', signal: abort.signal, + compressionDisabled$: of(true), }); const spy = jest.fn(); @@ -152,6 +252,7 @@ test('promise throws when request errors', async () => { const env = setup(); const { stream } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); const spy = jest.fn(); @@ -178,6 +279,7 @@ test('stream observable errors when request errors', async () => { const env = setup(); const { stream } = fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); const spy = jest.fn(); @@ -210,6 +312,7 @@ test('sets custom headers', async () => { 'Content-Type': 'text/plain', Authorization: 'Bearer 123', }, + compressionDisabled$: of(true), }); expect(env.xhr.setRequestHeader).toHaveBeenCalledWith('Content-Type', 'text/plain'); @@ -223,6 +326,7 @@ test('uses credentials', async () => { fetchStreaming({ url: 'http://example.com', + compressionDisabled$: of(true), }); expect(env.xhr.withCredentials).toBe(true); @@ -238,6 +342,7 @@ test('opens XHR request and sends specified body', async () => { url: 'http://elastic.co', method: 'GET', body: 'foobar', + compressionDisabled$: of(true), }); expect(env.xhr.open).toHaveBeenCalledTimes(1); @@ -250,6 +355,7 @@ test('uses POST request method by default', async () => { const env = setup(); fetchStreaming({ url: 'http://elastic.co', + compressionDisabled$: of(true), }); expect(env.xhr.open).toHaveBeenCalledWith('POST', 'http://elastic.co'); }); diff --git a/src/plugins/bfetch/public/streaming/fetch_streaming.ts b/src/plugins/bfetch/public/streaming/fetch_streaming.ts index d68e4d01b44f5..1af35ef68fb85 100644 --- a/src/plugins/bfetch/public/streaming/fetch_streaming.ts +++ b/src/plugins/bfetch/public/streaming/fetch_streaming.ts @@ -6,7 +6,11 @@ * Side Public License, v 1. 
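The reworked "according to separators" test expects a chunk to be emitted only after its '\n' delimiter has arrived. A minimal, self-contained illustration of that buffering rule (this is not the plugin's actual `split` operator, just the behavior the test encodes):

function makeDelimiterBuffer(delimiter: string, emit: (msg: string) => void) {
  let buffer = '';
  return (incoming: string) => {
    buffer += incoming;
    const pieces = buffer.split(delimiter);
    buffer = pieces.pop() ?? ''; // keep the trailing partial message buffered
    pieces.filter((piece) => piece.length > 0).forEach((piece) => emit(piece));
  };
}

// push('foo') emits nothing; push('\nbar') emits 'foo'; push('\n') emits 'bar',
// mirroring the expectations in the test above.
const push = makeDelimiterBuffer('\n', (msg) => console.log(msg));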
*/ +import { Observable, of } from 'rxjs'; +import { map, share, switchMap } from 'rxjs/operators'; +import { inflateResponse } from '.'; import { fromStreamingXhr } from './from_streaming_xhr'; +import { split } from './split'; export interface FetchStreamingParams { url: string; @@ -14,6 +18,7 @@ export interface FetchStreamingParams { method?: 'GET' | 'POST'; body?: string; signal?: AbortSignal; + compressionDisabled$?: Observable; } /** @@ -26,23 +31,49 @@ export function fetchStreaming({ method = 'POST', body = '', signal, + compressionDisabled$ = of(false), }: FetchStreamingParams) { const xhr = new window.XMLHttpRequest(); - // Begin the request - xhr.open(method, url); - xhr.withCredentials = true; + const msgStream = compressionDisabled$.pipe( + switchMap((compressionDisabled) => { + // Begin the request + xhr.open(method, url); + xhr.withCredentials = true; - // Set the HTTP headers - Object.entries(headers).forEach(([k, v]) => xhr.setRequestHeader(k, v)); + if (!compressionDisabled) { + headers['X-Chunk-Encoding'] = 'deflate'; + } - const stream = fromStreamingXhr(xhr, signal); + // Set the HTTP headers + Object.entries(headers).forEach(([k, v]) => xhr.setRequestHeader(k, v)); - // Send the payload to the server - xhr.send(body); + const stream = fromStreamingXhr(xhr, signal); + + // Send the payload to the server + xhr.send(body); + + // Return a stream of chunked decompressed messages + return stream.pipe( + split('\n'), + map((msg) => { + return compressionDisabled ? msg : inflateResponse(msg); + }) + ); + }), + share() + ); + + // start execution + const msgStreamSub = msgStream.subscribe({ + error: (e) => {}, + complete: () => { + msgStreamSub.unsubscribe(); + }, + }); return { xhr, - stream, + stream: msgStream, }; } diff --git a/src/plugins/bfetch/public/streaming/index.ts b/src/plugins/bfetch/public/streaming/index.ts index afb442feffb29..545cae87aa3d6 100644 --- a/src/plugins/bfetch/public/streaming/index.ts +++ b/src/plugins/bfetch/public/streaming/index.ts @@ -9,3 +9,4 @@ export * from './split'; export * from './from_streaming_xhr'; export * from './fetch_streaming'; +export { inflateResponse } from './inflate_response'; diff --git a/src/plugins/bfetch/public/streaming/inflate_response.ts b/src/plugins/bfetch/public/streaming/inflate_response.ts new file mode 100644 index 0000000000000..73cb52285987c --- /dev/null +++ b/src/plugins/bfetch/public/streaming/inflate_response.ts @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
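With the fetch_streaming rework above, the returned stream emits one decoded message per newline-separated chunk, inflating each chunk unless the `compressionDisabled$` observable opts out. A usage sketch; the URL, payload, and import path are assumptions for illustration:

import { of } from 'rxjs';
import { fetchStreaming } from './fetch_streaming';

const { stream, xhr } = fetchStreaming({
  url: '/api/example/stream',          // hypothetical endpoint
  body: JSON.stringify({ batch: [] }), // hypothetical payload
  compressionDisabled$: of(false),     // ask for deflate-compressed chunks
});

stream.subscribe({
  next: (json) => console.log('message', JSON.parse(json)),
  complete: () => console.log('request finished with status', xhr.status),
});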
+ */ + +import { unzlibSync, strFromU8 } from 'fflate'; + +export function inflateResponse(response: string) { + const buff = Buffer.from(response, 'base64'); + const unzip = unzlibSync(buff); + return strFromU8(unzip); +} diff --git a/src/plugins/bfetch/server/plugin.ts b/src/plugins/bfetch/server/plugin.ts index 18f0813260f03..7fd46e2f6cc44 100644 --- a/src/plugins/bfetch/server/plugin.ts +++ b/src/plugins/bfetch/server/plugin.ts @@ -16,6 +16,7 @@ import type { RouteMethod, RequestHandler, RequestHandlerContext, + StartServicesAccessor, } from 'src/core/server'; import { schema } from '@kbn/config-schema'; import { Subject } from 'rxjs'; @@ -28,7 +29,8 @@ import { normalizeError, } from '../common'; import { StreamingRequestHandler } from './types'; -import { createNDJSONStream } from './streaming'; +import { createStream } from './streaming'; +import { getUiSettings } from './ui_settings'; // eslint-disable-next-line export interface BfetchServerSetupDependencies {} @@ -112,9 +114,19 @@ export class BfetchServerPlugin public setup(core: CoreSetup, plugins: BfetchServerSetupDependencies): BfetchServerSetup { const logger = this.initializerContext.logger.get(); const router = core.http.createRouter(); - const addStreamingResponseRoute = this.addStreamingResponseRoute({ router, logger }); + + core.uiSettings.register(getUiSettings()); + + const addStreamingResponseRoute = this.addStreamingResponseRoute({ + getStartServices: core.getStartServices, + router, + logger, + }); const addBatchProcessingRoute = this.addBatchProcessingRoute(addStreamingResponseRoute); - const createStreamingRequestHandler = this.createStreamingRequestHandler({ logger }); + const createStreamingRequestHandler = this.createStreamingRequestHandler({ + getStartServices: core.getStartServices, + logger, + }); return { addBatchProcessingRoute, @@ -129,10 +141,16 @@ export class BfetchServerPlugin public stop() {} + private getCompressionDisabled(request: KibanaRequest) { + return request.headers['x-chunk-encoding'] !== 'deflate'; + } + private addStreamingResponseRoute = ({ + getStartServices, router, logger, }: { + getStartServices: StartServicesAccessor; router: ReturnType; logger: Logger; }): BfetchServerSetup['addStreamingResponseRoute'] => (path, handler) => { @@ -146,9 +164,10 @@ export class BfetchServerPlugin async (context, request, response) => { const handlerInstance = handler(request); const data = request.body; + const compressionDisabled = this.getCompressionDisabled(request); return response.ok({ headers: streamingHeaders, - body: createNDJSONStream(handlerInstance.getResponseStream(data), logger), + body: createStream(handlerInstance.getResponseStream(data), logger, compressionDisabled), }); } ); @@ -156,17 +175,20 @@ export class BfetchServerPlugin private createStreamingRequestHandler = ({ logger, + getStartServices, }: { logger: Logger; + getStartServices: StartServicesAccessor; }): BfetchServerSetup['createStreamingRequestHandler'] => (streamHandler) => async ( context, request, response ) => { const response$ = await streamHandler(context, request); + const compressionDisabled = this.getCompressionDisabled(request); return response.ok({ headers: streamingHeaders, - body: createNDJSONStream(response$, logger), + body: createStream(response$, logger, compressionDisabled), }); }; diff --git a/src/plugins/bfetch/server/streaming/create_compressed_stream.ts b/src/plugins/bfetch/server/streaming/create_compressed_stream.ts new file mode 100644 index 0000000000000..6814ed1dd7955 --- /dev/null +++ 
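A round-trip sketch for the new inflateResponse helper, using fflate's encoding counterparts (zlibSync/strToU8) to produce a chunk like a zlib-compressing server would. This is illustrative only; the real server side in this patch uses Node's zlib, as shown further down:

import { zlibSync, strToU8 } from 'fflate';
import { inflateResponse } from './inflate_response';

const original = JSON.stringify({ id: 0, result: { hello: 'world' } });
// Deflate, then base64, matching the wire format of each streamed chunk.
const chunk = Buffer.from(zlibSync(strToU8(original))).toString('base64');

// inflateResponse reverses both steps and returns the original JSON text.
console.log(inflateResponse(chunk) === original); // true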
b/src/plugins/bfetch/server/streaming/create_compressed_stream.ts @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import { promisify } from 'util'; +import { Observable } from 'rxjs'; +import { catchError, concatMap, finalize } from 'rxjs/operators'; +import { Logger } from 'src/core/server'; +import { Stream, PassThrough } from 'stream'; +import { constants, deflate } from 'zlib'; + +const delimiter = '\n'; +const pDeflate = promisify(deflate); + +async function zipMessageToStream(output: PassThrough, message: string) { + return new Promise(async (resolve, reject) => { + try { + const gzipped = await pDeflate(message, { + flush: constants.Z_SYNC_FLUSH, + }); + output.write(gzipped.toString('base64')); + output.write(delimiter); + resolve(undefined); + } catch (err) { + reject(err); + } + }); +} + +export const createCompressedStream = ( + results: Observable, + logger: Logger +): Stream => { + const output = new PassThrough(); + + const sub = results + .pipe( + concatMap((message: Response) => { + const strMessage = JSON.stringify(message); + return zipMessageToStream(output, strMessage); + }), + catchError((e) => { + logger.error('Could not serialize or stream a message.'); + logger.error(e); + throw e; + }), + finalize(() => { + output.end(); + sub.unsubscribe(); + }) + ) + .subscribe(); + + return output; +}; diff --git a/src/plugins/bfetch/server/streaming/create_stream.ts b/src/plugins/bfetch/server/streaming/create_stream.ts new file mode 100644 index 0000000000000..7d6981294341b --- /dev/null +++ b/src/plugins/bfetch/server/streaming/create_stream.ts @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import { Logger } from 'kibana/server'; +import { Stream } from 'stream'; +import { Observable } from 'rxjs'; +import { createCompressedStream } from './create_compressed_stream'; +import { createNDJSONStream } from './create_ndjson_stream'; + +export function createStream( + response$: Observable, + logger: Logger, + compressionDisabled: boolean +): Stream { + return compressionDisabled + ? createNDJSONStream(response$, logger) + : createCompressedStream(response$, logger); +} diff --git a/src/plugins/bfetch/server/streaming/index.ts b/src/plugins/bfetch/server/streaming/index.ts index 2c31cc329295d..dfd472b5034a1 100644 --- a/src/plugins/bfetch/server/streaming/index.ts +++ b/src/plugins/bfetch/server/streaming/index.ts @@ -7,3 +7,5 @@ */ export * from './create_ndjson_stream'; +export * from './create_compressed_stream'; +export * from './create_stream'; diff --git a/src/plugins/bfetch/server/ui_settings.ts b/src/plugins/bfetch/server/ui_settings.ts new file mode 100644 index 0000000000000..cf7b13a9af182 --- /dev/null +++ b/src/plugins/bfetch/server/ui_settings.ts @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
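Tying the server pieces together: the client advertises 'X-Chunk-Encoding: deflate' when compression is enabled, and the server picks the matching stream factory via createStream. A sketch of that selection in isolation (the helper name and loose typings are assumptions; the patch inlines this logic in the route handlers):

import { Observable } from 'rxjs';
import { Logger } from 'kibana/server';
import { createStream } from './streaming';

// Mirrors BfetchServerPlugin.getCompressionDisabled: compression is used only when the
// client sent the 'X-Chunk-Encoding: deflate' header added on the browser side.
function streamForRequest(
  headers: Record<string, string | string[] | undefined>,
  response$: Observable<unknown>,
  logger: Logger
) {
  const compressionDisabled = headers['x-chunk-encoding'] !== 'deflate';
  return createStream(response$, logger, compressionDisabled);
}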
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import { i18n } from '@kbn/i18n'; +import { UiSettingsParams } from 'src/core/server'; +import { schema } from '@kbn/config-schema'; +import { DISABLE_BFETCH_COMPRESSION } from '../common'; + +export function getUiSettings(): Record> { + return { + [DISABLE_BFETCH_COMPRESSION]: { + name: i18n.translate('bfetch.disableBfetchCompression', { + defaultMessage: 'Disable Batch Compression', + }), + value: false, + description: i18n.translate('bfetch.disableBfetchCompressionDesc', { + defaultMessage: + 'Disable batch compression. This allows you to debug individual requests, but increases response size.', + }), + schema: schema.boolean(), + category: [], + }, + }; +} diff --git a/src/plugins/dashboard/public/application/dashboard_app.tsx b/src/plugins/dashboard/public/application/dashboard_app.tsx index fa86fb81bd407..93310bb821361 100644 --- a/src/plugins/dashboard/public/application/dashboard_app.tsx +++ b/src/plugins/dashboard/public/application/dashboard_app.tsx @@ -295,13 +295,6 @@ export function DashboardApp({ }; }, [dashboardStateManager, dashboardContainer, onAppLeave, embeddable]); - // clear search session when leaving dashboard route - useEffect(() => { - return () => { - data.search.session.clear(); - }; - }, [data.search.session]); - return ( <> {savedDashboard && dashboardStateManager && dashboardContainer && viewMode && ( diff --git a/src/plugins/dashboard/public/application/dashboard_router.tsx b/src/plugins/dashboard/public/application/dashboard_router.tsx index d5eddf6bb4864..be279ed98492e 100644 --- a/src/plugins/dashboard/public/application/dashboard_router.tsx +++ b/src/plugins/dashboard/public/application/dashboard_router.tsx @@ -198,8 +198,14 @@ export async function mountApp({ return ; }; - // make sure the index pattern list is up to date - await dataStart.indexPatterns.clearCache(); + const hasEmbeddableIncoming = Boolean( + dashboardServices.embeddable + .getStateTransfer() + .getIncomingEmbeddablePackage(DashboardConstants.DASHBOARDS_ID, false) + ); + if (!hasEmbeddableIncoming) { + dataStart.indexPatterns.clearCache(); + } // dispatch synthetic hash change event to update hash history objects // this is necessary because hash updates triggered by using popState won't trigger this event naturally. 
@@ -242,7 +248,6 @@ export async function mountApp({ } render(app, element); return () => { - dataStart.search.session.clear(); unlistenParentHistory(); unmountComponentAtNode(element); appUnMounted(); diff --git a/src/plugins/dashboard/public/application/hooks/use_dashboard_container.ts b/src/plugins/dashboard/public/application/hooks/use_dashboard_container.ts index 0be29f67a9492..d715fb70ec91a 100644 --- a/src/plugins/dashboard/public/application/hooks/use_dashboard_container.ts +++ b/src/plugins/dashboard/public/application/hooks/use_dashboard_container.ts @@ -85,6 +85,7 @@ export const useDashboardContainer = ({ let canceled = false; let pendingContainer: DashboardContainer | ErrorEmbeddable | null | undefined; (async function createContainer() { + const existingSession = searchSession.getSessionId(); pendingContainer = await dashboardFactory.create( getDashboardContainerInput({ isEmbeddedExternally: Boolean(isEmbeddedExternally), @@ -92,7 +93,9 @@ export const useDashboardContainer = ({ dashboardStateManager, incomingEmbeddable, query, - searchSessionId: searchSessionIdFromURL ?? searchSession.start(), + searchSessionId: + searchSessionIdFromURL ?? + (existingSession && incomingEmbeddable ? existingSession : searchSession.start()), }) ); diff --git a/src/plugins/dashboard/public/application/listing/dashboard_listing.tsx b/src/plugins/dashboard/public/application/listing/dashboard_listing.tsx index db0404595af6c..e2c11d614d797 100644 --- a/src/plugins/dashboard/public/application/listing/dashboard_listing.tsx +++ b/src/plugins/dashboard/public/application/listing/dashboard_listing.tsx @@ -83,6 +83,11 @@ export const DashboardListing = ({ }; }, [title, savedObjectsClient, redirectTo, data.query, kbnUrlStateStorage]); + // clear dangling session because they are not required here + useEffect(() => { + data.search.session.clear(); + }, [data.search.session]); + const hideWriteControls = dashboardCapabilities.hideWriteControls; const listingLimit = savedObjects.settings.getListingLimit(); const defaultFilter = title ? 
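The dashboard container change above reuses the current search session when an embeddable is being handed over to the dashboard instead of always starting a new one. The decision, extracted as a stand-alone helper for clarity (the helper and its parameter names are assumptions; the patch inlines this expression):

function resolveSearchSessionId(opts: {
  searchSessionIdFromURL?: string;
  existingSession?: string;
  hasIncomingEmbeddable: boolean;
  startNewSession: () => string;
}): string {
  const { searchSessionIdFromURL, existingSession, hasIncomingEmbeddable, startNewSession } = opts;
  // 1. a session id restored from the URL always wins;
  // 2. otherwise keep the current session if an embeddable is being transferred in;
  // 3. otherwise start a fresh session.
  return (
    searchSessionIdFromURL ??
    (existingSession && hasIncomingEmbeddable ? existingSession : startNewSession())
  );
}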
`"${title}"` : ''; diff --git a/src/plugins/data/common/es_query/es_query/build_es_query.test.ts b/src/plugins/data/common/es_query/es_query/build_es_query.test.ts index c6d923f4505f0..fa9a2c85aaef5 100644 --- a/src/plugins/data/common/es_query/es_query/build_es_query.test.ts +++ b/src/plugins/data/common/es_query/es_query/build_es_query.test.ts @@ -39,12 +39,16 @@ describe('build query', () => { { query: 'extension:jpg', language: 'kuery' }, { query: 'bar:baz', language: 'lucene' }, ] as Query[]; - const filters = [ - { - match_all: {}, - meta: { type: 'match_all' }, - } as MatchAllFilter, - ]; + const filters = { + match: { + a: 'b', + }, + meta: { + alias: '', + disabled: false, + negate: false, + }, + }; const config = { allowLeadingWildcards: true, queryStringOptions: {}, @@ -56,7 +60,11 @@ describe('build query', () => { must: [decorateQuery(luceneStringToDsl('bar:baz'), config.queryStringOptions)], filter: [ toElasticsearchQuery(fromKueryExpression('extension:jpg'), indexPattern), - { match_all: {} }, + { + match: { + a: 'b', + }, + }, ], should: [], must_not: [], @@ -71,9 +79,15 @@ describe('build query', () => { it('should accept queries and filters as either single objects or arrays', () => { const queries = { query: 'extension:jpg', language: 'lucene' } as Query; const filters = { - match_all: {}, - meta: { type: 'match_all' }, - } as MatchAllFilter; + match: { + a: 'b', + }, + meta: { + alias: '', + disabled: false, + negate: false, + }, + }; const config = { allowLeadingWildcards: true, queryStringOptions: {}, @@ -83,7 +97,13 @@ describe('build query', () => { const expectedResult = { bool: { must: [decorateQuery(luceneStringToDsl('extension:jpg'), config.queryStringOptions)], - filter: [{ match_all: {} }], + filter: [ + { + match: { + a: 'b', + }, + }, + ], should: [], must_not: [], }, @@ -94,6 +114,49 @@ describe('build query', () => { expect(result).toEqual(expectedResult); }); + it('should remove match_all clauses', () => { + const filters = [ + { + match_all: {}, + meta: { type: 'match_all' }, + } as MatchAllFilter, + { + match: { + a: 'b', + }, + meta: { + alias: '', + disabled: false, + negate: false, + }, + }, + ]; + const config = { + allowLeadingWildcards: true, + queryStringOptions: {}, + ignoreFilterIfFieldNotInIndex: false, + }; + + const expectedResult = { + bool: { + must: [], + filter: [ + { + match: { + a: 'b', + }, + }, + ], + should: [], + must_not: [], + }, + }; + + const result = buildEsQuery(indexPattern, [], filters, config); + + expect(result).toEqual(expectedResult); + }); + it('should use the default time zone set in the Advanced Settings in queries and filters', () => { const queries = [ { query: '@timestamp:"2019-03-23T13:18:00"', language: 'kuery' }, @@ -122,7 +185,6 @@ describe('build query', () => { indexPattern, config ), - { match_all: {} }, ], should: [], must_not: [], diff --git a/src/plugins/data/common/es_query/es_query/build_es_query.ts b/src/plugins/data/common/es_query/es_query/build_es_query.ts index 18b360de9aaa6..45724796c3518 100644 --- a/src/plugins/data/common/es_query/es_query/build_es_query.ts +++ b/src/plugins/data/common/es_query/es_query/build_es_query.ts @@ -6,7 +6,7 @@ * Side Public License, v 1. 
*/ -import { groupBy, has } from 'lodash'; +import { groupBy, has, isEqual } from 'lodash'; import { buildQueryFromKuery } from './from_kuery'; import { buildQueryFromFilters } from './from_filters'; import { buildQueryFromLucene } from './from_lucene'; @@ -21,6 +21,12 @@ export interface EsQueryConfig { dateFormatTZ?: string; } +function removeMatchAll(filters: T[]) { + return filters.filter( + (filter) => !filter || typeof filter !== 'object' || !isEqual(filter, { match_all: {} }) + ); +} + /** * @param indexPattern * @param queries - a query object or array of query objects. Each query has a language property and a query property. @@ -63,9 +69,9 @@ export function buildEsQuery( return { bool: { - must: [...kueryQuery.must, ...luceneQuery.must, ...filterQuery.must], - filter: [...kueryQuery.filter, ...luceneQuery.filter, ...filterQuery.filter], - should: [...kueryQuery.should, ...luceneQuery.should, ...filterQuery.should], + must: removeMatchAll([...kueryQuery.must, ...luceneQuery.must, ...filterQuery.must]), + filter: removeMatchAll([...kueryQuery.filter, ...luceneQuery.filter, ...filterQuery.filter]), + should: removeMatchAll([...kueryQuery.should, ...luceneQuery.should, ...filterQuery.should]), must_not: [...kueryQuery.must_not, ...luceneQuery.must_not, ...filterQuery.must_not], }, }; diff --git a/src/plugins/data/common/field_formats/converters/duration.test.ts b/src/plugins/data/common/field_formats/converters/duration.test.ts index fc019720425df..72551f4b7b236 100644 --- a/src/plugins/data/common/field_formats/converters/duration.test.ts +++ b/src/plugins/data/common/field_formats/converters/duration.test.ts @@ -139,17 +139,182 @@ describe('Duration Format', () => { ], }); + testCase({ + inputFormat: 'nanoseconds', + outputFormat: 'humanizePrecise', + outputPrecision: 2, + showSuffix: true, + fixtures: [ + { + input: 1988, + output: '0.00 Milliseconds', + }, + { + input: 658, + output: '0.00 Milliseconds', + }, + { + input: 3857, + output: '0.00 Milliseconds', + }, + ], + }); + + testCase({ + inputFormat: 'microseconds', + outputFormat: 'humanizePrecise', + outputPrecision: 2, + showSuffix: true, + fixtures: [ + { + input: 1988, + output: '1.99 Milliseconds', + }, + { + input: 658, + output: '0.66 Milliseconds', + }, + { + input: 3857, + output: '3.86 Milliseconds', + }, + ], + }); + + testCase({ + inputFormat: 'microseconds', + outputFormat: 'humanizePrecise', + outputPrecision: 1, + showSuffix: true, + fixtures: [ + { + input: 1988, + output: '2.0 Milliseconds', + }, + { + input: 0, + output: '0.0 Milliseconds', + }, + { + input: 658, + output: '0.7 Milliseconds', + }, + { + input: 3857, + output: '3.9 Milliseconds', + }, + ], + }); + + testCase({ + inputFormat: 'seconds', + outputFormat: 'humanizePrecise', + outputPrecision: 0, + showSuffix: true, + fixtures: [ + { + input: 600, + output: '10 Minutes', + }, + { + input: 30, + output: '30 Seconds', + }, + { + input: 3000, + output: '50 Minutes', + }, + ], + }); + + testCase({ + inputFormat: 'milliseconds', + outputFormat: 'humanizePrecise', + outputPrecision: 0, + showSuffix: true, + useShortSuffix: true, + fixtures: [ + { + input: -123, + output: '-123 ms', + }, + { + input: 1, + output: '1 ms', + }, + { + input: 600, + output: '600 ms', + }, + { + input: 30, + output: '30 ms', + }, + { + input: 3000, + output: '3 s', + }, + { + input: 300000, + output: '5 min', + }, + { + input: 30000000, + output: '8 h', + }, + { + input: 90000000, + output: '1 d', + }, + { + input: 9000000000, + output: '3 mon', + }, + { + input: 99999999999, 
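The removeMatchAll helper introduced above drops bare `{ match_all: {} }` clauses from the combined bool query. A quick illustration of its effect, matching the behavior the added test expects:

import { isEqual } from 'lodash';

function removeMatchAll<T>(clauses: T[]) {
  return clauses.filter(
    (clause) => !clause || typeof clause !== 'object' || !isEqual(clause, { match_all: {} })
  );
}

removeMatchAll([{ match_all: {} }, { match: { a: 'b' } }]);
// -> [{ match: { a: 'b' } }]  (only the redundant match_all clause is removed)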
+ output: '3 y', + }, + ], + }); + + testCase({ + inputFormat: 'milliseconds', + outputFormat: 'humanizePrecise', + outputPrecision: 0, + showSuffix: true, + useShortSuffix: true, + includeSpaceWithSuffix: false, + fixtures: [ + { + input: -123, + output: '-123ms', + }, + { + input: 1, + output: '1ms', + }, + { + input: 600, + output: '600ms', + }, + ], + }); + function testCase({ inputFormat, outputFormat, outputPrecision, showSuffix, + useShortSuffix, + includeSpaceWithSuffix, fixtures, }: { inputFormat: string; outputFormat: string; outputPrecision: number | undefined; showSuffix: boolean | undefined; + useShortSuffix?: boolean; + includeSpaceWithSuffix?: boolean; fixtures: any[]; }) { fixtures.forEach((fixture: Record) => { @@ -160,7 +325,14 @@ describe('Duration Format', () => { outputPrecision ? `, ${outputPrecision} decimals` : '' }`, () => { const duration = new DurationFormat( - { inputFormat, outputFormat, outputPrecision, showSuffix }, + { + inputFormat, + outputFormat, + outputPrecision, + showSuffix, + useShortSuffix, + includeSpaceWithSuffix, + }, jest.fn() ); expect(duration.convert(input)).toBe(output); diff --git a/src/plugins/data/common/field_formats/converters/duration.ts b/src/plugins/data/common/field_formats/converters/duration.ts index ef8c1df3704a8..c9a7091db8471 100644 --- a/src/plugins/data/common/field_formats/converters/duration.ts +++ b/src/plugins/data/common/field_formats/converters/duration.ts @@ -18,6 +18,7 @@ const ratioToSeconds: Record = { microseconds: 0.000001, }; const HUMAN_FRIENDLY = 'humanize'; +const HUMAN_FRIENDLY_PRECISE = 'humanizePrecise'; const DEFAULT_OUTPUT_PRECISION = 2; const DEFAULT_INPUT_FORMAT = { text: i18n.translate('data.fieldFormats.duration.inputFormats.seconds', { @@ -89,59 +90,89 @@ const inputFormats = [ }, ]; const DEFAULT_OUTPUT_FORMAT = { - text: i18n.translate('data.fieldFormats.duration.outputFormats.humanize', { - defaultMessage: 'Human Readable', + text: i18n.translate('data.fieldFormats.duration.outputFormats.humanize.approximate', { + defaultMessage: 'Human-readable (approximate)', }), method: 'humanize', }; const outputFormats = [ { ...DEFAULT_OUTPUT_FORMAT }, + { + text: i18n.translate('data.fieldFormats.duration.outputFormats.humanize.precise', { + defaultMessage: 'Human-readable (precise)', + }), + method: 'humanizePrecise', + }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asMilliseconds', { defaultMessage: 'Milliseconds', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asMilliseconds.short', { + defaultMessage: 'ms', + }), method: 'asMilliseconds', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asSeconds', { defaultMessage: 'Seconds', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asSeconds.short', { + defaultMessage: 's', + }), method: 'asSeconds', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asMinutes', { defaultMessage: 'Minutes', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asMinutes.short', { + defaultMessage: 'min', + }), method: 'asMinutes', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asHours', { defaultMessage: 'Hours', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asHours.short', { + defaultMessage: 'h', + }), method: 'asHours', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asDays', { defaultMessage: 'Days', }), + shortText: 
i18n.translate('data.fieldFormats.duration.outputFormats.asDays.short', { + defaultMessage: 'd', + }), method: 'asDays', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asWeeks', { defaultMessage: 'Weeks', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asWeeks.short', { + defaultMessage: 'w', + }), method: 'asWeeks', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asMonths', { defaultMessage: 'Months', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asMonths.short', { + defaultMessage: 'mon', + }), method: 'asMonths', }, { text: i18n.translate('data.fieldFormats.duration.outputFormats.asYears', { defaultMessage: 'Years', }), + shortText: i18n.translate('data.fieldFormats.duration.outputFormats.asYears.short', { + defaultMessage: 'y', + }), method: 'asYears', }, ]; @@ -154,6 +185,29 @@ function parseInputAsDuration(val: number, inputFormat: string) { return moment.duration(val * ratio, kind); } +function formatInputHumanPrecise( + val: number, + inputFormat: string, + outputPrecision: number, + useShortSuffix: boolean, + includeSpace: string +) { + const ratio = ratioToSeconds[inputFormat] || 1; + const kind = (inputFormat in ratioToSeconds + ? 'seconds' + : inputFormat) as unitOfTime.DurationConstructor; + const valueInDuration = moment.duration(val * ratio, kind); + + return formatDuration( + val, + valueInDuration, + inputFormat, + outputPrecision, + useShortSuffix, + includeSpace + ); +} + export class DurationFormat extends FieldFormat { static id = FIELD_FORMAT_IDS.DURATION; static title = i18n.translate('data.fieldFormats.duration.title', { @@ -167,11 +221,17 @@ export class DurationFormat extends FieldFormat { isHuman() { return this.param('outputFormat') === HUMAN_FRIENDLY; } + + isHumanPrecise() { + return this.param('outputFormat') === HUMAN_FRIENDLY_PRECISE; + } + getParamDefaults() { return { inputFormat: DEFAULT_INPUT_FORMAT.kind, outputFormat: DEFAULT_OUTPUT_FORMAT.method, outputPrecision: DEFAULT_OUTPUT_PRECISION, + includeSpaceWithSuffix: true, }; } @@ -180,19 +240,84 @@ export class DurationFormat extends FieldFormat { const outputFormat = this.param('outputFormat') as keyof Duration; const outputPrecision = this.param('outputPrecision'); const showSuffix = Boolean(this.param('showSuffix')); + const useShortSuffix = Boolean(this.param('useShortSuffix')); + const includeSpaceWithSuffix = this.param('includeSpaceWithSuffix'); + + const includeSpace = includeSpaceWithSuffix ? ' ' : ''; + const human = this.isHuman(); + const humanPrecise = this.isHumanPrecise(); + const prefix = val < 0 && human ? i18n.translate('data.fieldFormats.duration.negativeLabel', { defaultMessage: 'minus', }) + ' ' : ''; + const duration = parseInputAsDuration(val, inputFormat) as Record; - const formatted = duration[outputFormat](); - const precise = human ? formatted : formatted.toFixed(outputPrecision); + const formatted = humanPrecise + ? formatInputHumanPrecise(val, inputFormat, outputPrecision, useShortSuffix, includeSpace) + : duration[outputFormat](); + + const precise = human || humanPrecise ? formatted : formatted.toFixed(outputPrecision); const type = outputFormats.find(({ method }) => method === outputFormat); - const suffix = showSuffix && type ? ` ${type.text}` : ''; - return prefix + precise + suffix; + const unitText = useShortSuffix ? type?.shortText : type?.text; + + const suffix = showSuffix && unitText && !human ? `${includeSpace}${unitText}` : ''; + + return humanPrecise ? 
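A usage sketch for the new 'humanizePrecise' output format, mirroring the millisecond fixtures in the test above; the second constructor argument is the getConfig callback, stubbed here the way the tests stub it with jest.fn():

import { DurationFormat } from './duration';

const format = new DurationFormat(
  {
    inputFormat: 'milliseconds',
    outputFormat: 'humanizePrecise',
    outputPrecision: 0,
    showSuffix: true,
    useShortSuffix: true,
  },
  () => undefined // getConfig stub
);

format.convert(3000);   // '3 s'
format.convert(300000); // '5 min'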
precise : prefix + precise + suffix; }; } + +function formatDuration( + val: number, + duration: moment.Duration, + inputFormat: string, + outputPrecision: number, + useShortSuffix: boolean, + includeSpace: string +) { + // return nothing when the duration is falsy or not correctly parsed (P0D) + if (!duration || !duration.isValid()) return; + const units = [ + { unit: duration.years(), nextUnitRate: 12, method: 'asYears' }, + { unit: duration.months(), nextUnitRate: 4, method: 'asMonths' }, + { unit: duration.weeks(), nextUnitRate: 7, method: 'asWeeks' }, + { unit: duration.days(), nextUnitRate: 24, method: 'asDays' }, + { unit: duration.hours(), nextUnitRate: 60, method: 'asHours' }, + { unit: duration.minutes(), nextUnitRate: 60, method: 'asMinutes' }, + { unit: duration.seconds(), nextUnitRate: 1000, method: 'asSeconds' }, + { unit: duration.milliseconds(), nextUnitRate: 1000, method: 'asMilliseconds' }, + ]; + + const getUnitText = (method: string) => { + const type = outputFormats.find(({ method: methodT }) => method === methodT); + return useShortSuffix ? type?.shortText : type?.text; + }; + + for (let i = 0; i < units.length; i++) { + const unitValue = units[i].unit; + if (unitValue >= 1) { + const unitText = getUnitText(units[i].method); + + const value = Math.floor(unitValue); + if (units?.[i + 1]) { + const decimalPointValue = Math.floor(units[i + 1].unit); + return ( + (value + decimalPointValue / units[i].nextUnitRate).toFixed(outputPrecision) + + includeSpace + + unitText + ); + } else { + return unitValue.toFixed(outputPrecision) + includeSpace + unitText; + } + } + } + + const unitValue = units[units.length - 1].unit; + const unitText = getUnitText(units[units.length - 1].method); + + return unitValue.toFixed(outputPrecision) + includeSpace + unitText; +} diff --git a/src/plugins/data/common/index_patterns/index_patterns/index_pattern.ts b/src/plugins/data/common/index_patterns/index_patterns/index_pattern.ts index 41ce7ba4bab4a..c897cdbce2309 100644 --- a/src/plugins/data/common/index_patterns/index_patterns/index_pattern.ts +++ b/src/plugins/data/common/index_patterns/index_patterns/index_pattern.ts @@ -7,7 +7,7 @@ */ import _, { each, reject } from 'lodash'; -import { FieldAttrs, FieldAttrSet } from '../..'; +import { FieldAttrs, FieldAttrSet, IndexPatternAttributes } from '../..'; import type { RuntimeField } from '../types'; import { DuplicateField } from '../../../../kibana_utils/common'; @@ -240,6 +240,7 @@ export class IndexPattern implements IIndexPattern { * @param script script code * @param fieldType * @param lang + * @deprecated use runtime field instead */ async addScriptedField(name: string, script: string, fieldType: string = 'string') { const scriptedFields = this.getScriptedFields(); @@ -265,6 +266,7 @@ export class IndexPattern implements IIndexPattern { /** * Remove scripted field from field list * @param fieldName + * @deprecated use runtime field instead */ removeScriptedField(fieldName: string) { @@ -274,10 +276,18 @@ export class IndexPattern implements IIndexPattern { } } + /** + * + * @deprecated use runtime field instead + */ getNonScriptedFields() { return [...this.fields.getAll().filter((field) => !field.scripted)]; } + /** + * + * @deprecated use runtime field instead + */ getScriptedFields() { return [...this.fields.getAll().filter((field) => field.scripted)]; } @@ -308,7 +318,7 @@ export class IndexPattern implements IIndexPattern { /** * Returns index pattern as saved object body for saving */ - getAsSavedObjectBody() { + 
getAsSavedObjectBody(): IndexPatternAttributes { const fieldFormatMap = _.isEmpty(this.fieldFormatMap) ? undefined : JSON.stringify(this.fieldFormatMap); @@ -321,12 +331,10 @@ export class IndexPattern implements IIndexPattern { timeFieldName: this.timeFieldName, intervalName: this.intervalName, sourceFilters: this.sourceFilters ? JSON.stringify(this.sourceFilters) : undefined, - fields: this.fields - ? JSON.stringify(this.fields.filter((field) => field.scripted)) - : undefined, + fields: JSON.stringify(this.fields?.filter((field) => field.scripted) ?? []), fieldFormatMap, - type: this.type, - typeMeta: this.typeMeta ? JSON.stringify(this.typeMeta) : undefined, + type: this.type!, + typeMeta: JSON.stringify(this.typeMeta ?? {}), allowNoIndex: this.allowNoIndex ? this.allowNoIndex : undefined, runtimeFieldMap: runtimeFieldMap ? JSON.stringify(runtimeFieldMap) : undefined, }; diff --git a/src/plugins/data/common/index_patterns/index_patterns/index_patterns.test.ts b/src/plugins/data/common/index_patterns/index_patterns/index_patterns.test.ts index a4f37334c212e..8715f8feb067a 100644 --- a/src/plugins/data/common/index_patterns/index_patterns/index_patterns.test.ts +++ b/src/plugins/data/common/index_patterns/index_patterns/index_patterns.test.ts @@ -230,7 +230,12 @@ describe('IndexPatterns', () => { test('createAndSave', async () => { const title = 'kibana-*'; - indexPatterns.createSavedObject = jest.fn(); + + indexPatterns.createSavedObject = jest.fn(() => + Promise.resolve(({ + id: 'id', + } as unknown) as IndexPattern) + ); indexPatterns.setDefault = jest.fn(); await indexPatterns.createAndSave({ title }); expect(indexPatterns.createSavedObject).toBeCalled(); diff --git a/src/plugins/data/common/index_patterns/index_patterns/index_patterns.ts b/src/plugins/data/common/index_patterns/index_patterns/index_patterns.ts index 66e66051a6370..e67e72f295b8e 100644 --- a/src/plugins/data/common/index_patterns/index_patterns/index_patterns.ts +++ b/src/plugins/data/common/index_patterns/index_patterns/index_patterns.ts @@ -403,6 +403,12 @@ export class IndexPatternsService { throw new SavedObjectNotFound(savedObjectType, id, 'management/kibana/indexPatterns'); } + return this.initFromSavedObject(savedObject); + }; + + private initFromSavedObject = async ( + savedObject: SavedObject + ): Promise => { const spec = this.savedObjectToSpec(savedObject); const { title, type, typeMeta, runtimeFieldMap } = spec; spec.fieldAttrs = savedObject.attributes.fieldAttrs @@ -412,7 +418,7 @@ export class IndexPatternsService { try { spec.fields = await this.refreshFieldSpecMap( spec.fields || {}, - id, + savedObject.id, spec.title as string, { pattern: title as string, @@ -423,6 +429,7 @@ export class IndexPatternsService { }, spec.fieldAttrs ); + // CREATE RUNTIME FIELDS for (const [key, value] of Object.entries(runtimeFieldMap || {})) { // do not create runtime field if mapped field exists @@ -450,7 +457,7 @@ export class IndexPatternsService { this.onError(err, { title: i18n.translate('data.indexPatterns.fetchFieldErrorTitle', { defaultMessage: 'Error fetching fields for index pattern {title} (ID: {id})', - values: { id, title }, + values: { id: savedObject.id, title }, }), }); } @@ -516,9 +523,9 @@ export class IndexPatternsService { async createAndSave(spec: IndexPatternSpec, override = false, skipFetchFields = false) { const indexPattern = await this.create(spec, skipFetchFields); - await this.createSavedObject(indexPattern, override); - await this.setDefault(indexPattern.id!); - return indexPattern; + const 
createdIndexPattern = await this.createSavedObject(indexPattern, override); + await this.setDefault(createdIndexPattern.id!); + return createdIndexPattern; } /** @@ -538,15 +545,20 @@ export class IndexPatternsService { } const body = indexPattern.getAsSavedObjectBody(); - const response = await this.savedObjectsClient.create(savedObjectType, body, { - id: indexPattern.id, - }); - indexPattern.id = response.id; - this.indexPatternCache.set(indexPattern.id, Promise.resolve(indexPattern)); + const response: SavedObject = (await this.savedObjectsClient.create( + savedObjectType, + body, + { + id: indexPattern.id, + } + )) as SavedObject; + + const createdIndexPattern = await this.initFromSavedObject(response); + this.indexPatternCache.set(createdIndexPattern.id!, Promise.resolve(createdIndexPattern)); if (this.savedObjectsCache) { this.savedObjectsCache.push(response as SavedObject); } - return indexPattern; + return createdIndexPattern; } /** diff --git a/src/plugins/data/config.ts b/src/plugins/data/config.ts index 72fa547f44a77..9306b64019bbc 100644 --- a/src/plugins/data/config.ts +++ b/src/plugins/data/config.ts @@ -15,6 +15,8 @@ export const configSchema = schema.object({ }), valueSuggestions: schema.object({ enabled: schema.boolean({ defaultValue: true }), + terminateAfter: schema.duration({ defaultValue: 100000 }), + timeout: schema.duration({ defaultValue: 1000 }), }), }), search: schema.object({ diff --git a/src/plugins/data/public/autocomplete/autocomplete_service.ts b/src/plugins/data/public/autocomplete/autocomplete_service.ts index eb9d859664c4d..67efbe2af29ce 100644 --- a/src/plugins/data/public/autocomplete/autocomplete_service.ts +++ b/src/plugins/data/public/autocomplete/autocomplete_service.ts @@ -7,6 +7,7 @@ */ import { CoreSetup, PluginInitializerContext } from 'src/core/public'; +import moment from 'moment'; import { TimefilterSetup } from '../query'; import { QuerySuggestionGetFn } from './providers/query_suggestion_provider'; import { @@ -27,7 +28,7 @@ import { DataPublicPluginStart, DataStartDependencies } from '../types'; export class AutocompleteService { autocompleteConfig: ConfigSchema['autocomplete']; - constructor(initializerContext: PluginInitializerContext) { + constructor(private initializerContext: PluginInitializerContext) { const { autocomplete } = initializerContext.config.get(); this.autocompleteConfig = autocomplete; @@ -55,6 +56,8 @@ export class AutocompleteService { usageCollection, }: { timefilter: TimefilterSetup; usageCollection?: UsageCollectionSetup } ) { + const { autocomplete } = this.initializerContext.config.get(); + const { terminateAfter, timeout } = autocomplete.valueSuggestions; const usageCollector = createUsageCollector(core.getStartServices, usageCollection); this.getValueSuggestions = this.autocompleteConfig.valueSuggestions.enabled @@ -71,6 +74,10 @@ export class AutocompleteService { * please use "getQuerySuggestions" from the start contract */ getQuerySuggestions: this.getQuerySuggestions, + getAutocompleteSettings: () => ({ + terminateAfter: moment.duration(terminateAfter).asMilliseconds(), + timeout: moment.duration(timeout).asMilliseconds(), + }), }; } diff --git a/src/plugins/data/public/mocks.ts b/src/plugins/data/public/mocks.ts index f5def327b5473..1277e31be2103 100644 --- a/src/plugins/data/public/mocks.ts +++ b/src/plugins/data/public/mocks.ts @@ -16,8 +16,9 @@ import { createNowProviderMock } from './now_provider/mocks'; export type Setup = jest.Mocked>; export type Start = jest.Mocked>; -const automcompleteSetupMock: 
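With the createAndSave change above, callers now receive the index pattern re-initialized from the created saved object, so the persisted id and refreshed fields are available immediately. A usage sketch, assuming the service instance is obtained elsewhere and the import path is illustrative:

import type { IndexPatternsService } from './index_patterns';

async function createDefaultPattern(indexPatterns: IndexPatternsService) {
  // Resolves to the pattern built from the created saved object, not the in-memory draft.
  const indexPattern = await indexPatterns.createAndSave({ title: 'kibana-*' });
  return indexPattern.id; // id assigned by the saved objects client
}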
jest.Mocked = { +const autocompleteSetupMock: jest.Mocked = { getQuerySuggestions: jest.fn(), + getAutocompleteSettings: jest.fn(), }; const autocompleteStartMock: jest.Mocked = { @@ -29,7 +30,7 @@ const autocompleteStartMock: jest.Mocked = { const createSetupContract = (): Setup => { const querySetupMock = queryServiceMock.createSetupContract(); return { - autocomplete: automcompleteSetupMock, + autocomplete: autocompleteSetupMock, search: searchServiceMock.createSetupContract(), fieldFormats: fieldFormatsServiceMock.createSetupContract(), query: querySetupMock, diff --git a/src/plugins/data/public/public.api.md b/src/plugins/data/public/public.api.md index fde7075d9e760..be6e489b17290 100644 --- a/src/plugins/data/public/public.api.md +++ b/src/plugins/data/public/public.api.md @@ -1307,6 +1307,7 @@ export class IndexPattern implements IIndexPattern { // Warning: (ae-forgotten-export) The symbol "IndexPatternDeps" needs to be exported by the entry point index.d.ts constructor({ spec, fieldFormats, shortDotsEnable, metaFields, }: IndexPatternDeps); addRuntimeField(name: string, runtimeField: RuntimeField): void; + // @deprecated addScriptedField(name: string, script: string, fieldType?: string): Promise; readonly allowNoIndex: boolean; // (undocumented) @@ -1335,19 +1336,7 @@ export class IndexPattern implements IIndexPattern { delay?: string | undefined; time_zone?: string | undefined; }>> | undefined; - getAsSavedObjectBody(): { - fieldAttrs: string | undefined; - title: string; - timeFieldName: string | undefined; - intervalName: string | undefined; - sourceFilters: string | undefined; - fields: string | undefined; - fieldFormatMap: string | undefined; - type: string | undefined; - typeMeta: string | undefined; - allowNoIndex: true | undefined; - runtimeFieldMap: string | undefined; - }; + getAsSavedObjectBody(): IndexPatternAttributes; // (undocumented) getComputedFields(): { storedFields: string[]; @@ -1366,7 +1355,7 @@ export class IndexPattern implements IIndexPattern { getFieldByName(name: string): IndexPatternField | undefined; getFormatterForField(field: IndexPatternField | IndexPatternField['spec'] | IFieldType): FieldFormat; getFormatterForFieldNoDefault(fieldname: string): FieldFormat | undefined; - // (undocumented) + // @deprecated (undocumented) getNonScriptedFields(): IndexPatternField[]; getOriginalSavedObjectBody: () => { fieldAttrs?: string | undefined; @@ -1379,7 +1368,7 @@ export class IndexPattern implements IIndexPattern { typeMeta?: string | undefined; type?: string | undefined; }; - // (undocumented) + // @deprecated (undocumented) getScriptedFields(): IndexPatternField[]; getSourceFiltering(): { excludes: any[]; @@ -1397,6 +1386,7 @@ export class IndexPattern implements IIndexPattern { // (undocumented) metaFields: string[]; removeRuntimeField(name: string): void; + // @deprecated removeScriptedField(fieldName: string): void; resetOriginalSavedObjectBody: () => void; // (undocumented) diff --git a/src/plugins/data/public/search/session/session_service.test.ts b/src/plugins/data/public/search/session/session_service.test.ts index 13a1a1bd388ba..39680c4948366 100644 --- a/src/plugins/data/public/search/session/session_service.test.ts +++ b/src/plugins/data/public/search/session/session_service.test.ts @@ -98,14 +98,6 @@ describe('Session service', () => { expect(nowProvider.reset).toHaveBeenCalled(); }); - it("Can't clear other apps' session", async () => { - sessionService.start(); - expect(sessionService.getSessionId()).not.toBeUndefined(); - 
currentAppId$.next('change'); - sessionService.clear(); - expect(sessionService.getSessionId()).not.toBeUndefined(); - }); - it("Can start a new session in case there is other apps' stale session", async () => { const s1 = sessionService.start(); expect(sessionService.getSessionId()).not.toBeUndefined(); diff --git a/src/plugins/data/public/search/session/session_service.ts b/src/plugins/data/public/search/session/session_service.ts index 71f51b4bc8d83..629d76b07d7ca 100644 --- a/src/plugins/data/public/search/session/session_service.ts +++ b/src/plugins/data/public/search/session/session_service.ts @@ -128,21 +128,6 @@ export class SessionService { this.subscription.add( coreStart.application.currentAppId$.subscribe((newAppName) => { this.currentApp = newAppName; - if (!this.getSessionId()) return; - - // Apps required to clean up their sessions before unmounting - // Make sure that apps don't leave sessions open by throwing an error in DEV mode - const message = `Application '${ - this.state.get().appName - }' had an open session while navigating`; - if (initializerContext.env.mode.dev) { - coreStart.fatalErrors.add(message); - } else { - // this should never happen in prod because should be caught in dev mode - // in case this happen we don't want to throw fatal error, as most likely possible bugs are not that critical - // eslint-disable-next-line no-console - console.warn(message); - } }) ); }); @@ -230,18 +215,6 @@ export class SessionService { * Cleans up current state */ public clear() { - // make sure apps can't clear other apps' sessions - const currentSessionApp = this.state.get().appName; - if (currentSessionApp && currentSessionApp !== this.currentApp) { - // eslint-disable-next-line no-console - console.warn( - `Skip clearing session "${this.getSessionId()}" because it belongs to a different app. current: "${ - this.currentApp - }", owner: "${currentSessionApp}"` - ); - return; - } - this.state.transitions.clear(); this.searchSessionInfoProvider = undefined; this.searchSessionIndicatorUiConfig = undefined; diff --git a/src/plugins/data/public/utils/table_inspector_view/components/__snapshots__/data_view.test.tsx.snap b/src/plugins/data/public/utils/table_inspector_view/components/__snapshots__/data_view.test.tsx.snap index 9896a6dbdc7b7..a0a7e54d27532 100644 --- a/src/plugins/data/public/utils/table_inspector_view/components/__snapshots__/data_view.test.tsx.snap +++ b/src/plugins/data/public/utils/table_inspector_view/components/__snapshots__/data_view.test.tsx.snap @@ -718,11 +718,13 @@ exports[`Inspector Data View component should render single table without select > @@ -996,6 +998,7 @@ exports[`Inspector Data View component should render single table without select - - - - - column1 - - - - - - - Click to sort in ascending order - + + + + column1 + + + - + @@ -1322,6 +1320,7 @@ exports[`Inspector Data View component should render single table without select