diff --git a/.buildkite/scripts/bootstrap.sh b/.buildkite/scripts/bootstrap.sh index 7ae925b262b9f..05a250f8e9e8c 100755 --- a/.buildkite/scripts/bootstrap.sh +++ b/.buildkite/scripts/bootstrap.sh @@ -5,7 +5,7 @@ set -euo pipefail source .buildkite/scripts/common/util.sh echo "--- yarn install and bootstrap" -yarn kbn bootstrap --verbose +yarn kbn bootstrap ### ### upload ts-refs-cache artifacts as quickly as possible so they are available for download diff --git a/.buildkite/scripts/build_kibana_plugins.sh b/.buildkite/scripts/build_kibana_plugins.sh index f4d82699ef92d..14ea71a75bae6 100644 --- a/.buildkite/scripts/build_kibana_plugins.sh +++ b/.buildkite/scripts/build_kibana_plugins.sh @@ -18,8 +18,7 @@ node scripts/build_kibana_platform_plugins \ --scan-dir "$XPACK_DIR/test/licensing_plugin/plugins" \ --scan-dir "$XPACK_DIR/test/usage_collection/plugins" \ --scan-dir "$XPACK_DIR/test/security_functional/fixtures/common" \ - --scan-dir "$XPACK_DIR/examples" \ - --verbose + --scan-dir "$XPACK_DIR/examples" echo "--- Archive built plugins" shopt -s globstar diff --git a/.buildkite/scripts/lifecycle/build_status.js b/.buildkite/scripts/lifecycle/build_status.js index 2c1d51ecac0a7..f2a5024c96013 100644 --- a/.buildkite/scripts/lifecycle/build_status.js +++ b/.buildkite/scripts/lifecycle/build_status.js @@ -7,11 +7,11 @@ const { BuildkiteClient } = require('kibana-buildkite-library'); console.log(status.success ? 'true' : 'false'); process.exit(0); } catch (ex) { + console.error('Buildkite API Error', ex.message); if (ex.response) { - console.error('HTTP Error Response Body', ex.response.data); console.error('HTTP Error Response Status', ex.response.status); + console.error('HTTP Error Response Body', ex.response.data); } - console.error(ex); process.exit(1); } })(); diff --git a/.buildkite/scripts/lifecycle/ci_stats_complete.js b/.buildkite/scripts/lifecycle/ci_stats_complete.js index d86e2ec7efcae..d9411178799ab 100644 --- a/.buildkite/scripts/lifecycle/ci_stats_complete.js +++ b/.buildkite/scripts/lifecycle/ci_stats_complete.js @@ -4,7 +4,11 @@ const { CiStats } = require('kibana-buildkite-library'); try { await CiStats.onComplete(); } catch (ex) { - console.error(ex); + console.error('CI Stats Error', ex.message); + if (ex.response) { + console.error('HTTP Error Response Status', ex.response.status); + console.error('HTTP Error Response Body', ex.response.data); + } process.exit(1); } })(); diff --git a/.buildkite/scripts/lifecycle/ci_stats_start.js b/.buildkite/scripts/lifecycle/ci_stats_start.js index 115aa9bd23954..ec0e4c713499e 100644 --- a/.buildkite/scripts/lifecycle/ci_stats_start.js +++ b/.buildkite/scripts/lifecycle/ci_stats_start.js @@ -4,7 +4,11 @@ const { CiStats } = require('kibana-buildkite-library'); try { await CiStats.onStart(); } catch (ex) { - console.error(ex); + console.error('CI Stats Error', ex.message); + if (ex.response) { + console.error('HTTP Error Response Status', ex.response.status); + console.error('HTTP Error Response Body', ex.response.data); + } process.exit(1); } })(); diff --git a/.ci/Dockerfile b/.ci/Dockerfile index 947242ecc0ece..d3ea74ca38969 100644 --- a/.ci/Dockerfile +++ b/.ci/Dockerfile @@ -1,7 +1,7 @@ # NOTE: This Dockerfile is ONLY used to run certain tasks in CI. It is not used to run Kibana or as a distributable. 
# If you're looking for the Kibana Docker image distributable, please see: src/dev/build/tasks/os_packages/docker_generator/templates/dockerfile.template.ts -ARG NODE_VERSION=14.17.5 +ARG NODE_VERSION=14.17.6 FROM node:${NODE_VERSION} AS base diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 3829121aa5fe9..381fad404ca73 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -439,6 +439,9 @@ /x-pack/test/reporting_api_integration/ @elastic/kibana-reporting-services @elastic/kibana-app-services /x-pack/test/reporting_functional/ @elastic/kibana-reporting-services @elastic/kibana-app-services /x-pack/test/stack_functional_integration/apps/reporting/ @elastic/kibana-reporting-services @elastic/kibana-app-services +/docs/user/reporting @elastic/kibana-reporting-services @elastic/kibana-app-services +/docs/settings/reporting-settings.asciidoc @elastic/kibana-reporting-services @elastic/kibana-app-services +/docs/setup/configuring-reporting.asciidoc @elastic/kibana-reporting-services @elastic/kibana-app-services #CC# /x-pack/plugins/reporting/ @elastic/kibana-reporting-services diff --git a/.github/workflows/sync-main-branch.yml b/.github/workflows/sync-main-branch.yml new file mode 100644 index 0000000000000..63465602e8436 --- /dev/null +++ b/.github/workflows/sync-main-branch.yml @@ -0,0 +1,26 @@ +# Synchronize all pushes to 'master' branch with 'main' branch to facilitate migration +name: "Sync main branch" +on: + push: + branches: + - master + +jobs: + sync_latest_from_upstream: + runs-on: ubuntu-latest + name: Sync latest commits from master branch + + steps: + - name: Checkout target repo + uses: actions/checkout@v2 + with: + ref: main + + - name: Sync upstream changes + id: sync + uses: aormsby/Fork-Sync-With-Upstream-action@v3.0 + with: + target_sync_branch: main + target_repo_token: ${{ secrets.KIBANAMACHINE_TOKEN }} + upstream_sync_branch: master + upstream_sync_repo: elastic/kibana diff --git a/.i18nrc.json b/.i18nrc.json index 3301cd04ad06c..f38d6b8faae7e 100644 --- a/.i18nrc.json +++ b/.i18nrc.json @@ -2,6 +2,7 @@ "paths": { "alerts": "packages/kbn-alerts/src", "autocomplete": "packages/kbn-securitysolution-autocomplete/src", + "kbnConfig": "packages/kbn-config/src", "console": "src/plugins/console", "core": "src/core", "discover": "src/plugins/discover", diff --git a/.node-version b/.node-version index 18711d290eac4..5595ae1aa9e4c 100644 --- a/.node-version +++ b/.node-version @@ -1 +1 @@ -14.17.5 +14.17.6 diff --git a/.nvmrc b/.nvmrc index 18711d290eac4..5595ae1aa9e4c 100644 --- a/.nvmrc +++ b/.nvmrc @@ -1 +1 @@ -14.17.5 +14.17.6 diff --git a/WORKSPACE.bazel b/WORKSPACE.bazel index 384277822709c..3ae3f202a3bfd 100644 --- a/WORKSPACE.bazel +++ b/WORKSPACE.bazel @@ -27,13 +27,13 @@ check_rules_nodejs_version(minimum_version_string = "3.8.0") # we can update that rule. 
node_repositories( node_repositories = { - "14.17.5-darwin_amd64": ("node-v14.17.5-darwin-x64.tar.gz", "node-v14.17.5-darwin-x64", "2e40ab625b45b9bdfcb963ddd4d65d87ddf1dd37a86b6f8b075cf3d77fe9dc09"), - "14.17.5-linux_arm64": ("node-v14.17.5-linux-arm64.tar.xz", "node-v14.17.5-linux-arm64", "3a2e674b6db50dfde767c427e8f077235bbf6f9236e1b12a4cc3496b12f94bae"), - "14.17.5-linux_s390x": ("node-v14.17.5-linux-s390x.tar.xz", "node-v14.17.5-linux-s390x", "7d40eee3d54241403db12fb3bc420cd776e2b02e89100c45cf5e74a73942e7f6"), - "14.17.5-linux_amd64": ("node-v14.17.5-linux-x64.tar.xz", "node-v14.17.5-linux-x64", "2d759de07a50cd7f75bd73d67e97b0d0e095ee3c413efac7d1b3d1e84ed76fff"), - "14.17.5-windows_amd64": ("node-v14.17.5-win-x64.zip", "node-v14.17.5-win-x64", "a99b7ee08e846e5d1f4e70c4396265542819d79ed9cebcc27760b89571f03cbf"), + "14.17.6-darwin_amd64": ("node-v14.17.6-darwin-x64.tar.gz", "node-v14.17.6-darwin-x64", "e3e4c02240d74fb1dc8a514daa62e5de04f7eaee0bcbca06a366ece73a52ad88"), + "14.17.6-linux_arm64": ("node-v14.17.6-linux-arm64.tar.xz", "node-v14.17.6-linux-arm64", "9c4f3a651e03cd9b5bddd33a80e8be6a6eb15e518513e410bb0852a658699156"), + "14.17.6-linux_s390x": ("node-v14.17.6-linux-s390x.tar.xz", "node-v14.17.6-linux-s390x", "3677f35b97608056013b5368f86eecdb044bdccc1b3976c1d4448736c37b6a0c"), + "14.17.6-linux_amd64": ("node-v14.17.6-linux-x64.tar.xz", "node-v14.17.6-linux-x64", "3bbe4faf356738d88b45be222bf5e858330541ff16bd0d4cfad36540c331461b"), + "14.17.6-windows_amd64": ("node-v14.17.6-win-x64.zip", "node-v14.17.6-win-x64", "b83e9ce542fda7fc519cec6eb24a2575a84862ea4227dedc171a8e0b5b614ac0"), }, - node_version = "14.17.5", + node_version = "14.17.6", node_urls = [ "https://nodejs.org/dist/v{version}/{filename}", ], diff --git a/docs/apm/correlations.asciidoc b/docs/apm/correlations.asciidoc index 45781228cd200..c0c18433c9021 100644 --- a/docs/apm/correlations.asciidoc +++ b/docs/apm/correlations.asciidoc @@ -12,7 +12,7 @@ piece of hardware, like a host or pod. Or, perhaps a set of users, based on IP address or region, is facing increased latency due to local data center issues. To find correlations, select a service on the *Services* page in the {apm-app} -and click **View correlations**. +then select a transaction group from the *Transactions* tab. NOTE: Queries within the {apm-app} are also applied to the correlations. @@ -20,26 +20,25 @@ NOTE: Queries within the {apm-app} are also applied to the correlations. [[correlations-latency]] ==== Find high transaction latency correlations -The correlations on the *Latency* tab help you discover which attributes are -contributing to increased transaction latency. +The correlations on the *Latency correlations* tab help you discover which +attributes are contributing to increased transaction latency. [role="screenshot"] image::apm/images/correlations-hover.png[Latency correlations] The progress bar indicates the status of the asynchronous analysis, which performs statistical searches across a large number of attributes. For large -time ranges and services with high transaction throughput this might take some -time. To improve performance, reduce the time range on the service overview -page. +time ranges and services with high transaction throughput, this might take some +time. To improve performance, reduce the time range. The latency distribution chart visualizes the overall latency of the -transactions in the service. 
If there are attributes that have a statistically -significant correlation with slow response times, they are listed in a table -below the chart. The table is sorted by correlation coefficients that range from -0 to 1. Attributes with higher correlation values are more likely to contribute -to high latency transactions. By default, the attribute with the highest -correlation value is added to the chart. To see the latency distribution for -other attributes, hover over their row in the table. +transactions in the transaction group. If there are attributes that have a +statistically significant correlation with slow response times, they are listed +in a table below the chart. The table is sorted by correlation coefficients that +range from 0 to 1. Attributes with higher correlation values are more likely to +contribute to high latency transactions. By default, the attribute with the +highest correlation value is added to the chart. To see the latency distribution +for other attributes, hover over their row in the table. If a correlated attribute seems noteworthy, use the **Filter** quick links: diff --git a/docs/apm/images/correlations-hover.png b/docs/apm/images/correlations-hover.png index c8d5622156b4c..80c1fa41adbdf 100644 Binary files a/docs/apm/images/correlations-hover.png and b/docs/apm/images/correlations-hover.png differ diff --git a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md index cadb34ae63b86..bc6075176cd22 100644 --- a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md +++ b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md @@ -9,6 +9,10 @@ ```typescript readonly links: { readonly settings: string; + readonly apm: { + readonly kibanaSettings: string; + readonly supportedServiceMaps: string; + }; readonly canvas: { readonly guide: string; }; @@ -128,6 +132,7 @@ readonly links: { readonly rollupJobs: string; readonly elasticsearch: Record; readonly siem: { + readonly privileges: string; readonly guide: string; readonly gettingStarted: string; readonly ml: string; diff --git a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md index aded69733b58b..aa3f958018041 100644 --- a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md +++ b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md @@ -17,5 +17,5 @@ export interface DocLinksStart | --- | --- | --- | | [DOC\_LINK\_VERSION](./kibana-plugin-core-public.doclinksstart.doc_link_version.md) | string | | | [ELASTIC\_WEBSITE\_URL](./kibana-plugin-core-public.doclinksstart.elastic_website_url.md) | string | | -| [links](./kibana-plugin-core-public.doclinksstart.links.md) | {
readonly settings: string;
readonly canvas: {
readonly guide: string;
};
readonly dashboard: {
readonly guide: string;
readonly drilldowns: string;
readonly drilldownsTriggerPicker: string;
readonly urlDrilldownTemplateSyntax: string;
readonly urlDrilldownVariables: string;
};
readonly discover: Record<string, string>;
readonly filebeat: {
readonly base: string;
readonly installation: string;
readonly configuration: string;
readonly elasticsearchOutput: string;
readonly elasticsearchModule: string;
readonly startup: string;
readonly exportedFields: string;
readonly suricataModule: string;
readonly zeekModule: string;
};
readonly auditbeat: {
readonly base: string;
readonly auditdModule: string;
readonly systemModule: string;
};
readonly metricbeat: {
readonly base: string;
readonly configure: string;
readonly httpEndpoint: string;
readonly install: string;
readonly start: string;
};
readonly enterpriseSearch: {
readonly base: string;
readonly appSearchBase: string;
readonly workplaceSearchBase: string;
};
readonly heartbeat: {
readonly base: string;
};
readonly libbeat: {
readonly getStarted: string;
};
readonly logstash: {
readonly base: string;
};
readonly functionbeat: {
readonly base: string;
};
readonly winlogbeat: {
readonly base: string;
};
readonly aggs: {
readonly composite: string;
readonly composite_missing_bucket: string;
readonly date_histogram: string;
readonly date_range: string;
readonly date_format_pattern: string;
readonly filter: string;
readonly filters: string;
readonly geohash_grid: string;
readonly histogram: string;
readonly ip_range: string;
readonly range: string;
readonly significant_terms: string;
readonly terms: string;
readonly avg: string;
readonly avg_bucket: string;
readonly max_bucket: string;
readonly min_bucket: string;
readonly sum_bucket: string;
readonly cardinality: string;
readonly count: string;
readonly cumulative_sum: string;
readonly derivative: string;
readonly geo_bounds: string;
readonly geo_centroid: string;
readonly max: string;
readonly median: string;
readonly min: string;
readonly moving_avg: string;
readonly percentile_ranks: string;
readonly serial_diff: string;
readonly std_dev: string;
readonly sum: string;
readonly top_hits: string;
};
readonly runtimeFields: {
readonly overview: string;
readonly mapping: string;
};
readonly scriptedFields: {
readonly scriptFields: string;
readonly scriptAggs: string;
readonly painless: string;
readonly painlessApi: string;
readonly painlessLangSpec: string;
readonly painlessSyntax: string;
readonly painlessWalkthrough: string;
readonly luceneExpressions: string;
};
readonly search: {
readonly sessions: string;
readonly sessionLimits: string;
};
readonly indexPatterns: {
readonly introduction: string;
readonly fieldFormattersNumber: string;
readonly fieldFormattersString: string;
readonly runtimeFields: string;
};
readonly addData: string;
readonly kibana: string;
readonly upgradeAssistant: string;
readonly rollupJobs: string;
readonly elasticsearch: Record<string, string>;
readonly siem: {
readonly guide: string;
readonly gettingStarted: string;
readonly ml: string;
readonly ruleChangeLog: string;
readonly detectionsReq: string;
readonly networkMap: string;
};
readonly query: {
readonly eql: string;
readonly kueryQuerySyntax: string;
readonly luceneQuerySyntax: string;
readonly percolate: string;
readonly queryDsl: string;
readonly autocompleteChanges: string;
};
readonly date: {
readonly dateMath: string;
readonly dateMathIndexNames: string;
};
readonly management: Record<string, string>;
readonly ml: Record<string, string>;
readonly transforms: Record<string, string>;
readonly visualize: Record<string, string>;
readonly apis: Readonly<{
bulkIndexAlias: string;
byteSizeUnits: string;
createAutoFollowPattern: string;
createFollower: string;
createIndex: string;
createSnapshotLifecyclePolicy: string;
createRoleMapping: string;
createRoleMappingTemplates: string;
createRollupJobsRequest: string;
createApiKey: string;
createPipeline: string;
createTransformRequest: string;
cronExpressions: string;
executeWatchActionModes: string;
indexExists: string;
openIndex: string;
putComponentTemplate: string;
painlessExecute: string;
painlessExecuteAPIContexts: string;
putComponentTemplateMetadata: string;
putSnapshotLifecyclePolicy: string;
putIndexTemplateV1: string;
putWatch: string;
simulatePipeline: string;
timeUnits: string;
updateTransform: string;
}>;
readonly observability: Record<string, string>;
readonly alerting: Record<string, string>;
readonly maps: Record<string, string>;
readonly monitoring: Record<string, string>;
readonly security: Readonly<{
apiKeyServiceSettings: string;
clusterPrivileges: string;
elasticsearchSettings: string;
elasticsearchEnableSecurity: string;
indicesPrivileges: string;
kibanaTLS: string;
kibanaPrivileges: string;
mappingRoles: string;
mappingRolesFieldRules: string;
runAsPrivilege: string;
}>;
readonly watcher: Record<string, string>;
readonly ccs: Record<string, string>;
readonly plugins: Record<string, string>;
readonly snapshotRestore: Record<string, string>;
readonly ingest: Record<string, string>;
readonly fleet: Readonly<{
guide: string;
fleetServer: string;
fleetServerAddFleetServer: string;
settings: string;
settingsFleetServerHostSettings: string;
troubleshooting: string;
elasticAgent: string;
datastreams: string;
datastreamsNamingScheme: string;
upgradeElasticAgent: string;
upgradeElasticAgent712lower: string;
}>;
readonly ecs: {
readonly guide: string;
};
} | | +| [links](./kibana-plugin-core-public.doclinksstart.links.md) | {
readonly settings: string;
readonly canvas: {
readonly guide: string;
};
readonly dashboard: {
readonly guide: string;
readonly drilldowns: string;
readonly drilldownsTriggerPicker: string;
readonly urlDrilldownTemplateSyntax: string;
readonly urlDrilldownVariables: string;
};
readonly discover: Record<string, string>;
readonly filebeat: {
readonly base: string;
readonly installation: string;
readonly configuration: string;
readonly elasticsearchOutput: string;
readonly elasticsearchModule: string;
readonly startup: string;
readonly exportedFields: string;
readonly suricataModule: string;
readonly zeekModule: string;
};
readonly auditbeat: {
readonly base: string;
readonly auditdModule: string;
readonly systemModule: string;
};
readonly metricbeat: {
readonly base: string;
readonly configure: string;
readonly httpEndpoint: string;
readonly install: string;
readonly start: string;
};
readonly enterpriseSearch: {
readonly base: string;
readonly appSearchBase: string;
readonly workplaceSearchBase: string;
};
readonly heartbeat: {
readonly base: string;
};
readonly libbeat: {
readonly getStarted: string;
};
readonly logstash: {
readonly base: string;
};
readonly functionbeat: {
readonly base: string;
};
readonly winlogbeat: {
readonly base: string;
};
readonly aggs: {
readonly composite: string;
readonly composite_missing_bucket: string;
readonly date_histogram: string;
readonly date_range: string;
readonly date_format_pattern: string;
readonly filter: string;
readonly filters: string;
readonly geohash_grid: string;
readonly histogram: string;
readonly ip_range: string;
readonly range: string;
readonly significant_terms: string;
readonly terms: string;
readonly avg: string;
readonly avg_bucket: string;
readonly max_bucket: string;
readonly min_bucket: string;
readonly sum_bucket: string;
readonly cardinality: string;
readonly count: string;
readonly cumulative_sum: string;
readonly derivative: string;
readonly geo_bounds: string;
readonly geo_centroid: string;
readonly max: string;
readonly median: string;
readonly min: string;
readonly moving_avg: string;
readonly percentile_ranks: string;
readonly serial_diff: string;
readonly std_dev: string;
readonly sum: string;
readonly top_hits: string;
};
readonly runtimeFields: {
readonly overview: string;
readonly mapping: string;
};
readonly scriptedFields: {
readonly scriptFields: string;
readonly scriptAggs: string;
readonly painless: string;
readonly painlessApi: string;
readonly painlessLangSpec: string;
readonly painlessSyntax: string;
readonly painlessWalkthrough: string;
readonly luceneExpressions: string;
};
readonly search: {
readonly sessions: string;
readonly sessionLimits: string;
};
readonly indexPatterns: {
readonly introduction: string;
readonly fieldFormattersNumber: string;
readonly fieldFormattersString: string;
readonly runtimeFields: string;
};
readonly addData: string;
readonly kibana: string;
readonly upgradeAssistant: string;
readonly rollupJobs: string;
readonly elasticsearch: Record<string, string>;
readonly siem: {
readonly privileges: string;
readonly guide: string;
readonly gettingStarted: string;
readonly ml: string;
readonly ruleChangeLog: string;
readonly detectionsReq: string;
readonly networkMap: string;
};
readonly query: {
readonly eql: string;
readonly kueryQuerySyntax: string;
readonly luceneQuerySyntax: string;
readonly percolate: string;
readonly queryDsl: string;
readonly autocompleteChanges: string;
};
readonly date: {
readonly dateMath: string;
readonly dateMathIndexNames: string;
};
readonly management: Record<string, string>;
readonly ml: Record<string, string>;
readonly transforms: Record<string, string>;
readonly visualize: Record<string, string>;
readonly apis: Readonly<{
bulkIndexAlias: string;
byteSizeUnits: string;
createAutoFollowPattern: string;
createFollower: string;
createIndex: string;
createSnapshotLifecyclePolicy: string;
createRoleMapping: string;
createRoleMappingTemplates: string;
createRollupJobsRequest: string;
createApiKey: string;
createPipeline: string;
createTransformRequest: string;
cronExpressions: string;
executeWatchActionModes: string;
indexExists: string;
openIndex: string;
putComponentTemplate: string;
painlessExecute: string;
painlessExecuteAPIContexts: string;
putComponentTemplateMetadata: string;
putSnapshotLifecyclePolicy: string;
putIndexTemplateV1: string;
putWatch: string;
simulatePipeline: string;
timeUnits: string;
updateTransform: string;
}>;
readonly observability: Record<string, string>;
readonly alerting: Record<string, string>;
readonly maps: Record<string, string>;
readonly monitoring: Record<string, string>;
readonly security: Readonly<{
apiKeyServiceSettings: string;
clusterPrivileges: string;
elasticsearchSettings: string;
elasticsearchEnableSecurity: string;
indicesPrivileges: string;
kibanaTLS: string;
kibanaPrivileges: string;
mappingRoles: string;
mappingRolesFieldRules: string;
runAsPrivilege: string;
}>;
readonly watcher: Record<string, string>;
readonly ccs: Record<string, string>;
readonly plugins: Record<string, string>;
readonly snapshotRestore: Record<string, string>;
readonly ingest: Record<string, string>;
readonly fleet: Readonly<{
guide: string;
fleetServer: string;
fleetServerAddFleetServer: string;
settings: string;
settingsFleetServerHostSettings: string;
troubleshooting: string;
elasticAgent: string;
datastreams: string;
datastreamsNamingScheme: string;
upgradeElasticAgent: string;
upgradeElasticAgent712lower: string;
}>;
readonly ecs: {
readonly guide: string;
};
} | | diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md index e5f08213da510..bd0fc1e5b3713 100644 --- a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md +++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.md @@ -18,6 +18,7 @@ export interface DeprecationsDetails | [deprecationType](./kibana-plugin-core-server.deprecationsdetails.deprecationtype.md) | 'config' | 'feature' | (optional) Used to identify between different deprecation types. Example use case: in Upgrade Assistant, we may want to allow the user to sort by deprecation type or show each type in a separate tab.Feel free to add new types if necessary. Predefined types are necessary to reduce having similar definitions with different keywords across kibana deprecations. | | [documentationUrl](./kibana-plugin-core-server.deprecationsdetails.documentationurl.md) | string | | | [level](./kibana-plugin-core-server.deprecationsdetails.level.md) | 'warning' | 'critical' | 'fetch_error' | levels: - warning: will not break deployment upon upgrade - critical: needs to be addressed before upgrade. - fetch\_error: Deprecations service failed to grab the deprecation details for the domain. | -| [message](./kibana-plugin-core-server.deprecationsdetails.message.md) | string | | +| [message](./kibana-plugin-core-server.deprecationsdetails.message.md) | string | The description message to be displayed for the deprecation. Check the README for writing deprecations in src/core/server/deprecations/README.mdx | | [requireRestart](./kibana-plugin-core-server.deprecationsdetails.requirerestart.md) | boolean | | +| [title](./kibana-plugin-core-server.deprecationsdetails.title.md) | string | The title of the deprecation. Check the README for writing deprecations in src/core/server/deprecations/README.mdx | diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.message.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.message.md index d79a4c9bd7995..906ce8118f95b 100644 --- a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.message.md +++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.message.md @@ -4,6 +4,8 @@ ## DeprecationsDetails.message property +The description message to be displayed for the deprecation. Check the README for writing deprecations in `src/core/server/deprecations/README.mdx` + Signature: ```typescript diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.title.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.title.md new file mode 100644 index 0000000000000..e8907688f6e5e --- /dev/null +++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsdetails.title.md @@ -0,0 +1,13 @@ + + +[Home](./index.md) > [kibana-plugin-core-server](./kibana-plugin-core-server.md) > [DeprecationsDetails](./kibana-plugin-core-server.deprecationsdetails.md) > [title](./kibana-plugin-core-server.deprecationsdetails.title.md) + +## DeprecationsDetails.title property + +The title of the deprecation. 
Check the README for writing deprecations in `src/core/server/deprecations/README.mdx` + +Signature: + +```typescript +title: string; +``` diff --git a/docs/development/core/server/kibana-plugin-core-server.deprecationsservicesetup.md b/docs/development/core/server/kibana-plugin-core-server.deprecationsservicesetup.md index 7d9d3dcdda4da..75732f59f1b3f 100644 --- a/docs/development/core/server/kibana-plugin-core-server.deprecationsservicesetup.md +++ b/docs/development/core/server/kibana-plugin-core-server.deprecationsservicesetup.md @@ -21,6 +21,7 @@ export interface DeprecationsServiceSetup ```ts import { DeprecationsDetails, GetDeprecationsContext, CoreSetup } from 'src/core/server'; +import { i18n } from '@kbn/i18n'; async function getDeprecations({ esClient, savedObjectsClient }: GetDeprecationsContext): Promise { const deprecations: DeprecationsDetails[] = []; @@ -29,52 +30,44 @@ async function getDeprecations({ esClient, savedObjectsClient }: GetDeprecations if (count > 0) { // Example of a manual correctiveAction deprecations.push({ - message: `You have ${count} Timelion worksheets. The Timelion app will be removed in 8.0. To continue using your Timelion worksheets, migrate them to a dashboard.`, + title: i18n.translate('xpack.timelion.deprecations.worksheetsTitle', { + defaultMessage: 'Found Timelion worksheets.' + }), + message: i18n.translate('xpack.timelion.deprecations.worksheetsMessage', { + defaultMessage: 'You have {count} Timelion worksheets. The Timelion app will be removed in 8.0. To continue using your Timelion worksheets, migrate them to a dashboard.', + values: { count }, + }), documentationUrl: 'https://www.elastic.co/guide/en/kibana/current/create-panels-with-timelion.html', level: 'warning', correctiveActions: { manualSteps: [ - 'Navigate to the Kibana Dashboard and click "Create dashboard".', - 'Select Timelion from the "New Visualization" window.', - 'Open a new tab, open the Timelion app, select the chart you want to copy, then copy the chart expression.', - 'Go to Timelion, paste the chart expression in the Timelion expression field, then click Update.', - 'In the toolbar, click Save.', - 'On the Save visualization window, enter the visualization Title, then click Save and return.', + i18n.translate('xpack.timelion.deprecations.worksheets.manualStepOneMessage', { + defaultMessage: 'Navigate to the Kibana Dashboard and click "Create dashboard".', + }), + i18n.translate('xpack.timelion.deprecations.worksheets.manualStepTwoMessage', { + defaultMessage: 'Select Timelion from the "New Visualization" window.', + }), ], + api: { + path: '/internal/security/users/test_dashboard_user', + method: 'POST', + body: { + username: 'test_dashboard_user', + roles: [ + "machine_learning_user", + "enrich_user", + "kibana_admin" + ], + full_name: "Alison Goryachev", + email: "alisongoryachev@gmail.com", + metadata: {}, + enabled: true + } + }, }, }); } - - // Example of an api correctiveAction - deprecations.push({ - "message": "User 'test_dashboard_user' is using a deprecated role: 'kibana_user'", - "documentationUrl": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-user.html", - "level": "critical", - "correctiveActions": { - "api": { - "path": "/internal/security/users/test_dashboard_user", - "method": "POST", - "body": { - "username": "test_dashboard_user", - "roles": [ - "machine_learning_user", - "enrich_user", - "kibana_admin" - ], - "full_name": "Alison Goryachev", - "email": "alisongoryachev@gmail.com", - "metadata": {}, - "enabled": true 
- } - }, - "manualSteps": [ - "Using Kibana user management, change all users using the kibana_user role to the kibana_admin role.", - "Using Kibana role-mapping management, change all role-mappings which assing the kibana_user role to the kibana_admin role." - ] - }, - }); - return deprecations; } diff --git a/docs/management/advanced-options.asciidoc b/docs/management/advanced-options.asciidoc index 49adc72bbe346..a4863bd60089b 100644 --- a/docs/management/advanced-options.asciidoc +++ b/docs/management/advanced-options.asciidoc @@ -186,7 +186,7 @@ Set to `true` to enable a dark mode for the {kib} UI. You must refresh the page to apply the setting. [[theme-version]]`theme:version`:: -Specifies the {kib} theme. If you change the setting, refresh the page to apply the setting. +Specifies the {kib} theme. If you change the setting, refresh the page to apply the setting. [[timepicker-quickranges]]`timepicker:quickRanges`:: The list of ranges to show in the Quick section of the time filter. This should @@ -214,7 +214,7 @@ truncation. When enabled, provides access to the experimental *Labs* features for *Canvas*. [[labs-dashboard-defer-below-fold]]`labs:dashboard:deferBelowFold`:: -When enabled, the panels that appear below the fold are loaded when they become visible on the dashboard. +When enabled, the panels that appear below the fold are loaded when they become visible on the dashboard. _Below the fold_ refers to panels that are not immediately visible when you open a dashboard, but become visible as you scroll. For additional information, refer to <>. [[labs-dashboard-enable-ui]]`labs:dashboard:enable_ui`:: @@ -240,7 +240,7 @@ Banners are a https://www.elastic.co/subscriptions[subscription feature]. [horizontal] [[banners-placement]]`banners:placement`:: -Set to `Top` to display a banner above the Elastic header for this space. Defaults to the value of +Set to `Top` to display a banner above the Elastic header for this space. Defaults to the value of the `xpack.banners.placement` configuration property. [[banners-textcontent]]`banners:textContent`:: @@ -443,6 +443,9 @@ The threshold above which {ml} job anomalies are displayed in the {security-app} A comma-delimited list of {es} indices from which the {security-app} collects events. +[[securitysolution-threatindices]]`securitySolution:defaultThreatIndex`:: +A comma-delimited list of Threat Intelligence indices from which the {security-app} collects indicators. + [[securitysolution-enablenewsfeed]]`securitySolution:enableNewsFeed`:: Enables the security news feed on the Security *Overview* page. @@ -544,4 +547,4 @@ only production-ready visualizations are available to users. [horizontal] [[telemetry-enabled-advanced-setting]]`telemetry:enabled`:: When enabled, helps improve the Elastic Stack by providing usage statistics for -basic features. This data will not be shared outside of Elastic. \ No newline at end of file +basic features. This data will not be shared outside of Elastic. diff --git a/docs/settings/reporting-settings.asciidoc b/docs/settings/reporting-settings.asciidoc index b339daf3d36f7..694f8c53f6745 100644 --- a/docs/settings/reporting-settings.asciidoc +++ b/docs/settings/reporting-settings.asciidoc @@ -281,16 +281,15 @@ NOTE: This setting exists for backwards compatibility, but is unused and hardcod [[reporting-advanced-settings]] ==== Security settings -[[xpack-reporting-roles-enabled]] `xpack.reporting.roles.enabled`:: -deprecated:[7.14.0,This setting must be set to `false` in 8.0.] 
When `true`, grants users access to the {report-features} by assigning reporting roles, specified by `xpack.reporting.roles.allow`. Granting access to users this way is deprecated. Set to `false` and use {kibana-ref}/kibana-privileges.html[{kib} privileges] instead. Defaults to `true`. +With Security enabled, Reporting has two forms of access control: each user can only access their own reports, and custom roles determine who has privilege to generate reports. When Reporting is configured with <>, you can control the spaces and applications where users are allowed to generate reports. [NOTE] ============================================================================ -In 7.x, the default value of `xpack.reporting.roles.enabled` is `true`. To migrate users to the -new method of securing access to *Reporting*, you must set `xpack.reporting.roles.enabled: false`. In the next major version of {kib}, `false` will be the only valid configuration. +The `xpack.reporting.roles` settings are for a deprecated system of access control in Reporting. These settings do not allow API keys to generate reports, and they do not allow {kib} application privileges. We recommend you explicitly turn off Reporting's deprecated access control feature by adding `xpack.reporting.roles.enabled: false` in kibana.yml. This will enable you to create custom roles that provide application privileges for reporting, as described in <>. ============================================================================ -`xpack.reporting.roles.allow`:: -deprecated:[7.14.0,This setting will be removed in 8.0.] Specifies the roles, in addition to superusers, that can generate reports, using the {ref}/security-api.html#security-role-apis[{es} role management APIs]. Requires `xpack.reporting.roles.enabled` to be `true`. Granting access to users this way is deprecated. Use {kibana-ref}/kibana-privileges.html[{kib} privileges] instead. Defaults to `[ "reporting_user" ]`. +[[xpack-reporting-roles-enabled]] `xpack.reporting.roles.enabled`:: +deprecated:[7.14.0,The default for this setting will be `false` in an upcoming version of {kib}.] Sets access control to a set of assigned reporting roles, specified by `xpack.reporting.roles.allow`. Defaults to `true`. -NOTE: Each user has access to only their own reports. +`xpack.reporting.roles.allow`:: +deprecated:[7.14.0] In addition to superusers, specifies the roles that can generate reports using the {ref}/security-api.html#security-role-apis[{es} role management APIs]. Requires `xpack.reporting.roles.enabled` to be `true`. Defaults to `[ "reporting_user" ]`. diff --git a/docs/setup/configuring-reporting.asciidoc b/docs/setup/configuring-reporting.asciidoc index 0dba7befa2931..6d209092d3338 100644 --- a/docs/setup/configuring-reporting.asciidoc +++ b/docs/setup/configuring-reporting.asciidoc @@ -41,11 +41,16 @@ To troubleshoot the problem, start the {kib} server with environment variables t [float] [[grant-user-access]] === Grant users access to reporting +When security is enabled, you grant users access to generate reports with <>, which allow you to create custom roles that control the spaces and applications where users generate reports. -When security is enabled, access to the {report-features} is controlled by roles and <>. With privileges, you can define custom roles that grant *Reporting* privileges as sub-features of {kib} applications. To grant users permission to generate reports and view their reports in *Reporting*, create and assign the reporting role. 
- - [[reporting-app-users]] -NOTE: In 7.12.0 and earlier, you grant access to the {report-features} by assigning users the `reporting_user` role in {es}. +. Enable application privileges in Reporting. To enable them, turn off the default user access control features in `kibana.yml`: ++ [source,yaml] ------------------------------------ xpack.reporting.roles.enabled: false ------------------------------------ ++ +NOTE: If you use the default settings, you can still create a custom role that grants reporting privileges. The default role is `reporting_user`. This behavior is deprecated: it does not allow application-level access controls for {report-features}, and it does not allow API keys or authentication tokens to authorize report generation. Refer to <> for information and caveats about the deprecated access control features. . Create the reporting role. @@ -90,10 +95,12 @@ If the *Reporting* option is unavailable, contact your administrator, or < Reporting*. Users can only access their own reports. + [float] [[reporting-roles-user-api]] ==== Grant access with the role API -You can also use the {ref}/security-api-put-role.html[role API] to grant access to the reporting features. Grant the reporting role to users in combination with other roles that grant read access to the data in {es}, and at least read access in the applications where users can generate reports. +With <> enabled in Reporting, you can also use the {ref}/security-api-put-role.html[role API] to grant access to the {report-features}. Grant custom reporting roles to users in combination with other roles that grant read access to the data in {es}, and at least read access in the applications where users can generate reports. [source, sh] --------------------------------------------------------------- diff --git a/docs/setup/settings.asciidoc b/docs/setup/settings.asciidoc index ac50062470d78..203339be638ab 100644 --- a/docs/setup/settings.asciidoc +++ b/docs/setup/settings.asciidoc @@ -406,7 +406,10 @@ override this parameter to use their own Tile Map Service. For example: `"https://tiles.elastic.co/v2/default/{z}/{x}/{y}.png?elastic_tile_service_tos=agree&my_app_name=kibana"` | `migrations.batchSize:` - | Defines the number of documents migrated at a time. The higher the value, the faster the Saved Objects migration process performs at the cost of higher memory consumption. If the migration fails due to a `circuit_breaking_exception`, set a smaller `batchSize` value. *Default: `1000`* + | Defines the number of documents migrated at a time. The higher the value, the faster the Saved Objects migration process performs at the cost of higher memory consumption. If an upgrade migration results in {kib} crashing with an out of memory exception or fails due to an Elasticsearch `circuit_breaking_exception`, use a smaller `batchSize` value to reduce the memory pressure. *Default: `1000`* + + | `migrations.maxBatchSizeBytes:` + | Defines the maximum payload size for indexing batches of upgraded saved objects to avoid migrations failing due to a 413 Request Entity Too Large response from Elasticsearch. This value should be lower than or equal to your Elasticsearch cluster's `http.max_content_length` configuration option. *Default: `100mb`* | `migrations.enableV2:` | experimental[]. Enables the new Saved Objects migration algorithm. For information about the migration algorithm, refer to <>. When `migrations v2` is stable, the setting will be removed in an upcoming release without any further notice. 
Setting the value to `false` causes {kib} to use the legacy migration algorithm, which shipped in 7.11 and earlier versions. *Default: `true`* diff --git a/package.json b/package.json index 836e5336b7b50..e603190c72698 100644 --- a/package.json +++ b/package.json @@ -89,7 +89,7 @@ "**/underscore": "^1.13.1" }, "engines": { - "node": "14.17.5", + "node": "14.17.6", "yarn": "^1.21.1" }, "dependencies": { @@ -100,7 +100,7 @@ "@elastic/datemath": "link:bazel-bin/packages/elastic-datemath", "@elastic/elasticsearch": "npm:@elastic/elasticsearch-canary@^8.0.0-canary.19", "@elastic/ems-client": "7.15.0", - "@elastic/eui": "37.3.0", + "@elastic/eui": "37.3.1", "@elastic/filesaver": "1.1.2", "@elastic/good": "^9.0.1-kibana3", "@elastic/maki": "6.3.0", @@ -655,6 +655,7 @@ "@types/yauzl": "^2.9.1", "@types/zen-observable": "^0.8.0", "@typescript-eslint/eslint-plugin": "^4.14.1", + "@typescript-eslint/typescript-estree": "^4.14.1", "@typescript-eslint/parser": "^4.14.1", "@yarnpkg/lockfile": "^1.1.0", "abab": "^2.0.4", @@ -725,6 +726,7 @@ "eslint-plugin-react": "^7.20.3", "eslint-plugin-react-hooks": "^4.2.0", "eslint-plugin-react-perf": "^3.2.3", + "eslint-traverse": "^1.0.0", "expose-loader": "^0.7.5", "faker": "^5.1.0", "fancy-log": "^1.3.2", diff --git a/packages/elastic-eslint-config-kibana/.eslintrc.js b/packages/elastic-eslint-config-kibana/.eslintrc.js index 1b3e852e5a502..38c0c43132564 100644 --- a/packages/elastic-eslint-config-kibana/.eslintrc.js +++ b/packages/elastic-eslint-config-kibana/.eslintrc.js @@ -90,5 +90,7 @@ module.exports = { }, ], ], + + '@kbn/eslint/no_async_promise_body': 'error', }, }; diff --git a/packages/kbn-config/BUILD.bazel b/packages/kbn-config/BUILD.bazel index 75e4428ed2d70..e0cf4d2205d65 100644 --- a/packages/kbn-config/BUILD.bazel +++ b/packages/kbn-config/BUILD.bazel @@ -35,6 +35,7 @@ RUNTIME_DEPS = [ "//packages/kbn-logging", "//packages/kbn-std", "//packages/kbn-utility-types", + "//packages/kbn-i18n", "@npm//js-yaml", "@npm//load-json-file", "@npm//lodash", @@ -48,6 +49,7 @@ TYPES_DEPS = [ "//packages/kbn-logging", "//packages/kbn-std", "//packages/kbn-utility-types", + "//packages/kbn-i18n", "@npm//load-json-file", "@npm//rxjs", "@npm//@types/jest", diff --git a/packages/kbn-config/src/deprecation/deprecation_factory.test.ts b/packages/kbn-config/src/deprecation/deprecation_factory.test.ts index 563d4017f5ed9..0a605cbc1c532 100644 --- a/packages/kbn-config/src/deprecation/deprecation_factory.test.ts +++ b/packages/kbn-config/src/deprecation/deprecation_factory.test.ts @@ -48,7 +48,8 @@ describe('DeprecationFactory', () => { "Replace \\"myplugin.deprecated\\" with \\"myplugin.renamed\\" in the Kibana config file, CLI flag, or environment variable (in Docker only).", ], }, - "message": "\\"myplugin.deprecated\\" is deprecated and has been replaced by \\"myplugin.renamed\\"", + "message": "Setting \\"myplugin.deprecated\\" has been replaced by \\"myplugin.renamed\\"", + "title": "Setting \\"myplugin.deprecated\\" is deprecated", }, ], ] @@ -103,7 +104,8 @@ describe('DeprecationFactory', () => { "Replace \\"myplugin.oldsection.deprecated\\" with \\"myplugin.newsection.renamed\\" in the Kibana config file, CLI flag, or environment variable (in Docker only).", ], }, - "message": "\\"myplugin.oldsection.deprecated\\" is deprecated and has been replaced by \\"myplugin.newsection.renamed\\"", + "message": "Setting \\"myplugin.oldsection.deprecated\\" has been replaced by \\"myplugin.newsection.renamed\\"", + "title": "Setting \\"myplugin.oldsection.deprecated\\" is 
deprecated", }, ], ] @@ -130,7 +132,8 @@ describe('DeprecationFactory', () => { "Remove \\"myplugin.deprecated\\" from the config.", ], }, - "message": "\\"myplugin.deprecated\\" is deprecated and has been replaced by \\"myplugin.renamed\\". However both key are present, ignoring \\"myplugin.deprecated\\"", + "message": "Setting \\"$myplugin.deprecated\\" has been replaced by \\"$myplugin.renamed\\". However, both keys are present. Ignoring \\"$myplugin.deprecated\\"", + "title": "Setting \\"myplugin.deprecated\\" is deprecated", }, ], ] @@ -172,7 +175,8 @@ describe('DeprecationFactory', () => { "Replace \\"myplugin.deprecated\\" with \\"myplugin.renamed\\" in the Kibana config file, CLI flag, or environment variable (in Docker only).", ], }, - "message": "\\"myplugin.deprecated\\" is deprecated and has been replaced by \\"myplugin.renamed\\"", + "message": "Setting \\"myplugin.deprecated\\" has been replaced by \\"myplugin.renamed\\"", + "title": "Setting \\"myplugin.deprecated\\" is deprecated", }, ], ] @@ -212,7 +216,8 @@ describe('DeprecationFactory', () => { "Replace \\"oldplugin.deprecated\\" with \\"newplugin.renamed\\" in the Kibana config file, CLI flag, or environment variable (in Docker only).", ], }, - "message": "\\"oldplugin.deprecated\\" is deprecated and has been replaced by \\"newplugin.renamed\\"", + "message": "Setting \\"oldplugin.deprecated\\" has been replaced by \\"newplugin.renamed\\"", + "title": "Setting \\"oldplugin.deprecated\\" is deprecated", }, ], ] @@ -264,7 +269,8 @@ describe('DeprecationFactory', () => { "Remove \\"myplugin.deprecated\\" from the config.", ], }, - "message": "\\"myplugin.deprecated\\" is deprecated and has been replaced by \\"myplugin.renamed\\". However both key are present, ignoring \\"myplugin.deprecated\\"", + "message": "Setting \\"$myplugin.deprecated\\" has been replaced by \\"$myplugin.renamed\\". However, both keys are present. 
Ignoring \\"$myplugin.deprecated\\"", + "title": "Setting \\"myplugin.deprecated\\" is deprecated", }, ], ] @@ -293,10 +299,11 @@ describe('DeprecationFactory', () => { Object { "correctiveActions": Object { "manualSteps": Array [ - "Remove \\"myplugin.deprecated\\" from the Kibana config file, CLI flag, or environment variable (in Docker only)", + "Remove \\"myplugin.deprecated\\" from the Kibana config file, CLI flag, or environment variable (in Docker only).", ], }, - "message": "myplugin.deprecated is deprecated and is no longer used", + "message": "You no longer need to configure \\"myplugin.deprecated\\".", + "title": "Setting \\"myplugin.deprecated\\" is deprecated", }, ], ] @@ -325,10 +332,11 @@ describe('DeprecationFactory', () => { Object { "correctiveActions": Object { "manualSteps": Array [ - "Remove \\"myplugin.section.deprecated\\" from the Kibana config file, CLI flag, or environment variable (in Docker only)", + "Remove \\"myplugin.section.deprecated\\" from the Kibana config file, CLI flag, or environment variable (in Docker only).", ], }, - "message": "myplugin.section.deprecated is deprecated and is no longer used", + "message": "You no longer need to configure \\"myplugin.section.deprecated\\".", + "title": "Setting \\"myplugin.section.deprecated\\" is deprecated", }, ], ] @@ -375,10 +383,11 @@ describe('DeprecationFactory', () => { Object { "correctiveActions": Object { "manualSteps": Array [ - "Remove \\"myplugin.deprecated\\" from the Kibana config file, CLI flag, or environment variable (in Docker only)", + "Remove \\"myplugin.deprecated\\" from the Kibana config file, CLI flag, or environment variable (in Docker only).", ], }, - "message": "myplugin.deprecated is deprecated and is no longer used", + "message": "You no longer need to configure \\"myplugin.deprecated\\".", + "title": "Setting \\"myplugin.deprecated\\" is deprecated", }, ], ] diff --git a/packages/kbn-config/src/deprecation/deprecation_factory.ts b/packages/kbn-config/src/deprecation/deprecation_factory.ts index 76bcc1958d0de..6d7669cef04f2 100644 --- a/packages/kbn-config/src/deprecation/deprecation_factory.ts +++ b/packages/kbn-config/src/deprecation/deprecation_factory.ts @@ -7,6 +7,8 @@ */ import { get } from 'lodash'; +import { i18n } from '@kbn/i18n'; + import { ConfigDeprecation, AddConfigDeprecation, @@ -15,6 +17,13 @@ import { ConfigDeprecationCommand, } from './types'; +const getDeprecationTitle = (deprecationPath: string) => { + return i18n.translate('kbnConfig.deprecations.deprecatedSettingTitle', { + defaultMessage: 'Setting "{deprecationPath}" is deprecated', + values: { deprecationPath }, + }); +}; + const _rename = ( config: Record, rootPath: string, @@ -33,10 +42,18 @@ const _rename = ( const newValue = get(config, fullNewPath); if (newValue === undefined) { addDeprecation({ - message: `"${fullOldPath}" is deprecated and has been replaced by "${fullNewPath}"`, + title: getDeprecationTitle(fullOldPath), + message: i18n.translate('kbnConfig.deprecations.replacedSettingMessage', { + defaultMessage: `Setting "{fullOldPath}" has been replaced by "{fullNewPath}"`, + values: { fullOldPath, fullNewPath }, + }), correctiveActions: { manualSteps: [ - `Replace "${fullOldPath}" with "${fullNewPath}" in the Kibana config file, CLI flag, or environment variable (in Docker only).`, + i18n.translate('kbnConfig.deprecations.replacedSetting.manualStepOneMessage', { + defaultMessage: + 'Replace "{fullOldPath}" with "{fullNewPath}" in the Kibana config file, CLI flag, or environment variable (in Docker 
only).', + values: { fullOldPath, fullNewPath }, + }), ], }, ...details, @@ -47,11 +64,23 @@ const _rename = ( }; } else { addDeprecation({ - message: `"${fullOldPath}" is deprecated and has been replaced by "${fullNewPath}". However both key are present, ignoring "${fullOldPath}"`, + title: getDeprecationTitle(fullOldPath), + message: i18n.translate('kbnConfig.deprecations.conflictSettingMessage', { + defaultMessage: + 'Setting "${fullOldPath}" has been replaced by "${fullNewPath}". However, both keys are present. Ignoring "${fullOldPath}"', + values: { fullOldPath, fullNewPath }, + }), correctiveActions: { manualSteps: [ - `Make sure "${fullNewPath}" contains the correct value in the config file, CLI flag, or environment variable (in Docker only).`, - `Remove "${fullOldPath}" from the config.`, + i18n.translate('kbnConfig.deprecations.conflictSetting.manualStepOneMessage', { + defaultMessage: + 'Make sure "{fullNewPath}" contains the correct value in the config file, CLI flag, or environment variable (in Docker only).', + values: { fullNewPath }, + }), + i18n.translate('kbnConfig.deprecations.conflictSetting.manualStepTwoMessage', { + defaultMessage: 'Remove "{fullOldPath}" from the config.', + values: { fullOldPath }, + }), ], }, ...details, @@ -75,10 +104,18 @@ const _unused = ( return; } addDeprecation({ - message: `${fullPath} is deprecated and is no longer used`, + title: getDeprecationTitle(fullPath), + message: i18n.translate('kbnConfig.deprecations.unusedSettingMessage', { + defaultMessage: 'You no longer need to configure "{fullPath}".', + values: { fullPath }, + }), correctiveActions: { manualSteps: [ - `Remove "${fullPath}" from the Kibana config file, CLI flag, or environment variable (in Docker only)`, + i18n.translate('kbnConfig.deprecations.unusedSetting.manualStepOneMessage', { + defaultMessage: + 'Remove "{fullPath}" from the Kibana config file, CLI flag, or environment variable (in Docker only).', + values: { fullPath }, + }), ], }, ...details, diff --git a/packages/kbn-config/src/deprecation/types.ts b/packages/kbn-config/src/deprecation/types.ts index 1791dac060e2b..007c3ec54113b 100644 --- a/packages/kbn-config/src/deprecation/types.ts +++ b/packages/kbn-config/src/deprecation/types.ts @@ -19,6 +19,8 @@ export type AddConfigDeprecation = (details: DeprecatedConfigDetails) => void; * @public */ export interface DeprecatedConfigDetails { + /* The title to be displayed for the deprecation. */ + title?: string; /* The message to be displayed for the deprecation. */ message: string; /* (optional) set false to prevent the config service from logging the deprecation message. */ diff --git a/packages/kbn-dev-utils/src/ci_stats_reporter/ci_stats_reporter.ts b/packages/kbn-dev-utils/src/ci_stats_reporter/ci_stats_reporter.ts index 7847cad0fd5e7..0584ee27aa5f6 100644 --- a/packages/kbn-dev-utils/src/ci_stats_reporter/ci_stats_reporter.ts +++ b/packages/kbn-dev-utils/src/ci_stats_reporter/ci_stats_reporter.ts @@ -221,11 +221,12 @@ export class CiStatsReporter { ? 
`${error.response.status} response` : 'no response'; + const seconds = attempt * 10; this.log.warning( - `failed to reach ci-stats service [reason=${reason}], retrying in ${attempt} seconds` + `failed to reach ci-stats service, retrying in ${seconds} seconds, [reason=${reason}], [error=${error.message}]` ); - await new Promise((resolve) => setTimeout(resolve, attempt * 1000)); + await new Promise((resolve) => setTimeout(resolve, seconds * 1000)); } } } diff --git a/packages/kbn-eslint-plugin-eslint/index.js b/packages/kbn-eslint-plugin-eslint/index.js index e5a38e5f09529..a7a9c6b5bebdf 100644 --- a/packages/kbn-eslint-plugin-eslint/index.js +++ b/packages/kbn-eslint-plugin-eslint/index.js @@ -12,5 +12,6 @@ module.exports = { 'disallow-license-headers': require('./rules/disallow_license_headers'), 'no-restricted-paths': require('./rules/no_restricted_paths'), module_migration: require('./rules/module_migration'), + no_async_promise_body: require('./rules/no_async_promise_body'), }, }; diff --git a/packages/kbn-eslint-plugin-eslint/rules/no_async_promise_body.js b/packages/kbn-eslint-plugin-eslint/rules/no_async_promise_body.js new file mode 100644 index 0000000000000..317758fd3629a --- /dev/null +++ b/packages/kbn-eslint-plugin-eslint/rules/no_async_promise_body.js @@ -0,0 +1,165 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +const { parseExpression } = require('@babel/parser'); +const { default: generate } = require('@babel/generator'); +const tsEstree = require('@typescript-eslint/typescript-estree'); +const traverse = require('eslint-traverse'); +const esTypes = tsEstree.AST_NODE_TYPES; +const babelTypes = require('@babel/types'); + +/** @typedef {import("eslint").Rule.RuleModule} Rule */ +/** @typedef {import("@typescript-eslint/parser").ParserServices} ParserServices */ +/** @typedef {import("@typescript-eslint/typescript-estree").TSESTree.Expression} Expression */ +/** @typedef {import("@typescript-eslint/typescript-estree").TSESTree.ArrowFunctionExpression} ArrowFunctionExpression */ +/** @typedef {import("@typescript-eslint/typescript-estree").TSESTree.FunctionExpression} FunctionExpression */ +/** @typedef {import("@typescript-eslint/typescript-estree").TSESTree.TryStatement} TryStatement */ +/** @typedef {import("@typescript-eslint/typescript-estree").TSESTree.NewExpression} NewExpression */ +/** @typedef {import("typescript").ExportDeclaration} ExportDeclaration */ +/** @typedef {import("eslint").Rule.RuleFixer} Fixer */ + +const ERROR_MSG = + 'Passing an async function to the Promise constructor leads to a hidden promise being created and prevents handling rejections'; + +/** + * @param {Expression} node + */ +const isPromise = (node) => node.type === esTypes.Identifier && node.name === 'Promise'; + +/** + * @param {Expression} node + * @returns {node is ArrowFunctionExpression | FunctionExpression} + */ +const isFunc = (node) => + node.type === esTypes.ArrowFunctionExpression || node.type === esTypes.FunctionExpression; + +/** + * @param {any} context + * @param {ArrowFunctionExpression | FunctionExpression} node + */ +const isFuncBodySafe = (context, node) => { + // if the body isn't wrapped in a blockStatement it can't have a try/catch at the root + if 
(node.body.type !== esTypes.BlockStatement) { + return false; + } + + // when the entire body is wrapped in a try/catch it is the only node + if (node.body.body.length !== 1) { + return false; + } + + const tryNode = node.body.body[0]; + // ensure we have a try node with a handler + if (tryNode.type !== esTypes.TryStatement || !tryNode.handler) { + return false; + } + + // ensure the handler doesn't throw + let hasThrow = false; + traverse(context, tryNode.handler, (path) => { + if (path.node.type === esTypes.ThrowStatement) { + hasThrow = true; + return traverse.STOP; + } + }); + return !hasThrow; +}; + +/** + * @param {string} code + */ +const wrapFunctionInTryCatch = (code) => { + // parse the code with babel so we can mutate the AST + const ast = parseExpression(code, { + plugins: ['typescript', 'jsx'], + }); + + // validate that the code represents an arrow or function expression + if (!babelTypes.isArrowFunctionExpression(ast) && !babelTypes.isFunctionExpression(ast)) { + throw new Error('expected function to be an arrow or function expression'); + } + + // ensure that the function receives the second argument, and capture its name if already defined + let rejectName = 'reject'; + if (ast.params.length === 0) { + ast.params.push(babelTypes.identifier('resolve'), babelTypes.identifier(rejectName)); + } else if (ast.params.length === 1) { + ast.params.push(babelTypes.identifier(rejectName)); + } else if (ast.params.length === 2) { + if (babelTypes.isIdentifier(ast.params[1])) { + rejectName = ast.params[1].name; + } else { + throw new Error('expected second param of promise definition function to be an identifier'); + } + } + + // ensure that the body of the function is a blockStatement + let block = ast.body; + if (!babelTypes.isBlockStatement(block)) { + block = babelTypes.blockStatement([babelTypes.returnStatement(block)]); + } + + // redefine the body of the function as a new blockStatement containing a tryStatement + // which catches errors and forwards them to reject() when caught + ast.body = babelTypes.blockStatement([ + // try { + babelTypes.tryStatement( + block, + // catch (error) { + babelTypes.catchClause( + babelTypes.identifier('error'), + babelTypes.blockStatement([ + // reject(error) + babelTypes.expressionStatement( + babelTypes.callExpression(babelTypes.identifier(rejectName), [ + babelTypes.identifier('error'), + ]) + ), + ]) + ) + ), + ]); + + return generate(ast).code; +}; + +/** @type {Rule} */ +module.exports = { + meta: { + fixable: 'code', + schema: [], + }, + create: (context) => ({ + NewExpression(_) { + const node = /** @type {NewExpression} */ (_); + + // ensure we are newing up a promise with a single argument + if (!isPromise(node.callee) || node.arguments.length !== 1) { + return; + } + + const func = node.arguments[0]; + // ensure the argument is an arrow or function expression and is async + if (!isFunc(func) || !func.async) { + return; + } + + // body must be a blockStatement, try/catch can't exist outside of a block + if (!isFuncBodySafe(context, func)) { + context.report({ + message: ERROR_MSG, + loc: func.loc, + fix(fixer) { + const source = context.getSourceCode(); + return fixer.replaceText(func, wrapFunctionInTryCatch(source.getText(func))); + }, + }); + } + }, + }), +}; diff --git a/packages/kbn-eslint-plugin-eslint/rules/no_async_promise_body.test.js b/packages/kbn-eslint-plugin-eslint/rules/no_async_promise_body.test.js new file mode 100644 index 0000000000000..f5929b1b3966f --- /dev/null +++ 
b/packages/kbn-eslint-plugin-eslint/rules/no_async_promise_body.test.js @@ -0,0 +1,254 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +const { RuleTester } = require('eslint'); +const rule = require('./no_async_promise_body'); +const dedent = require('dedent'); + +const ruleTester = new RuleTester({ + parser: require.resolve('@typescript-eslint/parser'), + parserOptions: { + sourceType: 'module', + ecmaVersion: 2018, + ecmaFeatures: { + jsx: true, + }, + }, +}); + +ruleTester.run('@kbn/eslint/no_async_promise_body', rule, { + valid: [ + // caught but no resolve + { + code: dedent` + new Promise(async function (resolve) { + try { + await asyncOperation(); + } catch (error) { + // noop + } + }) + `, + }, + // arrow caught but no resolve + { + code: dedent` + new Promise(async (resolve) => { + try { + await asyncOperation(); + } catch (error) { + // noop + } + }) + `, + }, + // caught with reject + { + code: dedent` + new Promise(async function (resolve, reject) { + try { + await asyncOperation(); + } catch (error) { + reject(error) + } + }) + `, + }, + // arrow caught with reject + { + code: dedent` + new Promise(async (resolve, reject) => { + try { + await asyncOperation(); + } catch (error) { + reject(error) + } + }) + `, + }, + // non async + { + code: dedent` + new Promise(function (resolve) { + setTimeout(resolve, 10); + }) + `, + }, + // arrow non async + { + code: dedent` + new Promise((resolve) => setTimeout(resolve, 10)) + `, + }, + ], + + invalid: [ + // no catch + { + code: dedent` + new Promise(async function (resolve) { + const result = await asyncOperation(); + resolve(result); + }) + `, + errors: [ + { + line: 1, + message: + 'Passing an async function to the Promise constructor leads to a hidden promise being created and prevents handling rejections', + }, + ], + output: dedent` + new Promise(async function (resolve, reject) { + try { + const result = await asyncOperation(); + resolve(result); + } catch (error) { + reject(error); + } + }) + `, + }, + // arrow no catch + { + code: dedent` + new Promise(async (resolve) => { + const result = await asyncOperation(); + resolve(result); + }) + `, + errors: [ + { + line: 1, + message: + 'Passing an async function to the Promise constructor leads to a hidden promise being created and prevents handling rejections', + }, + ], + output: dedent` + new Promise(async (resolve, reject) => { + try { + const result = await asyncOperation(); + resolve(result); + } catch (error) { + reject(error); + } + }) + `, + }, + // catch, but it throws + { + code: dedent` + new Promise(async function (resolve) { + try { + const result = await asyncOperation(); + resolve(result); + } catch (error) { + if (error.code === 'foo') { + throw error; + } + } + }) + `, + errors: [ + { + line: 1, + message: + 'Passing an async function to the Promise constructor leads to a hidden promise being created and prevents handling rejections', + }, + ], + output: dedent` + new Promise(async function (resolve, reject) { + try { + try { + const result = await asyncOperation(); + resolve(result); + } catch (error) { + if (error.code === 'foo') { + throw error; + } + } + } catch (error) { + reject(error); + } + }) + `, + }, + // no catch without block + { + code: 
dedent` + new Promise(async (resolve) => resolve(await asyncOperation())); + `, + errors: [ + { + line: 1, + message: + 'Passing an async function to the Promise constructor leads to a hidden promise being created and prevents handling rejections', + }, + ], + output: dedent` + new Promise(async (resolve, reject) => { + try { + return resolve(await asyncOperation()); + } catch (error) { + reject(error); + } + }); + `, + }, + // no catch with named reject + { + code: dedent` + new Promise(async (resolve, rej) => { + const result = await asyncOperation(); + result ? resolve(true) : rej() + }); + `, + errors: [ + { + line: 1, + message: + 'Passing an async function to the Promise constructor leads to a hidden promise being created and prevents handling rejections', + }, + ], + output: dedent` + new Promise(async (resolve, rej) => { + try { + const result = await asyncOperation(); + result ? resolve(true) : rej(); + } catch (error) { + rej(error); + } + }); + `, + }, + // no catch with no args + { + code: dedent` + new Promise(async () => { + await asyncOperation(); + }); + `, + errors: [ + { + line: 1, + message: + 'Passing an async function to the Promise constructor leads to a hidden promise being created and prevents handling rejections', + }, + ], + output: dedent` + new Promise(async (resolve, reject) => { + try { + await asyncOperation(); + } catch (error) { + reject(error); + } + }); + `, + }, + ], +}); diff --git a/packages/kbn-pm/dist/index.js b/packages/kbn-pm/dist/index.js index f0a95a612f02c..c7f4bbe253777 100644 --- a/packages/kbn-pm/dist/index.js +++ b/packages/kbn-pm/dist/index.js @@ -59656,8 +59656,9 @@ class CiStatsReporter { const reason = error !== null && error !== void 0 && (_error$response = error.response) !== null && _error$response !== void 0 && _error$response.status ? 
`${error.response.status} response` : 'no response'; - this.log.warning(`failed to reach ci-stats service [reason=${reason}], retrying in ${attempt} seconds`); - await new Promise(resolve => setTimeout(resolve, attempt * 1000)); + const seconds = attempt * 10; + this.log.warning(`failed to reach ci-stats service, retrying in ${seconds} seconds, [reason=${reason}], [error=${error.message}]`); + await new Promise(resolve => setTimeout(resolve, seconds * 1000)); } } } diff --git a/packages/kbn-rule-data-utils/src/technical_field_names.ts b/packages/kbn-rule-data-utils/src/technical_field_names.ts index fa3d61d00529c..86a036bbb9fe2 100644 --- a/packages/kbn-rule-data-utils/src/technical_field_names.ts +++ b/packages/kbn-rule-data-utils/src/technical_field_names.ts @@ -28,7 +28,7 @@ const ALERT_DURATION = `${ALERT_NAMESPACE}.duration.us` as const; const ALERT_END = `${ALERT_NAMESPACE}.end` as const; const ALERT_EVALUATION_THRESHOLD = `${ALERT_NAMESPACE}.evaluation.threshold` as const; const ALERT_EVALUATION_VALUE = `${ALERT_NAMESPACE}.evaluation.value` as const; -const ALERT_ID = `${ALERT_NAMESPACE}.id` as const; +const ALERT_INSTANCE_ID = `${ALERT_NAMESPACE}.instance.id` as const; const ALERT_REASON = `${ALERT_NAMESPACE}.reason` as const; const ALERT_RISK_SCORE = `${ALERT_NAMESPACE}.risk_score` as const; const ALERT_SEVERITY = `${ALERT_NAMESPACE}.severity` as const; @@ -94,7 +94,7 @@ const fields = { ALERT_END, ALERT_EVALUATION_THRESHOLD, ALERT_EVALUATION_VALUE, - ALERT_ID, + ALERT_INSTANCE_ID, ALERT_RULE_CONSUMER, ALERT_RULE_PRODUCER, ALERT_REASON, @@ -143,7 +143,7 @@ export { ALERT_END, ALERT_EVALUATION_THRESHOLD, ALERT_EVALUATION_VALUE, - ALERT_ID, + ALERT_INSTANCE_ID, ALERT_NAMESPACE, ALERT_RULE_NAMESPACE, ALERT_RULE_CONSUMER, diff --git a/renovate.json5 b/renovate.json5 index 5ea38e589da4d..b1464ad5040f0 100644 --- a/renovate.json5 +++ b/renovate.json5 @@ -1,6 +1,7 @@ { extends: [ 'config:base', + ':disableDependencyDashboard', ], ignorePaths: [ '**/__fixtures__/**', @@ -12,12 +13,11 @@ baseBranches: [ 'master', '7.x', - '7.13', + '7.15', ], prConcurrentLimit: 0, prHourlyLimit: 0, separateMajorMinor: false, - masterIssue: true, rangeStrategy: 'bump', semanticCommits: false, vulnerabilityAlerts: { @@ -39,7 +39,7 @@ packageNames: ['@elastic/charts'], reviewers: ['markov00', 'nickofthyme'], matchBaseBranches: ['master'], - labels: ['release_note:skip', 'v8.0.0', 'v7.14.0', 'auto-backport'], + labels: ['release_note:skip', 'v8.0.0', 'v7.16.0', 'auto-backport'], enabled: true, }, { diff --git a/src/core/public/application/integration_tests/utils.tsx b/src/core/public/application/integration_tests/utils.tsx index dcf071719c11a..455d19956f7e8 100644 --- a/src/core/public/application/integration_tests/utils.tsx +++ b/src/core/public/application/integration_tests/utils.tsx @@ -21,13 +21,18 @@ export const createRenderer = (element: ReactElement | null): Renderer => { const dom: Dom = element && mount({element}); return () => - new Promise(async (resolve) => { - if (dom) { - await act(async () => { - dom.update(); - }); + new Promise(async (resolve, reject) => { + try { + if (dom) { + await act(async () => { + dom.update(); + }); + } + + setImmediate(() => resolve(dom)); // flushes any pending promises + } catch (error) { + reject(error); } - setImmediate(() => resolve(dom)); // flushes any pending promises }); }; diff --git a/src/core/public/application/ui/app_container.test.tsx b/src/core/public/application/ui/app_container.test.tsx index 86cb9198e0699..4c056e748f06e 100644 --- 
a/src/core/public/application/ui/app_container.test.tsx +++ b/src/core/public/application/ui/app_container.test.tsx @@ -27,8 +27,12 @@ describe('AppContainer', () => { }); const flushPromises = async () => { - await new Promise(async (resolve) => { - setImmediate(() => resolve()); + await new Promise(async (resolve, reject) => { + try { + setImmediate(() => resolve()); + } catch (error) { + reject(error); + } }); }; diff --git a/src/core/public/chrome/ui/header/header_action_menu.test.tsx b/src/core/public/chrome/ui/header/header_action_menu.test.tsx index 386e48e745e80..201be8848bac8 100644 --- a/src/core/public/chrome/ui/header/header_action_menu.test.tsx +++ b/src/core/public/chrome/ui/header/header_action_menu.test.tsx @@ -26,13 +26,18 @@ describe('HeaderActionMenu', () => { }); const refresh = () => { - new Promise(async (resolve) => { - if (component) { - act(() => { - component.update(); - }); + new Promise(async (resolve, reject) => { + try { + if (component) { + act(() => { + component.update(); + }); + } + + setImmediate(() => resolve(component)); // flushes any pending promises + } catch (error) { + reject(error); } - setImmediate(() => resolve(component)); // flushes any pending promises }); }; diff --git a/src/core/public/deprecations/deprecations_client.test.ts b/src/core/public/deprecations/deprecations_client.test.ts index a998a03772cca..cca81f4687a97 100644 --- a/src/core/public/deprecations/deprecations_client.test.ts +++ b/src/core/public/deprecations/deprecations_client.test.ts @@ -82,6 +82,7 @@ describe('DeprecationsClient', () => { it('returns true if deprecation has correctiveActions.api', async () => { const deprecationsClient = new DeprecationsClient({ http }); const mockDeprecationDetails: DomainDeprecationDetails = { + title: 'some-title', domainId: 'testPluginId-1', message: 'some-message', level: 'warning', @@ -102,6 +103,7 @@ describe('DeprecationsClient', () => { it('returns false if deprecation is missing correctiveActions.api', async () => { const deprecationsClient = new DeprecationsClient({ http }); const mockDeprecationDetails: DomainDeprecationDetails = { + title: 'some-title', domainId: 'testPluginId-1', message: 'some-message', level: 'warning', @@ -120,6 +122,7 @@ describe('DeprecationsClient', () => { it('fails if deprecation is not resolvable', async () => { const deprecationsClient = new DeprecationsClient({ http }); const mockDeprecationDetails: DomainDeprecationDetails = { + title: 'some-title', domainId: 'testPluginId-1', message: 'some-message', level: 'warning', @@ -129,15 +132,18 @@ describe('DeprecationsClient', () => { }; const result = await deprecationsClient.resolveDeprecation(mockDeprecationDetails); - expect(result).toEqual({ - status: 'fail', - reason: 'deprecation has no correctiveAction via api.', - }); + expect(result).toMatchInlineSnapshot(` + Object { + "reason": "This deprecation cannot be resolved automatically.", + "status": "fail", + } + `); }); it('fetches the deprecation api', async () => { const deprecationsClient = new DeprecationsClient({ http }); const mockDeprecationDetails: DomainDeprecationDetails = { + title: 'some-title', domainId: 'testPluginId-1', message: 'some-message', level: 'warning', @@ -171,6 +177,7 @@ describe('DeprecationsClient', () => { const deprecationsClient = new DeprecationsClient({ http }); const mockResponse = 'Failed to fetch'; const mockDeprecationDetails: DomainDeprecationDetails = { + title: 'some-title', domainId: 'testPluginId-1', message: 'some-message', level: 'warning', diff --git 
a/src/core/public/deprecations/deprecations_client.ts b/src/core/public/deprecations/deprecations_client.ts index e510ab1e79d17..4b9cfca1986ba 100644 --- a/src/core/public/deprecations/deprecations_client.ts +++ b/src/core/public/deprecations/deprecations_client.ts @@ -6,6 +6,7 @@ * Side Public License, v 1. */ +import { i18n } from '@kbn/i18n'; import type { HttpStart } from '../http'; import type { DomainDeprecationDetails, DeprecationsGetResponse } from '../../server/types'; @@ -52,7 +53,9 @@ export class DeprecationsClient { if (typeof correctiveActions.api !== 'object') { return { status: 'fail', - reason: 'deprecation has no correctiveAction via api.', + reason: i18n.translate('core.deprecations.noCorrectiveAction', { + defaultMessage: 'This deprecation cannot be resolved automatically.', + }), }; } diff --git a/src/core/public/doc_links/doc_links_service.ts b/src/core/public/doc_links/doc_links_service.ts index 4b1aaf9eb19c1..f3ef7c550e57d 100644 --- a/src/core/public/doc_links/doc_links_service.ts +++ b/src/core/public/doc_links/doc_links_service.ts @@ -30,6 +30,10 @@ export class DocLinksService { ELASTIC_WEBSITE_URL, links: { settings: `${ELASTIC_WEBSITE_URL}guide/en/kibana/${DOC_LINK_VERSION}/settings.html`, + apm: { + kibanaSettings: `${KIBANA_DOCS}apm-settings-in-kibana.html`, + supportedServiceMaps: `${KIBANA_DOCS}service-maps.html#service-maps-supported`, + }, canvas: { guide: `${KIBANA_DOCS}canvas.html`, }, @@ -204,6 +208,7 @@ export class DocLinksService { siem: { guide: `${ELASTIC_WEBSITE_URL}guide/en/security/${DOC_LINK_VERSION}/index.html`, gettingStarted: `${ELASTIC_WEBSITE_URL}guide/en/security/${DOC_LINK_VERSION}/index.html`, + privileges: `${ELASTIC_WEBSITE_URL}guide/en/security/${DOC_LINK_VERSION}/sec-requirements.html`, ml: `${ELASTIC_WEBSITE_URL}guide/en/security/${DOC_LINK_VERSION}/machine-learning.html`, ruleChangeLog: `${ELASTIC_WEBSITE_URL}guide/en/security/${DOC_LINK_VERSION}/prebuilt-rules-changelog.html`, detectionsReq: `${ELASTIC_WEBSITE_URL}guide/en/security/${DOC_LINK_VERSION}/detections-permissions-section.html`, @@ -450,6 +455,10 @@ export interface DocLinksStart { readonly ELASTIC_WEBSITE_URL: string; readonly links: { readonly settings: string; + readonly apm: { + readonly kibanaSettings: string; + readonly supportedServiceMaps: string; + }; readonly canvas: { readonly guide: string; }; @@ -569,6 +578,7 @@ export interface DocLinksStart { readonly rollupJobs: string; readonly elasticsearch: Record; readonly siem: { + readonly privileges: string; readonly guide: string; readonly gettingStarted: string; readonly ml: string; diff --git a/src/core/public/i18n/__snapshots__/i18n_service.test.tsx.snap b/src/core/public/i18n/__snapshots__/i18n_service.test.tsx.snap index 4ef5eb8f56d2f..54e223cdc5d41 100644 --- a/src/core/public/i18n/__snapshots__/i18n_service.test.tsx.snap +++ b/src/core/public/i18n/__snapshots__/i18n_service.test.tsx.snap @@ -57,7 +57,7 @@ exports[`#start() returns \`Context\` component 1`] = ` "euiColumnSelector.searchcolumns": "Search columns", "euiColumnSelector.selectAll": "Show all", "euiColumnSorting.button": "Sort fields", - "euiColumnSorting.buttonActive": "fields sorted", + "euiColumnSorting.buttonActive": [Function], "euiColumnSorting.clearAll": "Clear sorting", "euiColumnSorting.emptySorting": "Currently no fields are sorted", "euiColumnSorting.pickFields": "Pick fields to sort by", @@ -104,9 +104,11 @@ exports[`#start() returns \`Context\` component 1`] = ` "euiFieldPassword.maskPassword": "Mask password", 
"euiFieldPassword.showPassword": "Show password as plain text. Note: this will visually expose your password on the screen.", "euiFilePicker.clearSelectedFiles": "Clear selected files", - "euiFilePicker.filesSelected": "files selected", + "euiFilePicker.filesSelected": [Function], + "euiFilePicker.promptText": "Select or drag and drop a file", "euiFilePicker.removeSelected": "Remove", - "euiFilterButton.filterBadge": [Function], + "euiFilterButton.filterBadgeActiveAriaLabel": [Function], + "euiFilterButton.filterBadgeAvailableAriaLabel": [Function], "euiFlyout.closeAriaLabel": "Close this dialog", "euiForm.addressFormErrors": "Please address the highlighted errors.", "euiFormControlLayoutClearButton.label": "Clear input", diff --git a/src/core/public/i18n/i18n_eui_mapping.test.ts b/src/core/public/i18n/i18n_eui_mapping.test.ts index 1b80257266d4c..d8d48a8e5f1d5 100644 --- a/src/core/public/i18n/i18n_eui_mapping.test.ts +++ b/src/core/public/i18n/i18n_eui_mapping.test.ts @@ -74,6 +74,11 @@ describe('@elastic/eui i18n tokens', () => { }); test('defaultMessage is in sync with defString', () => { + // Certain complex tokens (e.g. ones that have a function as a defaultMessage) + // need custom i18n handling, and can't be checked for basic defString equality + const tokensToSkip = ['euiColumnSorting.buttonActive']; + if (tokensToSkip.includes(token)) return; + // Clean up typical errors from the `@elastic/eui` extraction token tool const normalizedDefString = defString // Quoted words should use double-quotes diff --git a/src/core/public/i18n/i18n_eui_mapping.tsx b/src/core/public/i18n/i18n_eui_mapping.tsx index 133a2155f7430..4175dac712e82 100644 --- a/src/core/public/i18n/i18n_eui_mapping.tsx +++ b/src/core/public/i18n/i18n_eui_mapping.tsx @@ -272,9 +272,11 @@ export const getEuiContextMapping = (): EuiTokensObject => { 'euiColumnSorting.button': i18n.translate('core.euiColumnSorting.button', { defaultMessage: 'Sort fields', }), - 'euiColumnSorting.buttonActive': i18n.translate('core.euiColumnSorting.buttonActive', { - defaultMessage: 'fields sorted', - }), + 'euiColumnSorting.buttonActive': ({ numberOfSortedFields }: EuiValues) => + i18n.translate('core.euiColumnSorting.buttonActive', { + defaultMessage: '{numberOfSortedFields, plural, one {# field} other {# fields}} sorted', + values: { numberOfSortedFields }, + }), 'euiColumnSortingDraggable.activeSortLabel': ({ display }: EuiValues) => i18n.translate('core.euiColumnSortingDraggable.activeSortLabel', { defaultMessage: '{display} is sorting this data grid', @@ -514,16 +516,26 @@ export const getEuiContextMapping = (): EuiTokensObject => { 'euiFilePicker.clearSelectedFiles': i18n.translate('core.euiFilePicker.clearSelectedFiles', { defaultMessage: 'Clear selected files', }), - 'euiFilePicker.filesSelected': i18n.translate('core.euiFilePicker.filesSelected', { - defaultMessage: 'files selected', + 'euiFilePicker.filesSelected': ({ fileCount }: EuiValues) => + i18n.translate('core.euiFilePicker.filesSelected', { + defaultMessage: '{fileCount} files selected', + values: { fileCount }, + }), + 'euiFilePicker.promptText': i18n.translate('core.euiFilePicker.promptText', { + defaultMessage: 'Select or drag and drop a file', }), 'euiFilePicker.removeSelected': i18n.translate('core.euiFilePicker.removeSelected', { defaultMessage: 'Remove', }), - 'euiFilterButton.filterBadge': ({ count, hasActiveFilters }: EuiValues) => - i18n.translate('core.euiFilterButton.filterBadge', { - defaultMessage: '{count} {hasActiveFilters} filters', - values: { count, 
hasActiveFilters: hasActiveFilters ? 'active' : 'available' }, + 'euiFilterButton.filterBadgeActiveAriaLabel': ({ count }: EuiValues) => + i18n.translate('core.euiFilterButton.filterBadgeActiveAriaLabel', { + defaultMessage: '{count} active filters', + values: { count }, + }), + 'euiFilterButton.filterBadgeAvailableAriaLabel': ({ count }: EuiValues) => + i18n.translate('core.euiFilterButton.filterBadgeAvailableAriaLabel', { + defaultMessage: '{count} available filters', + values: { count }, }), 'euiFlyout.closeAriaLabel': i18n.translate('core.euiFlyout.closeAriaLabel', { defaultMessage: 'Close this dialog', diff --git a/src/core/public/public.api.md b/src/core/public/public.api.md index 043759378faa3..f18e1dc26bd87 100644 --- a/src/core/public/public.api.md +++ b/src/core/public/public.api.md @@ -473,6 +473,10 @@ export interface DocLinksStart { // (undocumented) readonly links: { readonly settings: string; + readonly apm: { + readonly kibanaSettings: string; + readonly supportedServiceMaps: string; + }; readonly canvas: { readonly guide: string; }; @@ -592,6 +596,7 @@ export interface DocLinksStart { readonly rollupJobs: string; readonly elasticsearch: Record; readonly siem: { + readonly privileges: string; readonly guide: string; readonly gettingStarted: string; readonly ml: string; diff --git a/src/core/server/config/deprecation/core_deprecations.test.ts b/src/core/server/config/deprecation/core_deprecations.test.ts index 06c7116c8bebb..759e2375ce987 100644 --- a/src/core/server/config/deprecation/core_deprecations.test.ts +++ b/src/core/server/config/deprecation/core_deprecations.test.ts @@ -62,7 +62,7 @@ describe('core deprecations', () => { expect(migrated.server.xsrf.allowlist).toEqual(['/path']); expect(messages).toMatchInlineSnapshot(` Array [ - "\\"server.xsrf.whitelist\\" is deprecated and has been replaced by \\"server.xsrf.allowlist\\"", + "Setting \\"server.xsrf.whitelist\\" has been replaced by \\"server.xsrf.allowlist\\"", ] `); }); diff --git a/src/core/server/config/integration_tests/config_deprecation.test.ts b/src/core/server/config/integration_tests/config_deprecation.test.ts index 2d86281ce40d6..c941053a2f0a1 100644 --- a/src/core/server/config/integration_tests/config_deprecation.test.ts +++ b/src/core/server/config/integration_tests/config_deprecation.test.ts @@ -51,8 +51,8 @@ describe('configuration deprecations', () => { const logs = loggingSystemMock.collect(mockLoggingSystem); expect(logs.warn.flat()).toMatchInlineSnapshot(` Array [ - "optimize.lazy is deprecated and is no longer used", - "optimize.lazyPort is deprecated and is no longer used", + "You no longer need to configure \\"optimize.lazy\\".", + "You no longer need to configure \\"optimize.lazyPort\\".", "\\"logging.silent\\" has been deprecated and will be removed in 8.0. Moving forward, you can use \\"logging.root.level:off\\" in your logging configuration. 
", ] `); diff --git a/src/core/server/core_usage_data/core_usage_data_service.mock.ts b/src/core/server/core_usage_data/core_usage_data_service.mock.ts index a03f79096004b..941ac5afacb40 100644 --- a/src/core/server/core_usage_data/core_usage_data_service.mock.ts +++ b/src/core/server/core_usage_data/core_usage_data_service.mock.ts @@ -47,6 +47,7 @@ const createStartContractMock = () => { keystoreConfigured: false, truststoreConfigured: false, }, + principal: 'unknown', }, http: { basePathConfigured: false, diff --git a/src/core/server/core_usage_data/core_usage_data_service.test.ts b/src/core/server/core_usage_data/core_usage_data_service.test.ts index 7ecfa37492242..478cfe5daff46 100644 --- a/src/core/server/core_usage_data/core_usage_data_service.test.ts +++ b/src/core/server/core_usage_data/core_usage_data_service.test.ts @@ -6,6 +6,7 @@ * Side Public License, v 1. */ +import type { ConfigPath } from '@kbn/config'; import { BehaviorSubject, Observable } from 'rxjs'; import { HotObservable } from 'rxjs/internal/testing/HotObservable'; import { TestScheduler } from 'rxjs/testing'; @@ -29,12 +30,31 @@ import { CORE_USAGE_STATS_TYPE } from './constants'; import { CoreUsageStatsClient } from './core_usage_stats_client'; describe('CoreUsageDataService', () => { + function getConfigServiceAtPathMockImplementation() { + return (path: ConfigPath) => { + if (path === 'elasticsearch') { + return new BehaviorSubject(RawElasticsearchConfig.schema.validate({})); + } else if (path === 'server') { + return new BehaviorSubject(RawHttpConfig.schema.validate({})); + } else if (path === 'logging') { + return new BehaviorSubject(RawLoggingConfig.schema.validate({})); + } else if (path === 'savedObjects') { + return new BehaviorSubject(RawSavedObjectsConfig.schema.validate({})); + } else if (path === 'kibana') { + return new BehaviorSubject(RawKibanaConfig.schema.validate({})); + } + return new BehaviorSubject({}); + }; + } + const getTestScheduler = () => new TestScheduler((actual, expected) => { expect(actual).toEqual(expected); }); let service: CoreUsageDataService; + let configService: ReturnType; + const mockConfig = { unused_config: {}, elasticsearch: { username: 'kibana_system', password: 'changeme' }, @@ -60,27 +80,11 @@ describe('CoreUsageDataService', () => { }, }; - const configService = configServiceMock.create({ - getConfig$: mockConfig, - }); - - configService.atPath.mockImplementation((path) => { - if (path === 'elasticsearch') { - return new BehaviorSubject(RawElasticsearchConfig.schema.validate({})); - } else if (path === 'server') { - return new BehaviorSubject(RawHttpConfig.schema.validate({})); - } else if (path === 'logging') { - return new BehaviorSubject(RawLoggingConfig.schema.validate({})); - } else if (path === 'savedObjects') { - return new BehaviorSubject(RawSavedObjectsConfig.schema.validate({})); - } else if (path === 'kibana') { - return new BehaviorSubject(RawKibanaConfig.schema.validate({})); - } - return new BehaviorSubject({}); - }); - const coreContext = mockCoreContext.create({ configService }); - beforeEach(() => { + configService = configServiceMock.create({ getConfig$: mockConfig }); + configService.atPath.mockImplementation(getConfigServiceAtPathMockImplementation()); + + const coreContext = mockCoreContext.create({ configService }); service = new CoreUsageDataService(coreContext); }); @@ -150,7 +154,7 @@ describe('CoreUsageDataService', () => { describe('start', () => { describe('getCoreUsageData', () => { - it('returns core metrics for default config', async () => 
{ + function setup() { const http = httpServiceMock.createInternalSetupContract(); const metrics = metricsServiceMock.createInternalSetupContract(); const savedObjectsStartPromise = Promise.resolve( @@ -208,6 +212,11 @@ describe('CoreUsageDataService', () => { exposedConfigsToUsage: new Map(), elasticsearch, }); + return { getCoreUsageData }; + } + + it('returns core metrics for default config', async () => { + const { getCoreUsageData } = setup(); expect(getCoreUsageData()).resolves.toMatchInlineSnapshot(` Object { "config": Object { @@ -226,6 +235,7 @@ describe('CoreUsageDataService', () => { "logQueries": false, "numberOfHostsConfigured": 1, "pingTimeoutMs": 30000, + "principal": "unknown", "requestHeadersWhitelistConfigured": false, "requestTimeoutMs": 30000, "shardTimeoutMs": 30000, @@ -354,6 +364,60 @@ describe('CoreUsageDataService', () => { } `); }); + + describe('elasticsearch.principal', () => { + async function doTest({ + username, + serviceAccountToken, + expectedPrincipal, + }: { + username?: string; + serviceAccountToken?: string; + expectedPrincipal: string; + }) { + const defaultMockImplementation = getConfigServiceAtPathMockImplementation(); + configService.atPath.mockImplementation((path) => { + if (path === 'elasticsearch') { + return new BehaviorSubject( + RawElasticsearchConfig.schema.validate({ username, serviceAccountToken }) + ); + } + return defaultMockImplementation(path); + }); + const { getCoreUsageData } = setup(); + return expect(getCoreUsageData()).resolves.toEqual( + expect.objectContaining({ + config: expect.objectContaining({ + elasticsearch: expect.objectContaining({ principal: expectedPrincipal }), + }), + }) + ); + } + + it('returns expected usage data for elastic.username "elastic"', async () => { + return doTest({ username: 'elastic', expectedPrincipal: 'elastic_user' }); + }); + + it('returns expected usage data for elastic.username "kibana"', async () => { + return doTest({ username: 'kibana', expectedPrincipal: 'kibana_user' }); + }); + + it('returns expected usage data for elastic.username "kibana_system"', async () => { + return doTest({ username: 'kibana_system', expectedPrincipal: 'kibana_system_user' }); + }); + + it('returns expected usage data for elastic.username anything else', async () => { + return doTest({ username: 'anything else', expectedPrincipal: 'other_user' }); + }); + + it('returns expected usage data for elastic.serviceAccountToken', async () => { + // Note: elastic.username and elastic.serviceAccountToken are mutually exclusive + return doTest({ + serviceAccountToken: 'any', + expectedPrincipal: 'kibana_service_account', + }); + }); + }); }); describe('getConfigsUsageData', () => { diff --git a/src/core/server/core_usage_data/core_usage_data_service.ts b/src/core/server/core_usage_data/core_usage_data_service.ts index 7cf38dddc563e..73f63d4d634df 100644 --- a/src/core/server/core_usage_data/core_usage_data_service.ts +++ b/src/core/server/core_usage_data/core_usage_data_service.ts @@ -29,6 +29,7 @@ import type { CoreUsageDataStart, CoreUsageDataSetup, ConfigUsageData, + CoreConfigUsageData, } from './types'; import { isConfigured } from './is_configured'; import { ElasticsearchServiceStart } from '../elasticsearch'; @@ -253,6 +254,7 @@ export class CoreUsageDataService implements CoreService = { exposeToBrowser: { defaultAppId: true, @@ -97,12 +108,23 @@ export const config: PluginConfigDescriptor = { return completeConfig; } addDeprecation({ - message: `kibana.defaultAppId is deprecated and will be removed in 8.0. 
Please use the "defaultRoute" advanced setting instead`, + title: i18n.translate('kibana_legacy.deprecations.defaultAppIdTitle', { + defaultMessage: 'Setting "kibana.defaultAppId" is deprecated', + }), + message: i18n.translate('kibana_legacy.deprecations.defaultAppIdMessage', { + defaultMessage: 'Use the "defaultRoute" advanced setting instead of "kibana.defaultAppId".', + }), correctiveActions: { manualSteps: [ - 'Go to Stack Management > Advanced Settings', - 'Update the "defaultRoute" setting under the General section', - 'Remove "kibana.defaultAppId" from the kibana.yml config file', + i18n.translate('kibana_legacy.deprecations.defaultAppId.manualStepOneMessage', { + defaultMessage: 'Go to Stack Management > Advanced Settings.', + }), + i18n.translate('kibana_legacy.deprecations.defaultAppId.manualStepTwoMessage', { + defaultMessage: 'Update the "defaultRoute" setting in the General section.', + }), + i18n.translate('kibana_legacy.deprecations.defaultAppId.manualStepThreeMessage', { + defaultMessage: 'Remove "kibana.defaultAppId" from the kibana.yml config file.', + }), ], }, }); @@ -138,39 +160,49 @@ To check the full TS types of the service please check the [generated core docs] ### Example ```ts import { DeprecationsDetails, GetDeprecationsContext } from 'src/core/server'; +import { i18n } from '@kbn/i18n'; async function getDeprecations({ esClient, savedObjectsClient }: GetDeprecationsContext): Promise { const deprecations: DeprecationsDetails[] = []; const testDashboardUser = await getTestDashboardUser(savedObjectsClient); if (testDashboardUser) { - deprecations.push({ - message: 'User "test_dashboard_user" is using a deprecated role: "kibana_user"', - documentationUrl: 'https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-user.html', - level: 'critical', - correctiveActions: { - api: { - path: '/internal/security/users/test_dashboard_user', - method: 'POST', - body: { - username: 'test_dashboard_user', - roles: [ - 'machine_learning_user', - 'enrich_user', - 'kibana_admin' - ], - full_name: 'Alison Goryachev', - email: 'alisongoryachev@gmail.com', - metadata: {}, - enabled: true - } + deprecations.push({ + title: i18n.translate('security.deprecations.kibanaUserRoleTitle', { + defaultMessage: 'Deprecated roles are assigned to some users', + }), + message: i18n.translate('security.deprecations.kibanaUserRoleMessage', { + defaultMessage: 'User "test_dashboard_user" is using a deprecated role: "kibana_user".', + }), + documentationUrl: 'https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-user.html', + level: 'critical', + correctiveActions: { + api: { + path: '/internal/security/users/test_dashboard_user', + method: 'POST', + body: { + username: 'test_dashboard_user', + roles: [ + 'machine_learning_user', + 'enrich_user', + 'kibana_admin' + ], + full_name: 'Alison Goryachev', + email: 'alisongoryachev@gmail.com', + metadata: {}, + enabled: true + } + }, + manualSteps: [ + i18n.translate('security.deprecations.kibanaUserRole.manualStepOneMessage', { + defaultMessage: 'Switch all users with the "kibana_user" role to the kibana_admin role in Management > Security > Users.', + }), + i18n.translate('security.deprecations.kibanaUserRole.manualStepTwoMessage', { + defaultMessage: 'Update all mappings in Management > Security > Role Mappings to assign the "kibana_admin" role instead of the "kibana_user" role.' 
+          }),
+        ],
       },
-      manualSteps: [
-        'Using Kibana user management, change all users using the kibana_user role to the kibana_admin role.',
-        'Using Kibana role-mapping management, change all role-mappings which assing the kibana_user role to the kibana_admin role.'
-      ]
-    },
-  });
+    });
   }

   return deprecations;
@@ -204,7 +236,128 @@ Currently we do not have test objects to run functional tests against the Upgrad
 Yes. Using this service should help users find and resolve any issues specific to their deployment before upgrading. We recommend adding a `documentationUrl` for every deprecation you expose to further assist our users if they need extra help.

+## Writing deprecation details
+
+State what is being deprecated and what action the user needs to take:
+
+> Abc is deprecated. Use Xyz to do the thing.
+
+Provide as much context as possible for what triggered the deprecation warning.
+If action is not required (for example the default behavior is changing), describe the impact of doing nothing.
+
+Examples:
+- > Setting `xpack.reporting.roles.enabled` is deprecated. Use feature controls to grant reporting privileges.
+- > The Joda Time century-of-era formatter (C) is deprecated. Use a `java.time` formatter instead.
+- > The default for the `cluster.routing.allocation.disk.watermark` setting is changing from false to true.
+  > If you do not explicitly configure this setting when you upgrade, indices in this one node cluster will
+  > become read-only if disk usage reaches 95%.
+
 ## Note on i18n
-We have decided to support i18n to the exposed deprecations for a better user experience when using the UA.
-We will inject `i18n` into the deprecation function to enable teams to use it before fully documenting its usage.
-For context follow [this issue](https://github.com/elastic/kibana/issues/99072).
+All deprecation titles, messages, and manual steps should be wrapped in `i18n.translate`. This
+provides a better user experience when using different locales. Follow the writing guidelines below for
+best practices when writing the i18n messages and ids.
+
+### Writing guidelines
+The deprecation service enables you to specify a `title`, `message`, `documentationUrl`,
+and the `manual steps` for resolving a deprecation issue.
+
+#### Title:
+No end punctuation is required.
+i18n id: `{plugin_domain}.deprecations.{deprecationTitle}Title`
+
+Example:
+```ts
+title: i18n.translate('xpack.reporting.deprecations.reportingRoleTitle', {
+  defaultMessage: `Found deprecated reporting roles`,
+})
+```
+
+#### Message
+Keep it brief, but multiple sentences are allowed if needed.
+i18n id: `{plugin_domain}.deprecations.{deprecationTitle}Message`
+
+Example:
+```ts
+message: i18n.translate('xpack.reporting.deprecations.reportingRoleMessage', {
+  defaultMessage: `The deprecated "${deprecatedRole}" role has been found for ${numReportingUsers} user(s): "${usernames}"`,
+  values: { deprecatedRole, numReportingUsers, usernames },
+}),
+```
+
+#### Documentation URL
+Don’t link to the Migration guide/breaking changes.
+Only specify a doc URL if the user truly needs to “learn more” to understand what actions they need to take.
+
+Example:
+```ts
+documentationUrl: 'https://www.elastic.co/guide/en/kibana/current/secure-reporting.html',
+```
+#### Manual steps
+State the action first for each step.
+i18n id: `{plugin_domain}.deprecations.{deprecationTitle}.manualStep{Step#}Message`
+
+Example:
+```ts
+manualSteps: [
+  i18n.translate('xpack.reporting.deprecations.reportingRole.manualStepTwoMessage', {
+    defaultMessage: `Create one or more custom roles that provide Kibana application privileges to reporting features in **Management > Security > Roles**.`,
+  }),
+  i18n.translate('xpack.reporting.deprecations.reportingRole.manualStepThreeMessage', {
+    defaultMessage: `Assign the custom role(s) as desired, and remove the "${deprecatedRole}" role from the user(s).`,
+    values: { deprecatedRole },
+  }),
+]
+```
+
+#### General Guidelines
+
+##### What is deprecated
+Use the present tense:
+- Types are deprecated in geo_shape queries.
+- Sorting is deprecated in reindex requests.
+
+Avoid:
+- The type should no longer be specified in geo_shape queries.
+- Sorting has been deprecated in reindex requests.
+
+##### What action the user needs to take
+Use the imperative voice:
+- Do not specify a type in the indexed_shape section.
+- Use query filtering to reindex a subset of documents.
+
+Avoid:
+- Please use query filtering instead.
+- You should use query filtering instead.
+- Instead consider using query filtering to find the desired subset of data.
+
+##### Context
+Where possible, provide the specific context that resulted in the warning:
+- The Abc timezone used by rollup job Def is deprecated. Use Xyz instead.
+
+##### Impact
+Many deprecations are clear-cut: you are using this old thing and need to switch to using this new thing.
+Others are more nuanced and don’t necessarily require any changes. In this case, the warning needs to address
+the impact of not taking action:
+- The default for the `cluster.routing.allocation.disk.watermark` setting is changing from false to true.
+  If you do not explicitly configure this setting when you upgrade, indices in this one node cluster will
+  become read-only if disk usage reaches 95%.
+
+##### Version
+You do not need to include any form of "and will be removed in a future release".
+The assumption is that deprecated things are going to be removed, and the standard schedule for removal
+is the next major version.
+
+If things are targeted for removal in a specific minor release, the message should include that information:
+- Abc is deprecated. Use Xyz to do the thing. Support for Abc will be removed in n.n.
+
+If an item is deprecated, but won’t be removed in the next major version, the message should indicate that:
+- Abc is deprecated. Use Xyz to do the thing. Support for Abc will be removed following the release of n.0.
+
+Avoid:
+- Xyz is deprecated and will be removed in 8.0.
+- Xyz is deprecated and will be unsupported in future.
+- Xyz is deprecated and will not be supported in the next major version of Elasticsearch.
+
+##### Formatting
+- Sentence-style capitalization and punctuation.
+- Avoid quotes for emphasis.
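The guidelines above are easiest to absorb as one worked example. The following is a minimal, hypothetical sketch that pulls the title, message, and manual-step conventions together; the `xpack.myPlugin` domain, the `my-legacy-type` saved-object type, and every i18n id in it are invented for illustration and are not part of this PR:

```ts
import { i18n } from '@kbn/i18n';
import type { DeprecationsDetails, GetDeprecationsContext } from 'src/core/server';

// Hypothetical plugin domain and saved-object type, used only to illustrate
// the naming and writing conventions described above.
export async function getDeprecations({
  savedObjectsClient,
}: GetDeprecationsContext): Promise<DeprecationsDetails[]> {
  const deprecations: DeprecationsDetails[] = [];
  const legacyObjects = await savedObjectsClient.find({ type: 'my-legacy-type' });

  if (legacyObjects.total > 0) {
    deprecations.push({
      // Title: no end punctuation, id follows `{plugin_domain}.deprecations.{deprecationTitle}Title`
      title: i18n.translate('xpack.myPlugin.deprecations.legacyObjectsTitle', {
        defaultMessage: 'Found legacy saved objects',
      }),
      // Message: present tense for what is deprecated, imperative voice for the action,
      // with the specific context (the count) that triggered the warning
      message: i18n.translate('xpack.myPlugin.deprecations.legacyObjectsMessage', {
        defaultMessage:
          'Legacy objects are deprecated. Migrate the {count} remaining object(s) to the new format.',
        values: { count: legacyObjects.total },
      }),
      level: 'warning',
      correctiveActions: {
        // Manual steps: state the action first, one i18n id per step
        manualSteps: [
          i18n.translate('xpack.myPlugin.deprecations.legacyObjects.manualStepOneMessage', {
            defaultMessage: 'Go to Stack Management > Saved Objects.',
          }),
          i18n.translate('xpack.myPlugin.deprecations.legacyObjects.manualStepTwoMessage', {
            defaultMessage: 'Re-save each legacy object to migrate it to the new format.',
          }),
        ],
      },
    });
  }

  return deprecations;
}
```

As the `types.ts` change later in this diff notes, `manualSteps` is required even when an `api` corrective action is supplied, so users still have a path forward if the automated call fails.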
diff --git a/src/core/server/deprecations/deprecations_factory.test.ts b/src/core/server/deprecations/deprecations_factory.test.ts index 187f3880f9998..73beb84f57fa6 100644 --- a/src/core/server/deprecations/deprecations_factory.test.ts +++ b/src/core/server/deprecations/deprecations_factory.test.ts @@ -124,16 +124,21 @@ describe('DeprecationsFactory', () => { `Failed to get deprecations info for plugin "${domainId}".`, mockError ); - expect(derpecations).toStrictEqual([ - { - domainId, - message: `Failed to get deprecations info for plugin "${domainId}".`, - level: 'fetch_error', - correctiveActions: { - manualSteps: ['Check Kibana server logs for error message.'], + expect(derpecations).toMatchInlineSnapshot(` + Array [ + Object { + "correctiveActions": Object { + "manualSteps": Array [ + "Check Kibana server logs for error message.", + ], + }, + "domainId": "mockPlugin", + "level": "fetch_error", + "message": "Unable to fetch deprecations info for plugin mockPlugin.", + "title": "Failed to fetch deprecations for mockPlugin", }, - }, - ]); + ] + `); }); it(`returns successful results even when some getDeprecations fail`, async () => { @@ -167,7 +172,8 @@ describe('DeprecationsFactory', () => { ...mockPluginDeprecationsInfo.map((info) => ({ ...info, domainId: 'mockPlugin' })), { domainId: 'anotherMockPlugin', - message: `Failed to get deprecations info for plugin "anotherMockPlugin".`, + title: 'Failed to fetch deprecations for anotherMockPlugin', + message: 'Unable to fetch deprecations info for plugin anotherMockPlugin.', level: 'fetch_error', correctiveActions: { manualSteps: ['Check Kibana server logs for error message.'], diff --git a/src/core/server/deprecations/deprecations_factory.ts b/src/core/server/deprecations/deprecations_factory.ts index 3699c088e20f1..9905f0b26b4f3 100644 --- a/src/core/server/deprecations/deprecations_factory.ts +++ b/src/core/server/deprecations/deprecations_factory.ts @@ -6,6 +6,7 @@ * Side Public License, v 1. 
*/ +import { i18n } from '@kbn/i18n'; import { DeprecationsRegistry } from './deprecations_registry'; import type { Logger } from '../logging'; import type { @@ -89,10 +90,24 @@ export class DeprecationsFactory { ); return [ { - message: `Failed to get deprecations info for plugin "${domainId}".`, + title: i18n.translate('core.deprecations.deprecations.fetchFailedTitle', { + defaultMessage: `Failed to fetch deprecations for {domainId}`, + values: { domainId }, + }), + message: i18n.translate('core.deprecations.deprecations.fetchFailedMessage', { + defaultMessage: 'Unable to fetch deprecations info for plugin {domainId}.', + values: { domainId }, + }), level: 'fetch_error', correctiveActions: { - manualSteps: ['Check Kibana server logs for error message.'], + manualSteps: [ + i18n.translate( + 'core.deprecations.deprecations.fetchFailed.manualStepOneMessage', + { + defaultMessage: 'Check Kibana server logs for error message.', + } + ), + ], }, }, ]; diff --git a/src/core/server/deprecations/deprecations_service.test.ts b/src/core/server/deprecations/deprecations_service.test.ts index 75a0d6a63d919..0e8aaf3de49c9 100644 --- a/src/core/server/deprecations/deprecations_service.test.ts +++ b/src/core/server/deprecations/deprecations_service.test.ts @@ -110,6 +110,7 @@ describe('DeprecationsService', () => { "level": "critical", "message": "testMessage", "requireRestart": true, + "title": "testDomain has a deprecated setting", }, ] `); diff --git a/src/core/server/deprecations/deprecations_service.ts b/src/core/server/deprecations/deprecations_service.ts index 7c4f74fe7d0ec..c41567d88a2aa 100644 --- a/src/core/server/deprecations/deprecations_service.ts +++ b/src/core/server/deprecations/deprecations_service.ts @@ -33,6 +33,7 @@ import { SavedObjectsClientContract } from '../saved_objects/types'; * @example * ```ts * import { DeprecationsDetails, GetDeprecationsContext, CoreSetup } from 'src/core/server'; + * import { i18n } from '@kbn/i18n'; * * async function getDeprecations({ esClient, savedObjectsClient }: GetDeprecationsContext): Promise { * const deprecations: DeprecationsDetails[] = []; @@ -41,52 +42,44 @@ import { SavedObjectsClientContract } from '../saved_objects/types'; * if (count > 0) { * // Example of a manual correctiveAction * deprecations.push({ - * message: `You have ${count} Timelion worksheets. The Timelion app will be removed in 8.0. To continue using your Timelion worksheets, migrate them to a dashboard.`, + * title: i18n.translate('xpack.timelion.deprecations.worksheetsTitle', { + * defaultMessage: 'Timelion worksheets are deprecated' + * }), + * message: i18n.translate('xpack.timelion.deprecations.worksheetsMessage', { + * defaultMessage: 'You have {count} Timelion worksheets. 
Migrate your Timelion worksheets to a dashboard to continue using them.', + * values: { count }, + * }), * documentationUrl: * 'https://www.elastic.co/guide/en/kibana/current/create-panels-with-timelion.html', * level: 'warning', * correctiveActions: { * manualSteps: [ - * 'Navigate to the Kibana Dashboard and click "Create dashboard".', - * 'Select Timelion from the "New Visualization" window.', - * 'Open a new tab, open the Timelion app, select the chart you want to copy, then copy the chart expression.', - * 'Go to Timelion, paste the chart expression in the Timelion expression field, then click Update.', - * 'In the toolbar, click Save.', - * 'On the Save visualization window, enter the visualization Title, then click Save and return.', + * i18n.translate('xpack.timelion.deprecations.worksheets.manualStepOneMessage', { + * defaultMessage: 'Navigate to the Kibana Dashboard and click "Create dashboard".', + * }), + * i18n.translate('xpack.timelion.deprecations.worksheets.manualStepTwoMessage', { + * defaultMessage: 'Select Timelion from the "New Visualization" window.', + * }), * ], + * api: { + * path: '/internal/security/users/test_dashboard_user', + * method: 'POST', + * body: { + * username: 'test_dashboard_user', + * roles: [ + * "machine_learning_user", + * "enrich_user", + * "kibana_admin" + * ], + * full_name: "Alison Goryachev", + * email: "alisongoryachev@gmail.com", + * metadata: {}, + * enabled: true + * } + * }, * }, * }); * } - * - * // Example of an api correctiveAction - * deprecations.push({ - * "message": "User 'test_dashboard_user' is using a deprecated role: 'kibana_user'", - * "documentationUrl": "https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-user.html", - * "level": "critical", - * "correctiveActions": { - * "api": { - * "path": "/internal/security/users/test_dashboard_user", - * "method": "POST", - * "body": { - * "username": "test_dashboard_user", - * "roles": [ - * "machine_learning_user", - * "enrich_user", - * "kibana_admin" - * ], - * "full_name": "Alison Goryachev", - * "email": "alisongoryachev@gmail.com", - * "metadata": {}, - * "enabled": true - * } - * }, - * "manualSteps": [ - * "Using Kibana user management, change all users using the kibana_user role to the kibana_admin role.", - * "Using Kibana role-mapping management, change all role-mappings which assing the kibana_user role to the kibana_admin role." 
- * ] - * }, - * }); - * * return deprecations; * } * @@ -192,16 +185,19 @@ export class DeprecationsService const deprecationsRegistry = deprecationsFactory.getRegistry(domainId); deprecationsRegistry.registerDeprecations({ getDeprecations: () => { - return deprecationsContexts.map(({ message, correctiveActions, documentationUrl }) => { - return { - level: 'critical', - deprecationType: 'config', - message, - correctiveActions, - documentationUrl, - requireRestart: true, - }; - }); + return deprecationsContexts.map( + ({ title, message, correctiveActions, documentationUrl }) => { + return { + title: title || `${domainId} has a deprecated setting`, + level: 'critical', + deprecationType: 'config', + message, + correctiveActions, + documentationUrl, + requireRestart: true, + }; + } + ); }, }); } diff --git a/src/core/server/deprecations/types.ts b/src/core/server/deprecations/types.ts index 486fec5dfd8be..c924cacd02e28 100644 --- a/src/core/server/deprecations/types.ts +++ b/src/core/server/deprecations/types.ts @@ -16,7 +16,15 @@ export interface DomainDeprecationDetails extends DeprecationsDetails { } export interface DeprecationsDetails { - /* The message to be displayed for the deprecation. */ + /** + * The title of the deprecation. + * Check the README for writing deprecations in `src/core/server/deprecations/README.mdx` + */ + title: string; + /** + * The description message to be displayed for the deprecation. + * Check the README for writing deprecations in `src/core/server/deprecations/README.mdx` + */ message: string; /** * levels: @@ -60,6 +68,7 @@ export interface DeprecationsDetails { * Specify a list of manual steps users need to follow to * fix the deprecation before upgrade. Required even if an API * corrective action is set in case the API fails. 
+   * Check the README for writing deprecations in `src/core/server/deprecations/README.mdx`
    */
   manualSteps: string[];
 };
diff --git a/src/core/server/elasticsearch/client/client_config.ts b/src/core/server/elasticsearch/client/client_config.ts
index 27d6f877a5572..a6b0891fc12dd 100644
--- a/src/core/server/elasticsearch/client/client_config.ts
+++ b/src/core/server/elasticsearch/client/client_config.ts
@@ -56,6 +56,9 @@ export function parseClientOptions(
       ...DEFAULT_HEADERS,
       ...config.customHeaders,
     },
+    // do not make assumptions about user-supplied data content
+    // fixes https://github.com/elastic/kibana/issues/101944
+    disablePrototypePoisoningProtection: true,
   };

   if (config.pingTimeout != null) {
diff --git a/src/core/server/elasticsearch/client/configure_client.test.ts b/src/core/server/elasticsearch/client/configure_client.test.ts
index f954b121320fe..4e2c9c22f42f8 100644
--- a/src/core/server/elasticsearch/client/configure_client.test.ts
+++ b/src/core/server/elasticsearch/client/configure_client.test.ts
@@ -10,6 +10,7 @@ import { Buffer } from 'buffer';
 import { Readable } from 'stream';

 import { RequestEvent, errors } from '@elastic/elasticsearch';
+import type { Client } from '@elastic/elasticsearch';
 import type {
   TransportRequestOptions,
   TransportRequestParams,
@@ -18,7 +19,6 @@ import type {

 import { parseClientOptionsMock, ClientMock } from './configure_client.test.mocks';
 import { loggingSystemMock } from '../../logging/logging_system.mock';
-import { EventEmitter } from 'events';
 import type { ElasticsearchClientConfig } from './client_config';
 import { configureClient } from './configure_client';

@@ -32,7 +32,10 @@ const createFakeConfig = (
 };

 const createFakeClient = () => {
-  const client = new EventEmitter();
+  const actualEs = jest.requireActual('@elastic/elasticsearch');
+  const client = new actualEs.Client({
+    nodes: ['http://localhost'], // Enforcing `nodes` because it's mandatory
+  });
   jest.spyOn(client, 'on');
   return client;
 };
@@ -67,6 +70,14 @@ const createApiResponse = ({
   };
 };

+function getProductCheckValue(client: Client) {
+  const tSymbol = Object.getOwnPropertySymbols(client.transport || client).filter(
+    (symbol) => symbol.description === 'product check'
+  )[0];
+  // @ts-expect-error `tSymbol` is missing in the index signature of Transport
+  return (client.transport || client)[tSymbol];
+}
+
 describe('configureClient', () => {
   let logger: ReturnType;
   let config: ElasticsearchClientConfig;
@@ -117,6 +128,24 @@ describe('configureClient', () => {
     expect(client.on).toHaveBeenCalledWith('response', expect.any(Function));
   });

+  describe('Product check', () => {
+    it('should not skip the product check for the unscoped client', () => {
+      const client = configureClient(config, { logger, type: 'test', scoped: false });
+      expect(getProductCheckValue(client)).toBe(0);
+    });
+
+    it('should skip the product check for the scoped client', () => {
+      const client = configureClient(config, { logger, type: 'test', scoped: true });
+      expect(getProductCheckValue(client)).toBe(2);
+    });
+
+    it('should skip the product check for the children of the scoped client', () => {
+      const client = configureClient(config, { logger, type: 'test', scoped: true });
+      const asScoped = client.child({ headers: { 'x-custom-header': 'Custom value' } });
+      expect(getProductCheckValue(asScoped)).toBe(2);
+    });
+  });
+
   describe('Client logging', () => {
     function createResponseWithBody(body?: RequestBody) {
       return createApiResponse({
diff --git a/src/core/server/elasticsearch/client/configure_client.ts
b/src/core/server/elasticsearch/client/configure_client.ts index 35825ef765dbf..efd22365d44f3 100644 --- a/src/core/server/elasticsearch/client/configure_client.ts +++ b/src/core/server/elasticsearch/client/configure_client.ts @@ -49,6 +49,12 @@ export const configureClient = ( const client = new Client({ ...clientOptions, Transport: KibanaTransport }); addLogging(client, logger.get('query', type)); + // --------------------------------------------------------------------------------- // + // Hack to disable the "Product check" only in the scoped clients while we // + // come up with a better approach in https://github.com/elastic/kibana/issues/110675 // + if (scoped) skipProductCheck(client); + // --------------------------------------------------------------------------------- // + return client; }; @@ -131,3 +137,21 @@ const addLogging = (client: Client, logger: Logger) => { } }); }; + +/** + * Hack to skip the Product Check performed by the Elasticsearch-js client. + * We noticed that the scoped clients are always performing this check because + * of the way we initialize the clients. We'll discuss changing this in the issue + * https://github.com/elastic/kibana/issues/110675. In the meanwhile, let's skip + * it for the scoped clients. + * + * The hack is copied from the test/utils in the elasticsearch-js repo + * (https://github.com/elastic/elasticsearch-js/blob/master/test/utils/index.js#L45-L56) + */ +function skipProductCheck(client: Client) { + const tSymbol = Object.getOwnPropertySymbols(client.transport || client).filter( + (symbol) => symbol.description === 'product check' + )[0]; + // @ts-expect-error `tSymbol` is missing in the index signature of Transport + (client.transport || client)[tSymbol] = 2; +} diff --git a/src/core/server/saved_objects/migrations/kibana/kibana_migrator.mock.ts b/src/core/server/saved_objects/migrations/kibana/kibana_migrator.mock.ts index 530203e659086..9471bbc1b87a6 100644 --- a/src/core/server/saved_objects/migrations/kibana/kibana_migrator.mock.ts +++ b/src/core/server/saved_objects/migrations/kibana/kibana_migrator.mock.ts @@ -11,6 +11,7 @@ import { buildActiveMappings } from '../core'; const { mergeTypes } = jest.requireActual('./kibana_migrator'); import { SavedObjectsType } from '../../types'; import { BehaviorSubject } from 'rxjs'; +import { ByteSizeValue } from '@kbn/config-schema'; const defaultSavedObjectTypes: SavedObjectsType[] = [ { @@ -37,6 +38,7 @@ const createMigrator = ( kibanaVersion: '8.0.0-testing', soMigrationsConfig: { batchSize: 100, + maxBatchSizeBytes: ByteSizeValue.parse('30kb'), scrollDuration: '15m', pollInterval: 1500, skip: false, diff --git a/src/core/server/saved_objects/migrations/kibana/kibana_migrator.test.ts b/src/core/server/saved_objects/migrations/kibana/kibana_migrator.test.ts index d0cc52f2dd9bd..6e10349f4b57c 100644 --- a/src/core/server/saved_objects/migrations/kibana/kibana_migrator.test.ts +++ b/src/core/server/saved_objects/migrations/kibana/kibana_migrator.test.ts @@ -15,6 +15,7 @@ import { loggingSystemMock } from '../../../logging/logging_system.mock'; import { SavedObjectTypeRegistry } from '../../saved_objects_type_registry'; import { SavedObjectsType } from '../../types'; import { DocumentMigrator } from '../core/document_migrator'; +import { ByteSizeValue } from '@kbn/config-schema'; jest.mock('../core/document_migrator', () => { return { // Create a mock for spying on the constructor @@ -396,6 +397,7 @@ const mockOptions = ({ enableV2 }: { enableV2: boolean } = { enableV2: false }) } as 
KibanaMigratorOptions['kibanaConfig'], soMigrationsConfig: { batchSize: 20, + maxBatchSizeBytes: ByteSizeValue.parse('20mb'), pollInterval: 20000, scrollDuration: '10m', skip: false, diff --git a/src/core/server/saved_objects/migrationsv2/README.md b/src/core/server/saved_objects/migrationsv2/README.md index 5bdc548987842..5121e66052f40 100644 --- a/src/core/server/saved_objects/migrationsv2/README.md +++ b/src/core/server/saved_objects/migrationsv2/README.md @@ -316,7 +316,10 @@ completed this step: - temp index has a write block - temp index is not found ### New control state +1. If `currentBatch` is the last batch in `transformedDocBatches` → `REINDEX_SOURCE_TO_TEMP_READ` +2. If there are more batches left in `transformedDocBatches` + → `REINDEX_SOURCE_TO_TEMP_INDEX_BULK` ## REINDEX_SOURCE_TO_TEMP_CLOSE_PIT ### Next action diff --git a/src/core/server/saved_objects/migrationsv2/actions/bulk_overwrite_transformed_documents.ts b/src/core/server/saved_objects/migrationsv2/actions/bulk_overwrite_transformed_documents.ts index 4217ca599297a..82f642b928058 100644 --- a/src/core/server/saved_objects/migrationsv2/actions/bulk_overwrite_transformed_documents.ts +++ b/src/core/server/saved_objects/migrationsv2/actions/bulk_overwrite_transformed_documents.ts @@ -23,6 +23,27 @@ import type { IndexNotFound, } from './index'; +/** + * Given a document and index, creates a valid body for the Bulk API. + */ +export const createBulkOperationBody = (doc: SavedObjectsRawDoc, index: string) => { + return [ + { + index: { + _index: index, + _id: doc._id, + // overwrite existing documents + op_type: 'index', + // use optimistic concurrency control to ensure that outdated + // documents are only overwritten once with the latest version + if_seq_no: doc._seq_no, + if_primary_term: doc._primary_term, + }, + }, + doc._source, + ]; +}; + /** @internal */ export interface BulkOverwriteTransformedDocumentsParams { client: ElasticsearchClient; @@ -47,6 +68,10 @@ export const bulkOverwriteTransformedDocuments = ({ | RequestEntityTooLargeException, 'bulk_index_succeeded' > => () => { + const body = transformedDocs.flatMap((doc) => { + return createBulkOperationBody(doc, index); + }); + return client .bulk({ // Because we only add aliases in the MARK_VERSION_INDEX_READY step we @@ -60,23 +85,7 @@ export const bulkOverwriteTransformedDocuments = ({ wait_for_active_shards: WAIT_FOR_ALL_SHARDS_TO_BE_ACTIVE, refresh, filter_path: ['items.*.error'], - body: transformedDocs.flatMap((doc) => { - return [ - { - index: { - _index: index, - _id: doc._id, - // overwrite existing documents - op_type: 'index', - // use optimistic concurrency control to ensure that outdated - // documents are only overwritten once with the latest version - if_seq_no: doc._seq_no, - if_primary_term: doc._primary_term, - }, - }, - doc._source, - ]; - }), + body, }) .then((res) => { // Filter out version_conflict_engine_exception since these just mean diff --git a/src/core/server/saved_objects/migrationsv2/initial_state.test.ts b/src/core/server/saved_objects/migrationsv2/initial_state.test.ts index 4066efeb65de0..26ba129cbeab4 100644 --- a/src/core/server/saved_objects/migrationsv2/initial_state.test.ts +++ b/src/core/server/saved_objects/migrationsv2/initial_state.test.ts @@ -6,6 +6,7 @@ * Side Public License, v 1. 
*/ +import { ByteSizeValue } from '@kbn/config-schema'; import * as Option from 'fp-ts/Option'; import { SavedObjectsMigrationConfigType } from '../saved_objects_config'; import { SavedObjectTypeRegistry } from '../saved_objects_type_registry'; @@ -21,6 +22,7 @@ describe('createInitialState', () => { const migrationsConfig = ({ retryAttempts: 15, batchSize: 1000, + maxBatchSizeBytes: ByteSizeValue.parse('100mb'), } as unknown) as SavedObjectsMigrationConfigType; it('creates the initial state for the model based on the passed in parameters', () => { expect( @@ -37,6 +39,7 @@ describe('createInitialState', () => { }) ).toEqual({ batchSize: 1000, + maxBatchSizeBytes: ByteSizeValue.parse('100mb').getValueInBytes(), controlState: 'INIT', currentAlias: '.kibana_task_manager', excludeFromUpgradeFilterHooks: {}, diff --git a/src/core/server/saved_objects/migrationsv2/initial_state.ts b/src/core/server/saved_objects/migrationsv2/initial_state.ts index dce37b384a4f7..a61967be9242c 100644 --- a/src/core/server/saved_objects/migrationsv2/initial_state.ts +++ b/src/core/server/saved_objects/migrationsv2/initial_state.ts @@ -82,6 +82,7 @@ export const createInitialState = ({ retryDelay: 0, retryAttempts: migrationsConfig.retryAttempts, batchSize: migrationsConfig.batchSize, + maxBatchSizeBytes: migrationsConfig.maxBatchSizeBytes.getValueInBytes(), logs: [], unusedTypesQuery: excludeUnusedTypesQuery, knownTypes, diff --git a/src/core/server/saved_objects/migrationsv2/integration_tests/migration_7.7.2_xpack_100k.test.ts b/src/core/server/saved_objects/migrationsv2/integration_tests/7.7.2_xpack_100k.test.ts similarity index 94% rename from src/core/server/saved_objects/migrationsv2/integration_tests/migration_7.7.2_xpack_100k.test.ts rename to src/core/server/saved_objects/migrationsv2/integration_tests/7.7.2_xpack_100k.test.ts index ed21349a700fc..41d89e2a01541 100644 --- a/src/core/server/saved_objects/migrationsv2/integration_tests/migration_7.7.2_xpack_100k.test.ts +++ b/src/core/server/saved_objects/migrationsv2/integration_tests/7.7.2_xpack_100k.test.ts @@ -17,7 +17,7 @@ import { InternalCoreStart } from '../../../internal_types'; import { Root } from '../../../root'; const kibanaVersion = Env.createDefault(REPO_ROOT, getEnvOptions()).packageInfo.version; -const logFilePath = path.join(__dirname, 'migration_test_kibana.log'); +const logFilePath = path.join(__dirname, '7.7.2_xpack_100k.log'); async function removeLogFile() { // ignore errors if it doesn't exist @@ -61,9 +61,12 @@ describe('migration from 7.7.2-xpack with 100k objects', () => { }, }, }, - root: { - appenders: ['default', 'file'], - }, + loggers: [ + { + name: 'root', + appenders: ['file'], + }, + ], }, }, { diff --git a/src/core/server/saved_objects/migrationsv2/integration_tests/migration_7_13_0_failed_action_tasks.test.ts b/src/core/server/saved_objects/migrationsv2/integration_tests/7_13_0_failed_action_tasks.test.ts similarity index 99% rename from src/core/server/saved_objects/migrationsv2/integration_tests/migration_7_13_0_failed_action_tasks.test.ts rename to src/core/server/saved_objects/migrationsv2/integration_tests/7_13_0_failed_action_tasks.test.ts index 0788a7ecdf0b1..d70e034703158 100644 --- a/src/core/server/saved_objects/migrationsv2/integration_tests/migration_7_13_0_failed_action_tasks.test.ts +++ b/src/core/server/saved_objects/migrationsv2/integration_tests/7_13_0_failed_action_tasks.test.ts @@ -12,7 +12,7 @@ import * as kbnTestServer from '../../../../test_helpers/kbn_server'; import { Root } from '../../../root'; 
import { ElasticsearchClient } from '../../../elasticsearch'; -const logFilePath = Path.join(__dirname, '7_13_failed_action_tasks_test.log'); +const logFilePath = Path.join(__dirname, '7_13_failed_action_tasks.log'); async function removeLogFile() { // ignore errors if it doesn't exist diff --git a/src/core/server/saved_objects/migrationsv2/integration_tests/migration_7_13_0_transform_failures.test.ts b/src/core/server/saved_objects/migrationsv2/integration_tests/7_13_0_transform_failures.test.ts similarity index 99% rename from src/core/server/saved_objects/migrationsv2/integration_tests/migration_7_13_0_transform_failures.test.ts rename to src/core/server/saved_objects/migrationsv2/integration_tests/7_13_0_transform_failures.test.ts index 3258732c6fdd2..fb40bda81cba5 100644 --- a/src/core/server/saved_objects/migrationsv2/integration_tests/migration_7_13_0_transform_failures.test.ts +++ b/src/core/server/saved_objects/migrationsv2/integration_tests/7_13_0_transform_failures.test.ts @@ -12,7 +12,7 @@ import Util from 'util'; import * as kbnTestServer from '../../../../test_helpers/kbn_server'; import { Root } from '../../../root'; -const logFilePath = Path.join(__dirname, '7_13_corrupt_transform_failures_test.log'); +const logFilePath = Path.join(__dirname, '7_13_corrupt_transform_failures.log'); const asyncUnlink = Util.promisify(Fs.unlink); diff --git a/src/core/server/saved_objects/migrationsv2/integration_tests/migration_7_13_0_unknown_types.test.ts b/src/core/server/saved_objects/migrationsv2/integration_tests/7_13_0_unknown_types.test.ts similarity index 86% rename from src/core/server/saved_objects/migrationsv2/integration_tests/migration_7_13_0_unknown_types.test.ts rename to src/core/server/saved_objects/migrationsv2/integration_tests/7_13_0_unknown_types.test.ts index aded389bbb595..0be8b1187af71 100644 --- a/src/core/server/saved_objects/migrationsv2/integration_tests/migration_7_13_0_unknown_types.test.ts +++ b/src/core/server/saved_objects/migrationsv2/integration_tests/7_13_0_unknown_types.test.ts @@ -16,10 +16,12 @@ import { ElasticsearchClient } from '../../../elasticsearch'; import { Env } from '@kbn/config'; import { REPO_ROOT } from '@kbn/utils'; import { getEnvOptions } from '../../../config/mocks'; +import { retryAsync } from '../test_helpers/retry_async'; +import { LogRecord } from '@kbn/logging'; const kibanaVersion = Env.createDefault(REPO_ROOT, getEnvOptions()).packageInfo.version; const targetIndex = `.kibana_${kibanaVersion}_001`; -const logFilePath = Path.join(__dirname, '7_13_unknown_types_test.log'); +const logFilePath = Path.join(__dirname, '7_13_unknown_types.log'); async function removeLogFile() { // ignore errors if it doesn't exist @@ -68,23 +70,30 @@ describe('migration v2', () => { await root.setup(); await root.start(); - const logFileContent = await fs.readFile(logFilePath, 'utf-8'); - const records = logFileContent - .split('\n') - .filter(Boolean) - .map((str) => JSON5.parse(str)); + let unknownDocsWarningLog: LogRecord; - const unknownDocsWarningLog = records.find((rec) => - rec.message.startsWith(`[.kibana] CHECK_UNKNOWN_DOCUMENTS`) - ); + await retryAsync( + async () => { + const logFileContent = await fs.readFile(logFilePath, 'utf-8'); + const records = logFileContent + .split('\n') + .filter(Boolean) + .map((str) => JSON5.parse(str)); + + unknownDocsWarningLog = records.find((rec) => + rec.message.startsWith(`[.kibana] CHECK_UNKNOWN_DOCUMENTS`) + ); - expect( - unknownDocsWarningLog.message.startsWith( - '[.kibana] CHECK_UNKNOWN_DOCUMENTS 
Upgrades will fail for 8.0+ because documents were found for unknown saved ' + - 'object types. To ensure that upgrades will succeed in the future, either re-enable plugins or delete ' + - `these documents from the "${targetIndex}" index after the current upgrade completes.` - ) - ).toBeTruthy(); + expect( + unknownDocsWarningLog.message.startsWith( + '[.kibana] CHECK_UNKNOWN_DOCUMENTS Upgrades will fail for 8.0+ because documents were found for unknown saved ' + + 'object types. To ensure that upgrades will succeed in the future, either re-enable plugins or delete ' + + `these documents from the "${targetIndex}" index after the current upgrade completes.` + ) + ).toBeTruthy(); + }, + { retryAttempts: 10, retryDelayMs: 200 } + ); const unknownDocs = [ { type: 'space', id: 'space:default' }, diff --git a/src/core/server/saved_objects/migrationsv2/integration_tests/archives/7.14.0_xpack_sample_saved_objects.zip b/src/core/server/saved_objects/migrationsv2/integration_tests/archives/7.14.0_xpack_sample_saved_objects.zip new file mode 100644 index 0000000000000..70d68587e3603 Binary files /dev/null and b/src/core/server/saved_objects/migrationsv2/integration_tests/archives/7.14.0_xpack_sample_saved_objects.zip differ diff --git a/src/core/server/saved_objects/migrationsv2/integration_tests/batch_size_bytes.test.ts b/src/core/server/saved_objects/migrationsv2/integration_tests/batch_size_bytes.test.ts new file mode 100644 index 0000000000000..e96aeb6a93b65 --- /dev/null +++ b/src/core/server/saved_objects/migrationsv2/integration_tests/batch_size_bytes.test.ts @@ -0,0 +1,145 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +import Path from 'path'; +import fs from 'fs/promises'; +import JSON5 from 'json5'; +import * as kbnTestServer from '../../../../test_helpers/kbn_server'; +import { Root } from '../../../root'; +import { ElasticsearchClient } from '../../../elasticsearch'; +import { Env } from '@kbn/config'; +import { REPO_ROOT } from '@kbn/utils'; +import { getEnvOptions } from '../../../config/mocks'; +import { LogRecord } from '@kbn/logging'; +import { retryAsync } from '../test_helpers/retry_async'; + +const kibanaVersion = Env.createDefault(REPO_ROOT, getEnvOptions()).packageInfo.version; +const targetIndex = `.kibana_${kibanaVersion}_001`; +const logFilePath = Path.join(__dirname, 'batch_size_bytes.log'); + +async function removeLogFile() { + // ignore errors if it doesn't exist + await fs.unlink(logFilePath).catch(() => void 0); +} + +describe('migration v2', () => { + let esServer: kbnTestServer.TestElasticsearchUtils; + let root: Root; + let startES: () => Promise<kbnTestServer.TestElasticsearchUtils>; + + beforeAll(async () => { + await removeLogFile(); + }); + + beforeEach(() => { + ({ startES } = kbnTestServer.createTestServers({ + adjustTimeout: (t: number) => jest.setTimeout(t), + settings: { + es: { + license: 'basic', + dataArchive: Path.join(__dirname, 'archives', '7.14.0_xpack_sample_saved_objects.zip'), + esArgs: ['http.max_content_length=1715275b'], + }, + }, + })); + }); + + afterEach(async () => { + if (root) { + await root.shutdown(); + } + if (esServer) { + await esServer.stop(); + } + + await new Promise((resolve) => setTimeout(resolve, 10000)); + }); + + it('completes the migration even when a full batch would exceed ES http.max_content_length', async () => { + root = createRoot({ maxBatchSizeBytes: 1715275 }); + esServer = await startES(); + await root.preboot(); + await root.setup(); + await expect(root.start()).resolves.toBeTruthy(); + + await new Promise((resolve) => setTimeout(resolve, 1000)); + + const esClient: ElasticsearchClient = esServer.es.getClient(); + const migratedIndexResponse = await esClient.count({ + index: targetIndex, + }); + const oldIndexResponse = await esClient.count({ + index: '.kibana_7.14.0_001', + }); + + // Use a >= comparison since once Kibana has started it might create new + // documents like telemetry tasks + expect(migratedIndexResponse.body.count).toBeGreaterThanOrEqual(oldIndexResponse.body.count); + }); + + it('fails with a descriptive message when a single document exceeds maxBatchSizeBytes', async () => { + root = createRoot({ maxBatchSizeBytes: 1015275 }); + esServer = await startES(); + await root.preboot(); + await root.setup(); + await expect(root.start()).rejects.toMatchInlineSnapshot( + `[Error: Unable to complete saved object migrations for the [.kibana] index: The document with _id "canvas-workpad-template:workpad-template-061d7868-2b4e-4dc8-8bf7-3772b52926e5" is 1715275 bytes which exceeds the configured maximum batch size of 1015275 bytes.
To proceed, please increase the 'migrations.maxBatchSizeBytes' Kibana configuration option and ensure that the Elasticsearch 'http.max_content_length' configuration option is set to an equal or larger value.]` + ); + + await retryAsync( + async () => { + const logFileContent = await fs.readFile(logFilePath, 'utf-8'); + const records = logFileContent + .split('\n') + .filter(Boolean) + .map((str) => JSON5.parse(str)) as LogRecord[]; + expect( + records.find((rec) => + rec.message.startsWith( + `Unable to complete saved object migrations for the [.kibana] index: The document with _id "canvas-workpad-template:workpad-template-061d7868-2b4e-4dc8-8bf7-3772b52926e5" is 1715275 bytes which exceeds the configured maximum batch size of 1015275 bytes. To proceed, please increase the 'migrations.maxBatchSizeBytes' Kibana configuration option and ensure that the Elasticsearch 'http.max_content_length' configuration option is set to an equal or larger value.` + ) + ) + ).toBeDefined(); + }, + { retryAttempts: 10, retryDelayMs: 200 } + ); + }); +}); + +function createRoot(options: { maxBatchSizeBytes?: number }) { + return kbnTestServer.createRootWithCorePlugins( + { + migrations: { + skip: false, + enableV2: true, + batchSize: 1000, + maxBatchSizeBytes: options.maxBatchSizeBytes, + }, + logging: { + appenders: { + file: { + type: 'file', + fileName: logFilePath, + layout: { + type: 'json', + }, + }, + }, + loggers: [ + { + name: 'root', + appenders: ['file'], + }, + ], + }, + }, + { + oss: true, + } + ); +} diff --git a/src/core/server/saved_objects/migrationsv2/integration_tests/batch_size_bytes_exceeds_es_content_length.test.ts b/src/core/server/saved_objects/migrationsv2/integration_tests/batch_size_bytes_exceeds_es_content_length.test.ts new file mode 100644 index 0000000000000..192321227d4ae --- /dev/null +++ b/src/core/server/saved_objects/migrationsv2/integration_tests/batch_size_bytes_exceeds_es_content_length.test.ts @@ -0,0 +1,117 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +import Path from 'path'; +import fs from 'fs/promises'; +import JSON5 from 'json5'; +import * as kbnTestServer from '../../../../test_helpers/kbn_server'; +import { Root } from '../../../root'; +import { retryAsync } from '../test_helpers/retry_async'; + +const logFilePath = Path.join(__dirname, 'batch_size_bytes_exceeds_es_content_length.log'); + +async function removeLogFile() { + // ignore errors if it doesn't exist + await fs.unlink(logFilePath).catch(() => void 0); +} + +describe('migration v2', () => { + let esServer: kbnTestServer.TestElasticsearchUtils; + let root: Root; + let startES: () => Promise<kbnTestServer.TestElasticsearchUtils>; + + beforeAll(async () => { + await removeLogFile(); + }); + + beforeEach(() => { + ({ startES } = kbnTestServer.createTestServers({ + adjustTimeout: (t: number) => jest.setTimeout(t), + settings: { + es: { + license: 'basic', + dataArchive: Path.join(__dirname, 'archives', '7.14.0_xpack_sample_saved_objects.zip'), + esArgs: ['http.max_content_length=1mb'], + }, + }, + })); + }); + + afterEach(async () => { + if (root) { + await root.shutdown(); + } + if (esServer) { + await esServer.stop(); + } + + await new Promise((resolve) => setTimeout(resolve, 10000)); + }); + + it('fails with a descriptive message when maxBatchSizeBytes exceeds ES http.max_content_length', async () => { + root = createRoot({ maxBatchSizeBytes: 1715275 }); + esServer = await startES(); + await root.preboot(); + await root.setup(); + await expect(root.start()).rejects.toMatchInlineSnapshot( + `[Error: Unable to complete saved object migrations for the [.kibana] index: While indexing a batch of saved objects, Elasticsearch returned a 413 Request Entity Too Large exception. Ensure that the Kibana configuration option 'migrations.maxBatchSizeBytes' is set to a value that is lower than or equal to the Elasticsearch 'http.max_content_length' configuration option.]` + ); + + await retryAsync( + async () => { + const logFileContent = await fs.readFile(logFilePath, 'utf-8'); + const records = logFileContent + .split('\n') + .filter(Boolean) + .map((str) => JSON5.parse(str)) as any[]; + + expect( + records.find((rec) => + rec.message.startsWith( + `Unable to complete saved object migrations for the [.kibana] index: While indexing a batch of saved objects, Elasticsearch returned a 413 Request Entity Too Large exception.
Ensure that the Kibana configuration option 'migrations.maxBatchSizeBytes' is set to a value that is lower than or equal to the Elasticsearch 'http.max_content_length' configuration option.` + ) + ) + ).toBeDefined(); + }, + { retryAttempts: 10, retryDelayMs: 200 } + ); + }); +}); + +function createRoot(options: { maxBatchSizeBytes?: number }) { + return kbnTestServer.createRootWithCorePlugins( + { + migrations: { + skip: false, + enableV2: true, + batchSize: 1000, + maxBatchSizeBytes: options.maxBatchSizeBytes, + }, + logging: { + appenders: { + file: { + type: 'file', + fileName: logFilePath, + layout: { + type: 'json', + }, + }, + }, + loggers: [ + { + name: 'root', + appenders: ['file'], + }, + ], + }, + }, + { + oss: true, + } + ); +} diff --git a/src/core/server/saved_objects/migrationsv2/integration_tests/cleanup.test.ts b/src/core/server/saved_objects/migrationsv2/integration_tests/cleanup.test.ts index 684b75056bf44..bb408d14df6d7 100644 --- a/src/core/server/saved_objects/migrationsv2/integration_tests/cleanup.test.ts +++ b/src/core/server/saved_objects/migrationsv2/integration_tests/cleanup.test.ts @@ -13,7 +13,7 @@ import JSON5 from 'json5'; import * as kbnTestServer from '../../../../test_helpers/kbn_server'; import type { Root } from '../../../root'; -const logFilePath = Path.join(__dirname, 'cleanup_test.log'); +const logFilePath = Path.join(__dirname, 'cleanup.log'); const asyncUnlink = Util.promisify(Fs.unlink); const asyncReadFile = Util.promisify(Fs.readFile); diff --git a/src/core/server/saved_objects/migrationsv2/integration_tests/type_migration_failure.test.ts b/src/core/server/saved_objects/migrationsv2/integration_tests/collects_corrupt_docs.test.ts similarity index 98% rename from src/core/server/saved_objects/migrationsv2/integration_tests/type_migration_failure.test.ts rename to src/core/server/saved_objects/migrationsv2/integration_tests/collects_corrupt_docs.test.ts index b3721d603d7d9..02b7d0eae2a90 100644 --- a/src/core/server/saved_objects/migrationsv2/integration_tests/type_migration_failure.test.ts +++ b/src/core/server/saved_objects/migrationsv2/integration_tests/collects_corrupt_docs.test.ts @@ -12,7 +12,7 @@ import Util from 'util'; import * as kbnTestServer from '../../../../test_helpers/kbn_server'; import { Root } from '../../../root'; -const logFilePath = Path.join(__dirname, 'migration_test_corrupt_docs_kibana.log'); +const logFilePath = Path.join(__dirname, 'collects_corrupt_docs.log'); const asyncUnlink = Util.promisify(Fs.unlink); diff --git a/src/core/server/saved_objects/migrationsv2/integration_tests/corrupt_outdated_docs.test.ts b/src/core/server/saved_objects/migrationsv2/integration_tests/corrupt_outdated_docs.test.ts index de58dded69422..446542cc37306 100644 --- a/src/core/server/saved_objects/migrationsv2/integration_tests/corrupt_outdated_docs.test.ts +++ b/src/core/server/saved_objects/migrationsv2/integration_tests/corrupt_outdated_docs.test.ts @@ -12,7 +12,7 @@ import Util from 'util'; import * as kbnTestServer from '../../../../test_helpers/kbn_server'; import { Root } from '../../../root'; -const logFilePath = Path.join(__dirname, 'migration_test_corrupt_docs_kibana.log'); +const logFilePath = Path.join(__dirname, 'corrupt_outdated_docs.log'); const asyncUnlink = Util.promisify(Fs.unlink); diff --git a/src/core/server/saved_objects/migrationsv2/integration_tests/migration.test.ts b/src/core/server/saved_objects/migrationsv2/integration_tests/migration_from_v1.test.ts similarity index 99% rename from 
src/core/server/saved_objects/migrationsv2/integration_tests/migration.test.ts rename to src/core/server/saved_objects/migrationsv2/integration_tests/migration_from_v1.test.ts index 2a1d6bff0c247..fc01e6a408497 100644 --- a/src/core/server/saved_objects/migrationsv2/integration_tests/migration.test.ts +++ b/src/core/server/saved_objects/migrationsv2/integration_tests/migration_from_v1.test.ts @@ -21,7 +21,7 @@ import { Root } from '../../../root'; const kibanaVersion = Env.createDefault(REPO_ROOT, getEnvOptions()).packageInfo.version; -const logFilePath = Path.join(__dirname, 'migration_test_kibana_from_v1.log'); +const logFilePath = Path.join(__dirname, 'migration_from_v1.log'); const asyncUnlink = Util.promisify(Fs.unlink); async function removeLogFile() { diff --git a/src/core/server/saved_objects/migrationsv2/integration_tests/outdated_docs.test.ts b/src/core/server/saved_objects/migrationsv2/integration_tests/outdated_docs.test.ts index 822a44fb22dc1..58ff34913f5d4 100644 --- a/src/core/server/saved_objects/migrationsv2/integration_tests/outdated_docs.test.ts +++ b/src/core/server/saved_objects/migrationsv2/integration_tests/outdated_docs.test.ts @@ -14,7 +14,7 @@ import * as kbnTestServer from '../../../../test_helpers/kbn_server'; import type { ElasticsearchClient } from '../../../elasticsearch'; import { Root } from '../../../root'; -const logFilePath = Path.join(__dirname, 'migration_test_kibana.log'); +const logFilePath = Path.join(__dirname, 'outdated_docs.log'); const asyncUnlink = Util.promisify(Fs.unlink); async function removeLogFile() { diff --git a/src/core/server/saved_objects/migrationsv2/integration_tests/rewriting_id.test.ts b/src/core/server/saved_objects/migrationsv2/integration_tests/rewriting_id.test.ts index 0bdf7a0d98766..4564a89ee0816 100644 --- a/src/core/server/saved_objects/migrationsv2/integration_tests/rewriting_id.test.ts +++ b/src/core/server/saved_objects/migrationsv2/integration_tests/rewriting_id.test.ts @@ -15,7 +15,7 @@ import type { ElasticsearchClient } from '../../../elasticsearch'; import { Root } from '../../../root'; import { deterministicallyRegenerateObjectId } from '../../migrations/core/document_migrator'; -const logFilePath = Path.join(__dirname, 'migration_test_kibana.log'); +const logFilePath = Path.join(__dirname, 'rewriting_id.log'); const asyncUnlink = Util.promisify(Fs.unlink); async function removeLogFile() { diff --git a/src/core/server/saved_objects/migrationsv2/migrations_state_action_machine.test.ts b/src/core/server/saved_objects/migrationsv2/migrations_state_action_machine.test.ts index 773a0af469bd4..a312ac6be0c3d 100644 --- a/src/core/server/saved_objects/migrationsv2/migrations_state_action_machine.test.ts +++ b/src/core/server/saved_objects/migrationsv2/migrations_state_action_machine.test.ts @@ -17,6 +17,7 @@ import { elasticsearchClientMock } from '../../elasticsearch/client/mocks'; import { LoggerAdapter } from '../../logging/logger_adapter'; import { AllControlStates, State } from './types'; import { createInitialState } from './initial_state'; +import { ByteSizeValue } from '@kbn/config-schema'; const esClient = elasticsearchServiceMock.createElasticsearchClient(); @@ -40,6 +41,7 @@ describe('migrationsStateActionMachine', () => { indexPrefix: '.my-so-index', migrationsConfig: { batchSize: 1000, + maxBatchSizeBytes: new ByteSizeValue(1e8), pollInterval: 0, scrollDuration: '0s', skip: false, @@ -235,6 +237,7 @@ describe('migrationsStateActionMachine', () => { ...initialState, reason: 'the fatal reason', 
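// Note: the 'password' fields in the fixtures below must never reach the logs; the expected migrationState asserted further down contains only each document's _id.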
outdatedDocuments: [{ _id: '1234', password: 'sensitive password' }], + transformedDocBatches: [[{ _id: '1234', password: 'sensitive transformed password' }]], } as State, logger: mockLogger.get(), model: transitionModel(['LEGACY_DELETE', 'FATAL']), @@ -257,6 +260,7 @@ describe('migrationsStateActionMachine', () => { kibana: { migrationState: { batchSize: 1000, + maxBatchSizeBytes: 1e8, controlState: 'LEGACY_DELETE', currentAlias: '.my-so-index', excludeFromUpgradeFilterHooks: {}, @@ -270,7 +274,7 @@ describe('migrationsStateActionMachine', () => { message: 'Log from LEGACY_DELETE control state', }, ], - outdatedDocuments: ['1234'], + outdatedDocuments: [{ _id: '1234' }], outdatedDocumentsQuery: expect.any(Object), preMigrationScript: { _tag: 'None', @@ -284,6 +288,7 @@ describe('migrationsStateActionMachine', () => { }, tempIndex: '.my-so-index_7.11.0_reindex_temp', tempIndexMappings: expect.any(Object), + transformedDocBatches: [[{ _id: '1234' }]], unusedTypesQuery: expect.any(Object), versionAlias: '.my-so-index_7.11.0', versionIndex: '.my-so-index_7.11.0_001', @@ -304,6 +309,7 @@ describe('migrationsStateActionMachine', () => { kibana: { migrationState: { batchSize: 1000, + maxBatchSizeBytes: 1e8, controlState: 'FATAL', currentAlias: '.my-so-index', excludeFromUpgradeFilterHooks: {}, @@ -321,7 +327,7 @@ describe('migrationsStateActionMachine', () => { message: 'Log from FATAL control state', }, ], - outdatedDocuments: ['1234'], + outdatedDocuments: [{ _id: '1234' }], outdatedDocumentsQuery: expect.any(Object), preMigrationScript: { _tag: 'None', @@ -335,6 +341,7 @@ describe('migrationsStateActionMachine', () => { }, tempIndex: '.my-so-index_7.11.0_reindex_temp', tempIndexMappings: expect.any(Object), + transformedDocBatches: [[{ _id: '1234' }]], unusedTypesQuery: expect.any(Object), versionAlias: '.my-so-index_7.11.0', versionIndex: '.my-so-index_7.11.0_001', @@ -447,6 +454,7 @@ describe('migrationsStateActionMachine', () => { kibana: { migrationState: { batchSize: 1000, + maxBatchSizeBytes: 1e8, controlState: 'LEGACY_REINDEX', currentAlias: '.my-so-index', excludeFromUpgradeFilterHooks: {}, @@ -474,6 +482,7 @@ describe('migrationsStateActionMachine', () => { }, tempIndex: '.my-so-index_7.11.0_reindex_temp', tempIndexMappings: expect.any(Object), + transformedDocBatches: [], unusedTypesQuery: expect.any(Object), versionAlias: '.my-so-index_7.11.0', versionIndex: '.my-so-index_7.11.0_001', @@ -488,6 +497,7 @@ describe('migrationsStateActionMachine', () => { kibana: { migrationState: { batchSize: 1000, + maxBatchSizeBytes: 1e8, controlState: 'LEGACY_DELETE', currentAlias: '.my-so-index', excludeFromUpgradeFilterHooks: {}, @@ -519,6 +529,7 @@ describe('migrationsStateActionMachine', () => { }, tempIndex: '.my-so-index_7.11.0_reindex_temp', tempIndexMappings: expect.any(Object), + transformedDocBatches: [], unusedTypesQuery: expect.any(Object), versionAlias: '.my-so-index_7.11.0', versionIndex: '.my-so-index_7.11.0_001', diff --git a/src/core/server/saved_objects/migrationsv2/migrations_state_action_machine.ts b/src/core/server/saved_objects/migrationsv2/migrations_state_action_machine.ts index 8e3b8ee4ab556..58c299b77fc60 100644 --- a/src/core/server/saved_objects/migrationsv2/migrations_state_action_machine.ts +++ b/src/core/server/saved_objects/migrationsv2/migrations_state_action_machine.ts @@ -13,7 +13,8 @@ import type { ElasticsearchClient } from '../../elasticsearch'; import { getErrorMessage, getRequestDebugMeta } from '../../elasticsearch'; import { Model, Next, stateActionMachine 
} from './state_action_machine'; import { cleanup } from './migrations_state_machine_cleanup'; -import { State } from './types'; +import { ReindexSourceToTempIndex, ReindexSourceToTempIndexBulk, State } from './types'; +import { SavedObjectsRawDoc } from '../serialization'; interface StateLogMeta extends LogMeta { kibana: { @@ -140,11 +141,22 @@ export async function migrationStateActionMachine({ const newState = model(state, res); // Redact the state to reduce the memory consumption and so that we // don't log sensitive information inside documents by only keeping - // the _id's of outdatedDocuments + // the _id's of documents const redactedNewState = { ...newState, - // @ts-expect-error outdatedDocuments don't exist in all states - ...{ outdatedDocuments: (newState.outdatedDocuments ?? []).map((doc) => doc._id) }, + ...{ + outdatedDocuments: ((newState as ReindexSourceToTempIndex).outdatedDocuments ?? []).map( + (doc) => + ({ + _id: doc._id, + } as SavedObjectsRawDoc) + ), + }, + ...{ + transformedDocBatches: ( + (newState as ReindexSourceToTempIndexBulk).transformedDocBatches ?? [] + ).map((batches) => batches.map((doc) => ({ _id: doc._id }))) as [SavedObjectsRawDoc[]], + }, }; executionLog.push({ type: 'transition', diff --git a/src/core/server/saved_objects/migrationsv2/model/create_batches.test.ts b/src/core/server/saved_objects/migrationsv2/model/create_batches.test.ts new file mode 100644 index 0000000000000..552c4c237675f --- /dev/null +++ b/src/core/server/saved_objects/migrationsv2/model/create_batches.test.ts @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ +import * as Either from 'fp-ts/lib/Either'; +import { SavedObjectsRawDoc } from '../../serialization'; +import { createBatches } from './create_batches'; + +describe('createBatches', () => { + const DOCUMENT_SIZE_BYTES = 128; + const INDEX = '.kibana_version_index'; + it('returns right one batch if all documents fit in maxBatchSizeBytes', () => { + const documents = [ + { _id: '', _source: { type: 'dashboard', title: 'my saved object title ¹' } }, + { _id: '', _source: { type: 'dashboard', title: 'my saved object title ²' } }, + { _id: '', _source: { type: 'dashboard', title: 'my saved object title ®' } }, + ]; + + expect(createBatches(documents, INDEX, DOCUMENT_SIZE_BYTES * 3)).toEqual( + Either.right([documents]) + ); + }); + it('creates multiple batches with each batch limited to maxBatchSizeBytes', () => { + const documents = [ + { _id: '', _source: { type: 'dashboard', title: 'my saved object title ¹' } }, + { _id: '', _source: { type: 'dashboard', title: 'my saved object title ²' } }, + { _id: '', _source: { type: 'dashboard', title: 'my saved object title ®' } }, + { _id: '', _source: { type: 'dashboard', title: 'my saved object title 44' } }, + { _id: '', _source: { type: 'dashboard', title: 'my saved object title 55' } }, + ]; + expect(createBatches(documents, INDEX, DOCUMENT_SIZE_BYTES * 2)).toEqual( + Either.right([[documents[0], documents[1]], [documents[2], documents[3]], [documents[4]]]) + ); + }); + it('creates a single empty batch if there are no documents', () => { + const documents = [] as SavedObjectsRawDoc[]; + expect(createBatches(documents, INDEX, 100)).toEqual(Either.right([[]])); + }); + it('throws if any one document exceeds the maxBatchSizeBytes', () => { + const documents = [ + { _id: '', _source: { type: 'dashboard', title: 'my saved object title ¹' } }, + { + _id: '', + _source: { + type: 'dashboard', + title: 'my saved object title ² with a very long title that exceeds max size bytes', + }, + }, + { _id: '', _source: { type: 'dashboard', title: 'my saved object title ®' } }, + ]; + expect(createBatches(documents, INDEX, 178)).toEqual( + Either.left({ + maxBatchSizeBytes: 178, + docSizeBytes: 179, + type: 'document_exceeds_batch_size_bytes', + document: documents[1], + }) + ); + }); +}); diff --git a/src/core/server/saved_objects/migrationsv2/model/create_batches.ts b/src/core/server/saved_objects/migrationsv2/model/create_batches.ts new file mode 100644 index 0000000000000..c80003fef09fb --- /dev/null +++ b/src/core/server/saved_objects/migrationsv2/model/create_batches.ts @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import * as Either from 'fp-ts/lib/Either'; +import { SavedObjectsRawDoc } from '../..'; +import { createBulkOperationBody } from '../actions/bulk_overwrite_transformed_documents'; + +/** + * Creates batches of documents to be used by the bulk API. Each batch will + * have a request body content length that's <= maxBatchSizeBytes + */ +export function createBatches( + docs: SavedObjectsRawDoc[], + index: string, + maxBatchSizeBytes: number +) { + /* To build up the NDJSON request body we construct an array of objects like: + * [ + * {"index": ...} + * {"title": "my saved object"} + * ... 
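+ * (each entry above becomes its own newline-terminated line in the NDJSON request body)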
+ * ] + * However, when we call JSON.stringify on this array the resulting string + * will be surrounded by `[]` which won't be present in the NDJSON so these + * two characters need to be removed from the size calculation. + */ + const BRACKETS_BYTES = 2; + /* Each document in the NDJSON (including the last one) needs to be + * terminated by a newline, so we need to account for an extra newline + * character + */ + const NDJSON_NEW_LINE_BYTES = 1; + + const batches = [[]] as [SavedObjectsRawDoc[]]; + let currBatch = 0; + let currBatchSizeBytes = 0; + for (const doc of docs) { + const bulkOperationBody = createBulkOperationBody(doc, index); + const docSizeBytes = + Buffer.byteLength(JSON.stringify(bulkOperationBody), 'utf8') - + BRACKETS_BYTES + + NDJSON_NEW_LINE_BYTES; + if (docSizeBytes > maxBatchSizeBytes) { + return Either.left({ + type: 'document_exceeds_batch_size_bytes', + docSizeBytes, + maxBatchSizeBytes, + document: doc, + }); + } else if (currBatchSizeBytes + docSizeBytes <= maxBatchSizeBytes) { + batches[currBatch].push(doc); + currBatchSizeBytes = currBatchSizeBytes + docSizeBytes; + } else { + currBatch++; + batches[currBatch] = [doc]; + currBatchSizeBytes = docSizeBytes; + } + } + + return Either.right(batches); +} diff --git a/src/core/server/saved_objects/migrationsv2/model/model.test.ts b/src/core/server/saved_objects/migrationsv2/model/model.test.ts index f24d175f416a7..1d017116bf3fd 100644 --- a/src/core/server/saved_objects/migrationsv2/model/model.test.ts +++ b/src/core/server/saved_objects/migrationsv2/model/model.test.ts @@ -58,6 +58,7 @@ describe('migrations v2 model', () => { retryDelay: 0, retryAttempts: 15, batchSize: 1000, + maxBatchSizeBytes: 1e8, indexPrefix: '.kibana', outdatedDocumentsQuery: {}, targetIndexMappings: { @@ -1065,6 +1066,8 @@ }); const newState = model(state, res) as ReindexSourceToTempIndexBulk; expect(newState.controlState).toEqual('REINDEX_SOURCE_TO_TEMP_INDEX_BULK'); + expect(newState.currentBatch).toEqual(0); + expect(newState.transformedDocBatches).toEqual([processedDocs]); expect(newState.progress.processed).toBe(0); // Result of `(undefined ?? 0) + corruptDocumentsId.length` }); @@ -1119,16 +1122,19 @@ }); describe('REINDEX_SOURCE_TO_TEMP_INDEX_BULK', () => { - const transformedDocs = [ - { - _id: 'a:b', - _source: { type: 'a', a: { name: 'HOI!' }, migrationVersion: {}, references: [] }, - }, - ] as SavedObjectsRawDoc[]; + const transformedDocBatches = [ + [ + { + _id: 'a:b', + _source: { type: 'a', a: { name: 'HOI!' }, migrationVersion: {}, references: [] }, + }, + ], + ] as [SavedObjectsRawDoc[]]; const reindexSourceToTempIndexBulkState: ReindexSourceToTempIndexBulk = { ...baseState, controlState: 'REINDEX_SOURCE_TO_TEMP_INDEX_BULK', - transformedDocs, + transformedDocBatches, + currentBatch: 0, versionIndexReadyActions: Option.none, sourceIndex: Option.some('.kibana') as Option.Some<string>, sourceIndexPitId: 'pit_id', @@ -1171,7 +1177,7 @@ const newState = model(reindexSourceToTempIndexBulkState, res) as FatalState; expect(newState.controlState).toEqual('FATAL'); expect(newState.reason).toMatchInlineSnapshot( - `"While indexing a batch of saved objects, Elasticsearch returned a 413 Request Entity Too Large exception. Try to use smaller batches by changing the Kibana 'migrations.batchSize' configuration option and restarting Kibana."` + `"While indexing a batch of saved objects, Elasticsearch returned a 413 Request Entity Too Large exception. Ensure that the Kibana configuration option 'migrations.maxBatchSizeBytes' is set to a value that is lower than or equal to the Elasticsearch 'http.max_content_length' configuration option."` ); }); test('REINDEX_SOURCE_TO_TEMP_INDEX_BULK should throw a throwBadResponse error if action failed', () => { @@ -1438,7 +1444,8 @@ res ) as TransformedDocumentsBulkIndex; expect(newState.controlState).toEqual('TRANSFORMED_DOCUMENTS_BULK_INDEX'); - expect(newState.transformedDocs).toEqual(processedDocs); + expect(newState.transformedDocBatches).toEqual([processedDocs]); + expect(newState.currentBatch).toEqual(0); expect(newState.retryCount).toEqual(0); expect(newState.retryDelay).toEqual(0); expect(newState.progress.processed).toBe(outdatedDocuments.length); @@ -1521,16 +1528,31 @@ }); describe('TRANSFORMED_DOCUMENTS_BULK_INDEX', () => { - const transformedDocs = [ - { - _id: 'a:b', - _source: { type: 'a', a: { name: 'HOI!' }, migrationVersion: {}, references: [] }, - }, - ] as SavedObjectsRawDoc[]; + const transformedDocBatches = [ + [ + // batch 0 + { + _id: 'a:b', + _source: { type: 'a', a: { name: 'HOI!' }, migrationVersion: {}, references: [] }, + }, + { + _id: 'a:c', + _source: { type: 'a', a: { name: 'HOI!' }, migrationVersion: {}, references: [] }, + }, + ], + [ + // batch 1 + { + _id: 'a:d', + _source: { type: 'a', a: { name: 'HOI!' }, migrationVersion: {}, references: [] }, + }, + ], + ] as SavedObjectsRawDoc[][]; const transformedDocumentsBulkIndexState: TransformedDocumentsBulkIndex = { ...baseState, controlState: 'TRANSFORMED_DOCUMENTS_BULK_INDEX', - transformedDocs, + transformedDocBatches, + currentBatch: 0, versionIndexReadyActions: Option.none, sourceIndex: Option.some('.kibana') as Option.Some<string>, targetIndex: '.kibana_7.11.0_001', @@ -1540,6 +1562,29 @@ progress: createInitialProgress(), }; + test('TRANSFORMED_DOCUMENTS_BULK_INDEX -> TRANSFORMED_DOCUMENTS_BULK_INDEX and increments currentBatch if more batches are left', () => { + const res: ResponseType<'TRANSFORMED_DOCUMENTS_BULK_INDEX'> = Either.right( + 'bulk_index_succeeded' + ); + const newState = model( + transformedDocumentsBulkIndexState, + res + ) as TransformedDocumentsBulkIndex; + expect(newState.controlState).toEqual('TRANSFORMED_DOCUMENTS_BULK_INDEX'); + expect(newState.currentBatch).toEqual(1); + }); + + test('TRANSFORMED_DOCUMENTS_BULK_INDEX -> OUTDATED_DOCUMENTS_SEARCH_READ if all batches were written', () => { + const res: ResponseType<'TRANSFORMED_DOCUMENTS_BULK_INDEX'> = Either.right( + 'bulk_index_succeeded' + ); + const newState = model( + { ...transformedDocumentsBulkIndexState, ...{ currentBatch: 1 } }, + res + ); + expect(newState.controlState).toEqual('OUTDATED_DOCUMENTS_SEARCH_READ'); + }); + test('TRANSFORMED_DOCUMENTS_BULK_INDEX throws if action returns left index_not_found_exception', () => { const res: ResponseType<'TRANSFORMED_DOCUMENTS_BULK_INDEX'> = Either.left({ type: 'index_not_found_exception', @@ -1570,7 +1615,7 @@ const newState = model(transformedDocumentsBulkIndexState, res) as FatalState; expect(newState.controlState).toEqual('FATAL'); expect(newState.reason).toMatchInlineSnapshot( - `"While indexing a batch of saved objects, Elasticsearch returned a 413 Request Entity Too Large exception. Try to use smaller batches by changing the Kibana 'migrations.batchSize' configuration option and restarting Kibana."` + `"While indexing a batch of saved objects, Elasticsearch returned a 413 Request Entity Too Large exception. Ensure that the Kibana configuration option 'migrations.maxBatchSizeBytes' is set to a value that is lower than or equal to the Elasticsearch 'http.max_content_length' configuration option."` ); }); }); diff --git a/src/core/server/saved_objects/migrationsv2/model/model.ts b/src/core/server/saved_objects/migrationsv2/model/model.ts index 50be4a524f5c5..8aa3d7b83b295 100644 --- a/src/core/server/saved_objects/migrationsv2/model/model.ts +++ b/src/core/server/saved_objects/migrationsv2/model/model.ts @@ -31,6 +31,19 @@ import { throwBadControlState, throwBadResponse, } from './helpers'; +import { createBatches } from './create_batches'; + +const FATAL_REASON_REQUEST_ENTITY_TOO_LARGE = `While indexing a batch of saved objects, Elasticsearch returned a 413 Request Entity Too Large exception. Ensure that the Kibana configuration option 'migrations.maxBatchSizeBytes' is set to a value that is lower than or equal to the Elasticsearch 'http.max_content_length' configuration option.`; +const fatalReasonDocumentExceedsMaxBatchSizeBytes = ({ + _id, + docSizeBytes, + maxBatchSizeBytes, +}: { + _id: string; + docSizeBytes: number; + maxBatchSizeBytes: number; +}) => + `The document with _id "${_id}" is ${docSizeBytes} bytes which exceeds the configured maximum batch size of ${maxBatchSizeBytes} bytes. To proceed, please increase the 'migrations.maxBatchSizeBytes' Kibana configuration option and ensure that the Elasticsearch 'http.max_content_length' configuration option is set to an equal or larger value.`; export const model = (currentState: State, resW: ResponseType<AllActionStates>): State => { // The action response `resW` is weakly typed, the type includes all action @@ -489,12 +502,30 @@ if (Either.isRight(res)) { if (stateP.corruptDocumentIds.length === 0 && stateP.transformErrors.length === 0) { - return { - ...stateP, - controlState: 'REINDEX_SOURCE_TO_TEMP_INDEX_BULK', // handles the actual bulk indexing into temp index - transformedDocs: [...res.right.processedDocs], - progress, - }; + const batches = createBatches( + res.right.processedDocs, + stateP.tempIndex, + stateP.maxBatchSizeBytes + ); + if (Either.isRight(batches)) { + return { + ...stateP, + controlState: 'REINDEX_SOURCE_TO_TEMP_INDEX_BULK', // handles the actual bulk indexing into temp index + transformedDocBatches: batches.right, + currentBatch: 0, + progress, + }; + } else { + return { + ...stateP, + controlState: 'FATAL', + reason: fatalReasonDocumentExceedsMaxBatchSizeBytes({ + _id: batches.left.document._id, + docSizeBytes: batches.left.docSizeBytes, + maxBatchSizeBytes: batches.left.maxBatchSizeBytes, + }), + }; + } } else { // we don't have any transform issues with the current batch of outdated docs but // we have carried through previous transformation issues. @@ -525,13 +556,21 @@ } else if (stateP.controlState === 'REINDEX_SOURCE_TO_TEMP_INDEX_BULK') { const res = resW as ExcludeRetryableEsError<ResponseType<typeof stateP.controlState>>; if (Either.isRight(res)) { - return { - ...stateP, - controlState: 'REINDEX_SOURCE_TO_TEMP_READ', - // we're still on the happy path with no transformation failures seen. - corruptDocumentIds: [], - transformErrors: [], - }; + if (stateP.currentBatch + 1 < stateP.transformedDocBatches.length) { + return { + ...stateP, + controlState: 'REINDEX_SOURCE_TO_TEMP_INDEX_BULK', + currentBatch: stateP.currentBatch + 1, + }; + } else { + return { + ...stateP, + controlState: 'REINDEX_SOURCE_TO_TEMP_READ', + // we're still on the happy path with no transformation failures seen. + corruptDocumentIds: [], + transformErrors: [], + }; + } } else { if ( isLeftTypeof(res.left, 'target_index_had_write_block') || @@ -548,7 +587,7 @@ return { ...stateP, controlState: 'FATAL', - reason: `While indexing a batch of saved objects, Elasticsearch returned a 413 Request Entity Too Large exception. Try to use smaller batches by changing the Kibana 'migrations.batchSize' configuration option and restarting Kibana.`, + reason: FATAL_REASON_REQUEST_ENTITY_TOO_LARGE, }; } throwBadResponse(stateP, res.left); @@ -677,13 +716,31 @@ // we haven't seen corrupt documents or any transformation errors thus far in the migration // index the migrated docs if (stateP.corruptDocumentIds.length === 0 && stateP.transformErrors.length === 0) { - return { - ...stateP, - controlState: 'TRANSFORMED_DOCUMENTS_BULK_INDEX', - transformedDocs: [...res.right.processedDocs], - hasTransformedDocs: true, - progress, - }; + const batches = createBatches( + res.right.processedDocs, + stateP.targetIndex, + stateP.maxBatchSizeBytes + ); + if (Either.isRight(batches)) { + return { + ...stateP, + controlState: 'TRANSFORMED_DOCUMENTS_BULK_INDEX', + transformedDocBatches: batches.right, + currentBatch: 0, + hasTransformedDocs: true, + progress, + }; + } else { + return { + ...stateP, + controlState: 'FATAL', + reason: fatalReasonDocumentExceedsMaxBatchSizeBytes({ + _id: batches.left.document._id, + docSizeBytes: batches.left.docSizeBytes, + maxBatchSizeBytes: batches.left.maxBatchSizeBytes, + }), + }; + } } else { // We have seen corrupt documents and/or transformation errors // skip indexing and go straight to reading and transforming more docs @@ -711,6 +768,13 @@ } else if (stateP.controlState === 'TRANSFORMED_DOCUMENTS_BULK_INDEX') { const res = resW as ExcludeRetryableEsError<ResponseType<typeof stateP.controlState>>; if (Either.isRight(res)) { + if (stateP.currentBatch + 1 < stateP.transformedDocBatches.length) { + return { + ...stateP, + controlState: 'TRANSFORMED_DOCUMENTS_BULK_INDEX', + currentBatch: stateP.currentBatch + 1, + }; + } return { ...stateP, controlState: 'OUTDATED_DOCUMENTS_SEARCH_READ', @@ -723,7 +787,7 @@ return { ...stateP, controlState: 'FATAL', - reason: `While indexing a batch of saved objects, Elasticsearch returned a 413 Request Entity Too Large exception.
Try to use smaller batches by changing the Kibana 'migrations.batchSize' configuration option and restarting Kibana.`, + reason: FATAL_REASON_REQUEST_ENTITY_TOO_LARGE, }; } else if ( isLeftTypeof(res.left, 'target_index_had_write_block') || diff --git a/src/core/server/saved_objects/migrationsv2/next.ts b/src/core/server/saved_objects/migrationsv2/next.ts index 9b091b6fc8509..3f3714552725b 100644 --- a/src/core/server/saved_objects/migrationsv2/next.ts +++ b/src/core/server/saved_objects/migrationsv2/next.ts @@ -111,7 +111,7 @@ export const nextActionMap = (client: ElasticsearchClient, transformRawDocs: Tra Actions.bulkOverwriteTransformedDocuments({ client, index: state.tempIndex, - transformedDocs: state.transformedDocs, + transformedDocs: state.transformedDocBatches[state.currentBatch], /** * Since we don't run a search against the target index, we disable "refresh" to speed up * the migration process. @@ -160,7 +160,7 @@ export const nextActionMap = (client: ElasticsearchClient, transformRawDocs: Tra Actions.bulkOverwriteTransformedDocuments({ client, index: state.targetIndex, - transformedDocs: state.transformedDocs, + transformedDocs: state.transformedDocBatches[state.currentBatch], /** * Since we don't run a search against the target index, we disable "refresh" to speed up * the migration process. diff --git a/src/core/server/saved_objects/migrationsv2/test_helpers/retry.test.ts b/src/core/server/saved_objects/migrationsv2/test_helpers/retry.test.ts new file mode 100644 index 0000000000000..246f61c71ae4d --- /dev/null +++ b/src/core/server/saved_objects/migrationsv2/test_helpers/retry.test.ts @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import { retryAsync } from './retry_async'; + +describe('retry', () => { + it('retries throwing functions until they succeed', async () => { + let i = 0; + await expect( + retryAsync( + () => { + if (i++ < 2) { + return Promise.reject(new Error('boom')); + } else { + return Promise.resolve('done'); + } + }, + { retryAttempts: 3, retryDelayMs: 1 } + ) + ).resolves.toEqual('done'); + }); + + it('throws if all attempts are exhausted before success', async () => { + let attempts = 0; + await expect(() => + retryAsync( + () => { + attempts++; + return Promise.reject(new Error('boom')); + }, + { retryAttempts: 3, retryDelayMs: 1 } + ) + ).rejects.toMatchInlineSnapshot(`[Error: boom]`); + expect(attempts).toEqual(3); + }); + + it('waits retryDelayMs between each attempt ', async () => { + const now = Date.now(); + let i = 0; + await retryAsync( + () => { + if (i++ < 2) { + return Promise.reject(new Error('boom')); + } else { + return Promise.resolve('done'); + } + }, + { retryAttempts: 3, retryDelayMs: 100 } + ); + expect(Date.now() - now).toBeGreaterThanOrEqual(200); + }); +}); diff --git a/src/core/server/saved_objects/migrationsv2/test_helpers/retry_async.ts b/src/core/server/saved_objects/migrationsv2/test_helpers/retry_async.ts new file mode 100644 index 0000000000000..f5dffede67a16 --- /dev/null +++ b/src/core/server/saved_objects/migrationsv2/test_helpers/retry_async.ts @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +function delay(delayInMs: number) { + return new Promise((resolve) => setTimeout(resolve, delayInMs)); +} + +export async function retryAsync<T>( + fn: () => Promise<T>, + options: { retryAttempts: number; retryDelayMs: number } +): Promise<T> { + try { + return await fn(); + } catch (e) { + if (options.retryAttempts > 1) { + await delay(options.retryDelayMs); + return retryAsync(fn, { + retryAttempts: options.retryAttempts - 1, + retryDelayMs: options.retryDelayMs, + }); + } else { + throw e; + } + } +} diff --git a/src/core/server/saved_objects/migrationsv2/types.ts b/src/core/server/saved_objects/migrationsv2/types.ts index ea03b64e03dc8..49ce12c53aa1a 100644 --- a/src/core/server/saved_objects/migrationsv2/types.ts +++ b/src/core/server/saved_objects/migrationsv2/types.ts @@ -76,19 +76,31 @@ export interface BaseState extends ControlState { readonly retryAttempts: number; /** - * The number of documents to fetch from Elasticsearch server to run migration over. + * The number of documents to process in each batch. This determines the + * maximum number of documents that will be read and written in a single + * request. * - * The higher the value, the faster the migration process will be performed since it reduces - * the number of round trips between Kibana and Elasticsearch servers. - * For the migration speed, we have to pay the price of increased memory consumption. + * The higher the value, the faster the migration will run, since it + * reduces the number of round trips between the Kibana and Elasticsearch + * servers. The price of a faster migration is increased memory consumption + * and larger HTTP payloads. * - * Since batchSize defines the number of documents, not their size, it might happen that - * Elasticsearch fails a request with circuit_breaking_exception when it retrieves a set of - * saved objects of significant size. + * Since we cannot control the size in bytes of a batch when reading, + * Elasticsearch might fail with a circuit_breaking_exception when it + * retrieves a set of saved objects of significant size. In this case, + * set a smaller batchSize value and restart the migration. * - * In this case, you should set a smaller batchSize value and restart the migration process again. + * When writing batches, we limit the number of documents in a batch + * (batchSize) as well as the size of the batch in bytes (maxBatchSizeBytes). */ readonly batchSize: number; + /** + * When writing batches, limits the batch size in bytes to ensure that we + * don't construct HTTP requests which would exceed Elasticsearch's + * http.max_content_length setting, which defaults to 100mb. + */ + readonly maxBatchSizeBytes: number; readonly logs: MigrationLog[]; /** * The current alias e.g.
`.kibana` which always points to the latest @@ -233,7 +245,8 @@ export interface ReindexSourceToTempIndex extends PostInitState { export interface ReindexSourceToTempIndexBulk extends PostInitState { readonly controlState: 'REINDEX_SOURCE_TO_TEMP_INDEX_BULK'; - readonly transformedDocs: SavedObjectsRawDoc[]; + readonly transformedDocBatches: [SavedObjectsRawDoc[]]; + readonly currentBatch: number; readonly sourceIndexPitId: string; readonly lastHitSortValue: number[] | undefined; readonly progress: Progress; @@ -318,7 +331,8 @@ export interface TransformedDocumentsBulkIndex extends PostInitState { * Write the up-to-date transformed documents to the target index */ readonly controlState: 'TRANSFORMED_DOCUMENTS_BULK_INDEX'; - readonly transformedDocs: SavedObjectsRawDoc[]; + readonly transformedDocBatches: SavedObjectsRawDoc[][]; + readonly currentBatch: number; readonly lastHitSortValue: number[] | undefined; readonly hasTransformedDocs: boolean; readonly pitId: string; diff --git a/src/core/server/saved_objects/saved_objects_config.ts b/src/core/server/saved_objects/saved_objects_config.ts index c62d322f0bf8d..e7bbd706762f5 100644 --- a/src/core/server/saved_objects/saved_objects_config.ts +++ b/src/core/server/saved_objects/saved_objects_config.ts @@ -12,6 +12,7 @@ import type { ConfigDeprecationProvider } from '../config'; const migrationSchema = schema.object({ batchSize: schema.number({ defaultValue: 1_000 }), + maxBatchSizeBytes: schema.byteSize({ defaultValue: '100mb' }), // 100mb is the default http.max_content_length Elasticsearch config value scrollDuration: schema.string({ defaultValue: '15m' }), pollInterval: schema.number({ defaultValue: 1_500 }), skip: schema.boolean({ defaultValue: false }), diff --git a/src/core/server/server.api.md b/src/core/server/server.api.md index 333ef8e7bf34c..aa421fe393059 100644 --- a/src/core/server/server.api.md +++ b/src/core/server/server.api.md @@ -293,6 +293,7 @@ export interface CoreConfigUsageData { }; apiVersion: string; healthCheckDelayMs: number; + principal: 'elastic_user' | 'kibana_user' | 'kibana_system_user' | 'other_user' | 'kibana_service_account' | 'unknown'; }; // (undocumented) http: { @@ -754,10 +755,10 @@ export interface DeprecationsDetails { // (undocumented) documentationUrl?: string; level: 'warning' | 'critical' | 'fetch_error'; - // (undocumented) message: string; // (undocumented) requireRestart?: boolean; + title: string; } // @public diff --git a/src/dev/build/build_distributables.ts b/src/dev/build/build_distributables.ts index 9ddf02e101a19..1042cdc484c12 100644 --- a/src/dev/build/build_distributables.ts +++ b/src/dev/build/build_distributables.ts @@ -105,6 +105,10 @@ export async function buildDistributables(log: ToolingLog, options: BuildOptions // control w/ --skip-archives await run(Tasks.CreateArchives); } + + if (options.createDebPackage || options.createRpmPackage) { + await run(Tasks.CreatePackageConfig); + } if (options.createDebPackage) { // control w/ --deb or --skip-os-packages await run(Tasks.CreateDebPackage); diff --git a/src/dev/build/tasks/os_packages/create_os_package_kibana_yml.ts b/src/dev/build/tasks/os_packages/create_os_package_kibana_yml.ts new file mode 100644 index 0000000000000..e7137ada02182 --- /dev/null +++ b/src/dev/build/tasks/os_packages/create_os_package_kibana_yml.ts @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import { readFileSync, writeFileSync } from 'fs'; +import { resolve } from 'path'; +import { Build, Config, mkdirp } from '../../lib'; + +export async function createOSPackageKibanaYML(config: Config, build: Build) { + const configReadPath = config.resolveFromRepo('config', 'kibana.yml'); + const configWriteDir = config.resolveFromRepo('build', 'os_packages', 'config'); + const configWritePath = resolve(configWriteDir, 'kibana.yml'); + + await mkdirp(configWriteDir); + + let kibanaYML = readFileSync(configReadPath, { encoding: 'utf8' }); + + [ + [/#pid.file:.*/g, 'pid.file: /run/kibana/kibana.pid'], + [/#logging.dest:.*/g, 'logging.dest: /var/log/kibana/kibana.log'], + ].forEach((options) => { + const [regex, setting] = options; + const diff = kibanaYML; // keep the pre-replacement content so we can tell whether anything changed + const match = kibanaYML.search(regex) >= 0; + if (match) { + if (typeof setting === 'string') { + kibanaYML = kibanaYML.replace(regex, setting); + } + } + + if (!diff.localeCompare(kibanaYML)) { // localeCompare() returns 0 when the config is unchanged + throw new Error( + `OS package configuration unmodified. Verify match for ${regex} is available` + ); + } + }); + + try { + writeFileSync(configWritePath, kibanaYML, { flag: 'wx' }); // 'wx' throws EEXIST instead of overwriting + } catch (err) { + if (err.code === 'EEXIST') { + return; + } + throw err; + } +} diff --git a/src/dev/build/tasks/os_packages/create_os_package_tasks.ts b/src/dev/build/tasks/os_packages/create_os_package_tasks.ts index 99d0e1998e78a..67a9e86ee2073 100644 --- a/src/dev/build/tasks/os_packages/create_os_package_tasks.ts +++ b/src/dev/build/tasks/os_packages/create_os_package_tasks.ts @@ -9,6 +9,15 @@ import { Task } from '../../lib'; import { runFpm } from './run_fpm'; import { runDockerGenerator } from './docker_generator'; +import { createOSPackageKibanaYML } from './create_os_package_kibana_yml'; + +export const CreatePackageConfig: Task = { + description: 'Creating OS package kibana.yml', + + async run(config, log, build) { + await createOSPackageKibanaYML(config, build); + }, +}; export const CreateDebPackage: Task = { description: 'Creating deb package', diff --git a/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker b/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker index c883e0b68114e..0af087f1427d7 100755 --- a/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker +++ b/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker @@ -108,6 +108,7 @@ kibana_vars=( map.tilemap.options.subdomains map.tilemap.url migrations.batchSize + migrations.maxBatchSizeBytes migrations.enableV2 migrations.pollInterval migrations.retryAttempts diff --git a/src/dev/build/tasks/os_packages/run_fpm.ts b/src/dev/build/tasks/os_packages/run_fpm.ts index b732e4c80ea37..c7d9f6997cdf2 100644 --- a/src/dev/build/tasks/os_packages/run_fpm.ts +++ b/src/dev/build/tasks/os_packages/run_fpm.ts @@ -123,6 +123,7 @@ export async function runFpm( `${resolveWithTrailingSlash(fromBuild('.'))}=/usr/share/kibana/`, // copy the config directory to /etc/kibana + `${config.resolveFromRepo('build/os_packages/config/kibana.yml')}=/etc/kibana/kibana.yml`, `${resolveWithTrailingSlash(fromBuild('config'))}=/etc/kibana/`, // copy the data directory at /var/lib/kibana diff --git
diff --git a/src/dev/build/tasks/os_packages/service_templates/systemd/usr/lib/systemd/system/kibana.service b/src/dev/build/tasks/os_packages/service_templates/systemd/usr/lib/systemd/system/kibana.service
index 7a1508d91b213..df33b82f1f967 100644
--- a/src/dev/build/tasks/os_packages/service_templates/systemd/usr/lib/systemd/system/kibana.service
+++ b/src/dev/build/tasks/os_packages/service_templates/systemd/usr/lib/systemd/system/kibana.service
@@ -15,7 +15,7 @@ Environment=KBN_PATH_CONF=/etc/kibana
 EnvironmentFile=-/etc/default/kibana
 EnvironmentFile=-/etc/sysconfig/kibana

-ExecStart=/usr/share/kibana/bin/kibana --logging.dest="/var/log/kibana/kibana.log" --pid.file="/run/kibana/kibana.pid"
+ExecStart=/usr/share/kibana/bin/kibana

 Restart=on-failure
 RestartSec=3
diff --git a/src/dev/build/tasks/package_json/find_used_dependencies.ts b/src/dev/build/tasks/package_json/find_used_dependencies.ts
index 004e17b87ac8b..8cb8b3c986de7 100644
--- a/src/dev/build/tasks/package_json/find_used_dependencies.ts
+++ b/src/dev/build/tasks/package_json/find_used_dependencies.ts
@@ -29,9 +29,9 @@ export async function findUsedDependencies(listedPkgDependencies: any, baseDir:
   ];

   const discoveredPluginEntries = await globby([
-    normalize(Path.resolve(baseDir, `src/plugins/*/server/index.js`)),
+    normalize(Path.resolve(baseDir, `src/plugins/**/server/index.js`)),
     `!${normalize(Path.resolve(baseDir, `/src/plugins/**/public`))}`,
-    normalize(Path.resolve(baseDir, `x-pack/plugins/*/server/index.js`)),
+    normalize(Path.resolve(baseDir, `x-pack/plugins/**/server/index.js`)),
     `!${normalize(Path.resolve(baseDir, `/x-pack/plugins/**/public`))}`,
   ]);
diff --git a/src/dev/license_checker/config.ts b/src/dev/license_checker/config.ts
index cb7e3781e2511..ee355d6a9811b 100644
--- a/src/dev/license_checker/config.ts
+++ b/src/dev/license_checker/config.ts
@@ -75,7 +75,7 @@ export const LICENSE_OVERRIDES = {
   '@mapbox/jsonlint-lines-primitives@2.0.2': ['MIT'], // license in readme https://github.com/tmcw/jsonlint
   'node-sql-parser@3.6.1': ['(GPL-2.0 OR MIT)'], // GPL-2.0* https://github.com/taozhi8833998/node-sql-parser
   '@elastic/ems-client@7.15.0': ['Elastic License 2.0'],
-  '@elastic/eui@37.3.0': ['SSPL-1.0 OR Elastic License 2.0'],
+  '@elastic/eui@37.3.1': ['SSPL-1.0 OR Elastic License 2.0'],

   // TODO can be removed if the https://github.com/jindw/xmldom/issues/239 is released
   'xmldom@0.1.27': ['MIT'],
diff --git a/src/plugins/advanced_settings/public/management_app/components/field/__snapshots__/field.test.tsx.snap b/src/plugins/advanced_settings/public/management_app/components/field/__snapshots__/field.test.tsx.snap
index be5163e89367c..9249f5f98e9c9 100644
--- a/src/plugins/advanced_settings/public/management_app/components/field/__snapshots__/field.test.tsx.snap
+++ b/src/plugins/advanced_settings/public/management_app/components/field/__snapshots__/field.test.tsx.snap
@@ -1326,7 +1326,12 @@ exports[`Field for image setting should render as read only if saving is disable
   disabled={true}
   display="large"
   fullWidth={true}
-  initialPromptText="Select or drag and drop a file"
+  initialPromptText={
+    <FormattedMessage
+      defaultMessage="Select or drag and drop a file"
+      id="…"
+    />
+  }
   onChange={[Function]}
 />
@@ -1472,7 +1477,12 @@ exports[`Field for image setting should render custom setting icon if it is cust
   disabled={false}
   display="large"
   fullWidth={true}
-  initialPromptText="Select or drag and drop a file"
+  initialPromptText={
+    <FormattedMessage
+      defaultMessage="Select or drag and drop a file"
+      id="…"
+    />
+  }
   onChange={[Function]}
 />
@@ -1526,7 +1536,12 @@ exports[`Field for image setting should render default value if there is no user
   disabled={false}
   display="large"
   fullWidth={true}
-  initialPromptText="Select or drag and drop a file"
+  initialPromptText={
+    <FormattedMessage
+      defaultMessage="Select or drag and drop a file"
+      id="…"
+    />
+  }
   onChange={[Function]}
 />
@@ -1597,7 +1612,12 @@ exports[`Field for image setting should render unsaved value if there are unsave
   disabled={false}
   display="large"
   fullWidth={true}
-  initialPromptText="Select or drag and drop a file"
+  initialPromptText={
+    <FormattedMessage
+      defaultMessage="Select or drag and drop a file"
+      id="…"
+    />
+  }
   onChange={[Function]}
 />
diff --git a/src/plugins/charts/public/services/active_cursor/use_active_cursor.test.ts b/src/plugins/charts/public/services/active_cursor/use_active_cursor.test.ts
index efe5c9b49849f..50e7c995a1250 100644
--- a/src/plugins/charts/public/services/active_cursor/use_active_cursor.test.ts
+++ b/src/plugins/charts/public/services/active_cursor/use_active_cursor.test.ts
@@ -15,7 +15,8 @@ import type { ActiveCursorSyncOption, ActiveCursorPayload } from './types';
 import type { Chart, PointerEvent } from '@elastic/charts';
 import type { Datatable } from '../../../../expressions/public';

-describe('useActiveCursor', () => {
+// FLAKY: https://github.com/elastic/kibana/issues/110038
+describe.skip('useActiveCursor', () => {
   let cursor: ActiveCursorPayload['cursor'];
   let dispatchExternalPointerEvent: jest.Mock;

@@ -24,42 +25,47 @@ describe('useActiveCursor', () => {
     events: Array<Partial<ActiveCursorPayload>>,
     eventsTimeout = 1
   ) =>
-    new Promise(async (resolve) => {
-      const activeCursor = new ActiveCursor();
-      let allEventsExecuted = false;
-
-      activeCursor.setup();
+    new Promise(async (resolve, reject) => {
+      try {
+        const activeCursor = new ActiveCursor();
+        let allEventsExecuted = false;
+        activeCursor.setup();
+        dispatchExternalPointerEvent.mockImplementation((pointerEvent) => {
+          if (allEventsExecuted) {
+            resolve(pointerEvent);
+          }
+        });
+        renderHook(() =>
+          useActiveCursor(
+            activeCursor,
+            {
+              current: {
+                dispatchExternalPointerEvent: dispatchExternalPointerEvent as (
+                  pointerEvent: PointerEvent
+                ) => void,
+              },
+            } as RefObject<Chart>,
+            { ...syncOption, debounce: syncOption.debounce ?? 1 }
+          )
+        );

-      dispatchExternalPointerEvent.mockImplementation((pointerEvent) => {
-        if (allEventsExecuted) {
-          resolve(pointerEvent);
+        for (const e of events) {
+          await new Promise((eventResolve) =>
+            setTimeout(() => {
+              if (e === events[events.length - 1]) {
+                allEventsExecuted = true;
+              }
+
+              activeCursor.activeCursor$!.next({
+                cursor,
+                ...e,
+              });
+              eventResolve(null);
+            }, eventsTimeout)
+          );
         }
-      });
-
-      renderHook(() =>
-        useActiveCursor(
-          activeCursor,
-          {
-            current: {
-              dispatchExternalPointerEvent: dispatchExternalPointerEvent as (
-                pointerEvent: PointerEvent
-              ) => void,
-            },
-          } as RefObject<Chart>,
-          { ...syncOption, debounce: syncOption.debounce ?? 1 }
-        )
-      );
-
-      for (const e of events) {
-        await new Promise((eventResolve) =>
-          setTimeout(() => {
-            if (e === events[events.length - 1]) {
-              allEventsExecuted = true;
-            }
-            activeCursor.activeCursor$!.next({ cursor, ...e });
-            eventResolve(null);
-          }, eventsTimeout)
-        );
+      } catch (error) {
+        reject(error);
       }
     });
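Beyond skipping the flaky suite, the rewrite above wraps the async promise executor in try/catch and rejects on failure; without that, an exception thrown inside the executor is swallowed and the test times out instead of failing with a useful error. The same pattern in isolation (a minimal sketch, not the actual test):

```ts
// An async promise executor silently swallows throws: the async function's own
// promise rejects unobserved, and the outer Promise never settles.
function actUnsafe(): Promise<string> {
  return new Promise(async (_resolve) => {
    throw new Error('boom'); // lost: the caller's promise hangs forever
  });
}

// Catching and rejecting explicitly surfaces the failure to the test runner.
function actSafe(): Promise<string> {
  return new Promise(async (_resolve, reject) => {
    try {
      throw new Error('boom');
    } catch (error) {
      reject(error); // the promise settles, so the test fails fast
    }
  });
}

actSafe().catch((e) => console.error('failed fast:', (e as Error).message));
```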
diff --git a/src/plugins/data/public/index.ts b/src/plugins/data/public/index.ts
index 986e794c48488..f70733d1b3e8a 100644
--- a/src/plugins/data/public/index.ts
+++ b/src/plugins/data/public/index.ts
@@ -62,7 +62,13 @@ export const indexPatterns = {
   flattenHitWrapper,
 };

-export { IndexPatternsContract, IndexPattern, IndexPatternField, TypeMeta } from './index_patterns';
+export {
+  IndexPatternsContract,
+  DataViewsContract,
+  IndexPattern,
+  IndexPatternField,
+  TypeMeta,
+} from './index_patterns';

 export {
   IIndexPattern,
diff --git a/src/plugins/data/public/index_patterns/index.ts b/src/plugins/data/public/index_patterns/index.ts
index 7229ca5750a38..d1a2b0f28f1d2 100644
--- a/src/plugins/data/public/index_patterns/index.ts
+++ b/src/plugins/data/public/index_patterns/index.ts
@@ -23,6 +23,9 @@ export {
   IndexPatternsContract,
   IndexPattern,
   IndexPatternsApiClient,
+  DataViewsService,
+  DataViewsContract,
+  DataView,
 } from './index_patterns';
 export { UiSettingsPublicToCommon } from './ui_settings_wrapper';
 export { SavedObjectsClientPublicToCommon } from './saved_objects_client_wrapper';
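These renamed exports, together with the mock, plugin wiring, and start-contract changes below, let consumers migrate to `dataViews` incrementally while `indexPatterns` remains as a deprecated alias for the same service instance. A hedged sketch of what consumption looks like from a dependent plugin (the plugin class, data-view id, and import paths here are illustrative):

```ts
import type { CoreStart, Plugin } from 'kibana/public'; // illustrative import path
import type { DataPublicPluginStart } from '../../data/public'; // illustrative import path

interface MyPluginStartDeps {
  data: DataPublicPluginStart;
}

export class MyPlugin implements Plugin<void, void, {}, MyPluginStartDeps> {
  public setup() {}

  public start(core: CoreStart, { data }: MyPluginStartDeps) {
    // Preferred: the renamed dataViews service...
    data.dataViews.get('my-data-view-id').then((dataView) => {
      console.log(dataView);
    });

    // ...which, per the plugin.ts wiring below, is the very same object
    // as the deprecated indexPatterns alias.
    console.assert(data.indexPatterns === data.dataViews);
  }

  public stop() {}
}
```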
diff --git a/src/plugins/data/public/mocks.ts b/src/plugins/data/public/mocks.ts
index b9b859fd96625..40882fa1134e9 100644
--- a/src/plugins/data/public/mocks.ts
+++ b/src/plugins/data/public/mocks.ts
@@ -6,7 +6,7 @@
  * Side Public License, v 1.
  */

-import { DataPlugin, IndexPatternsContract } from '.';
+import { DataPlugin, DataViewsContract } from '.';
 import { fieldFormatsServiceMock } from '../../field_formats/public/mocks';
 import { searchServiceMock } from './search/mocks';
 import { queryServiceMock } from './query/mocks';
@@ -38,6 +38,20 @@ const createSetupContract = (): Setup => {

 const createStartContract = (): Start => {
   const queryStartMock = queryServiceMock.createStartContract();
+  const dataViews = ({
+    find: jest.fn((search) => [{ id: search, title: search }]),
+    createField: jest.fn(() => {}),
+    createFieldList: jest.fn(() => []),
+    ensureDefaultIndexPattern: jest.fn(),
+    make: () => ({
+      fieldsFetcher: {
+        fetchForWildcard: jest.fn(),
+      },
+    }),
+    get: jest.fn().mockReturnValue(Promise.resolve({})),
+    clearCache: jest.fn(),
+  } as unknown) as DataViewsContract;
+
   return {
     actions: {
       createFiltersFromValueClickAction: jest.fn().mockResolvedValue(['yes']),
@@ -51,19 +65,11 @@ const createStartContract = (): Start => {
       IndexPatternSelect: jest.fn(),
       SearchBar: jest.fn().mockReturnValue(null),
     },
-    indexPatterns: ({
-      find: jest.fn((search) => [{ id: search, title: search }]),
-      createField: jest.fn(() => {}),
-      createFieldList: jest.fn(() => []),
-      ensureDefaultIndexPattern: jest.fn(),
-      make: () => ({
-        fieldsFetcher: {
-          fetchForWildcard: jest.fn(),
-        },
-      }),
-      get: jest.fn().mockReturnValue(Promise.resolve({})),
-      clearCache: jest.fn(),
-    } as unknown) as IndexPatternsContract,
+    dataViews,
+    /**
+     * @deprecated Use dataViews service instead. All index pattern interfaces were renamed.
+     */
+    indexPatterns: dataViews,
     nowProvider: createNowProviderMock(),
   };
 };
diff --git a/src/plugins/data/public/plugin.ts b/src/plugins/data/public/plugin.ts
index 67adcc7a1716d..a12bb50815982 100644
--- a/src/plugins/data/public/plugin.ts
+++ b/src/plugins/data/public/plugin.ts
@@ -197,6 +197,7 @@ export class DataPublicPlugin
       autocomplete: this.autocomplete.start(),
       fieldFormats,
       indexPatterns,
+      dataViews: indexPatterns,
       query,
       search,
       nowProvider: this.nowProvider,
diff --git a/src/plugins/data/public/types.ts b/src/plugins/data/public/types.ts
index 4b52ddfb68824..b31a4ab933ae2 100644
--- a/src/plugins/data/public/types.ts
+++ b/src/plugins/data/public/types.ts
@@ -17,7 +17,7 @@ import { AutocompleteSetup, AutocompleteStart } from './autocomplete';
 import { createFiltersFromRangeSelectAction, createFiltersFromValueClickAction } from './actions';
 import { ISearchSetup, ISearchStart } from './search';
 import { QuerySetup, QueryStart } from './query';
-import { IndexPatternsContract } from './index_patterns';
+import { DataViewsContract } from './index_patterns';
 import { IndexPatternSelectProps, StatefulSearchBarProps } from './ui';
 import { UsageCollectionSetup, UsageCollectionStart } from '../../usage_collection/public';
 import { Setup as InspectorSetup } from '../../inspector/public';
@@ -76,11 +76,17 @@ export interface DataPublicPluginStart {
    * {@link AutocompleteStart}
    */
   autocomplete: AutocompleteStart;
+  /**
+   * data views service
+   * {@link DataViewsContract}
+   */
+  dataViews: DataViewsContract;
   /**
    * index patterns service
-   * {@link IndexPatternsContract}
+   * {@link DataViewsContract}
+   * @deprecated Use dataViews service instead. All index pattern interfaces were renamed.
    */
-  indexPatterns: IndexPatternsContract;
+  indexPatterns: DataViewsContract;
   /**
    * search service
    * {@link ISearchStart}
diff --git a/src/plugins/data/public/utils/table_inspector_view/components/__snapshots__/data_view.test.tsx.snap b/src/plugins/data/public/utils/table_inspector_view/components/__snapshots__/data_view.test.tsx.snap
index 612ffdcf5029e..9cd0687a1074d 100644
--- a/src/plugins/data/public/utils/table_inspector_view/components/__snapshots__/data_view.test.tsx.snap
+++ b/src/plugins/data/public/utils/table_inspector_view/components/__snapshots__/data_view.test.tsx.snap
@@ -1403,7 +1403,7 @@ exports[`Inspector Data View component should render single table without select
[hunk body stripped in extraction]
" +
markdown mock
My Canvas Workpad
" `; exports[`Canvas Shareable Workpad API Placed successfully with height specified 1`] = `"
"`; @@ -21,7 +21,7 @@ exports[`Canvas Shareable Workpad API Placed successfully with height specified
markdown mock
markdown mock
My Canvas Workpad
" +
markdown mock
My Canvas Workpad
" `; exports[`Canvas Shareable Workpad API Placed successfully with page specified 1`] = `"
"`; @@ -33,7 +33,7 @@ exports[`Canvas Shareable Workpad API Placed successfully with page specified 2`
markdown mock
markdown mock
My Canvas Workpad
" +
markdown mock
My Canvas Workpad
" `; exports[`Canvas Shareable Workpad API Placed successfully with width and height specified 1`] = `"
"`; @@ -45,7 +45,7 @@ exports[`Canvas Shareable Workpad API Placed successfully with width and height
markdown mock
markdown mock
My Canvas Workpad
" +
markdown mock
My Canvas Workpad
" `; exports[`Canvas Shareable Workpad API Placed successfully with width specified 1`] = `"
"`; @@ -57,5 +57,5 @@ exports[`Canvas Shareable Workpad API Placed successfully with width specified 2
markdown mock
markdown mock
My Canvas Workpad
" +
markdown mock
My Canvas Workpad
" `; diff --git a/x-pack/plugins/canvas/shareable_runtime/components/__stories__/__snapshots__/canvas.stories.storyshot b/x-pack/plugins/canvas/shareable_runtime/components/__stories__/__snapshots__/canvas.stories.storyshot index c5b6d768c89d8..a5eefde192371 100644 --- a/x-pack/plugins/canvas/shareable_runtime/components/__stories__/__snapshots__/canvas.stories.storyshot +++ b/x-pack/plugins/canvas/shareable_runtime/components/__stories__/__snapshots__/canvas.stories.storyshot @@ -1375,7 +1375,7 @@ exports[`Storyshots shareables/Canvas component 1`] = ` >