diff --git a/NOTICE.txt b/NOTICE.txt
index 4ede43610ca7b..1694193892e16 100644
--- a/NOTICE.txt
+++ b/NOTICE.txt
@@ -295,7 +295,7 @@ MIT License http://www.opensource.org/licenses/mit-license
---
This product includes code that is adapted from mapbox-gl-js, which is
available under a "BSD-3-Clause" license.
-https://github.com/mapbox/mapbox-gl-js/blob/master/src/util/image.js
+https://github.com/mapbox/mapbox-gl-js/blob/v1.13.2/src/util/image.js
Copyright (c) 2016, Mapbox
diff --git a/dev_docs/tutorials/expressions.mdx b/dev_docs/tutorials/expressions.mdx
index c4b37a125838e..d9abf3dd57eb8 100644
--- a/dev_docs/tutorials/expressions.mdx
+++ b/dev_docs/tutorials/expressions.mdx
@@ -57,7 +57,7 @@ const result = await executionContract.getData();
```
- Check the full spec of execute function [here](https://github.com/elastic/kibana/blob/main/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.execution.md)
+ Check the full spec of execute function
In addition, on the browser side, there are two additional ways to run expressions and render the results.
@@ -71,7 +71,7 @@ This is the easiest way to get expressions rendered inside your application.
```
- Check the full spec of ReactExpressionRenderer component props [here](https://github.com/elastic/kibana/blob/main/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.reactexpressionrendererprops.md)
+ Check the full spec of ReactExpressionRenderer component props
#### Expression loader
@@ -83,7 +83,7 @@ const handler = loader(domElement, expression, params);
```
- Check the full spec of expression loader params [here](https://github.com/elastic/kibana/blob/main/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.iexpressionloaderparams.md)
+ Check the full spec of expression loader params
### Creating new expression functions
@@ -106,7 +106,7 @@ expressions.registerFunction(functionDefinition);
```
- Check the full interface of ExpressionFuntionDefinition [here](https://github.com/elastic/kibana/blob/main/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.expressionfunctiondefinition.md)
+ Check the full interface of ExpressionFunctionDefinition
### Creating new expression renderers
@@ -128,5 +128,5 @@ expressions.registerRenderer(rendererDefinition);
```
- Check the full interface of ExpressionRendererDefinition [here](https://github.com/elastic/kibana/blob/main/docs/development/plugins/expressions/public/kibana-plugin-plugins-expressions-public.expressionrenderdefinition.md)
+ Check the full interface of ExpressionRendererDefinition
diff --git a/docs/developer/getting-started/debugging.asciidoc b/docs/developer/getting-started/debugging.asciidoc
index f3308a1267386..1254462d2e4ea 100644
--- a/docs/developer/getting-started/debugging.asciidoc
+++ b/docs/developer/getting-started/debugging.asciidoc
@@ -130,71 +130,3 @@ Once you're finished, you can stop Kibana normally, then stop the {es} and APM s
----
./scripts/compose.py stop
----
-
-=== Using {kib} server logs
-{kib} Logs is a great way to see what's going on in your application and to debug performance issues. Navigating through a large number of generated logs can be overwhelming, and following are some techniques that you can use to optimize the process.
-
-Start by defining a problem area that you are interested in. For example, you might be interested in seeing how a particular {kib} Plugin is performing, so no need to gather logs for all of {kib}. Or you might want to focus on a particular feature, such as requests from the {kib} server to the {es} server.
-Depending on your needs, you can configure {kib} to generate logs for a specific feature.
-[source,yml]
-----
-logging:
- appenders:
- file:
- type: file
- fileName: ./kibana.log
- layout:
- type: json
-
-### gather all the Kibana logs into a file
-logging.root:
- appenders: [file]
- level: all
-
-### or gather a subset of the logs
-logging.loggers:
- ### responses to an HTTP request
- - name: http.server.response
- level: debug
- appenders: [file]
- ### result of a query to the Elasticsearch server
- - name: elasticsearch.query
- level: debug
- appenders: [file]
- ### logs generated by my plugin
- - name: plugins.myPlugin
- level: debug
- appenders: [file]
-----
-WARNING: Kibana's `file` appender is configured to produce logs in https://www.elastic.co/guide/en/ecs/master/ecs-reference.html[ECS JSON] format. It's the only format that includes the meta information necessary for https://www.elastic.co/guide/en/apm/agent/nodejs/current/log-correlation.html[log correlation] out-of-the-box.
-
-The next step is to define what https://www.elastic.co/observability[observability tools] are available.
-For a better experience, set up an https://www.elastic.co/guide/en/apm/get-started/current/observability-integrations.html[Observability integration] provided by Elastic to debug your application with the <<debugging-logs-apm-ui,APM UI>>
-To debug something quickly without setting up additional tooling, you can work with <<plain-kibana-logs,plain {kib} logs>>
-
-[[debugging-logs-apm-ui]]
-==== APM UI
-*Prerequisites* {kib} logs are configured to be in https://www.elastic.co/guide/en/ecs/master/ecs-reference.html[ECS JSON] format to include tracing identifiers.
-
-To debug {kib} with the APM UI, you must set up the APM infrastructure. You can find instructions for the setup process
-https://www.elastic.co/guide/en/apm/get-started/current/observability-integrations.html[on the Observability integrations page].
-
-Once you set up the APM infrastructure, you can enable the APM agent and put {kib} under load to collect APM events. To analyze the collected metrics and logs, use the APM UI as demonstrated https://www.elastic.co/guide/en/kibana/master/transactions.html#transaction-trace-sample[in the docs].
-
-[[plain-kibana-logs]]
-==== Plain {kib} logs
-*Prerequisites* {kib} logs are configured to be in https://www.elastic.co/guide/en/ecs/master/ecs-reference.html[ECS JSON] format to include tracing identifiers.
-
-Open {kib} Logs and search for an operation you are interested in.
-For example, suppose you want to investigate the response times for queries to the `/api/telemetry/v2/clusters/_stats` {kib} endpoint.
-Open Kibana Logs and search for the HTTP server response for the endpoint. It looks similar to the following (some fields are omitted for brevity).
-[source,json]
-----
-{
- "message":"POST /api/telemetry/v2/clusters/_stats 200 1014ms - 43.2KB",
- "log":{"level":"DEBUG","logger":"http.server.response"},
- "trace":{"id":"9b99131a6f66587971ef085ef97dfd07"},
- "transaction":{"id":"d0c5bbf14f5febca"}
-}
-----
-You are interested in the https://www.elastic.co/guide/en/ecs/current/ecs-tracing.html#field-trace-id[trace.id] field, which is a unique identifier of a trace. The `trace.id` provides a way to group multiple events, like transactions, which belong together. You can search for `"trace":{"id":"9b99131a6f66587971ef085ef97dfd07"}` to get all the logs that belong to the same trace. This enables you to see how many {es} requests were triggered during the `9b99131a6f66587971ef085ef97dfd07` trace, what they looked like, what {es} endpoints were hit, and so on.
diff --git a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md
index 8f6f1f6c98ab2..63c29df44019d 100644
--- a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md
+++ b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.links.md
@@ -290,7 +290,14 @@ readonly links: {
}>;
readonly watcher: Record<string, string>;
readonly ccs: Record<string, string>;
- readonly plugins: Record<string, string>;
+ readonly plugins: {
+ azureRepo: string;
+ gcsRepo: string;
+ hdfsRepo: string;
+ s3Repo: string;
+ snapshotRestoreRepos: string;
+ mapperSize: string;
+ };
readonly snapshotRestore: Record<string, string>;
readonly ingest: Record<string, string>;
readonly fleet: Readonly<{
diff --git a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md
index a9828f04672e9..b60f9ad17e9c4 100644
--- a/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md
+++ b/docs/development/core/public/kibana-plugin-core-public.doclinksstart.md
@@ -17,5 +17,5 @@ export interface DocLinksStart
| --- | --- | --- |
| [DOC\_LINK\_VERSION](./kibana-plugin-core-public.doclinksstart.doc_link_version.md) | string | |
| [ELASTIC\_WEBSITE\_URL](./kibana-plugin-core-public.doclinksstart.elastic_website_url.md) | string | |
-| [links](./kibana-plugin-core-public.doclinksstart.links.md) | { readonly settings: string; readonly elasticStackGetStarted: string; readonly upgrade: { readonly upgradingElasticStack: string; }; readonly apm: { readonly kibanaSettings: string; readonly supportedServiceMaps: string; readonly customLinks: string; readonly droppedTransactionSpans: string; readonly upgrading: string; readonly metaData: string; }; readonly canvas: { readonly guide: string; }; readonly cloud: { readonly indexManagement: string; }; readonly dashboard: { readonly guide: string; readonly drilldowns: string; readonly drilldownsTriggerPicker: string; readonly urlDrilldownTemplateSyntax: string; readonly urlDrilldownVariables: string; }; readonly discover: Record<string, string>; readonly filebeat: { readonly base: string; readonly installation: string; readonly configuration: string; readonly elasticsearchOutput: string; readonly elasticsearchModule: string; readonly startup: string; readonly exportedFields: string; readonly suricataModule: string; readonly zeekModule: string; }; readonly auditbeat: { readonly base: string; readonly auditdModule: string; readonly systemModule: string; }; readonly metricbeat: { readonly base: string; readonly configure: string; readonly httpEndpoint: string; readonly install: string; readonly start: string; }; readonly appSearch: { readonly apiRef: string; readonly apiClients: string; readonly apiKeys: string; readonly authentication: string; readonly crawlRules: string; readonly curations: string; readonly duplicateDocuments: string; readonly entryPoints: string; readonly guide: string; readonly indexingDocuments: string; readonly indexingDocumentsSchema: string; readonly logSettings: string; readonly metaEngines: string; readonly recisionTuning: string; readonly relevanceTuning: string; readonly resultSettings: string; readonly searchUI: string; readonly security: string; readonly synonyms: string; readonly webCrawler: string; readonly 
webCrawlerEventLogs: string; }; readonly enterpriseSearch: { readonly configuration: string; readonly licenseManagement: string; readonly mailService: string; readonly usersAccess: string; }; readonly workplaceSearch: { readonly apiKeys: string; readonly box: string; readonly confluenceCloud: string; readonly confluenceServer: string; readonly customSources: string; readonly customSourcePermissions: string; readonly documentPermissions: string; readonly dropbox: string; readonly externalIdentities: string; readonly gitHub: string; readonly gettingStarted: string; readonly gmail: string; readonly googleDrive: string; readonly indexingSchedule: string; readonly jiraCloud: string; readonly jiraServer: string; readonly oneDrive: string; readonly permissions: string; readonly salesforce: string; readonly security: string; readonly serviceNow: string; readonly sharePoint: string; readonly slack: string; readonly synch: string; readonly zendesk: string; }; readonly heartbeat: { readonly base: string; }; readonly libbeat: { readonly getStarted: string; }; readonly logstash: { readonly base: string; }; readonly functionbeat: { readonly base: string; }; readonly winlogbeat: { readonly base: string; }; readonly aggs: { readonly composite: string; readonly composite\_missing\_bucket: string; readonly date\_histogram: string; readonly date\_range: string; readonly date\_format\_pattern: string; readonly filter: string; readonly filters: string; readonly geohash\_grid: string; readonly histogram: string; readonly ip\_range: string; readonly range: string; readonly significant\_terms: string; readonly terms: string; readonly terms\_doc\_count\_error: string; readonly avg: string; readonly avg\_bucket: string; readonly max\_bucket: string; readonly min\_bucket: string; readonly sum\_bucket: string; readonly cardinality: string; readonly count: string; readonly cumulative\_sum: string; readonly derivative: string; readonly geo\_bounds: string; readonly geo\_centroid: string; 
readonly max: string; readonly median: string; readonly min: string; readonly moving\_avg: string; readonly percentile\_ranks: string; readonly serial\_diff: string; readonly std\_dev: string; readonly sum: string; readonly top\_hits: string; }; readonly runtimeFields: { readonly overview: string; readonly mapping: string; }; readonly scriptedFields: { readonly scriptFields: string; readonly scriptAggs: string; readonly painless: string; readonly painlessApi: string; readonly painlessLangSpec: string; readonly painlessSyntax: string; readonly painlessWalkthrough: string; readonly luceneExpressions: string; }; readonly search: { readonly sessions: string; readonly sessionLimits: string; }; readonly indexPatterns: { readonly introduction: string; readonly fieldFormattersNumber: string; readonly fieldFormattersString: string; readonly runtimeFields: string; }; readonly addData: string; readonly kibana: string; readonly upgradeAssistant: { readonly overview: string; readonly batchReindex: string; readonly remoteReindex: string; }; readonly rollupJobs: string; readonly elasticsearch: Record<string, string>; readonly siem: { readonly privileges: string; readonly guide: string; readonly gettingStarted: string; readonly ml: string; readonly ruleChangeLog: string; readonly detectionsReq: string; readonly networkMap: string; readonly troubleshootGaps: string; }; readonly securitySolution: { readonly trustedApps: string; }; readonly query: { readonly eql: string; readonly kueryQuerySyntax: string; readonly luceneQuerySyntax: string; readonly percolate: string; readonly queryDsl: string; }; readonly date: { readonly dateMath: string; readonly dateMathIndexNames: string; }; readonly management: Record<string, string>; readonly ml: Record<string, string>; readonly transforms: Record<string, string>; readonly visualize: Record<string, string>; readonly apis: Readonly<{ bulkIndexAlias: string; byteSizeUnits: string; createAutoFollowPattern: string; createFollower: string; 
createIndex: string; createSnapshotLifecyclePolicy: string; createRoleMapping: string; createRoleMappingTemplates: string; createRollupJobsRequest: string; createApiKey: string; createPipeline: string; createTransformRequest: string; cronExpressions: string; executeWatchActionModes: string; indexExists: string; openIndex: string; putComponentTemplate: string; painlessExecute: string; painlessExecuteAPIContexts: string; putComponentTemplateMetadata: string; putSnapshotLifecyclePolicy: string; putIndexTemplateV1: string; putWatch: string; simulatePipeline: string; timeUnits: string; updateTransform: string; }>; readonly observability: Readonly<{ guide: string; infrastructureThreshold: string; logsThreshold: string; metricsThreshold: string; monitorStatus: string; monitorUptime: string; tlsCertificate: string; uptimeDurationAnomaly: string; }>; readonly alerting: Record<string, string>; readonly maps: Readonly<{ guide: string; importGeospatialPrivileges: string; gdalTutorial: string; }>; readonly monitoring: Record<string, string>; readonly security: Readonly<{ apiKeyServiceSettings: string; clusterPrivileges: string; elasticsearchSettings: string; elasticsearchEnableSecurity: string; elasticsearchEnableApiKeys: string; indicesPrivileges: string; kibanaTLS: string; kibanaPrivileges: string; mappingRoles: string; mappingRolesFieldRules: string; runAsPrivilege: string; }>; readonly spaces: Readonly<{ kibanaLegacyUrlAliases: string; kibanaDisableLegacyUrlAliasesApi: string; }>; readonly watcher: Record<string, string>; readonly ccs: Record<string, string>; readonly plugins: Record<string, string>; readonly snapshotRestore: Record<string, string>; readonly ingest: Record<string, string>; readonly fleet: Readonly<{ beatsAgentComparison: string; guide: string; fleetServer: string; fleetServerAddFleetServer: string; settings: string; settingsFleetServerHostSettings: string; settingsFleetServerProxySettings: string; troubleshooting: string; elasticAgent: string; datastreams: 
string; datastreamsNamingScheme: string; installElasticAgent: string; installElasticAgentStandalone: string; upgradeElasticAgent: string; upgradeElasticAgent712lower: string; learnMoreBlog: string; apiKeysLearnMore: string; onPremRegistry: string; }>; readonly ecs: { readonly guide: string; }; readonly clients: { readonly guide: string; readonly goOverview: string; readonly javaIndex: string; readonly jsIntro: string; readonly netGuide: string; readonly perlGuide: string; readonly phpGuide: string; readonly pythonGuide: string; readonly rubyOverview: string; readonly rustGuide: string; }; readonly endpoints: { readonly troubleshooting: string; }; } | |
+| [links](./kibana-plugin-core-public.doclinksstart.links.md) | { readonly settings: string; readonly elasticStackGetStarted: string; readonly upgrade: { readonly upgradingElasticStack: string; }; readonly apm: { readonly kibanaSettings: string; readonly supportedServiceMaps: string; readonly customLinks: string; readonly droppedTransactionSpans: string; readonly upgrading: string; readonly metaData: string; }; readonly canvas: { readonly guide: string; }; readonly cloud: { readonly indexManagement: string; }; readonly dashboard: { readonly guide: string; readonly drilldowns: string; readonly drilldownsTriggerPicker: string; readonly urlDrilldownTemplateSyntax: string; readonly urlDrilldownVariables: string; }; readonly discover: Record<string, string>; readonly filebeat: { readonly base: string; readonly installation: string; readonly configuration: string; readonly elasticsearchOutput: string; readonly elasticsearchModule: string; readonly startup: string; readonly exportedFields: string; readonly suricataModule: string; readonly zeekModule: string; }; readonly auditbeat: { readonly base: string; readonly auditdModule: string; readonly systemModule: string; }; readonly metricbeat: { readonly base: string; readonly configure: string; readonly httpEndpoint: string; readonly install: string; readonly start: string; }; readonly appSearch: { readonly apiRef: string; readonly apiClients: string; readonly apiKeys: string; readonly authentication: string; readonly crawlRules: string; readonly curations: string; readonly duplicateDocuments: string; readonly entryPoints: string; readonly guide: string; readonly indexingDocuments: string; readonly indexingDocumentsSchema: string; readonly logSettings: string; readonly metaEngines: string; readonly precisionTuning: string; readonly relevanceTuning: string; readonly resultSettings: string; readonly searchUI: string; readonly security: string; readonly synonyms: string; readonly webCrawler: string; readonly 
webCrawlerEventLogs: string; }; readonly enterpriseSearch: { readonly configuration: string; readonly licenseManagement: string; readonly mailService: string; readonly usersAccess: string; }; readonly workplaceSearch: { readonly apiKeys: string; readonly box: string; readonly confluenceCloud: string; readonly confluenceServer: string; readonly customSources: string; readonly customSourcePermissions: string; readonly documentPermissions: string; readonly dropbox: string; readonly externalIdentities: string; readonly gitHub: string; readonly gettingStarted: string; readonly gmail: string; readonly googleDrive: string; readonly indexingSchedule: string; readonly jiraCloud: string; readonly jiraServer: string; readonly oneDrive: string; readonly permissions: string; readonly salesforce: string; readonly security: string; readonly serviceNow: string; readonly sharePoint: string; readonly slack: string; readonly synch: string; readonly zendesk: string; }; readonly heartbeat: { readonly base: string; }; readonly libbeat: { readonly getStarted: string; }; readonly logstash: { readonly base: string; }; readonly functionbeat: { readonly base: string; }; readonly winlogbeat: { readonly base: string; }; readonly aggs: { readonly composite: string; readonly composite\_missing\_bucket: string; readonly date\_histogram: string; readonly date\_range: string; readonly date\_format\_pattern: string; readonly filter: string; readonly filters: string; readonly geohash\_grid: string; readonly histogram: string; readonly ip\_range: string; readonly range: string; readonly significant\_terms: string; readonly terms: string; readonly terms\_doc\_count\_error: string; readonly avg: string; readonly avg\_bucket: string; readonly max\_bucket: string; readonly min\_bucket: string; readonly sum\_bucket: string; readonly cardinality: string; readonly count: string; readonly cumulative\_sum: string; readonly derivative: string; readonly geo\_bounds: string; readonly geo\_centroid: string; 
readonly max: string; readonly median: string; readonly min: string; readonly moving\_avg: string; readonly percentile\_ranks: string; readonly serial\_diff: string; readonly std\_dev: string; readonly sum: string; readonly top\_hits: string; }; readonly runtimeFields: { readonly overview: string; readonly mapping: string; }; readonly scriptedFields: { readonly scriptFields: string; readonly scriptAggs: string; readonly painless: string; readonly painlessApi: string; readonly painlessLangSpec: string; readonly painlessSyntax: string; readonly painlessWalkthrough: string; readonly luceneExpressions: string; }; readonly search: { readonly sessions: string; readonly sessionLimits: string; }; readonly indexPatterns: { readonly introduction: string; readonly fieldFormattersNumber: string; readonly fieldFormattersString: string; readonly runtimeFields: string; }; readonly addData: string; readonly kibana: string; readonly upgradeAssistant: { readonly overview: string; readonly batchReindex: string; readonly remoteReindex: string; }; readonly rollupJobs: string; readonly elasticsearch: Record<string, string>; readonly siem: { readonly privileges: string; readonly guide: string; readonly gettingStarted: string; readonly ml: string; readonly ruleChangeLog: string; readonly detectionsReq: string; readonly networkMap: string; readonly troubleshootGaps: string; }; readonly securitySolution: { readonly trustedApps: string; }; readonly query: { readonly eql: string; readonly kueryQuerySyntax: string; readonly luceneQuerySyntax: string; readonly percolate: string; readonly queryDsl: string; }; readonly date: { readonly dateMath: string; readonly dateMathIndexNames: string; }; readonly management: Record<string, string>; readonly ml: Record<string, string>; readonly transforms: Record<string, string>; readonly visualize: Record<string, string>; readonly apis: Readonly<{ bulkIndexAlias: string; byteSizeUnits: string; createAutoFollowPattern: string; createFollower: string; 
createIndex: string; createSnapshotLifecyclePolicy: string; createRoleMapping: string; createRoleMappingTemplates: string; createRollupJobsRequest: string; createApiKey: string; createPipeline: string; createTransformRequest: string; cronExpressions: string; executeWatchActionModes: string; indexExists: string; openIndex: string; putComponentTemplate: string; painlessExecute: string; painlessExecuteAPIContexts: string; putComponentTemplateMetadata: string; putSnapshotLifecyclePolicy: string; putIndexTemplateV1: string; putWatch: string; simulatePipeline: string; timeUnits: string; updateTransform: string; }>; readonly observability: Readonly<{ guide: string; infrastructureThreshold: string; logsThreshold: string; metricsThreshold: string; monitorStatus: string; monitorUptime: string; tlsCertificate: string; uptimeDurationAnomaly: string; }>; readonly alerting: Record<string, string>; readonly maps: Readonly<{ guide: string; importGeospatialPrivileges: string; gdalTutorial: string; }>; readonly monitoring: Record<string, string>; readonly security: Readonly<{ apiKeyServiceSettings: string; clusterPrivileges: string; elasticsearchSettings: string; elasticsearchEnableSecurity: string; elasticsearchEnableApiKeys: string; indicesPrivileges: string; kibanaTLS: string; kibanaPrivileges: string; mappingRoles: string; mappingRolesFieldRules: string; runAsPrivilege: string; }>; readonly spaces: Readonly<{ kibanaLegacyUrlAliases: string; kibanaDisableLegacyUrlAliasesApi: string; }>; readonly watcher: Record<string, string>; readonly ccs: Record<string, string>; readonly plugins: { azureRepo: string; gcsRepo: string; hdfsRepo: string; s3Repo: string; snapshotRestoreRepos: string; mapperSize: string; }; readonly snapshotRestore: Record<string, string>; readonly ingest: Record<string, string>; readonly fleet: Readonly<{ beatsAgentComparison: string; guide: string; fleetServer: string; fleetServerAddFleetServer: string; settings: string; settingsFleetServerHostSettings: string; 
settingsFleetServerProxySettings: string; troubleshooting: string; elasticAgent: string; datastreams: string; datastreamsNamingScheme: string; installElasticAgent: string; installElasticAgentStandalone: string; upgradeElasticAgent: string; upgradeElasticAgent712lower: string; learnMoreBlog: string; apiKeysLearnMore: string; onPremRegistry: string; }>; readonly ecs: { readonly guide: string; }; readonly clients: { readonly guide: string; readonly goOverview: string; readonly javaIndex: string; readonly jsIntro: string; readonly netGuide: string; readonly perlGuide: string; readonly phpGuide: string; readonly pythonGuide: string; readonly rubyOverview: string; readonly rustGuide: string; }; readonly endpoints: { readonly troubleshooting: string; }; } | |
diff --git a/docs/settings/apm-settings.asciidoc b/docs/settings/apm-settings.asciidoc
index 77a250a14f929..27ea7f4dc7cd0 100644
--- a/docs/settings/apm-settings.asciidoc
+++ b/docs/settings/apm-settings.asciidoc
@@ -101,8 +101,8 @@ Changing these settings may disable features of the APM App.
| `xpack.apm.indices.sourcemap` {ess-icon}
| Matcher for all source map indices. Defaults to `apm-*`.
-| `xpack.apm.autocreateApmIndexPattern` {ess-icon}
- | Set to `false` to disable the automatic creation of the APM index pattern when the APM app is opened. Defaults to `true`.
+| `xpack.apm.autoCreateApmDataView` {ess-icon}
+ | Set to `false` to disable the automatic creation of the APM data view when the APM app is opened. Defaults to `true`.
|===
-// end::general-apm-settings[]
\ No newline at end of file
+// end::general-apm-settings[]
diff --git a/docs/settings/fleet-settings.asciidoc b/docs/settings/fleet-settings.asciidoc
index f0dfeb619bb38..a088f31937cc8 100644
--- a/docs/settings/fleet-settings.asciidoc
+++ b/docs/settings/fleet-settings.asciidoc
@@ -87,6 +87,7 @@ Optional properties are:
`data_output_id`:: ID of the output to send data (Need to be identical to `monitoring_output_id`)
`monitoring_output_id`:: ID of the output to send monitoring data. (Need to be identical to `data_output_id`)
`package_policies`:: List of integration policies to add to this policy.
+ `id`::: Unique ID of the integration policy. The ID may be a number or string.
`name`::: (required) Name of the integration policy.
`package`::: (required) Integration that this policy configures
`name`:::: Name of the integration associated with this policy.
@@ -128,6 +129,7 @@ xpack.fleet.agentPolicies:
- package:
name: system
name: System Integration
+ id: preconfigured-system
inputs:
- type: system/metrics
enabled: true
diff --git a/docs/settings/task-manager-settings.asciidoc b/docs/settings/task-manager-settings.asciidoc
index c61ef83953347..286bb71542b3a 100644
--- a/docs/settings/task-manager-settings.asciidoc
+++ b/docs/settings/task-manager-settings.asciidoc
@@ -9,51 +9,59 @@ Task Manager runs background tasks by polling for work on an interval. You can
[float]
[[task-manager-settings]]
-==== Task Manager settings
+==== Task Manager settings
-[cols="2*<"]
-|===
-| `xpack.task_manager.max_attempts`
- | The maximum number of times a task will be attempted before being abandoned as failed. Defaults to 3.
-| `xpack.task_manager.poll_interval`
- | How often, in milliseconds, the task manager will look for more work. Defaults to 3000 and cannot be lower than 100.
-| `xpack.task_manager.request_capacity`
- | How many requests can Task Manager buffer before it rejects new requests. Defaults to 1000.
+`xpack.task_manager.max_attempts`::
+The maximum number of times a task will be attempted before being abandoned as failed. Defaults to 3.
- | `xpack.task_manager.max_workers`
- | The maximum number of tasks that this Kibana instance will run simultaneously. Defaults to 10.
- Starting in 8.0, it will not be possible to set the value greater than 100.
+`xpack.task_manager.poll_interval`::
+How often, in milliseconds, the task manager will look for more work. Defaults to 3000 and cannot be lower than 100.
- | `xpack.task_manager.`
- `monitored_stats_health_verbose_log.enabled`
- | This flag will enable automatic warn and error logging if task manager self detects a performance issue, such as the time between when a task is scheduled to execute and when it actually executes. Defaults to false.
+`xpack.task_manager.request_capacity`::
+How many requests can Task Manager buffer before it rejects new requests. Defaults to 1000.
- | `xpack.task_manager.`
- `monitored_stats_health_verbose_log.`
- `warn_delayed_task_start_in_seconds`
- | The amount of seconds we allow a task to delay before printing a warning server log. Defaults to 60.
+`xpack.task_manager.max_workers`::
+The maximum number of tasks that this Kibana instance will run simultaneously. Defaults to 10.
+Starting in 8.0, it will not be possible to set the value greater than 100.
- | `xpack.task_manager.ephemeral_tasks.enabled`
- | Enables an experimental feature that executes a limited (and configurable) number of actions in the same task as the alert which triggered them.
- These action tasks will reduce the latency of the time it takes an action to run after it's triggered, but are not persisted as SavedObjects.
- These non-persisted action tasks have a risk that they won't be run at all if the Kibana instance running them exits unexpectedly. Defaults to false.
+`xpack.task_manager.monitored_stats_health_verbose_log.enabled`::
+This flag will enable automatic warn and error logging if task manager self detects a performance issue, such as the time between when a task is scheduled to execute and when it actually executes. Defaults to false.
+
+`xpack.task_manager.monitored_stats_health_verbose_log.warn_delayed_task_start_in_seconds`::
+The amount of seconds we allow a task to delay before printing a warning server log. Defaults to 60.
+
+`xpack.task_manager.ephemeral_tasks.enabled`::
+Enables an experimental feature that executes a limited (and configurable) number of actions in the same task as the alert which triggered them.
+These action tasks will reduce the latency of the time it takes an action to run after it's triggered, but are not persisted as SavedObjects.
+These non-persisted action tasks have a risk that they won't be run at all if the Kibana instance running them exits unexpectedly. Defaults to false.
+
+`xpack.task_manager.ephemeral_tasks.request_capacity`::
+Sets the size of the ephemeral queue defined above. Defaults to 10.
- | `xpack.task_manager.ephemeral_tasks.request_capacity`
- | Sets the size of the ephemeral queue defined above. Defaults to 10.
-|===
[float]
[[task-manager-health-settings]]
-==== Task Manager Health settings
+==== Task Manager Health settings
Settings that configure the <<task-manager-health-monitoring,health monitoring>> endpoint.
-[cols="2*<"]
-|===
-| `xpack.task_manager.`
-`monitored_task_execution_thresholds`
- | Configures the threshold of failed task executions at which point the `warn` or `error` health status is set under each task type execution status (under `stats.runtime.value.execution.result_frequency_percent_as_number[${task type}].status`). This setting allows configuration of both the default level and a custom task type specific level. By default, this setting is configured to mark the health of every task type as `warning` when it exceeds 80% failed executions, and as `error` at 90%. Custom configurations allow you to reduce this threshold to catch failures sooner for task types that you might consider critical, such as alerting tasks. This value can be set to any number between 0 to 100, and a threshold is hit when the value *exceeds* this number. This means that you can avoid setting the status to `error` by setting the threshold at 100, or hit `error` the moment any task fails by setting the threshold to 0 (as it will exceed 0 once a single failure occurs).
-
-|===
+`xpack.task_manager.monitored_task_execution_thresholds`::
+Configures the threshold of failed task executions at which point the `warn` or
+`error` health status is set under each task type execution status
+(under `stats.runtime.value.execution.result_frequency_percent_as_number[${task type}].status`).
++
+This setting allows configuration of both the default level and a
+custom task type specific level. By default, this setting is configured to mark
+the health of every task type as `warning` when it exceeds 80% failed executions,
+and as `error` at 90%.
++
+Custom configurations allow you to reduce this threshold to catch failures sooner
+for task types that you might consider critical, such as alerting tasks.
++
+This value can be set to any number between 0 to 100, and a threshold is hit
+when the value *exceeds* this number. This means that you can avoid setting the
+status to `error` by setting the threshold at 100, or hit `error` the moment
+any task fails by setting the threshold to 0 (as it will exceed 0 once a
+single failure occurs).
diff --git a/docs/settings/telemetry-settings.asciidoc b/docs/settings/telemetry-settings.asciidoc
index 0329e2f010e80..65f78a2eaf12d 100644
--- a/docs/settings/telemetry-settings.asciidoc
+++ b/docs/settings/telemetry-settings.asciidoc
@@ -17,29 +17,26 @@ See our https://www.elastic.co/legal/privacy-statement[Privacy Statement] to lea
[[telemetry-general-settings]]
==== General telemetry settings
-[cols="2*<"]
-|===
-|[[telemetry-enabled]] `telemetry.enabled`
- | Set to `true` to send cluster statistics to Elastic. Reporting your
+
+[[telemetry-enabled]] `telemetry.enabled`::
+ Set to `true` to send cluster statistics to Elastic. Reporting your
cluster statistics helps us improve your user experience. Your data is never
shared with anyone. Set to `false` to disable statistics reporting from any
browser connected to the {kib} instance. Defaults to `true`.
-| `telemetry.sendUsageFrom`
- | Set to `'server'` to report the cluster statistics from the {kib} server.
+`telemetry.sendUsageFrom`::
+ Set to `'server'` to report the cluster statistics from the {kib} server.
If the server fails to connect to our endpoint at https://telemetry.elastic.co/, it assumes
it is behind a firewall and falls back to `'browser'` to send it from users' browsers
when they are navigating through {kib}. Defaults to `'server'`.
-|[[telemetry-optIn]] `telemetry.optIn`
- | Set to `true` to automatically opt into reporting cluster statistics. You can also opt out through
+[[telemetry-optIn]] `telemetry.optIn`::
+ Set to `true` to automatically opt into reporting cluster statistics. You can also opt out through
*Advanced Settings* in {kib}. Defaults to `true`.
-| `telemetry.allowChangingOptInStatus`
- | Set to `true` to allow overwriting the <> setting via the {kib} UI. Defaults to `true`. +
-
-|===
-
+`telemetry.allowChangingOptInStatus`::
+ Set to `true` to allow overwriting the <> setting via the {kib} UI. Defaults to `true`. +
++
[NOTE]
============
When `false`, <> must be `true`. To disable telemetry and not allow users to change that parameter, use <>.
diff --git a/docs/setup/upgrade.asciidoc b/docs/setup/upgrade.asciidoc
index a139b8a50ca4d..c828b837d8efd 100644
--- a/docs/setup/upgrade.asciidoc
+++ b/docs/setup/upgrade.asciidoc
@@ -44,13 +44,20 @@ a|
[[upgrade-before-you-begin]]
=== Before you begin
-WARNING: {kib} automatically runs upgrade migrations when required. To roll back to an earlier version in case of an upgrade failure, you **must** have a {ref}/snapshot-restore.html[backup snapshot] available. This snapshot must include the `kibana` feature state or all `kibana*` indices. For more information see <>.
+[WARNING]
+====
+{kib} automatically runs upgrade migrations when required. To roll back to an
+earlier version in case of an upgrade failure, you **must** have a
+{ref}/snapshot-restore.html[backup snapshot] that includes the `kibana` feature
+state. Snapshots include this feature state by default.
+
+For more information, refer to <>.
+====
Before you upgrade {kib}:
* Consult the <>.
-* {ref}/snapshots-take-snapshot.html[Take a snapshot] of your data. To roll back to an earlier version, the snapshot must include the `kibana` feature state or all `.kibana*` indices.
-* Although not a requirement for rollbacks, we recommend taking a snapshot of all {kib} indices created by the plugins you use such as the `.reporting*` indices created by the reporting plugin.
+* {ref}/snapshots-take-snapshot.html[Take a snapshot] of your data. To roll back to an earlier version, the snapshot must include the `kibana` feature state.
* Before you upgrade production servers, test the upgrades in a dev environment.
* See <> for common reasons upgrades fail and how to prevent these.
* If you are using custom plugins, check that a compatible version is
diff --git a/docs/setup/upgrade/upgrade-migrations.asciidoc b/docs/setup/upgrade/upgrade-migrations.asciidoc
index c47c2c1745e94..e9e1b757fd71d 100644
--- a/docs/setup/upgrade/upgrade-migrations.asciidoc
+++ b/docs/setup/upgrade/upgrade-migrations.asciidoc
@@ -151,17 +151,18 @@ In order to rollback after a failed upgrade migration, the saved object indices
[float]
===== Rollback by restoring a backup snapshot:
-1. Before proceeding, {ref}/snapshots-take-snapshot.html[take a snapshot] that contains the `kibana` feature state or all `.kibana*` indices.
+1. Before proceeding, {ref}/snapshots-take-snapshot.html[take a snapshot] that contains the `kibana` feature state.
+ Snapshots include this feature state by default.
2. Shutdown all {kib} instances to be 100% sure that there are no instances currently performing a migration.
3. Delete all saved object indices with `DELETE /.kibana*`
-4. {ref}/snapshots-restore-snapshot.html[Restore] the `kibana` feature state or all `.kibana* indices and their aliases from the snapshot.
+4. {ref}/snapshots-restore-snapshot.html[Restore] the `kibana` feature state from the snapshot.
5. Start up all {kib} instances on the older version you wish to rollback to.
[float]
===== (Not recommended) Rollback without a backup snapshot:
1. Shutdown all {kib} instances to be 100% sure that there are no {kib} instances currently performing a migration.
-2. {ref}/snapshots-take-snapshot.html[Take a snapshot] that includes the `kibana` feature state or all `.kibana*` indices.
+2. {ref}/snapshots-take-snapshot.html[Take a snapshot] that includes the `kibana` feature state. Snapshots include this feature state by default.
3. Delete the version specific indices created by the failed upgrade migration. E.g. if you wish to rollback from a failed upgrade to v7.12.0 `DELETE /.kibana_7.12.0_*,.kibana_task_manager_7.12.0_*`
4. Inspect the output of `GET /_cat/aliases`. If either the `.kibana` and/or `.kibana_task_manager` alias is missing, these will have to be created manually. Find the latest index from the output of `GET /_cat/indices` and create the missing alias to point to the latest index. E.g. if the `.kibana` alias was missing and the latest index is `.kibana_3` create a new alias with `POST /.kibana_3/_aliases/.kibana`.
5. Remove the write block from the rollback indices. `PUT /.kibana,.kibana_task_manager/_settings {"index.blocks.write": false}`
diff --git a/docs/user/alerting/alerting-troubleshooting.asciidoc b/docs/user/alerting/alerting-troubleshooting.asciidoc
index 74a32b94975ad..5f3c566e82d42 100644
--- a/docs/user/alerting/alerting-troubleshooting.asciidoc
+++ b/docs/user/alerting/alerting-troubleshooting.asciidoc
@@ -15,7 +15,7 @@ Rules and connectors log to the Kibana logger with tags of [alerting] and [actio
[source, txt]
--------------------------------------------------
-server log [11:39:40.389] [error][alerting][alerting][plugins][plugins] Executing Alert "5b6237b0-c6f6-11eb-b0ff-a1a0cbcf29b6" has resulted in Error: Saved object [action/fdbc8610-c6f5-11eb-b0ff-a1a0cbcf29b6] not found
+server log [11:39:40.389] [error][alerting][alerting][plugins][plugins] Executing Rule "5b6237b0-c6f6-11eb-b0ff-a1a0cbcf29b6" has resulted in Error: Saved object [action/fdbc8610-c6f5-11eb-b0ff-a1a0cbcf29b6] not found
--------------------------------------------------
Some of the resources, such as saved objects and API keys, may no longer be available or valid, yielding error messages about those missing resources.
diff --git a/docs/user/alerting/troubleshooting/event-log-index.asciidoc b/docs/user/alerting/troubleshooting/event-log-index.asciidoc
index 393b982b279f5..5016b6d6f19c9 100644
--- a/docs/user/alerting/troubleshooting/event-log-index.asciidoc
+++ b/docs/user/alerting/troubleshooting/event-log-index.asciidoc
@@ -170,7 +170,7 @@ And see the errors for the rules you might provide the next search query:
}
],
},
- "message": "alert executed: .index-threshold:30d856c0-b14b-11eb-9a7c-9df284da9f99: 'test'",
+ "message": "rule executed: .index-threshold:30d856c0-b14b-11eb-9a7c-9df284da9f99: 'test'",
"error" : {
"message" : "Saved object [action/ef0e2530-b14a-11eb-9a7c-9df284da9f99] not found"
},
diff --git a/docs/user/commands/cli-commands.asciidoc b/docs/user/commands/cli-commands.asciidoc
new file mode 100644
index 0000000000000..35a25235bc238
--- /dev/null
+++ b/docs/user/commands/cli-commands.asciidoc
@@ -0,0 +1,8 @@
+[[cli-commands]]
+== Command line tools
+
+{kib} provides the following tools for configuring security and performing other tasks from the command line:
+
+* <>
+
+include::kibana-verification-code.asciidoc[]
\ No newline at end of file
diff --git a/docs/user/commands/kibana-verification-code.asciidoc b/docs/user/commands/kibana-verification-code.asciidoc
new file mode 100644
index 0000000000000..3ad1b0da51e2b
--- /dev/null
+++ b/docs/user/commands/kibana-verification-code.asciidoc
@@ -0,0 +1,44 @@
+[[kibana-verification-code]]
+=== kibana-verification-code
+
+The `kibana-verification-code` tool retrieves a verification code for enrolling
+a {kib} instance with a secured {es} cluster.
+
+[discrete]
+==== Synopsis
+
+[source,shell]
+----
+bin/kibana-verification-code
+[-V, --version] [-h, --help]
+----
+
+[discrete]
+==== Description
+
+Use this command to retrieve a verification code for {kib}. You enter this code
+in {kib} when manually configuring a secure connection with an {es} cluster.
+This tool is useful if you don’t have access to the {kib} terminal output, such
+as on a hosted environment. You can connect to a machine where {kib} is
+running (such as using SSH) and retrieve a verification code that you enter in
+{kib}.
+
+IMPORTANT: You must run this tool on the same machine where {kib} is running.
+
+[discrete]
+[[kibana-verification-code-parameters]]
+==== Parameters
+
+`-h, --help`:: Returns all of the command parameters.
+
+`-V, --version`:: Displays the {kib} version number.
+
+[discrete]
+==== Examples
+
+The following command retrieves a verification code for {kib}.
+
+[source,shell]
+----
+bin/kibana-verification-code
+----
\ No newline at end of file
diff --git a/docs/user/index.asciidoc b/docs/user/index.asciidoc
index 75d0da1c597b6..57668b3f5bccf 100644
--- a/docs/user/index.asciidoc
+++ b/docs/user/index.asciidoc
@@ -45,3 +45,5 @@ include::management.asciidoc[]
include::api.asciidoc[]
include::plugins.asciidoc[]
+
+include::troubleshooting.asciidoc[]
diff --git a/docs/user/production-considerations/task-manager-troubleshooting.asciidoc b/docs/user/production-considerations/task-manager-troubleshooting.asciidoc
index 09eb304646e96..a22d46902f54c 100644
--- a/docs/user/production-considerations/task-manager-troubleshooting.asciidoc
+++ b/docs/user/production-considerations/task-manager-troubleshooting.asciidoc
@@ -1020,7 +1020,7 @@ This log message tells us that when Task Manager was running one of our rules, i
For example, in this case, we’d expect to see a corresponding log line from the Alerting framework itself, saying that the rule failed. You should look in the Kibana log for a line similar to the log line below (probably shortly before the Task Manager log line):
-Executing Alert "27559295-44e4-4983-aa1b-94fe043ab4f9" has resulted in Error: Unable to load resource ‘/api/something’
+Executing Rule "27559295-44e4-4983-aa1b-94fe043ab4f9" has resulted in Error: Unable to load resource ‘/api/something’
This would confirm that the error did in fact happen in the rule itself (rather than the Task Manager) and it would help us pin-point the specific ID of the rule which failed: 27559295-44e4-4983-aa1b-94fe043ab4f9
diff --git a/docs/user/setup.asciidoc b/docs/user/setup.asciidoc
index 546cc8f974865..87213249e0d97 100644
--- a/docs/user/setup.asciidoc
+++ b/docs/user/setup.asciidoc
@@ -70,3 +70,5 @@ include::monitoring/configuring-monitoring.asciidoc[leveloffset=+1]
include::monitoring/monitoring-metricbeat.asciidoc[leveloffset=+2]
include::monitoring/viewing-metrics.asciidoc[leveloffset=+2]
include::monitoring/monitoring-kibana.asciidoc[leveloffset=+2]
+
+include::commands/cli-commands.asciidoc[]
diff --git a/docs/user/troubleshooting.asciidoc b/docs/user/troubleshooting.asciidoc
new file mode 100644
index 0000000000000..8b32471c98d86
--- /dev/null
+++ b/docs/user/troubleshooting.asciidoc
@@ -0,0 +1,70 @@
+[[kibana-troubleshooting]]
+== Troubleshooting
+
+=== Using {kib} server logs
+{kib} Logs is a great way to see what's going on in your application and to debug performance issues. Navigating through a large number of generated logs can be overwhelming, and the following are some techniques that you can use to optimize the process.
+
+Start by defining a problem area that you are interested in. For example, you might be interested in seeing how a particular {kib} plugin is performing, so no need to gather logs for all of {kib}. Or you might want to focus on a particular feature, such as requests from the {kib} server to the {es} server.
+Depending on your needs, you can configure {kib} to generate logs for a specific feature.
+[source,yml]
+----
+logging:
+ appenders:
+ file:
+ type: file
+ fileName: ./kibana.log
+ layout:
+ type: json
+
+### gather all the Kibana logs into a file
+logging.root:
+ appenders: [file]
+ level: all
+
+### or gather a subset of the logs
+logging.loggers:
+ ### responses to an HTTP request
+ - name: http.server.response
+ level: debug
+ appenders: [file]
+ ### result of a query to the Elasticsearch server
+ - name: elasticsearch.query
+ level: debug
+ appenders: [file]
+ ### logs generated by my plugin
+ - name: plugins.myPlugin
+ level: debug
+ appenders: [file]
+----
+WARNING: Kibana's `file` appender is configured to produce logs in https://www.elastic.co/guide/en/ecs/master/ecs-reference.html[ECS JSON] format. It's the only format that includes the meta information necessary for https://www.elastic.co/guide/en/apm/agent/nodejs/current/log-correlation.html[log correlation] out-of-the-box.
+
+The next step is to define what https://www.elastic.co/observability[observability tools] are available.
+For a better experience, set up an https://www.elastic.co/guide/en/apm/get-started/current/observability-integrations.html[Observability integration] provided by Elastic to debug your application with the <>.
+To debug something quickly without setting up additional tooling, you can work with <>.
+
+[[debugging-logs-apm-ui]]
+==== APM UI
+*Prerequisites* {kib} logs are configured to be in https://www.elastic.co/guide/en/ecs/master/ecs-reference.html[ECS JSON] format to include tracing identifiers.
+
+To debug {kib} with the APM UI, you must set up the APM infrastructure. You can find instructions for the setup process
+https://www.elastic.co/guide/en/apm/get-started/current/observability-integrations.html[on the Observability integrations page].
+
+Once you set up the APM infrastructure, you can enable the APM agent and put {kib} under load to collect APM events. To analyze the collected metrics and logs, use the APM UI as demonstrated https://www.elastic.co/guide/en/kibana/master/transactions.html#transaction-trace-sample[in the docs].
+
+[[plain-kibana-logs]]
+==== Plain {kib} logs
+*Prerequisites* {kib} logs are configured to be in https://www.elastic.co/guide/en/ecs/master/ecs-reference.html[ECS JSON] format to include tracing identifiers.
+
+Open {kib} Logs and search for an operation you are interested in.
+For example, suppose you want to investigate the response times for queries to the `/api/telemetry/v2/clusters/_stats` {kib} endpoint.
+Open {kib} Logs and search for the HTTP server response for the endpoint. It looks similar to the following (some fields are omitted for brevity).
+[source,json]
+----
+{
+ "message":"POST /api/telemetry/v2/clusters/_stats 200 1014ms - 43.2KB",
+ "log":{"level":"DEBUG","logger":"http.server.response"},
+ "trace":{"id":"9b99131a6f66587971ef085ef97dfd07"},
+ "transaction":{"id":"d0c5bbf14f5febca"}
+}
+----
+You are interested in the https://www.elastic.co/guide/en/ecs/current/ecs-tracing.html#field-trace-id[trace.id] field, which is a unique identifier of a trace. The `trace.id` provides a way to group multiple events, like transactions, which belong together. You can search for `"trace":{"id":"9b99131a6f66587971ef085ef97dfd07"}` to get all the logs that belong to the same trace. This enables you to see how many {es} requests were triggered during the `9b99131a6f66587971ef085ef97dfd07` trace, what they looked like, what {es} endpoints were hit, and so on.
diff --git a/package.json b/package.json
index 6b7d6662eb70b..cae25e40ccf07 100644
--- a/package.json
+++ b/package.json
@@ -100,16 +100,15 @@
"@dnd-kit/core": "^3.1.1",
"@dnd-kit/sortable": "^4.0.0",
"@dnd-kit/utilities": "^2.0.0",
- "@elastic/apm-rum": "^5.9.1",
- "@elastic/apm-rum-react": "^1.3.1",
+ "@elastic/apm-rum": "^5.10.0",
+ "@elastic/apm-rum-react": "^1.3.2",
"@elastic/apm-synthtrace": "link:bazel-bin/packages/elastic-apm-synthtrace",
"@elastic/charts": "40.1.0",
"@elastic/datemath": "link:bazel-bin/packages/elastic-datemath",
"@elastic/elasticsearch": "npm:@elastic/elasticsearch-canary@^8.0.0-canary.35",
"@elastic/ems-client": "8.0.0",
- "@elastic/eui": "41.0.0",
+ "@elastic/eui": "41.2.3",
"@elastic/filesaver": "1.1.2",
- "@elastic/maki": "6.3.0",
"@elastic/node-crypto": "1.2.1",
"@elastic/numeral": "^2.5.1",
"@elastic/react-search-ui": "^1.6.0",
@@ -196,8 +195,10 @@
"archiver": "^5.2.0",
"axios": "^0.21.1",
"base64-js": "^1.3.1",
+ "bitmap-sdf": "^1.0.3",
"brace": "0.11.1",
"broadcast-channel": "^4.7.0",
+ "canvg": "^3.0.9",
"chalk": "^4.1.0",
"cheerio": "^1.0.0-rc.10",
"chokidar": "^3.4.3",
@@ -225,7 +226,7 @@
"deep-freeze-strict": "^1.1.1",
"deepmerge": "^4.2.2",
"del": "^5.1.0",
- "elastic-apm-node": "^3.25.0",
+ "elastic-apm-node": "^3.26.0",
"execa": "^4.0.2",
"exit-hook": "^2.2.0",
"expiry-js": "0.1.7",
@@ -368,7 +369,7 @@
"redux-thunks": "^1.0.0",
"regenerator-runtime": "^0.13.3",
"remark-parse": "^8.0.3",
- "remark-stringify": "^9.0.0",
+ "remark-stringify": "^8.0.3",
"require-in-the-middle": "^5.1.0",
"reselect": "^4.0.0",
"resize-observer-polyfill": "^1.5.1",
@@ -520,7 +521,6 @@
"@types/ejs": "^3.0.6",
"@types/elastic__apm-synthtrace": "link:bazel-bin/packages/elastic-apm-synthtrace/npm_module_types",
"@types/elastic__datemath": "link:bazel-bin/packages/elastic-datemath/npm_module_types",
- "@types/elasticsearch": "^5.0.33",
"@types/enzyme": "^3.10.8",
"@types/eslint": "^7.28.0",
"@types/express": "^4.17.13",
@@ -567,7 +567,10 @@
"@types/kbn__config": "link:bazel-bin/packages/kbn-config/npm_module_types",
"@types/kbn__config-schema": "link:bazel-bin/packages/kbn-config-schema/npm_module_types",
"@types/kbn__crypto": "link:bazel-bin/packages/kbn-crypto/npm_module_types",
+ "@types/kbn__dev-utils": "link:bazel-bin/packages/kbn-dev-utils/npm_module_types",
"@types/kbn__docs-utils": "link:bazel-bin/packages/kbn-docs-utils/npm_module_types",
+ "@types/kbn__es-archiver": "link:bazel-bin/packages/kbn-es-archiver/npm_module_types",
+ "@types/kbn__es-query": "link:bazel-bin/packages/kbn-es-query/npm_module_types",
"@types/kbn__i18n": "link:bazel-bin/packages/kbn-i18n/npm_module_types",
"@types/kbn__i18n-react": "link:bazel-bin/packages/kbn-i18n-react/npm_module_types",
"@types/license-checker": "15.0.0",
diff --git a/packages/BUILD.bazel b/packages/BUILD.bazel
index aa90c3c122171..5fdaa9931bc4d 100644
--- a/packages/BUILD.bazel
+++ b/packages/BUILD.bazel
@@ -86,7 +86,10 @@ filegroup(
"//packages/kbn-config:build_types",
"//packages/kbn-config-schema:build_types",
"//packages/kbn-crypto:build_types",
+ "//packages/kbn-dev-utils:build_types",
"//packages/kbn-docs-utils:build_types",
+ "//packages/kbn-es-archiver:build_types",
+ "//packages/kbn-es-query:build_types",
"//packages/kbn-i18n:build_types",
"//packages/kbn-i18n-react:build_types",
],
diff --git a/packages/elastic-apm-synthtrace/src/lib/apm/apm_fields.ts b/packages/elastic-apm-synthtrace/src/lib/apm/apm_fields.ts
index a7a826d144d0e..e0a48fdcf2b89 100644
--- a/packages/elastic-apm-synthtrace/src/lib/apm/apm_fields.ts
+++ b/packages/elastic-apm-synthtrace/src/lib/apm/apm_fields.ts
@@ -15,6 +15,9 @@ export type ApmApplicationMetricFields = Partial<{
'system.cpu.total.norm.pct': number;
'system.process.memory.rss.bytes': number;
'system.process.cpu.total.norm.pct': number;
+ 'jvm.memory.heap.used': number;
+ 'jvm.memory.non_heap.used': number;
+ 'jvm.thread.count': number;
}>;
export type ApmUserAgentFields = Partial<{
diff --git a/packages/elastic-apm-synthtrace/src/lib/apm/service.ts b/packages/elastic-apm-synthtrace/src/lib/apm/service.ts
index 16917821c7ee4..d55f60d86e4db 100644
--- a/packages/elastic-apm-synthtrace/src/lib/apm/service.ts
+++ b/packages/elastic-apm-synthtrace/src/lib/apm/service.ts
@@ -15,6 +15,7 @@ export class Service extends Entity {
return new Instance({
...this.fields,
['service.node.name']: instanceName,
+ 'host.name': instanceName,
'container.id': instanceName,
});
}
diff --git a/packages/elastic-apm-synthtrace/src/test/scenarios/01_simple_trace.test.ts b/packages/elastic-apm-synthtrace/src/test/scenarios/01_simple_trace.test.ts
index b38d34266f3ac..a78f1ec987bcf 100644
--- a/packages/elastic-apm-synthtrace/src/test/scenarios/01_simple_trace.test.ts
+++ b/packages/elastic-apm-synthtrace/src/test/scenarios/01_simple_trace.test.ts
@@ -70,6 +70,7 @@ describe('simple trace', () => {
'agent.name': 'java',
'container.id': 'instance-1',
'event.outcome': 'success',
+ 'host.name': 'instance-1',
'processor.event': 'transaction',
'processor.name': 'transaction',
'service.environment': 'production',
@@ -92,6 +93,7 @@ describe('simple trace', () => {
'agent.name': 'java',
'container.id': 'instance-1',
'event.outcome': 'success',
+ 'host.name': 'instance-1',
'parent.id': '0000000000000300',
'processor.event': 'span',
'processor.name': 'transaction',
diff --git a/packages/elastic-apm-synthtrace/src/test/scenarios/__snapshots__/01_simple_trace.test.ts.snap b/packages/elastic-apm-synthtrace/src/test/scenarios/__snapshots__/01_simple_trace.test.ts.snap
index 76a76d41ec81d..1a5fca39e9fd9 100644
--- a/packages/elastic-apm-synthtrace/src/test/scenarios/__snapshots__/01_simple_trace.test.ts.snap
+++ b/packages/elastic-apm-synthtrace/src/test/scenarios/__snapshots__/01_simple_trace.test.ts.snap
@@ -7,6 +7,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"processor.event": "transaction",
"processor.name": "transaction",
"service.environment": "production",
@@ -24,6 +25,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"parent.id": "0000000000000000",
"processor.event": "span",
"processor.name": "transaction",
@@ -43,6 +45,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"processor.event": "transaction",
"processor.name": "transaction",
"service.environment": "production",
@@ -60,6 +63,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"parent.id": "0000000000000004",
"processor.event": "span",
"processor.name": "transaction",
@@ -79,6 +83,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"processor.event": "transaction",
"processor.name": "transaction",
"service.environment": "production",
@@ -96,6 +101,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"parent.id": "0000000000000008",
"processor.event": "span",
"processor.name": "transaction",
@@ -115,6 +121,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"processor.event": "transaction",
"processor.name": "transaction",
"service.environment": "production",
@@ -132,6 +139,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"parent.id": "0000000000000012",
"processor.event": "span",
"processor.name": "transaction",
@@ -151,6 +159,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"processor.event": "transaction",
"processor.name": "transaction",
"service.environment": "production",
@@ -168,6 +177,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"parent.id": "0000000000000016",
"processor.event": "span",
"processor.name": "transaction",
@@ -187,6 +197,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"processor.event": "transaction",
"processor.name": "transaction",
"service.environment": "production",
@@ -204,6 +215,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"parent.id": "0000000000000020",
"processor.event": "span",
"processor.name": "transaction",
@@ -223,6 +235,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"processor.event": "transaction",
"processor.name": "transaction",
"service.environment": "production",
@@ -240,6 +253,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"parent.id": "0000000000000024",
"processor.event": "span",
"processor.name": "transaction",
@@ -259,6 +273,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"processor.event": "transaction",
"processor.name": "transaction",
"service.environment": "production",
@@ -276,6 +291,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"parent.id": "0000000000000028",
"processor.event": "span",
"processor.name": "transaction",
@@ -295,6 +311,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"processor.event": "transaction",
"processor.name": "transaction",
"service.environment": "production",
@@ -312,6 +329,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"parent.id": "0000000000000032",
"processor.event": "span",
"processor.name": "transaction",
@@ -331,6 +349,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"processor.event": "transaction",
"processor.name": "transaction",
"service.environment": "production",
@@ -348,6 +367,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"parent.id": "0000000000000036",
"processor.event": "span",
"processor.name": "transaction",
@@ -367,6 +387,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"processor.event": "transaction",
"processor.name": "transaction",
"service.environment": "production",
@@ -384,6 +405,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"parent.id": "0000000000000040",
"processor.event": "span",
"processor.name": "transaction",
@@ -403,6 +425,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"processor.event": "transaction",
"processor.name": "transaction",
"service.environment": "production",
@@ -420,6 +443,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"parent.id": "0000000000000044",
"processor.event": "span",
"processor.name": "transaction",
@@ -439,6 +463,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"processor.event": "transaction",
"processor.name": "transaction",
"service.environment": "production",
@@ -456,6 +481,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"parent.id": "0000000000000048",
"processor.event": "span",
"processor.name": "transaction",
@@ -475,6 +501,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"processor.event": "transaction",
"processor.name": "transaction",
"service.environment": "production",
@@ -492,6 +519,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"parent.id": "0000000000000052",
"processor.event": "span",
"processor.name": "transaction",
@@ -511,6 +539,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"processor.event": "transaction",
"processor.name": "transaction",
"service.environment": "production",
@@ -528,6 +557,7 @@ Array [
"agent.name": "java",
"container.id": "instance-1",
"event.outcome": "success",
+ "host.name": "instance-1",
"parent.id": "0000000000000056",
"processor.event": "span",
"processor.name": "transaction",
diff --git a/packages/elastic-eslint-config-kibana/react.js b/packages/elastic-eslint-config-kibana/react.js
index 29000bdb15684..0b1cce15de9ad 100644
--- a/packages/elastic-eslint-config-kibana/react.js
+++ b/packages/elastic-eslint-config-kibana/react.js
@@ -1,5 +1,5 @@
const semver = require('semver');
-const { kibanaPackageJson: PKG } = require('@kbn/dev-utils');
+const { kibanaPackageJson: PKG } = require('@kbn/utils');
module.exports = {
plugins: [
diff --git a/packages/elastic-eslint-config-kibana/typescript.js b/packages/elastic-eslint-config-kibana/typescript.js
index 1a0ef81ae2f1e..3ada725cb1805 100644
--- a/packages/elastic-eslint-config-kibana/typescript.js
+++ b/packages/elastic-eslint-config-kibana/typescript.js
@@ -4,7 +4,7 @@
// as this package was moved from typescript-eslint-parser to @typescript-eslint/parser
const semver = require('semver');
-const { kibanaPackageJson: PKG } = require('@kbn/dev-utils');
+const { kibanaPackageJson: PKG } = require('@kbn/utils');
const eslintConfigPrettierTypescriptEslintRules = require('eslint-config-prettier/@typescript-eslint').rules;
diff --git a/packages/kbn-cli-dev-mode/BUILD.bazel b/packages/kbn-cli-dev-mode/BUILD.bazel
index dfb441dffc6ef..cdc40e85c972a 100644
--- a/packages/kbn-cli-dev-mode/BUILD.bazel
+++ b/packages/kbn-cli-dev-mode/BUILD.bazel
@@ -50,7 +50,7 @@ RUNTIME_DEPS = [
TYPES_DEPS = [
"//packages/kbn-config:npm_module_types",
"//packages/kbn-config-schema:npm_module_types",
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"//packages/kbn-logging",
"//packages/kbn-optimizer",
"//packages/kbn-server-http-tools",
diff --git a/packages/kbn-cli-dev-mode/src/cli_dev_mode.test.ts b/packages/kbn-cli-dev-mode/src/cli_dev_mode.test.ts
index e5e009e51e69e..0066644d0825a 100644
--- a/packages/kbn-cli-dev-mode/src/cli_dev_mode.test.ts
+++ b/packages/kbn-cli-dev-mode/src/cli_dev_mode.test.ts
@@ -8,11 +8,9 @@
import Path from 'path';
import * as Rx from 'rxjs';
-import {
- REPO_ROOT,
- createAbsolutePathSerializer,
- createAnyInstanceSerializer,
-} from '@kbn/dev-utils';
+import { createAbsolutePathSerializer, createAnyInstanceSerializer } from '@kbn/dev-utils';
+
+import { REPO_ROOT } from '@kbn/utils';
import { TestLog } from './log';
import { CliDevMode, SomeCliArgs } from './cli_dev_mode';
diff --git a/packages/kbn-cli-dev-mode/src/cli_dev_mode.ts b/packages/kbn-cli-dev-mode/src/cli_dev_mode.ts
index 2396b316aa3a2..9cf688b675e67 100644
--- a/packages/kbn-cli-dev-mode/src/cli_dev_mode.ts
+++ b/packages/kbn-cli-dev-mode/src/cli_dev_mode.ts
@@ -22,7 +22,8 @@ import {
takeUntil,
} from 'rxjs/operators';
import { CliArgs } from '@kbn/config';
-import { REPO_ROOT, CiStatsReporter } from '@kbn/dev-utils';
+import { CiStatsReporter } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { Log, CliLog } from './log';
import { Optimizer } from './optimizer';
diff --git a/packages/kbn-cli-dev-mode/src/get_server_watch_paths.test.ts b/packages/kbn-cli-dev-mode/src/get_server_watch_paths.test.ts
index 06ded8d8bf526..25bc59bf78458 100644
--- a/packages/kbn-cli-dev-mode/src/get_server_watch_paths.test.ts
+++ b/packages/kbn-cli-dev-mode/src/get_server_watch_paths.test.ts
@@ -8,7 +8,8 @@
import Path from 'path';
-import { REPO_ROOT, createAbsolutePathSerializer } from '@kbn/dev-utils';
+import { createAbsolutePathSerializer } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { getServerWatchPaths } from './get_server_watch_paths';
diff --git a/packages/kbn-cli-dev-mode/src/get_server_watch_paths.ts b/packages/kbn-cli-dev-mode/src/get_server_watch_paths.ts
index f075dc806b6ec..acfc9aeecdc80 100644
--- a/packages/kbn-cli-dev-mode/src/get_server_watch_paths.ts
+++ b/packages/kbn-cli-dev-mode/src/get_server_watch_paths.ts
@@ -9,7 +9,7 @@
import Path from 'path';
import Fs from 'fs';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
interface Options {
pluginPaths: string[];
diff --git a/packages/kbn-config-schema/src/byte_size_value/index.test.ts b/packages/kbn-config-schema/src/byte_size_value/index.test.ts
index a5d0142853416..7a2e3a5d6cb0f 100644
--- a/packages/kbn-config-schema/src/byte_size_value/index.test.ts
+++ b/packages/kbn-config-schema/src/byte_size_value/index.test.ts
@@ -30,6 +30,11 @@ describe('parsing units', () => {
expect(ByteSizeValue.parse('1gb').getValueInBytes()).toBe(1073741824);
});
+ test('case insensitive units', () => {
+ expect(ByteSizeValue.parse('1KB').getValueInBytes()).toBe(1024);
+ expect(ByteSizeValue.parse('1Mb').getValueInBytes()).toBe(1024 * 1024);
+ });
+
test('throws an error when unsupported unit specified', () => {
expect(() => ByteSizeValue.parse('1tb')).toThrowErrorMatchingInlineSnapshot(
`"Failed to parse value as byte value. Value must be either number of bytes, or follow the format [b|kb|mb|gb] (e.g., '1024kb', '200mb', '1gb'), where the number is a safe positive integer."`
diff --git a/packages/kbn-config-schema/src/byte_size_value/index.ts b/packages/kbn-config-schema/src/byte_size_value/index.ts
index fb90bd70ed5c6..6fabe35b30024 100644
--- a/packages/kbn-config-schema/src/byte_size_value/index.ts
+++ b/packages/kbn-config-schema/src/byte_size_value/index.ts
@@ -22,7 +22,7 @@ function renderUnit(value: number, unit: string) {
export class ByteSizeValue {
public static parse(text: string): ByteSizeValue {
- const match = /([1-9][0-9]*)(b|kb|mb|gb)/.exec(text);
+ const match = /([1-9][0-9]*)(b|kb|mb|gb)/i.exec(text);
if (!match) {
const number = Number(text);
if (typeof number !== 'number' || isNaN(number)) {
@@ -35,7 +35,7 @@ export class ByteSizeValue {
}
const value = parseInt(match[1], 10);
- const unit = match[2];
+ const unit = match[2].toLowerCase();
return new ByteSizeValue(value * unitMultiplier[unit]);
}
diff --git a/packages/kbn-config/src/deprecation/apply_deprecations.test.ts b/packages/kbn-config/src/deprecation/apply_deprecations.test.ts
index 70945b2d96b32..3f84eed867655 100644
--- a/packages/kbn-config/src/deprecation/apply_deprecations.test.ts
+++ b/packages/kbn-config/src/deprecation/apply_deprecations.test.ts
@@ -116,6 +116,36 @@ describe('applyDeprecations', () => {
expect(migrated).toEqual({ foo: 'bar', newname: 'renamed' });
});
+ it('nested properties take into account if their parents are empty objects, and remove them if so', () => {
+ const initialConfig = {
+ foo: 'bar',
+ deprecated: { nested: 'deprecated' },
+ nested: {
+ from: {
+ rename: 'renamed',
+ },
+ to: {
+ keep: 'keep',
+ },
+ },
+ };
+
+ const { config: migrated } = applyDeprecations(initialConfig, [
+ wrapHandler(deprecations.unused('deprecated.nested')),
+ wrapHandler(deprecations.rename('nested.from.rename', 'nested.to.renamed')),
+ ]);
+
+ expect(migrated).toStrictEqual({
+ foo: 'bar',
+ nested: {
+ to: {
+ keep: 'keep',
+ renamed: 'renamed',
+ },
+ },
+ });
+ });
+
it('does not alter the initial config', () => {
const initialConfig = { foo: 'bar', deprecated: 'deprecated' };
diff --git a/packages/kbn-config/src/deprecation/apply_deprecations.ts b/packages/kbn-config/src/deprecation/apply_deprecations.ts
index 11b35840969d0..9b0c409204414 100644
--- a/packages/kbn-config/src/deprecation/apply_deprecations.ts
+++ b/packages/kbn-config/src/deprecation/apply_deprecations.ts
@@ -6,13 +6,14 @@
* Side Public License, v 1.
*/
-import { cloneDeep, unset } from 'lodash';
+import { cloneDeep } from 'lodash';
import { set } from '@elastic/safer-lodash-set';
import type {
AddConfigDeprecation,
ChangedDeprecatedPaths,
ConfigDeprecationWithContext,
} from './types';
+import { unsetAndCleanEmptyParent } from './unset_and_clean_empty_parent';
const noopAddDeprecationFactory: () => AddConfigDeprecation = () => () => undefined;
@@ -45,7 +46,7 @@ export const applyDeprecations = (
if (commands.unset) {
changedPaths.unset.push(...commands.unset.map((c) => c.path));
commands.unset.forEach(function ({ path: commandPath }) {
- unset(result, commandPath);
+ unsetAndCleanEmptyParent(result, commandPath);
});
}
}
diff --git a/packages/kbn-config/src/deprecation/types.ts b/packages/kbn-config/src/deprecation/types.ts
index 7b1eb4a0ea6c1..6abe4cd94a6fb 100644
--- a/packages/kbn-config/src/deprecation/types.ts
+++ b/packages/kbn-config/src/deprecation/types.ts
@@ -186,6 +186,25 @@ export interface ConfigDeprecationFactory {
* rename('oldKey', 'newKey'),
* ]
* ```
+ *
+ * @remarks
+ * If the oldKey is a nested property and it's the last property in an object, it may remove any empty-object parent keys.
+ * ```
+ * // Original object
+ * {
+ * a: {
+ * b: { c: 1 },
+ * d: { e: 1 }
+ * }
+ * }
+ *
+ * // If rename('a.b.c', 'a.d.c'), the resulting object removes the entire "a.b" tree because "c" was the last property in that branch
+ * {
+ * a: {
+ * d: { c: 1, e: 1 }
+ * }
+ * }
+ * ```
*/
rename(
oldKey: string,
@@ -207,6 +226,25 @@ export interface ConfigDeprecationFactory {
* renameFromRoot('oldplugin.key', 'newplugin.key'),
* ]
* ```
+ *
+ * @remarks
+ * If the oldKey is a nested property and it's the last property in an object, it may remove any empty-object parent keys.
+ * ```
+ * // Original object
+ * {
+ * a: {
+ * b: { c: 1 },
+ * d: { e: 1 }
+ * }
+ * }
+ *
+ * // If renameFromRoot('a.b.c', 'a.d.c'), the resulting object removes the entire "a.b" tree because "c" was the last property in that branch
+ * {
+ * a: {
+ * d: { c: 1, e: 1 }
+ * }
+ * }
+ * ```
*/
renameFromRoot(
oldKey: string,
@@ -225,6 +263,25 @@ export interface ConfigDeprecationFactory {
* unused('deprecatedKey'),
* ]
* ```
+ *
+ * @remarks
+ * If the path is a nested property and it's the last property in an object, it may remove any empty-object parent keys.
+ * ```
+ * // Original object
+ * {
+ * a: {
+ * b: { c: 1 },
+ * d: { e: 1 }
+ * }
+ * }
+ *
+ * // If unused('a.b.c'), the resulting object removes the entire "a.b" tree because "c" was the last property in that branch
+ * {
+ * a: {
+ * d: { e: 1 }
+ * }
+ * }
+ * ```
*/
unused(unusedKey: string, details?: Partial<DeprecatedConfigDetails>): ConfigDeprecation;
@@ -242,6 +299,25 @@ export interface ConfigDeprecationFactory {
* unusedFromRoot('somepath.deprecatedProperty'),
* ]
* ```
+ *
+ * @remarks
+ * If the path is a nested property and it's the last property in an object, it may remove any empty-object parent keys.
+ * ```
+ * // Original object
+ * {
+ * a: {
+ * b: { c: 1 },
+ * d: { e: 1 }
+ * }
+ * }
+ *
+ * // If unusedFromRoot('a.b.c'), the resulting object removes the entire "a.b" tree because "c" was the last property in that branch
+ * {
+ * a: {
+ * d: { e: 1 }
+ * }
+ * }
+ * ```
*/
unusedFromRoot(unusedKey: string, details?: Partial<DeprecatedConfigDetails>): ConfigDeprecation;
}
diff --git a/packages/kbn-config/src/deprecation/unset_and_clean_empty_parent.test.ts b/packages/kbn-config/src/deprecation/unset_and_clean_empty_parent.test.ts
new file mode 100644
index 0000000000000..115730c106137
--- /dev/null
+++ b/packages/kbn-config/src/deprecation/unset_and_clean_empty_parent.test.ts
@@ -0,0 +1,41 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { unsetAndCleanEmptyParent } from './unset_and_clean_empty_parent';
+
+describe('unsetAndCleanEmptyParent', () => {
+ test('unsets the property of the root object, and returns an empty root object', () => {
+ const config = { toRemove: 'toRemove' };
+ unsetAndCleanEmptyParent(config, 'toRemove');
+ expect(config).toStrictEqual({});
+ });
+
+ test('unsets a nested property of the root object, and removes the empty parent property', () => {
+ const config = { nestedToRemove: { toRemove: 'toRemove' } };
+ unsetAndCleanEmptyParent(config, 'nestedToRemove.toRemove');
+ expect(config).toStrictEqual({});
+ });
+
+ describe('Navigating to parent known issue: Array paths', () => {
+ // We navigate to the parent property by splitting the "." and dropping the last item in the path.
+ // This means that paths that are declared as prop1[idx] cannot apply the parent's cleanup logic.
+ // The use cases for this are quite limited, so we'll accept it as a documented limitation.
+
+ test('does not remove a parent array when the index is specified with square brackets', () => {
+ const config = { nestedToRemove: [{ toRemove: 'toRemove' }] };
+ unsetAndCleanEmptyParent(config, 'nestedToRemove[0].toRemove');
+ expect(config).toStrictEqual({ nestedToRemove: [{}] });
+ });
+
+ test('removes a parent array when the index is specified with dots', () => {
+ const config = { nestedToRemove: [{ toRemove: 'toRemove' }] };
+ unsetAndCleanEmptyParent(config, 'nestedToRemove.0.toRemove');
+ expect(config).toStrictEqual({});
+ });
+ });
+});
diff --git a/packages/kbn-config/src/deprecation/unset_and_clean_empty_parent.ts b/packages/kbn-config/src/deprecation/unset_and_clean_empty_parent.ts
new file mode 100644
index 0000000000000..c5f5e5951adc4
--- /dev/null
+++ b/packages/kbn-config/src/deprecation/unset_and_clean_empty_parent.ts
@@ -0,0 +1,42 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { get, unset } from 'lodash';
+
+/**
+ * Unsets the path and checks if the parent property is an empty object.
+ * If so, it removes the property from the config object (mutation is applied).
+ *
+ * @internal
+ */
+export const unsetAndCleanEmptyParent = (
+ config: Record<string, any>,
+ path: string | string[]
+): void => {
+ // 1. Unset the provided path
+ const didUnset = unset(config, path);
+
+ // Check if the unset actually removed anything.
+ // This way we avoid some CPU cycles when the previous action didn't apply any changes.
+ if (didUnset) {
+ // 2. Check if the parent property in the resulting object is an empty object
+ const pathArray = Array.isArray(path) ? path : path.split('.');
+ const parentPath = pathArray.slice(0, -1);
+ if (parentPath.length === 0) {
+ return;
+ }
+ const parentObj = get(config, parentPath);
+ if (
+ typeof parentObj === 'object' &&
+ parentObj !== null &&
+ Object.keys(parentObj).length === 0
+ ) {
+ unsetAndCleanEmptyParent(config, parentPath);
+ }
+ }
+};
diff --git a/packages/kbn-crypto/BUILD.bazel b/packages/kbn-crypto/BUILD.bazel
index 81ee6d770103c..f71c8b866fd5d 100644
--- a/packages/kbn-crypto/BUILD.bazel
+++ b/packages/kbn-crypto/BUILD.bazel
@@ -34,7 +34,7 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"@npm//@types/flot",
"@npm//@types/jest",
"@npm//@types/node",
diff --git a/packages/kbn-dev-utils/BUILD.bazel b/packages/kbn-dev-utils/BUILD.bazel
index 4fd99e0144cb6..89df1870a3cec 100644
--- a/packages/kbn-dev-utils/BUILD.bazel
+++ b/packages/kbn-dev-utils/BUILD.bazel
@@ -1,9 +1,10 @@
-load("@npm//@bazel/typescript:index.bzl", "ts_config", "ts_project")
-load("@build_bazel_rules_nodejs//:index.bzl", "js_library", "pkg_npm")
-load("//src/dev/bazel:index.bzl", "jsts_transpiler")
+load("@npm//@bazel/typescript:index.bzl", "ts_config")
+load("@build_bazel_rules_nodejs//:index.bzl", "js_library")
+load("//src/dev/bazel:index.bzl", "jsts_transpiler", "pkg_npm", "pkg_npm_types", "ts_project")
PKG_BASE_NAME = "kbn-dev-utils"
PKG_REQUIRE_NAME = "@kbn/dev-utils"
+TYPES_PKG_REQUIRE_NAME = "@types/kbn__dev-utils"
SOURCE_FILES = glob(
[
@@ -43,7 +44,6 @@ NPM_MODULE_EXTRA_FILES = [
]
RUNTIME_DEPS = [
- "//packages/kbn-std",
"//packages/kbn-utils",
"@npm//@babel/core",
"@npm//axios",
@@ -66,7 +66,6 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-std",
"//packages/kbn-utils",
"@npm//@babel/parser",
"@npm//@babel/types",
@@ -124,7 +123,7 @@ ts_project(
js_library(
name = PKG_BASE_NAME,
srcs = NPM_MODULE_EXTRA_FILES,
- deps = RUNTIME_DEPS + [":target_node", ":tsc_types"],
+ deps = RUNTIME_DEPS + [":target_node"],
package_name = PKG_REQUIRE_NAME,
visibility = ["//visibility:public"],
)
@@ -143,3 +142,20 @@ filegroup(
],
visibility = ["//visibility:public"],
)
+
+pkg_npm_types(
+ name = "npm_module_types",
+ srcs = SRCS,
+ deps = [":tsc_types"],
+ package_name = TYPES_PKG_REQUIRE_NAME,
+ tsconfig = ":tsconfig",
+ visibility = ["//visibility:public"],
+)
+
+filegroup(
+ name = "build_types",
+ srcs = [
+ ":npm_module_types",
+ ],
+ visibility = ["//visibility:public"],
+)
diff --git a/packages/kbn-dev-utils/package.json b/packages/kbn-dev-utils/package.json
index 9d6e6dde86fac..ab4f489e7d345 100644
--- a/packages/kbn-dev-utils/package.json
+++ b/packages/kbn-dev-utils/package.json
@@ -4,7 +4,6 @@
"private": true,
"license": "SSPL-1.0 OR Elastic License 2.0",
"main": "./target_node/index.js",
- "types": "./target_types/index.d.ts",
"kibana": {
"devOnly": true
}
diff --git a/packages/kbn-dev-utils/src/index.ts b/packages/kbn-dev-utils/src/index.ts
index 381e99ac677f5..9b207ad9e9966 100644
--- a/packages/kbn-dev-utils/src/index.ts
+++ b/packages/kbn-dev-utils/src/index.ts
@@ -6,7 +6,6 @@
* Side Public License, v 1.
*/
-export * from '@kbn/utils';
export { withProcRunner, ProcRunner } from './proc_runner';
export * from './tooling_log';
export * from './serializers';
diff --git a/packages/kbn-dev-utils/src/tooling_log/__snapshots__/tooling_log_text_writer.test.ts.snap b/packages/kbn-dev-utils/src/tooling_log/__snapshots__/tooling_log_text_writer.test.ts.snap
index 7ff982acafbe4..5fa074d4c7739 100644
--- a/packages/kbn-dev-utils/src/tooling_log/__snapshots__/tooling_log_text_writer.test.ts.snap
+++ b/packages/kbn-dev-utils/src/tooling_log/__snapshots__/tooling_log_text_writer.test.ts.snap
@@ -170,6 +170,14 @@ exports[`level:warning/type:warning snapshots: output 1`] = `
"
`;
+exports[`never ignores write messages from the kibana elasticsearch.deprecation logger context 1`] = `
+" │[elasticsearch.deprecation]
+ │{ foo: { bar: { '1': [Array] } }, bar: { bar: { '1': [Array] } } }
+ │
+ │Infinity
+"
+`;
+
exports[`throws error if created with invalid level 1`] = `"Invalid log level \\"foo\\" (expected one of silent,error,warning,success,info,debug,verbose)"`;
exports[`throws error if writeTo config is not defined or doesn't have a write method 1`] = `"ToolingLogTextWriter requires the \`writeTo\` option be set to a stream (like process.stdout)"`;
diff --git a/packages/kbn-dev-utils/src/tooling_log/tooling_log_text_writer.test.ts b/packages/kbn-dev-utils/src/tooling_log/tooling_log_text_writer.test.ts
index b4668f29b6e21..fbccfdcdf6ac0 100644
--- a/packages/kbn-dev-utils/src/tooling_log/tooling_log_text_writer.test.ts
+++ b/packages/kbn-dev-utils/src/tooling_log/tooling_log_text_writer.test.ts
@@ -88,3 +88,55 @@ it('formats %s patterns and indents multi-line messages correctly', () => {
const output = write.mock.calls.reduce((acc, chunk) => `${acc}${chunk}`, '');
expect(output).toMatchSnapshot();
});
+
+it('does not write messages from sources in ignoreSources', () => {
+ const write = jest.fn();
+ const writer = new ToolingLogTextWriter({
+ ignoreSources: ['myIgnoredSource'],
+ level: 'debug',
+ writeTo: {
+ write,
+ },
+ });
+
+ writer.write({
+ source: 'myIgnoredSource',
+ type: 'success',
+ indent: 10,
+ args: [
+ '%s\n%O\n\n%d',
+ 'foo bar',
+ { foo: { bar: { 1: [1, 2, 3] } }, bar: { bar: { 1: [1, 2, 3] } } },
+ Infinity,
+ ],
+ });
+
+ const output = write.mock.calls.reduce((acc, chunk) => `${acc}${chunk}`, '');
+ expect(output).toEqual('');
+});
+
+it('never ignores write messages from the kibana elasticsearch.deprecation logger context', () => {
+ const write = jest.fn();
+ const writer = new ToolingLogTextWriter({
+ ignoreSources: ['myIgnoredSource'],
+ level: 'debug',
+ writeTo: {
+ write,
+ },
+ });
+
+ writer.write({
+ source: 'myIgnoredSource',
+ type: 'write',
+ indent: 10,
+ args: [
+ '%s\n%O\n\n%d',
+ '[elasticsearch.deprecation]',
+ { foo: { bar: { 1: [1, 2, 3] } }, bar: { bar: { 1: [1, 2, 3] } } },
+ Infinity,
+ ],
+ });
+
+ const output = write.mock.calls.reduce((acc, chunk) => `${acc}${chunk}`, '');
+ expect(output).toMatchSnapshot();
+});
diff --git a/packages/kbn-dev-utils/src/tooling_log/tooling_log_text_writer.ts b/packages/kbn-dev-utils/src/tooling_log/tooling_log_text_writer.ts
index 660dae3fa1f55..4fe33241cf77e 100644
--- a/packages/kbn-dev-utils/src/tooling_log/tooling_log_text_writer.ts
+++ b/packages/kbn-dev-utils/src/tooling_log/tooling_log_text_writer.ts
@@ -92,7 +92,15 @@ export class ToolingLogTextWriter implements Writer {
}
if (this.ignoreSources && msg.source && this.ignoreSources.includes(msg.source)) {
- return false;
+ if (msg.type === 'write') {
+ const txt = format(msg.args[0], ...msg.args.slice(1));
+ // Ensure that Elasticsearch deprecation log messages from Kibana aren't ignored
+ if (!/elasticsearch\.deprecation/.test(txt)) {
+ return false;
+ }
+ } else {
+ return false;
+ }
}
const prefix = has(MSG_PREFIXES, msg.type) ? MSG_PREFIXES[msg.type] : '';
diff --git a/packages/kbn-docs-utils/BUILD.bazel b/packages/kbn-docs-utils/BUILD.bazel
index 37e5bb06377cc..edfd3ee96c181 100644
--- a/packages/kbn-docs-utils/BUILD.bazel
+++ b/packages/kbn-docs-utils/BUILD.bazel
@@ -38,7 +38,7 @@ RUNTIME_DEPS = [
TYPES_DEPS = [
"//packages/kbn-config:npm_module_types",
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"//packages/kbn-utils",
"@npm//ts-morph",
"@npm//@types/dedent",
diff --git a/packages/kbn-docs-utils/src/api_docs/build_api_docs_cli.ts b/packages/kbn-docs-utils/src/api_docs/build_api_docs_cli.ts
index 2e4ce08540714..3c9137b260a3e 100644
--- a/packages/kbn-docs-utils/src/api_docs/build_api_docs_cli.ts
+++ b/packages/kbn-docs-utils/src/api_docs/build_api_docs_cli.ts
@@ -9,7 +9,8 @@
import Fs from 'fs';
import Path from 'path';
-import { REPO_ROOT, run, CiStatsReporter, createFlagError } from '@kbn/dev-utils';
+import { run, CiStatsReporter, createFlagError } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { Project } from 'ts-morph';
import { writePluginDocs } from './mdx/write_plugin_mdx_docs';
@@ -241,7 +242,7 @@ export function runBuildApiDocsCli() {
boolean: ['references'],
help: `
--plugin Optionally, run for only a specific plugin
- --stats Optionally print API stats. Must be one or more of: any, comments or exports.
+ --stats Optionally print API stats. Must be one or more of: any, comments or exports.
--references Collect references for API items
`,
},
diff --git a/packages/kbn-docs-utils/src/api_docs/find_plugins.ts b/packages/kbn-docs-utils/src/api_docs/find_plugins.ts
index 78cba3f3a9476..774452a6f1f9f 100644
--- a/packages/kbn-docs-utils/src/api_docs/find_plugins.ts
+++ b/packages/kbn-docs-utils/src/api_docs/find_plugins.ts
@@ -12,7 +12,8 @@ import globby from 'globby';
import loadJsonFile from 'load-json-file';
import { getPluginSearchPaths } from '@kbn/config';
-import { simpleKibanaPlatformPluginDiscovery, REPO_ROOT } from '@kbn/dev-utils';
+import { simpleKibanaPlatformPluginDiscovery } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { ApiScope, PluginOrPackage } from './types';
export function findPlugins(): PluginOrPackage[] {
diff --git a/packages/kbn-es-archiver/BUILD.bazel b/packages/kbn-es-archiver/BUILD.bazel
index 2dc311ed74406..da8aaf913ab67 100644
--- a/packages/kbn-es-archiver/BUILD.bazel
+++ b/packages/kbn-es-archiver/BUILD.bazel
@@ -1,9 +1,10 @@
-load("@npm//@bazel/typescript:index.bzl", "ts_config", "ts_project")
-load("@build_bazel_rules_nodejs//:index.bzl", "js_library", "pkg_npm")
-load("//src/dev/bazel:index.bzl", "jsts_transpiler")
+load("@npm//@bazel/typescript:index.bzl", "ts_config")
+load("@build_bazel_rules_nodejs//:index.bzl", "js_library")
+load("//src/dev/bazel:index.bzl", "jsts_transpiler", "pkg_npm", "pkg_npm_types", "ts_project")
PKG_BASE_NAME = "kbn-es-archiver"
PKG_REQUIRE_NAME = "@kbn/es-archiver"
+TYPES_PKG_REQUIRE_NAME = "@types/kbn__es-archiver"
SOURCE_FILES = glob(
[
@@ -43,7 +44,7 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"//packages/kbn-test",
"//packages/kbn-utils",
"@npm//@elastic/elasticsearch",
@@ -90,7 +91,7 @@ ts_project(
js_library(
name = PKG_BASE_NAME,
srcs = NPM_MODULE_EXTRA_FILES,
- deps = RUNTIME_DEPS + [":target_node", ":tsc_types"],
+ deps = RUNTIME_DEPS + [":target_node"],
package_name = PKG_REQUIRE_NAME,
visibility = ["//visibility:public"],
)
@@ -109,3 +110,20 @@ filegroup(
],
visibility = ["//visibility:public"],
)
+
+pkg_npm_types(
+ name = "npm_module_types",
+ srcs = SRCS,
+ deps = [":tsc_types"],
+ package_name = TYPES_PKG_REQUIRE_NAME,
+ tsconfig = ":tsconfig",
+ visibility = ["//visibility:public"],
+)
+
+filegroup(
+ name = "build_types",
+ srcs = [
+ ":npm_module_types",
+ ],
+ visibility = ["//visibility:public"],
+)
diff --git a/packages/kbn-es-archiver/package.json b/packages/kbn-es-archiver/package.json
index 0cce08eaf0352..bff3990a0c1bc 100644
--- a/packages/kbn-es-archiver/package.json
+++ b/packages/kbn-es-archiver/package.json
@@ -4,7 +4,6 @@
"license": "SSPL-1.0 OR Elastic License 2.0",
"private": "true",
"main": "target_node/index.js",
- "types": "target_types/index.d.ts",
"kibana": {
"devOnly": true
}
diff --git a/packages/kbn-es-archiver/src/actions/load.ts b/packages/kbn-es-archiver/src/actions/load.ts
index 0a7235c566b52..0a318f895deb3 100644
--- a/packages/kbn-es-archiver/src/actions/load.ts
+++ b/packages/kbn-es-archiver/src/actions/load.ts
@@ -9,7 +9,8 @@
import { resolve, relative } from 'path';
import { createReadStream } from 'fs';
import { Readable } from 'stream';
-import { ToolingLog, REPO_ROOT } from '@kbn/dev-utils';
+import { ToolingLog } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { KbnClient } from '@kbn/test';
import type { Client } from '@elastic/elasticsearch';
import { createPromiseFromStreams, concatStreamProviders } from '@kbn/utils';
@@ -85,15 +86,17 @@ export async function loadAction({
progress.deactivate();
const result = stats.toJSON();
+ const indicesWithDocs: string[] = [];
for (const [index, { docs }] of Object.entries(result)) {
if (docs && docs.indexed > 0) {
log.info('[%s] Indexed %d docs into %j', name, docs.indexed, index);
+ indicesWithDocs.push(index);
}
}
await client.indices.refresh(
{
- index: '_all',
+ index: indicesWithDocs.join(','),
allow_no_indices: true,
},
{
diff --git a/packages/kbn-es-archiver/src/actions/rebuild_all.ts b/packages/kbn-es-archiver/src/actions/rebuild_all.ts
index 360fdb438f2db..27fcae0c7cec5 100644
--- a/packages/kbn-es-archiver/src/actions/rebuild_all.ts
+++ b/packages/kbn-es-archiver/src/actions/rebuild_all.ts
@@ -10,8 +10,8 @@ import { resolve, relative } from 'path';
import { Stats, createReadStream, createWriteStream } from 'fs';
import { stat, rename } from 'fs/promises';
import { Readable, Writable } from 'stream';
-import { ToolingLog, REPO_ROOT } from '@kbn/dev-utils';
-import { createPromiseFromStreams } from '@kbn/utils';
+import { ToolingLog } from '@kbn/dev-utils';
+import { createPromiseFromStreams, REPO_ROOT } from '@kbn/utils';
import {
prioritizeMappings,
readDirectory,
diff --git a/packages/kbn-es-archiver/src/actions/save.ts b/packages/kbn-es-archiver/src/actions/save.ts
index 9cb5be05ac060..e5e3f06b8436d 100644
--- a/packages/kbn-es-archiver/src/actions/save.ts
+++ b/packages/kbn-es-archiver/src/actions/save.ts
@@ -10,8 +10,8 @@ import { resolve, relative } from 'path';
import { createWriteStream, mkdirSync } from 'fs';
import { Readable, Writable } from 'stream';
import type { Client } from '@elastic/elasticsearch';
-import { ToolingLog, REPO_ROOT } from '@kbn/dev-utils';
-import { createListStream, createPromiseFromStreams } from '@kbn/utils';
+import { ToolingLog } from '@kbn/dev-utils';
+import { createListStream, createPromiseFromStreams, REPO_ROOT } from '@kbn/utils';
import {
createStats,
diff --git a/packages/kbn-es-archiver/src/actions/unload.ts b/packages/kbn-es-archiver/src/actions/unload.ts
index 1c5f4cd5d7d03..22830b7289174 100644
--- a/packages/kbn-es-archiver/src/actions/unload.ts
+++ b/packages/kbn-es-archiver/src/actions/unload.ts
@@ -10,9 +10,9 @@ import { resolve, relative } from 'path';
import { createReadStream } from 'fs';
import { Readable, Writable } from 'stream';
import type { Client } from '@elastic/elasticsearch';
-import { ToolingLog, REPO_ROOT } from '@kbn/dev-utils';
+import { ToolingLog } from '@kbn/dev-utils';
import { KbnClient } from '@kbn/test';
-import { createPromiseFromStreams } from '@kbn/utils';
+import { createPromiseFromStreams, REPO_ROOT } from '@kbn/utils';
import {
isGzip,
diff --git a/packages/kbn-es-archiver/src/es_archiver.ts b/packages/kbn-es-archiver/src/es_archiver.ts
index 354197a98fa46..e13e20f25a703 100644
--- a/packages/kbn-es-archiver/src/es_archiver.ts
+++ b/packages/kbn-es-archiver/src/es_archiver.ts
@@ -10,7 +10,8 @@ import Fs from 'fs';
import Path from 'path';
import type { Client } from '@elastic/elasticsearch';
-import { ToolingLog, REPO_ROOT } from '@kbn/dev-utils';
+import { ToolingLog } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { KbnClient } from '@kbn/test';
import {
diff --git a/packages/kbn-es-archiver/src/lib/docs/generate_doc_records_stream.test.ts b/packages/kbn-es-archiver/src/lib/docs/generate_doc_records_stream.test.ts
index ae21649690a99..2590074a25411 100644
--- a/packages/kbn-es-archiver/src/lib/docs/generate_doc_records_stream.test.ts
+++ b/packages/kbn-es-archiver/src/lib/docs/generate_doc_records_stream.test.ts
@@ -6,13 +6,14 @@
* Side Public License, v 1.
*/
+import { ToolingLog } from '@kbn/dev-utils';
+
import {
createListStream,
createPromiseFromStreams,
createConcatStream,
createMapStream,
- ToolingLog,
-} from '@kbn/dev-utils';
+} from '@kbn/utils';
import { createGenerateDocRecordsStream } from './generate_doc_records_stream';
import { Progress } from '../progress';
diff --git a/packages/kbn-es-archiver/src/lib/docs/index_doc_records_stream.test.ts b/packages/kbn-es-archiver/src/lib/docs/index_doc_records_stream.test.ts
index bcf28a4976a1c..9c0ff4a8f91ec 100644
--- a/packages/kbn-es-archiver/src/lib/docs/index_doc_records_stream.test.ts
+++ b/packages/kbn-es-archiver/src/lib/docs/index_doc_records_stream.test.ts
@@ -6,12 +6,9 @@
* Side Public License, v 1.
*/
-import {
- createListStream,
- createPromiseFromStreams,
- ToolingLog,
- createRecursiveSerializer,
-} from '@kbn/dev-utils';
+import { ToolingLog, createRecursiveSerializer } from '@kbn/dev-utils';
+
+import { createListStream, createPromiseFromStreams } from '@kbn/utils';
import { Progress } from '../progress';
import { createIndexDocRecordsStream } from './index_doc_records_stream';
diff --git a/packages/kbn-es-query/BUILD.bazel b/packages/kbn-es-query/BUILD.bazel
index 70d8d659c99fe..86f3d3ccc13a8 100644
--- a/packages/kbn-es-query/BUILD.bazel
+++ b/packages/kbn-es-query/BUILD.bazel
@@ -1,10 +1,11 @@
-load("@npm//@bazel/typescript:index.bzl", "ts_config", "ts_project")
+load("@npm//@bazel/typescript:index.bzl", "ts_config")
load("@npm//peggy:index.bzl", "peggy")
-load("@build_bazel_rules_nodejs//:index.bzl", "js_library", "pkg_npm")
-load("//src/dev/bazel:index.bzl", "jsts_transpiler")
+load("@build_bazel_rules_nodejs//:index.bzl", "js_library")
+load("//src/dev/bazel:index.bzl", "jsts_transpiler", "pkg_npm", "pkg_npm_types", "ts_project")
PKG_BASE_NAME = "kbn-es-query"
PKG_REQUIRE_NAME = "@kbn/es-query"
+TYPES_PKG_REQUIRE_NAME = "@types/kbn__es-query"
SOURCE_FILES = glob(
[
@@ -104,7 +105,7 @@ ts_project(
js_library(
name = PKG_BASE_NAME,
srcs = NPM_MODULE_EXTRA_FILES + [":grammar"],
- deps = RUNTIME_DEPS + [":target_node", ":target_web", ":tsc_types"],
+ deps = RUNTIME_DEPS + [":target_node", ":target_web"],
package_name = PKG_REQUIRE_NAME,
visibility = ["//visibility:public"],
)
@@ -123,3 +124,20 @@ filegroup(
],
visibility = ["//visibility:public"],
)
+
+pkg_npm_types(
+ name = "npm_module_types",
+ srcs = SRCS,
+ deps = [":tsc_types"],
+ package_name = TYPES_PKG_REQUIRE_NAME,
+ tsconfig = ":tsconfig",
+ visibility = ["//visibility:public"],
+)
+
+filegroup(
+ name = "build_types",
+ srcs = [
+ ":npm_module_types",
+ ],
+ visibility = ["//visibility:public"],
+)
diff --git a/packages/kbn-es-query/package.json b/packages/kbn-es-query/package.json
index 335ef61b8b360..b317ce4ca4c95 100644
--- a/packages/kbn-es-query/package.json
+++ b/packages/kbn-es-query/package.json
@@ -2,7 +2,6 @@
"name": "@kbn/es-query",
"browser": "./target_web/index.js",
"main": "./target_node/index.js",
- "types": "./target_types/index.d.ts",
"version": "1.0.0",
"license": "SSPL-1.0 OR Elastic License 2.0",
"private": true
diff --git a/packages/kbn-es-query/src/kuery/index.ts b/packages/kbn-es-query/src/kuery/index.ts
index 868904125dc44..13039956916cb 100644
--- a/packages/kbn-es-query/src/kuery/index.ts
+++ b/packages/kbn-es-query/src/kuery/index.ts
@@ -23,4 +23,5 @@ export const toElasticsearchQuery = (...params: Parameters<typeof astToElasticsearchQuery>) => {
it('should return artifact metadata for the correct architecture', async () => {
const artifact = await Artifact.getSnapshot('oss', MOCK_VERSION, log);
- expect(artifact.getFilename()).toEqual(MOCK_FILENAME + `-${ARCHITECTURE}.oss`);
+ expect(artifact.spec.filename).toEqual(MOCK_FILENAME + `-${ARCHITECTURE}.oss`);
});
});
@@ -182,7 +182,7 @@ describe('Artifact', () => {
describe('with latest unverified snapshot', () => {
beforeEach(() => {
- process.env.KBN_ES_SNAPSHOT_USE_UNVERIFIED = 1;
+ process.env.KBN_ES_SNAPSHOT_USE_UNVERIFIED = '1';
mockFetch(MOCKS.valid);
});
diff --git a/packages/kbn-es/src/artifact.js b/packages/kbn-es/src/artifact.ts
similarity index 65%
rename from packages/kbn-es/src/artifact.js
rename to packages/kbn-es/src/artifact.ts
index 0fa2c7a1727d0..9c5935c96e8cd 100644
--- a/packages/kbn-es/src/artifact.js
+++ b/packages/kbn-es/src/artifact.ts
@@ -6,25 +6,69 @@
* Side Public License, v 1.
*/
-const fetch = require('node-fetch');
-const AbortController = require('abort-controller');
-const fs = require('fs');
-const { promisify } = require('util');
-const { pipeline, Transform } = require('stream');
-const chalk = require('chalk');
-const { createHash } = require('crypto');
-const path = require('path');
+import fs from 'fs';
+import { promisify } from 'util';
+import path from 'path';
+import { createHash } from 'crypto';
+import { pipeline, Transform } from 'stream';
+import { setTimeout } from 'timers/promises';
+
+import fetch, { Headers } from 'node-fetch';
+import AbortController from 'abort-controller';
+import chalk from 'chalk';
+import { ToolingLog } from '@kbn/dev-utils';
+
+import { cache } from './utils/cache';
+import { resolveCustomSnapshotUrl } from './custom_snapshots';
+import { createCliError, isCliError } from './errors';
const asyncPipeline = promisify(pipeline);
const DAILY_SNAPSHOTS_BASE_URL = 'https://storage.googleapis.com/kibana-ci-es-snapshots-daily';
const PERMANENT_SNAPSHOTS_BASE_URL =
'https://storage.googleapis.com/kibana-ci-es-snapshots-permanent';
-const { cache } = require('./utils');
-const { resolveCustomSnapshotUrl } = require('./custom_snapshots');
-const { createCliError, isCliError } = require('./errors');
+type ChecksumType = 'sha512';
+export type ArtifactLicense = 'oss' | 'basic' | 'trial';
+
+interface ArtifactManifest {
+ id: string;
+ bucket: string;
+ branch: string;
+ sha: string;
+ sha_short: string;
+ version: string;
+ generated: string;
+ archives: Array<{
+ filename: string;
+ checksum: string;
+ url: string;
+ version: string;
+ platform: string;
+ architecture: string;
+ license: string;
+ }>;
+}
+
+export interface ArtifactSpec {
+ url: string;
+ checksumUrl: string;
+ checksumType: ChecksumType;
+ filename: string;
+}
+
+interface ArtifactDownloaded {
+ cached: false;
+ checksum: string;
+ etag?: string;
+ contentLength: number;
+ first500Bytes: Buffer;
+ headers: Headers;
+}
+interface ArtifactCached {
+ cached: true;
+}
-function getChecksumType(checksumUrl) {
+function getChecksumType(checksumUrl: string): ChecksumType {
if (checksumUrl.endsWith('.sha512')) {
return 'sha512';
}
@@ -32,15 +76,18 @@ function getChecksumType(checksumUrl) {
throw new Error(`unable to determine checksum type: ${checksumUrl}`);
}
-function headersToString(headers, indent = '') {
+function headersToString(headers: Headers, indent = '') {
return [...headers.entries()].reduce(
(acc, [key, value]) => `${acc}\n${indent}${key}: ${value}`,
''
);
}
-async function retry(log, fn) {
- async function doAttempt(attempt) {
+async function retry(log: ToolingLog, fn: () => Promise): Promise {
+ let attempt = 0;
+ while (true) {
+ attempt += 1;
+
try {
return await fn();
} catch (error) {
@@ -49,13 +96,10 @@ async function retry(log, fn) {
}
log.warning('...failure, retrying in 5 seconds:', error.message);
- await new Promise((resolve) => setTimeout(resolve, 5000));
+ await setTimeout(5000);
log.info('...retrying');
- return await doAttempt(attempt + 1);
}
}
-
- return await doAttempt(1);
}
// Setting this flag provides an easy way to run the latest un-promoted snapshot without having to look it up
@@ -63,7 +107,7 @@ function shouldUseUnverifiedSnapshot() {
return !!process.env.KBN_ES_SNAPSHOT_USE_UNVERIFIED;
}
-async function fetchSnapshotManifest(url, log) {
+async function fetchSnapshotManifest(url: string, log: ToolingLog) {
log.info('Downloading snapshot manifest from %s', chalk.bold(url));
const abc = new AbortController();
@@ -73,7 +117,11 @@ async function fetchSnapshotManifest(url, log) {
return { abc, resp, json };
}
-async function getArtifactSpecForSnapshot(urlVersion, license, log) {
+async function getArtifactSpecForSnapshot(
+ urlVersion: string,
+ license: string,
+ log: ToolingLog
+): Promise {
const desiredVersion = urlVersion.replace('-SNAPSHOT', '');
const desiredLicense = license === 'oss' ? 'oss' : 'default';
@@ -103,17 +151,16 @@ async function getArtifactSpecForSnapshot(urlVersion, license, log) {
throw new Error(`Unable to read snapshot manifest: ${resp.statusText}\n ${json}`);
}
- const manifest = JSON.parse(json);
-
+ const manifest: ArtifactManifest = JSON.parse(json);
const platform = process.platform === 'win32' ? 'windows' : process.platform;
const arch = process.arch === 'arm64' ? 'aarch64' : 'x86_64';
const archive = manifest.archives.find(
- (archive) =>
- archive.version === desiredVersion &&
- archive.platform === platform &&
- archive.license === desiredLicense &&
- archive.architecture === arch
+ (a) =>
+ a.version === desiredVersion &&
+ a.platform === platform &&
+ a.license === desiredLicense &&
+ a.architecture === arch
);
if (!archive) {
@@ -130,93 +177,65 @@ async function getArtifactSpecForSnapshot(urlVersion, license, log) {
};
}
-exports.Artifact = class Artifact {
+export class Artifact {
/**
* Fetch an Artifact from the Artifact API for a license level and version
- * @param {('oss'|'basic'|'trial')} license
- * @param {string} version
- * @param {ToolingLog} log
*/
- static async getSnapshot(license, version, log) {
+ static async getSnapshot(license: ArtifactLicense, version: string, log: ToolingLog) {
const urlVersion = `${encodeURIComponent(version)}-SNAPSHOT`;
const customSnapshotArtifactSpec = resolveCustomSnapshotUrl(urlVersion, license);
if (customSnapshotArtifactSpec) {
- return new Artifact(customSnapshotArtifactSpec, log);
+ return new Artifact(log, customSnapshotArtifactSpec);
}
const artifactSpec = await getArtifactSpecForSnapshot(urlVersion, license, log);
- return new Artifact(artifactSpec, log);
+ return new Artifact(log, artifactSpec);
}
/**
* Fetch an Artifact from the Elasticsearch past releases url
- * @param {string} url
- * @param {ToolingLog} log
*/
- static async getArchive(url, log) {
+ static async getArchive(url: string, log: ToolingLog) {
const shaUrl = `${url}.sha512`;
- const artifactSpec = {
- url: url,
+ return new Artifact(log, {
+ url,
filename: path.basename(url),
checksumUrl: shaUrl,
checksumType: getChecksumType(shaUrl),
- };
-
- return new Artifact(artifactSpec, log);
- }
-
- constructor(spec, log) {
- this._spec = spec;
- this._log = log;
- }
-
- getUrl() {
- return this._spec.url;
- }
-
- getChecksumUrl() {
- return this._spec.checksumUrl;
+ });
}
- getChecksumType() {
- return this._spec.checksumType;
- }
-
- getFilename() {
- return this._spec.filename;
- }
+ constructor(private readonly log: ToolingLog, public readonly spec: ArtifactSpec) {}
/**
* Download the artifact to disk, skips the download if the cache is
* up-to-date, verifies checksum when downloaded
- * @param {string} dest
- * @return {Promise}
*/
- async download(dest, { useCached = false }) {
- await retry(this._log, async () => {
+ async download(dest: string, { useCached = false }: { useCached?: boolean } = {}) {
+ await retry(this.log, async () => {
const cacheMeta = cache.readMeta(dest);
const tmpPath = `${dest}.tmp`;
if (useCached) {
if (cacheMeta.exists) {
- this._log.info(
+ this.log.info(
'use-cached passed, forcing to use existing snapshot',
chalk.bold(cacheMeta.ts)
);
return;
} else {
- this._log.info('use-cached passed but no cached snapshot found. Continuing to download');
+ this.log.info('use-cached passed but no cached snapshot found. Continuing to download');
}
}
- const artifactResp = await this._download(tmpPath, cacheMeta.etag, cacheMeta.ts);
+ const artifactResp = await this.fetchArtifact(tmpPath, cacheMeta.etag, cacheMeta.ts);
if (artifactResp.cached) {
return;
}
- await this._verifyChecksum(artifactResp);
+ await this.verifyChecksum(artifactResp);
// cache the etag for future downloads
cache.writeMeta(dest, { etag: artifactResp.etag });
@@ -228,18 +247,18 @@ exports.Artifact = class Artifact {
/**
* Fetch the artifact with an etag
- * @param {string} tmpPath
- * @param {string} etag
- * @param {string} ts
- * @return {{ cached: true }|{ checksum: string, etag: string, first500Bytes: Buffer }}
*/
- async _download(tmpPath, etag, ts) {
- const url = this.getUrl();
+ private async fetchArtifact(
+ tmpPath: string,
+ etag: string,
+ ts: string
+ ): Promise {
+ const url = this.spec.url;
if (etag) {
- this._log.info('verifying cache of %s', chalk.bold(url));
+ this.log.info('verifying cache of %s', chalk.bold(url));
} else {
- this._log.info('downloading artifact from %s', chalk.bold(url));
+ this.log.info('downloading artifact from %s', chalk.bold(url));
}
const abc = new AbortController();
@@ -251,7 +270,7 @@ exports.Artifact = class Artifact {
});
if (resp.status === 304) {
- this._log.info('etags match, reusing cache from %s', chalk.bold(ts));
+ this.log.info('etags match, reusing cache from %s', chalk.bold(ts));
abc.abort();
return {
@@ -270,10 +289,10 @@ exports.Artifact = class Artifact {
}
if (etag) {
- this._log.info('cache invalid, redownloading');
+ this.log.info('cache invalid, redownloading');
}
- const hash = createHash(this.getChecksumType());
+ const hash = createHash(this.spec.checksumType);
let first500Bytes = Buffer.alloc(0);
let contentLength = 0;
@@ -300,8 +319,9 @@ exports.Artifact = class Artifact {
);
return {
+ cached: false,
checksum: hash.digest('hex'),
- etag: resp.headers.get('etag'),
+ etag: resp.headers.get('etag') ?? undefined,
contentLength,
first500Bytes,
headers: resp.headers,
@@ -310,14 +330,12 @@ exports.Artifact = class Artifact {
/**
* Verify the checksum of the downloaded artifact with the checksum at checksumUrl
- * @param {{ checksum: string, contentLength: number, first500Bytes: Buffer }} artifactResp
- * @return {Promise}
*/
- async _verifyChecksum(artifactResp) {
- this._log.info('downloading artifact checksum from %s', chalk.bold(this.getChecksumUrl()));
+ private async verifyChecksum(artifactResp: ArtifactDownloaded) {
+ this.log.info('downloading artifact checksum from %s', chalk.bold(this.spec.checksumUrl));
const abc = new AbortController();
- const resp = await fetch(this.getChecksumUrl(), {
+ const resp = await fetch(this.spec.checksumUrl, {
signal: abc.signal,
});
@@ -338,7 +356,7 @@ exports.Artifact = class Artifact {
const lenString = `${len} / ${artifactResp.contentLength}`;
throw createCliError(
- `artifact downloaded from ${this.getUrl()} does not match expected checksum\n` +
+ `artifact downloaded from ${this.spec.url} does not match expected checksum\n` +
` expected: ${expectedChecksum}\n` +
` received: ${artifactResp.checksum}\n` +
` headers: ${headersToString(artifactResp.headers, ' ')}\n` +
@@ -346,6 +364,6 @@ exports.Artifact = class Artifact {
);
}
- this._log.info('checksum verified');
+ this.log.info('checksum verified');
}
-};
+}
diff --git a/packages/kbn-es/src/custom_snapshots.js b/packages/kbn-es/src/custom_snapshots.ts
similarity index 82%
rename from packages/kbn-es/src/custom_snapshots.js
rename to packages/kbn-es/src/custom_snapshots.ts
index 9dd8097244947..f3e6d3ecaf857 100644
--- a/packages/kbn-es/src/custom_snapshots.js
+++ b/packages/kbn-es/src/custom_snapshots.ts
@@ -6,13 +6,15 @@
* Side Public License, v 1.
*/
-const { basename } = require('path');
+import Path from 'path';
-function isVersionFlag(a) {
+import type { ArtifactSpec } from './artifact';
+
+function isVersionFlag(a: string) {
return a.startsWith('--version');
}
-function getCustomSnapshotUrl() {
+export function getCustomSnapshotUrl() {
// force use of manually created snapshots until ReindexPutMappings fix
if (
!process.env.ES_SNAPSHOT_MANIFEST &&
@@ -28,7 +30,10 @@ function getCustomSnapshotUrl() {
}
}
-function resolveCustomSnapshotUrl(urlVersion, license) {
+export function resolveCustomSnapshotUrl(
+ urlVersion: string,
+ license: string
+): ArtifactSpec | undefined {
const customSnapshotUrl = getCustomSnapshotUrl();
if (!customSnapshotUrl) {
@@ -48,8 +53,6 @@ function resolveCustomSnapshotUrl(urlVersion, license) {
url: overrideUrl,
checksumUrl: overrideUrl + '.sha512',
checksumType: 'sha512',
- filename: basename(overrideUrl),
+ filename: Path.basename(overrideUrl),
};
}
-
-module.exports = { getCustomSnapshotUrl, resolveCustomSnapshotUrl };
diff --git a/packages/kbn-es/src/errors.js b/packages/kbn-es/src/errors.js
deleted file mode 100644
index 87490168bf5ee..0000000000000
--- a/packages/kbn-es/src/errors.js
+++ /dev/null
@@ -1,17 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-exports.createCliError = function (message) {
- const error = new Error(message);
- error.isCliError = true;
- return error;
-};
-
-exports.isCliError = function (error) {
- return error && error.isCliError;
-};
diff --git a/packages/kbn-es/src/errors.ts b/packages/kbn-es/src/errors.ts
new file mode 100644
index 0000000000000..a0c526dc48a9c
--- /dev/null
+++ b/packages/kbn-es/src/errors.ts
@@ -0,0 +1,25 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+interface CliError extends Error {
+ isCliError: boolean;
+}
+
+export function createCliError(message: string) {
+ return Object.assign(new Error(message), {
+ isCliError: true,
+ });
+}
+
+function isObj(x: unknown): x is Record {
+ return typeof x === 'object' && x !== null;
+}
+
+export function isCliError(error: unknown): error is CliError {
+ return isObj(error) && error.isCliError === true;
+}
diff --git a/packages/kbn-es/src/index.js b/packages/kbn-es/src/index.ts
similarity index 72%
rename from packages/kbn-es/src/index.js
rename to packages/kbn-es/src/index.ts
index 3b12de68234fa..68fd931794c0c 100644
--- a/packages/kbn-es/src/index.js
+++ b/packages/kbn-es/src/index.ts
@@ -6,5 +6,7 @@
* Side Public License, v 1.
*/
-exports.run = require('./cli').run;
-exports.Cluster = require('./cluster').Cluster;
+// @ts-expect-error not typed yet
+export { run } from './cli';
+// @ts-expect-error not typed yet
+export { Cluster } from './cluster';
diff --git a/src/plugins/discover/public/utils/get_single_doc_url.ts b/packages/kbn-es/src/install/index.ts
similarity index 65%
rename from src/plugins/discover/public/utils/get_single_doc_url.ts
rename to packages/kbn-es/src/install/index.ts
index 913463e6d44a4..e827dee2247f9 100644
--- a/src/plugins/discover/public/utils/get_single_doc_url.ts
+++ b/packages/kbn-es/src/install/index.ts
@@ -6,6 +6,6 @@
* Side Public License, v 1.
*/
-export const getSingleDocUrl = (indexPatternId: string, rowIndex: string, rowId: string) => {
- return `/app/discover#/doc/${indexPatternId}/${rowIndex}?id=${encodeURIComponent(rowId)}`;
-};
+export { installArchive } from './install_archive';
+export { installSnapshot, downloadSnapshot } from './install_snapshot';
+export { installSource } from './install_source';
diff --git a/packages/kbn-es/src/install/archive.js b/packages/kbn-es/src/install/install_archive.ts
similarity index 64%
rename from packages/kbn-es/src/install/archive.js
rename to packages/kbn-es/src/install/install_archive.ts
index 76db5a4427e6d..ee04d9e4b62b5 100644
--- a/packages/kbn-es/src/install/archive.js
+++ b/packages/kbn-es/src/install/install_archive.ts
@@ -6,29 +6,40 @@
* Side Public License, v 1.
*/
-const fs = require('fs');
-const path = require('path');
-const chalk = require('chalk');
-const execa = require('execa');
-const del = require('del');
-const url = require('url');
-const { extract } = require('@kbn/dev-utils');
-const { log: defaultLog } = require('../utils');
-const { BASE_PATH, ES_CONFIG, ES_KEYSTORE_BIN } = require('../paths');
-const { Artifact } = require('../artifact');
-const { parseSettings, SettingsFilter } = require('../settings');
+import fs from 'fs';
+import path from 'path';
+
+import chalk from 'chalk';
+import execa from 'execa';
+import del from 'del';
+import { extract, ToolingLog } from '@kbn/dev-utils';
+
+import { BASE_PATH, ES_CONFIG, ES_KEYSTORE_BIN } from '../paths';
+import { Artifact } from '../artifact';
+import { parseSettings, SettingsFilter } from '../settings';
+import { log as defaultLog } from '../utils/log';
+
+interface InstallArchiveOptions {
+ license?: string;
+ password?: string;
+ basePath?: string;
+ installPath?: string;
+ log?: ToolingLog;
+ esArgs?: string[];
+}
+
+const isHttpUrl = (str: string) => {
+ try {
+ return ['http:', 'https:'].includes(new URL(str).protocol);
+ } catch {
+ return false;
+ }
+};
/**
* Extracts an ES archive and optionally installs plugins
- *
- * @param {String} archive - path to tar
- * @param {Object} options
- * @property {('oss'|'basic'|'trial')} options.license
- * @property {String} options.basePath
- * @property {String} options.installPath
- * @property {ToolingLog} options.log
*/
-exports.installArchive = async function installArchive(archive, options = {}) {
+export async function installArchive(archive: string, options: InstallArchiveOptions = {}) {
const {
license = 'basic',
password = 'changeme',
@@ -39,9 +50,9 @@ exports.installArchive = async function installArchive(archive, options = {}) {
} = options;
let dest = archive;
- if (['http:', 'https:'].includes(url.parse(archive).protocol)) {
+ if (isHttpUrl(archive)) {
const artifact = await Artifact.getArchive(archive, log);
- dest = path.resolve(basePath, 'cache', artifact.getFilename());
+ dest = path.resolve(basePath, 'cache', artifact.spec.filename);
await artifact.download(dest);
}
@@ -75,28 +86,23 @@ exports.installArchive = async function installArchive(archive, options = {}) {
}
return { installPath };
-};
+}
/**
* Appends single line to elasticsearch.yml config file
- *
- * @param {String} installPath
- * @param {String} key
- * @param {String} value
*/
-async function appendToConfig(installPath, key, value) {
+async function appendToConfig(installPath: string, key: string, value: string) {
fs.appendFileSync(path.resolve(installPath, ES_CONFIG), `${key}: ${value}\n`, 'utf8');
}
/**
* Creates and configures Keystore
- *
- * @param {String} installPath
- * @param {ToolingLog} log
- * @param {Array<[string, string]>} secureSettings List of custom Elasticsearch secure settings to
- * add into the keystore.
*/
-async function configureKeystore(installPath, log = defaultLog, secureSettings) {
+async function configureKeystore(
+ installPath: string,
+ log: ToolingLog = defaultLog,
+ secureSettings: Array<[string, string]>
+) {
const env = { JAVA_HOME: '' };
await execa(ES_KEYSTORE_BIN, ['create'], { cwd: installPath, env });
diff --git a/packages/kbn-es/src/install/snapshot.js b/packages/kbn-es/src/install/install_snapshot.ts
similarity index 55%
rename from packages/kbn-es/src/install/snapshot.js
rename to packages/kbn-es/src/install/install_snapshot.ts
index cf1ce50f7e413..84d713745eb82 100644
--- a/packages/kbn-es/src/install/snapshot.js
+++ b/packages/kbn-es/src/install/install_snapshot.ts
@@ -6,56 +6,58 @@
* Side Public License, v 1.
*/
-const chalk = require('chalk');
-const path = require('path');
-const { BASE_PATH } = require('../paths');
-const { installArchive } = require('./archive');
-const { log: defaultLog } = require('../utils');
-const { Artifact } = require('../artifact');
+import path from 'path';
+
+import chalk from 'chalk';
+import { ToolingLog } from '@kbn/dev-utils';
+
+import { BASE_PATH } from '../paths';
+import { installArchive } from './install_archive';
+import { log as defaultLog } from '../utils/log';
+import { Artifact, ArtifactLicense } from '../artifact';
+
+interface DownloadSnapshotOptions {
+ version: string;
+ license?: ArtifactLicense;
+ basePath?: string;
+ installPath?: string;
+ log?: ToolingLog;
+ useCached?: boolean;
+}
/**
* Download an ES snapshot
- *
- * @param {Object} options
- * @property {('oss'|'basic'|'trial')} options.license
- * @property {String} options.version
- * @property {String} options.basePath
- * @property {String} options.installPath
- * @property {ToolingLog} options.log
*/
-exports.downloadSnapshot = async function installSnapshot({
+export async function downloadSnapshot({
license = 'basic',
version,
basePath = BASE_PATH,
installPath = path.resolve(basePath, version),
log = defaultLog,
useCached = false,
-}) {
+}: DownloadSnapshotOptions) {
log.info('version: %s', chalk.bold(version));
log.info('install path: %s', chalk.bold(installPath));
log.info('license: %s', chalk.bold(license));
const artifact = await Artifact.getSnapshot(license, version, log);
- const dest = path.resolve(basePath, 'cache', artifact.getFilename());
+ const dest = path.resolve(basePath, 'cache', artifact.spec.filename);
await artifact.download(dest, { useCached });
return {
downloadPath: dest,
};
-};
+}
+
+interface InstallSnapshotOptions extends DownloadSnapshotOptions {
+ password?: string;
+ esArgs?: string[];
+}
/**
* Installs ES from snapshot
- *
- * @param {Object} options
- * @property {('oss'|'basic'|'trial')} options.license
- * @property {String} options.password
- * @property {String} options.version
- * @property {String} options.basePath
- * @property {String} options.installPath
- * @property {ToolingLog} options.log
*/
-exports.installSnapshot = async function installSnapshot({
+export async function installSnapshot({
license = 'basic',
password = 'password',
version,
@@ -64,8 +66,8 @@ exports.installSnapshot = async function installSnapshot({
log = defaultLog,
esArgs,
useCached = false,
-}) {
- const { downloadPath } = await exports.downloadSnapshot({
+}: InstallSnapshotOptions) {
+ const { downloadPath } = await downloadSnapshot({
license,
version,
basePath,
@@ -82,4 +84,4 @@ exports.installSnapshot = async function installSnapshot({
log,
esArgs,
});
-};
+}
diff --git a/packages/kbn-es/src/install/source.js b/packages/kbn-es/src/install/install_source.ts
similarity index 73%
rename from packages/kbn-es/src/install/source.js
rename to packages/kbn-es/src/install/install_source.ts
index 81a1019509906..d8c272677058e 100644
--- a/packages/kbn-es/src/install/source.js
+++ b/packages/kbn-es/src/install/install_source.ts
@@ -6,28 +6,35 @@
* Side Public License, v 1.
*/
-const path = require('path');
-const fs = require('fs');
-const os = require('os');
-const chalk = require('chalk');
-const crypto = require('crypto');
-const simpleGit = require('simple-git/promise');
-const { installArchive } = require('./archive');
-const { log: defaultLog, cache, buildSnapshot, archiveForPlatform } = require('../utils');
-const { BASE_PATH } = require('../paths');
+import path from 'path';
+import fs from 'fs';
+import os from 'os';
+import crypto from 'crypto';
+
+import chalk from 'chalk';
+import simpleGit from 'simple-git/promise';
+import { ToolingLog } from '@kbn/dev-utils';
+
+import { installArchive } from './install_archive';
+import { log as defaultLog } from '../utils/log';
+import { cache } from '../utils/cache';
+import { buildSnapshot, archiveForPlatform } from '../utils/build_snapshot';
+import { BASE_PATH } from '../paths';
+
+interface InstallSourceOptions {
+ sourcePath: string;
+ license?: string;
+ password?: string;
+ basePath?: string;
+ installPath?: string;
+ log?: ToolingLog;
+ esArgs?: string[];
+}
/**
* Installs ES from source
- *
- * @param {Object} options
- * @property {('oss'|'basic'|'trial')} options.license
- * @property {String} options.password
- * @property {String} options.sourcePath
- * @property {String} options.basePath
- * @property {String} options.installPath
- * @property {ToolingLog} options.log
*/
-exports.installSource = async function installSource({
+export async function installSource({
license = 'basic',
password = 'changeme',
sourcePath,
@@ -35,7 +42,7 @@ exports.installSource = async function installSource({
installPath = path.resolve(basePath, 'source'),
log = defaultLog,
esArgs,
-}) {
+}: InstallSourceOptions) {
log.info('source path: %s', chalk.bold(sourcePath));
log.info('install path: %s', chalk.bold(installPath));
log.info('license: %s', chalk.bold(license));
@@ -62,14 +69,9 @@ exports.installSource = async function installSource({
log,
esArgs,
});
-};
+}
-/**
- *
- * @param {String} cwd
- * @param {ToolingLog} log
- */
-async function sourceInfo(cwd, license, log = defaultLog) {
+async function sourceInfo(cwd: string, license: string, log: ToolingLog = defaultLog) {
if (!fs.existsSync(cwd)) {
throw new Error(`${cwd} does not exist`);
}
diff --git a/packages/kbn-es/src/paths.js b/packages/kbn-es/src/paths.js
deleted file mode 100644
index 5c8d3b654ecf9..0000000000000
--- a/packages/kbn-es/src/paths.js
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-const os = require('os');
-const path = require('path');
-
-function maybeUseBat(bin) {
- return os.platform().startsWith('win') ? `${bin}.bat` : bin;
-}
-
-const tempDir = os.tmpdir();
-
-exports.BASE_PATH = path.resolve(tempDir, 'kbn-es');
-
-exports.GRADLE_BIN = maybeUseBat('./gradlew');
-exports.ES_BIN = maybeUseBat('bin/elasticsearch');
-exports.ES_CONFIG = 'config/elasticsearch.yml';
-
-exports.ES_KEYSTORE_BIN = maybeUseBat('./bin/elasticsearch-keystore');
diff --git a/packages/kbn-es/src/paths.ts b/packages/kbn-es/src/paths.ts
new file mode 100644
index 0000000000000..c1b859af4e1f5
--- /dev/null
+++ b/packages/kbn-es/src/paths.ts
@@ -0,0 +1,24 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import Os from 'os';
+import Path from 'path';
+
+function maybeUseBat(bin: string) {
+ return Os.platform().startsWith('win') ? `${bin}.bat` : bin;
+}
+
+const tempDir = Os.tmpdir();
+
+export const BASE_PATH = Path.resolve(tempDir, 'kbn-es');
+
+export const GRADLE_BIN = maybeUseBat('./gradlew');
+export const ES_BIN = maybeUseBat('bin/elasticsearch');
+export const ES_CONFIG = 'config/elasticsearch.yml';
+
+export const ES_KEYSTORE_BIN = maybeUseBat('./bin/elasticsearch-keystore');
diff --git a/packages/kbn-es/src/utils/build_snapshot.js b/packages/kbn-es/src/utils/build_snapshot.ts
similarity index 53%
rename from packages/kbn-es/src/utils/build_snapshot.js
rename to packages/kbn-es/src/utils/build_snapshot.ts
index ec26ba69e658b..542e63dcc0748 100644
--- a/packages/kbn-es/src/utils/build_snapshot.js
+++ b/packages/kbn-es/src/utils/build_snapshot.ts
@@ -6,25 +6,25 @@
* Side Public License, v 1.
*/
-const execa = require('execa');
-const path = require('path');
-const os = require('os');
-const readline = require('readline');
-const { createCliError } = require('../errors');
-const { findMostRecentlyChanged } = require('../utils');
-const { GRADLE_BIN } = require('../paths');
+import path from 'path';
+import os from 'os';
-const onceEvent = (emitter, event) => new Promise((resolve) => emitter.once(event, resolve));
+import { ToolingLog, withProcRunner } from '@kbn/dev-utils';
+
+import { createCliError } from '../errors';
+import { findMostRecentlyChanged } from './find_most_recently_changed';
+import { GRADLE_BIN } from '../paths';
+
+interface BuildSnapshotOptions {
+ license: string;
+ sourcePath: string;
+ log: ToolingLog;
+ platform?: string;
+}
/**
* Creates archive from source
*
- * @param {Object} options
- * @property {('oss'|'basic'|'trial')} options.license
- * @property {String} options.sourcePath
- * @property {ToolingLog} options.log
- * @returns {Object} containing archive and optional plugins
- *
* Gradle tasks:
* $ ./gradlew tasks --all | grep 'distribution.*assemble\s'
* :distribution:archives:darwin-tar:assemble
@@ -34,39 +34,27 @@ const onceEvent = (emitter, event) => new Promise((resolve) => emitter.once(even
* :distribution:archives:oss-linux-tar:assemble
* :distribution:archives:oss-windows-zip:assemble
*/
-exports.buildSnapshot = async ({ license, sourcePath, log, platform = os.platform() }) => {
+export async function buildSnapshot({
+ license,
+ sourcePath,
+ log,
+ platform = os.platform(),
+}: BuildSnapshotOptions) {
const { task, ext } = exports.archiveForPlatform(platform, license);
const buildArgs = [`:distribution:archives:${task}:assemble`];
log.info('%s %s', GRADLE_BIN, buildArgs.join(' '));
log.debug('cwd:', sourcePath);
- const build = execa(GRADLE_BIN, buildArgs, {
- cwd: sourcePath,
- stdio: ['ignore', 'pipe', 'pipe'],
+ await withProcRunner(log, async (procs) => {
+ await procs.run('gradle', {
+ cmd: GRADLE_BIN,
+ args: buildArgs,
+ cwd: sourcePath,
+ wait: true,
+ });
});
- const stdout = readline.createInterface({ input: build.stdout });
- const stderr = readline.createInterface({ input: build.stderr });
-
- stdout.on('line', (line) => log.debug(line));
- stderr.on('line', (line) => log.error(line));
-
- const [exitCode] = await Promise.all([
- Promise.race([
- onceEvent(build, 'exit'),
- onceEvent(build, 'error').then((error) => {
- throw createCliError(`Error spawning gradle: ${error.message}`);
- }),
- ]),
- onceEvent(stdout, 'close'),
- onceEvent(stderr, 'close'),
- ]);
-
- if (exitCode > 0) {
- throw createCliError('unable to build ES');
- }
-
const archivePattern = `distribution/archives/${task}/build/distributions/elasticsearch-*.${ext}`;
const esArchivePath = findMostRecentlyChanged(path.resolve(sourcePath, archivePattern));
@@ -75,9 +63,9 @@ exports.buildSnapshot = async ({ license, sourcePath, log, platform = os.platfor
}
return esArchivePath;
-};
+}
-exports.archiveForPlatform = (platform, license) => {
+export function archiveForPlatform(platform: NodeJS.Platform, license: string) {
const taskPrefix = license === 'oss' ? 'oss-' : '';
switch (platform) {
@@ -88,6 +76,6 @@ exports.archiveForPlatform = (platform, license) => {
case 'linux':
return { format: 'tar', ext: 'tar.gz', task: `${taskPrefix}linux-tar`, platform: 'linux' };
default:
- throw new Error(`unknown platform: ${platform}`);
+ throw new Error(`unsupported platform: ${platform}`);
}
-};
+}
diff --git a/packages/kbn-es/src/utils/cache.js b/packages/kbn-es/src/utils/cache.js
deleted file mode 100644
index 248faf23bbc46..0000000000000
--- a/packages/kbn-es/src/utils/cache.js
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-const fs = require('fs');
-const path = require('path');
-
-exports.readMeta = function readMeta(file) {
- try {
- const meta = fs.readFileSync(`${file}.meta`, {
- encoding: 'utf8',
- });
-
- return {
- exists: fs.existsSync(file),
- ...JSON.parse(meta),
- };
- } catch (e) {
- if (e.code !== 'ENOENT') {
- throw e;
- }
-
- return {
- exists: false,
- };
- }
-};
-
-exports.writeMeta = function readMeta(file, details = {}) {
- const meta = {
- ts: new Date(),
- ...details,
- };
-
- fs.mkdirSync(path.dirname(file), { recursive: true });
- fs.writeFileSync(`${file}.meta`, JSON.stringify(meta, null, 2));
-};
diff --git a/packages/kbn-es/src/utils/cache.ts b/packages/kbn-es/src/utils/cache.ts
new file mode 100644
index 0000000000000..819119b6ce010
--- /dev/null
+++ b/packages/kbn-es/src/utils/cache.ts
@@ -0,0 +1,40 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import Fs from 'fs';
+import Path from 'path';
+
+export const cache = {
+ readMeta(path: string) {
+ try {
+ const meta = Fs.readFileSync(`${path}.meta`, {
+ encoding: 'utf8',
+ });
+
+ return {
+ ...JSON.parse(meta),
+ };
+ } catch (e) {
+ if (e.code !== 'ENOENT') {
+ throw e;
+ }
+
+ return {};
+ }
+ },
+
+ writeMeta(path: string, details = {}) {
+ const meta = {
+ ts: new Date(),
+ ...details,
+ };
+
+ Fs.mkdirSync(Path.dirname(path), { recursive: true });
+ Fs.writeFileSync(`${path}.meta`, JSON.stringify(meta, null, 2));
+ },
+};
diff --git a/packages/kbn-es/src/utils/find_most_recently_changed.test.js b/packages/kbn-es/src/utils/find_most_recently_changed.test.ts
similarity index 93%
rename from packages/kbn-es/src/utils/find_most_recently_changed.test.js
rename to packages/kbn-es/src/utils/find_most_recently_changed.test.ts
index 8198495e7197f..721e5baba7513 100644
--- a/packages/kbn-es/src/utils/find_most_recently_changed.test.js
+++ b/packages/kbn-es/src/utils/find_most_recently_changed.test.ts
@@ -6,6 +6,8 @@
* Side Public License, v 1.
*/
+import { findMostRecentlyChanged } from './find_most_recently_changed';
+
jest.mock('fs', () => ({
statSync: jest.fn().mockImplementation((path) => {
if (path.includes('oldest')) {
@@ -31,8 +33,6 @@ jest.mock('fs', () => ({
}),
}));
-const { findMostRecentlyChanged } = require('./find_most_recently_changed');
-
test('returns newest file', () => {
const file = findMostRecentlyChanged('/data/*.yml');
expect(file).toEqual('/data/newest.yml');
diff --git a/packages/kbn-es/src/utils/find_most_recently_changed.js b/packages/kbn-es/src/utils/find_most_recently_changed.ts
similarity index 65%
rename from packages/kbn-es/src/utils/find_most_recently_changed.js
rename to packages/kbn-es/src/utils/find_most_recently_changed.ts
index 16d300f080b8d..29e1edcc5fcc9 100644
--- a/packages/kbn-es/src/utils/find_most_recently_changed.js
+++ b/packages/kbn-es/src/utils/find_most_recently_changed.ts
@@ -6,25 +6,22 @@
* Side Public License, v 1.
*/
-const path = require('path');
-const fs = require('fs');
-const glob = require('glob');
+import path from 'path';
+import fs from 'fs';
+import glob from 'glob';
/**
* Find the most recently modified file that matches the pattern pattern
- *
- * @param {String} pattern absolute path with glob expressions
- * @return {String} Absolute path
*/
-exports.findMostRecentlyChanged = function findMostRecentlyChanged(pattern) {
+export function findMostRecentlyChanged(pattern: string) {
if (!path.isAbsolute(pattern)) {
throw new TypeError(`Pattern must be absolute, got ${pattern}`);
}
- const ctime = (path) => fs.statSync(path).ctime.getTime();
+ const ctime = (p: string) => fs.statSync(p).ctime.getTime();
return glob
.sync(pattern)
.sort((a, b) => ctime(a) - ctime(b))
.pop();
-};
+}
diff --git a/packages/kbn-es/src/utils/index.js b/packages/kbn-es/src/utils/index.js
deleted file mode 100644
index ed83495e5310a..0000000000000
--- a/packages/kbn-es/src/utils/index.js
+++ /dev/null
@@ -1,16 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-exports.cache = require('./cache');
-exports.log = require('./log').log;
-exports.parseEsLog = require('./parse_es_log').parseEsLog;
-exports.findMostRecentlyChanged = require('./find_most_recently_changed').findMostRecentlyChanged;
-exports.extractConfigFiles = require('./extract_config_files').extractConfigFiles;
-exports.NativeRealm = require('./native_realm').NativeRealm;
-exports.buildSnapshot = require('./build_snapshot').buildSnapshot;
-exports.archiveForPlatform = require('./build_snapshot').archiveForPlatform;
diff --git a/packages/kbn-es/src/utils/index.ts b/packages/kbn-es/src/utils/index.ts
new file mode 100644
index 0000000000000..ce0a222dafd3b
--- /dev/null
+++ b/packages/kbn-es/src/utils/index.ts
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+export { cache } from './cache';
+export { log } from './log';
+// @ts-expect-error not typed yet
+export { parseEsLog } from './parse_es_log';
+export { findMostRecentlyChanged } from './find_most_recently_changed';
+// @ts-expect-error not typed yet
+export { extractConfigFiles } from './extract_config_files';
+// @ts-expect-error not typed yet
+export { NativeRealm } from './native_realm';
+export { buildSnapshot } from './build_snapshot';
+export { archiveForPlatform } from './build_snapshot';
diff --git a/packages/kbn-es/src/utils/log.js b/packages/kbn-es/src/utils/log.ts
similarity index 80%
rename from packages/kbn-es/src/utils/log.js
rename to packages/kbn-es/src/utils/log.ts
index b33ae509c6c45..a0299f885cf6a 100644
--- a/packages/kbn-es/src/utils/log.js
+++ b/packages/kbn-es/src/utils/log.ts
@@ -6,11 +6,9 @@
* Side Public License, v 1.
*/
-const { ToolingLog } = require('@kbn/dev-utils');
+import { ToolingLog } from '@kbn/dev-utils';
-const log = new ToolingLog({
+export const log = new ToolingLog({
level: 'verbose',
writeTo: process.stdout,
});
-
-exports.log = log;
diff --git a/packages/kbn-eslint-import-resolver-kibana/BUILD.bazel b/packages/kbn-eslint-import-resolver-kibana/BUILD.bazel
index a4d96f76053e1..759f4ac706471 100644
--- a/packages/kbn-eslint-import-resolver-kibana/BUILD.bazel
+++ b/packages/kbn-eslint-import-resolver-kibana/BUILD.bazel
@@ -1,4 +1,5 @@
-load("@build_bazel_rules_nodejs//:index.bzl", "js_library", "pkg_npm")
+load("@build_bazel_rules_nodejs//:index.bzl", "js_library")
+load("//src/dev/bazel:index.bzl", "pkg_npm")
PKG_BASE_NAME = "kbn-eslint-import-resolver-kibana"
PKG_REQUIRE_NAME = "@kbn/eslint-import-resolver-kibana"
diff --git a/packages/kbn-eslint-plugin-eslint/BUILD.bazel b/packages/kbn-eslint-plugin-eslint/BUILD.bazel
index 5baab89d6f03d..c02a468456f77 100644
--- a/packages/kbn-eslint-plugin-eslint/BUILD.bazel
+++ b/packages/kbn-eslint-plugin-eslint/BUILD.bazel
@@ -1,4 +1,5 @@
-load("@build_bazel_rules_nodejs//:index.bzl", "js_library", "pkg_npm")
+load("@build_bazel_rules_nodejs//:index.bzl", "js_library")
+load("//src/dev/bazel:index.bzl", "pkg_npm")
PKG_BASE_NAME = "kbn-eslint-plugin-eslint"
PKG_REQUIRE_NAME = "@kbn/eslint-plugin-eslint"
@@ -28,7 +29,7 @@ NPM_MODULE_EXTRA_FILES = [
"README.md",
]
-DEPS = [
+RUNTIME_DEPS = [
"@npm//@babel/eslint-parser",
"@npm//dedent",
"@npm//eslint",
@@ -41,7 +42,7 @@ js_library(
srcs = NPM_MODULE_EXTRA_FILES + [
":srcs",
],
- deps = DEPS,
+ deps = RUNTIME_DEPS,
package_name = PKG_REQUIRE_NAME,
visibility = ["//visibility:public"],
)
diff --git a/packages/kbn-eslint-plugin-eslint/helpers/exports.js b/packages/kbn-eslint-plugin-eslint/helpers/exports.js
index b7af8e83d7661..971364633356c 100644
--- a/packages/kbn-eslint-plugin-eslint/helpers/exports.js
+++ b/packages/kbn-eslint-plugin-eslint/helpers/exports.js
@@ -9,7 +9,7 @@
const Fs = require('fs');
const Path = require('path');
const ts = require('typescript');
-const { REPO_ROOT } = require('@kbn/dev-utils');
+const { REPO_ROOT } = require('@kbn/utils');
const { ExportSet } = require('./export_set');
/** @typedef {import("@typescript-eslint/types").TSESTree.ExportAllDeclaration} ExportAllDeclaration */
diff --git a/packages/kbn-expect/BUILD.bazel b/packages/kbn-expect/BUILD.bazel
index b7eb91a451b9a..9f74cfe6a093d 100644
--- a/packages/kbn-expect/BUILD.bazel
+++ b/packages/kbn-expect/BUILD.bazel
@@ -1,4 +1,5 @@
-load("@build_bazel_rules_nodejs//:index.bzl", "js_library", "pkg_npm")
+load("@build_bazel_rules_nodejs//:index.bzl", "js_library")
+load("//src/dev/bazel:index.bzl", "pkg_npm")
PKG_BASE_NAME = "kbn-expect"
PKG_REQUIRE_NAME = "@kbn/expect"
diff --git a/packages/kbn-optimizer/BUILD.bazel b/packages/kbn-optimizer/BUILD.bazel
index a389086c9ee3c..3bd41249e2d51 100644
--- a/packages/kbn-optimizer/BUILD.bazel
+++ b/packages/kbn-optimizer/BUILD.bazel
@@ -38,10 +38,12 @@ RUNTIME_DEPS = [
"//packages/kbn-ui-shared-deps-npm",
"//packages/kbn-ui-shared-deps-src",
"//packages/kbn-utils",
+ "@npm//@babel/core",
"@npm//chalk",
"@npm//clean-webpack-plugin",
"@npm//compression-webpack-plugin",
"@npm//cpy",
+ "@npm//dedent",
"@npm//del",
"@npm//execa",
"@npm//jest-diff",
@@ -64,7 +66,7 @@ RUNTIME_DEPS = [
TYPES_DEPS = [
"//packages/kbn-config:npm_module_types",
"//packages/kbn-config-schema:npm_module_types",
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"//packages/kbn-std",
"//packages/kbn-ui-shared-deps-npm",
"//packages/kbn-ui-shared-deps-src",
@@ -79,7 +81,9 @@ TYPES_DEPS = [
"@npm//pirates",
"@npm//rxjs",
"@npm//zlib",
+ "@npm//@types/babel__core",
"@npm//@types/compression-webpack-plugin",
+ "@npm//@types/dedent",
"@npm//@types/jest",
"@npm//@types/json-stable-stringify",
"@npm//@types/js-yaml",
diff --git a/packages/kbn-optimizer/src/babel_runtime_helpers/find_babel_runtime_helpers_in_entry_bundles.ts b/packages/kbn-optimizer/src/babel_runtime_helpers/find_babel_runtime_helpers_in_entry_bundles.ts
index f00905f3f4920..c07a9764af76f 100644
--- a/packages/kbn-optimizer/src/babel_runtime_helpers/find_babel_runtime_helpers_in_entry_bundles.ts
+++ b/packages/kbn-optimizer/src/babel_runtime_helpers/find_babel_runtime_helpers_in_entry_bundles.ts
@@ -8,7 +8,8 @@
import Path from 'path';
-import { run, REPO_ROOT } from '@kbn/dev-utils';
+import { run } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { OptimizerConfig } from '../optimizer';
import { parseStats, inAnyEntryChunk } from './parse_stats';
diff --git a/packages/kbn-optimizer/src/node/node_auto_tranpilation.ts b/packages/kbn-optimizer/src/node/node_auto_tranpilation.ts
index 6f5dabf410ffa..2710ba8a54210 100644
--- a/packages/kbn-optimizer/src/node/node_auto_tranpilation.ts
+++ b/packages/kbn-optimizer/src/node/node_auto_tranpilation.ts
@@ -39,7 +39,7 @@ import Crypto from 'crypto';
import * as babel from '@babel/core';
import { addHook } from 'pirates';
-import { REPO_ROOT, UPSTREAM_BRANCH } from '@kbn/dev-utils';
+import { REPO_ROOT, UPSTREAM_BRANCH } from '@kbn/utils';
import sourceMapSupport from 'source-map-support';
import { Cache } from './cache';
diff --git a/packages/kbn-optimizer/src/optimizer/get_changes.test.ts b/packages/kbn-optimizer/src/optimizer/get_changes.test.ts
index d3cc5cceefddf..d1754248dba17 100644
--- a/packages/kbn-optimizer/src/optimizer/get_changes.test.ts
+++ b/packages/kbn-optimizer/src/optimizer/get_changes.test.ts
@@ -9,7 +9,8 @@
jest.mock('execa');
import { getChanges } from './get_changes';
-import { REPO_ROOT, createAbsolutePathSerializer } from '@kbn/dev-utils';
+import { createAbsolutePathSerializer } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
const execa: jest.Mock = jest.requireMock('execa');
diff --git a/packages/kbn-optimizer/src/optimizer/get_changes.ts b/packages/kbn-optimizer/src/optimizer/get_changes.ts
index c5f8abe99c322..b59f938eb8c37 100644
--- a/packages/kbn-optimizer/src/optimizer/get_changes.ts
+++ b/packages/kbn-optimizer/src/optimizer/get_changes.ts
@@ -10,7 +10,7 @@ import Path from 'path';
import execa from 'execa';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
export type Changes = Map<string, 'modified' | 'deleted'>;
diff --git a/packages/kbn-plugin-generator/BUILD.bazel b/packages/kbn-plugin-generator/BUILD.bazel
index c935d1763dae8..488f09bdd5d52 100644
--- a/packages/kbn-plugin-generator/BUILD.bazel
+++ b/packages/kbn-plugin-generator/BUILD.bazel
@@ -51,7 +51,7 @@ RUNTIME_DEPS = [
TYPES_DEPS = [
"//packages/kbn-utils",
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"@npm//del",
"@npm//execa",
"@npm//globby",
diff --git a/packages/kbn-plugin-helpers/BUILD.bazel b/packages/kbn-plugin-helpers/BUILD.bazel
index d7744aecac26e..47f205f1530b7 100644
--- a/packages/kbn-plugin-helpers/BUILD.bazel
+++ b/packages/kbn-plugin-helpers/BUILD.bazel
@@ -42,7 +42,7 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"//packages/kbn-optimizer",
"//packages/kbn-utils",
"@npm//del",
diff --git a/packages/kbn-pm/dist/index.js b/packages/kbn-pm/dist/index.js
index c1d0f69e4ea07..fc92d18698132 100644
--- a/packages/kbn-pm/dist/index.js
+++ b/packages/kbn-pm/dist/index.js
@@ -6639,7 +6639,15 @@ class ToolingLogTextWriter {
}
if (this.ignoreSources && msg.source && this.ignoreSources.includes(msg.source)) {
- return false;
+ if (msg.type === 'write') {
+ const txt = (0, _util.format)(msg.args[0], ...msg.args.slice(1)); // Ensure that Elasticsearch deprecation log messages from Kibana aren't ignored
+
+ if (!/elasticsearch\.deprecation/.test(txt)) {
+ return false;
+ }
+ } else {
+ return false;
+ }
}
const prefix = has(MSG_PREFIXES, msg.type) ? MSG_PREFIXES[msg.type] : '';
diff --git a/packages/kbn-rule-data-utils/BUILD.bazel b/packages/kbn-rule-data-utils/BUILD.bazel
index 730e907aafc65..d23cf25f181ca 100644
--- a/packages/kbn-rule-data-utils/BUILD.bazel
+++ b/packages/kbn-rule-data-utils/BUILD.bazel
@@ -34,7 +34,7 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-es-query",
+ "//packages/kbn-es-query:npm_module_types",
"@npm//@elastic/elasticsearch",
"@npm//tslib",
"@npm//utility-types",
diff --git a/packages/kbn-rule-data-utils/src/technical_field_names.ts b/packages/kbn-rule-data-utils/src/technical_field_names.ts
index 349719c019c22..fde8deade36b5 100644
--- a/packages/kbn-rule-data-utils/src/technical_field_names.ts
+++ b/packages/kbn-rule-data-utils/src/technical_field_names.ts
@@ -24,6 +24,7 @@ const VERSION = `${KIBANA_NAMESPACE}.version` as const;
// Fields pertaining to the alert
const ALERT_ACTION_GROUP = `${ALERT_NAMESPACE}.action_group` as const;
+const ALERT_BUILDING_BLOCK_TYPE = `${ALERT_NAMESPACE}.building_block_type` as const;
const ALERT_DURATION = `${ALERT_NAMESPACE}.duration.us` as const;
const ALERT_END = `${ALERT_NAMESPACE}.end` as const;
const ALERT_EVALUATION_THRESHOLD = `${ALERT_NAMESPACE}.evaluation.threshold` as const;
@@ -91,6 +92,7 @@ const fields = {
TAGS,
TIMESTAMP,
ALERT_ACTION_GROUP,
+ ALERT_BUILDING_BLOCK_TYPE,
ALERT_DURATION,
ALERT_END,
ALERT_EVALUATION_THRESHOLD,
@@ -141,6 +143,7 @@ const fields = {
export {
ALERT_ACTION_GROUP,
+ ALERT_BUILDING_BLOCK_TYPE,
ALERT_DURATION,
ALERT_END,
ALERT_EVALUATION_THRESHOLD,
diff --git a/packages/kbn-securitysolution-autocomplete/BUILD.bazel b/packages/kbn-securitysolution-autocomplete/BUILD.bazel
index 57ac8c62273e0..50df292b8796e 100644
--- a/packages/kbn-securitysolution-autocomplete/BUILD.bazel
+++ b/packages/kbn-securitysolution-autocomplete/BUILD.bazel
@@ -45,7 +45,7 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-es-query",
+ "//packages/kbn-es-query:npm_module_types",
"//packages/kbn-i18n",
"//packages/kbn-securitysolution-list-hooks",
"//packages/kbn-securitysolution-list-utils",
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.mock.ts b/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.mock.ts
index e491b50b0f9c8..176a6357b30e7 100644
--- a/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.mock.ts
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.mock.ts
@@ -10,9 +10,11 @@ import { EndpointEntriesArray } from '.';
import { getEndpointEntryMatchMock } from '../entry_match/index.mock';
import { getEndpointEntryMatchAnyMock } from '../entry_match_any/index.mock';
import { getEndpointEntryNestedMock } from '../entry_nested/index.mock';
+import { getEndpointEntryMatchWildcard } from '../entry_match_wildcard/index.mock';
export const getEndpointEntriesArrayMock = (): EndpointEntriesArray => [
getEndpointEntryMatchMock(),
getEndpointEntryMatchAnyMock(),
getEndpointEntryNestedMock(),
+ getEndpointEntryMatchWildcard(),
];
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.test.ts b/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.test.ts
index 09f1740567bc1..ca852e15c5c2a 100644
--- a/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.test.ts
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.test.ts
@@ -20,6 +20,7 @@ import { getEndpointEntryNestedMock } from '../entry_nested/index.mock';
import { getEndpointEntriesArrayMock } from './index.mock';
import { getEntryListMock } from '../../entries_list/index.mock';
import { getEntryExistsMock } from '../../entries_exist/index.mock';
+import { getEndpointEntryMatchWildcard } from '../entry_match_wildcard/index.mock';
describe('Endpoint', () => {
describe('entriesArray', () => {
@@ -99,6 +100,15 @@ describe('Endpoint', () => {
expect(message.schema).toEqual(payload);
});
+ test('it should validate an array with wildcard entry', () => {
+ const payload = [getEndpointEntryMatchWildcard()];
+ const decoded = endpointEntriesArray.decode(payload);
+ const message = pipe(decoded, foldLeftRight);
+
+ expect(getPaths(left(message.errors))).toEqual([]);
+ expect(message.schema).toEqual(payload);
+ });
+
test('it should validate an array with all types of entries', () => {
const payload = getEndpointEntriesArrayMock();
const decoded = endpointEntriesArray.decode(payload);
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.ts b/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.ts
index 451131dafc459..58b0d80f9c1fa 100644
--- a/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.ts
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entries/index.ts
@@ -11,9 +11,15 @@ import { Either } from 'fp-ts/lib/Either';
import { endpointEntryMatch } from '../entry_match';
import { endpointEntryMatchAny } from '../entry_match_any';
import { endpointEntryNested } from '../entry_nested';
+import { endpointEntryMatchWildcard } from '../entry_match_wildcard';
export const endpointEntriesArray = t.array(
- t.union([endpointEntryMatch, endpointEntryMatchAny, endpointEntryNested])
+ t.union([
+ endpointEntryMatch,
+ endpointEntryMatchAny,
+ endpointEntryMatchWildcard,
+ endpointEntryNested,
+ ])
);
export type EndpointEntriesArray = t.TypeOf<typeof endpointEntriesArray>;
diff --git a/packages/kbn-es/src/install/index.js b/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entry_match_wildcard/index.mock.ts
similarity index 53%
rename from packages/kbn-es/src/install/index.js
rename to packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entry_match_wildcard/index.mock.ts
index 07582f73c663a..e001552277e0c 100644
--- a/packages/kbn-es/src/install/index.js
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/common/endpoint/entry_match_wildcard/index.mock.ts
@@ -6,7 +6,12 @@
* Side Public License, v 1.
*/
-exports.installArchive = require('./archive').installArchive;
-exports.installSnapshot = require('./snapshot').installSnapshot;
-exports.downloadSnapshot = require('./snapshot').downloadSnapshot;
-exports.installSource = require('./source').installSource;
+import { ENTRY_VALUE, FIELD, OPERATOR, WILDCARD } from '../../../constants/index.mock';
+import { EndpointEntryMatchWildcard } from './index';
+
+export const getEndpointEntryMatchWildcard = (): EndpointEntryMatchWildcard => ({
+ field: FIELD,
+ operator: OPERATOR,
+ type: WILDCARD,
+ value: ENTRY_VALUE,
+});
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_item_schema/index.mock.ts b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_item_schema/index.mock.ts
new file mode 100644
index 0000000000000..03ec225351e6d
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_item_schema/index.mock.ts
@@ -0,0 +1,34 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { ENTRIES } from '../../constants/index.mock';
+import { ImportExceptionListItemSchema, ImportExceptionListItemSchemaDecoded } from '.';
+
+export const getImportExceptionsListItemSchemaMock = (
+ itemId = 'item_id_1',
+ listId = 'detection_list_id'
+): ImportExceptionListItemSchema => ({
+ description: 'some description',
+ entries: ENTRIES,
+ item_id: itemId,
+ list_id: listId,
+ name: 'Query with a rule id',
+ type: 'simple',
+});
+
+export const getImportExceptionsListItemSchemaDecodedMock = (
+ itemId = 'item_id_1',
+ listId = 'detection_list_id'
+): ImportExceptionListItemSchemaDecoded => ({
+ ...getImportExceptionsListItemSchemaMock(itemId, listId),
+ comments: [],
+ meta: undefined,
+ namespace_type: 'single',
+ os_types: [],
+ tags: [],
+});
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_item_schema/index.test.ts b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_item_schema/index.test.ts
new file mode 100644
index 0000000000000..d202f65b57ab5
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_item_schema/index.test.ts
@@ -0,0 +1,143 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { left } from 'fp-ts/lib/Either';
+import { pipe } from 'fp-ts/lib/pipeable';
+import { exactCheck, foldLeftRight, getPaths } from '@kbn/securitysolution-io-ts-utils';
+
+import { importExceptionListItemSchema, ImportExceptionListItemSchema } from '.';
+import {
+ getImportExceptionsListItemSchemaDecodedMock,
+ getImportExceptionsListItemSchemaMock,
+} from './index.mock';
+
+describe('import_list_item_schema', () => {
+ test('it should validate a typical item request', () => {
+ const payload = getImportExceptionsListItemSchemaMock();
+ const decoded = importExceptionListItemSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+
+ expect(getPaths(left(message.errors))).toEqual([]);
+ expect(message.schema).toEqual(getImportExceptionsListItemSchemaDecodedMock());
+ });
+
+ test('it should NOT accept an undefined for "item_id"', () => {
+ const payload: Partial<ImportExceptionListItemSchema> =
+ getImportExceptionsListItemSchemaMock();
+ delete payload.item_id;
+ const decoded = importExceptionListItemSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "item_id"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "list_id"', () => {
+ const payload: Partial<ImportExceptionListItemSchema> =
+ getImportExceptionsListItemSchemaMock();
+ delete payload.list_id;
+ const decoded = importExceptionListItemSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "list_id"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "description"', () => {
+ const payload: Partial<ImportExceptionListItemSchema> =
+ getImportExceptionsListItemSchemaMock();
+ delete payload.description;
+ const decoded = importExceptionListItemSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "description"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "name"', () => {
+ const payload: Partial<ImportExceptionListItemSchema> =
+ getImportExceptionsListItemSchemaMock();
+ delete payload.name;
+ const decoded = importExceptionListItemSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "name"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "type"', () => {
+ const payload: Partial<ImportExceptionListItemSchema> =
+ getImportExceptionsListItemSchemaMock();
+ delete payload.type;
+ const decoded = importExceptionListItemSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "type"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "entries"', () => {
+ const payload: Partial<ImportExceptionListItemSchema> =
+ getImportExceptionsListItemSchemaMock();
+ delete payload.entries;
+ const decoded = importExceptionListItemSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "entries"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should accept any partial fields', () => {
+ const payload: ImportExceptionListItemSchema = {
+ ...getImportExceptionsListItemSchemaMock(),
+ id: '123',
+ namespace_type: 'single',
+ comments: [],
+ os_types: [],
+ tags: ['123'],
+ created_at: '2018-08-24T17:49:30.145142000',
+ created_by: 'elastic',
+ updated_at: '2018-08-24T17:49:30.145142000',
+ updated_by: 'elastic',
+ tie_breaker_id: '123',
+ _version: '3',
+ meta: undefined,
+ };
+
+ const decoded = importExceptionListItemSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([]);
+ expect(message.schema).toEqual(payload);
+ });
+
+ test('it should not allow an extra key to be sent in', () => {
+ const payload: ImportExceptionListItemSchema & {
+ extraKey?: string;
+ } = getImportExceptionsListItemSchemaMock();
+ payload.extraKey = 'some new value';
+ const decoded = importExceptionListItemSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual(['invalid keys "extraKey"']);
+ expect(message.schema).toEqual({});
+ });
+});
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_item_schema/index.ts b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_item_schema/index.ts
new file mode 100644
index 0000000000000..3da30a21a0115
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_item_schema/index.ts
@@ -0,0 +1,87 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import * as t from 'io-ts';
+
+import { OsTypeArray, osTypeArrayOrUndefined } from '../../common/os_type';
+import { Tags } from '../../common/tags';
+import { NamespaceType } from '../../common/default_namespace';
+import { name } from '../../common/name';
+import { description } from '../../common/description';
+import { namespace_type } from '../../common/namespace_type';
+import { tags } from '../../common/tags';
+import { meta } from '../../common/meta';
+import { list_id } from '../../common/list_id';
+import { item_id } from '../../common/item_id';
+import { id } from '../../common/id';
+import { created_at } from '../../common/created_at';
+import { created_by } from '../../common/created_by';
+import { updated_at } from '../../common/updated_at';
+import { updated_by } from '../../common/updated_by';
+import { _version } from '../../common/underscore_version';
+import { tie_breaker_id } from '../../common/tie_breaker_id';
+import { nonEmptyEntriesArray } from '../../common/non_empty_entries_array';
+import { exceptionListItemType } from '../../common/exception_list_item_type';
+import { ItemId } from '../../common/item_id';
+import { EntriesArray } from '../../common/entries';
+import { CreateCommentsArray } from '../../common/create_comment';
+import { DefaultCreateCommentsArray } from '../../common/default_create_comments_array';
+
+/**
+ * Differences from this and the createExceptionsListItemSchema are
+ * - item_id is required
+ * - id is optional (but ignored in the import code - item_id is exclusively used for imports)
+ * - immutable is optional but if it is any value other than false it will be rejected
+ * - created_at is optional (but ignored in the import code)
+ * - updated_at is optional (but ignored in the import code)
+ * - created_by is optional (but ignored in the import code)
+ * - updated_by is optional (but ignored in the import code)
+ */
+export const importExceptionListItemSchema = t.intersection([
+ t.exact(
+ t.type({
+ description,
+ entries: nonEmptyEntriesArray,
+ item_id,
+ list_id,
+ name,
+ type: exceptionListItemType,
+ })
+ ),
+ t.exact(
+ t.partial({
+ id, // defaults to undefined if not set during decode
+ comments: DefaultCreateCommentsArray, // defaults to empty array if not set during decode
+ created_at, // defaults undefined if not set during decode
+ updated_at, // defaults undefined if not set during decode
+ created_by, // defaults undefined if not set during decode
+ updated_by, // defaults undefined if not set during decode
+ _version, // defaults to undefined if not set during decode
+ tie_breaker_id,
+ meta, // defaults to undefined if not set during decode
+ namespace_type, // defaults to 'single' if not set during decode
+ os_types: osTypeArrayOrUndefined, // defaults to empty array if not set during decode
+ tags, // defaults to empty array if not set during decode
+ })
+ ),
+]);
+
+export type ImportExceptionListItemSchema = t.OutputOf<typeof importExceptionListItemSchema>;
+
+// This type is used after a decode since some things are defaults after a decode.
+export type ImportExceptionListItemSchemaDecoded = Omit<
+ ImportExceptionListItemSchema,
+ 'tags' | 'item_id' | 'entries' | 'namespace_type' | 'comments'
+> & {
+ comments: CreateCommentsArray;
+ tags: Tags;
+ item_id: ItemId;
+ entries: EntriesArray;
+ namespace_type: NamespaceType;
+ os_types: OsTypeArray;
+};
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_list_schema/index.mock.ts b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_list_schema/index.mock.ts
new file mode 100644
index 0000000000000..dc6aa8644c1f5
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_list_schema/index.mock.ts
@@ -0,0 +1,30 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { ImportExceptionListSchemaDecoded, ImportExceptionsListSchema } from '.';
+
+export const getImportExceptionsListSchemaMock = (
+ listId = 'detection_list_id'
+): ImportExceptionsListSchema => ({
+ description: 'some description',
+ list_id: listId,
+ name: 'Query with a rule id',
+ type: 'detection',
+});
+
+export const getImportExceptionsListSchemaDecodedMock = (
+ listId = 'detection_list_id'
+): ImportExceptionListSchemaDecoded => ({
+ ...getImportExceptionsListSchemaMock(listId),
+ immutable: false,
+ meta: undefined,
+ namespace_type: 'single',
+ os_types: [],
+ tags: [],
+ version: 1,
+});
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_list_schema/index.test.ts b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_list_schema/index.test.ts
new file mode 100644
index 0000000000000..92a24cd4352f5
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_list_schema/index.test.ts
@@ -0,0 +1,132 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { left } from 'fp-ts/lib/Either';
+import { pipe } from 'fp-ts/lib/pipeable';
+import { exactCheck, foldLeftRight, getPaths } from '@kbn/securitysolution-io-ts-utils';
+
+import { importExceptionsListSchema, ImportExceptionsListSchema } from '.';
+import {
+ getImportExceptionsListSchemaMock,
+ getImportExceptionsListSchemaDecodedMock,
+} from './index.mock';
+
+describe('import_exception_list_schema', () => {
+ test('it should validate a typical lists request', () => {
+ const payload = getImportExceptionsListSchemaMock();
+ const decoded = importExceptionsListSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+
+ expect(getPaths(left(message.errors))).toEqual([]);
+ expect(message.schema).toEqual(getImportExceptionsListSchemaDecodedMock());
+ });
+
+ test('it should NOT accept an undefined for "list_id"', () => {
+    const payload: Partial<ImportExceptionsListSchema> =
+ getImportExceptionsListSchemaMock();
+ delete payload.list_id;
+ const decoded = importExceptionsListSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "list_id"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "description"', () => {
+    const payload: Partial<ImportExceptionsListSchema> =
+ getImportExceptionsListSchemaMock();
+ delete payload.description;
+ const decoded = importExceptionsListSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "description"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "name"', () => {
+    const payload: Partial<ImportExceptionsListSchema> =
+ getImportExceptionsListSchemaMock();
+ delete payload.name;
+ const decoded = importExceptionsListSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "name"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "type"', () => {
+    const payload: Partial<ImportExceptionsListSchema> =
+ getImportExceptionsListSchemaMock();
+ delete payload.type;
+ const decoded = importExceptionsListSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "type"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept value of "true" for "immutable"', () => {
+ const payload: ImportExceptionsListSchema = {
+ ...getImportExceptionsListSchemaMock(),
+ immutable: true,
+ };
+
+ const decoded = importExceptionsListSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "true" supplied to "immutable"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should accept any partial fields', () => {
+ const payload: ImportExceptionsListSchema = {
+ ...getImportExceptionsListSchemaMock(),
+ namespace_type: 'single',
+ immutable: false,
+ os_types: [],
+ tags: ['123'],
+ created_at: '2018-08-24T17:49:30.145142000',
+ created_by: 'elastic',
+ updated_at: '2018-08-24T17:49:30.145142000',
+ updated_by: 'elastic',
+ version: 3,
+ tie_breaker_id: '123',
+ _version: '3',
+ meta: undefined,
+ };
+
+ const decoded = importExceptionsListSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([]);
+ expect(message.schema).toEqual(payload);
+ });
+
+ test('it should not allow an extra key to be sent in', () => {
+ const payload: ImportExceptionsListSchema & {
+ extraKey?: string;
+ } = getImportExceptionsListSchemaMock();
+ payload.extraKey = 'some new value';
+ const decoded = importExceptionsListSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual(['invalid keys "extraKey"']);
+ expect(message.schema).toEqual({});
+ });
+});
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_list_schema/index.ts b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_list_schema/index.ts
new file mode 100644
index 0000000000000..610bbae97f579
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/request/import_exception_list_schema/index.ts
@@ -0,0 +1,87 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import * as t from 'io-ts';
+
+import {
+ DefaultVersionNumber,
+ DefaultVersionNumberDecoded,
+ OnlyFalseAllowed,
+} from '@kbn/securitysolution-io-ts-types';
+
+import { exceptionListType } from '../../common/exception_list';
+import { OsTypeArray, osTypeArrayOrUndefined } from '../../common/os_type';
+import { Tags } from '../../common/tags';
+import { ListId } from '../../common/list_id';
+import { NamespaceType } from '../../common/default_namespace';
+import { name } from '../../common/name';
+import { description } from '../../common/description';
+import { namespace_type } from '../../common/namespace_type';
+import { tags } from '../../common/tags';
+import { meta } from '../../common/meta';
+import { list_id } from '../../common/list_id';
+import { id } from '../../common/id';
+import { created_at } from '../../common/created_at';
+import { created_by } from '../../common/created_by';
+import { updated_at } from '../../common/updated_at';
+import { updated_by } from '../../common/updated_by';
+import { _version } from '../../common/underscore_version';
+import { tie_breaker_id } from '../../common/tie_breaker_id';
+
+/**
+ * Differences from this and the createExceptionsSchema are
+ * - list_id is required
+ * - id is optional (but ignored in the import code - list_id is exclusively used for imports)
+ * - immutable is optional but if it is any value other than false it will be rejected
+ * - created_at is optional (but ignored in the import code)
+ * - updated_at is optional (but ignored in the import code)
+ * - created_by is optional (but ignored in the import code)
+ * - updated_by is optional (but ignored in the import code)
+ */
+export const importExceptionsListSchema = t.intersection([
+ t.exact(
+ t.type({
+ description,
+ name,
+ type: exceptionListType,
+ list_id,
+ })
+ ),
+ t.exact(
+ t.partial({
+ id, // defaults to undefined if not set during decode
+ immutable: OnlyFalseAllowed,
+ meta, // defaults to undefined if not set during decode
+ namespace_type, // defaults to 'single' if not set during decode
+ os_types: osTypeArrayOrUndefined, // defaults to empty array if not set during decode
+ tags, // defaults to empty array if not set during decode
+ created_at, // defaults "undefined" if not set during decode
+ updated_at, // defaults "undefined" if not set during decode
+ created_by, // defaults "undefined" if not set during decode
+ updated_by, // defaults "undefined" if not set during decode
+ _version, // defaults to undefined if not set during decode
+ tie_breaker_id,
+ version: DefaultVersionNumber, // defaults to numerical 1 if not set during decode
+ })
+ ),
+]);
+
+export type ImportExceptionsListSchema = t.TypeOf<typeof importExceptionsListSchema>;
+
+// This type is used after a decode since some things are defaults after a decode.
+export type ImportExceptionListSchemaDecoded = Omit<
+ ImportExceptionsListSchema,
+ 'tags' | 'list_id' | 'namespace_type' | 'os_types' | 'immutable'
+> & {
+ immutable: false;
+ tags: Tags;
+ list_id: ListId;
+ namespace_type: NamespaceType;
+ os_types: OsTypeArray;
+ version: DefaultVersionNumberDecoded;
+};
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/request/index.ts b/packages/kbn-securitysolution-io-ts-list-types/src/request/index.ts
index 3d3c41aed5a72..da8bd7ed8306e 100644
--- a/packages/kbn-securitysolution-io-ts-list-types/src/request/index.ts
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/request/index.ts
@@ -23,6 +23,8 @@ export * from './find_exception_list_item_schema';
export * from './find_list_item_schema';
export * from './find_list_schema';
export * from './import_list_item_query_schema';
+export * from './import_exception_list_schema';
+export * from './import_exception_item_schema';
export * from './import_list_item_schema';
export * from './patch_list_item_schema';
export * from './patch_list_schema';
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/response/import_exceptions_schema/index.mock.ts b/packages/kbn-securitysolution-io-ts-list-types/src/response/import_exceptions_schema/index.mock.ts
new file mode 100644
index 0000000000000..d4c17c7f9422e
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/response/import_exceptions_schema/index.mock.ts
@@ -0,0 +1,23 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { ImportExceptionsResponseSchema } from '.';
+
+export const getImportExceptionsResponseSchemaMock = (
+ success = 0,
+ lists = 0,
+ items = 0
+): ImportExceptionsResponseSchema => ({
+ errors: [],
+ success: true,
+ success_count: success,
+ success_exception_lists: true,
+ success_count_exception_lists: lists,
+ success_exception_list_items: true,
+ success_count_exception_list_items: items,
+});
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/response/import_exceptions_schema/index.test.ts b/packages/kbn-securitysolution-io-ts-list-types/src/response/import_exceptions_schema/index.test.ts
new file mode 100644
index 0000000000000..dc6780d4b1ce2
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/response/import_exceptions_schema/index.test.ts
@@ -0,0 +1,129 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { left } from 'fp-ts/lib/Either';
+import { pipe } from 'fp-ts/lib/pipeable';
+import { exactCheck, foldLeftRight, getPaths } from '@kbn/securitysolution-io-ts-utils';
+
+import { importExceptionsResponseSchema, ImportExceptionsResponseSchema } from '.';
+import { getImportExceptionsResponseSchemaMock } from './index.mock';
+
+describe('importExceptionsResponseSchema', () => {
+ test('it should validate a typical exceptions import response', () => {
+ const payload = getImportExceptionsResponseSchemaMock();
+ const decoded = importExceptionsResponseSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+
+ expect(getPaths(left(message.errors))).toEqual([]);
+ expect(message.schema).toEqual(payload);
+ });
+
+ test('it should NOT accept an undefined for "errors"', () => {
+    const payload: Partial<ImportExceptionsResponseSchema> =
+ getImportExceptionsResponseSchemaMock();
+ delete payload.errors;
+ const decoded = importExceptionsResponseSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "errors"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "success"', () => {
+    const payload: Partial<ImportExceptionsResponseSchema> =
+ getImportExceptionsResponseSchemaMock();
+ delete payload.success;
+ const decoded = importExceptionsResponseSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "success"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "success_count"', () => {
+    const payload: Partial<ImportExceptionsResponseSchema> =
+ getImportExceptionsResponseSchemaMock();
+ delete payload.success_count;
+ const decoded = importExceptionsResponseSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "success_count"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "success_exception_lists"', () => {
+    const payload: Partial<ImportExceptionsResponseSchema> =
+ getImportExceptionsResponseSchemaMock();
+ delete payload.success_exception_lists;
+ const decoded = importExceptionsResponseSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "success_exception_lists"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "success_count_exception_lists"', () => {
+    const payload: Partial<ImportExceptionsResponseSchema> =
+ getImportExceptionsResponseSchemaMock();
+ delete payload.success_count_exception_lists;
+ const decoded = importExceptionsResponseSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "success_count_exception_lists"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "success_exception_list_items"', () => {
+    const payload: Partial<ImportExceptionsResponseSchema> =
+ getImportExceptionsResponseSchemaMock();
+ delete payload.success_exception_list_items;
+ const decoded = importExceptionsResponseSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "success_exception_list_items"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT accept an undefined for "success_count_exception_list_items"', () => {
+    const payload: Partial<ImportExceptionsResponseSchema> =
+ getImportExceptionsResponseSchemaMock();
+ delete payload.success_count_exception_list_items;
+ const decoded = importExceptionsResponseSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "undefined" supplied to "success_count_exception_list_items"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should not allow an extra key to be sent in', () => {
+ const payload: ImportExceptionsResponseSchema & {
+ extraKey?: string;
+ } = getImportExceptionsResponseSchemaMock();
+ payload.extraKey = 'some new value';
+ const decoded = importExceptionsResponseSchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = pipe(checked, foldLeftRight);
+ expect(getPaths(left(message.errors))).toEqual(['invalid keys "extraKey"']);
+ expect(message.schema).toEqual({});
+ });
+});
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/response/import_exceptions_schema/index.ts b/packages/kbn-securitysolution-io-ts-list-types/src/response/import_exceptions_schema/index.ts
new file mode 100644
index 0000000000000..f50356d2789f8
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/response/import_exceptions_schema/index.ts
@@ -0,0 +1,51 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import * as t from 'io-ts';
+
+import { PositiveInteger } from '@kbn/securitysolution-io-ts-types';
+
+import { id } from '../../common/id';
+import { list_id } from '../../common/list_id';
+import { item_id } from '../../common/item_id';
+
+export const bulkErrorErrorSchema = t.exact(
+ t.type({
+ status_code: t.number,
+ message: t.string,
+ })
+);
+
+export const bulkErrorSchema = t.intersection([
+ t.exact(
+ t.type({
+ error: bulkErrorErrorSchema,
+ })
+ ),
+ t.partial({
+ id,
+ list_id,
+ item_id,
+ }),
+]);
+
+export type BulkErrorSchema = t.TypeOf<typeof bulkErrorSchema>;
+
+export const importExceptionsResponseSchema = t.exact(
+ t.type({
+ errors: t.array(bulkErrorSchema),
+ success: t.boolean,
+ success_count: PositiveInteger,
+ success_exception_lists: t.boolean,
+ success_count_exception_lists: PositiveInteger,
+ success_exception_list_items: t.boolean,
+ success_count_exception_list_items: PositiveInteger,
+ })
+);
+
+export type ImportExceptionsResponseSchema = t.TypeOf<typeof importExceptionsResponseSchema>;
diff --git a/packages/kbn-securitysolution-io-ts-list-types/src/response/index.ts b/packages/kbn-securitysolution-io-ts-list-types/src/response/index.ts
index dc29bdf16ab48..c37b092eb3477 100644
--- a/packages/kbn-securitysolution-io-ts-list-types/src/response/index.ts
+++ b/packages/kbn-securitysolution-io-ts-list-types/src/response/index.ts
@@ -14,6 +14,7 @@ export * from './found_exception_list_item_schema';
export * from './found_exception_list_schema';
export * from './found_list_item_schema';
export * from './found_list_schema';
+export * from './import_exceptions_schema';
export * from './list_item_schema';
export * from './list_schema';
export * from './exception_list_summary_schema';
diff --git a/packages/kbn-securitysolution-io-ts-types/src/import_query_schema/index.test.ts b/packages/kbn-securitysolution-io-ts-types/src/import_query_schema/index.test.ts
new file mode 100644
index 0000000000000..03ec9df51a318
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-types/src/import_query_schema/index.test.ts
@@ -0,0 +1,54 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { left } from 'fp-ts/lib/Either';
+import { ImportQuerySchema, importQuerySchema } from '.';
+import { exactCheck, foldLeftRight, getPaths } from '@kbn/securitysolution-io-ts-utils';
+
+describe('importQuerySchema', () => {
+ test('it should validate proper schema', () => {
+ const payload = {
+ overwrite: true,
+ };
+ const decoded = importQuerySchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = foldLeftRight(checked);
+
+ expect(getPaths(left(message.errors))).toEqual([]);
+ expect(message.schema).toEqual(payload);
+ });
+
+ test('it should NOT validate a non boolean value for "overwrite"', () => {
+    const payload: Omit<ImportQuerySchema, 'overwrite'> & { overwrite: string } = {
+ overwrite: 'wrong',
+ };
+ const decoded = importQuerySchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = foldLeftRight(checked);
+
+ expect(getPaths(left(message.errors))).toEqual([
+ 'Invalid value "wrong" supplied to "overwrite"',
+ ]);
+ expect(message.schema).toEqual({});
+ });
+
+ test('it should NOT allow an extra key to be sent in', () => {
+ const payload: ImportQuerySchema & {
+ extraKey?: string;
+ } = {
+ extraKey: 'extra',
+ overwrite: true,
+ };
+
+ const decoded = importQuerySchema.decode(payload);
+ const checked = exactCheck(payload, decoded);
+ const message = foldLeftRight(checked);
+ expect(getPaths(left(message.errors))).toEqual(['invalid keys "extraKey"']);
+ expect(message.schema).toEqual({});
+ });
+});
diff --git a/packages/kbn-securitysolution-io-ts-types/src/import_query_schema/index.ts b/packages/kbn-securitysolution-io-ts-types/src/import_query_schema/index.ts
new file mode 100644
index 0000000000000..95cbf96b2ef8d
--- /dev/null
+++ b/packages/kbn-securitysolution-io-ts-types/src/import_query_schema/index.ts
@@ -0,0 +1,22 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import * as t from 'io-ts';
+
+import { DefaultStringBooleanFalse } from '../default_string_boolean_false';
+
+export const importQuerySchema = t.exact(
+ t.partial({
+ overwrite: DefaultStringBooleanFalse,
+ })
+);
+
+export type ImportQuerySchema = t.TypeOf<typeof importQuerySchema>;
+export type ImportQuerySchemaDecoded = Omit<ImportQuerySchema, 'overwrite'> & {
+ overwrite: boolean;
+};
diff --git a/packages/kbn-securitysolution-io-ts-types/src/index.ts b/packages/kbn-securitysolution-io-ts-types/src/index.ts
index b85bff63fe2a7..0bb99e4c766e7 100644
--- a/packages/kbn-securitysolution-io-ts-types/src/index.ts
+++ b/packages/kbn-securitysolution-io-ts-types/src/index.ts
@@ -17,6 +17,7 @@ export * from './default_version_number';
export * from './empty_string_array';
export * from './enumeration';
export * from './iso_date_string';
+export * from './import_query_schema';
export * from './non_empty_array';
export * from './non_empty_or_nullable_string_array';
export * from './non_empty_string_array';
diff --git a/packages/kbn-securitysolution-list-utils/BUILD.bazel b/packages/kbn-securitysolution-list-utils/BUILD.bazel
index eb33eb1a03b66..30568ca725041 100644
--- a/packages/kbn-securitysolution-list-utils/BUILD.bazel
+++ b/packages/kbn-securitysolution-list-utils/BUILD.bazel
@@ -38,11 +38,12 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-es-query",
- "//packages/kbn-i18n",
+ "//packages/kbn-es-query:npm_module_types",
+ "//packages/kbn-i18n:npm_module_types",
"//packages/kbn-securitysolution-io-ts-list-types",
"//packages/kbn-securitysolution-list-constants",
"//packages/kbn-securitysolution-utils",
+ "@npm//@elastic/elasticsearch",
"@npm//@types/jest",
"@npm//@types/lodash",
"@npm//@types/node",
diff --git a/packages/kbn-storybook/BUILD.bazel b/packages/kbn-storybook/BUILD.bazel
index f2a7bf25fb407..5dbe22b56c63f 100644
--- a/packages/kbn-storybook/BUILD.bazel
+++ b/packages/kbn-storybook/BUILD.bazel
@@ -32,6 +32,7 @@ RUNTIME_DEPS = [
"//packages/kbn-dev-utils",
"//packages/kbn-ui-shared-deps-npm",
"//packages/kbn-ui-shared-deps-src",
+ "//packages/kbn-utils",
"@npm//@storybook/addons",
"@npm//@storybook/api",
"@npm//@storybook/components",
@@ -47,9 +48,10 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"//packages/kbn-ui-shared-deps-npm",
"//packages/kbn-ui-shared-deps-src",
+ "//packages/kbn-utils",
"@npm//@storybook/addons",
"@npm//@storybook/api",
"@npm//@storybook/components",
diff --git a/packages/kbn-storybook/src/lib/constants.ts b/packages/kbn-storybook/src/lib/constants.ts
index 722f789fde786..69b05c94ea1b0 100644
--- a/packages/kbn-storybook/src/lib/constants.ts
+++ b/packages/kbn-storybook/src/lib/constants.ts
@@ -7,7 +7,7 @@
*/
import { resolve } from 'path';
-import { REPO_ROOT as KIBANA_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT as KIBANA_ROOT } from '@kbn/utils';
export const REPO_ROOT = KIBANA_ROOT;
export const ASSET_DIR = resolve(KIBANA_ROOT, 'built_assets/storybook');
diff --git a/packages/kbn-telemetry-tools/BUILD.bazel b/packages/kbn-telemetry-tools/BUILD.bazel
index 1183de2586424..d2ea3a704f154 100644
--- a/packages/kbn-telemetry-tools/BUILD.bazel
+++ b/packages/kbn-telemetry-tools/BUILD.bazel
@@ -38,8 +38,9 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"//packages/kbn-utility-types",
+ "@npm//tslib",
"@npm//@types/glob",
"@npm//@types/jest",
"@npm//@types/listr",
diff --git a/packages/kbn-test/BUILD.bazel b/packages/kbn-test/BUILD.bazel
index 1d1d95d639861..eae0fe2cdf5dc 100644
--- a/packages/kbn-test/BUILD.bazel
+++ b/packages/kbn-test/BUILD.bazel
@@ -44,11 +44,13 @@ RUNTIME_DEPS = [
"@npm//axios",
"@npm//@babel/traverse",
"@npm//chance",
+ "@npm//dedent",
"@npm//del",
"@npm//enzyme",
"@npm//execa",
"@npm//exit-hook",
"@npm//form-data",
+ "@npm//getopts",
"@npm//globby",
"@npm//he",
"@npm//history",
@@ -59,6 +61,7 @@ RUNTIME_DEPS = [
"@npm//@jest/reporters",
"@npm//joi",
"@npm//mustache",
+ "@npm//normalize-path",
"@npm//parse-link-header",
"@npm//prettier",
"@npm//react-dom",
@@ -72,13 +75,17 @@ RUNTIME_DEPS = [
]
TYPES_DEPS = [
- "//packages/kbn-dev-utils",
+ "//packages/kbn-dev-utils:npm_module_types",
"//packages/kbn-i18n-react:npm_module_types",
+ "//packages/kbn-std",
"//packages/kbn-utils",
"@npm//@elastic/elasticsearch",
+ "@npm//axios",
"@npm//elastic-apm-node",
"@npm//del",
+ "@npm//exit-hook",
"@npm//form-data",
+ "@npm//getopts",
"@npm//jest",
"@npm//jest-cli",
"@npm//jest-snapshot",
@@ -86,6 +93,7 @@ TYPES_DEPS = [
"@npm//rxjs",
"@npm//xmlbuilder",
"@npm//@types/chance",
+ "@npm//@types/dedent",
"@npm//@types/enzyme",
"@npm//@types/he",
"@npm//@types/history",
@@ -93,6 +101,7 @@ TYPES_DEPS = [
"@npm//@types/joi",
"@npm//@types/lodash",
"@npm//@types/mustache",
+ "@npm//@types/normalize-path",
"@npm//@types/node",
"@npm//@types/parse-link-header",
"@npm//@types/prettier",
diff --git a/packages/kbn-test/src/es/es_test_config.ts b/packages/kbn-test/src/es/es_test_config.ts
index db5d705710a75..70000c8068e9f 100644
--- a/packages/kbn-test/src/es/es_test_config.ts
+++ b/packages/kbn-test/src/es/es_test_config.ts
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-import { kibanaPackageJson as pkg } from '@kbn/dev-utils';
+import { kibanaPackageJson as pkg } from '@kbn/utils';
import Url from 'url';
import { adminTestUser } from '../kbn';
diff --git a/packages/kbn-test/src/failed_tests_reporter/buildkite_metadata.ts b/packages/kbn-test/src/failed_tests_reporter/buildkite_metadata.ts
new file mode 100644
index 0000000000000..d63f0166390cb
--- /dev/null
+++ b/packages/kbn-test/src/failed_tests_reporter/buildkite_metadata.ts
@@ -0,0 +1,38 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+export interface BuildkiteMetadata {
+ buildId?: string;
+ jobId?: string;
+ url?: string;
+ jobName?: string;
+ jobUrl?: string;
+}
+
+export function getBuildkiteMetadata(): BuildkiteMetadata {
+ // Buildkite steps that use `parallelism` need a numerical suffix added to identify them
+ // We should also increment the number by one, since it's 0-based
+ const jobNumberSuffix = process.env.BUILDKITE_PARALLEL_JOB
+ ? ` #${parseInt(process.env.BUILDKITE_PARALLEL_JOB, 10) + 1}`
+ : '';
+
+ const buildUrl = process.env.BUILDKITE_BUILD_URL;
+ const jobUrl = process.env.BUILDKITE_JOB_ID
+ ? `${buildUrl}#${process.env.BUILDKITE_JOB_ID}`
+ : undefined;
+
+ return {
+    buildId: process.env.BUILDKITE_BUILD_ID,
+ jobId: process.env.BUILDKITE_JOB_ID,
+ url: buildUrl,
+ jobUrl,
+ jobName: process.env.BUILDKITE_LABEL
+ ? `${process.env.BUILDKITE_LABEL}${jobNumberSuffix}`
+ : undefined,
+ };
+}
diff --git a/packages/kbn-test/src/failed_tests_reporter/github_api.ts b/packages/kbn-test/src/failed_tests_reporter/github_api.ts
index adaae11b7aa16..bb7570225a013 100644
--- a/packages/kbn-test/src/failed_tests_reporter/github_api.ts
+++ b/packages/kbn-test/src/failed_tests_reporter/github_api.ts
@@ -42,6 +42,7 @@ export class GithubApi {
private readonly token: string | undefined;
private readonly dryRun: boolean;
private readonly x: AxiosInstance;
+ private requestCount: number = 0;
/**
* Create a GithubApi helper object, if token is undefined requests won't be
@@ -68,6 +69,10 @@ export class GithubApi {
});
}
+ getRequestCount() {
+ return this.requestCount;
+ }
+
private failedTestIssuesPageCache: {
pages: GithubIssue[][];
nextRequest: RequestOptions | undefined;
@@ -191,53 +196,50 @@ export class GithubApi {
}> {
const executeRequest = !this.dryRun || options.safeForDryRun;
const maxAttempts = options.maxAttempts || 5;
- const attempt = options.attempt || 1;
-
- this.log.verbose('Github API', executeRequest ? 'Request' : 'Dry Run', options);
-
- if (!executeRequest) {
- return {
- status: 200,
- statusText: 'OK',
- headers: {},
- data: dryRunResponse,
- };
- }
- try {
- return await this.x.request(options);
- } catch (error) {
- const unableToReachGithub = isAxiosRequestError(error);
- const githubApiFailed = isAxiosResponseError(error) && error.response.status >= 500;
- const errorResponseLog =
- isAxiosResponseError(error) &&
- `[${error.config.method} ${error.config.url}] ${error.response.status} ${error.response.statusText} Error`;
+ let attempt = 0;
+ while (true) {
+ attempt += 1;
+ this.log.verbose('Github API', executeRequest ? 'Request' : 'Dry Run', options);
+
+ if (!executeRequest) {
+ return {
+ status: 200,
+ statusText: 'OK',
+ headers: {},
+ data: dryRunResponse,
+ };
+ }
- if ((unableToReachGithub || githubApiFailed) && attempt < maxAttempts) {
- const waitMs = 1000 * attempt;
+ try {
+ this.requestCount += 1;
+ return await this.x.request(options);
+ } catch (error) {
+ const unableToReachGithub = isAxiosRequestError(error);
+ const githubApiFailed = isAxiosResponseError(error) && error.response.status >= 500;
+ const errorResponseLog =
+ isAxiosResponseError(error) &&
+ `[${error.config.method} ${error.config.url}] ${error.response.status} ${error.response.statusText} Error`;
+
+ if ((unableToReachGithub || githubApiFailed) && attempt < maxAttempts) {
+ const waitMs = 1000 * attempt;
+
+ if (errorResponseLog) {
+ this.log.error(`${errorResponseLog}: waiting ${waitMs}ms to retry`);
+ } else {
+ this.log.error(`Unable to reach github, waiting ${waitMs}ms to retry`);
+ }
+
+ await new Promise((resolve) => setTimeout(resolve, waitMs));
+ continue;
+ }
if (errorResponseLog) {
- this.log.error(`${errorResponseLog}: waiting ${waitMs}ms to retry`);
- } else {
- this.log.error(`Unable to reach github, waiting ${waitMs}ms to retry`);
+ throw new Error(`${errorResponseLog}: ${JSON.stringify(error.response.data)}`);
}
- await new Promise((resolve) => setTimeout(resolve, waitMs));
- return await this.request(
- {
- ...options,
- maxAttempts,
- attempt: attempt + 1,
- },
- dryRunResponse
- );
+ throw error;
}
-
- if (errorResponseLog) {
- throw new Error(`${errorResponseLog}: ${JSON.stringify(error.response.data)}`);
- }
-
- throw error;
}
}
}
diff --git a/packages/kbn-test/src/failed_tests_reporter/report_failures_to_file.ts b/packages/kbn-test/src/failed_tests_reporter/report_failures_to_file.ts
index e481da019945c..33dab240ec8b4 100644
--- a/packages/kbn-test/src/failed_tests_reporter/report_failures_to_file.ts
+++ b/packages/kbn-test/src/failed_tests_reporter/report_failures_to_file.ts
@@ -14,6 +14,7 @@ import { ToolingLog } from '@kbn/dev-utils';
import { REPO_ROOT } from '@kbn/utils';
import { escape } from 'he';
+import { BuildkiteMetadata } from './buildkite_metadata';
import { TestFailure } from './get_failures';
const findScreenshots = (dirPath: string, allScreenshots: string[] = []) => {
@@ -37,7 +38,11 @@ const findScreenshots = (dirPath: string, allScreenshots: string[] = []) => {
return allScreenshots;
};
-export function reportFailuresToFile(log: ToolingLog, failures: TestFailure[]) {
+export function reportFailuresToFile(
+ log: ToolingLog,
+ failures: TestFailure[],
+ bkMeta: BuildkiteMetadata
+) {
if (!failures?.length) {
return;
}
@@ -76,28 +81,15 @@ export function reportFailuresToFile(log: ToolingLog, failures: TestFailure[]) {
.flat()
.join('\n');
- // Buildkite steps that use `parallelism` need a numerical suffix added to identify them
- // We should also increment the number by one, since it's 0-based
- const jobNumberSuffix = process.env.BUILDKITE_PARALLEL_JOB
- ? ` #${parseInt(process.env.BUILDKITE_PARALLEL_JOB, 10) + 1}`
- : '';
-
- const buildUrl = process.env.BUILDKITE_BUILD_URL || '';
- const jobUrl = process.env.BUILDKITE_JOB_ID
- ? `${buildUrl}#${process.env.BUILDKITE_JOB_ID}`
- : '';
-
const failureJSON = JSON.stringify(
{
...failure,
hash,
- buildId: process.env.BUJILDKITE_BUILD_ID || '',
- jobId: process.env.BUILDKITE_JOB_ID || '',
- url: buildUrl,
- jobUrl,
- jobName: process.env.BUILDKITE_LABEL
- ? `${process.env.BUILDKITE_LABEL}${jobNumberSuffix}`
- : '',
+ buildId: bkMeta.buildId,
+ jobId: bkMeta.jobId,
+ url: bkMeta.url,
+ jobUrl: bkMeta.jobUrl,
+ jobName: bkMeta.jobName,
},
null,
2
@@ -149,11 +141,11 @@ export function reportFailuresToFile(log: ToolingLog, failures: TestFailure[]) {
${
- jobUrl
+ bkMeta.jobUrl
? `
Buildkite Job
- ${escape(jobUrl)}
+ ${escape(bkMeta.jobUrl)}
`
: ''
diff --git a/packages/kbn-test/src/failed_tests_reporter/run_failed_tests_reporter_cli.ts b/packages/kbn-test/src/failed_tests_reporter/run_failed_tests_reporter_cli.ts
index 193bc668ce003..6ab135a6afa7e 100644
--- a/packages/kbn-test/src/failed_tests_reporter/run_failed_tests_reporter_cli.ts
+++ b/packages/kbn-test/src/failed_tests_reporter/run_failed_tests_reporter_cli.ts
@@ -9,7 +9,7 @@
import Path from 'path';
import { REPO_ROOT } from '@kbn/utils';
-import { run, createFailError, createFlagError } from '@kbn/dev-utils';
+import { run, createFailError, createFlagError, CiStatsReporter } from '@kbn/dev-utils';
import globby from 'globby';
import normalize from 'normalize-path';
@@ -22,6 +22,7 @@ import { addMessagesToReport } from './add_messages_to_report';
import { getReportMessageIter } from './report_metadata';
import { reportFailuresToEs } from './report_failures_to_es';
import { reportFailuresToFile } from './report_failures_to_file';
+import { getBuildkiteMetadata } from './buildkite_metadata';
const DEFAULT_PATTERNS = [Path.resolve(REPO_ROOT, 'target/junit/**/*.xml')];
@@ -71,108 +72,129 @@ export function runFailedTestsReporterCli() {
dryRun: !updateGithub,
});
- const buildUrl = flags['build-url'] || (updateGithub ? '' : 'http://buildUrl');
- if (typeof buildUrl !== 'string' || !buildUrl) {
- throw createFlagError('Missing --build-url or process.env.BUILD_URL');
- }
+ const bkMeta = getBuildkiteMetadata();
- const patterns = (flags._.length ? flags._ : DEFAULT_PATTERNS).map((p) =>
- normalize(Path.resolve(p))
- );
- log.info('Searching for reports at', patterns);
- const reportPaths = await globby(patterns, {
- absolute: true,
- });
+ try {
+ const buildUrl = flags['build-url'] || (updateGithub ? '' : 'http://buildUrl');
+ if (typeof buildUrl !== 'string' || !buildUrl) {
+ throw createFlagError('Missing --build-url or process.env.BUILD_URL');
+ }
- if (!reportPaths.length) {
- throw createFailError(`Unable to find any junit reports with patterns [${patterns}]`);
- }
+ const patterns = (flags._.length ? flags._ : DEFAULT_PATTERNS).map((p) =>
+ normalize(Path.resolve(p))
+ );
+ log.info('Searching for reports at', patterns);
+ const reportPaths = await globby(patterns, {
+ absolute: true,
+ });
- log.info('found', reportPaths.length, 'junit reports', reportPaths);
- const newlyCreatedIssues: Array<{
- failure: TestFailure;
- newIssue: GithubIssueMini;
- }> = [];
+ if (!reportPaths.length) {
+ throw createFailError(`Unable to find any junit reports with patterns [${patterns}]`);
+ }
- for (const reportPath of reportPaths) {
- const report = await readTestReport(reportPath);
- const messages = Array.from(getReportMessageIter(report));
- const failures = await getFailures(report);
+ log.info('found', reportPaths.length, 'junit reports', reportPaths);
+ const newlyCreatedIssues: Array<{
+ failure: TestFailure;
+ newIssue: GithubIssueMini;
+ }> = [];
- if (indexInEs) {
- await reportFailuresToEs(log, failures);
- }
+ for (const reportPath of reportPaths) {
+ const report = await readTestReport(reportPath);
+ const messages = Array.from(getReportMessageIter(report));
+ const failures = await getFailures(report);
- for (const failure of failures) {
- const pushMessage = (msg: string) => {
- messages.push({
- classname: failure.classname,
- name: failure.name,
- message: msg,
- });
- };
-
- if (failure.likelyIrrelevant) {
- pushMessage(
- 'Failure is likely irrelevant' +
- (updateGithub ? ', so an issue was not created or updated' : '')
- );
- continue;
+ if (indexInEs) {
+ await reportFailuresToEs(log, failures);
}
- let existingIssue: GithubIssueMini | undefined = await githubApi.findFailedTestIssue(
- (i) =>
- getIssueMetadata(i.body, 'test.class') === failure.classname &&
- getIssueMetadata(i.body, 'test.name') === failure.name
- );
+ for (const failure of failures) {
+ const pushMessage = (msg: string) => {
+ messages.push({
+ classname: failure.classname,
+ name: failure.name,
+ message: msg,
+ });
+ };
+
+ if (failure.likelyIrrelevant) {
+ pushMessage(
+ 'Failure is likely irrelevant' +
+ (updateGithub ? ', so an issue was not created or updated' : '')
+ );
+ continue;
+ }
- if (!existingIssue) {
- const newlyCreated = newlyCreatedIssues.find(
- ({ failure: f }) => f.classname === failure.classname && f.name === failure.name
- );
+ let existingIssue: GithubIssueMini | undefined = updateGithub
+ ? await githubApi.findFailedTestIssue(
+ (i) =>
+ getIssueMetadata(i.body, 'test.class') === failure.classname &&
+ getIssueMetadata(i.body, 'test.name') === failure.name
+ )
+ : undefined;
+
+ if (!existingIssue) {
+ const newlyCreated = newlyCreatedIssues.find(
+ ({ failure: f }) => f.classname === failure.classname && f.name === failure.name
+ );
+
+ if (newlyCreated) {
+ existingIssue = newlyCreated.newIssue;
+ }
+ }
- if (newlyCreated) {
- existingIssue = newlyCreated.newIssue;
+ if (existingIssue) {
+ const newFailureCount = await updateFailureIssue(
+ buildUrl,
+ existingIssue,
+ githubApi,
+ branch
+ );
+ const url = existingIssue.html_url;
+ failure.githubIssue = url;
+ failure.failureCount = updateGithub ? newFailureCount : newFailureCount - 1;
+ pushMessage(
+ `Test has failed ${newFailureCount - 1} times on tracked branches: ${url}`
+ );
+ if (updateGithub) {
+ pushMessage(`Updated existing issue: ${url} (fail count: ${newFailureCount})`);
+ }
+ continue;
}
- }
- if (existingIssue) {
- const newFailureCount = await updateFailureIssue(
- buildUrl,
- existingIssue,
- githubApi,
- branch
- );
- const url = existingIssue.html_url;
- failure.githubIssue = url;
- failure.failureCount = updateGithub ? newFailureCount : newFailureCount - 1;
- pushMessage(`Test has failed ${newFailureCount - 1} times on tracked branches: ${url}`);
+ const newIssue = await createFailureIssue(buildUrl, failure, githubApi, branch);
+ pushMessage('Test has not failed recently on tracked branches');
if (updateGithub) {
- pushMessage(`Updated existing issue: ${url} (fail count: ${newFailureCount})`);
+ pushMessage(`Created new issue: ${newIssue.html_url}`);
+ failure.githubIssue = newIssue.html_url;
}
- continue;
- }
-
- const newIssue = await createFailureIssue(buildUrl, failure, githubApi, branch);
- pushMessage('Test has not failed recently on tracked branches');
- if (updateGithub) {
- pushMessage(`Created new issue: ${newIssue.html_url}`);
- failure.githubIssue = newIssue.html_url;
+ newlyCreatedIssues.push({ failure, newIssue });
+ failure.failureCount = updateGithub ? 1 : 0;
}
- newlyCreatedIssues.push({ failure, newIssue });
- failure.failureCount = updateGithub ? 1 : 0;
- }
- // mutates report to include messages and writes updated report to disk
- await addMessagesToReport({
- report,
- messages,
- log,
- reportPath,
- dryRun: !flags['report-update'],
- });
+ // mutates report to include messages and writes updated report to disk
+ await addMessagesToReport({
+ report,
+ messages,
+ log,
+ reportPath,
+ dryRun: !flags['report-update'],
+ });
- reportFailuresToFile(log, failures);
+ reportFailuresToFile(log, failures, bkMeta);
+ }
+ } finally {
+ await CiStatsReporter.fromEnv(log).metrics([
+ {
+ group: 'github api request count',
+ id: `failed test reporter`,
+ value: githubApi.getRequestCount(),
+ meta: Object.fromEntries(
+ Object.entries(bkMeta).map(
+ ([k, v]) => [`buildkite${k[0].toUpperCase()}${k.slice(1)}`, v] as const
+ )
+ ),
+ },
+ ]);
}
},
{
diff --git a/packages/kbn-test/src/functional_test_runner/lib/mocha/validate_ci_group_tags.js b/packages/kbn-test/src/functional_test_runner/lib/mocha/validate_ci_group_tags.js
index 3446c5be5d4a7..4f798839d7231 100644
--- a/packages/kbn-test/src/functional_test_runner/lib/mocha/validate_ci_group_tags.js
+++ b/packages/kbn-test/src/functional_test_runner/lib/mocha/validate_ci_group_tags.js
@@ -8,7 +8,7 @@
import Path from 'path';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
/**
* Traverse the suites configured and ensure that each suite has no more than one ciGroup assigned
diff --git a/packages/kbn-test/src/functional_test_runner/lib/suite_tracker.test.ts b/packages/kbn-test/src/functional_test_runner/lib/suite_tracker.test.ts
index e87f316a100a7..53ce4c74c1388 100644
--- a/packages/kbn-test/src/functional_test_runner/lib/suite_tracker.test.ts
+++ b/packages/kbn-test/src/functional_test_runner/lib/suite_tracker.test.ts
@@ -14,7 +14,7 @@ jest.mock('@kbn/utils', () => {
return { REPO_ROOT: '/dev/null/root' };
});
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { Lifecycle } from './lifecycle';
import { SuiteTracker } from './suite_tracker';
import { Suite } from '../fake_mocha_types';
diff --git a/packages/kbn-test/src/functional_tests/lib/babel_register_for_test_plugins.js b/packages/kbn-test/src/functional_tests/lib/babel_register_for_test_plugins.js
index 03947f7e267ba..63d2b56350ba1 100644
--- a/packages/kbn-test/src/functional_tests/lib/babel_register_for_test_plugins.js
+++ b/packages/kbn-test/src/functional_tests/lib/babel_register_for_test_plugins.js
@@ -9,7 +9,7 @@
const Fs = require('fs');
const Path = require('path');
-const { REPO_ROOT: REPO_ROOT_FOLLOWING_SYMLINKS } = require('@kbn/dev-utils');
+const { REPO_ROOT: REPO_ROOT_FOLLOWING_SYMLINKS } = require('@kbn/utils');
const BASE_REPO_ROOT = Path.resolve(
Fs.realpathSync(Path.resolve(REPO_ROOT_FOLLOWING_SYMLINKS, 'package.json')),
'..'
diff --git a/packages/kbn-test/src/functional_tests/tasks.ts b/packages/kbn-test/src/functional_tests/tasks.ts
index 6dde114d3a98e..6a6c7edb98c79 100644
--- a/packages/kbn-test/src/functional_tests/tasks.ts
+++ b/packages/kbn-test/src/functional_tests/tasks.ts
@@ -9,7 +9,8 @@
import { relative } from 'path';
import * as Rx from 'rxjs';
import { startWith, switchMap, take } from 'rxjs/operators';
-import { withProcRunner, ToolingLog, REPO_ROOT, getTimeReporter } from '@kbn/dev-utils';
+import { withProcRunner, ToolingLog, getTimeReporter } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import dedent from 'dedent';
import {
diff --git a/packages/kbn-test/src/kbn_client/kbn_client_import_export.ts b/packages/kbn-test/src/kbn_client/kbn_client_import_export.ts
index 4adae7d1cd031..6da34228bbe7f 100644
--- a/packages/kbn-test/src/kbn_client/kbn_client_import_export.ts
+++ b/packages/kbn-test/src/kbn_client/kbn_client_import_export.ts
@@ -12,7 +12,8 @@ import { existsSync } from 'fs';
import Path from 'path';
import FormData from 'form-data';
-import { ToolingLog, isAxiosResponseError, createFailError, REPO_ROOT } from '@kbn/dev-utils';
+import { ToolingLog, isAxiosResponseError, createFailError } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { KbnClientRequester, uriencode, ReqOptions } from './kbn_client_requester';
import { KbnClientSavedObjects } from './kbn_client_saved_objects';
diff --git a/packages/kbn-typed-react-router-config/src/types/index.ts b/packages/kbn-typed-react-router-config/src/types/index.ts
index c1ae5afd816ee..f15fd99a02a87 100644
--- a/packages/kbn-typed-react-router-config/src/types/index.ts
+++ b/packages/kbn-typed-react-router-config/src/types/index.ts
@@ -13,97 +13,13 @@ import { RequiredKeys, ValuesType } from 'utility-types';
// import { unconst } from '../unconst';
import { NormalizePath } from './utils';
-type PathsOfRoute =
- | TRoute['path']
- | (TRoute extends { children: Route[] }
- ? AppendPath | PathsOf
- : never);
-
-export type PathsOf = TRoutes extends []
- ? never
- : TRoutes extends [Route]
- ? PathsOfRoute
- : TRoutes extends [Route, Route]
- ? PathsOfRoute | PathsOfRoute
- : TRoutes extends [Route, Route, Route]
- ? PathsOfRoute | PathsOfRoute | PathsOfRoute
- : TRoutes extends [Route, Route, Route, Route]
- ?
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- : TRoutes extends [Route, Route, Route, Route, Route]
- ?
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- : TRoutes extends [Route, Route, Route, Route, Route, Route]
- ?
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- : TRoutes extends [Route, Route, Route, Route, Route, Route, Route]
- ?
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- : TRoutes extends [Route, Route, Route, Route, Route, Route, Route, Route]
- ?
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- : TRoutes extends [Route, Route, Route, Route, Route, Route, Route, Route, Route]
- ?
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- : TRoutes extends [Route, Route, Route, Route, Route, Route, Route, Route, Route, Route]
- ?
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- : TRoutes extends [Route, Route, Route, Route, Route, Route, Route, Route, Route, Route, Route]
- ?
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- | PathsOfRoute
- : string;
+// type PathsOfRoute =
+// | TRoute['path']
+// | (TRoute extends { children: Route[] }
+// ? AppendPath | PathsOf
+// : never);
+
+export type PathsOf = keyof MapRoutes & string;
export interface RouteMatch {
route: TRoute;
@@ -347,6 +263,14 @@ type MapRoutes = TRoutes extends [Route]
// const routes = unconst([
// {
+// path: '/link-to/transaction/{transactionId}',
+// element,
+// },
+// {
+// path: '/link-to/trace/{traceId}',
+// element,
+// },
+// {
// path: '/',
// element,
// children: [
@@ -393,6 +317,10 @@ type MapRoutes = TRoutes extends [Route]
// element,
// },
// {
+// path: '/settings/agent-keys',
+// element,
+// },
+// {
// path: '/settings',
// element,
// },
@@ -430,11 +358,19 @@ type MapRoutes = TRoutes extends [Route]
// element,
// },
// {
+// path: '/services/:serviceName/transactions/view',
+// element,
+// },
+// {
+// path: '/services/:serviceName/dependencies',
+// element,
+// },
+// {
// path: '/services/:serviceName/errors',
// element,
// children: [
// {
-// path: '/:groupId',
+// path: '/services/:serviceName/errors/:groupId',
// element,
// params: t.type({
// path: t.type({
@@ -443,7 +379,7 @@ type MapRoutes = TRoutes extends [Route]
// }),
// },
// {
-// path: '/services/:serviceName',
+// path: '/services/:serviceName/errors',
// element,
// params: t.partial({
// query: t.partial({
@@ -457,15 +393,33 @@ type MapRoutes = TRoutes extends [Route]
// ],
// },
// {
-// path: '/services/:serviceName/foo',
+// path: '/services/:serviceName/metrics',
+// element,
+// },
+// {
+// path: '/services/:serviceName/nodes',
+// element,
+// children: [
+// {
+// path: '/services/{serviceName}/nodes/{serviceNodeName}/metrics',
+// element,
+// },
+// {
+// path: '/services/:serviceName/nodes',
+// element,
+// },
+// ],
+// },
+// {
+// path: '/services/:serviceName/service-map',
// element,
// },
// {
-// path: '/services/:serviceName/bar',
+// path: '/services/:serviceName/logs',
// element,
// },
// {
-// path: '/services/:serviceName/baz',
+// path: '/services/:serviceName/profiling',
// element,
// },
// {
@@ -497,6 +451,24 @@ type MapRoutes = TRoutes extends [Route]
// element,
// },
// {
+// path: '/backends',
+// element,
+// children: [
+// {
+// path: '/backends/{backendName}/overview',
+// element,
+// },
+// {
+// path: '/backends/overview',
+// element,
+// },
+// {
+// path: '/backends',
+// element,
+// },
+// ],
+// },
+// {
// path: '/',
// element,
// },
@@ -509,10 +481,11 @@ type MapRoutes = TRoutes extends [Route]
// type Routes = typeof routes;
// type Mapped = keyof MapRoutes;
+// type Paths = PathsOf;
// type Bar = ValuesType>['route']['path'];
// type Foo = OutputOf;
-// type Baz = OutputOf;
+// // type Baz = OutputOf;
// const { path }: Foo = {} as any;
@@ -520,4 +493,4 @@ type MapRoutes = TRoutes extends [Route]
// return {} as any;
// }
-// const params = _useApmParams('/*');
+// // const params = _useApmParams('/services/:serviceName/nodes/*');
diff --git a/src/cli/serve/integration_tests/invalid_config.test.ts b/src/cli/serve/integration_tests/invalid_config.test.ts
index 2de902582a548..ca051f37a816e 100644
--- a/src/cli/serve/integration_tests/invalid_config.test.ts
+++ b/src/cli/serve/integration_tests/invalid_config.test.ts
@@ -8,7 +8,7 @@
import { spawnSync } from 'child_process';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
const INVALID_CONFIG_PATH = require.resolve('./__fixtures__/invalid_config.yml');
diff --git a/src/core/public/apm_system.test.ts b/src/core/public/apm_system.test.ts
index f62421cb55abc..842d5de7e5afc 100644
--- a/src/core/public/apm_system.test.ts
+++ b/src/core/public/apm_system.test.ts
@@ -9,6 +9,7 @@
jest.mock('@elastic/apm-rum');
import type { DeeplyMockedKeys, MockedKeys } from '@kbn/utility-types/jest';
import { init, apm } from '@elastic/apm-rum';
+import type { Transaction } from '@elastic/apm-rum';
import { ApmSystem } from './apm_system';
import { Subject } from 'rxjs';
import { InternalApplicationStart } from './application/types';
diff --git a/src/core/public/apm_system.ts b/src/core/public/apm_system.ts
index f15a317f9f934..2231f394381f0 100644
--- a/src/core/public/apm_system.ts
+++ b/src/core/public/apm_system.ts
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-import type { ApmBase, AgentConfigOptions } from '@elastic/apm-rum';
+import type { ApmBase, AgentConfigOptions, Transaction } from '@elastic/apm-rum';
import { modifyUrl } from '@kbn/std';
import { CachedResourceObserver } from './apm_resource_counter';
import type { InternalApplicationStart } from './application';
diff --git a/src/core/public/doc_links/doc_links_service.ts b/src/core/public/doc_links/doc_links_service.ts
index 692367cd0f580..fed3aa3093166 100644
--- a/src/core/public/doc_links/doc_links_service.ts
+++ b/src/core/public/doc_links/doc_links_service.ts
@@ -486,6 +486,7 @@ export class DocLinksService {
hdfsRepo: `${PLUGIN_DOCS}repository-hdfs.html`,
s3Repo: `${PLUGIN_DOCS}repository-s3.html`,
snapshotRestoreRepos: `${PLUGIN_DOCS}repository.html`,
+ mapperSize: `${PLUGIN_DOCS}mapper-size-usage.html`,
},
snapshotRestore: {
guide: `${ELASTICSEARCH_DOCS}snapshot-restore.html`,
@@ -874,7 +875,14 @@ export interface DocLinksStart {
}>;
readonly watcher: Record;
readonly ccs: Record;
- readonly plugins: Record;
+ readonly plugins: {
+ azureRepo: string;
+ gcsRepo: string;
+ hdfsRepo: string;
+ s3Repo: string;
+ snapshotRestoreRepos: string;
+ mapperSize: string;
+ };
readonly snapshotRestore: Record;
readonly ingest: Record;
readonly fleet: Readonly<{
diff --git a/src/core/public/i18n/__snapshots__/i18n_service.test.tsx.snap b/src/core/public/i18n/__snapshots__/i18n_service.test.tsx.snap
index e93ef34c38025..1c394112a404c 100644
--- a/src/core/public/i18n/__snapshots__/i18n_service.test.tsx.snap
+++ b/src/core/public/i18n/__snapshots__/i18n_service.test.tsx.snap
@@ -98,6 +98,7 @@ exports[`#start() returns \`Context\` component 1`] = `
"euiDataGridToolbar.fullScreenButtonActive": "Exit full screen",
"euiDatePopoverButton.invalidTitle": [Function],
"euiDatePopoverButton.outdatedTitle": [Function],
+ "euiErrorBoundary.error": "Error",
"euiFieldPassword.maskPassword": "Mask password",
"euiFieldPassword.showPassword": "Show password as plain text. Note: this will visually expose your password on the screen.",
"euiFilePicker.clearSelectedFiles": "Clear selected files",
@@ -218,7 +219,7 @@ exports[`#start() returns \`Context\` component 1`] = `
"euiStyleSelector.labelExpanded": "Expanded density",
"euiStyleSelector.labelNormal": "Normal density",
"euiSuperDatePicker.showDatesButtonLabel": "Show dates",
- "euiSuperSelect.screenReaderAnnouncement": [Function],
+ "euiSuperSelect.screenReaderAnnouncement": "You are in a form selector and must select a single option. Use the up and down keys to navigate or escape to close.",
"euiSuperSelectControl.selectAnOption": [Function],
"euiSuperUpdateButton.cannotUpdateTooltip": "Cannot update",
"euiSuperUpdateButton.clickToApplyTooltip": "Click to apply",
diff --git a/src/core/public/i18n/i18n_eui_mapping.tsx b/src/core/public/i18n/i18n_eui_mapping.tsx
index 7c4d39fa2b11a..e3357d138e794 100644
--- a/src/core/public/i18n/i18n_eui_mapping.tsx
+++ b/src/core/public/i18n/i18n_eui_mapping.tsx
@@ -663,6 +663,10 @@ export const getEuiContextMapping = (): EuiTokensObject => {
defaultMessage: '+ {messagesLength} more',
values: { messagesLength },
}),
+ 'euiErrorBoundary.error': i18n.translate('core.euiErrorBoundary.error', {
+ defaultMessage: 'Error',
+ description: 'Error boundary for uncaught exceptions when rendering part of the application',
+ }),
'euiNotificationEventMessages.accordionAriaLabelButtonText': ({
messagesLength,
eventName,
@@ -1046,12 +1050,13 @@ export const getEuiContextMapping = (): EuiTokensObject => {
description: 'Displayed in a button that shows date picker',
}
),
- 'euiSuperSelect.screenReaderAnnouncement': ({ optionsCount }: EuiValues) =>
- i18n.translate('core.euiSuperSelect.screenReaderAnnouncement', {
+ 'euiSuperSelect.screenReaderAnnouncement': i18n.translate(
+ 'core.euiSuperSelect.screenReaderAnnouncement',
+ {
defaultMessage:
- 'You are in a form selector of {optionsCount} items and must select a single option. Use the up and down keys to navigate or escape to close.',
- values: { optionsCount },
- }),
+ 'You are in a form selector and must select a single option. Use the up and down keys to navigate or escape to close.',
+ }
+ ),
'euiSuperSelectControl.selectAnOption': ({ selectedValue }: EuiValues) =>
i18n.translate('core.euiSuperSelectControl.selectAnOption', {
defaultMessage: 'Select an option: {selectedValue}, is selected',
diff --git a/src/core/public/public.api.md b/src/core/public/public.api.md
index 08d41ab1301b0..63e0898b5fb90 100644
--- a/src/core/public/public.api.md
+++ b/src/core/public/public.api.md
@@ -773,7 +773,14 @@ export interface DocLinksStart {
}>;
readonly watcher: Record;
readonly ccs: Record;
- readonly plugins: Record;
+ readonly plugins: {
+ azureRepo: string;
+ gcsRepo: string;
+ hdfsRepo: string;
+ s3Repo: string;
+ snapshotRestoreRepos: string;
+ mapperSize: string;
+ };
readonly snapshotRestore: Record;
readonly ingest: Record;
readonly fleet: Readonly<{
diff --git a/src/core/server/capabilities/integration_tests/capabilities_service.test.ts b/src/core/server/capabilities/integration_tests/capabilities_service.test.ts
index 2e80fbb9d20c0..c1f6ffb5add77 100644
--- a/src/core/server/capabilities/integration_tests/capabilities_service.test.ts
+++ b/src/core/server/capabilities/integration_tests/capabilities_service.test.ts
@@ -7,7 +7,7 @@
*/
import supertest from 'supertest';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { HttpService, InternalHttpServicePreboot, InternalHttpServiceSetup } from '../../http';
import { contextServiceMock } from '../../context/context_service.mock';
import { executionContextServiceMock } from '../../execution_context/execution_context_service.mock';
diff --git a/src/core/server/core_context.mock.ts b/src/core/server/core_context.mock.ts
index ddb87d31383c8..4d7b4e1ba5548 100644
--- a/src/core/server/core_context.mock.ts
+++ b/src/core/server/core_context.mock.ts
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import type { DeeplyMockedKeys } from '@kbn/utility-types/jest';
import { CoreContext } from './core_context';
import { Env, IConfigService } from './config';
diff --git a/src/core/server/elasticsearch/client/configure_client.test.ts b/src/core/server/elasticsearch/client/configure_client.test.ts
index 7988e81045d17..f252993415afa 100644
--- a/src/core/server/elasticsearch/client/configure_client.test.ts
+++ b/src/core/server/elasticsearch/client/configure_client.test.ts
@@ -6,21 +6,16 @@
* Side Public License, v 1.
*/
-import { Buffer } from 'buffer';
-import { Readable } from 'stream';
-
-import { errors } from '@elastic/elasticsearch';
-import type {
- TransportRequestOptions,
- TransportRequestParams,
- DiagnosticResult,
- RequestBody,
-} from '@elastic/elasticsearch';
+jest.mock('./log_query_and_deprecation.ts', () => ({
+ __esModule: true,
+ instrumentEsQueryAndDeprecationLogger: jest.fn(),
+}));
import { parseClientOptionsMock, ClientMock } from './configure_client.test.mocks';
import { loggingSystemMock } from '../../logging/logging_system.mock';
import type { ElasticsearchClientConfig } from './client_config';
import { configureClient } from './configure_client';
+import { instrumentEsQueryAndDeprecationLogger } from './log_query_and_deprecation';
const createFakeConfig = (
parts: Partial = {}
@@ -36,40 +31,9 @@ const createFakeClient = () => {
const client = new actualEs.Client({
nodes: ['http://localhost'], // Enforcing `nodes` because it's mandatory
});
- jest.spyOn(client.diagnostic, 'on');
return client;
};
-const createApiResponse = ({
- body,
- statusCode = 200,
- headers = {},
- warnings = [],
- params,
- requestOptions = {},
-}: {
- body: T;
- statusCode?: number;
- headers?: Record;
- warnings?: string[];
- params?: TransportRequestParams;
- requestOptions?: TransportRequestOptions;
-}): DiagnosticResult => {
- return {
- body,
- statusCode,
- headers,
- warnings,
- meta: {
- body,
- request: {
- params: params!,
- options: requestOptions,
- } as any,
- } as any,
- };
-};
-
describe('configureClient', () => {
let logger: ReturnType;
let config: ElasticsearchClientConfig;
@@ -84,6 +48,7 @@ describe('configureClient', () => {
afterEach(() => {
parseClientOptionsMock.mockReset();
ClientMock.mockReset();
+ jest.clearAllMocks();
});
it('calls `parseClientOptions` with the correct parameters', () => {
@@ -113,366 +78,14 @@ describe('configureClient', () => {
expect(client).toBe(ClientMock.mock.results[0].value);
});
- it('listens to client on `response` events', () => {
+ it('calls instrumentEsQueryAndDeprecationLogger', () => {
const client = configureClient(config, { logger, type: 'test', scoped: false });
- expect(client.diagnostic.on).toHaveBeenCalledTimes(1);
- expect(client.diagnostic.on).toHaveBeenCalledWith('response', expect.any(Function));
- });
-
- describe('Client logging', () => {
- function createResponseWithBody(body?: RequestBody) {
- return createApiResponse({
- body: {},
- statusCode: 200,
- params: {
- method: 'GET',
- path: '/foo',
- querystring: { hello: 'dolly' },
- body,
- },
- });
- }
-
- describe('logs each query', () => {
- it('creates a query logger context based on the `type` parameter', () => {
- configureClient(createFakeConfig(), { logger, type: 'test123' });
- expect(logger.get).toHaveBeenCalledWith('query', 'test123');
- });
-
- it('when request body is an object', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- const response = createResponseWithBody({
- seq_no_primary_term: true,
- query: {
- term: { user: 'kimchy' },
- },
- });
-
- client.diagnostic.emit('response', null, response);
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "200
- GET /foo?hello=dolly
- {\\"seq_no_primary_term\\":true,\\"query\\":{\\"term\\":{\\"user\\":\\"kimchy\\"}}}",
- undefined,
- ],
- ]
- `);
- });
-
- it('when request body is a string', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- const response = createResponseWithBody(
- JSON.stringify({
- seq_no_primary_term: true,
- query: {
- term: { user: 'kimchy' },
- },
- })
- );
-
- client.diagnostic.emit('response', null, response);
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "200
- GET /foo?hello=dolly
- {\\"seq_no_primary_term\\":true,\\"query\\":{\\"term\\":{\\"user\\":\\"kimchy\\"}}}",
- undefined,
- ],
- ]
- `);
- });
-
- it('when request body is a buffer', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- const response = createResponseWithBody(
- Buffer.from(
- JSON.stringify({
- seq_no_primary_term: true,
- query: {
- term: { user: 'kimchy' },
- },
- })
- )
- );
-
- client.diagnostic.emit('response', null, response);
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "200
- GET /foo?hello=dolly
- [buffer]",
- undefined,
- ],
- ]
- `);
- });
-
- it('when request body is a readable stream', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- const response = createResponseWithBody(
- Readable.from(
- JSON.stringify({
- seq_no_primary_term: true,
- query: {
- term: { user: 'kimchy' },
- },
- })
- )
- );
-
- client.diagnostic.emit('response', null, response);
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "200
- GET /foo?hello=dolly
- [stream]",
- undefined,
- ],
- ]
- `);
- });
-
- it('when request body is not defined', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- const response = createResponseWithBody();
-
- client.diagnostic.emit('response', null, response);
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "200
- GET /foo?hello=dolly",
- undefined,
- ],
- ]
- `);
- });
-
- it('properly encode queries', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- const response = createApiResponse({
- body: {},
- statusCode: 200,
- params: {
- method: 'GET',
- path: '/foo',
- querystring: { city: 'Münich' },
- },
- });
-
- client.diagnostic.emit('response', null, response);
-
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "200
- GET /foo?city=M%C3%BCnich",
- undefined,
- ],
- ]
- `);
- });
-
- it('logs queries even in case of errors', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- const response = createApiResponse({
- statusCode: 500,
- body: {
- error: {
- type: 'internal server error',
- },
- },
- params: {
- method: 'GET',
- path: '/foo',
- querystring: { hello: 'dolly' },
- body: {
- seq_no_primary_term: true,
- query: {
- term: { user: 'kimchy' },
- },
- },
- },
- });
- client.diagnostic.emit('response', new errors.ResponseError(response), response);
-
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "500
- GET /foo?hello=dolly
- {\\"seq_no_primary_term\\":true,\\"query\\":{\\"term\\":{\\"user\\":\\"kimchy\\"}}} [internal server error]: internal server error",
- undefined,
- ],
- ]
- `);
- });
-
- it('logs debug when the client emits an @elastic/elasticsearch error', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- const response = createApiResponse({ body: {} });
- client.diagnostic.emit('response', new errors.TimeoutError('message', response), response);
-
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "[TimeoutError]: message",
- undefined,
- ],
- ]
- `);
- });
-
- it('logs debug when the client emits an ResponseError returned by elasticsearch', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- const response = createApiResponse({
- statusCode: 400,
- headers: {},
- params: {
- method: 'GET',
- path: '/_path',
- querystring: { hello: 'dolly' },
- },
- body: {
- error: {
- type: 'illegal_argument_exception',
- reason: 'request [/_path] contains unrecognized parameter: [name]',
- },
- },
- });
- client.diagnostic.emit('response', new errors.ResponseError(response), response);
-
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "400
- GET /_path?hello=dolly [illegal_argument_exception]: request [/_path] contains unrecognized parameter: [name]",
- undefined,
- ],
- ]
- `);
- });
-
- it('logs default error info when the error response body is empty', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- let response: DiagnosticResult = createApiResponse({
- statusCode: 400,
- headers: {},
- params: {
- method: 'GET',
- path: '/_path',
- },
- body: {
- error: {},
- },
- });
- client.diagnostic.emit('response', new errors.ResponseError(response), response);
-
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "400
- GET /_path [undefined]: {\\"error\\":{}}",
- undefined,
- ],
- ]
- `);
-
- logger.debug.mockClear();
-
- response = createApiResponse({
- statusCode: 400,
- headers: {},
- params: {
- method: 'GET',
- path: '/_path',
- },
- body: undefined,
- });
- client.diagnostic.emit('response', new errors.ResponseError(response), response);
-
- expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
- Array [
- Array [
- "400
- GET /_path [undefined]: Response Error",
- undefined,
- ],
- ]
- `);
- });
-
- it('adds meta information to logs', () => {
- const client = configureClient(createFakeConfig(), { logger, type: 'test', scoped: false });
-
- let response = createApiResponse({
- statusCode: 400,
- headers: {},
- params: {
- method: 'GET',
- path: '/_path',
- },
- requestOptions: {
- opaqueId: 'opaque-id',
- },
- body: {
- error: {},
- },
- });
- client.diagnostic.emit('response', null, response);
-
- expect(loggingSystemMock.collect(logger).debug[0][1]).toMatchInlineSnapshot(`
- Object {
- "http": Object {
- "request": Object {
- "id": "opaque-id",
- },
- },
- }
- `);
-
- logger.debug.mockClear();
-
- response = createApiResponse({
- statusCode: 400,
- headers: {},
- params: {
- method: 'GET',
- path: '/_path',
- },
- requestOptions: {
- opaqueId: 'opaque-id',
- },
- body: {} as any,
- });
- client.diagnostic.emit('response', new errors.ResponseError(response), response);
-
- expect(loggingSystemMock.collect(logger).debug[0][1]).toMatchInlineSnapshot(`
- Object {
- "http": Object {
- "request": Object {
- "id": "opaque-id",
- },
- },
- }
- `);
- });
+ expect(instrumentEsQueryAndDeprecationLogger).toHaveBeenCalledTimes(1);
+ expect(instrumentEsQueryAndDeprecationLogger).toHaveBeenCalledWith({
+ logger,
+ client,
+ type: 'test',
});
});
});
diff --git a/src/core/server/elasticsearch/client/configure_client.ts b/src/core/server/elasticsearch/client/configure_client.ts
index fc8a06660cc5e..e48a36fa4fe58 100644
--- a/src/core/server/elasticsearch/client/configure_client.ts
+++ b/src/core/server/elasticsearch/client/configure_client.ts
@@ -6,21 +6,17 @@
* Side Public License, v 1.
*/
-import { Buffer } from 'buffer';
-import { stringify } from 'querystring';
-import { Client, errors, Transport, HttpConnection } from '@elastic/elasticsearch';
+import { Client, Transport, HttpConnection } from '@elastic/elasticsearch';
import type { KibanaClient } from '@elastic/elasticsearch/lib/api/kibana';
import type {
TransportRequestParams,
TransportRequestOptions,
TransportResult,
- DiagnosticResult,
- RequestBody,
} from '@elastic/elasticsearch';
import { Logger } from '../../logging';
import { parseClientOptions, ElasticsearchClientConfig } from './client_config';
-import type { ElasticsearchErrorDetails } from './types';
+import { instrumentEsQueryAndDeprecationLogger } from './log_query_and_deprecation';
const noop = () => undefined;
@@ -61,91 +57,8 @@ export const configureClient = (
Transport: KibanaTransport,
Connection: HttpConnection,
});
- addLogging(client, logger.get('query', type));
- return client as KibanaClient;
-};
-
-const convertQueryString = (qs: string | Record | undefined): string => {
- if (qs === undefined || typeof qs === 'string') {
- return qs ?? '';
- }
- return stringify(qs);
-};
-
-function ensureString(body: RequestBody): string {
- if (typeof body === 'string') return body;
- if (Buffer.isBuffer(body)) return '[buffer]';
- if ('readable' in body && body.readable && typeof body._read === 'function') return '[stream]';
- return JSON.stringify(body);
-}
-
-/**
- * Returns a debug message from an Elasticsearch error in the following format:
- * [error type] error reason
- */
-export function getErrorMessage(error: errors.ElasticsearchClientError): string {
- if (error instanceof errors.ResponseError) {
- const errorBody = error.meta.body as ElasticsearchErrorDetails;
- return `[${errorBody?.error?.type}]: ${errorBody?.error?.reason ?? error.message}`;
- }
- return `[${error.name}]: ${error.message}`;
-}
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type });
-/**
- * returns a string in format:
- *
- * status code
- * method URL
- * request body
- *
- * so it could be copy-pasted into the Dev console
- */
-function getResponseMessage(event: DiagnosticResult): string {
- const errorMeta = getRequestDebugMeta(event);
- const body = errorMeta.body ? `\n${errorMeta.body}` : '';
- return `${errorMeta.statusCode}\n${errorMeta.method} ${errorMeta.url}${body}`;
-}
-
-/**
- * Returns stringified debug information from an Elasticsearch request event
- * useful for logging in case of an unexpected failure.
- */
-export function getRequestDebugMeta(event: DiagnosticResult): {
- url: string;
- body: string;
- statusCode: number | null;
- method: string;
-} {
- const params = event.meta.request.params;
- // definition is wrong, `params.querystring` can be either a string or an object
- const querystring = convertQueryString(params.querystring);
- return {
- url: `${params.path}${querystring ? `?${querystring}` : ''}`,
- body: params.body ? `${ensureString(params.body)}` : '',
- method: params.method,
- statusCode: event.statusCode!,
- };
-}
-
-const addLogging = (client: Client, logger: Logger) => {
- client.diagnostic.on('response', (error, event) => {
- if (event) {
- const opaqueId = event.meta.request.options.opaqueId;
- const meta = opaqueId
- ? {
- http: { request: { id: event.meta.request.options.opaqueId } },
- }
- : undefined; // do not clutter logs if opaqueId is not present
- if (error) {
- if (error instanceof errors.ResponseError) {
- logger.debug(`${getResponseMessage(event)} ${getErrorMessage(error)}`, meta);
- } else {
- logger.debug(getErrorMessage(error), meta);
- }
- } else {
- logger.debug(getResponseMessage(event), meta);
- }
- }
- });
+ return client as KibanaClient;
};
diff --git a/src/core/server/elasticsearch/client/index.ts b/src/core/server/elasticsearch/client/index.ts
index 2cf5a0229a489..123c498f1ee21 100644
--- a/src/core/server/elasticsearch/client/index.ts
+++ b/src/core/server/elasticsearch/client/index.ts
@@ -21,5 +21,6 @@ export type { IScopedClusterClient } from './scoped_cluster_client';
export type { ElasticsearchClientConfig } from './client_config';
export { ClusterClient } from './cluster_client';
export type { IClusterClient, ICustomClusterClient } from './cluster_client';
-export { configureClient, getRequestDebugMeta, getErrorMessage } from './configure_client';
+export { configureClient } from './configure_client';
+export { getRequestDebugMeta, getErrorMessage } from './log_query_and_deprecation';
export { retryCallCluster, migrationRetryCallCluster } from './retry_call_cluster';
diff --git a/src/core/server/elasticsearch/client/log_query_and_deprecation.test.ts b/src/core/server/elasticsearch/client/log_query_and_deprecation.test.ts
new file mode 100644
index 0000000000000..30d5d8b87ed1c
--- /dev/null
+++ b/src/core/server/elasticsearch/client/log_query_and_deprecation.test.ts
@@ -0,0 +1,624 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { Buffer } from 'buffer';
+import { Readable } from 'stream';
+
+import {
+ Client,
+ ConnectionRequestParams,
+ errors,
+ TransportRequestOptions,
+ TransportRequestParams,
+} from '@elastic/elasticsearch';
+import type { DiagnosticResult, RequestBody } from '@elastic/elasticsearch';
+
+import { parseClientOptionsMock, ClientMock } from './configure_client.test.mocks';
+import { loggingSystemMock } from '../../logging/logging_system.mock';
+import { instrumentEsQueryAndDeprecationLogger } from './log_query_and_deprecation';
+
+const createApiResponse = ({
+ body,
+ statusCode = 200,
+ headers = {},
+ warnings = null,
+ params,
+ requestOptions = {},
+}: {
+ body: T;
+ statusCode?: number;
+ headers?: Record;
+ warnings?: string[] | null;
+ params?: TransportRequestParams | ConnectionRequestParams;
+ requestOptions?: TransportRequestOptions;
+}): DiagnosticResult => {
+ return {
+ body,
+ statusCode,
+ headers,
+ warnings,
+ meta: {
+ body,
+ request: {
+ params: params!,
+ options: requestOptions,
+ } as any,
+ } as any,
+ };
+};
+
+const createFakeClient = () => {
+ const actualEs = jest.requireActual('@elastic/elasticsearch');
+ const client = new actualEs.Client({
+ nodes: ['http://localhost'], // Enforcing `nodes` because it's mandatory
+ });
+ jest.spyOn(client.diagnostic, 'on');
+ return client as Client;
+};
+
+describe('instrumentQueryAndDeprecationLogger', () => {
+ let logger: ReturnType;
+ const client = createFakeClient();
+
+ beforeEach(() => {
+ logger = loggingSystemMock.createLogger();
+ parseClientOptionsMock.mockReturnValue({});
+ ClientMock.mockImplementation(() => createFakeClient());
+ });
+
+ afterEach(() => {
+ parseClientOptionsMock.mockReset();
+ ClientMock.mockReset();
+ jest.clearAllMocks();
+ });
+
+ function createResponseWithBody(body?: RequestBody) {
+ return createApiResponse({
+ body: {},
+ statusCode: 200,
+ params: {
+ method: 'GET',
+ path: '/foo',
+ querystring: { hello: 'dolly' },
+ body,
+ },
+ });
+ }
+
+ it('creates a query logger context based on the `type` parameter', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test123' });
+ expect(logger.get).toHaveBeenCalledWith('query', 'test123');
+ });
+
+ describe('logs each query', () => {
+ it('when request body is an object', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createResponseWithBody({
+ seq_no_primary_term: true,
+ query: {
+ term: { user: 'kimchy' },
+ },
+ });
+
+ client.diagnostic.emit('response', null, response);
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "200
+ GET /foo?hello=dolly
+ {\\"seq_no_primary_term\\":true,\\"query\\":{\\"term\\":{\\"user\\":\\"kimchy\\"}}}",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('when request body is a string', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createResponseWithBody(
+ JSON.stringify({
+ seq_no_primary_term: true,
+ query: {
+ term: { user: 'kimchy' },
+ },
+ })
+ );
+
+ client.diagnostic.emit('response', null, response);
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "200
+ GET /foo?hello=dolly
+ {\\"seq_no_primary_term\\":true,\\"query\\":{\\"term\\":{\\"user\\":\\"kimchy\\"}}}",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('when request body is a buffer', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createResponseWithBody(
+ Buffer.from(
+ JSON.stringify({
+ seq_no_primary_term: true,
+ query: {
+ term: { user: 'kimchy' },
+ },
+ })
+ )
+ );
+
+ client.diagnostic.emit('response', null, response);
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "200
+ GET /foo?hello=dolly
+ [buffer]",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('when request body is a readable stream', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createResponseWithBody(
+ Readable.from(
+ JSON.stringify({
+ seq_no_primary_term: true,
+ query: {
+ term: { user: 'kimchy' },
+ },
+ })
+ )
+ );
+
+ client.diagnostic.emit('response', null, response);
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "200
+ GET /foo?hello=dolly
+ [stream]",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('when request body is not defined', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createResponseWithBody();
+
+ client.diagnostic.emit('response', null, response);
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "200
+ GET /foo?hello=dolly",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('properly encode queries', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({
+ body: {},
+ statusCode: 200,
+ params: {
+ method: 'GET',
+ path: '/foo',
+ querystring: { city: 'Münich' },
+ },
+ });
+
+ client.diagnostic.emit('response', null, response);
+
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "200
+ GET /foo?city=M%C3%BCnich",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('logs queries even in case of errors', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({
+ statusCode: 500,
+ body: {
+ error: {
+ type: 'internal server error',
+ },
+ },
+ params: {
+ method: 'GET',
+ path: '/foo',
+ querystring: { hello: 'dolly' },
+ body: {
+ seq_no_primary_term: true,
+ query: {
+ term: { user: 'kimchy' },
+ },
+ },
+ },
+ });
+ client.diagnostic.emit('response', new errors.ResponseError(response), response);
+
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "500
+ GET /foo?hello=dolly
+ {\\"seq_no_primary_term\\":true,\\"query\\":{\\"term\\":{\\"user\\":\\"kimchy\\"}}} [internal server error]: internal server error",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('logs debug when the client emits an @elastic/elasticsearch error', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({ body: {} });
+ client.diagnostic.emit('response', new errors.TimeoutError('message', response), response);
+
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "[TimeoutError]: message",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('logs debug when the client emits an ResponseError returned by elasticsearch', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({
+ statusCode: 400,
+ headers: {},
+ params: {
+ method: 'GET',
+ path: '/_path',
+ querystring: { hello: 'dolly' },
+ },
+ body: {
+ error: {
+ type: 'illegal_argument_exception',
+ reason: 'request [/_path] contains unrecognized parameter: [name]',
+ },
+ },
+ });
+ client.diagnostic.emit('response', new errors.ResponseError(response), response);
+
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "400
+ GET /_path?hello=dolly [illegal_argument_exception]: request [/_path] contains unrecognized parameter: [name]",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('logs default error info when the error response body is empty', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ let response: DiagnosticResult = createApiResponse({
+ statusCode: 400,
+ headers: {},
+ params: {
+ method: 'GET',
+ path: '/_path',
+ },
+ body: {
+ error: {},
+ },
+ });
+ client.diagnostic.emit('response', new errors.ResponseError(response), response);
+
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "400
+ GET /_path [undefined]: {\\"error\\":{}}",
+ undefined,
+ ],
+ ]
+ `);
+
+ logger.debug.mockClear();
+
+ response = createApiResponse({
+ statusCode: 400,
+ headers: {},
+ params: {
+ method: 'GET',
+ path: '/_path',
+ },
+ body: undefined,
+ });
+ client.diagnostic.emit('response', new errors.ResponseError(response), response);
+
+ expect(loggingSystemMock.collect(logger).debug).toMatchInlineSnapshot(`
+ Array [
+ Array [
+ "400
+ GET /_path [undefined]: Response Error",
+ undefined,
+ ],
+ ]
+ `);
+ });
+
+ it('adds meta information to logs', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ let response = createApiResponse({
+ statusCode: 400,
+ headers: {},
+ params: {
+ method: 'GET',
+ path: '/_path',
+ },
+ requestOptions: {
+ opaqueId: 'opaque-id',
+ },
+ body: {
+ error: {},
+ },
+ });
+ client.diagnostic.emit('response', null, response);
+
+ expect(loggingSystemMock.collect(logger).debug[0][1]).toMatchInlineSnapshot(`
+ Object {
+ "http": Object {
+ "request": Object {
+ "id": "opaque-id",
+ },
+ },
+ }
+ `);
+
+ logger.debug.mockClear();
+
+ response = createApiResponse({
+ statusCode: 400,
+ headers: {},
+ params: {
+ method: 'GET',
+ path: '/_path',
+ },
+ requestOptions: {
+ opaqueId: 'opaque-id',
+ },
+ body: {} as any,
+ });
+ client.diagnostic.emit('response', new errors.ResponseError(response), response);
+
+ expect(loggingSystemMock.collect(logger).debug[0][1]).toMatchInlineSnapshot(`
+ Object {
+ "http": Object {
+ "request": Object {
+ "id": "opaque-id",
+ },
+ },
+ }
+ `);
+ });
+ });
+
+ describe('deprecation warnings from response headers', () => {
+ it('does not log when no deprecation warning header is returned', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({
+ statusCode: 200,
+ warnings: null,
+ params: {
+ method: 'GET',
+ path: '/_path',
+ querystring: { hello: 'dolly' },
+ },
+ body: {
+ hits: [
+ {
+ _source: 'may the source be with you',
+ },
+ ],
+ },
+ });
+ client.diagnostic.emit('response', new errors.ResponseError(response), response);
+
+ // One debug log entry from 'elasticsearch.query' context
+ expect(loggingSystemMock.collect(logger).debug.length).toEqual(1);
+ expect(loggingSystemMock.collect(logger).info).toEqual([]);
+ });
+
+ it('does not log when warning header comes from a warn-agent that is not elasticsearch', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({
+ statusCode: 200,
+ warnings: [
+ '299 nginx/2.3.1 "GET /_path is deprecated"',
+ '299 nginx/2.3.1 "GET hello query param is deprecated"',
+ ],
+ params: {
+ method: 'GET',
+ path: '/_path',
+ querystring: { hello: 'dolly' },
+ },
+ body: {
+ hits: [
+ {
+ _source: 'may the source be with you',
+ },
+ ],
+ },
+ });
+ client.diagnostic.emit('response', new errors.ResponseError(response), response);
+
+ // One debug log entry from 'elasticsearch.query' context
+ expect(loggingSystemMock.collect(logger).debug.length).toEqual(1);
+ expect(loggingSystemMock.collect(logger).info).toEqual([]);
+ });
+
+ it('logs error when the client receives an Elasticsearch error response for a deprecated request originating from a user', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({
+ statusCode: 400,
+ warnings: ['299 Elasticsearch-8.1.0 "GET /_path is deprecated"'],
+ params: {
+ method: 'GET',
+ path: '/_path',
+ querystring: { hello: 'dolly' },
+ },
+ body: {
+ error: {
+ type: 'illegal_argument_exception',
+ reason: 'request [/_path] contains unrecognized parameter: [name]',
+ },
+ },
+ });
+ client.diagnostic.emit('response', new errors.ResponseError(response), response);
+
+ expect(loggingSystemMock.collect(logger).info).toEqual([]);
+ // Test debug[1] since there is one log entry from 'elasticsearch.query' context
+ expect(loggingSystemMock.collect(logger).debug[1][0]).toMatch(
+ 'Elasticsearch deprecation: 299 Elasticsearch-8.1.0 "GET /_path is deprecated"'
+ );
+ expect(loggingSystemMock.collect(logger).debug[1][0]).toMatch('Origin:user');
+ expect(loggingSystemMock.collect(logger).debug[1][0]).toMatch(/Stack trace:\n.*at/);
+ expect(loggingSystemMock.collect(logger).debug[1][0]).toMatch(
+ /Query:\n.*400\n.*GET \/_path\?hello\=dolly \[illegal_argument_exception\]: request \[\/_path\] contains unrecognized parameter: \[name\]/
+ );
+ });
+
+ it('logs warning when the client receives an Elasticsearch error response for a deprecated request originating from kibana', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({
+ statusCode: 400,
+ warnings: ['299 Elasticsearch-8.1.0 "GET /_path is deprecated"'],
+ params: {
+ method: 'GET',
+ path: '/_path',
+ querystring: { hello: 'dolly' },
+ // Set the request header to indicate to Elasticsearch that this is a request over which users have no control
+ headers: { 'x-elastic-product-origin': 'kibana' },
+ },
+ body: {
+ error: {
+ type: 'illegal_argument_exception',
+ reason: 'request [/_path] contains unrecognized parameter: [name]',
+ },
+ },
+ });
+ client.diagnostic.emit('response', new errors.ResponseError(response), response);
+
+ // One debug log entry from 'elasticsearch.query' context
+ expect(loggingSystemMock.collect(logger).debug.length).toEqual(1);
+ expect(loggingSystemMock.collect(logger).info[0][0]).toMatch(
+ 'Elasticsearch deprecation: 299 Elasticsearch-8.1.0 "GET /_path is deprecated"'
+ );
+ expect(loggingSystemMock.collect(logger).info[0][0]).toMatch('Origin:kibana');
+ expect(loggingSystemMock.collect(logger).info[0][0]).toMatch(/Stack trace:\n.*at/);
+ expect(loggingSystemMock.collect(logger).info[0][0]).toMatch(
+ /Query:\n.*400\n.*GET \/_path\?hello\=dolly \[illegal_argument_exception\]: request \[\/_path\] contains unrecognized parameter: \[name\]/
+ );
+ });
+
+ it('logs error when the client receives an Elasticsearch success response for a deprecated request originating from a user', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({
+ statusCode: 200,
+ warnings: ['299 Elasticsearch-8.1.0 "GET /_path is deprecated"'],
+ params: {
+ method: 'GET',
+ path: '/_path',
+ querystring: { hello: 'dolly' },
+ },
+ body: {
+ hits: [
+ {
+ _source: 'may the source be with you',
+ },
+ ],
+ },
+ });
+ client.diagnostic.emit('response', null, response);
+
+ expect(loggingSystemMock.collect(logger).info).toEqual([]);
+ // Test debug[1] since there is one log entry from 'elasticsearch.query' context
+ expect(loggingSystemMock.collect(logger).debug[1][0]).toMatch(
+ 'Elasticsearch deprecation: 299 Elasticsearch-8.1.0 "GET /_path is deprecated"'
+ );
+ expect(loggingSystemMock.collect(logger).debug[1][0]).toMatch('Origin:user');
+ expect(loggingSystemMock.collect(logger).debug[1][0]).toMatch(/Stack trace:\n.*at/);
+ expect(loggingSystemMock.collect(logger).debug[1][0]).toMatch(
+ /Query:\n.*200\n.*GET \/_path\?hello\=dolly/
+ );
+ });
+
+ it('logs warning when the client receives an Elasticsearch success response for a deprecated request originating from kibana', () => {
+ instrumentEsQueryAndDeprecationLogger({ logger, client, type: 'test type' });
+
+ const response = createApiResponse({
+ statusCode: 200,
+ warnings: ['299 Elasticsearch-8.1.0 "GET /_path is deprecated"'],
+ params: {
+ method: 'GET',
+ path: '/_path',
+ querystring: { hello: 'dolly' },
+ // Set the request header to indicate to Elasticsearch that this is a request over which users have no control
+ headers: { 'x-elastic-product-origin': 'kibana' },
+ },
+ body: {
+ hits: [
+ {
+ _source: 'may the source be with you',
+ },
+ ],
+ },
+ });
+ client.diagnostic.emit('response', null, response);
+
+ // One debug log entry from 'elasticsearch.query' context
+ expect(loggingSystemMock.collect(logger).debug.length).toEqual(1);
+ expect(loggingSystemMock.collect(logger).info[0][0]).toMatch(
+ 'Elasticsearch deprecation: 299 Elasticsearch-8.1.0 "GET /_path is deprecated"'
+ );
+ expect(loggingSystemMock.collect(logger).info[0][0]).toMatch('Origin:kibana');
+ expect(loggingSystemMock.collect(logger).info[0][0]).toMatch(/Stack trace:\n.*at/);
+ expect(loggingSystemMock.collect(logger).info[0][0]).toMatch(
+ /Query:\n.*200\n.*GET \/_path\?hello\=dolly/
+ );
+ });
+ });
+});
diff --git a/src/core/server/elasticsearch/client/log_query_and_deprecation.ts b/src/core/server/elasticsearch/client/log_query_and_deprecation.ts
new file mode 100644
index 0000000000000..fc5a0fa6e1111
--- /dev/null
+++ b/src/core/server/elasticsearch/client/log_query_and_deprecation.ts
@@ -0,0 +1,143 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { Buffer } from 'buffer';
+import { stringify } from 'querystring';
+import { errors, DiagnosticResult, RequestBody, Client } from '@elastic/elasticsearch';
+import type { ElasticsearchErrorDetails } from './types';
+import { Logger } from '../../logging';
+
+const convertQueryString = (qs: string | Record | undefined): string => {
+ if (qs === undefined || typeof qs === 'string') {
+ return qs ?? '';
+ }
+ return stringify(qs);
+};
+
+function ensureString(body: RequestBody): string {
+ if (typeof body === 'string') return body;
+ if (Buffer.isBuffer(body)) return '[buffer]';
+ if ('readable' in body && body.readable && typeof body._read === 'function') return '[stream]';
+ return JSON.stringify(body);
+}
+
+/**
+ * Returns a debug message from an Elasticsearch error in the following format:
+ * [error type] error reason
+ */
+export function getErrorMessage(error: errors.ElasticsearchClientError): string {
+ if (error instanceof errors.ResponseError) {
+ const errorBody = error.meta.body as ElasticsearchErrorDetails;
+ return `[${errorBody?.error?.type}]: ${errorBody?.error?.reason ?? error.message}`;
+ }
+ return `[${error.name}]: ${error.message}`;
+}
+
+/**
+ * returns a string in format:
+ *
+ * status code
+ * method URL
+ * request body
+ *
+ * so it could be copy-pasted into the Dev console
+ */
+function getResponseMessage(event: DiagnosticResult): string {
+ const errorMeta = getRequestDebugMeta(event);
+ const body = errorMeta.body ? `\n${errorMeta.body}` : '';
+ return `${errorMeta.statusCode}\n${errorMeta.method} ${errorMeta.url}${body}`;
+}
+
+/**
+ * Returns stringified debug information from an Elasticsearch request event
+ * useful for logging in case of an unexpected failure.
+ */
+export function getRequestDebugMeta(event: DiagnosticResult): {
+ url: string;
+ body: string;
+ statusCode: number | null;
+ method: string;
+} {
+ const params = event.meta.request.params;
+ // definition is wrong, `params.querystring` can be either a string or an object
+ const querystring = convertQueryString(params.querystring);
+ return {
+ url: `${params.path}${querystring ? `?${querystring}` : ''}`,
+ body: params.body ? `${ensureString(params.body)}` : '',
+ method: params.method,
+ statusCode: event.statusCode!,
+ };
+}
+
+/** HTTP Warning headers have the following syntax:
+ * <warn-code> <warn-agent> <warn-text> (where warn-code is a three digit number)
+ * This function tests if a warning comes from an Elasticsearch warn-agent
+ * */
+const isEsWarning = (warning: string) => /\d\d\d Elasticsearch-/.test(warning);
+
+export const instrumentEsQueryAndDeprecationLogger = ({
+ logger,
+ client,
+ type,
+}: {
+ logger: Logger;
+ client: Client;
+ type: string;
+}) => {
+ const queryLogger = logger.get('query', type);
+ const deprecationLogger = logger.get('deprecation');
+ client.diagnostic.on('response', (error, event) => {
+ if (event) {
+ const opaqueId = event.meta.request.options.opaqueId;
+ const meta = opaqueId
+ ? {
+ http: { request: { id: event.meta.request.options.opaqueId } },
+ }
+ : undefined; // do not clutter logs if opaqueId is not present
+ let queryMsg = '';
+ if (error) {
+ if (error instanceof errors.ResponseError) {
+ queryMsg = `${getResponseMessage(event)} ${getErrorMessage(error)}`;
+ } else {
+ queryMsg = getErrorMessage(error);
+ }
+ } else {
+ queryMsg = getResponseMessage(event);
+ }
+
+ queryLogger.debug(queryMsg, meta);
+
+ if (event.warnings && event.warnings.filter(isEsWarning).length > 0) {
+ // Plugins can explicitly mark requests as originating from a user by
+ // removing the `'x-elastic-product-origin': 'kibana'` header that's
+ // added by default. User requests will be shown to users in the
+ // upgrade assistant UI as an action item that has to be addressed
+ // before they upgrade.
+ // Kibana requests will be hidden from the upgrade assistant UI and are
+ // only logged to help developers maintain their plugins
+ const requestOrigin =
+ (event.meta.request.params.headers != null &&
+ (event.meta.request.params.headers[
+ 'x-elastic-product-origin'
+ ] as unknown as string)) === 'kibana'
+ ? 'kibana'
+ : 'user';
+
+ // Strip the first 5 stack trace lines as these are irrelevant to finding the call site
+ const stackTrace = new Error().stack?.split('\n').slice(5).join('\n');
+
+ const deprecationMsg = `Elasticsearch deprecation: ${event.warnings}\nOrigin:${requestOrigin}\nStack trace:\n${stackTrace}\nQuery:\n${queryMsg}`;
+ if (requestOrigin === 'kibana') {
+ deprecationLogger.info(deprecationMsg);
+ } else {
+ deprecationLogger.debug(deprecationMsg);
+ }
+ }
+ }
+ });
+};
diff --git a/src/core/server/elasticsearch/elasticsearch_service.test.ts b/src/core/server/elasticsearch/elasticsearch_service.test.ts
index 3b75d19b80a10..ce5672ad30519 100644
--- a/src/core/server/elasticsearch/elasticsearch_service.test.ts
+++ b/src/core/server/elasticsearch/elasticsearch_service.test.ts
@@ -21,7 +21,7 @@ import { MockClusterClient, isScriptingEnabledMock } from './elasticsearch_servi
import type { NodesVersionCompatibility } from './version_check/ensure_es_version';
import { BehaviorSubject } from 'rxjs';
import { first } from 'rxjs/operators';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { Env } from '../config';
import { configServiceMock, getEnvOptions } from '../config/mocks';
import { CoreContext } from '../core_context';
diff --git a/src/core/server/http/cookie_session_storage.test.ts b/src/core/server/http/cookie_session_storage.test.ts
index ad05d37c81e99..8e2cd58733faf 100644
--- a/src/core/server/http/cookie_session_storage.test.ts
+++ b/src/core/server/http/cookie_session_storage.test.ts
@@ -8,7 +8,7 @@
import { parse as parseCookie } from 'tough-cookie';
import supertest from 'supertest';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { ByteSizeValue } from '@kbn/config-schema';
import { BehaviorSubject } from 'rxjs';
diff --git a/src/core/server/http/http_service.test.ts b/src/core/server/http/http_service.test.ts
index 4955d19668580..3a387cdfd5e35 100644
--- a/src/core/server/http/http_service.test.ts
+++ b/src/core/server/http/http_service.test.ts
@@ -10,7 +10,7 @@ import { mockHttpServer } from './http_service.test.mocks';
import { noop } from 'lodash';
import { BehaviorSubject } from 'rxjs';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { getEnvOptions } from '../config/mocks';
import { HttpService } from '.';
import { HttpConfigType, config } from './http_config';
diff --git a/src/core/server/http/test_utils.ts b/src/core/server/http/test_utils.ts
index 4e1a88e967f8f..8a8c545b365b3 100644
--- a/src/core/server/http/test_utils.ts
+++ b/src/core/server/http/test_utils.ts
@@ -8,7 +8,7 @@
import { BehaviorSubject } from 'rxjs';
import moment from 'moment';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { ByteSizeValue } from '@kbn/config-schema';
import { Env } from '../config';
import { HttpService } from './http_service';
diff --git a/src/core/server/metrics/logging/get_ops_metrics_log.test.ts b/src/core/server/metrics/logging/get_ops_metrics_log.test.ts
index cba188c94c74e..3fd3c4a7a24d6 100644
--- a/src/core/server/metrics/logging/get_ops_metrics_log.test.ts
+++ b/src/core/server/metrics/logging/get_ops_metrics_log.test.ts
@@ -42,6 +42,7 @@ const testMetrics = {
memory: { heap: { used_in_bytes: 100 } },
uptime_in_millis: 1500,
event_loop_delay: 50,
+ event_loop_delay_histogram: { percentiles: { '50': 50, '75': 75, '95': 95, '99': 99 } },
},
os: {
load: {
@@ -56,7 +57,7 @@ describe('getEcsOpsMetricsLog', () => {
it('provides correctly formatted message', () => {
const result = getEcsOpsMetricsLog(createMockOpsMetrics(testMetrics));
expect(result.message).toMatchInlineSnapshot(
- `"memory: 100.0B uptime: 0:00:01 load: [10.00,20.00,30.00] delay: 50.000"`
+ `"memory: 100.0B uptime: 0:00:01 load: [10.00,20.00,30.00] mean delay: 50.000 delay histogram: { 50: 50.000; 95: 95.000; 99: 99.000 }"`
);
});
@@ -70,6 +71,7 @@ describe('getEcsOpsMetricsLog', () => {
const missingMetrics = {
...baseMetrics,
process: {},
+ processes: [],
os: {},
} as unknown as OpsMetrics;
const logMeta = getEcsOpsMetricsLog(missingMetrics);
@@ -77,39 +79,41 @@ describe('getEcsOpsMetricsLog', () => {
});
it('provides an ECS-compatible response', () => {
- const logMeta = getEcsOpsMetricsLog(createBaseOpsMetrics());
- expect(logMeta).toMatchInlineSnapshot(`
+ const logMeta = getEcsOpsMetricsLog(createMockOpsMetrics(testMetrics));
+ expect(logMeta.meta).toMatchInlineSnapshot(`
Object {
- "message": "memory: 1.0B load: [1.00,1.00,1.00] delay: 1.000",
- "meta": Object {
- "event": Object {
- "category": Array [
- "process",
- "host",
- ],
- "kind": "metric",
- "type": Array [
- "info",
- ],
- },
- "host": Object {
- "os": Object {
- "load": Object {
- "15m": 1,
- "1m": 1,
- "5m": 1,
- },
+ "event": Object {
+ "category": Array [
+ "process",
+ "host",
+ ],
+ "kind": "metric",
+ "type": Array [
+ "info",
+ ],
+ },
+ "host": Object {
+ "os": Object {
+ "load": Object {
+ "15m": 30,
+ "1m": 10,
+ "5m": 20,
},
},
- "process": Object {
- "eventLoopDelay": 1,
- "memory": Object {
- "heap": Object {
- "usedInBytes": 1,
- },
+ },
+ "process": Object {
+ "eventLoopDelay": 50,
+ "eventLoopDelayHistogram": Object {
+ "50": 50,
+ "95": 95,
+ "99": 99,
+ },
+ "memory": Object {
+ "heap": Object {
+ "usedInBytes": 100,
},
- "uptime": 0,
},
+ "uptime": 1,
},
}
`);
diff --git a/src/core/server/metrics/logging/get_ops_metrics_log.ts b/src/core/server/metrics/logging/get_ops_metrics_log.ts
index 7e13f35889ec7..6211407ae86f0 100644
--- a/src/core/server/metrics/logging/get_ops_metrics_log.ts
+++ b/src/core/server/metrics/logging/get_ops_metrics_log.ts
@@ -30,10 +30,29 @@ export function getEcsOpsMetricsLog(metrics: OpsMetrics) {
// HH:mm:ss message format for backward compatibility
const uptimeValMsg = uptimeVal ? `uptime: ${numeral(uptimeVal).format('00:00:00')} ` : '';
- // Event loop delay is in ms
+ // Event loop delay metrics are in ms
const eventLoopDelayVal = process?.event_loop_delay;
const eventLoopDelayValMsg = eventLoopDelayVal
- ? `delay: ${numeral(process?.event_loop_delay).format('0.000')}`
+ ? `mean delay: ${numeral(process?.event_loop_delay).format('0.000')}`
+ : '';
+
+ const eventLoopDelayPercentiles = process?.event_loop_delay_histogram?.percentiles;
+
+ // Extract 50th, 95th and 99th percentiles for log meta
+ const eventLoopDelayHistVals = eventLoopDelayPercentiles
+ ? {
+ 50: eventLoopDelayPercentiles[50],
+ 95: eventLoopDelayPercentiles[95],
+ 99: eventLoopDelayPercentiles[99],
+ }
+ : undefined;
+ // Format message from 50th, 95th and 99th percentiles
+ const eventLoopDelayHistMsg = eventLoopDelayPercentiles
+ ? ` delay histogram: { 50: ${numeral(eventLoopDelayPercentiles['50']).format(
+ '0.000'
+ )}; 95: ${numeral(eventLoopDelayPercentiles['95']).format('0.000')}; 99: ${numeral(
+ eventLoopDelayPercentiles['99']
+ ).format('0.000')} }`
: '';
const loadEntries = {
@@ -65,6 +84,7 @@ export function getEcsOpsMetricsLog(metrics: OpsMetrics) {
},
},
eventLoopDelay: eventLoopDelayVal,
+ eventLoopDelayHistogram: eventLoopDelayHistVals,
},
host: {
os: {
@@ -75,7 +95,13 @@ export function getEcsOpsMetricsLog(metrics: OpsMetrics) {
};
return {
- message: `${processMemoryUsedInBytesMsg}${uptimeValMsg}${loadValsMsg}${eventLoopDelayValMsg}`,
+ message: [
+ processMemoryUsedInBytesMsg,
+ uptimeValMsg,
+ loadValsMsg,
+ eventLoopDelayValMsg,
+ eventLoopDelayHistMsg,
+ ].join(''),
meta,
};
}
diff --git a/src/core/server/metrics/metrics_service.test.ts b/src/core/server/metrics/metrics_service.test.ts
index d7de41fd7ccf7..27043b8fa2c8a 100644
--- a/src/core/server/metrics/metrics_service.test.ts
+++ b/src/core/server/metrics/metrics_service.test.ts
@@ -203,6 +203,7 @@ describe('MetricsService', () => {
},
"process": Object {
"eventLoopDelay": undefined,
+ "eventLoopDelayHistogram": undefined,
"memory": Object {
"heap": Object {
"usedInBytes": undefined,
diff --git a/src/core/server/plugins/discovery/plugins_discovery.test.ts b/src/core/server/plugins/discovery/plugins_discovery.test.ts
index 958e051d0476d..a6ffdff4422be 100644
--- a/src/core/server/plugins/discovery/plugins_discovery.test.ts
+++ b/src/core/server/plugins/discovery/plugins_discovery.test.ts
@@ -7,7 +7,7 @@
*/
// must be before mocks imports to avoid conflicting with `REPO_ROOT` accessor.
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { mockPackage, scanPluginSearchPathsMock } from './plugins_discovery.test.mocks';
import mockFs from 'mock-fs';
import { loggingSystemMock } from '../../logging/logging_system.mock';
diff --git a/src/core/server/plugins/integration_tests/plugins_service.test.ts b/src/core/server/plugins/integration_tests/plugins_service.test.ts
index 4170d9422f277..ebbb3fa473b6d 100644
--- a/src/core/server/plugins/integration_tests/plugins_service.test.ts
+++ b/src/core/server/plugins/integration_tests/plugins_service.test.ts
@@ -7,7 +7,7 @@
*/
// must be before mocks imports to avoid conflicting with `REPO_ROOT` accessor.
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { mockPackage, mockDiscover } from './plugins_service.test.mocks';
import { join } from 'path';
diff --git a/src/core/server/plugins/plugin.test.ts b/src/core/server/plugins/plugin.test.ts
index 513e893992005..92cbda2a69cfe 100644
--- a/src/core/server/plugins/plugin.test.ts
+++ b/src/core/server/plugins/plugin.test.ts
@@ -8,7 +8,7 @@
import { join } from 'path';
import { BehaviorSubject } from 'rxjs';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { schema } from '@kbn/config-schema';
import { Env } from '../config';
diff --git a/src/core/server/plugins/plugin_context.test.ts b/src/core/server/plugins/plugin_context.test.ts
index 867d4d978314b..7bcf392ed510b 100644
--- a/src/core/server/plugins/plugin_context.test.ts
+++ b/src/core/server/plugins/plugin_context.test.ts
@@ -8,7 +8,7 @@
import { duration } from 'moment';
import { first } from 'rxjs/operators';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { fromRoot } from '@kbn/utils';
import {
createPluginInitializerContext,
diff --git a/src/core/server/plugins/plugins_config.test.ts b/src/core/server/plugins/plugins_config.test.ts
index d65b057fb65c0..b9225054e63ef 100644
--- a/src/core/server/plugins/plugins_config.test.ts
+++ b/src/core/server/plugins/plugins_config.test.ts
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { getEnvOptions } from '../config/mocks';
import { PluginsConfig, PluginsConfigType } from './plugins_config';
import { Env } from '../config';
diff --git a/src/core/server/plugins/plugins_service.test.ts b/src/core/server/plugins/plugins_service.test.ts
index 0c077d732c67b..5a05817d2111f 100644
--- a/src/core/server/plugins/plugins_service.test.ts
+++ b/src/core/server/plugins/plugins_service.test.ts
@@ -11,7 +11,8 @@ import { mockDiscover, mockPackage } from './plugins_service.test.mocks';
import { resolve, join } from 'path';
import { BehaviorSubject, from } from 'rxjs';
import { schema } from '@kbn/config-schema';
-import { createAbsolutePathSerializer, REPO_ROOT } from '@kbn/dev-utils';
+import { createAbsolutePathSerializer } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { ConfigPath, ConfigService, Env } from '../config';
import { rawConfigServiceMock, getEnvOptions } from '../config/mocks';
diff --git a/src/core/server/plugins/plugins_system.test.ts b/src/core/server/plugins/plugins_system.test.ts
index 4cd8e4c551bea..3d8a47005b362 100644
--- a/src/core/server/plugins/plugins_system.test.ts
+++ b/src/core/server/plugins/plugins_system.test.ts
@@ -14,7 +14,7 @@ import {
import { BehaviorSubject } from 'rxjs';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { Env } from '../config';
import { configServiceMock, getEnvOptions } from '../config/mocks';
import { CoreContext } from '../core_context';
diff --git a/src/core/server/preboot/preboot_service.test.ts b/src/core/server/preboot/preboot_service.test.ts
index dd4b1cb7d1df0..77242f0c5765f 100644
--- a/src/core/server/preboot/preboot_service.test.ts
+++ b/src/core/server/preboot/preboot_service.test.ts
@@ -7,7 +7,7 @@
*/
import { nextTick } from '@kbn/test/jest';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { LoggerFactory } from '@kbn/logging';
import { Env } from '@kbn/config';
import { getEnvOptions } from '../config/mocks';
diff --git a/src/core/server/root/index.test.ts b/src/core/server/root/index.test.ts
index 7eba051a128f0..6ea3e05b9c2c2 100644
--- a/src/core/server/root/index.test.ts
+++ b/src/core/server/root/index.test.ts
@@ -10,7 +10,7 @@ import { rawConfigService, configService, logger, mockServer } from './index.tes
import { BehaviorSubject } from 'rxjs';
import { filter, first } from 'rxjs/operators';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { getEnvOptions } from '../config/mocks';
import { Root } from '.';
import { Env } from '../config';
diff --git a/src/core/server/saved_objects/migrations/integration_tests/7.7.2_xpack_100k.test.ts b/src/core/server/saved_objects/migrations/integration_tests/7.7.2_xpack_100k.test.ts
index c22c6154c2605..139cd298d28ed 100644
--- a/src/core/server/saved_objects/migrations/integration_tests/7.7.2_xpack_100k.test.ts
+++ b/src/core/server/saved_objects/migrations/integration_tests/7.7.2_xpack_100k.test.ts
@@ -8,7 +8,7 @@
import path from 'path';
import { unlink } from 'fs/promises';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { Env } from '@kbn/config';
import { getEnvOptions } from '../../../config/mocks';
import * as kbnTestServer from '../../../../test_helpers/kbn_server';
diff --git a/src/core/server/saved_objects/migrations/integration_tests/7_13_0_failed_action_tasks.test.ts b/src/core/server/saved_objects/migrations/integration_tests/7_13_0_failed_action_tasks.test.ts
index 2def8e375c81f..479b1e78e1b72 100644
--- a/src/core/server/saved_objects/migrations/integration_tests/7_13_0_failed_action_tasks.test.ts
+++ b/src/core/server/saved_objects/migrations/integration_tests/7_13_0_failed_action_tasks.test.ts
@@ -19,8 +19,7 @@ async function removeLogFile() {
await fs.unlink(logFilePath).catch(() => void 0);
}
-// FLAKY: https://github.com/elastic/kibana/issues/118626
-describe.skip('migration from 7.13 to 7.14+ with many failed action_tasks', () => {
+describe('migration from 7.13 to 7.14+ with many failed action_tasks', () => {
let esServer: kbnTestServer.TestElasticsearchUtils;
let root: Root;
let startES: () => Promise;
diff --git a/src/core/server/saved_objects/migrations/integration_tests/migration_from_older_v1.test.ts b/src/core/server/saved_objects/migrations/integration_tests/migration_from_older_v1.test.ts
index 0ed9262017263..c341463b78910 100644
--- a/src/core/server/saved_objects/migrations/integration_tests/migration_from_older_v1.test.ts
+++ b/src/core/server/saved_objects/migrations/integration_tests/migration_from_older_v1.test.ts
@@ -10,7 +10,7 @@ import Path from 'path';
import Fs from 'fs';
import Util from 'util';
import Semver from 'semver';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { Env } from '@kbn/config';
import { getEnvOptions } from '../../../config/mocks';
import * as kbnTestServer from '../../../../test_helpers/kbn_server';
diff --git a/src/core/server/saved_objects/migrations/integration_tests/migration_from_same_v1.test.ts b/src/core/server/saved_objects/migrations/integration_tests/migration_from_same_v1.test.ts
index 15d985daccba6..34d1317755c14 100644
--- a/src/core/server/saved_objects/migrations/integration_tests/migration_from_same_v1.test.ts
+++ b/src/core/server/saved_objects/migrations/integration_tests/migration_from_same_v1.test.ts
@@ -10,7 +10,7 @@ import Path from 'path';
import Fs from 'fs';
import Util from 'util';
import Semver from 'semver';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { Env } from '@kbn/config';
import { getEnvOptions } from '../../../config/mocks';
import * as kbnTestServer from '../../../../test_helpers/kbn_server';
diff --git a/src/core/server/saved_objects/migrations/integration_tests/type_registrations.test.ts b/src/core/server/saved_objects/migrations/integration_tests/type_registrations.test.ts
index 7597657e7706c..4ff66151db925 100644
--- a/src/core/server/saved_objects/migrations/integration_tests/type_registrations.test.ts
+++ b/src/core/server/saved_objects/migrations/integration_tests/type_registrations.test.ts
@@ -34,6 +34,7 @@ const previouslyRegisteredTypes = [
'cases-sub-case',
'cases-user-actions',
'config',
+ 'connector_token',
'core-usage-stats',
'dashboard',
'endpoint:user-artifact',
diff --git a/src/core/server/saved_objects/saved_objects_service.test.ts b/src/core/server/saved_objects/saved_objects_service.test.ts
index a4f6c019c9624..a8bda95af46f9 100644
--- a/src/core/server/saved_objects/saved_objects_service.test.ts
+++ b/src/core/server/saved_objects/saved_objects_service.test.ts
@@ -19,7 +19,7 @@ import {
import { BehaviorSubject } from 'rxjs';
import { RawPackageInfo } from '@kbn/config';
import { ByteSizeValue } from '@kbn/config-schema';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { SavedObjectsService } from './saved_objects_service';
import { mockCoreContext } from '../core_context.mock';
diff --git a/src/core/server/saved_objects/service/lib/repository.test.ts b/src/core/server/saved_objects/service/lib/repository.test.ts
index ab692b146e7f6..ebab5898a0eb9 100644
--- a/src/core/server/saved_objects/service/lib/repository.test.ts
+++ b/src/core/server/saved_objects/service/lib/repository.test.ts
@@ -2272,7 +2272,16 @@ describe('SavedObjectsRepository', () => {
it(`self-generates an id if none is provided`, async () => {
await createSuccess(type, attributes);
- expect(client.create).toHaveBeenCalledWith(
+ expect(client.create).toHaveBeenNthCalledWith(
+ 1,
+ expect.objectContaining({
+ id: expect.objectContaining(/index-pattern:[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}/),
+ }),
+ expect.anything()
+ );
+ await createSuccess(type, attributes, { id: '' });
+ expect(client.create).toHaveBeenNthCalledWith(
+ 2,
expect.objectContaining({
id: expect.objectContaining(/index-pattern:[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}/),
}),
@@ -3558,6 +3567,20 @@ describe('SavedObjectsRepository', () => {
});
});
+ it('search for the right fields when typeToNamespacesMap is set', async () => {
+ const relevantOpts = {
+ ...commonOptions,
+ fields: ['title'],
+ type: '',
+ namespaces: [],
+ typeToNamespacesMap: new Map([[type, [namespace]]]),
+ };
+
+ await findSuccess(relevantOpts, namespace);
+ const esOptions = client.search.mock.calls[0][0];
+ expect(esOptions?._source ?? []).toContain('index-pattern.title');
+ });
+
it(`accepts hasReferenceOperator`, async () => {
const relevantOpts: SavedObjectsFindOptions = {
...commonOptions,
@@ -4147,6 +4170,13 @@ describe('SavedObjectsRepository', () => {
await test({});
});
+ it(`throws when id is empty`, async () => {
+ await expect(
+ savedObjectsRepository.incrementCounter(type, '', counterFields)
+ ).rejects.toThrowError(createBadRequestError('id cannot be empty'));
+ expect(client.update).not.toHaveBeenCalled();
+ });
+
it(`throws when counterField is not CounterField type`, async () => {
const test = async (field: unknown[]) => {
await expect(
@@ -4673,6 +4703,13 @@ describe('SavedObjectsRepository', () => {
expect(client.update).not.toHaveBeenCalled();
});
+ it(`throws when id is empty`, async () => {
+ await expect(savedObjectsRepository.update(type, '', attributes)).rejects.toThrowError(
+ createBadRequestError('id cannot be empty')
+ );
+ expect(client.update).not.toHaveBeenCalled();
+ });
+
it(`throws when ES is unable to find the document during get`, async () => {
client.get.mockResolvedValueOnce(
elasticsearchClientMock.createSuccessTransportRequestPromise(
diff --git a/src/core/server/saved_objects/service/lib/repository.ts b/src/core/server/saved_objects/service/lib/repository.ts
index 0d17525016043..9af85499295b5 100644
--- a/src/core/server/saved_objects/service/lib/repository.ts
+++ b/src/core/server/saved_objects/service/lib/repository.ts
@@ -303,7 +303,6 @@ export class SavedObjectsRepository {
options: SavedObjectsCreateOptions = {}
): Promise> {
const {
- id = SavedObjectsUtils.generateId(),
migrationVersion,
coreMigrationVersion,
overwrite = false,
@@ -313,6 +312,7 @@ export class SavedObjectsRepository {
initialNamespaces,
version,
} = options;
+ const id = options.id || SavedObjectsUtils.generateId();
const namespace = normalizeNamespace(options.namespace);
this.validateInitialNamespaces(type, initialNamespaces);
@@ -930,7 +930,7 @@ export class SavedObjectsRepository {
index: pit ? undefined : this.getIndicesForTypes(allowedTypes),
// If `searchAfter` is provided, we drop `from` as it will not be used for pagination.
from: searchAfter ? undefined : perPage * (page - 1),
- _source: includedFields(type, fields),
+ _source: includedFields(allowedTypes, fields),
preference,
rest_total_hits_as_int: true,
size: perPage,
@@ -938,7 +938,7 @@ export class SavedObjectsRepository {
size: perPage,
seq_no_primary_term: true,
from: perPage * (page - 1),
- _source: includedFields(type, fields),
+ _source: includedFields(allowedTypes, fields),
...(aggsObject ? { aggs: aggsObject } : {}),
...getSearchDsl(this._mappings, this._registry, {
search,
@@ -1231,6 +1231,9 @@ export class SavedObjectsRepository {
if (!this._allowedTypes.includes(type)) {
throw SavedObjectsErrorHelpers.createGenericNotFoundError(type, id);
}
+ if (!id) {
+ throw SavedObjectsErrorHelpers.createBadRequestError('id cannot be empty'); // prevent potentially upserting a saved object with an empty ID
+ }
const { version, references, upsert, refresh = DEFAULT_REFRESH_SETTING } = options;
const namespace = normalizeNamespace(options.namespace);
@@ -1754,6 +1757,10 @@ export class SavedObjectsRepository {
upsertAttributes,
} = options;
+ if (!id) {
+ throw SavedObjectsErrorHelpers.createBadRequestError('id cannot be empty'); // prevent potentially upserting a saved object with an empty ID
+ }
+
const normalizedCounterFields = counterFields.map((counterField) => {
/**
* no counterField configs provided, instead a field name string was passed.
diff --git a/src/core/server/server.test.ts b/src/core/server/server.test.ts
index 112693aae0279..48547883d5f67 100644
--- a/src/core/server/server.test.ts
+++ b/src/core/server/server.test.ts
@@ -26,7 +26,7 @@ import {
} from './server.test.mocks';
import { BehaviorSubject } from 'rxjs';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { rawConfigServiceMock, getEnvOptions } from './config/mocks';
import { Env } from './config';
import { Server } from './server';
diff --git a/src/core/server/ui_settings/integration_tests/index.test.ts b/src/core/server/ui_settings/integration_tests/index.test.ts
index ef635e90dac70..3f85beb2acec6 100644
--- a/src/core/server/ui_settings/integration_tests/index.test.ts
+++ b/src/core/server/ui_settings/integration_tests/index.test.ts
@@ -7,7 +7,7 @@
*/
import { Env } from '@kbn/config';
-import { REPO_ROOT } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { getEnvOptions } from '../../config/mocks';
import { startServers, stopServers } from './lib';
import { docExistsSuite } from './doc_exists';
diff --git a/src/core/test_helpers/kbn_server.ts b/src/core/test_helpers/kbn_server.ts
index 58720be637e2f..c326c7a35df63 100644
--- a/src/core/test_helpers/kbn_server.ts
+++ b/src/core/test_helpers/kbn_server.ts
@@ -6,7 +6,8 @@
* Side Public License, v 1.
*/
-import { ToolingLog, REPO_ROOT } from '@kbn/dev-utils';
+import { ToolingLog } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import {
createTestEsCluster,
CreateTestEsClusterOptions,
diff --git a/src/core/types/elasticsearch/search.ts b/src/core/types/elasticsearch/search.ts
index c28bf3c258f77..ac93a45da3258 100644
--- a/src/core/types/elasticsearch/search.ts
+++ b/src/core/types/elasticsearch/search.ts
@@ -9,6 +9,11 @@
import { ValuesType, UnionToIntersection } from 'utility-types';
import * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
+interface AggregationsAggregationContainer extends Record {
+ aggs?: any;
+ aggregations?: any;
+}
+
type InvalidAggregationRequest = unknown;
// ensures aggregations work with requests where aggregation options are a union type,
@@ -31,7 +36,7 @@ type KeysOfSources = T extends [any]
? KeyOfSource & KeyOfSource & KeyOfSource & KeyOfSource
: Record;
-type CompositeKeysOf =
+type CompositeKeysOf =
TAggregationContainer extends {
composite: { sources: [...infer TSource] };
}
@@ -40,7 +45,7 @@ type CompositeKeysOf =
+type TopMetricKeysOf =
TAggregationContainer extends { top_metrics: { metrics: { field: infer TField } } }
? TField
: TAggregationContainer extends { top_metrics: { metrics: Array<{ field: infer TField }> } }
@@ -92,17 +97,9 @@ type HitsOf<
>
>;
-type AggregationTypeName = Exclude<
- keyof estypes.AggregationsAggregationContainer,
- 'aggs' | 'aggregations'
->;
+type AggregationMap = Partial>;
-type AggregationMap = Partial>;
-
-type TopLevelAggregationRequest = Pick<
- estypes.AggregationsAggregationContainer,
- 'aggs' | 'aggregations'
->;
+type TopLevelAggregationRequest = Pick;
type MaybeKeyed<
TAggregationContainer,
@@ -113,448 +110,460 @@ type MaybeKeyed<
: { buckets: TBucket[] };
export type AggregateOf<
- TAggregationContainer extends estypes.AggregationsAggregationContainer,
+ TAggregationContainer extends AggregationsAggregationContainer,
TDocument
-> = (Record & {
- adjacency_matrix: {
- buckets: Array<
- {
- key: string;
- doc_count: number;
- } & SubAggregateOf
- >;
- };
- auto_date_histogram: {
- interval: string;
- buckets: Array<
- {
- key: number;
- key_as_string: string;
- doc_count: number;
- } & SubAggregateOf
- >;
- };
- avg: {
- value: number | null;
- value_as_string?: string;
- };
- avg_bucket: {
- value: number | null;
- };
- boxplot: {
- min: number | null;
- max: number | null;
- q1: number | null;
- q2: number | null;
- q3: number | null;
- };
- bucket_script: {
- value: unknown;
- };
- cardinality: {
- value: number;
- };
- children: {
- doc_count: number;
- } & SubAggregateOf;
- composite: {
- after_key: CompositeKeysOf;
- buckets: Array<
- {
+> = ValuesType<
+ Pick<
+ Record & {
+ adjacency_matrix: {
+ buckets: Array<
+ {
+ key: string;
+ doc_count: number;
+ } & SubAggregateOf
+ >;
+ };
+ auto_date_histogram: {
+ interval: string;
+ buckets: Array<
+ {
+ key: number;
+ key_as_string: string;
+ doc_count: number;
+ } & SubAggregateOf
+ >;
+ };
+ avg: {
+ value: number | null;
+ value_as_string?: string;
+ };
+ avg_bucket: {
+ value: number | null;
+ };
+ boxplot: {
+ min: number | null;
+ max: number | null;
+ q1: number | null;
+ q2: number | null;
+ q3: number | null;
+ };
+ bucket_script: {
+ value: unknown;
+ };
+ cardinality: {
+ value: number;
+ };
+ children: {
doc_count: number;
- key: CompositeKeysOf;
- } & SubAggregateOf
- >;
- };
- cumulative_cardinality: {
- value: number;
- };
- cumulative_sum: {
- value: number;
- };
- date_histogram: MaybeKeyed<
- TAggregationContainer,
- {
- key: number;
- key_as_string: string;
- doc_count: number;
- } & SubAggregateOf
- >;
- date_range: MaybeKeyed<
- TAggregationContainer,
- Partial<{ from: string | number; from_as_string: string }> &
- Partial<{ to: string | number; to_as_string: string }> & {
+ } & SubAggregateOf;
+ composite: {
+ after_key: CompositeKeysOf;
+ buckets: Array<
+ {
+ doc_count: number;
+ key: CompositeKeysOf;
+ } & SubAggregateOf
+ >;
+ };
+ cumulative_cardinality: {
+ value: number;
+ };
+ cumulative_sum: {
+ value: number;
+ };
+ date_histogram: MaybeKeyed<
+ TAggregationContainer,
+ {
+ key: number;
+ key_as_string: string;
+ doc_count: number;
+ } & SubAggregateOf
+ >;
+ date_range: MaybeKeyed<
+ TAggregationContainer,
+ Partial<{ from: string | number; from_as_string: string }> &
+ Partial<{ to: string | number; to_as_string: string }> & {
+ doc_count: number;
+ key: string;
+ }
+ >;
+ derivative:
+ | {
+ value: number | null;
+ }
+ | undefined;
+ extended_stats: {
+ count: number;
+ min: number | null;
+ max: number | null;
+ avg: number | null;
+ sum: number;
+ sum_of_squares: number | null;
+ variance: number | null;
+ variance_population: number | null;
+ variance_sampling: number | null;
+ std_deviation: number | null;
+ std_deviation_population: number | null;
+ std_deviation_sampling: number | null;
+ std_deviation_bounds: {
+ upper: number | null;
+ lower: number | null;
+ upper_population: number | null;
+ lower_population: number | null;
+ upper_sampling: number | null;
+ lower_sampling: number | null;
+ };
+ } & (
+ | {
+ min_as_string: string;
+ max_as_string: string;
+ avg_as_string: string;
+ sum_of_squares_as_string: string;
+ variance_population_as_string: string;
+ variance_sampling_as_string: string;
+ std_deviation_as_string: string;
+ std_deviation_population_as_string: string;
+ std_deviation_sampling_as_string: string;
+ std_deviation_bounds_as_string: {
+ upper: string;
+ lower: string;
+ upper_population: string;
+ lower_population: string;
+ upper_sampling: string;
+ lower_sampling: string;
+ };
+ }
+ | {}
+ );
+ extended_stats_bucket: {
+ count: number;
+ min: number | null;
+ max: number | null;
+ avg: number | null;
+ sum: number | null;
+ sum_of_squares: number | null;
+ variance: number | null;
+ variance_population: number | null;
+ variance_sampling: number | null;
+ std_deviation: number | null;
+ std_deviation_population: number | null;
+ std_deviation_sampling: number | null;
+ std_deviation_bounds: {
+ upper: number | null;
+ lower: number | null;
+ upper_population: number | null;
+ lower_population: number | null;
+ upper_sampling: number | null;
+ lower_sampling: number | null;
+ };
+ };
+ filter: {
doc_count: number;
- key: string;
- }
- >;
- derivative:
- | {
- value: number | null;
- }
- | undefined;
- extended_stats: {
- count: number;
- min: number | null;
- max: number | null;
- avg: number | null;
- sum: number;
- sum_of_squares: number | null;
- variance: number | null;
- variance_population: number | null;
- variance_sampling: number | null;
- std_deviation: number | null;
- std_deviation_population: number | null;
- std_deviation_sampling: number | null;
- std_deviation_bounds: {
- upper: number | null;
- lower: number | null;
- upper_population: number | null;
- lower_population: number | null;
- upper_sampling: number | null;
- lower_sampling: number | null;
- };
- } & (
- | {
- min_as_string: string;
- max_as_string: string;
- avg_as_string: string;
- sum_of_squares_as_string: string;
- variance_population_as_string: string;
- variance_sampling_as_string: string;
- std_deviation_as_string: string;
- std_deviation_population_as_string: string;
- std_deviation_sampling_as_string: string;
- std_deviation_bounds_as_string: {
- upper: string;
- lower: string;
- upper_population: string;
- lower_population: string;
- upper_sampling: string;
- lower_sampling: string;
+ } & SubAggregateOf;
+ filters: {
+ buckets: TAggregationContainer extends { filters: { filters: any[] } }
+ ? Array<
+ {
+ doc_count: number;
+ } & SubAggregateOf
+ >
+ : TAggregationContainer extends { filters: { filters: Record } }
+ ? {
+ [key in keyof TAggregationContainer['filters']['filters']]: {
+ doc_count: number;
+ } & SubAggregateOf;
+ } & (TAggregationContainer extends {
+ filters: { other_bucket_key: infer TOtherBucketKey };
+ }
+ ? Record<
+ TOtherBucketKey & string,
+ { doc_count: number } & SubAggregateOf
+ >
+ : unknown) &
+ (TAggregationContainer extends { filters: { other_bucket: true } }
+ ? {
+ _other: { doc_count: number } & SubAggregateOf<
+ TAggregationContainer,
+ TDocument
+ >;
+ }
+ : unknown)
+ : unknown;
+ };
+ geo_bounds: {
+ top_left: {
+ lat: number | null;
+ lon: number | null;
};
- }
- | {}
- );
- extended_stats_bucket: {
- count: number;
- min: number | null;
- max: number | null;
- avg: number | null;
- sum: number | null;
- sum_of_squares: number | null;
- variance: number | null;
- variance_population: number | null;
- variance_sampling: number | null;
- std_deviation: number | null;
- std_deviation_population: number | null;
- std_deviation_sampling: number | null;
- std_deviation_bounds: {
- upper: number | null;
- lower: number | null;
- upper_population: number | null;
- lower_population: number | null;
- upper_sampling: number | null;
- lower_sampling: number | null;
- };
- };
- filter: {
- doc_count: number;
- } & SubAggregateOf;
- filters: {
- buckets: TAggregationContainer extends { filters: { filters: any[] } }
- ? Array<
+ bottom_right: {
+ lat: number | null;
+ lon: number | null;
+ };
+ };
+ geo_centroid: {
+ count: number;
+ location: {
+ lat: number;
+ lon: number;
+ };
+ };
+ geo_distance: MaybeKeyed<
+ TAggregationContainer,
+ {
+ from: number;
+ to?: number;
+ doc_count: number;
+ } & SubAggregateOf
+ >;
+ geo_hash: {
+ buckets: Array<
{
doc_count: number;
+ key: string;
} & SubAggregateOf
- >
- : TAggregationContainer extends { filters: { filters: Record } }
- ? {
- [key in keyof TAggregationContainer['filters']['filters']]: {
+ >;
+ };
+ geotile_grid: {
+ buckets: Array<
+ {
doc_count: number;
- } & SubAggregateOf;
- } & (TAggregationContainer extends { filters: { other_bucket_key: infer TOtherBucketKey } }
- ? Record<
- TOtherBucketKey & string,
- { doc_count: number } & SubAggregateOf
- >
- : unknown) &
- (TAggregationContainer extends { filters: { other_bucket: true } }
- ? { _other: { doc_count: number } & SubAggregateOf }
- : unknown)
- : unknown;
- };
- geo_bounds: {
- top_left: {
- lat: number | null;
- lon: number | null;
- };
- bottom_right: {
- lat: number | null;
- lon: number | null;
- };
- };
- geo_centroid: {
- count: number;
- location: {
- lat: number;
- lon: number;
- };
- };
- geo_distance: MaybeKeyed<
- TAggregationContainer,
- {
- from: number;
- to?: number;
- doc_count: number;
- } & SubAggregateOf
- >;
- geo_hash: {
- buckets: Array<
- {
+ key: string;
+ } & SubAggregateOf
+ >;
+ };
+ global: {
doc_count: number;
- key: string;
- } & SubAggregateOf
- >;
- };
- geotile_grid: {
- buckets: Array<
- {
+ } & SubAggregateOf;
+ histogram: MaybeKeyed<
+ TAggregationContainer,
+ {
+ key: number;
+ doc_count: number;
+ } & SubAggregateOf
+ >;
+ ip_range: MaybeKeyed<
+ TAggregationContainer,
+ {
+ key: string;
+ from?: string;
+ to?: string;
+ doc_count: number;
+ },
+ TAggregationContainer extends { ip_range: { ranges: Array } }
+ ? TRangeType extends { key: infer TKeys }
+ ? TKeys
+ : string
+ : string
+ >;
+ inference: {
+ value: number;
+ prediction_probability: number;
+ prediction_score: number;
+ };
+ max: {
+ value: number | null;
+ value_as_string?: string;
+ };
+ max_bucket: {
+ value: number | null;
+ };
+ min: {
+ value: number | null;
+ value_as_string?: string;
+ };
+ min_bucket: {
+ value: number | null;
+ };
+ median_absolute_deviation: {
+ value: number | null;
+ };
+ moving_avg:
+ | {
+ value: number | null;
+ }
+ | undefined;
+ moving_fn: {
+ value: number | null;
+ };
+ moving_percentiles: TAggregationContainer extends Record
+ ? Array<{ key: number; value: number | null }>
+ : Record | undefined;
+ missing: {
doc_count: number;
- key: string;
- } & SubAggregateOf
- >;
- };
- global: {
- doc_count: number;
- } & SubAggregateOf;
- histogram: MaybeKeyed<
- TAggregationContainer,
- {
- key: number;
- doc_count: number;
- } & SubAggregateOf
- >;
- ip_range: MaybeKeyed<
- TAggregationContainer,
- {
- key: string;
- from?: string;
- to?: string;
- doc_count: number;
- },
- TAggregationContainer extends { ip_range: { ranges: Array } }
- ? TRangeType extends { key: infer TKeys }
- ? TKeys
- : string
- : string
- >;
- inference: {
- value: number;
- prediction_probability: number;
- prediction_score: number;
- };
- max: {
- value: number | null;
- value_as_string?: string;
- };
- max_bucket: {
- value: number | null;
- };
- min: {
- value: number | null;
- value_as_string?: string;
- };
- min_bucket: {
- value: number | null;
- };
- median_absolute_deviation: {
- value: number | null;
- };
- moving_avg:
- | {
+ } & SubAggregateOf;
+ multi_terms: {
+ doc_count_error_upper_bound: number;
+ sum_other_doc_count: number;
+ buckets: Array<
+ {
+ doc_count: number;
+ key: string[];
+ } & SubAggregateOf
+ >;
+ };
+ nested: {
+ doc_count: number;
+ } & SubAggregateOf;
+ normalize: {
value: number | null;
- }
- | undefined;
- moving_fn: {
- value: number | null;
- };
- moving_percentiles: TAggregationContainer extends Record
- ? Array<{ key: number; value: number | null }>
- : Record | undefined;
- missing: {
- doc_count: number;
- } & SubAggregateOf;
- multi_terms: {
- doc_count_error_upper_bound: number;
- sum_other_doc_count: number;
- buckets: Array<
- {
+ // TODO: should be perhaps based on input? ie when `format` is specified
+ value_as_string?: string;
+ };
+ parent: {
doc_count: number;
- key: string[];
- } & SubAggregateOf
- >;
- };
- nested: {
- doc_count: number;
- } & SubAggregateOf;
- normalize: {
- value: number | null;
- // TODO: should be perhaps based on input? ie when `format` is specified
- value_as_string?: string;
- };
- parent: {
- doc_count: number;
- } & SubAggregateOf;
- percentiles: {
- values: TAggregationContainer extends Record
- ? Array<{ key: number; value: number | null }>
- : Record;
- };
- percentile_ranks: {
- values: TAggregationContainer extends Record
- ? Array<{ key: number; value: number | null }>
- : Record;
- };
- percentiles_bucket: {
- values: TAggregationContainer extends Record
- ? Array<{ key: number; value: number | null }>
- : Record;
- };
- range: MaybeKeyed<
- TAggregationContainer,
- {
- key: string;
- from?: number;
- from_as_string?: string;
- to?: number;
- to_as_string?: string;
- doc_count: number;
- },
- TAggregationContainer extends { range: { ranges: Array } }
- ? TRangeType extends { key: infer TKeys }
- ? TKeys
- : string
- : string
- >;
- rare_terms: Array<
- {
- key: string | number;
- doc_count: number;
- } & SubAggregateOf
- >;
- rate: {
- value: number | null;
- };
- reverse_nested: {
- doc_count: number;
- } & SubAggregateOf;
- sampler: {
- doc_count: number;
- } & SubAggregateOf;
- scripted_metric: {
- value: unknown;
- };
- serial_diff: {
- value: number | null;
- // TODO: should be perhaps based on input? ie when `format` is specified
- value_as_string?: string;
- };
- significant_terms: {
- doc_count: number;
- bg_count: number;
- buckets: Array<
- {
- key: string | number;
- score: number;
+ } & SubAggregateOf;
+ percentiles: {
+ values: TAggregationContainer extends Record
+ ? Array<{ key: number; value: number | null }>
+ : Record;
+ };
+ percentile_ranks: {
+ values: TAggregationContainer extends Record
+ ? Array<{ key: number; value: number | null }>
+ : Record;
+ };
+ percentiles_bucket: {
+ values: TAggregationContainer extends Record
+ ? Array<{ key: number; value: number | null }>
+ : Record;
+ };
+ range: MaybeKeyed<
+ TAggregationContainer,
+ {
+ key: string;
+ from?: number;
+ from_as_string?: string;
+ to?: number;
+ to_as_string?: string;
+ doc_count: number;
+ },
+ TAggregationContainer extends { range: { ranges: Array } }
+ ? TRangeType extends { key: infer TKeys }
+ ? TKeys
+ : string
+ : string
+ >;
+ rare_terms: Array<
+ {
+ key: string | number;
+ doc_count: number;
+ } & SubAggregateOf
+ >;
+ rate: {
+ value: number | null;
+ };
+ reverse_nested: {
+ doc_count: number;
+ } & SubAggregateOf;
+ sampler: {
+ doc_count: number;
+ } & SubAggregateOf;
+ scripted_metric: {
+ value: unknown;
+ };
+ serial_diff: {
+ value: number | null;
+ // TODO: should be perhaps based on input? ie when `format` is specified
+ value_as_string?: string;
+ };
+ significant_terms: {
doc_count: number;
bg_count: number;
- } & SubAggregateOf
- >;
- };
- significant_text: {
- doc_count: number;
- buckets: Array<{
- key: string;
- doc_count: number;
- score: number;
- bg_count: number;
- }>;
- };
- stats: {
- count: number;
- min: number | null;
- max: number | null;
- avg: number | null;
- sum: number;
- } & (
- | {
- min_as_string: string;
- max_as_string: string;
- avg_as_string: string;
- sum_as_string: string;
- }
- | {}
- );
- stats_bucket: {
- count: number;
- min: number | null;
- max: number | null;
- avg: number | null;
- sum: number;
- };
- string_stats: {
- count: number;
- min_length: number | null;
- max_length: number | null;
- avg_length: number | null;
- entropy: number | null;
- distribution: Record;
- };
- sum: {
- value: number | null;
- value_as_string?: string;
- };
- sum_bucket: {
- value: number | null;
- };
- terms: {
- doc_count_error_upper_bound: number;
- sum_other_doc_count: number;
- buckets: Array<
- {
+ buckets: Array<
+ {
+ key: string | number;
+ score: number;
+ doc_count: number;
+ bg_count: number;
+ } & SubAggregateOf
+ >;
+ };
+ significant_text: {
doc_count: number;
- key: string | number;
- } & SubAggregateOf
- >;
- };
- top_hits: {
- hits: {
- total: {
+ buckets: Array<{
+ key: string;
+ doc_count: number;
+ score: number;
+ bg_count: number;
+ }>;
+ };
+ stats: {
+ count: number;
+ min: number | null;
+ max: number | null;
+ avg: number | null;
+ sum: number;
+ } & (
+ | {
+ min_as_string: string;
+ max_as_string: string;
+ avg_as_string: string;
+ sum_as_string: string;
+ }
+ | {}
+ );
+ stats_bucket: {
+ count: number;
+ min: number | null;
+ max: number | null;
+ avg: number | null;
+ sum: number;
+ };
+ string_stats: {
+ count: number;
+ min_length: number | null;
+ max_length: number | null;
+ avg_length: number | null;
+ entropy: number | null;
+ distribution: Record;
+ };
+ sum: {
+ value: number | null;
+ value_as_string?: string;
+ };
+ sum_bucket: {
+ value: number | null;
+ };
+ terms: {
+ doc_count_error_upper_bound: number;
+ sum_other_doc_count: number;
+ buckets: Array<
+ {
+ doc_count: number;
+ key: string | number;
+ } & SubAggregateOf
+ >;
+ };
+ top_hits: {
+ hits: {
+ total: {
+ value: number;
+ relation: 'eq' | 'gte';
+ };
+ max_score: number | null;
+ hits: TAggregationContainer extends { top_hits: estypes.AggregationsTopHitsAggregation }
+ ? HitsOf
+ : estypes.SearchHitsMetadata;
+ };
+ };
+ top_metrics: {
+ top: Array<{
+ sort: number[] | string[];
+ metrics: Record, string | number | null>;
+ }>;
+ };
+ weighted_avg: { value: number | null };
+ value_count: {
value: number;
- relation: 'eq' | 'gte';
};
- max_score: number | null;
- hits: TAggregationContainer extends { top_hits: estypes.AggregationsTopHitsAggregation }
- ? HitsOf
- : estypes.SearchHitsMetadata;
- };
- };
- top_metrics: {
- top: Array<{
- sort: number[] | string[];
- metrics: Record, string | number | null>;
- }>;
- };
- weighted_avg: { value: number | null };
- value_count: {
- value: number;
- };
- // t_test: {} not defined
-})[ValidAggregationKeysOf & AggregationTypeName];
+ // t_test: {} not defined
+ },
+ Exclude, 'aggs' | 'aggregations'> & string
+ >
+>;
type AggregateOfMap = {
- [TAggregationName in keyof TAggregationMap]: Required[TAggregationName] extends estypes.AggregationsAggregationContainer
+ [TAggregationName in keyof TAggregationMap]: Required[TAggregationName] extends AggregationsAggregationContainer
? AggregateOf
: never; // using never means we effectively ignore optional keys, using {} creates a union type of { ... } | {}
};
diff --git a/src/dev/build/lib/integration_tests/version_info.test.ts b/src/dev/build/lib/integration_tests/version_info.test.ts
index e7a3a04c04734..9385de6e00a4f 100644
--- a/src/dev/build/lib/integration_tests/version_info.test.ts
+++ b/src/dev/build/lib/integration_tests/version_info.test.ts
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-import { kibanaPackageJson as pkg } from '@kbn/dev-utils';
+import { kibanaPackageJson as pkg } from '@kbn/utils';
import { getVersionInfo } from '../version_info';
diff --git a/src/dev/build/tasks/build_kibana_example_plugins.ts b/src/dev/build/tasks/build_kibana_example_plugins.ts
index 7eb696ffdd3b2..93ebf41d259e7 100644
--- a/src/dev/build/tasks/build_kibana_example_plugins.ts
+++ b/src/dev/build/tasks/build_kibana_example_plugins.ts
@@ -13,17 +13,23 @@ import { exec, mkdirp, copyAll, Task } from '../lib';
export const BuildKibanaExamplePlugins: Task = {
description: 'Building distributable versions of Kibana example plugins',
- async run(config, log, build) {
- const examplesDir = Path.resolve(REPO_ROOT, 'examples');
+ async run(config, log) {
const args = [
- '../../scripts/plugin_helpers',
+ Path.resolve(REPO_ROOT, 'scripts/plugin_helpers'),
'build',
`--kibana-version=${config.getBuildVersion()}`,
];
- const folders = Fs.readdirSync(examplesDir, { withFileTypes: true })
- .filter((f) => f.isDirectory())
- .map((f) => Path.resolve(REPO_ROOT, 'examples', f.name));
+ const getExampleFolders = (dir: string) => {
+ return Fs.readdirSync(dir, { withFileTypes: true })
+ .filter((f) => f.isDirectory())
+ .map((f) => Path.resolve(dir, f.name));
+ };
+
+ const folders = [
+ ...getExampleFolders(Path.resolve(REPO_ROOT, 'examples')),
+ ...getExampleFolders(Path.resolve(REPO_ROOT, 'x-pack/examples')),
+ ];
for (const examplePlugin of folders) {
try {
@@ -40,8 +46,8 @@ export const BuildKibanaExamplePlugins: Task = {
const pluginsDir = config.resolveFromTarget('example_plugins');
await mkdirp(pluginsDir);
- await copyAll(examplesDir, pluginsDir, {
- select: ['*/build/*.zip'],
+ await copyAll(REPO_ROOT, pluginsDir, {
+ select: ['examples/*/build/*.zip', 'x-pack/examples/*/build/*.zip'],
});
},
};
diff --git a/src/dev/build/tasks/os_packages/docker_generator/bundle_dockerfiles.ts b/src/dev/build/tasks/os_packages/docker_generator/bundle_dockerfiles.ts
index 02b469820f900..cc1ffb5f3e301 100644
--- a/src/dev/build/tasks/os_packages/docker_generator/bundle_dockerfiles.ts
+++ b/src/dev/build/tasks/os_packages/docker_generator/bundle_dockerfiles.ts
@@ -10,7 +10,8 @@ import { resolve } from 'path';
import { readFileSync } from 'fs';
import { copyFile } from 'fs/promises';
-import { ToolingLog, REPO_ROOT } from '@kbn/dev-utils';
+import { ToolingLog } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import Mustache from 'mustache';
import { compressTar, copyAll, mkdirp, write, Config } from '../../../lib';
diff --git a/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker b/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker
index 895c42ad5f47d..a7d8fe684ef95 100755
--- a/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker
+++ b/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker
@@ -354,6 +354,7 @@ kibana_vars=(
xpack.security.showInsecureClusterWarning
xpack.securitySolution.alertMergeStrategy
xpack.securitySolution.alertIgnoreFields
+ xpack.securitySolution.maxExceptionsImportSize
xpack.securitySolution.maxRuleImportExportSize
xpack.securitySolution.maxRuleImportPayloadBytes
xpack.securitySolution.maxTimelineImportExportSize
diff --git a/src/dev/build/tasks/os_packages/docker_generator/run.ts b/src/dev/build/tasks/os_packages/docker_generator/run.ts
index 6a192baed3fa3..085b4393caa66 100644
--- a/src/dev/build/tasks/os_packages/docker_generator/run.ts
+++ b/src/dev/build/tasks/os_packages/docker_generator/run.ts
@@ -10,7 +10,8 @@ import { access, link, unlink, chmod } from 'fs';
import { resolve, basename } from 'path';
import { promisify } from 'util';
-import { ToolingLog, kibanaPackageJson } from '@kbn/dev-utils';
+import { ToolingLog } from '@kbn/dev-utils';
+import { kibanaPackageJson } from '@kbn/utils';
import { write, copyAll, mkdirp, exec, Config, Build } from '../../../lib';
import * as dockerTemplates from './templates';
diff --git a/src/dev/build/tasks/os_packages/docker_generator/templates/base/Dockerfile b/src/dev/build/tasks/os_packages/docker_generator/templates/base/Dockerfile
index b1d9fafffab57..90a622e64efe4 100644
--- a/src/dev/build/tasks/os_packages/docker_generator/templates/base/Dockerfile
+++ b/src/dev/build/tasks/os_packages/docker_generator/templates/base/Dockerfile
@@ -16,7 +16,7 @@ RUN {{packageManager}} install -y findutils tar gzip
{{/ubi}}
{{#usePublicArtifact}}
-RUN cd /opt && \
+RUN cd /tmp && \
curl --retry 8 -s -L \
--output kibana.tar.gz \
https://artifacts.elastic.co/downloads/kibana/{{artifactPrefix}}-$(arch).tar.gz && \
diff --git a/src/dev/build/tasks/os_packages/docker_generator/templates/ironbank/Dockerfile b/src/dev/build/tasks/os_packages/docker_generator/templates/ironbank/Dockerfile
index dbdace85eda01..e9a6ef3539692 100644
--- a/src/dev/build/tasks/os_packages/docker_generator/templates/ironbank/Dockerfile
+++ b/src/dev/build/tasks/os_packages/docker_generator/templates/ironbank/Dockerfile
@@ -2,9 +2,9 @@
# Build stage 0
# Extract Kibana and make various file manipulations.
################################################################################
-ARG BASE_REGISTRY=registry1.dsop.io
+ARG BASE_REGISTRY=registry1.dso.mil
ARG BASE_IMAGE=redhat/ubi/ubi8
-ARG BASE_TAG=8.4
+ARG BASE_TAG=8.5
FROM ${BASE_REGISTRY}/${BASE_IMAGE}:${BASE_TAG} as prep_files
diff --git a/src/dev/build/tasks/os_packages/docker_generator/templates/ironbank/hardening_manifest.yaml b/src/dev/build/tasks/os_packages/docker_generator/templates/ironbank/hardening_manifest.yaml
index 24614039e5eb7..1c7926c2fcbc2 100644
--- a/src/dev/build/tasks/os_packages/docker_generator/templates/ironbank/hardening_manifest.yaml
+++ b/src/dev/build/tasks/os_packages/docker_generator/templates/ironbank/hardening_manifest.yaml
@@ -14,7 +14,7 @@ tags:
# Build args passed to Dockerfile ARGs
args:
BASE_IMAGE: 'redhat/ubi/ubi8'
- BASE_TAG: '8.4'
+ BASE_TAG: '8.5'
# Docker image labels
labels:
@@ -59,4 +59,4 @@ maintainers:
- email: "yalabe.dukuly@anchore.com"
name: "Yalabe Dukuly"
username: "yalabe.dukuly"
- cht_member: true
\ No newline at end of file
+ cht_member: true
diff --git a/src/dev/chromium_version.ts b/src/dev/chromium_version.ts
index 410fcc72fbc0f..1f55330a92bb6 100644
--- a/src/dev/chromium_version.ts
+++ b/src/dev/chromium_version.ts
@@ -6,7 +6,8 @@
* Side Public License, v 1.
*/
-import { run, REPO_ROOT, ToolingLog } from '@kbn/dev-utils';
+import { run, ToolingLog } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import chalk from 'chalk';
import cheerio from 'cheerio';
import fs from 'fs';
diff --git a/src/dev/code_coverage/ingest_coverage/__tests__/enumerate_patterns.test.js b/src/dev/code_coverage/ingest_coverage/__tests__/enumerate_patterns.test.js
index 57467d84f1f61..40d36ed46ea34 100644
--- a/src/dev/code_coverage/ingest_coverage/__tests__/enumerate_patterns.test.js
+++ b/src/dev/code_coverage/ingest_coverage/__tests__/enumerate_patterns.test.js
@@ -7,7 +7,8 @@
*/
import { enumeratePatterns } from '../team_assignment/enumerate_patterns';
-import { ToolingLog, REPO_ROOT } from '@kbn/dev-utils';
+import { ToolingLog } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
const log = new ToolingLog({
level: 'info',
diff --git a/src/dev/code_coverage/ingest_coverage/team_assignment/index.js b/src/dev/code_coverage/ingest_coverage/team_assignment/index.js
index 0e341a3aac1dc..a38c4ee50b40a 100644
--- a/src/dev/code_coverage/ingest_coverage/team_assignment/index.js
+++ b/src/dev/code_coverage/ingest_coverage/team_assignment/index.js
@@ -6,7 +6,8 @@
* Side Public License, v 1.
*/
-import { run, createFlagError, REPO_ROOT } from '@kbn/dev-utils';
+import { run, createFlagError } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { parse } from './parse_owners';
import { flush } from './flush';
import { enumeratePatterns } from './enumerate_patterns';
diff --git a/src/dev/ensure_all_tests_in_ci_group.ts b/src/dev/ensure_all_tests_in_ci_group.ts
index aeccefae05d2c..a2d9729d3352b 100644
--- a/src/dev/ensure_all_tests_in_ci_group.ts
+++ b/src/dev/ensure_all_tests_in_ci_group.ts
@@ -12,7 +12,8 @@ import Fs from 'fs/promises';
import execa from 'execa';
import { safeLoad } from 'js-yaml';
-import { run, REPO_ROOT } from '@kbn/dev-utils';
+import { run } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { schema } from '@kbn/config-schema';
const RELATIVE_JOBS_YAML_PATH = '.ci/ci_groups.yml';
diff --git a/src/dev/eslint/run_eslint_with_types.ts b/src/dev/eslint/run_eslint_with_types.ts
index 750011dea1031..0f2a10d07d681 100644
--- a/src/dev/eslint/run_eslint_with_types.ts
+++ b/src/dev/eslint/run_eslint_with_types.ts
@@ -14,7 +14,8 @@ import execa from 'execa';
import * as Rx from 'rxjs';
import { mergeMap, reduce } from 'rxjs/operators';
import { supportsColor } from 'chalk';
-import { REPO_ROOT, run, createFailError } from '@kbn/dev-utils';
+import { run, createFailError } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { lastValueFrom } from '@kbn/std';
import { PROJECTS } from '../typescript/projects';
diff --git a/src/dev/license_checker/config.ts b/src/dev/license_checker/config.ts
index 52b1f816090df..9674694c0d655 100644
--- a/src/dev/license_checker/config.ts
+++ b/src/dev/license_checker/config.ts
@@ -76,6 +76,6 @@ export const LICENSE_OVERRIDES = {
'jsts@1.6.2': ['Eclipse Distribution License - v 1.0'], // cf. https://github.com/bjornharrtell/jsts
'@mapbox/jsonlint-lines-primitives@2.0.2': ['MIT'], // license in readme https://github.com/tmcw/jsonlint
'@elastic/ems-client@8.0.0': ['Elastic License 2.0'],
- '@elastic/eui@41.0.0': ['SSPL-1.0 OR Elastic License 2.0'],
+ '@elastic/eui@41.2.3': ['SSPL-1.0 OR Elastic License 2.0'],
'language-subtag-registry@0.3.21': ['CC-BY-4.0'], // retired ODC‑By license https://github.com/mattcg/language-subtag-registry
};
diff --git a/src/dev/plugin_discovery/find_plugins.ts b/src/dev/plugin_discovery/find_plugins.ts
index f1725f34d1f8e..53a53bc08e15b 100644
--- a/src/dev/plugin_discovery/find_plugins.ts
+++ b/src/dev/plugin_discovery/find_plugins.ts
@@ -8,11 +8,9 @@
import Path from 'path';
import { getPluginSearchPaths } from '@kbn/config';
-import {
- KibanaPlatformPlugin,
- REPO_ROOT,
- simpleKibanaPlatformPluginDiscovery,
-} from '@kbn/dev-utils';
+import { KibanaPlatformPlugin, simpleKibanaPlatformPluginDiscovery } from '@kbn/dev-utils';
+
+import { REPO_ROOT } from '@kbn/utils';
export interface SearchOptions {
oss: boolean;
diff --git a/src/dev/run_build_docs_cli.ts b/src/dev/run_build_docs_cli.ts
index aad524b4437d3..8ee75912c1a7e 100644
--- a/src/dev/run_build_docs_cli.ts
+++ b/src/dev/run_build_docs_cli.ts
@@ -9,7 +9,8 @@
import Path from 'path';
import dedent from 'dedent';
-import { run, REPO_ROOT, createFailError } from '@kbn/dev-utils';
+import { run, createFailError } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
const DEFAULT_DOC_REPO_PATH = Path.resolve(REPO_ROOT, '..', 'docs');
diff --git a/src/dev/run_find_plugins_with_circular_deps.ts b/src/dev/run_find_plugins_with_circular_deps.ts
index f7974b464fcaf..f9ee7bd84c54f 100644
--- a/src/dev/run_find_plugins_with_circular_deps.ts
+++ b/src/dev/run_find_plugins_with_circular_deps.ts
@@ -10,7 +10,8 @@ import dedent from 'dedent';
import { parseDependencyTree, parseCircular, prettyCircular } from 'dpdm';
import { relative } from 'path';
import { getPluginSearchPaths } from '@kbn/config';
-import { REPO_ROOT, run } from '@kbn/dev-utils';
+import { run } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
interface Options {
debug?: boolean;
diff --git a/src/dev/run_precommit_hook.js b/src/dev/run_precommit_hook.js
index a7bd0a9f57f6e..dfa3a94426bb2 100644
--- a/src/dev/run_precommit_hook.js
+++ b/src/dev/run_precommit_hook.js
@@ -8,7 +8,8 @@
import SimpleGit from 'simple-git/promise';
-import { run, combineErrors, createFlagError, REPO_ROOT } from '@kbn/dev-utils';
+import { run, combineErrors, createFlagError } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import * as Eslint from './eslint';
import * as Stylelint from './stylelint';
import { getFilesForCommit, checkFileCasing } from './precommit_hook';
diff --git a/src/dev/typescript/build_ts_refs.ts b/src/dev/typescript/build_ts_refs.ts
index aaa8c0d12fa4d..f3896cf676e27 100644
--- a/src/dev/typescript/build_ts_refs.ts
+++ b/src/dev/typescript/build_ts_refs.ts
@@ -8,7 +8,8 @@
import Path from 'path';
-import { ToolingLog, REPO_ROOT, ProcRunner } from '@kbn/dev-utils';
+import { ToolingLog, ProcRunner } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import { ROOT_REFS_CONFIG_PATH } from './root_refs_config';
import { Project } from './project';
diff --git a/src/dev/typescript/build_ts_refs_cli.ts b/src/dev/typescript/build_ts_refs_cli.ts
index c68424c2a98f7..09866315fc8dd 100644
--- a/src/dev/typescript/build_ts_refs_cli.ts
+++ b/src/dev/typescript/build_ts_refs_cli.ts
@@ -8,7 +8,8 @@
import Path from 'path';
-import { run, REPO_ROOT, createFlagError } from '@kbn/dev-utils';
+import { run, createFlagError } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import del from 'del';
import { RefOutputCache } from './ref_output_cache';
diff --git a/src/dev/typescript/ref_output_cache/ref_output_cache.ts b/src/dev/typescript/ref_output_cache/ref_output_cache.ts
index b7e641ceb33d5..32b08ec1ba0df 100644
--- a/src/dev/typescript/ref_output_cache/ref_output_cache.ts
+++ b/src/dev/typescript/ref_output_cache/ref_output_cache.ts
@@ -9,7 +9,8 @@
import Path from 'path';
import Fs from 'fs/promises';
-import { ToolingLog, kibanaPackageJson, extract } from '@kbn/dev-utils';
+import { ToolingLog, extract } from '@kbn/dev-utils';
+import { kibanaPackageJson } from '@kbn/utils';
import del from 'del';
import tempy from 'tempy';
diff --git a/src/dev/typescript/root_refs_config.ts b/src/dev/typescript/root_refs_config.ts
index f4aa88f1ea6b2..e20b1ab46cd82 100644
--- a/src/dev/typescript/root_refs_config.ts
+++ b/src/dev/typescript/root_refs_config.ts
@@ -10,7 +10,8 @@ import Path from 'path';
import Fs from 'fs/promises';
import dedent from 'dedent';
-import { REPO_ROOT, ToolingLog } from '@kbn/dev-utils';
+import { ToolingLog } from '@kbn/dev-utils';
+import { REPO_ROOT } from '@kbn/utils';
import normalize from 'normalize-path';
import { PROJECTS } from './projects';
diff --git a/src/plugins/advanced_settings/kibana.json b/src/plugins/advanced_settings/kibana.json
index 7562b6a660193..033d5e9da9eab 100644
--- a/src/plugins/advanced_settings/kibana.json
+++ b/src/plugins/advanced_settings/kibana.json
@@ -5,7 +5,7 @@
"ui": true,
"requiredPlugins": ["management"],
"optionalPlugins": ["home", "usageCollection"],
- "requiredBundles": ["kibanaReact", "kibanaUtils", "home", "esUiShared"],
+ "requiredBundles": ["kibanaReact", "kibanaUtils", "home"],
"owner": {
"name": "Vis Editors",
"githubTeam": "kibana-vis-editors"
diff --git a/src/plugins/advanced_settings/public/management_app/advanced_settings.tsx b/src/plugins/advanced_settings/public/management_app/advanced_settings.tsx
index c0decf516fbad..e0966d70aeb98 100644
--- a/src/plugins/advanced_settings/public/management_app/advanced_settings.tsx
+++ b/src/plugins/advanced_settings/public/management_app/advanced_settings.tsx
@@ -33,6 +33,7 @@ import { getAriaName, toEditableConfig, fieldSorter, DEFAULT_CATEGORY } from './
import { FieldSetting, SettingsChanges } from './types';
import { parseErrorMsg } from './components/search/search';
+import { KibanaContextProvider } from '../../../../../src/plugins/kibana_react/public';
export const QUERY = 'query';
@@ -259,21 +260,23 @@ export class AdvancedSettings extends Component
-
+
+
+
-
@@ -1906,30 +1893,17 @@ exports[`Field for json setting should render as read only with help text if ove
-
@@ -1989,30 +1963,17 @@ exports[`Field for json setting should render custom setting icon if it is custo
-
@@ -2103,30 +2064,17 @@ exports[`Field for json setting should render default value if there is no user
-
@@ -2192,35 +2140,22 @@ exports[`Field for json setting should render unsaved value if there are unsaved
-
@@ -2318,30 +2253,17 @@ exports[`Field for json setting should render user value if there is user value
-
@@ -2390,30 +2312,17 @@ exports[`Field for markdown setting should render as read only if saving is disa
-
@@ -2494,30 +2403,17 @@ exports[`Field for markdown setting should render as read only with help text if
-
@@ -2577,30 +2473,17 @@ exports[`Field for markdown setting should render custom setting icon if it is c
-
@@ -2649,30 +2532,17 @@ exports[`Field for markdown setting should render default value if there is no u
-
@@ -2738,31 +2608,18 @@ exports[`Field for markdown setting should render unsaved value if there are uns
-
@@ -2857,30 +2714,17 @@ exports[`Field for markdown setting should render user value if there is user va
-
diff --git a/src/plugins/advanced_settings/public/management_app/components/field/field.test.tsx b/src/plugins/advanced_settings/public/management_app/components/field/field.test.tsx
index 7047959522427..b77a687b50cd9 100644
--- a/src/plugins/advanced_settings/public/management_app/components/field/field.test.tsx
+++ b/src/plugins/advanced_settings/public/management_app/components/field/field.test.tsx
@@ -17,8 +17,9 @@ import { notificationServiceMock, docLinksServiceMock } from '../../../../../../
import { findTestSubject } from '@elastic/eui/lib/test';
import { Field, getEditableValue } from './field';
-jest.mock('brace/theme/textmate', () => 'brace/theme/textmate');
-jest.mock('brace/mode/markdown', () => 'brace/mode/markdown');
+jest.mock('../../../../../kibana_react/public/ui_settings/use_ui_setting', () => ({
+ useUiSetting: jest.fn(),
+}));
const defaults = {
requiresPageReload: false,
diff --git a/src/plugins/advanced_settings/public/management_app/components/field/field.tsx b/src/plugins/advanced_settings/public/management_app/components/field/field.tsx
index 586609fa1bf64..e43f30e52ee74 100644
--- a/src/plugins/advanced_settings/public/management_app/components/field/field.tsx
+++ b/src/plugins/advanced_settings/public/management_app/components/field/field.tsx
@@ -8,10 +8,6 @@
import React, { PureComponent, Fragment } from 'react';
import classNames from 'classnames';
-import 'react-ace';
-import 'brace/theme/textmate';
-import 'brace/mode/markdown';
-import 'brace/mode/json';
import {
EuiBadge,
@@ -36,10 +32,10 @@ import {
} from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n-react';
+import { FieldCodeEditor } from './field_code_editor';
import { FieldSetting, FieldState } from '../../types';
import { isDefaultValue } from '../../lib';
import { UiSettingsType, DocLinksStart, ToastsStart } from '../../../../../../core/public';
-import { EuiCodeEditor } from '../../../../../es_ui_shared/public';
interface FieldProps {
setting: FieldSetting;
@@ -130,7 +126,7 @@ export class Field extends PureComponent {
switch (type) {
case 'json':
const isJsonArray = Array.isArray(JSON.parse((defVal as string) || '{}'));
- newUnsavedValue = value.trim() || (isJsonArray ? '[]' : '{}');
+ newUnsavedValue = value || (isJsonArray ? '[]' : '{}');
try {
JSON.parse(newUnsavedValue);
} catch (e) {
@@ -291,26 +287,13 @@ export class Field extends PureComponent {
case 'json':
return (
-
);
diff --git a/src/plugins/advanced_settings/public/management_app/components/field/field_code_editor.tsx b/src/plugins/advanced_settings/public/management_app/components/field/field_code_editor.tsx
new file mode 100644
index 0000000000000..5ba1c55e67ec8
--- /dev/null
+++ b/src/plugins/advanced_settings/public/management_app/components/field/field_code_editor.tsx
@@ -0,0 +1,106 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import React, { useCallback } from 'react';
+import { monaco, XJsonLang } from '@kbn/monaco';
+import { CodeEditor, MarkdownLang } from '../../../../../../../src/plugins/kibana_react/public';
+
+interface FieldCodeEditorProps {
+ value: string;
+ onChange: (value: string) => void;
+ type: 'markdown' | 'json';
+ isReadOnly: boolean;
+ a11yProps: Record<string, string>;
+ name: string;
+}
+
+const MIN_DEFAULT_LINES_COUNT = 6;
+const MAX_DEFAULT_LINES_COUNT = 30;
+
+export const FieldCodeEditor = ({
+ value,
+ onChange,
+ type,
+ isReadOnly,
+ a11yProps,
+ name,
+}: FieldCodeEditorProps) => {
+ // setting editor height based on lines height and count to stretch and fit its content
+ const setEditorCalculatedHeight = useCallback(
+ (editor: monaco.editor.IStandaloneCodeEditor) => {
+ const editorElement = editor.getDomNode();
+
+ if (!editorElement) {
+ return;
+ }
+
+ const lineHeight = editor.getOption(monaco.editor.EditorOption.lineHeight);
+ let lineCount = editor.getModel()?.getLineCount() || MIN_DEFAULT_LINES_COUNT;
+ if (lineCount < MIN_DEFAULT_LINES_COUNT) {
+ lineCount = MIN_DEFAULT_LINES_COUNT;
+ } else if (lineCount > MAX_DEFAULT_LINES_COUNT) {
+ lineCount = MAX_DEFAULT_LINES_COUNT;
+ }
+ const height = lineHeight * lineCount;
+
+ editorElement.id = name;
+ editorElement.style.height = `${height}px`;
+ editor.layout();
+ },
+ [name]
+ );
+
+ const trimEditorBlankLines = useCallback((editor: monaco.editor.IStandaloneCodeEditor) => {
+ const editorModel = editor.getModel();
+
+ if (!editorModel) {
+ return;
+ }
+ const trimmedValue = editorModel.getValue().trim();
+ editorModel.setValue(trimmedValue);
+ }, []);
+
+ const editorDidMount = useCallback(
+ (editor) => {
+ setEditorCalculatedHeight(editor);
+
+ editor.onDidChangeModelContent(() => {
+ setEditorCalculatedHeight(editor);
+ });
+
+ editor.onDidBlurEditorWidget(() => {
+ trimEditorBlankLines(editor);
+ });
+ },
+ [setEditorCalculatedHeight, trimEditorBlankLines]
+ );
+
+ return (
+
+ );
+};
diff --git a/src/plugins/console/public/application/components/editor_example.tsx b/src/plugins/console/public/application/components/editor_example.tsx
index 577f32fa912fb..21e3ab0c7d274 100644
--- a/src/plugins/console/public/application/components/editor_example.tsx
+++ b/src/plugins/console/public/application/components/editor_example.tsx
@@ -8,8 +8,10 @@
import { EuiScreenReaderOnly } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
-import React, { useEffect } from 'react';
-import { createReadOnlyAceEditor } from '../models/legacy_core_editor';
+import React, { useEffect, useRef } from 'react';
+import { createReadOnlyAceEditor, CustomAceEditor } from '../models/sense_editor';
+// @ts-ignore
+import { Mode } from '../models/legacy_core_editor/mode/input';
interface EditorExampleProps {
panel: string;
@@ -27,21 +29,33 @@ GET index/_doc/1
`;
export function EditorExample(props: EditorExampleProps) {
- const elemId = `help-example-${props.panel}`;
const inputId = `help-example-${props.panel}-input`;
+ const wrapperDivRef = useRef<HTMLDivElement | null>(null);
+ const editorRef = useRef<CustomAceEditor>();
useEffect(() => {
- const el = document.getElementById(elemId)!;
- el.textContent = exampleText.trim();
- const editor = createReadOnlyAceEditor(el);
- const textarea = el.querySelector('textarea')!;
- textarea.setAttribute('id', inputId);
- textarea.setAttribute('readonly', 'true');
+ if (wrapperDivRef.current) {
+ editorRef.current = createReadOnlyAceEditor(wrapperDivRef.current);
+
+ const editor = editorRef.current;
+ editor.update(exampleText.trim());
+ editor.session.setMode(new Mode());
+ editor.session.setUseWorker(false);
+ editor.setHighlightActiveLine(false);
+
+ const textareaElement = wrapperDivRef.current.querySelector('textarea');
+ if (textareaElement) {
+ textareaElement.setAttribute('id', inputId);
+ textareaElement.setAttribute('readonly', 'true');
+ }
+ }
return () => {
- editor.destroy();
+ if (editorRef.current) {
+ editorRef.current.destroy();
+ }
};
- }, [elemId, inputId]);
+ }, [inputId]);
return (
<>
@@ -52,7 +66,7 @@ export function EditorExample(props: EditorExampleProps) {
})}
-
+
>
);
}
diff --git a/src/plugins/console/public/application/contexts/services_context.mock.ts b/src/plugins/console/public/application/contexts/services_context.mock.ts
index c19413bdd0413..90a5d9ddce010 100644
--- a/src/plugins/console/public/application/contexts/services_context.mock.ts
+++ b/src/plugins/console/public/application/contexts/services_context.mock.ts
@@ -7,7 +7,7 @@
*/
import { notificationServiceMock } from '../../../../../core/public/mocks';
-import { httpServiceMock } from '../../../../../core/public/mocks';
+import { httpServiceMock, themeServiceMock } from '../../../../../core/public/mocks';
import type { ObjectStorageClient } from '../../../common/types';
import { HistoryMock } from '../../services/history.mock';
@@ -35,6 +35,7 @@ export const serviceContextMock = {
objectStorageClient: {} as unknown as ObjectStorageClient,
},
docLinkVersion: 'NA',
+ theme$: themeServiceMock.create().start().theme$,
};
},
};
diff --git a/src/plugins/console/public/application/contexts/services_context.tsx b/src/plugins/console/public/application/contexts/services_context.tsx
index 53c021d4d0982..5912de0375590 100644
--- a/src/plugins/console/public/application/contexts/services_context.tsx
+++ b/src/plugins/console/public/application/contexts/services_context.tsx
@@ -7,7 +7,9 @@
*/
import React, { createContext, useContext, useEffect } from 'react';
-import { NotificationsSetup } from 'kibana/public';
+import { Observable } from 'rxjs';
+import { NotificationsSetup, CoreTheme } from 'kibana/public';
+
import { History, Settings, Storage } from '../../services';
import { ObjectStorageClient } from '../../../common/types';
import { MetricsTracker } from '../../types';
@@ -26,6 +28,7 @@ interface ContextServices {
export interface ContextValue {
services: ContextServices;
docLinkVersion: string;
+ theme$: Observable<CoreTheme>;
}
interface ContextProps {
diff --git a/src/plugins/console/public/application/hooks/use_send_current_request_to_es/use_send_current_request_to_es.ts b/src/plugins/console/public/application/hooks/use_send_current_request_to_es/use_send_current_request_to_es.ts
index d025760c19d0a..81aa571b45a20 100644
--- a/src/plugins/console/public/application/hooks/use_send_current_request_to_es/use_send_current_request_to_es.ts
+++ b/src/plugins/console/public/application/hooks/use_send_current_request_to_es/use_send_current_request_to_es.ts
@@ -8,20 +8,21 @@
import { i18n } from '@kbn/i18n';
import { useCallback } from 'react';
+
+import { toMountPoint } from '../../../shared_imports';
import { isQuotaExceededError } from '../../../services/history';
+// @ts-ignore
+import { retrieveAutoCompleteInfo } from '../../../lib/mappings/mappings';
import { instance as registry } from '../../contexts/editor_context/editor_registry';
import { useRequestActionContext, useServicesContext } from '../../contexts';
+import { StorageQuotaError } from '../../components/storage_quota_error';
import { sendRequestToES } from './send_request_to_es';
import { track } from './track';
-import { toMountPoint } from '../../../../../kibana_react/public';
-
-// @ts-ignore
-import { retrieveAutoCompleteInfo } from '../../../lib/mappings/mappings';
-import { StorageQuotaError } from '../../components/storage_quota_error';
export const useSendCurrentRequestToES = () => {
const {
services: { history, settings, notifications, trackUiMetric },
+ theme$,
} = useServicesContext();
const dispatch = useRequestActionContext();
@@ -83,7 +84,8 @@ export const useSendCurrentRequestToES = () => {
settings.setHistoryDisabled(true);
notifications.toasts.remove(toast);
},
- })
+ }),
+ { theme$ }
),
});
} else {
@@ -127,5 +129,5 @@ export const useSendCurrentRequestToES = () => {
});
}
}
- }, [dispatch, settings, history, notifications, trackUiMetric]);
+ }, [dispatch, settings, history, notifications, trackUiMetric, theme$]);
};
diff --git a/src/plugins/console/public/application/index.tsx b/src/plugins/console/public/application/index.tsx
index 0b41095f8cc19..719975874cd44 100644
--- a/src/plugins/console/public/application/index.tsx
+++ b/src/plugins/console/public/application/index.tsx
@@ -8,13 +8,16 @@
import React from 'react';
import { render, unmountComponentAtNode } from 'react-dom';
-import { HttpSetup, NotificationsSetup, I18nStart } from 'src/core/public';
-import { ServicesContextProvider, EditorContextProvider, RequestContextProvider } from './contexts';
-import { Main } from './containers';
+import { Observable } from 'rxjs';
+import { HttpSetup, NotificationsSetup, I18nStart, CoreTheme } from 'src/core/public';
+
+import { UsageCollectionSetup } from '../../../usage_collection/public';
+import { KibanaThemeProvider } from '../shared_imports';
import { createStorage, createHistory, createSettings } from '../services';
-import * as localStorageObjectClient from '../lib/local_storage_object_client';
import { createUsageTracker } from '../services/tracker';
-import { UsageCollectionSetup } from '../../../usage_collection/public';
+import * as localStorageObjectClient from '../lib/local_storage_object_client';
+import { Main } from './containers';
+import { ServicesContextProvider, EditorContextProvider, RequestContextProvider } from './contexts';
import { createApi, createEsHostService } from './lib';
export interface BootDependencies {
@@ -24,6 +27,7 @@ export interface BootDependencies {
notifications: NotificationsSetup;
usageCollection?: UsageCollectionSetup;
element: HTMLElement;
+ theme$: Observable<CoreTheme>;
}
export function renderApp({
@@ -33,6 +37,7 @@ export function renderApp({
usageCollection,
element,
http,
+ theme$,
}: BootDependencies) {
const trackUiMetric = createUsageTracker(usageCollection);
trackUiMetric.load('opened_app');
@@ -49,26 +54,29 @@ export function renderApp({
render(
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
,
element
);
diff --git a/src/plugins/console/public/plugin.ts b/src/plugins/console/public/plugin.ts
index d61769c23dfe0..f46f60b485d55 100644
--- a/src/plugins/console/public/plugin.ts
+++ b/src/plugins/console/public/plugin.ts
@@ -52,7 +52,7 @@ export class ConsoleUIPlugin implements Plugin {
+ mount: async ({ element, theme$ }) => {
const [core] = await getStartServices();
const {
@@ -69,6 +69,7 @@ export class ConsoleUIPlugin implements Plugin {
// populated by a global rule
},
},
+ script_score: {
+ __template: {
+ script: {},
+ query: {},
+ },
+ script: {},
+ query: {},
+ min_score: '',
+ boost: 1.0,
+ },
wrapper: {
__template: {
query: 'QUERY_BASE64_ENCODED',
diff --git a/src/plugins/console/server/lib/spec_definitions/json/overrides/search.json b/src/plugins/console/server/lib/spec_definitions/json/overrides/search.json
new file mode 100644
index 0000000000000..1028422b303f2
--- /dev/null
+++ b/src/plugins/console/server/lib/spec_definitions/json/overrides/search.json
@@ -0,0 +1,7 @@
+{
+ "search": {
+ "url_params": {
+ "error_trace": true
+ }
+ }
+}
diff --git a/src/plugins/console/server/lib/spec_definitions/json/overrides/snapshot.create_repository.json b/src/plugins/console/server/lib/spec_definitions/json/overrides/snapshot.create_repository.json
index c513292f2bd59..3559b8e3811c0 100644
--- a/src/plugins/console/server/lib/spec_definitions/json/overrides/snapshot.create_repository.json
+++ b/src/plugins/console/server/lib/spec_definitions/json/overrides/snapshot.create_repository.json
@@ -9,7 +9,7 @@
"settings": {
"__one_of": [{
"__condition": {
- "lines_regex": "type[\"']\\s*:\\s*[\"']fs`"
+ "lines_regex": "type[\"']\\s*:\\s*[\"']fs"
},
"__template": {
"location": "path"
diff --git a/src/plugins/dashboard/public/application/embeddable/dashboard_container.tsx b/src/plugins/dashboard/public/application/embeddable/dashboard_container.tsx
index 3e259d4e26179..36261fbe130a3 100644
--- a/src/plugins/dashboard/public/application/embeddable/dashboard_container.tsx
+++ b/src/plugins/dashboard/public/application/embeddable/dashboard_container.tsx
@@ -101,6 +101,7 @@ export class DashboardContainer extends Container void;
public controlGroup?: ControlGroupContainer;
+ private domNode?: HTMLElement;
public getPanelCount = () => {
return Object.keys(this.getInput().panels).length;
@@ -258,6 +259,10 @@ export class DashboardContainer extends Container
@@ -275,6 +280,7 @@ export class DashboardContainer extends Container(
original as unknown as DashboardDiffCommonFilters,
newState as unknown as DashboardDiffCommonFilters,
- ['viewMode', 'panels', 'options', 'savedQuery', 'expandedPanelId', 'controlGroupInput'],
+ [
+ 'viewMode',
+ 'panels',
+ 'options',
+ 'fullScreenMode',
+ 'savedQuery',
+ 'expandedPanelId',
+ 'controlGroupInput',
+ ],
true
);
diff --git a/src/plugins/dashboard/public/application/lib/filter_utils.ts b/src/plugins/dashboard/public/application/lib/filter_utils.ts
index a31b83ec2df8f..c6b9ae2d01cf3 100644
--- a/src/plugins/dashboard/public/application/lib/filter_utils.ts
+++ b/src/plugins/dashboard/public/application/lib/filter_utils.ts
@@ -72,7 +72,7 @@ export const cleanFiltersForComparison = (filters: Filter[]) => {
export const cleanFiltersForSerialize = (filters: Filter[]): Filter[] => {
return filters.map((filter) => {
- if (filter.meta.value) {
+ if (filter.meta?.value) {
delete filter.meta.value;
}
return filter;
diff --git a/src/plugins/dashboard/public/application/lib/load_saved_dashboard_state.ts b/src/plugins/dashboard/public/application/lib/load_saved_dashboard_state.ts
index 31579e92bd1ec..03a03842c0e66 100644
--- a/src/plugins/dashboard/public/application/lib/load_saved_dashboard_state.ts
+++ b/src/plugins/dashboard/public/application/lib/load_saved_dashboard_state.ts
@@ -8,10 +8,10 @@
import _ from 'lodash';
+import { getDashboard60Warning, dashboardLoadingErrorStrings } from '../../dashboard_strings';
import { savedObjectToDashboardState } from './convert_dashboard_state';
import { DashboardState, DashboardBuildContext } from '../../types';
import { DashboardConstants, DashboardSavedObject } from '../..';
-import { getDashboard60Warning } from '../../dashboard_strings';
import { migrateLegacyQuery } from './migrate_legacy_query';
import { cleanFiltersForSerialize } from './filter_utils';
import { ViewMode } from '../../services/embeddable';
@@ -52,34 +52,33 @@ export const loadSavedDashboardState = async ({
return;
}
await indexPatterns.ensureDefaultDataView();
- let savedDashboard: DashboardSavedObject | undefined;
try {
- savedDashboard = (await savedDashboards.get({
+ const savedDashboard = (await savedDashboards.get({
id: savedDashboardId,
useResolve: true,
})) as DashboardSavedObject;
+ const savedDashboardState = savedObjectToDashboardState({
+ savedDashboard,
+ usageCollection,
+ showWriteControls,
+ savedObjectsTagging,
+ version: initializerContext.env.packageInfo.version,
+ });
+
+ const isViewMode = !showWriteControls || Boolean(savedDashboard.id);
+ savedDashboardState.viewMode = isViewMode ? ViewMode.VIEW : ViewMode.EDIT;
+ savedDashboardState.filters = cleanFiltersForSerialize(savedDashboardState.filters);
+ savedDashboardState.query = migrateLegacyQuery(
+ savedDashboardState.query || queryString.getDefaultQuery()
+ );
+
+ return { savedDashboardState, savedDashboard };
} catch (error) {
// E.g. a corrupt or deleted dashboard
- notifications.toasts.addDanger(error.message);
+ notifications.toasts.addDanger(
+ dashboardLoadingErrorStrings.getDashboardLoadError(error.message)
+ );
history.push(DashboardConstants.LANDING_PAGE_PATH);
return;
}
- if (!savedDashboard) return;
-
- const savedDashboardState = savedObjectToDashboardState({
- savedDashboard,
- usageCollection,
- showWriteControls,
- savedObjectsTagging,
- version: initializerContext.env.packageInfo.version,
- });
-
- const isViewMode = !showWriteControls || Boolean(savedDashboard.id);
- savedDashboardState.viewMode = isViewMode ? ViewMode.VIEW : ViewMode.EDIT;
- savedDashboardState.filters = cleanFiltersForSerialize(savedDashboardState.filters);
- savedDashboardState.query = migrateLegacyQuery(
- savedDashboardState.query || queryString.getDefaultQuery()
- );
-
- return { savedDashboardState, savedDashboard };
};
diff --git a/src/plugins/dashboard/public/dashboard_strings.ts b/src/plugins/dashboard/public/dashboard_strings.ts
index ca0f51976f3fb..52961c43cc1a2 100644
--- a/src/plugins/dashboard/public/dashboard_strings.ts
+++ b/src/plugins/dashboard/public/dashboard_strings.ts
@@ -359,6 +359,14 @@ export const panelStorageErrorStrings = {
}),
};
+export const dashboardLoadingErrorStrings = {
+ getDashboardLoadError: (message: string) =>
+ i18n.translate('dashboard.loadingError.errorMessage', {
+ defaultMessage: 'Error encountered while loading saved dashboard: {message}',
+ values: { message },
+ }),
+};
+
/*
Empty Screen
*/
diff --git a/src/plugins/data/common/search/aggs/agg_types.ts b/src/plugins/data/common/search/aggs/agg_types.ts
index dd930887f9d19..87496767a33b2 100644
--- a/src/plugins/data/common/search/aggs/agg_types.ts
+++ b/src/plugins/data/common/search/aggs/agg_types.ts
@@ -14,7 +14,6 @@ import * as metrics from './metrics';
import { BUCKET_TYPES, CalculateBoundsFn } from './buckets';
import { METRIC_TYPES } from './metrics';
-/** @internal */
export interface AggTypesDependencies {
calculateBounds: CalculateBoundsFn;
getConfig: <T = any>(key: string) => T;
@@ -62,6 +61,8 @@ export const getAggTypes = () => ({
{ name: BUCKET_TYPES.SIGNIFICANT_TERMS, fn: buckets.getSignificantTermsBucketAgg },
{ name: BUCKET_TYPES.GEOHASH_GRID, fn: buckets.getGeoHashBucketAgg },
{ name: BUCKET_TYPES.GEOTILE_GRID, fn: buckets.getGeoTitleBucketAgg },
+ { name: BUCKET_TYPES.SAMPLER, fn: buckets.getSamplerBucketAgg },
+ { name: BUCKET_TYPES.DIVERSIFIED_SAMPLER, fn: buckets.getDiversifiedSamplerBucketAgg },
],
});
@@ -79,6 +80,8 @@ export const getAggTypesFunctions = () => [
buckets.aggDateHistogram,
buckets.aggTerms,
buckets.aggMultiTerms,
+ buckets.aggSampler,
+ buckets.aggDiversifiedSampler,
metrics.aggAvg,
metrics.aggBucketAvg,
metrics.aggBucketMax,
diff --git a/src/plugins/data/common/search/aggs/agg_types_registry.ts b/src/plugins/data/common/search/aggs/agg_types_registry.ts
index 108b1eb379ddd..4e57b4db3fb50 100644
--- a/src/plugins/data/common/search/aggs/agg_types_registry.ts
+++ b/src/plugins/data/common/search/aggs/agg_types_registry.ts
@@ -16,8 +16,6 @@ export type AggTypesRegistrySetup = ReturnType;
* real start contract we will need to return the initialized versions.
* So we need to provide the correct typings so they can be overwritten
* on client/server.
- *
- * @internal
*/
export interface AggTypesRegistryStart {
get: (id: string) => BucketAggType | MetricAggType;
diff --git a/src/plugins/data/common/search/aggs/aggs_service.test.ts b/src/plugins/data/common/search/aggs/aggs_service.test.ts
index be3fbae26174a..571083c18156f 100644
--- a/src/plugins/data/common/search/aggs/aggs_service.test.ts
+++ b/src/plugins/data/common/search/aggs/aggs_service.test.ts
@@ -73,6 +73,8 @@ describe('Aggs service', () => {
"significant_terms",
"geohash_grid",
"geotile_grid",
+ "sampler",
+ "diversified_sampler",
"foo",
]
`);
@@ -122,6 +124,8 @@ describe('Aggs service', () => {
"significant_terms",
"geohash_grid",
"geotile_grid",
+ "sampler",
+ "diversified_sampler",
]
`);
expect(bStart.types.getAll().metrics.map((t) => t(aggTypesDependencies).name))
diff --git a/src/plugins/data/common/search/aggs/aggs_service.ts b/src/plugins/data/common/search/aggs/aggs_service.ts
index 86bda5019a496..58f65bb0cab44 100644
--- a/src/plugins/data/common/search/aggs/aggs_service.ts
+++ b/src/plugins/data/common/search/aggs/aggs_service.ts
@@ -32,12 +32,10 @@ export const aggsRequiredUiSettings = [
UI_SETTINGS.COURIER_IGNORE_FILTER_IF_FIELD_NOT_IN_INDEX,
];
-/** @internal */
export interface AggsCommonSetupDependencies {
registerFunction: ExpressionsServiceSetup['registerFunction'];
}
-/** @internal */
export interface AggsCommonStartDependencies {
getConfig: GetConfigFn;
getIndexPattern(id: string): Promise;
diff --git a/src/plugins/data/common/search/aggs/buckets/bucket_agg_types.ts b/src/plugins/data/common/search/aggs/buckets/bucket_agg_types.ts
index 0c01bff90bfee..671266ef15997 100644
--- a/src/plugins/data/common/search/aggs/buckets/bucket_agg_types.ts
+++ b/src/plugins/data/common/search/aggs/buckets/bucket_agg_types.ts
@@ -19,4 +19,6 @@ export enum BUCKET_TYPES {
GEOHASH_GRID = 'geohash_grid',
GEOTILE_GRID = 'geotile_grid',
DATE_HISTOGRAM = 'date_histogram',
+ SAMPLER = 'sampler',
+ DIVERSIFIED_SAMPLER = 'diversified_sampler',
}
diff --git a/src/plugins/data/common/search/aggs/buckets/diversified_sampler.ts b/src/plugins/data/common/search/aggs/buckets/diversified_sampler.ts
new file mode 100644
index 0000000000000..31ebaa094c368
--- /dev/null
+++ b/src/plugins/data/common/search/aggs/buckets/diversified_sampler.ts
@@ -0,0 +1,62 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { i18n } from '@kbn/i18n';
+import { BucketAggType } from './bucket_agg_type';
+import { BaseAggParams } from '../types';
+import { aggDiversifiedSamplerFnName } from './diversified_sampler_fn';
+
+export const DIVERSIFIED_SAMPLER_AGG_NAME = 'diversified_sampler';
+
+const title = i18n.translate('data.search.aggs.buckets.diversifiedSamplerTitle', {
+ defaultMessage: 'Diversified sampler',
+ description: 'Diversified sampler aggregation title',
+});
+
+export interface AggParamsDiversifiedSampler extends BaseAggParams {
+ /**
+ * Is used to provide values used for de-duplication
+ */
+ field: string;
+
+ /**
+ * Limits how many top-scoring documents are collected in the sample processed on each shard.
+ */
+ shard_size?: number;
+
+ /**
+ * Limits how many documents are permitted per choice of de-duplicating value
+ */
+ max_docs_per_value?: number;
+}
+
+/**
+ * Like the sampler aggregation this is a filtering aggregation used to limit any sub aggregations' processing to a sample of the top-scoring documents.
+ * The diversified_sampler aggregation adds the ability to limit the number of matches that share a common value.
+ */
+export const getDiversifiedSamplerBucketAgg = () =>
+ new BucketAggType({
+ name: DIVERSIFIED_SAMPLER_AGG_NAME,
+ title,
+ customLabels: false,
+ expressionName: aggDiversifiedSamplerFnName,
+ params: [
+ {
+ name: 'shard_size',
+ type: 'number',
+ },
+ {
+ name: 'max_docs_per_value',
+ type: 'number',
+ },
+ {
+ name: 'field',
+ type: 'field',
+ },
+ ],
+ });
diff --git a/src/plugins/data/common/search/aggs/buckets/diversified_sampler_fn.test.ts b/src/plugins/data/common/search/aggs/buckets/diversified_sampler_fn.test.ts
new file mode 100644
index 0000000000000..e874542289bb2
--- /dev/null
+++ b/src/plugins/data/common/search/aggs/buckets/diversified_sampler_fn.test.ts
@@ -0,0 +1,58 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { functionWrapper } from '../test_helpers';
+import { aggDiversifiedSampler } from './diversified_sampler_fn';
+
+describe('aggDiversifiedSampler', () => {
+ const fn = functionWrapper(aggDiversifiedSampler());
+
+ test('fills in defaults when only required args are provided', () => {
+ const actual = fn({ id: 'sampler', schema: 'bucket', field: 'author' });
+ expect(actual).toMatchInlineSnapshot(`
+ Object {
+ "type": "agg_type",
+ "value": Object {
+ "enabled": true,
+ "id": "sampler",
+ "params": Object {
+ "field": "author",
+ "max_docs_per_value": undefined,
+ "shard_size": undefined,
+ },
+ "schema": "bucket",
+ "type": "diversified_sampler",
+ },
+ }
+ `);
+ });
+
+ test('includes optional params when they are provided', () => {
+ const actual = fn({
+ id: 'sampler',
+ schema: 'bucket',
+ shard_size: 300,
+ field: 'author',
+ max_docs_per_value: 3,
+ });
+
+ expect(actual.value).toMatchInlineSnapshot(`
+ Object {
+ "enabled": true,
+ "id": "sampler",
+ "params": Object {
+ "field": "author",
+ "max_docs_per_value": 3,
+ "shard_size": 300,
+ },
+ "schema": "bucket",
+ "type": "diversified_sampler",
+ }
+ `);
+ });
+});
diff --git a/src/plugins/data/common/search/aggs/buckets/diversified_sampler_fn.ts b/src/plugins/data/common/search/aggs/buckets/diversified_sampler_fn.ts
new file mode 100644
index 0000000000000..0e1b235dd576d
--- /dev/null
+++ b/src/plugins/data/common/search/aggs/buckets/diversified_sampler_fn.ts
@@ -0,0 +1,90 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { i18n } from '@kbn/i18n';
+import { ExpressionFunctionDefinition } from 'src/plugins/expressions/common';
+import { AggExpressionFunctionArgs, AggExpressionType, BUCKET_TYPES } from '../';
+import { DIVERSIFIED_SAMPLER_AGG_NAME } from './diversified_sampler';
+
+export const aggDiversifiedSamplerFnName = 'aggDiversifiedSampler';
+
+type Input = any;
+type Arguments = AggExpressionFunctionArgs;
+
+type Output = AggExpressionType;
+type FunctionDefinition = ExpressionFunctionDefinition<
+ typeof aggDiversifiedSamplerFnName,
+ Input,
+ Arguments,
+ Output
+>;
+
+export const aggDiversifiedSampler = (): FunctionDefinition => ({
+ name: aggDiversifiedSamplerFnName,
+ help: i18n.translate('data.search.aggs.function.buckets.diversifiedSampler.help', {
+ defaultMessage: 'Generates a serialized agg config for a Diversified sampler agg',
+ }),
+ type: 'agg_type',
+ args: {
+ id: {
+ types: ['string'],
+ help: i18n.translate('data.search.aggs.buckets.diversifiedSampler.id.help', {
+ defaultMessage: 'ID for this aggregation',
+ }),
+ },
+ enabled: {
+ types: ['boolean'],
+ default: true,
+ help: i18n.translate('data.search.aggs.buckets.diversifiedSampler.enabled.help', {
+ defaultMessage: 'Specifies whether this aggregation should be enabled',
+ }),
+ },
+ schema: {
+ types: ['string'],
+ help: i18n.translate('data.search.aggs.buckets.diversifiedSampler.schema.help', {
+ defaultMessage: 'Schema to use for this aggregation',
+ }),
+ },
+ shard_size: {
+ types: ['number'],
+ help: i18n.translate('data.search.aggs.buckets.diversifiedSampler.shardSize.help', {
+ defaultMessage:
+ 'The shard_size parameter limits how many top-scoring documents are collected in the sample processed on each shard.',
+ }),
+ },
+ max_docs_per_value: {
+ types: ['number'],
+ help: i18n.translate('data.search.aggs.buckets.diversifiedSampler.maxDocsPerValue.help', {
+ defaultMessage:
+ 'Limits how many documents are permitted per choice of de-duplicating value.',
+ }),
+ },
+ field: {
+ types: ['string'],
+ help: i18n.translate('data.search.aggs.buckets.diversifiedSampler.field.help', {
+        defaultMessage: 'The field used to provide values for de-duplication.',
+ }),
+ },
+ },
+ fn: (input, args) => {
+ const { id, enabled, schema, ...rest } = args;
+
+ return {
+ type: 'agg_type',
+ value: {
+ id,
+ enabled,
+ schema,
+ type: DIVERSIFIED_SAMPLER_AGG_NAME,
+ params: {
+ ...rest,
+ },
+ },
+ };
+ },
+});
diff --git a/src/plugins/data/common/search/aggs/buckets/index.ts b/src/plugins/data/common/search/aggs/buckets/index.ts
index 421fa0fcfdaf4..bf96a9ef860c0 100644
--- a/src/plugins/data/common/search/aggs/buckets/index.ts
+++ b/src/plugins/data/common/search/aggs/buckets/index.ts
@@ -38,3 +38,7 @@ export * from './terms_fn';
export * from './terms';
export * from './multi_terms_fn';
export * from './multi_terms';
+export * from './sampler_fn';
+export * from './sampler';
+export * from './diversified_sampler_fn';
+export * from './diversified_sampler';
diff --git a/src/plugins/data/common/search/aggs/buckets/multi_terms.ts b/src/plugins/data/common/search/aggs/buckets/multi_terms.ts
index c320c7e242798..02bf6bd12d319 100644
--- a/src/plugins/data/common/search/aggs/buckets/multi_terms.ts
+++ b/src/plugins/data/common/search/aggs/buckets/multi_terms.ts
@@ -34,6 +34,7 @@ export interface AggParamsMultiTerms extends BaseAggParams {
size?: number;
otherBucket?: boolean;
otherBucketLabel?: string;
+ separatorLabel?: string;
}
export const getMultiTermsBucketAgg = () => {
@@ -83,6 +84,7 @@ export const getMultiTermsBucketAgg = () => {
params: {
otherBucketLabel: params.otherBucketLabel,
paramsPerField: formats,
+ separator: agg.params.separatorLabel,
},
};
},
@@ -142,6 +144,11 @@ export const getMultiTermsBucketAgg = () => {
shouldShow: (agg) => agg.getParam('otherBucket'),
write: noop,
},
+ {
+ name: 'separatorLabel',
+ type: 'string',
+ write: noop,
+ },
],
});
};
diff --git a/src/plugins/data/common/search/aggs/buckets/multi_terms_fn.ts b/src/plugins/data/common/search/aggs/buckets/multi_terms_fn.ts
index 58e49479cd2c1..12b9c6d156548 100644
--- a/src/plugins/data/common/search/aggs/buckets/multi_terms_fn.ts
+++ b/src/plugins/data/common/search/aggs/buckets/multi_terms_fn.ts
@@ -111,6 +111,12 @@ export const aggMultiTerms = (): FunctionDefinition => ({
defaultMessage: 'Represents a custom label for this aggregation',
}),
},
+ separatorLabel: {
+ types: ['string'],
+ help: i18n.translate('data.search.aggs.buckets.multiTerms.separatorLabel.help', {
+ defaultMessage: 'The separator label used to join each term combination',
+ }),
+ },
},
fn: (input, args) => {
const { id, enabled, schema, ...rest } = args;
diff --git a/src/plugins/data/common/search/aggs/buckets/sampler.ts b/src/plugins/data/common/search/aggs/buckets/sampler.ts
new file mode 100644
index 0000000000000..7eb4f74115095
--- /dev/null
+++ b/src/plugins/data/common/search/aggs/buckets/sampler.ts
@@ -0,0 +1,43 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { i18n } from '@kbn/i18n';
+import { BucketAggType } from './bucket_agg_type';
+import { BaseAggParams } from '../types';
+import { aggSamplerFnName } from './sampler_fn';
+
+export const SAMPLER_AGG_NAME = 'sampler';
+
+const title = i18n.translate('data.search.aggs.buckets.samplerTitle', {
+ defaultMessage: 'Sampler',
+ description: 'Sampler aggregation title',
+});
+
+export interface AggParamsSampler extends BaseAggParams {
+ /**
+ * Limits how many top-scoring documents are collected in the sample processed on each shard.
+ */
+ shard_size?: number;
+}
+
+/**
+ * A filtering aggregation used to limit any sub aggregations' processing to a sample of the top-scoring documents.
+ */
+export const getSamplerBucketAgg = () =>
+ new BucketAggType({
+ name: SAMPLER_AGG_NAME,
+ title,
+ customLabels: false,
+ expressionName: aggSamplerFnName,
+ params: [
+ {
+ name: 'shard_size',
+ type: 'number',
+ },
+ ],
+ });
diff --git a/src/plugins/data/common/search/aggs/buckets/sampler_fn.test.ts b/src/plugins/data/common/search/aggs/buckets/sampler_fn.test.ts
new file mode 100644
index 0000000000000..76ef901671e72
--- /dev/null
+++ b/src/plugins/data/common/search/aggs/buckets/sampler_fn.test.ts
@@ -0,0 +1,52 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { functionWrapper } from '../test_helpers';
+import { aggSampler } from './sampler_fn';
+
+describe('aggSampler', () => {
+ const fn = functionWrapper(aggSampler());
+
+ test('fills in defaults when only required args are provided', () => {
+ const actual = fn({ id: 'sampler', schema: 'bucket' });
+ expect(actual).toMatchInlineSnapshot(`
+ Object {
+ "type": "agg_type",
+ "value": Object {
+ "enabled": true,
+ "id": "sampler",
+ "params": Object {
+ "shard_size": undefined,
+ },
+ "schema": "bucket",
+ "type": "sampler",
+ },
+ }
+ `);
+ });
+
+ test('includes optional params when they are provided', () => {
+ const actual = fn({
+ id: 'sampler',
+ schema: 'bucket',
+ shard_size: 300,
+ });
+
+ expect(actual.value).toMatchInlineSnapshot(`
+ Object {
+ "enabled": true,
+ "id": "sampler",
+ "params": Object {
+ "shard_size": 300,
+ },
+ "schema": "bucket",
+ "type": "sampler",
+ }
+ `);
+ });
+});
diff --git a/src/plugins/data/common/search/aggs/buckets/sampler_fn.ts b/src/plugins/data/common/search/aggs/buckets/sampler_fn.ts
new file mode 100644
index 0000000000000..2cb30eb70a230
--- /dev/null
+++ b/src/plugins/data/common/search/aggs/buckets/sampler_fn.ts
@@ -0,0 +1,77 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { i18n } from '@kbn/i18n';
+import { ExpressionFunctionDefinition } from 'src/plugins/expressions/common';
+import { AggExpressionFunctionArgs, AggExpressionType, BUCKET_TYPES } from '../';
+import { SAMPLER_AGG_NAME } from './sampler';
+
+export const aggSamplerFnName = 'aggSampler';
+
+type Input = any;
+type Arguments = AggExpressionFunctionArgs;
+
+type Output = AggExpressionType;
+type FunctionDefinition = ExpressionFunctionDefinition<
+ typeof aggSamplerFnName,
+ Input,
+ Arguments,
+ Output
+>;
+
+export const aggSampler = (): FunctionDefinition => ({
+ name: aggSamplerFnName,
+ help: i18n.translate('data.search.aggs.function.buckets.sampler.help', {
+ defaultMessage: 'Generates a serialized agg config for a Sampler agg',
+ }),
+ type: 'agg_type',
+ args: {
+ id: {
+ types: ['string'],
+ help: i18n.translate('data.search.aggs.buckets.sampler.id.help', {
+ defaultMessage: 'ID for this aggregation',
+ }),
+ },
+ enabled: {
+ types: ['boolean'],
+ default: true,
+ help: i18n.translate('data.search.aggs.buckets.sampler.enabled.help', {
+ defaultMessage: 'Specifies whether this aggregation should be enabled',
+ }),
+ },
+ schema: {
+ types: ['string'],
+ help: i18n.translate('data.search.aggs.buckets.sampler.schema.help', {
+ defaultMessage: 'Schema to use for this aggregation',
+ }),
+ },
+ shard_size: {
+ types: ['number'],
+ help: i18n.translate('data.search.aggs.buckets.sampler.shardSize.help', {
+ defaultMessage:
+ 'The shard_size parameter limits how many top-scoring documents are collected in the sample processed on each shard.',
+ }),
+ },
+ },
+ fn: (input, args) => {
+ const { id, enabled, schema, ...rest } = args;
+
+ return {
+ type: 'agg_type',
+ value: {
+ id,
+ enabled,
+ schema,
+ type: SAMPLER_AGG_NAME,
+ params: {
+ ...rest,
+ },
+ },
+ };
+ },
+});
diff --git a/src/plugins/data/common/search/aggs/metrics/percentile_ranks.ts b/src/plugins/data/common/search/aggs/metrics/percentile_ranks.ts
index fb142ee1f77c8..8f976ba979b95 100644
--- a/src/plugins/data/common/search/aggs/metrics/percentile_ranks.ts
+++ b/src/plugins/data/common/search/aggs/metrics/percentile_ranks.ts
@@ -13,7 +13,8 @@ import { AggTypesDependencies } from '../agg_types';
import { BaseAggParams } from '../types';
import { MetricAggType } from './metric_agg_type';
-import { getResponseAggConfigClass, IResponseAggConfig } from './lib/get_response_agg_config_class';
+import { getResponseAggConfigClass } from './lib/get_response_agg_config_class';
+import type { IResponseAggConfig } from './lib/get_response_agg_config_class';
import { aggPercentileRanksFnName } from './percentile_ranks_fn';
import { getPercentileValue } from './percentiles_get_value';
import { METRIC_TYPES } from './metric_agg_types';
diff --git a/src/plugins/data/common/search/aggs/metrics/percentiles.test.ts b/src/plugins/data/common/search/aggs/metrics/percentiles.test.ts
index 26189e022e7c6..17c49e2484a80 100644
--- a/src/plugins/data/common/search/aggs/metrics/percentiles.test.ts
+++ b/src/plugins/data/common/search/aggs/metrics/percentiles.test.ts
@@ -10,7 +10,7 @@ import { IPercentileAggConfig, getPercentilesMetricAgg } from './percentiles';
import { AggConfigs, IAggConfigs } from '../agg_configs';
import { mockAggTypesRegistry } from '../test_helpers';
import { METRIC_TYPES } from './metric_agg_types';
-import { IResponseAggConfig } from './lib/get_response_agg_config_class';
+import type { IResponseAggConfig } from './lib/get_response_agg_config_class';
describe('AggTypesMetricsPercentilesProvider class', () => {
let aggConfigs: IAggConfigs;
diff --git a/src/plugins/data/common/search/aggs/metrics/percentiles.ts b/src/plugins/data/common/search/aggs/metrics/percentiles.ts
index 07c4ac2bf2646..d0e1c6df77696 100644
--- a/src/plugins/data/common/search/aggs/metrics/percentiles.ts
+++ b/src/plugins/data/common/search/aggs/metrics/percentiles.ts
@@ -10,7 +10,8 @@ import { i18n } from '@kbn/i18n';
import { MetricAggType } from './metric_agg_type';
import { METRIC_TYPES } from './metric_agg_types';
import { KBN_FIELD_TYPES } from '../../../../common';
-import { getResponseAggConfigClass, IResponseAggConfig } from './lib/get_response_agg_config_class';
+import { getResponseAggConfigClass } from './lib/get_response_agg_config_class';
+import type { IResponseAggConfig } from './lib/get_response_agg_config_class';
import { aggPercentilesFnName } from './percentiles_fn';
import { getPercentileValue } from './percentiles_get_value';
import { ordinalSuffix } from './lib/ordinal_suffix';
diff --git a/src/plugins/data/common/search/aggs/metrics/percentiles_get_value.ts b/src/plugins/data/common/search/aggs/metrics/percentiles_get_value.ts
index 90585909db42a..242a12da35128 100644
--- a/src/plugins/data/common/search/aggs/metrics/percentiles_get_value.ts
+++ b/src/plugins/data/common/search/aggs/metrics/percentiles_get_value.ts
@@ -7,7 +7,7 @@
*/
import { find } from 'lodash';
-import { IResponseAggConfig } from './lib/get_response_agg_config_class';
+import type { IResponseAggConfig } from './lib/get_response_agg_config_class';
export const getPercentileValue = (
agg: TAggConfig,
diff --git a/src/plugins/data/common/search/aggs/metrics/std_deviation.ts b/src/plugins/data/common/search/aggs/metrics/std_deviation.ts
index fa160e5e9d161..9a4c38e296635 100644
--- a/src/plugins/data/common/search/aggs/metrics/std_deviation.ts
+++ b/src/plugins/data/common/search/aggs/metrics/std_deviation.ts
@@ -11,7 +11,8 @@ import { i18n } from '@kbn/i18n';
import { MetricAggType } from './metric_agg_type';
import { aggStdDeviationFnName } from './std_deviation_fn';
import { METRIC_TYPES } from './metric_agg_types';
-import { getResponseAggConfigClass, IResponseAggConfig } from './lib/get_response_agg_config_class';
+import { getResponseAggConfigClass } from './lib/get_response_agg_config_class';
+import type { IResponseAggConfig } from './lib/get_response_agg_config_class';
import { KBN_FIELD_TYPES } from '../../../../common';
import { BaseAggParams } from '../types';
diff --git a/src/plugins/data/common/search/aggs/types.ts b/src/plugins/data/common/search/aggs/types.ts
index b9a977e0a8a09..74356263845d1 100644
--- a/src/plugins/data/common/search/aggs/types.ts
+++ b/src/plugins/data/common/search/aggs/types.ts
@@ -90,6 +90,8 @@ import {
aggFilteredMetric,
aggSinglePercentile,
} from './';
+import { AggParamsSampler } from './buckets/sampler';
+import { AggParamsDiversifiedSampler } from './buckets/diversified_sampler';
export type { IAggConfig, AggConfigSerialized } from './agg_config';
export type { CreateAggConfigParams, IAggConfigs } from './agg_configs';
@@ -100,12 +102,10 @@ export type { IMetricAggType } from './metrics/metric_agg_type';
export type { IpRangeKey } from './buckets/lib/ip_range';
export type { OptionedValueProp } from './param_types/optioned';
-/** @internal */
export interface AggsCommonSetup {
types: AggTypesRegistrySetup;
}
-/** @internal */
export interface AggsCommonStart {
calculateAutoTimeExpression: ReturnType;
datatableUtilities: {
@@ -129,14 +129,12 @@ export interface AggsCommonStart {
*/
export type AggsStart = Assign;
-/** @internal */
export interface BaseAggParams {
json?: string;
customLabel?: string;
timeShift?: string;
}
-/** @internal */
export interface AggExpressionType {
type: 'agg_type';
value: AggConfigSerialized;
@@ -166,6 +164,8 @@ export interface AggParamsMapping {
[BUCKET_TYPES.DATE_HISTOGRAM]: AggParamsDateHistogram;
[BUCKET_TYPES.TERMS]: AggParamsTerms;
[BUCKET_TYPES.MULTI_TERMS]: AggParamsMultiTerms;
+ [BUCKET_TYPES.SAMPLER]: AggParamsSampler;
+ [BUCKET_TYPES.DIVERSIFIED_SAMPLER]: AggParamsDiversifiedSampler;
[METRIC_TYPES.AVG]: AggParamsAvg;
[METRIC_TYPES.CARDINALITY]: AggParamsCardinality;
[METRIC_TYPES.COUNT]: BaseAggParams;
diff --git a/src/plugins/data/common/search/aggs/utils/get_aggs_formats.test.ts b/src/plugins/data/common/search/aggs/utils/get_aggs_formats.test.ts
index 76112980c55fb..8510acf1572c7 100644
--- a/src/plugins/data/common/search/aggs/utils/get_aggs_formats.test.ts
+++ b/src/plugins/data/common/search/aggs/utils/get_aggs_formats.test.ts
@@ -13,6 +13,7 @@ import {
IFieldFormat,
SerializedFieldFormat,
} from '../../../../../field_formats/common';
+import { MultiFieldKey } from '../buckets/multi_field_key';
import { getAggsFormats } from './get_aggs_formats';
const getAggFormat = (
@@ -119,4 +120,35 @@ describe('getAggsFormats', () => {
expect(format.convert('__missing__')).toBe(mapping.params.missingBucketLabel);
expect(getFormat).toHaveBeenCalledTimes(3);
});
+
+ test('uses a default separator for multi terms', () => {
+ const terms = ['source', 'geo.src', 'geo.dest'];
+ const mapping = {
+ id: 'multi_terms',
+ params: {
+ paramsPerField: Array(terms.length).fill({ id: 'terms' }),
+ },
+ };
+
+ const format = getAggFormat(mapping, getFormat);
+
+ expect(format.convert(new MultiFieldKey({ key: terms }))).toBe('source › geo.src › geo.dest');
+ expect(getFormat).toHaveBeenCalledTimes(terms.length);
+ });
+
+ test('uses a custom separator for multi terms when passed', () => {
+ const terms = ['source', 'geo.src', 'geo.dest'];
+ const mapping = {
+ id: 'multi_terms',
+ params: {
+ paramsPerField: Array(terms.length).fill({ id: 'terms' }),
+ separator: ' - ',
+ },
+ };
+
+ const format = getAggFormat(mapping, getFormat);
+
+ expect(format.convert(new MultiFieldKey({ key: terms }))).toBe('source - geo.src - geo.dest');
+ expect(getFormat).toHaveBeenCalledTimes(terms.length);
+ });
});
diff --git a/src/plugins/data/common/search/aggs/utils/get_aggs_formats.ts b/src/plugins/data/common/search/aggs/utils/get_aggs_formats.ts
index aade8bc70e4ee..f14f981fdec65 100644
--- a/src/plugins/data/common/search/aggs/utils/get_aggs_formats.ts
+++ b/src/plugins/data/common/search/aggs/utils/get_aggs_formats.ts
@@ -143,9 +143,11 @@ export function getAggsFormats(getFieldFormat: GetFieldFormat): FieldFormatInsta
return params.otherBucketLabel;
}
+ const joinTemplate = params.separator ?? ' › ';
+
return (val as MultiFieldKey).keys
.map((valPart, i) => formats[i].convert(valPart, type))
- .join(' › ');
+ .join(joinTemplate);
};
getConverterFor = (type: FieldFormatsContentType) => (val: string) => this.convert(val, type);
},
diff --git a/src/plugins/data/common/search/expressions/esaggs/request_handler.test.ts b/src/plugins/data/common/search/expressions/esaggs/request_handler.test.ts
index a44613cb98b50..eefaf8a9dcd54 100644
--- a/src/plugins/data/common/search/expressions/esaggs/request_handler.test.ts
+++ b/src/plugins/data/common/search/expressions/esaggs/request_handler.test.ts
@@ -14,7 +14,7 @@ import type { IAggConfigs } from '../../aggs';
import type { ISearchSource } from '../../search_source';
import { searchSourceCommonMock, searchSourceInstanceMock } from '../../search_source/mocks';
-import { handleRequest, RequestHandlerParams } from './request_handler';
+import { handleRequest } from './request_handler';
jest.mock('../../tabify', () => ({
tabifyAggResponse: jest.fn(),
@@ -25,7 +25,7 @@ import { of } from 'rxjs';
import { toArray } from 'rxjs/operators';
describe('esaggs expression function - public', () => {
- let mockParams: MockedKeys;
+ let mockParams: MockedKeys[0]>;
beforeEach(() => {
jest.clearAllMocks();
diff --git a/src/plugins/data/common/search/expressions/esaggs/request_handler.ts b/src/plugins/data/common/search/expressions/esaggs/request_handler.ts
index 87c1685c9730d..d395baed2f08e 100644
--- a/src/plugins/data/common/search/expressions/esaggs/request_handler.ts
+++ b/src/plugins/data/common/search/expressions/esaggs/request_handler.ts
@@ -17,8 +17,7 @@ import { IAggConfigs } from '../../aggs';
import { ISearchStartSearchSource } from '../../search_source';
import { tabifyAggResponse } from '../../tabify';
-/** @internal */
-export interface RequestHandlerParams {
+interface RequestHandlerParams {
abortSignal?: AbortSignal;
aggs: IAggConfigs;
filters?: Filter[];
diff --git a/src/plugins/data/common/search/expressions/esdsl.ts b/src/plugins/data/common/search/expressions/esdsl.ts
index faa43dab65657..69e3c54e43806 100644
--- a/src/plugins/data/common/search/expressions/esdsl.ts
+++ b/src/plugins/data/common/search/expressions/esdsl.ts
@@ -34,8 +34,7 @@ export type EsdslExpressionFunctionDefinition = ExpressionFunctionDefinition<
Output
>;
-/** @internal */
-export interface EsdslStartDependencies {
+interface EsdslStartDependencies {
search: ISearchGeneric;
uiSettingsClient: UiSettingsCommon;
}
diff --git a/src/plugins/data/common/search/expressions/kibana_context.ts b/src/plugins/data/common/search/expressions/kibana_context.ts
index 47ca24b5be42b..6e38e2a3949d5 100644
--- a/src/plugins/data/common/search/expressions/kibana_context.ts
+++ b/src/plugins/data/common/search/expressions/kibana_context.ts
@@ -19,7 +19,6 @@ import { KibanaTimerangeOutput } from './timerange';
import { SavedObjectReference } from '../../../../../core/types';
import { SavedObjectsClientCommon } from '../..';
-/** @internal */
export interface KibanaContextStartDependencies {
savedObjectsClient: SavedObjectsClientCommon;
}
diff --git a/src/plugins/data/common/search/search_source/extract_references.ts b/src/plugins/data/common/search/search_source/extract_references.ts
index de32836ced124..954d336cb8a92 100644
--- a/src/plugins/data/common/search/search_source/extract_references.ts
+++ b/src/plugins/data/common/search/search_source/extract_references.ts
@@ -14,7 +14,7 @@ import { DATA_VIEW_SAVED_OBJECT_TYPE } from '../../../../data/common';
export const extractReferences = (
state: SerializedSearchSourceFields
-): [SerializedSearchSourceFields & { indexRefName?: string }, SavedObjectReference[]] => {
+): [SerializedSearchSourceFields, SavedObjectReference[]] => {
let searchSourceFields: SerializedSearchSourceFields & { indexRefName?: string } = { ...state };
const references: SavedObjectReference[] = [];
if (searchSourceFields.index) {
diff --git a/src/plugins/data/common/search/search_source/fetch/get_search_params.ts b/src/plugins/data/common/search/search_source/fetch/get_search_params.ts
index 28ee7993c175c..ae01dcf4ea051 100644
--- a/src/plugins/data/common/search/search_source/fetch/get_search_params.ts
+++ b/src/plugins/data/common/search/search_source/fetch/get_search_params.ts
@@ -9,7 +9,7 @@
import { UI_SETTINGS } from '../../../constants';
import { GetConfigFn } from '../../../types';
import { ISearchRequestParams } from '../../index';
-import { SearchRequest } from './types';
+import type { SearchRequest } from './types';
const sessionId = Date.now();
diff --git a/src/plugins/data/common/search/search_source/mocks.ts b/src/plugins/data/common/search/search_source/mocks.ts
index dee5c09a6b858..77ba2a761fbf0 100644
--- a/src/plugins/data/common/search/search_source/mocks.ts
+++ b/src/plugins/data/common/search/search_source/mocks.ts
@@ -40,6 +40,10 @@ export const searchSourceInstanceMock: MockedKeys = {
export const searchSourceCommonMock: jest.Mocked = {
create: jest.fn().mockReturnValue(searchSourceInstanceMock),
createEmpty: jest.fn().mockReturnValue(searchSourceInstanceMock),
+ telemetry: jest.fn(),
+ getAllMigrations: jest.fn(),
+ inject: jest.fn(),
+ extract: jest.fn(),
};
export const createSearchSourceMock = (fields?: SearchSourceFields, response?: any) =>
diff --git a/src/plugins/data/common/search/search_source/search_source.ts b/src/plugins/data/common/search/search_source/search_source.ts
index 3ac6b623fbc80..8acdb0514cccb 100644
--- a/src/plugins/data/common/search/search_source/search_source.ts
+++ b/src/plugins/data/common/search/search_source/search_source.ts
@@ -95,7 +95,8 @@ import type {
SearchSourceFields,
SearchSourceOptions,
} from './types';
-import { FetchHandlers, getSearchParamsFromRequest, RequestFailure, SearchRequest } from './fetch';
+import { getSearchParamsFromRequest, RequestFailure } from './fetch';
+import type { FetchHandlers, SearchRequest } from './fetch';
import { getRequestInspectorStats, getResponseInspectorStats } from './inspect';
import {
diff --git a/src/plugins/data/common/search/search_source/search_source_service.test.ts b/src/plugins/data/common/search/search_source/search_source_service.test.ts
index dc63b96d5258d..a1b49fc433925 100644
--- a/src/plugins/data/common/search/search_source/search_source_service.test.ts
+++ b/src/plugins/data/common/search/search_source/search_source_service.test.ts
@@ -28,7 +28,14 @@ describe('SearchSource service', () => {
dependencies
);
- expect(Object.keys(start)).toEqual(['create', 'createEmpty']);
+ expect(Object.keys(start)).toEqual([
+ 'create',
+ 'createEmpty',
+ 'extract',
+ 'inject',
+ 'getAllMigrations',
+ 'telemetry',
+ ]);
});
});
});
diff --git a/src/plugins/data/common/search/search_source/search_source_service.ts b/src/plugins/data/common/search/search_source/search_source_service.ts
index 886420365f548..a97596d322ccd 100644
--- a/src/plugins/data/common/search/search_source/search_source_service.ts
+++ b/src/plugins/data/common/search/search_source/search_source_service.ts
@@ -6,8 +6,18 @@
* Side Public License, v 1.
*/
-import { createSearchSource, SearchSource, SearchSourceDependencies } from './';
+import { mapValues } from 'lodash';
+import {
+ createSearchSource,
+ extractReferences,
+ injectReferences,
+ SearchSource,
+ SearchSourceDependencies,
+ SerializedSearchSourceFields,
+} from './';
import { IndexPatternsContract } from '../..';
+import { mergeMigrationFunctionMaps } from '../../../../kibana_utils/common';
+import { getAllMigrations as filtersGetAllMigrations } from '../../query/persistable_state';
export class SearchSourceService {
public setup() {}
@@ -24,6 +34,28 @@ export class SearchSourceService {
createEmpty: () => {
return new SearchSource({}, dependencies);
},
+ extract: (state: SerializedSearchSourceFields) => {
+ const [newState, references] = extractReferences(state);
+ return { state: newState, references };
+ },
+ inject: injectReferences,
+ getAllMigrations: () => {
+ const searchSourceMigrations = {};
+
+      // filters may define migrations we are not aware of, so we fetch them and wrap each migration
+      // function so it receives the correct input and its result is mapped back onto the serialized state
+ const filterMigrations = mapValues(filtersGetAllMigrations(), (migrate) => {
+ return (state: SerializedSearchSourceFields) => ({
+ ...state,
+ filter: migrate(state.filter),
+ });
+ });
+
+ return mergeMigrationFunctionMaps(searchSourceMigrations, filterMigrations);
+ },
+ telemetry: () => {
+ return {};
+ },
};
}
diff --git a/src/plugins/data/common/search/search_source/types.ts b/src/plugins/data/common/search/search_source/types.ts
index acfdf17263169..94697ba9521e9 100644
--- a/src/plugins/data/common/search/search_source/types.ts
+++ b/src/plugins/data/common/search/search_source/types.ts
@@ -12,7 +12,8 @@ import { SerializableRecord } from '@kbn/utility-types';
import { Query } from '../..';
import { Filter } from '../../es_query';
import { IndexPattern } from '../..';
-import { SearchSource } from './search_source';
+import type { SearchSource } from './search_source';
+import { PersistableStateService } from '../../../../kibana_utils/common';
/**
* search source interface
@@ -24,7 +25,8 @@ export type ISearchSource = Pick;
* high level search service
* @public
*/
-export interface ISearchStartSearchSource {
+export interface ISearchStartSearchSource
+ extends PersistableStateService {
/**
* creates {@link SearchSource} based on provided serialized {@link SearchSourceFields}
* @param fields
@@ -43,15 +45,17 @@ export enum SortDirection {
desc = 'desc',
}
-export interface SortDirectionFormat {
+// eslint-disable-next-line @typescript-eslint/consistent-type-definitions
+export type SortDirectionFormat = {
order: SortDirection;
format?: string;
-}
+};
-export interface SortDirectionNumeric {
+// eslint-disable-next-line @typescript-eslint/consistent-type-definitions
+export type SortDirectionNumeric = {
order: SortDirection;
numeric_type?: 'double' | 'long' | 'date' | 'date_nanos';
-}
+};
export type EsQuerySortValue = Record<
string,
@@ -114,7 +118,8 @@ export interface SearchSourceFields {
parent?: SearchSourceFields;
}
-export interface SerializedSearchSourceFields {
+// eslint-disable-next-line @typescript-eslint/consistent-type-definitions
+export type SerializedSearchSourceFields = {
type?: string;
/**
* {@link Query}
@@ -159,7 +164,7 @@ export interface SerializedSearchSourceFields {
terminate_after?: number;
parent?: SerializedSearchSourceFields;
-}
+};
export interface SearchSourceOptions {
callParentStartHandlers?: boolean;
diff --git a/src/plugins/data/common/search/tabify/get_columns.test.ts b/src/plugins/data/common/search/tabify/get_columns.test.ts
index d679b3fb36311..1741abfe729d7 100644
--- a/src/plugins/data/common/search/tabify/get_columns.test.ts
+++ b/src/plugins/data/common/search/tabify/get_columns.test.ts
@@ -7,7 +7,7 @@
*/
import { tabifyGetColumns } from './get_columns';
-import { TabbedAggColumn } from './types';
+import type { TabbedAggColumn } from './types';
import { AggConfigs } from '../aggs';
import { mockAggTypesRegistry } from '../aggs/test_helpers';
diff --git a/src/plugins/data/common/search/tabify/get_columns.ts b/src/plugins/data/common/search/tabify/get_columns.ts
index 62798ba8bf680..8957c96a69881 100644
--- a/src/plugins/data/common/search/tabify/get_columns.ts
+++ b/src/plugins/data/common/search/tabify/get_columns.ts
@@ -8,7 +8,7 @@
import { groupBy } from 'lodash';
import { IAggConfig } from '../aggs';
-import { TabbedAggColumn } from './types';
+import type { TabbedAggColumn } from './types';
const getColumn = (agg: IAggConfig, i: number): TabbedAggColumn => {
let name = '';
diff --git a/src/plugins/data/common/search/tabify/response_writer.test.ts b/src/plugins/data/common/search/tabify/response_writer.test.ts
index cee297d255db3..ec131458b8510 100644
--- a/src/plugins/data/common/search/tabify/response_writer.test.ts
+++ b/src/plugins/data/common/search/tabify/response_writer.test.ts
@@ -9,7 +9,7 @@
import { TabbedAggResponseWriter } from './response_writer';
import { AggConfigs, BUCKET_TYPES, METRIC_TYPES } from '../aggs';
import { mockAggTypesRegistry } from '../aggs/test_helpers';
-import { TabbedResponseWriterOptions } from './types';
+import type { TabbedResponseWriterOptions } from './types';
describe('TabbedAggResponseWriter class', () => {
let responseWriter: TabbedAggResponseWriter;
diff --git a/src/plugins/data/common/search/tabify/tabify.ts b/src/plugins/data/common/search/tabify/tabify.ts
index d3273accff974..5b1247a8f1719 100644
--- a/src/plugins/data/common/search/tabify/tabify.ts
+++ b/src/plugins/data/common/search/tabify/tabify.ts
@@ -9,7 +9,7 @@
import { get } from 'lodash';
import { TabbedAggResponseWriter } from './response_writer';
import { TabifyBuckets } from './buckets';
-import { TabbedResponseWriterOptions } from './types';
+import type { TabbedResponseWriterOptions } from './types';
import { AggResponseBucket } from './types';
import { AggGroupNames, IAggConfigs } from '../aggs';
diff --git a/src/plugins/data/common/search/tabify/tabify_docs.ts b/src/plugins/data/common/search/tabify/tabify_docs.ts
index 43b6155f6662f..08172a918c042 100644
--- a/src/plugins/data/common/search/tabify/tabify_docs.ts
+++ b/src/plugins/data/common/search/tabify/tabify_docs.ts
@@ -48,7 +48,7 @@ function isValidMetaFieldName(field: string): field is ValidMetaFieldNames {
return (VALID_META_FIELD_NAMES as string[]).includes(field);
}
-export interface TabifyDocsOptions {
+interface TabifyDocsOptions {
shallow?: boolean;
/**
* If set to `false` the _source of the document, if requested, won't be
diff --git a/src/plugins/data/common/search/tabify/types.ts b/src/plugins/data/common/search/tabify/types.ts
index 9fadb0ef860e3..bf0a99725e2ab 100644
--- a/src/plugins/data/common/search/tabify/types.ts
+++ b/src/plugins/data/common/search/tabify/types.ts
@@ -22,7 +22,6 @@ export interface TimeRangeInformation {
timeFields: string[];
}
-/** @internal **/
export interface TabbedResponseWriterOptions {
metricsAtAllLevels: boolean;
partialRows: boolean;
diff --git a/src/plugins/data/public/actions/filters/create_filters_from_range_select.ts b/src/plugins/data/public/actions/filters/create_filters_from_range_select.ts
index ea17e91d085e7..2ae1805c8aa28 100644
--- a/src/plugins/data/public/actions/filters/create_filters_from_range_select.ts
+++ b/src/plugins/data/public/actions/filters/create_filters_from_range_select.ts
@@ -13,8 +13,7 @@ import { esFilters, IFieldType, RangeFilterParams } from '../../../public';
import { getIndexPatterns, getSearchService } from '../../../public/services';
import { AggConfigSerialized } from '../../../common/search/aggs';
-/** @internal */
-export interface RangeSelectDataContext {
+interface RangeSelectDataContext {
table: Datatable;
column: number;
range: number[];
diff --git a/src/plugins/data/public/actions/filters/create_filters_from_value_click.test.ts b/src/plugins/data/public/actions/filters/create_filters_from_value_click.test.ts
index e4854dac9408b..5163f979d3ff5 100644
--- a/src/plugins/data/public/actions/filters/create_filters_from_value_click.test.ts
+++ b/src/plugins/data/public/actions/filters/create_filters_from_value_click.test.ts
@@ -9,10 +9,7 @@
import { IndexPatternsContract } from '../../../public';
import { dataPluginMock } from '../../../public/mocks';
import { setIndexPatterns, setSearchService } from '../../../public/services';
-import {
- createFiltersFromValueClickAction,
- ValueClickDataContext,
-} from './create_filters_from_value_click';
+import { createFiltersFromValueClickAction } from './create_filters_from_value_click';
import { FieldFormatsGetConfigFn, BytesFormat } from '../../../../field_formats/common';
import { RangeFilter } from '@kbn/es-query';
@@ -22,7 +19,7 @@ const mockField = {
};
describe('createFiltersFromValueClick', () => {
- let dataPoints: ValueClickDataContext['data'];
+ let dataPoints: Parameters<typeof createFiltersFromValueClickAction>[0]['data'];
beforeEach(() => {
dataPoints = [
diff --git a/src/plugins/data/public/actions/filters/create_filters_from_value_click.ts b/src/plugins/data/public/actions/filters/create_filters_from_value_click.ts
index e1088b42e37b6..23ab718e512bd 100644
--- a/src/plugins/data/public/actions/filters/create_filters_from_value_click.ts
+++ b/src/plugins/data/public/actions/filters/create_filters_from_value_click.ts
@@ -12,8 +12,7 @@ import { esFilters, Filter } from '../../../public';
import { getIndexPatterns, getSearchService } from '../../../public/services';
import { AggConfigSerialized } from '../../../common/search/aggs';
-/** @internal */
-export interface ValueClickDataContext {
+interface ValueClickDataContext {
data: Array<{
table: Pick<Datatable, 'rows' | 'columns'>;
column: number;
diff --git a/src/plugins/data/public/autocomplete/autocomplete_service.ts b/src/plugins/data/public/autocomplete/autocomplete_service.ts
index 67efbe2af29ce..0d21c7e765501 100644
--- a/src/plugins/data/public/autocomplete/autocomplete_service.ts
+++ b/src/plugins/data/public/autocomplete/autocomplete_service.ts
@@ -8,13 +8,13 @@
import { CoreSetup, PluginInitializerContext } from 'src/core/public';
import moment from 'moment';
-import { TimefilterSetup } from '../query';
+import type { TimefilterSetup } from '../query';
import { QuerySuggestionGetFn } from './providers/query_suggestion_provider';
import {
getEmptyValueSuggestions,
setupValueSuggestionProvider,
- ValueSuggestionsGetFn,
} from './providers/value_suggestion_provider';
+import type { ValueSuggestionsGetFn } from './providers/value_suggestion_provider';
import { ConfigSchema } from '../../config';
import { UsageCollectionSetup } from '../../../usage_collection/public';
diff --git a/src/plugins/data/public/autocomplete/providers/value_suggestion_provider.test.ts b/src/plugins/data/public/autocomplete/providers/value_suggestion_provider.test.ts
index 7ecd371e39db7..4a68c7232ea7e 100644
--- a/src/plugins/data/public/autocomplete/providers/value_suggestion_provider.test.ts
+++ b/src/plugins/data/public/autocomplete/providers/value_suggestion_provider.test.ts
@@ -7,8 +7,9 @@
*/
import { stubIndexPattern, stubFields } from '../../stubs';
-import { TimefilterSetup } from '../../query';
-import { setupValueSuggestionProvider, ValueSuggestionsGetFn } from './value_suggestion_provider';
+import type { TimefilterSetup } from '../../query';
+import { setupValueSuggestionProvider } from './value_suggestion_provider';
+import type { ValueSuggestionsGetFn } from './value_suggestion_provider';
import { IUiSettingsClient, CoreSetup } from 'kibana/public';
import { UI_SETTINGS } from '../../../common';
diff --git a/src/plugins/data/public/autocomplete/providers/value_suggestion_provider.ts b/src/plugins/data/public/autocomplete/providers/value_suggestion_provider.ts
index 588bac4739c53..31f886daeb4cc 100644
--- a/src/plugins/data/public/autocomplete/providers/value_suggestion_provider.ts
+++ b/src/plugins/data/public/autocomplete/providers/value_suggestion_provider.ts
@@ -11,7 +11,7 @@ import { buildQueryFromFilters } from '@kbn/es-query';
import { memoize } from 'lodash';
import { CoreSetup } from 'src/core/public';
import { IIndexPattern, IFieldType, UI_SETTINGS, ValueSuggestionsMethod } from '../../../common';
-import { TimefilterSetup } from '../../query';
+import type { TimefilterSetup } from '../../query';
import { AutocompleteUsageCollector } from '../collectors';
export type ValueSuggestionsGetFn = (args: ValueSuggestionsGetFnArgs) => Promise<any[]>;
diff --git a/src/plugins/data/public/plugin.ts b/src/plugins/data/public/plugin.ts
index 25f649f69a052..7d6983725b179 100644
--- a/src/plugins/data/public/plugin.ts
+++ b/src/plugins/data/public/plugin.ts
@@ -11,7 +11,7 @@ import './index.scss';
import { PluginInitializerContext, CoreSetup, CoreStart, Plugin } from 'src/core/public';
import { ConfigSchema } from '../config';
import { Storage, IStorageWrapper, createStartServicesGetter } from '../../kibana_utils/public';
-import {
+import type {
DataPublicPluginSetup,
DataPublicPluginStart,
DataSetupDependencies,
diff --git a/src/plugins/data/public/query/filter_manager/filter_manager.ts b/src/plugins/data/public/query/filter_manager/filter_manager.ts
index f076a2c591fb1..bfedf444cf23e 100644
--- a/src/plugins/data/public/query/filter_manager/filter_manager.ts
+++ b/src/plugins/data/public/query/filter_manager/filter_manager.ts
@@ -14,7 +14,6 @@ import { IUiSettingsClient } from 'src/core/public';
import { isFilterPinned, onlyDisabledFiltersChanged, Filter } from '@kbn/es-query';
import { sortFilters } from './lib/sort_filters';
import { mapAndFlattenFilters } from './lib/map_and_flatten_filters';
-import { PartitionedFilters } from './types';
import {
FilterStateStore,
@@ -31,6 +30,11 @@ import {
telemetry,
} from '../../../common/query/persistable_state';
+interface PartitionedFilters {
+ globalFilters: Filter[];
+ appFilters: Filter[];
+}
+
export class FilterManager implements PersistableStateService {
private filters: Filter[] = [];
private updated$: Subject<void> = new Subject();
diff --git a/src/plugins/data/public/query/lib/get_default_query.ts b/src/plugins/data/public/query/lib/get_default_query.ts
index 015c128171a8e..fd571e46083f5 100644
--- a/src/plugins/data/public/query/lib/get_default_query.ts
+++ b/src/plugins/data/public/query/lib/get_default_query.ts
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-export type QueryLanguage = 'kuery' | 'lucene';
+type QueryLanguage = 'kuery' | 'lucene';
export function getDefaultQuery(language: QueryLanguage = 'kuery') {
return {
diff --git a/src/plugins/data/public/query/query_service.ts b/src/plugins/data/public/query/query_service.ts
index 314f13e3524db..dc6b9586b0b4b 100644
--- a/src/plugins/data/public/query/query_service.ts
+++ b/src/plugins/data/public/query/query_service.ts
@@ -12,10 +12,12 @@ import { IStorageWrapper } from 'src/plugins/kibana_utils/public';
import { buildEsQuery } from '@kbn/es-query';
import { FilterManager } from './filter_manager';
import { createAddToQueryLog } from './lib';
-import { TimefilterService, TimefilterSetup } from './timefilter';
+import { TimefilterService } from './timefilter';
+import type { TimefilterSetup } from './timefilter';
import { createSavedQueryService } from './saved_query/saved_query_service';
import { createQueryStateObservable } from './state_sync/create_global_query_observable';
-import { QueryStringContract, QueryStringManager } from './query_string';
+import type { QueryStringContract } from './query_string';
+import { QueryStringManager } from './query_string';
import { getEsQueryConfig, TimeRange } from '../../common';
import { getUiSettings } from '../services';
import { NowProviderInternalContract } from '../now_provider';
diff --git a/src/plugins/data/public/query/query_string/query_string_manager.mock.ts b/src/plugins/data/public/query/query_string/query_string_manager.mock.ts
index 976d3ce13e7de..6d20f2a4bea34 100644
--- a/src/plugins/data/public/query/query_string/query_string_manager.mock.ts
+++ b/src/plugins/data/public/query/query_string/query_string_manager.mock.ts
@@ -6,7 +6,7 @@
* Side Public License, v 1.
*/
-import { QueryStringContract } from '.';
+import type { QueryStringContract } from '.';
import { Observable } from 'rxjs';
const createSetupContractMock = () => {
diff --git a/src/plugins/data/public/query/saved_query/saved_query_service.test.ts b/src/plugins/data/public/query/saved_query/saved_query_service.test.ts
index 047051c302083..57af09a0ea824 100644
--- a/src/plugins/data/public/query/saved_query/saved_query_service.test.ts
+++ b/src/plugins/data/public/query/saved_query/saved_query_service.test.ts
@@ -8,7 +8,7 @@
import { createSavedQueryService } from './saved_query_service';
import { httpServiceMock } from '../../../../../core/public/mocks';
-import { SavedQueryAttributes } from '../../../common';
+import type { SavedQueryAttributes } from '../../../common';
const http = httpServiceMock.createStartContract();
diff --git a/src/plugins/data/public/query/saved_query/saved_query_service.ts b/src/plugins/data/public/query/saved_query/saved_query_service.ts
index 17b47c78c7000..b5a21e2ac2095 100644
--- a/src/plugins/data/public/query/saved_query/saved_query_service.ts
+++ b/src/plugins/data/public/query/saved_query/saved_query_service.ts
@@ -8,7 +8,7 @@
import { HttpStart } from 'src/core/public';
import { SavedQuery } from './types';
-import { SavedQueryAttributes } from '../../../common';
+import type { SavedQueryAttributes } from '../../../common';
export const createSavedQueryService = (http: HttpStart) => {
const createQuery = async (attributes: SavedQueryAttributes, { overwrite = false } = {}) => {
diff --git a/src/plugins/data/public/query/state_sync/create_global_query_observable.ts b/src/plugins/data/public/query/state_sync/create_global_query_observable.ts
index 3c94d6eb3c056..3577478154c31 100644
--- a/src/plugins/data/public/query/state_sync/create_global_query_observable.ts
+++ b/src/plugins/data/public/query/state_sync/create_global_query_observable.ts
@@ -9,12 +9,12 @@
import { Observable, Subscription } from 'rxjs';
import { map, tap } from 'rxjs/operators';
import { isFilterPinned } from '@kbn/es-query';
-import { TimefilterSetup } from '../timefilter';
+import type { TimefilterSetup } from '../timefilter';
import { FilterManager } from '../filter_manager';
import { QueryState, QueryStateChange } from './index';
import { createStateContainer } from '../../../../kibana_utils/public';
import { compareFilters, COMPARE_ALL_OPTIONS } from '../../../common';
-import { QueryStringContract } from '../query_string';
+import type { QueryStringContract } from '../query_string';
export function createQueryStateObservable({
timefilter: { timefilter },
diff --git a/src/plugins/data/public/query/timefilter/lib/diff_time_picker_vals.ts b/src/plugins/data/public/query/timefilter/lib/diff_time_picker_vals.ts
index 2d815ea168f6b..9b50c8d93d496 100644
--- a/src/plugins/data/public/query/timefilter/lib/diff_time_picker_vals.ts
+++ b/src/plugins/data/public/query/timefilter/lib/diff_time_picker_vals.ts
@@ -9,7 +9,7 @@
import _ from 'lodash';
import { RefreshInterval } from '../../../../common';
-import { InputTimeRange } from '../types';
+import type { InputTimeRange } from '../types';
const valueOf = function (o: any) {
if (o) return o.valueOf();
diff --git a/src/plugins/data/public/query/timefilter/timefilter.ts b/src/plugins/data/public/query/timefilter/timefilter.ts
index f3520abb2f46e..e13e8b17a7f43 100644
--- a/src/plugins/data/public/query/timefilter/timefilter.ts
+++ b/src/plugins/data/public/query/timefilter/timefilter.ts
@@ -11,7 +11,7 @@ import { Subject, BehaviorSubject } from 'rxjs';
import moment from 'moment';
import { PublicMethodsOf } from '@kbn/utility-types';
import { areRefreshIntervalsDifferent, areTimeRangesDifferent } from './lib/diff_time_picker_vals';
-import { TimefilterConfig, InputTimeRange, TimeRangeBounds } from './types';
+import type { TimefilterConfig, InputTimeRange, TimeRangeBounds } from './types';
import { NowProviderInternalContract } from '../../now_provider';
import {
calculateBounds,
diff --git a/src/plugins/data/public/search/aggs/aggs_service.test.ts b/src/plugins/data/public/search/aggs/aggs_service.test.ts
index 20e07360a68e5..c7df4354cc76b 100644
--- a/src/plugins/data/public/search/aggs/aggs_service.test.ts
+++ b/src/plugins/data/public/search/aggs/aggs_service.test.ts
@@ -53,7 +53,7 @@ describe('AggsService - public', () => {
test('registers default agg types', () => {
service.setup(setupDeps);
const start = service.start(startDeps);
- expect(start.types.getAll().buckets.length).toBe(12);
+ expect(start.types.getAll().buckets.length).toBe(14);
expect(start.types.getAll().metrics.length).toBe(23);
});
@@ -69,7 +69,7 @@ describe('AggsService - public', () => {
);
const start = service.start(startDeps);
- expect(start.types.getAll().buckets.length).toBe(13);
+ expect(start.types.getAll().buckets.length).toBe(15);
expect(start.types.getAll().buckets.some(({ name }) => name === 'foo')).toBe(true);
expect(start.types.getAll().metrics.length).toBe(24);
expect(start.types.getAll().metrics.some(({ name }) => name === 'bar')).toBe(true);
diff --git a/src/plugins/data/public/search/collectors/types.ts b/src/plugins/data/public/search/collectors/types.ts
index 49c240d1ccb16..d0a2e61f45109 100644
--- a/src/plugins/data/public/search/collectors/types.ts
+++ b/src/plugins/data/public/search/collectors/types.ts
@@ -68,9 +68,6 @@ export enum SEARCH_EVENT_TYPE {
SESSIONS_LIST_LOADED = 'sessionsListLoaded',
}
-/**
- * @internal
- */
export interface SearchUsageCollector {
trackQueryTimedOut: () => Promise<void>;
trackSessionIndicatorTourLoading: () => Promise<void>;
diff --git a/src/plugins/data/public/search/errors/types.ts b/src/plugins/data/public/search/errors/types.ts
index d541e53be78f9..8f18ab06fcd94 100644
--- a/src/plugins/data/public/search/errors/types.ts
+++ b/src/plugins/data/public/search/errors/types.ts
@@ -32,7 +32,7 @@ export interface Reason {
};
}
-export interface IEsErrorAttributes {
+interface IEsErrorAttributes {
type: string;
reason: string;
root_cause?: Reason[];
diff --git a/src/plugins/data/public/search/fetch/handle_response.tsx b/src/plugins/data/public/search/fetch/handle_response.tsx
index 9e68209af2b92..10b2f69a2a320 100644
--- a/src/plugins/data/public/search/fetch/handle_response.tsx
+++ b/src/plugins/data/public/search/fetch/handle_response.tsx
@@ -13,7 +13,7 @@ import { IKibanaSearchResponse } from 'src/plugins/data/common';
import { ShardFailureOpenModalButton } from '../../ui/shard_failure_modal';
import { toMountPoint } from '../../../../kibana_react/public';
import { getNotifications } from '../../services';
-import { SearchRequest } from '..';
+import type { SearchRequest } from '..';
export function handleResponse(request: SearchRequest, response: IKibanaSearchResponse) {
const { rawResponse } = response;
diff --git a/src/plugins/data/public/search/mocks.ts b/src/plugins/data/public/search/mocks.ts
index 562b367b92c92..b82e0776777c5 100644
--- a/src/plugins/data/public/search/mocks.ts
+++ b/src/plugins/data/public/search/mocks.ts
@@ -8,7 +8,7 @@
import { searchAggsSetupMock, searchAggsStartMock } from './aggs/mocks';
import { searchSourceMock } from './search_source/mocks';
-import { ISearchSetup, ISearchStart } from './types';
+import type { ISearchSetup, ISearchStart } from './types';
import { getSessionsClientMock, getSessionServiceMock } from './session/mocks';
import { createSearchUsageCollectorMock } from './collectors/mocks';
diff --git a/src/plugins/data/public/search/search_service.ts b/src/plugins/data/public/search/search_service.ts
index ecc0e84917251..76aae8582287d 100644
--- a/src/plugins/data/public/search/search_service.ts
+++ b/src/plugins/data/public/search/search_service.ts
@@ -15,7 +15,7 @@ import {
} from 'src/core/public';
import { BehaviorSubject } from 'rxjs';
import { BfetchPublicSetup } from 'src/plugins/bfetch/public';
-import { ISearchSetup, ISearchStart } from './types';
+import type { ISearchSetup, ISearchStart } from './types';
import { handleResponse } from './fetch';
import {
diff --git a/src/plugins/data/public/search/search_source/mocks.ts b/src/plugins/data/public/search/search_source/mocks.ts
index 75ab8dbac7d2d..169ac4b84a505 100644
--- a/src/plugins/data/public/search/search_source/mocks.ts
+++ b/src/plugins/data/public/search/search_source/mocks.ts
@@ -7,7 +7,7 @@
*/
import { searchSourceCommonMock } from '../../../common/search/search_source/mocks';
-import { ISearchStart } from '../types';
+import type { ISearchStart } from '../types';
function createStartContract(): jest.Mocked