diff --git a/x-pack/plugins/infra/common/http_api/log_analysis/results/index.ts b/x-pack/plugins/infra/common/http_api/log_analysis/results/index.ts
index 30b6be435837b..cbd89db97236f 100644
--- a/x-pack/plugins/infra/common/http_api/log_analysis/results/index.ts
+++ b/x-pack/plugins/infra/common/http_api/log_analysis/results/index.ts
@@ -8,4 +8,5 @@ export * from './log_entry_categories';
export * from './log_entry_category_datasets';
export * from './log_entry_category_examples';
export * from './log_entry_rate';
-export * from './log_entry_rate_examples';
+export * from './log_entry_examples';
+export * from './log_entry_anomalies';
diff --git a/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_anomalies.ts b/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_anomalies.ts
new file mode 100644
index 0000000000000..639ac63f9b14d
--- /dev/null
+++ b/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_anomalies.ts
@@ -0,0 +1,137 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import * as rt from 'io-ts';
+
+import { timeRangeRT, routeTimingMetadataRT } from '../../shared';
+
+export const LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH =
+ '/api/infra/log_analysis/results/log_entry_anomalies';
+
+// [Sort field value, tiebreaker value]
+const paginationCursorRT = rt.tuple([
+ rt.union([rt.string, rt.number]),
+ rt.union([rt.string, rt.number]),
+]);
+
+export type PaginationCursor = rt.TypeOf<typeof paginationCursorRT>;
+
+export const anomalyTypeRT = rt.keyof({
+ logRate: null,
+ logCategory: null,
+});
+
+export type AnomalyType = rt.TypeOf<typeof anomalyTypeRT>;
+
+const logEntryAnomalyCommonFieldsRT = rt.type({
+ id: rt.string,
+ anomalyScore: rt.number,
+ dataset: rt.string,
+ typical: rt.number,
+ actual: rt.number,
+ type: anomalyTypeRT,
+ duration: rt.number,
+ startTime: rt.number,
+ jobId: rt.string,
+});
+const logEntrylogRateAnomalyRT = logEntryAnomalyCommonFieldsRT;
+const logEntrylogCategoryAnomalyRT = rt.partial({
+ categoryId: rt.string,
+});
+const logEntryAnomalyRT = rt.intersection([
+ logEntryAnomalyCommonFieldsRT,
+ logEntrylogRateAnomalyRT,
+ logEntrylogCategoryAnomalyRT,
+]);
+
+export type LogEntryAnomaly = rt.TypeOf<typeof logEntryAnomalyRT>;
+
+export const getLogEntryAnomaliesSuccessReponsePayloadRT = rt.intersection([
+ rt.type({
+ data: rt.intersection([
+ rt.type({
+ anomalies: rt.array(logEntryAnomalyRT),
+ // Signifies whether there are more entries backwards or forwards: if this was a request
+ // for a previous page, there are more previous pages; if it was a request for a next
+ // page, there are more next pages.
+ hasMoreEntries: rt.boolean,
+ }),
+ rt.partial({
+ paginationCursors: rt.type({
+ // The cursor to use to fetch the previous page
+ previousPageCursor: paginationCursorRT,
+ // The cursor to use to fetch the next page
+ nextPageCursor: paginationCursorRT,
+ }),
+ }),
+ ]),
+ }),
+ rt.partial({
+ timing: routeTimingMetadataRT,
+ }),
+]);
+
+export type GetLogEntryAnomaliesSuccessResponsePayload = rt.TypeOf<
+ typeof getLogEntryAnomaliesSuccessReponsePayloadRT
+>;
+
+const sortOptionsRT = rt.keyof({
+ anomalyScore: null,
+ dataset: null,
+ startTime: null,
+});
+
+const sortDirectionsRT = rt.keyof({
+ asc: null,
+ desc: null,
+});
+
+const paginationPreviousPageCursorRT = rt.type({
+ searchBefore: paginationCursorRT,
+});
+
+const paginationNextPageCursorRT = rt.type({
+ searchAfter: paginationCursorRT,
+});
+
+const paginationRT = rt.intersection([
+ rt.type({
+ pageSize: rt.number,
+ }),
+ rt.partial({
+ cursor: rt.union([paginationPreviousPageCursorRT, paginationNextPageCursorRT]),
+ }),
+]);
+
+export type Pagination = rt.TypeOf<typeof paginationRT>;
+
+const sortRT = rt.type({
+ field: sortOptionsRT,
+ direction: sortDirectionsRT,
+});
+
+export type Sort = rt.TypeOf<typeof sortRT>;
+
+export const getLogEntryAnomaliesRequestPayloadRT = rt.type({
+ data: rt.intersection([
+ rt.type({
+ // the ID of the source configuration
+ sourceId: rt.string,
+ // the time range to fetch the log entry anomalies from
+ timeRange: timeRangeRT,
+ }),
+ rt.partial({
+ // Pagination properties
+ pagination: paginationRT,
+ // Sort properties
+ sort: sortRT,
+ }),
+ ]),
+});
+
+export type GetLogEntryAnomaliesRequestPayload = rt.TypeOf<
+ typeof getLogEntryAnomaliesRequestPayloadRT
+>;
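Editor's note: these io-ts codecs are what give the route its runtime validation. A minimal sketch of how a request body decodes against the codec above (sample values are illustrative; `timeRangeRT` is assumed to carry `startTime`/`endTime`, matching the client service call later in this diff):

```ts
import { isRight } from 'fp-ts/lib/Either';

const decoded = getLogEntryAnomaliesRequestPayloadRT.decode({
  data: {
    sourceId: 'default',
    timeRange: { startTime: 1594000000000, endTime: 1594003600000 },
    // `pagination` and `sort` are optional (the rt.partial branch above)
    pagination: { pageSize: 25 },
    sort: { field: 'anomalyScore', direction: 'desc' },
  },
});

if (isRight(decoded)) {
  // decoded.right is a fully typed GetLogEntryAnomaliesRequestPayload
  console.log(decoded.right.data.sort?.direction); // 'desc'
}
```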
diff --git a/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_examples.ts b/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_examples.ts
new file mode 100644
index 0000000000000..1eed29cd37560
--- /dev/null
+++ b/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_examples.ts
@@ -0,0 +1,82 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import * as rt from 'io-ts';
+
+import {
+ badRequestErrorRT,
+ forbiddenErrorRT,
+ timeRangeRT,
+ routeTimingMetadataRT,
+} from '../../shared';
+
+export const LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH =
+ '/api/infra/log_analysis/results/log_entry_examples';
+
+/**
+ * request
+ */
+
+export const getLogEntryExamplesRequestPayloadRT = rt.type({
+ data: rt.intersection([
+ rt.type({
+ // the dataset to fetch the log rate examples from
+ dataset: rt.string,
+ // the number of examples to fetch
+ exampleCount: rt.number,
+ // the id of the source configuration
+ sourceId: rt.string,
+ // the time range to fetch the log rate examples from
+ timeRange: timeRangeRT,
+ }),
+ rt.partial({
+ categoryId: rt.string,
+ }),
+ ]),
+});
+
+export type GetLogEntryExamplesRequestPayload = rt.TypeOf<
+ typeof getLogEntryExamplesRequestPayloadRT
+>;
+
+/**
+ * response
+ */
+
+const logEntryExampleRT = rt.type({
+ id: rt.string,
+ dataset: rt.string,
+ message: rt.string,
+ timestamp: rt.number,
+ tiebreaker: rt.number,
+});
+
+export type LogEntryExample = rt.TypeOf<typeof logEntryExampleRT>;
+
+export const getLogEntryExamplesSuccessReponsePayloadRT = rt.intersection([
+ rt.type({
+ data: rt.type({
+ examples: rt.array(logEntryExampleRT),
+ }),
+ }),
+ rt.partial({
+ timing: routeTimingMetadataRT,
+ }),
+]);
+
+export type GetLogEntryExamplesSuccessReponsePayload = rt.TypeOf<
+ typeof getLogEntryExamplesSuccessReponsePayloadRT
+>;
+
+export const getLogEntryExamplesResponsePayloadRT = rt.union([
+ getLogEntryExamplesSuccessReponsePayloadRT,
+ badRequestErrorRT,
+ forbiddenErrorRT,
+]);
+
+export type GetLogEntryExamplesResponsePayload = rt.TypeOf<
+ typeof getLogEntryExamplesResponsePayloadRT
+>;
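Editor's note: because the response codec is a union, one decode call separates success payloads from the shared error shapes. A short sketch (the exact shapes of `badRequestErrorRT`/`forbiddenErrorRT` are assumed to require their own discriminating fields):

```ts
import { isRight } from 'fp-ts/lib/Either';

// Matches the success branch of the union above.
const ok = getLogEntryExamplesResponsePayloadRT.decode({ data: { examples: [] } });
console.log(isRight(ok)); // true

// A payload matching neither the success codec nor the error codecs fails to decode.
const bad = getLogEntryExamplesResponsePayloadRT.decode({ data: {} });
console.log(isRight(bad)); // false
```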
diff --git a/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_rate_examples.ts b/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_rate_examples.ts
deleted file mode 100644
index 700f87ec3beb1..0000000000000
--- a/x-pack/plugins/infra/common/http_api/log_analysis/results/log_entry_rate_examples.ts
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import * as rt from 'io-ts';
-
-import {
- badRequestErrorRT,
- forbiddenErrorRT,
- timeRangeRT,
- routeTimingMetadataRT,
-} from '../../shared';
-
-export const LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH =
- '/api/infra/log_analysis/results/log_entry_rate_examples';
-
-/**
- * request
- */
-
-export const getLogEntryRateExamplesRequestPayloadRT = rt.type({
- data: rt.type({
- // the dataset to fetch the log rate examples from
- dataset: rt.string,
- // the number of examples to fetch
- exampleCount: rt.number,
- // the id of the source configuration
- sourceId: rt.string,
- // the time range to fetch the log rate examples from
- timeRange: timeRangeRT,
- }),
-});
-
-export type GetLogEntryRateExamplesRequestPayload = rt.TypeOf<
- typeof getLogEntryRateExamplesRequestPayloadRT
->;
-
-/**
- * response
- */
-
-const logEntryRateExampleRT = rt.type({
- id: rt.string,
- dataset: rt.string,
- message: rt.string,
- timestamp: rt.number,
- tiebreaker: rt.number,
-});
-
-export type LogEntryRateExample = rt.TypeOf<typeof logEntryRateExampleRT>;
-
-export const getLogEntryRateExamplesSuccessReponsePayloadRT = rt.intersection([
- rt.type({
- data: rt.type({
- examples: rt.array(logEntryRateExampleRT),
- }),
- }),
- rt.partial({
- timing: routeTimingMetadataRT,
- }),
-]);
-
-export type GetLogEntryRateExamplesSuccessReponsePayload = rt.TypeOf<
- typeof getLogEntryRateExamplesSuccessReponsePayloadRT
->;
-
-export const getLogEntryRateExamplesResponsePayloadRT = rt.union([
- getLogEntryRateExamplesSuccessReponsePayloadRT,
- badRequestErrorRT,
- forbiddenErrorRT,
-]);
-
-export type GetLogEntryRateExamplesResponsePayload = rt.TypeOf<
- typeof getLogEntryRateExamplesResponsePayloadRT
->;
diff --git a/x-pack/plugins/infra/common/log_analysis/log_analysis_results.ts b/x-pack/plugins/infra/common/log_analysis/log_analysis_results.ts
index 19c92cb381104..f4497dbba5056 100644
--- a/x-pack/plugins/infra/common/log_analysis/log_analysis_results.ts
+++ b/x-pack/plugins/infra/common/log_analysis/log_analysis_results.ts
@@ -41,6 +41,10 @@ export const formatAnomalyScore = (score: number) => {
return Math.round(score);
};
+export const formatOneDecimalPlace = (number: number) => {
+ return Math.round(number * 10) / 10;
+};
+
export const getFriendlyNameForPartitionId = (partitionId: string) => {
return partitionId !== '' ? partitionId : 'unknown';
};
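Editor's note: `formatOneDecimalPlace` is the usual scale-round-unscale rounding; it backs the "2.5x" ratio label in the new anomalies table. For example:

```ts
formatOneDecimalPlace(2.34); // 2.3
formatOneDecimalPlace(0.97); // 1
formatOneDecimalPlace(12); // 12
```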
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_results_content.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_results_content.tsx
index bf4dbcd87cc41..21c3e3ec70029 100644
--- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_results_content.tsx
+++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/page_results_content.tsx
@@ -5,30 +5,18 @@
*/
import datemath from '@elastic/datemath';
-import {
- EuiBadge,
- EuiFlexGroup,
- EuiFlexItem,
- EuiPage,
- EuiPanel,
- EuiSuperDatePicker,
- EuiText,
-} from '@elastic/eui';
-import numeral from '@elastic/numeral';
-import { FormattedMessage } from '@kbn/i18n/react';
+import { EuiFlexGroup, EuiFlexItem, EuiPage, EuiPanel, EuiSuperDatePicker } from '@elastic/eui';
import moment from 'moment';
import React, { useCallback, useEffect, useMemo, useState } from 'react';
import { euiStyled, useTrackPageview } from '../../../../../observability/public';
import { TimeRange } from '../../../../common/http_api/shared/time_range';
import { bucketSpan } from '../../../../common/log_analysis';
-import { LoadingOverlayWrapper } from '../../../components/loading_overlay_wrapper';
import { LogAnalysisJobProblemIndicator } from '../../../components/logging/log_analysis_job_status';
import { useInterval } from '../../../hooks/use_interval';
-import { useKibanaUiSetting } from '../../../utils/use_kibana_ui_setting';
import { AnomaliesResults } from './sections/anomalies';
-import { LogRateResults } from './sections/log_rate';
import { useLogEntryRateModuleContext } from './use_log_entry_rate_module';
import { useLogEntryRateResults } from './use_log_entry_rate_results';
+import { useLogEntryAnomaliesResults } from './use_log_entry_anomalies_results';
import {
StringTimeRange,
useLogAnalysisResultsUrlState,
@@ -36,6 +24,15 @@ import {
const JOB_STATUS_POLLING_INTERVAL = 30000;
+export const SORT_DEFAULTS = {
+ direction: 'desc' as const,
+ field: 'anomalyScore' as const,
+};
+
+export const PAGINATION_DEFAULTS = {
+ pageSize: 25,
+};
+
interface LogEntryRateResultsContentProps {
onOpenSetup: () => void;
}
@@ -46,8 +43,6 @@ export const LogEntryRateResultsContent: React.FunctionComponent<LogEntryRateRe
-  const [dateFormat] = useKibanaUiSetting('dateFormat');
-
   setQueryTimeRange({
@@ -182,45 +194,18 @@ export const LogEntryRateResultsContent: React.FunctionComponent<LogEntryRateRe
-          <EuiFlexGroup justifyContent="spaceBetween" alignItems="center">
-            <EuiFlexItem grow={false}>
-              {logEntryRate ? (
-                <LoadingOverlayWrapper isLoading={isLoading}>
-                  <EuiText size="s">
-                    <FormattedMessage
-                      id="xpack.infra.logs.analysis.logRateResultsToolbarText"
-                      defaultMessage="Analyzed {numberOfLogs} log entries from {startTime} to {endTime}"
-                      values={{
-                        numberOfLogs: (
-                          <EuiBadge color="primary">
-                            <EuiText size="s" color="ghost">
-                              {numeral(logEntryRate.totalNumberOfLogEntries).format('0.00a')}
-                            </EuiText>
-                          </EuiBadge>
-                        ),
-                        startTime: (
-                          <b>{moment(queryTimeRange.value.startTime).format(dateFormat)}</b>
-                        ),
-                        endTime: <b>{moment(queryTimeRange.value.endTime).format(dateFormat)}</b>,
-                      }}
-                    />
-                  </EuiText>
-                </LoadingOverlayWrapper>
-              ) : null}
-            </EuiFlexItem>
-          </EuiFlexGroup>
+          <AnomaliesResults
+            isLoadingLogRateResults={isLoading}
+            isLoadingAnomaliesResults={isLoadingLogEntryAnomalies}
+            logEntryRateResults={logEntryRate}
+            anomalies={logEntryAnomalies}
+            setTimeRange={handleChartTimeRangeChange}
+            timeRange={queryTimeRange.value}
+            viewSetupForReconfiguration={viewSetupForReconfiguration}
+            page={page}
+            fetchNextPage={fetchNextPage}
+            fetchPreviousPage={fetchPreviousPage}
+            changeSortOptions={changeSortOptions}
+            changePaginationOptions={changePaginationOptions}
+            sortOptions={sortOptions}
+            paginationOptions={paginationOptions}
+          />
-        <LogRateResults
-          isLoading={isLoading}
-          results={logEntryRate}
-          setTimeRange={handleChartTimeRangeChange}
-          timeRange={queryTimeRange.value}
-        />
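Editor's note: a sketch of the wiring this file now implies, assumed from the hook's signature and return value defined later in this diff. The `SORT_DEFAULTS` and `PAGINATION_DEFAULTS` above seed `useLogEntryAnomaliesResults`, whose results feed the `<AnomaliesResults>` props:

```ts
const {
  logEntryAnomalies,
  isLoadingLogEntryAnomalies,
  changeSortOptions,
  changePaginationOptions,
  sortOptions,
  paginationOptions,
  fetchNextPage,
  fetchPreviousPage,
  page,
} = useLogEntryAnomaliesResults({
  startTime: queryTimeRange.value.startTime,
  endTime: queryTimeRange.value.endTime,
  sourceId, // assumed to be available in this component
  defaultSortOptions: SORT_DEFAULTS,
  defaultPaginationOptions: PAGINATION_DEFAULTS,
});
```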
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/chart.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/chart.tsx
index 79ab4475ee5a3..ae5c3b5b93b47 100644
--- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/chart.tsx
+++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/chart.tsx
@@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
-
+import { EuiEmptyPrompt } from '@elastic/eui';
import { RectAnnotationDatum, AnnotationId } from '@elastic/charts';
import {
Axis,
@@ -21,6 +21,7 @@ import numeral from '@elastic/numeral';
import { i18n } from '@kbn/i18n';
import moment from 'moment';
import React, { useCallback, useMemo } from 'react';
+import { LoadingOverlayWrapper } from '../../../../../components/loading_overlay_wrapper';
import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
import {
@@ -36,7 +37,16 @@ export const AnomaliesChart: React.FunctionComponent<{
series: Array<{ time: number; value: number }>;
 annotations: Record<string, RectAnnotationDatum[]>;
renderAnnotationTooltip?: (details?: string) => JSX.Element;
-}> = ({ chartId, series, annotations, setTimeRange, timeRange, renderAnnotationTooltip }) => {
+ isLoading: boolean;
+}> = ({
+ chartId,
+ series,
+ annotations,
+ setTimeRange,
+ timeRange,
+ renderAnnotationTooltip,
+ isLoading,
+}) => {
const [dateFormat] = useKibanaUiSetting('dateFormat', 'Y-MM-DD HH:mm:ss.SSS');
const [isDarkMode] = useKibanaUiSetting('theme:darkMode');
@@ -68,41 +78,56 @@ export const AnomaliesChart: React.FunctionComponent<{
[setTimeRange]
);
-  return (
-    <div style={{ height: 160, width: '100%' }}>
-      <Chart className="log-entry-rate-chart">
-        <Axis
-          id="timestamp"
-          position="bottom"
-          showOverlappingTicks
-          tickFormat={chartDateFormatter}
-        />
-        <Axis
-          id="values"
-          position="left"
-          tickFormat={(value) => numeral(value.toPrecision(3)).format('0[.][00]a')} // https://github.com/adamwdraper/Numeral-js/issues/194
-        />
-        <BarSeries
-          id={chartId}
-          xScaleType="time"
-          yScaleType="linear"
-          xAccessor={'time'}
-          yAccessors={['value']}
-          data={series}
-          barSeriesStyle={barSeriesStyle}
-        />
-        {renderAnnotations(annotations, chartId, renderAnnotationTooltip)}
-        <Settings onBrushEnd={handleBrushEnd} tooltip={tooltipProps} theme={isDarkMode ? DARK_THEME : LIGHT_THEME} />
-      </Chart>
-    </div>
-  );
+  return !isLoading && !series.length ? (
+    <EuiEmptyPrompt
+      title={
+        <h2>
+          {i18n.translate('xpack.infra.logs.analysis.anomalySectionLogRateChartNoData', {
+            defaultMessage: 'There is no log rate data to display.',
+          })}
+        </h2>
+      }
+      titleSize="m"
+    />
+  ) : (
+    <LoadingOverlayWrapper isLoading={isLoading}>
+      <div style={{ height: 160, width: '100%' }}>
+        {series.length ? (
+          <Chart className="log-entry-rate-chart">
+            <Axis
+              id="timestamp"
+              position="bottom"
+              showOverlappingTicks
+              tickFormat={chartDateFormatter}
+            />
+            <Axis
+              id="values"
+              position="left"
+              tickFormat={(value) => numeral(value.toPrecision(3)).format('0[.][00]a')} // https://github.com/adamwdraper/Numeral-js/issues/194
+            />
+            <BarSeries
+              id={chartId}
+              xScaleType="time"
+              yScaleType="linear"
+              xAccessor={'time'}
+              yAccessors={['value']}
+              data={series}
+              barSeriesStyle={barSeriesStyle}
+            />
+            {renderAnnotations(annotations, chartId, renderAnnotationTooltip)}
+            <Settings onBrushEnd={handleBrushEnd} tooltip={tooltipProps} theme={isDarkMode ? DARK_THEME : LIGHT_THEME} />
+          </Chart>
+        ) : null}
+      </div>
+    </LoadingOverlayWrapper>
+  );
};
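Editor's note: callers must now supply `isLoading`. A hypothetical usage of the updated component (prop values are illustrative):

```tsx
<AnomaliesChart
  chartId="overall"
  series={logEntryRateSeries}
  annotations={anomalyAnnotations}
  setTimeRange={setTimeRange}
  timeRange={timeRange}
  renderAnnotationTooltip={renderAnnotationTooltip}
  isLoading={isLoadingLogRateResults}
/>
```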
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/expanded_row.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/expanded_row.tsx
index c527b8c49d099..e4b12e199a048 100644
--- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/expanded_row.tsx
+++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/expanded_row.tsx
@@ -10,12 +10,12 @@ import { i18n } from '@kbn/i18n';
import React from 'react';
import { useMount } from 'react-use';
import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
-import { AnomalyRecord } from '../../use_log_entry_rate_results';
-import { useLogEntryRateModuleContext } from '../../use_log_entry_rate_module';
-import { useLogEntryRateExamples } from '../../use_log_entry_rate_examples';
+import { LogEntryAnomaly } from '../../../../../../common/http_api';
+import { useLogEntryExamples } from '../../use_log_entry_examples';
import { LogEntryExampleMessages } from '../../../../../components/logging/log_entry_examples/log_entry_examples';
-import { LogEntryRateExampleMessage, LogEntryRateExampleMessageHeaders } from './log_entry_example';
+import { LogEntryExampleMessage, LogEntryExampleMessageHeaders } from './log_entry_example';
import { euiStyled } from '../../../../../../../observability/public';
+import { useLogSourceContext } from '../../../../../containers/logs/log_source';
const EXAMPLE_COUNT = 5;
@@ -24,29 +24,27 @@ const examplesTitle = i18n.translate('xpack.infra.logs.analysis.anomaliesTableEx
});
export const AnomaliesTableExpandedRow: React.FunctionComponent<{
- anomaly: AnomalyRecord;
+ anomaly: LogEntryAnomaly;
timeRange: TimeRange;
- jobId: string;
-}> = ({ anomaly, timeRange, jobId }) => {
- const {
- sourceConfiguration: { sourceId },
- } = useLogEntryRateModuleContext();
+}> = ({ anomaly, timeRange }) => {
+ const { sourceId } = useLogSourceContext();
const {
- getLogEntryRateExamples,
- hasFailedLoadingLogEntryRateExamples,
- isLoadingLogEntryRateExamples,
- logEntryRateExamples,
- } = useLogEntryRateExamples({
- dataset: anomaly.partitionId,
+ getLogEntryExamples,
+ hasFailedLoadingLogEntryExamples,
+ isLoadingLogEntryExamples,
+ logEntryExamples,
+ } = useLogEntryExamples({
+ dataset: anomaly.dataset,
endTime: anomaly.startTime + anomaly.duration,
exampleCount: EXAMPLE_COUNT,
sourceId,
startTime: anomaly.startTime,
+ categoryId: anomaly.categoryId,
});
useMount(() => {
- getLogEntryRateExamples();
+ getLogEntryExamples();
});
return (
@@ -57,17 +55,17 @@ export const AnomaliesTableExpandedRow: React.FunctionComponent<{
{examplesTitle}
-      isLoading={isLoadingLogEntryRateExamples}
-      hasFailedLoading={hasFailedLoadingLogEntryRateExamples}
-      hasResults={logEntryRateExamples.length > 0}
+ isLoading={isLoadingLogEntryExamples}
+ hasFailedLoading={hasFailedLoadingLogEntryExamples}
+ hasResults={logEntryExamples.length > 0}
exampleCount={EXAMPLE_COUNT}
- onReload={getLogEntryRateExamples}
+ onReload={getLogEntryExamples}
>
- {logEntryRateExamples.length > 0 ? (
+ {logEntryExamples.length > 0 ? (
<>
-        <LogEntryRateExampleMessageHeaders dateTime={logEntryRateExamples[0].timestamp} />
-        {logEntryRateExamples.map((example, exampleIndex) => (
-          <LogEntryRateExampleMessage
-            key={exampleIndex}
-            {...example}
-            timeRange={timeRange}
-            jobId={jobId}
-          />
+        <LogEntryExampleMessageHeaders dateTime={logEntryExamples[0].timestamp} />
+        {logEntryExamples.map((example, exampleIndex) => (
+          <LogEntryExampleMessage
+            key={exampleIndex}
+            {...example}
+            timeRange={timeRange}
+            anomaly={anomaly}
+          />
         ))}
       </>
@@ -87,11 +85,11 @@ export const AnomaliesTableExpandedRow: React.FunctionComponent<{
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/index.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/index.tsx
--- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/index.tsx
+++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/index.tsx
@@ -24,13 +29,27 @@ export const AnomaliesResults: React.FunctionComponent<{
-  isLoading: boolean;
-  results: LogEntryRateResults | null;
+  isLoadingLogRateResults: boolean;
+  isLoadingAnomaliesResults: boolean;
+  logEntryRateResults: LogEntryRateResults | null;
+  anomalies: LogEntryAnomalies;
   setTimeRange: (timeRange: TimeRange) => void;
timeRange: TimeRange;
viewSetupForReconfiguration: () => void;
- jobId: string;
-}> = ({ isLoading, results, setTimeRange, timeRange, viewSetupForReconfiguration, jobId }) => {
- const hasAnomalies = useMemo(() => {
- return results && results.histogramBuckets
- ? results.histogramBuckets.some((bucket) => {
- return bucket.partitions.some((partition) => {
- return partition.anomalies.length > 0;
- });
- })
- : false;
- }, [results]);
-
+ page: Page;
+ fetchNextPage?: FetchNextPage;
+ fetchPreviousPage?: FetchPreviousPage;
+ changeSortOptions: ChangeSortOptions;
+ changePaginationOptions: ChangePaginationOptions;
+ sortOptions: SortOptions;
+ paginationOptions: PaginationOptions;
+}> = ({
+ isLoadingLogRateResults,
+ isLoadingAnomaliesResults,
+ logEntryRateResults,
+ setTimeRange,
+ timeRange,
+ viewSetupForReconfiguration,
+ anomalies,
+ changeSortOptions,
+ sortOptions,
+ changePaginationOptions,
+ paginationOptions,
+ fetchNextPage,
+ fetchPreviousPage,
+ page,
+}) => {
const logEntryRateSeries = useMemo(
- () => (results && results.histogramBuckets ? getLogEntryRateCombinedSeries(results) : []),
- [results]
+ () =>
+ logEntryRateResults && logEntryRateResults.histogramBuckets
+ ? getLogEntryRateCombinedSeries(logEntryRateResults)
+ : [],
+ [logEntryRateResults]
);
const anomalyAnnotations = useMemo(
() =>
- results && results.histogramBuckets
- ? getAnnotationsForAll(results)
+ logEntryRateResults && logEntryRateResults.histogramBuckets
+ ? getAnnotationsForAll(logEntryRateResults)
: {
warning: [],
minor: [],
major: [],
critical: [],
},
- [results]
+ [logEntryRateResults]
);
return (
<>
-
-        <EuiTitle size="m" aria-label={title}>
-          <h2>{title}</h2>
-        </EuiTitle>
-        <EuiSpacer size="l" />
-        <LoadingOverlayWrapper isLoading={isLoading} loadingChildren={<LoadingOverlayContent />}>
-          {!results || (results && results.histogramBuckets && !results.histogramBuckets.length) ? (
+      <EuiTitle size="m" aria-label={title}>
+        <h2>{title}</h2>
+      </EuiTitle>
+      {(!logEntryRateResults ||
+        (logEntryRateResults &&
+          logEntryRateResults.histogramBuckets &&
+          !logEntryRateResults.histogramBuckets.length)) &&
+      (!anomalies || anomalies.length === 0) ? (
+        <LoadingOverlayWrapper
+          isLoading={isLoadingLogRateResults || isLoadingAnomaliesResults}
+          loadingChildren={<LoadingOverlayContent />}
+        >
@@ -94,41 +123,38 @@ export const AnomaliesResults: React.FunctionComponent<{
}
/>
-      ) : !hasAnomalies ? (
-        <EuiEmptyPrompt
-          title={
-            <h2>
-              {i18n.translate('xpack.infra.logs.analysis.anomalySectionNoAnomaliesTitle', {
-                defaultMessage: 'No anomalies were detected.',
-              })}
-            </h2>
-          }
-          titleSize="m"
+        </LoadingOverlayWrapper>
+      ) : (
+        <>
+          <AnomaliesChart
+            chartId="overall"
+            setTimeRange={setTimeRange}
+            timeRange={timeRange}
+            series={logEntryRateSeries}
+            annotations={anomalyAnnotations}
+            renderAnnotationTooltip={renderAnnotationTooltip}
+            isLoading={isLoadingLogRateResults}
+          />
+          <AnomaliesTable
+            results={anomalies}
+            setTimeRange={setTimeRange}
+            timeRange={timeRange}
+            changeSortOptions={changeSortOptions}
+            changePaginationOptions={changePaginationOptions}
+            sortOptions={sortOptions}
+            paginationOptions={paginationOptions}
+            fetchNextPage={fetchNextPage}
+            fetchPreviousPage={fetchPreviousPage}
+            page={page}
+            isLoading={isLoadingAnomaliesResults}
+          />
-        />
-      ) : (
-        <>
-          <AnomaliesChart
-            chartId="overall"
-            setTimeRange={setTimeRange}
-            timeRange={timeRange}
-            series={logEntryRateSeries}
-            annotations={anomalyAnnotations}
-            renderAnnotationTooltip={renderAnnotationTooltip}
-          />
-          <AnomaliesTable results={results} setTimeRange={setTimeRange} timeRange={timeRange} jobId={jobId} />
-        </>
-      )}
-    </LoadingOverlayWrapper>
+        </>
+      )}
     </>
   );
 };
@@ -137,13 +163,6 @@ const title = i18n.translate('xpack.infra.logs.analysis.anomaliesSectionTitle',
defaultMessage: 'Anomalies',
});
-const loadingAriaLabel = i18n.translate(
- 'xpack.infra.logs.analysis.anomaliesSectionLoadingAriaLabel',
- { defaultMessage: 'Loading anomalies' }
-);
-
-const LoadingOverlayContent = () => <EuiLoadingSpinner size="xl" aria-label={loadingAriaLabel} />;
-
interface ParsedAnnotationDetails {
anomalyScoresByPartition: Array<{ partitionName: string; maximumAnomalyScore: number }>;
}
@@ -189,3 +208,10 @@ const renderAnnotationTooltip = (details?: string) => {
const TooltipWrapper = euiStyled('div')`
white-space: nowrap;
`;
+
+const loadingAriaLabel = i18n.translate(
+ 'xpack.infra.logs.analysis.anomaliesSectionLoadingAriaLabel',
+ { defaultMessage: 'Loading anomalies' }
+);
+
+const LoadingOverlayContent = () => <EuiLoadingSpinner size="xl" aria-label={loadingAriaLabel} />;
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/log_entry_example.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/log_entry_example.tsx
index 96f665b3693ca..2965e1fede822 100644
--- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/log_entry_example.tsx
+++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/log_entry_example.tsx
@@ -28,7 +28,7 @@ import { useLinkProps } from '../../../../../hooks/use_link_props';
import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
import { partitionField } from '../../../../../../common/log_analysis/job_parameters';
import { getEntitySpecificSingleMetricViewerLink } from '../../../../../components/logging/log_analysis_results/analyze_in_ml_button';
-import { LogEntryRateExample } from '../../../../../../common/http_api/log_analysis/results';
+import { LogEntryExample } from '../../../../../../common/http_api/log_analysis/results';
import {
LogColumnConfiguration,
isTimestampLogColumnConfiguration,
@@ -36,6 +36,7 @@ import {
isMessageLogColumnConfiguration,
} from '../../../../../utils/source_configuration';
import { localizedDate } from '../../../../../../common/formatters/datetime';
+import { LogEntryAnomaly } from '../../../../../../common/http_api';
export const exampleMessageScale = 'medium' as const;
export const exampleTimestampFormat = 'time' as const;
@@ -58,19 +59,19 @@ const VIEW_ANOMALY_IN_ML_LABEL = i18n.translate(
}
);
-type Props = LogEntryRateExample & {
+type Props = LogEntryExample & {
timeRange: TimeRange;
- jobId: string;
+ anomaly: LogEntryAnomaly;
};
-export const LogEntryRateExampleMessage: React.FunctionComponent<Props> = ({
+export const LogEntryExampleMessage: React.FunctionComponent<Props> = ({
id,
dataset,
message,
timestamp,
tiebreaker,
timeRange,
- jobId,
+ anomaly,
}) => {
const [isHovered, setIsHovered] = useState(false);
const [isMenuOpen, setIsMenuOpen] = useState(false);
@@ -107,8 +108,9 @@ export const LogEntryRateExampleMessage: React.FunctionComponent<Props> = ({
});
const viewAnomalyInMachineLearningLinkProps = useLinkProps(
- getEntitySpecificSingleMetricViewerLink(jobId, timeRange, {
+ getEntitySpecificSingleMetricViewerLink(anomaly.jobId, timeRange, {
[partitionField]: dataset,
+ ...(anomaly.categoryId ? { mlcategory: anomaly.categoryId } : {}),
})
);
@@ -233,11 +235,11 @@ export const exampleMessageColumnConfigurations: LogColumnConfiguration[] = [
},
];
-export const LogEntryRateExampleMessageHeaders: React.FunctionComponent<{
+export const LogEntryExampleMessageHeaders: React.FunctionComponent<{
dateTime: number;
}> = ({ dateTime }) => {
return (
-    <LogEntryRateExampleMessageHeadersWrapper>
+    <LogEntryExampleMessageHeadersWrapper>
<>
{exampleMessageColumnConfigurations.map((columnConfiguration) => {
if (isTimestampLogColumnConfiguration(columnConfiguration)) {
@@ -280,11 +282,11 @@ export const LogEntryRateExampleMessageHeaders: React.FunctionComponent<{
{null}
>
-    </LogEntryRateExampleMessageHeadersWrapper>
+    </LogEntryExampleMessageHeadersWrapper>
);
};
-const LogEntryRateExampleMessageHeadersWrapper = euiStyled(LogColumnHeadersWrapper)`
+const LogEntryExampleMessageHeadersWrapper = euiStyled(LogColumnHeadersWrapper)`
border-bottom: none;
box-shadow: none;
padding-right: 0;
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/table.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/table.tsx
index c70a456bfe06a..e0a3b6fb91db0 100644
--- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/table.tsx
+++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/anomalies/table.tsx
@@ -4,45 +4,52 @@
* you may not use this file except in compliance with the Elastic License.
*/
-import { EuiBasicTable, EuiBasicTableColumn } from '@elastic/eui';
+import {
+ EuiBasicTable,
+ EuiBasicTableColumn,
+ EuiIcon,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiButtonIcon,
+ EuiSpacer,
+} from '@elastic/eui';
import { RIGHT_ALIGNMENT } from '@elastic/eui/lib/services';
import moment from 'moment';
import { i18n } from '@kbn/i18n';
-import React, { useCallback, useMemo, useState } from 'react';
+import React, { useCallback, useMemo } from 'react';
import { useSet } from 'react-use';
import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
import {
formatAnomalyScore,
getFriendlyNameForPartitionId,
+ formatOneDecimalPlace,
} from '../../../../../../common/log_analysis';
+import { AnomalyType } from '../../../../../../common/http_api/log_analysis';
import { RowExpansionButton } from '../../../../../components/basic_table';
-import { LogEntryRateResults } from '../../use_log_entry_rate_results';
import { AnomaliesTableExpandedRow } from './expanded_row';
import { AnomalySeverityIndicator } from '../../../../../components/logging/log_analysis_results/anomaly_severity_indicator';
import { useKibanaUiSetting } from '../../../../../utils/use_kibana_ui_setting';
+import {
+ Page,
+ FetchNextPage,
+ FetchPreviousPage,
+ ChangeSortOptions,
+ ChangePaginationOptions,
+ SortOptions,
+ PaginationOptions,
+ LogEntryAnomalies,
+} from '../../use_log_entry_anomalies_results';
+import { LoadingOverlayWrapper } from '../../../../../components/loading_overlay_wrapper';
interface TableItem {
id: string;
dataset: string;
datasetName: string;
anomalyScore: number;
- anomalyMessage: string;
startTime: number;
-}
-
-interface SortingOptions {
- sort: {
- field: keyof TableItem;
- direction: 'asc' | 'desc';
- };
-}
-
-interface PaginationOptions {
- pageIndex: number;
- pageSize: number;
- totalItemCount: number;
- pageSizeOptions: number[];
- hidePerPageOptions: boolean;
+ typical: number;
+ actual: number;
+ type: AnomalyType;
}
const anomalyScoreColumnName = i18n.translate(
@@ -73,125 +80,78 @@ const datasetColumnName = i18n.translate(
}
);
-const moreThanExpectedAnomalyMessage = i18n.translate(
- 'xpack.infra.logs.analysis.anomaliesTableMoreThanExpectedAnomalyMessage',
- {
- defaultMessage: 'More log messages in this dataset than expected',
- }
-);
-
-const fewerThanExpectedAnomalyMessage = i18n.translate(
- 'xpack.infra.logs.analysis.anomaliesTableFewerThanExpectedAnomalyMessage',
- {
- defaultMessage: 'Fewer log messages in this dataset than expected',
- }
-);
-
-const getAnomalyMessage = (actualRate: number, typicalRate: number): string => {
- return actualRate < typicalRate
- ? fewerThanExpectedAnomalyMessage
- : moreThanExpectedAnomalyMessage;
-};
-
export const AnomaliesTable: React.FunctionComponent<{
- results: LogEntryRateResults;
+ results: LogEntryAnomalies;
setTimeRange: (timeRange: TimeRange) => void;
timeRange: TimeRange;
- jobId: string;
-}> = ({ results, timeRange, setTimeRange, jobId }) => {
+ changeSortOptions: ChangeSortOptions;
+ changePaginationOptions: ChangePaginationOptions;
+ sortOptions: SortOptions;
+ paginationOptions: PaginationOptions;
+ page: Page;
+ fetchNextPage?: FetchNextPage;
+ fetchPreviousPage?: FetchPreviousPage;
+ isLoading: boolean;
+}> = ({
+ results,
+ timeRange,
+ setTimeRange,
+ changeSortOptions,
+ sortOptions,
+ changePaginationOptions,
+ paginationOptions,
+ fetchNextPage,
+ fetchPreviousPage,
+ page,
+ isLoading,
+}) => {
const [dateFormat] = useKibanaUiSetting('dateFormat', 'Y-MM-DD HH:mm:ss');
+ const tableSortOptions = useMemo(() => {
+ return {
+ sort: sortOptions,
+ };
+ }, [sortOptions]);
+
const tableItems: TableItem[] = useMemo(() => {
- return results.anomalies.map((anomaly) => {
+ return results.map((anomaly) => {
return {
id: anomaly.id,
- dataset: anomaly.partitionId,
- datasetName: getFriendlyNameForPartitionId(anomaly.partitionId),
+ dataset: anomaly.dataset,
+ datasetName: getFriendlyNameForPartitionId(anomaly.dataset),
anomalyScore: formatAnomalyScore(anomaly.anomalyScore),
- anomalyMessage: getAnomalyMessage(anomaly.actualLogEntryRate, anomaly.typicalLogEntryRate),
startTime: anomaly.startTime,
+ type: anomaly.type,
+ typical: anomaly.typical,
+ actual: anomaly.actual,
};
});
}, [results]);
const [expandedIds, { add: expandId, remove: collapseId }] = useSet(new Set());
- const expandedDatasetRowContents = useMemo(
+ const expandedIdsRowContents = useMemo(
() =>
- [...expandedIds].reduce<Record<string, React.ReactNode>>((aggregatedDatasetRows, id) => {
- const anomaly = results.anomalies.find((_anomaly) => _anomaly.id === id);
+ [...expandedIds].reduce<Record<string, React.ReactNode>>((aggregatedRows, id) => {
+ const anomaly = results.find((_anomaly) => _anomaly.id === id);
return {
- ...aggregatedDatasetRows,
+ ...aggregatedRows,
[id]: anomaly ? (
-          <AnomaliesTableExpandedRow anomaly={anomaly} timeRange={timeRange} jobId={jobId} />
+          <AnomaliesTableExpandedRow anomaly={anomaly} timeRange={timeRange} />
) : null,
};
}, {}),
- [expandedIds, results, timeRange, jobId]
+ [expandedIds, results, timeRange]
);
- const [sorting, setSorting] = useState({
- sort: {
- field: 'anomalyScore',
- direction: 'desc',
- },
- });
-
- const [_pagination, setPagination] = useState({
- pageIndex: 0,
- pageSize: 20,
- totalItemCount: results.anomalies.length,
- pageSizeOptions: [10, 20, 50],
- hidePerPageOptions: false,
- });
-
- const paginationOptions = useMemo(() => {
- return {
- ..._pagination,
- totalItemCount: results.anomalies.length,
- };
- }, [_pagination, results]);
-
const handleTableChange = useCallback(
- ({ page = {}, sort = {} }) => {
- const { index, size } = page;
- setPagination((currentPagination) => {
- return {
- ...currentPagination,
- pageIndex: index,
- pageSize: size,
- };
- });
- const { field, direction } = sort;
- setSorting({
- sort: {
- field,
- direction,
- },
- });
+ ({ sort = {} }) => {
+ changeSortOptions(sort);
},
- [setSorting, setPagination]
+ [changeSortOptions]
);
- const sortedTableItems = useMemo(() => {
- let sortedItems: TableItem[] = [];
- if (sorting.sort.field === 'datasetName') {
- sortedItems = tableItems.sort((a, b) => (a.datasetName > b.datasetName ? 1 : -1));
- } else if (sorting.sort.field === 'anomalyScore') {
- sortedItems = tableItems.sort((a, b) => a.anomalyScore - b.anomalyScore);
- } else if (sorting.sort.field === 'startTime') {
- sortedItems = tableItems.sort((a, b) => a.startTime - b.startTime);
- }
-
- return sorting.sort.direction === 'asc' ? sortedItems : sortedItems.reverse();
- }, [tableItems, sorting]);
-
- const pageOfItems: TableItem[] = useMemo(() => {
- const { pageIndex, pageSize } = paginationOptions;
- return sortedTableItems.slice(pageIndex * pageSize, pageIndex * pageSize + pageSize);
- }, [paginationOptions, sortedTableItems]);
-
 const columns: Array<EuiBasicTableColumn<TableItem>> = useMemo(
() => [
{
@@ -204,10 +164,11 @@ export const AnomaliesTable: React.FunctionComponent<{
 render: (anomalyScore: number) => <AnomalySeverityIndicator anomalyScore={anomalyScore} />,
},
{
- field: 'anomalyMessage',
name: anomalyMessageColumnName,
- sortable: false,
truncateText: true,
+ render: (item: TableItem) => (
+   <AnomalyMessage actual={item.actual} typical={item.typical} type={item.type} />
+ ),
},
{
field: 'startTime',
@@ -240,18 +201,116 @@ export const AnomaliesTable: React.FunctionComponent<{
],
[collapseId, expandId, expandedIds, dateFormat]
);
+  return (
+    <>
+      <LoadingOverlayWrapper isLoading={isLoading}>
+        <EuiBasicTable
+          items={tableItems}
+          itemId="id"
+          itemIdToExpandedRowMap={expandedIdsRowContents}
+          isExpandable={true}
+          hasActions={true}
+          columns={columns}
+          sorting={tableSortOptions}
+          onChange={handleTableChange}
+        />
+        <EuiSpacer size="l" />
+        <PaginationControls
+          fetchPreviousPage={fetchPreviousPage}
+          fetchNextPage={fetchNextPage}
+          page={page}
+          isLoading={isLoading}
+        />
+      </LoadingOverlayWrapper>
+    </>
+  );
+};
+
+const AnomalyMessage = ({
+ actual,
+ typical,
+ type,
+}: {
+ actual: number;
+ typical: number;
+ type: AnomalyType;
+}) => {
+ const moreThanExpectedAnomalyMessage = i18n.translate(
+ 'xpack.infra.logs.analysis.anomaliesTableMoreThanExpectedAnomalyMessage',
+ {
+ defaultMessage:
+ 'more log messages in this {type, select, logRate {dataset} logCategory {category}} than expected',
+ values: { type },
+ }
+ );
+
+ const fewerThanExpectedAnomalyMessage = i18n.translate(
+ 'xpack.infra.logs.analysis.anomaliesTableFewerThanExpectedAnomalyMessage',
+ {
+ defaultMessage:
+ 'fewer log messages in this {type, select, logRate {dataset} logCategory {category}} than expected',
+ values: { type },
+ }
+ );
+
+ const isMore = actual > typical;
+ const message = isMore ? moreThanExpectedAnomalyMessage : fewerThanExpectedAnomalyMessage;
+ const ratio = isMore ? actual / typical : typical / actual;
+ const icon = isMore ? 'sortUp' : 'sortDown';
+ // Edge case scenarios where actual and typical might sit at 0.
+ const useRatio = ratio !== Infinity;
+ const ratioMessage = useRatio ? `${formatOneDecimalPlace(ratio)}x` : '';
   return (
-    <EuiBasicTable
-      items={pageOfItems}
-      itemId="id"
-      itemIdToExpandedRowMap={expandedDatasetRowContents}
-      isExpandable={true}
-      hasActions={true}
-      columns={columns}
-      pagination={paginationOptions}
-      sorting={sorting}
-      onChange={handleTableChange}
-    />
+    <span>
+      <EuiIcon type={icon} /> {`${ratioMessage} ${message}`}
+    </span>
   );
 };
+
+const previousPageLabel = i18n.translate(
+ 'xpack.infra.logs.analysis.anomaliesTablePreviousPageLabel',
+ {
+ defaultMessage: 'Previous page',
+ }
+);
+
+const nextPageLabel = i18n.translate('xpack.infra.logs.analysis.anomaliesTableNextPageLabel', {
+ defaultMessage: 'Next page',
+});
+
+const PaginationControls = ({
+ fetchPreviousPage,
+ fetchNextPage,
+ page,
+ isLoading,
+}: {
+ fetchPreviousPage?: () => void;
+ fetchNextPage?: () => void;
+ page: number;
+ isLoading: boolean;
+}) => {
+ return (
+  return (
+    <EuiFlexGroup justifyContent="center" alignItems="center" gutterSize="none">
+      <EuiFlexItem grow={false}>
+        <EuiButtonIcon
+          iconType="arrowLeft"
+          isDisabled={!fetchPreviousPage || isLoading}
+          aria-label={previousPageLabel}
+          onClick={() => fetchPreviousPage && fetchPreviousPage()}
+        />
+      </EuiFlexItem>
+      <EuiFlexItem grow={false}>
+        <span>{page}</span>
+      </EuiFlexItem>
+      <EuiFlexItem grow={false}>
+        <EuiButtonIcon
+          iconType="arrowRight"
+          isDisabled={!fetchNextPage || isLoading}
+          aria-label={nextPageLabel}
+          onClick={() => fetchNextPage && fetchNextPage()}
+        />
+      </EuiFlexItem>
+    </EuiFlexGroup>
+  );
};
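Editor's note: the `ratio !== Infinity` guard in `AnomalyMessage` exists because `actual` or `typical` can be 0. A condensed sketch of the label logic:

```ts
const ratioLabel = (actual: number, typical: number) => {
  const isMore = actual > typical;
  const ratio = isMore ? actual / typical : typical / actual;
  // A zero denominator yields Infinity, so drop the "Nx" prefix entirely.
  return ratio !== Infinity ? `${formatOneDecimalPlace(ratio)}x` : '';
};

ratioLabel(50, 20); // '2.5x', as in "2.5x more log messages ... than expected"
ratioLabel(10, 0); // ''
```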
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/log_rate/bar_chart.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/log_rate/bar_chart.tsx
deleted file mode 100644
index 498a9f88176f8..0000000000000
--- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/log_rate/bar_chart.tsx
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import {
- Axis,
- BarSeries,
- Chart,
- niceTimeFormatter,
- Settings,
- TooltipValue,
- BrushEndListener,
- LIGHT_THEME,
- DARK_THEME,
-} from '@elastic/charts';
-import { i18n } from '@kbn/i18n';
-import numeral from '@elastic/numeral';
-import moment from 'moment';
-import React, { useCallback, useMemo } from 'react';
-
-import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
-import { useKibanaUiSetting } from '../../../../../utils/use_kibana_ui_setting';
-
-export const LogEntryRateBarChart: React.FunctionComponent<{
- setTimeRange: (timeRange: TimeRange) => void;
- timeRange: TimeRange;
- series: Array<{ group: string; time: number; value: number }>;
-}> = ({ series, setTimeRange, timeRange }) => {
- const [dateFormat] = useKibanaUiSetting('dateFormat');
- const [isDarkMode] = useKibanaUiSetting('theme:darkMode');
-
- const chartDateFormatter = useMemo(
- () => niceTimeFormatter([timeRange.startTime, timeRange.endTime]),
- [timeRange]
- );
-
- const tooltipProps = useMemo(
- () => ({
- headerFormatter: (tooltipData: TooltipValue) =>
- moment(tooltipData.value).format(dateFormat || 'Y-MM-DD HH:mm:ss.SSS'),
- }),
- [dateFormat]
- );
-
- const handleBrushEnd = useCallback(
- ({ x }) => {
- if (!x) {
- return;
- }
- const [startTime, endTime] = x;
- setTimeRange({
- endTime,
- startTime,
- });
- },
- [setTimeRange]
- );
-
- return (
-    <div style={{ height: 200, width: '100%' }}>
-      <Chart className="log-entry-rate-chart">
-        <Axis
-          id="timestamp"
-          position="bottom"
-          showOverlappingTicks
-          tickFormat={chartDateFormatter}
-        />
-        <Axis
-          id="values"
-          position="left"
-          tickFormat={(value) => numeral(value.toPrecision(3)).format('0[.][00]a')} // https://github.com/adamwdraper/Numeral-js/issues/194
-        />
-        <BarSeries
-          id="logEntryRate"
-          xScaleType="time"
-          yScaleType="linear"
-          xAccessor={'time'}
-          yAccessors={['value']}
-          splitSeriesAccessors={['group']}
-          stackAccessors={['time']}
-          data={series}
-        />
-        <Settings
-          onBrushEnd={handleBrushEnd}
-          tooltip={tooltipProps}
-          theme={isDarkMode ? DARK_THEME : LIGHT_THEME}
-        />
-      </Chart>
-    </div>
- );
-};
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/log_rate/index.tsx b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/log_rate/index.tsx
deleted file mode 100644
index 3da025d90119f..0000000000000
--- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/sections/log_rate/index.tsx
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { EuiEmptyPrompt, EuiLoadingSpinner, EuiSpacer, EuiText, EuiTitle } from '@elastic/eui';
-import { i18n } from '@kbn/i18n';
-import React, { useMemo } from 'react';
-
-import { TimeRange } from '../../../../../../common/http_api/shared/time_range';
-import { BetaBadge } from '../../../../../components/beta_badge';
-import { LoadingOverlayWrapper } from '../../../../../components/loading_overlay_wrapper';
-import { LogEntryRateResults as Results } from '../../use_log_entry_rate_results';
-import { getLogEntryRatePartitionedSeries } from '../helpers/data_formatters';
-import { LogEntryRateBarChart } from './bar_chart';
-
-export const LogRateResults = ({
- isLoading,
- results,
- setTimeRange,
- timeRange,
-}: {
- isLoading: boolean;
- results: Results | null;
- setTimeRange: (timeRange: TimeRange) => void;
- timeRange: TimeRange;
-}) => {
- const logEntryRateSeries = useMemo(
- () => (results && results.histogramBuckets ? getLogEntryRatePartitionedSeries(results) : []),
- [results]
- );
-
- return (
- <>
-      <EuiTitle size="m" aria-label={title}>
-        <h2>
-          {title} <BetaBadge />
-        </h2>
-      </EuiTitle>
-      <EuiSpacer size="l" />
-      <LoadingOverlayWrapper isLoading={isLoading} loadingChildren={<LoadingOverlayContent />}>
-        {!results || (results && results.histogramBuckets && !results.histogramBuckets.length) ? (
-          <>
-            <EuiSpacer size="l" />
-            <EuiEmptyPrompt
-              title={
-                <h2>
-                  {i18n.translate('xpack.infra.logs.analysis.logRateSectionNoDataTitle', {
-                    defaultMessage: 'There is no data to display.',
-                  })}
-                </h2>
-              }
-              titleSize="m"
-              body={
-                <p>
-                  {i18n.translate('xpack.infra.logs.analysis.logRateSectionNoDataBody', {
-                    defaultMessage: 'You may want to adjust your time range.',
-                  })}
-                </p>
-              }
-            />
-          </>
-        ) : (
-          <>
-            <EuiSpacer size="l" />
-            <EuiText size="s">
-              <p>
-                <b>
-                  {i18n.translate('xpack.infra.logs.analysis.logRateSectionBucketSpanLabel', {
-                    defaultMessage: 'Bucket span: ',
-                  })}
-                </b>
-                {i18n.translate('xpack.infra.logs.analysis.logRateSectionBucketSpanValue', {
-                  defaultMessage: '15 minutes',
-                })}
-              </p>
-            </EuiText>
-            <LogEntryRateBarChart
-              setTimeRange={setTimeRange}
-              timeRange={timeRange}
-              series={logEntryRateSeries}
-            />
-          </>
-        )}
-      </LoadingOverlayWrapper>
-    </>
- );
-};
-
-const title = i18n.translate('xpack.infra.logs.analysis.logRateSectionTitle', {
- defaultMessage: 'Log entries',
-});
-
-const loadingAriaLabel = i18n.translate(
- 'xpack.infra.logs.analysis.logRateSectionLoadingAriaLabel',
- { defaultMessage: 'Loading log rate results' }
-);
-
-const LoadingOverlayContent = () => <EuiLoadingSpinner size="xl" aria-label={loadingAriaLabel} />;
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies.ts
new file mode 100644
index 0000000000000..d4a0eaae43ac0
--- /dev/null
+++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_anomalies.ts
@@ -0,0 +1,41 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { npStart } from '../../../../legacy_singletons';
+import {
+ getLogEntryAnomaliesRequestPayloadRT,
+ getLogEntryAnomaliesSuccessReponsePayloadRT,
+ LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH,
+} from '../../../../../common/http_api/log_analysis';
+import { decodeOrThrow } from '../../../../../common/runtime_types';
+import { Sort, Pagination } from '../../../../../common/http_api/log_analysis';
+
+export const callGetLogEntryAnomaliesAPI = async (
+ sourceId: string,
+ startTime: number,
+ endTime: number,
+ sort: Sort,
+ pagination: Pagination
+) => {
+ const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH, {
+ method: 'POST',
+ body: JSON.stringify(
+ getLogEntryAnomaliesRequestPayloadRT.encode({
+ data: {
+ sourceId,
+ timeRange: {
+ startTime,
+ endTime,
+ },
+ sort,
+ pagination,
+ },
+ })
+ ),
+ });
+
+ return decodeOrThrow(getLogEntryAnomaliesSuccessReponsePayloadRT)(response);
+};
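Editor's note: a hypothetical call site (argument values are illustrative):

```ts
const { data } = await callGetLogEntryAnomaliesAPI(
  'default',
  Date.now() - 15 * 60 * 1000,
  Date.now(),
  { field: 'anomalyScore', direction: 'desc' },
  { pageSize: 25 }
);
// data.anomalies, data.hasMoreEntries, and, when present, data.paginationCursors
// to pass back as `cursor` on the next request.
```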
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_rate_examples.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_examples.ts
similarity index 77%
rename from x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_rate_examples.ts
rename to x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_examples.ts
index d3b30da72af96..a125b53f9e635 100644
--- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_rate_examples.ts
+++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/service_calls/get_log_entry_examples.ts
@@ -10,23 +10,24 @@ import { identity } from 'fp-ts/lib/function';
import { npStart } from '../../../../legacy_singletons';
import {
- getLogEntryRateExamplesRequestPayloadRT,
- getLogEntryRateExamplesSuccessReponsePayloadRT,
+ getLogEntryExamplesRequestPayloadRT,
+ getLogEntryExamplesSuccessReponsePayloadRT,
LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH,
} from '../../../../../common/http_api/log_analysis';
import { createPlainError, throwErrors } from '../../../../../common/runtime_types';
-export const callGetLogEntryRateExamplesAPI = async (
+export const callGetLogEntryExamplesAPI = async (
sourceId: string,
startTime: number,
endTime: number,
dataset: string,
- exampleCount: number
+ exampleCount: number,
+ categoryId?: string
) => {
const response = await npStart.http.fetch(LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH, {
method: 'POST',
body: JSON.stringify(
- getLogEntryRateExamplesRequestPayloadRT.encode({
+ getLogEntryExamplesRequestPayloadRT.encode({
data: {
dataset,
exampleCount,
@@ -35,13 +36,14 @@ export const callGetLogEntryRateExamplesAPI = async (
startTime,
endTime,
},
+ categoryId,
},
})
),
});
return pipe(
- getLogEntryRateExamplesSuccessReponsePayloadRT.decode(response),
+ getLogEntryExamplesSuccessReponsePayloadRT.decode(response),
fold(throwErrors(createPlainError), identity)
);
};
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_anomalies_results.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_anomalies_results.ts
new file mode 100644
index 0000000000000..cadb4c420c133
--- /dev/null
+++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_anomalies_results.ts
@@ -0,0 +1,262 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { useMemo, useState, useCallback, useEffect, useReducer } from 'react';
+
+import { LogEntryAnomaly } from '../../../../common/http_api';
+import { useTrackedPromise } from '../../../utils/use_tracked_promise';
+import { callGetLogEntryAnomaliesAPI } from './service_calls/get_log_entry_anomalies';
+import { Sort, Pagination, PaginationCursor } from '../../../../common/http_api/log_analysis';
+
+export type SortOptions = Sort;
+export type PaginationOptions = Pick<Pagination, 'pageSize'>;
+export type Page = number;
+export type FetchNextPage = () => void;
+export type FetchPreviousPage = () => void;
+export type ChangeSortOptions = (sortOptions: Sort) => void;
+export type ChangePaginationOptions = (paginationOptions: PaginationOptions) => void;
+export type LogEntryAnomalies = LogEntryAnomaly[];
+interface PaginationCursors {
+ previousPageCursor: PaginationCursor;
+ nextPageCursor: PaginationCursor;
+}
+
+interface ReducerState {
+ page: number;
+ lastReceivedCursors: PaginationCursors | undefined;
+ paginationCursor: Pagination['cursor'] | undefined;
+ hasNextPage: boolean;
+ paginationOptions: PaginationOptions;
+ sortOptions: Sort;
+ timeRange: {
+ start: number;
+ end: number;
+ };
+}
+
+type ReducerStateDefaults = Pick<
+ ReducerState,
+ 'page' | 'lastReceivedCursors' | 'paginationCursor' | 'hasNextPage'
+>;
+
+type ReducerAction =
+ | { type: 'changePaginationOptions'; payload: { paginationOptions: PaginationOptions } }
+ | { type: 'changeSortOptions'; payload: { sortOptions: Sort } }
+ | { type: 'fetchNextPage' }
+ | { type: 'fetchPreviousPage' }
+ | { type: 'changeHasNextPage'; payload: { hasNextPage: boolean } }
+ | { type: 'changeLastReceivedCursors'; payload: { lastReceivedCursors: PaginationCursors } }
+ | { type: 'changeTimeRange'; payload: { timeRange: { start: number; end: number } } };
+
+const stateReducer = (state: ReducerState, action: ReducerAction): ReducerState => {
+ const resetPagination = {
+ page: 1,
+ paginationCursor: undefined,
+ };
+ switch (action.type) {
+ case 'changePaginationOptions':
+ return {
+ ...state,
+ ...resetPagination,
+ ...action.payload,
+ };
+ case 'changeSortOptions':
+ return {
+ ...state,
+ ...resetPagination,
+ ...action.payload,
+ };
+ case 'changeHasNextPage':
+ return {
+ ...state,
+ ...action.payload,
+ };
+ case 'changeLastReceivedCursors':
+ return {
+ ...state,
+ ...action.payload,
+ };
+ case 'fetchNextPage':
+ return state.lastReceivedCursors
+ ? {
+ ...state,
+ page: state.page + 1,
+ paginationCursor: { searchAfter: state.lastReceivedCursors.nextPageCursor },
+ }
+ : state;
+ case 'fetchPreviousPage':
+ return state.lastReceivedCursors
+ ? {
+ ...state,
+ page: state.page - 1,
+ paginationCursor: { searchBefore: state.lastReceivedCursors.previousPageCursor },
+ }
+ : state;
+ case 'changeTimeRange':
+ return {
+ ...state,
+ ...resetPagination,
+ ...action.payload,
+ };
+ default:
+ return state;
+ }
+};
+
+const STATE_DEFAULTS: ReducerStateDefaults = {
+ // NOTE: This piece of state is purely client side; it could be extracted out of the hook.
+ page: 1,
+ // Cursor from the last request
+ lastReceivedCursors: undefined,
+ // Cursor to use for the next request. For the first request, and therefore not paging, this will be undefined.
+ paginationCursor: undefined,
+ hasNextPage: false,
+};
+
+export const useLogEntryAnomaliesResults = ({
+ endTime,
+ startTime,
+ sourceId,
+ defaultSortOptions,
+ defaultPaginationOptions,
+}: {
+ endTime: number;
+ startTime: number;
+ sourceId: string;
+ defaultSortOptions: Sort;
+ defaultPaginationOptions: Pick<Pagination, 'pageSize'>;
+}) => {
+ const initStateReducer = (stateDefaults: ReducerStateDefaults): ReducerState => {
+ return {
+ ...stateDefaults,
+ paginationOptions: defaultPaginationOptions,
+ sortOptions: defaultSortOptions,
+ timeRange: {
+ start: startTime,
+ end: endTime,
+ },
+ };
+ };
+
+ const [reducerState, dispatch] = useReducer(stateReducer, STATE_DEFAULTS, initStateReducer);
+
+ const [logEntryAnomalies, setLogEntryAnomalies] = useState<LogEntryAnomalies>([]);
+
+ const [getLogEntryAnomaliesRequest, getLogEntryAnomalies] = useTrackedPromise(
+ {
+ cancelPreviousOn: 'creation',
+ createPromise: async () => {
+ const {
+ timeRange: { start: queryStartTime, end: queryEndTime },
+ sortOptions,
+ paginationOptions,
+ paginationCursor,
+ } = reducerState;
+ return await callGetLogEntryAnomaliesAPI(
+ sourceId,
+ queryStartTime,
+ queryEndTime,
+ sortOptions,
+ {
+ ...paginationOptions,
+ cursor: paginationCursor,
+ }
+ );
+ },
+ onResolve: ({ data: { anomalies, paginationCursors: requestCursors, hasMoreEntries } }) => {
+ const { paginationCursor } = reducerState;
+ if (requestCursors) {
+ dispatch({
+ type: 'changeLastReceivedCursors',
+ payload: { lastReceivedCursors: requestCursors },
+ });
+ }
+ // Check if we have more "next" entries. "Page" covers the "previous" scenario,
+ // since we need to know the page we're on anyway.
+ if (!paginationCursor || (paginationCursor && 'searchAfter' in paginationCursor)) {
+ dispatch({ type: 'changeHasNextPage', payload: { hasNextPage: hasMoreEntries } });
+ } else if (paginationCursor && 'searchBefore' in paginationCursor) {
+ // We've requested a previous page, therefore there is a next page.
+ dispatch({ type: 'changeHasNextPage', payload: { hasNextPage: true } });
+ }
+ setLogEntryAnomalies(anomalies);
+ },
+ },
+ [
+ sourceId,
+ dispatch,
+ reducerState.timeRange,
+ reducerState.sortOptions,
+ reducerState.paginationOptions,
+ reducerState.paginationCursor,
+ ]
+ );
+
+ const changeSortOptions = useCallback(
+ (nextSortOptions: Sort) => {
+ dispatch({ type: 'changeSortOptions', payload: { sortOptions: nextSortOptions } });
+ },
+ [dispatch]
+ );
+
+ const changePaginationOptions = useCallback(
+ (nextPaginationOptions: PaginationOptions) => {
+ dispatch({
+ type: 'changePaginationOptions',
+ payload: { paginationOptions: nextPaginationOptions },
+ });
+ },
+ [dispatch]
+ );
+
+ // Time range has changed
+ useEffect(() => {
+ dispatch({
+ type: 'changeTimeRange',
+ payload: { timeRange: { start: startTime, end: endTime } },
+ });
+ }, [startTime, endTime]);
+
+ useEffect(() => {
+ getLogEntryAnomalies();
+ }, [getLogEntryAnomalies]);
+
+ const handleFetchNextPage = useCallback(() => {
+ if (reducerState.lastReceivedCursors) {
+ dispatch({ type: 'fetchNextPage' });
+ }
+ }, [dispatch, reducerState]);
+
+ const handleFetchPreviousPage = useCallback(() => {
+ if (reducerState.lastReceivedCursors) {
+ dispatch({ type: 'fetchPreviousPage' });
+ }
+ }, [dispatch, reducerState]);
+
+ const isLoadingLogEntryAnomalies = useMemo(
+ () => getLogEntryAnomaliesRequest.state === 'pending',
+ [getLogEntryAnomaliesRequest.state]
+ );
+
+ const hasFailedLoadingLogEntryAnomalies = useMemo(
+ () => getLogEntryAnomaliesRequest.state === 'rejected',
+ [getLogEntryAnomaliesRequest.state]
+ );
+
+ return {
+ logEntryAnomalies,
+ getLogEntryAnomalies,
+ isLoadingLogEntryAnomalies,
+ hasFailedLoadingLogEntryAnomalies,
+ changeSortOptions,
+ sortOptions: reducerState.sortOptions,
+ changePaginationOptions,
+ paginationOptions: reducerState.paginationOptions,
+ fetchPreviousPage: reducerState.page > 1 ? handleFetchPreviousPage : undefined,
+ fetchNextPage: reducerState.hasNextPage ? handleFetchNextPage : undefined,
+ page: reducerState.page,
+ };
+};
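Editor's note: to make the cursor flow concrete, a sketch of two reducer steps. Cursor values are illustrative `[sort field value, tiebreaker]` tuples per the API comment, and `someState` stands in for whatever the module-private reducer currently holds:

```ts
let state = stateReducer(someState, {
  type: 'changeLastReceivedCursors',
  payload: {
    lastReceivedCursors: {
      previousPageCursor: [91, 'anomaly-0001'],
      nextPageCursor: [42, 'anomaly-0025'],
    },
  },
});

state = stateReducer(state, { type: 'fetchNextPage' });
// state.page is incremented, and
// state.paginationCursor === { searchAfter: [42, 'anomaly-0025'] }
```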
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_examples.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_examples.ts
new file mode 100644
index 0000000000000..fae5bd200a415
--- /dev/null
+++ b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_examples.ts
@@ -0,0 +1,65 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { useMemo, useState } from 'react';
+
+import { LogEntryExample } from '../../../../common/http_api';
+import { useTrackedPromise } from '../../../utils/use_tracked_promise';
+import { callGetLogEntryExamplesAPI } from './service_calls/get_log_entry_examples';
+
+export const useLogEntryExamples = ({
+ dataset,
+ endTime,
+ exampleCount,
+ sourceId,
+ startTime,
+ categoryId,
+}: {
+ dataset: string;
+ endTime: number;
+ exampleCount: number;
+ sourceId: string;
+ startTime: number;
+ categoryId?: string;
+}) => {
+ const [logEntryExamples, setLogEntryExamples] = useState<LogEntryExample[]>([]);
+
+ const [getLogEntryExamplesRequest, getLogEntryExamples] = useTrackedPromise(
+ {
+ cancelPreviousOn: 'creation',
+ createPromise: async () => {
+ return await callGetLogEntryExamplesAPI(
+ sourceId,
+ startTime,
+ endTime,
+ dataset,
+ exampleCount,
+ categoryId
+ );
+ },
+ onResolve: ({ data: { examples } }) => {
+ setLogEntryExamples(examples);
+ },
+ },
+ [dataset, endTime, exampleCount, sourceId, startTime]
+ );
+
+ const isLoadingLogEntryExamples = useMemo(() => getLogEntryExamplesRequest.state === 'pending', [
+ getLogEntryExamplesRequest.state,
+ ]);
+
+ const hasFailedLoadingLogEntryExamples = useMemo(
+ () => getLogEntryExamplesRequest.state === 'rejected',
+ [getLogEntryExamplesRequest.state]
+ );
+
+ return {
+ getLogEntryExamples,
+ hasFailedLoadingLogEntryExamples,
+ isLoadingLogEntryExamples,
+ logEntryExamples,
+ };
+};
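Editor's note: a hypothetical consumer, mirroring the expanded-row usage earlier in this diff (`anomaly` and `sourceId` are assumed to be in scope):

```ts
const {
  getLogEntryExamples,
  logEntryExamples,
  isLoadingLogEntryExamples,
  hasFailedLoadingLogEntryExamples,
} = useLogEntryExamples({
  dataset: anomaly.dataset,
  endTime: anomaly.startTime + anomaly.duration,
  exampleCount: 5,
  sourceId,
  startTime: anomaly.startTime,
  categoryId: anomaly.categoryId, // only set for logCategory anomalies
});
```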
diff --git a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_rate_examples.ts b/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_rate_examples.ts
deleted file mode 100644
index 12bcdb2a4b4d6..0000000000000
--- a/x-pack/plugins/infra/public/pages/logs/log_entry_rate/use_log_entry_rate_examples.ts
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { useMemo, useState } from 'react';
-
-import { LogEntryRateExample } from '../../../../common/http_api';
-import { useTrackedPromise } from '../../../utils/use_tracked_promise';
-import { callGetLogEntryRateExamplesAPI } from './service_calls/get_log_entry_rate_examples';
-
-export const useLogEntryRateExamples = ({
- dataset,
- endTime,
- exampleCount,
- sourceId,
- startTime,
-}: {
- dataset: string;
- endTime: number;
- exampleCount: number;
- sourceId: string;
- startTime: number;
-}) => {
- const [logEntryRateExamples, setLogEntryRateExamples] = useState<LogEntryRateExample[]>([]);
-
- const [getLogEntryRateExamplesRequest, getLogEntryRateExamples] = useTrackedPromise(
- {
- cancelPreviousOn: 'creation',
- createPromise: async () => {
- return await callGetLogEntryRateExamplesAPI(
- sourceId,
- startTime,
- endTime,
- dataset,
- exampleCount
- );
- },
- onResolve: ({ data: { examples } }) => {
- setLogEntryRateExamples(examples);
- },
- },
- [dataset, endTime, exampleCount, sourceId, startTime]
- );
-
- const isLoadingLogEntryRateExamples = useMemo(
- () => getLogEntryRateExamplesRequest.state === 'pending',
- [getLogEntryRateExamplesRequest.state]
- );
-
- const hasFailedLoadingLogEntryRateExamples = useMemo(
- () => getLogEntryRateExamplesRequest.state === 'rejected',
- [getLogEntryRateExamplesRequest.state]
- );
-
- return {
- getLogEntryRateExamples,
- hasFailedLoadingLogEntryRateExamples,
- isLoadingLogEntryRateExamples,
- logEntryRateExamples,
- };
-};
diff --git a/x-pack/plugins/infra/server/infra_server.ts b/x-pack/plugins/infra/server/infra_server.ts
index 8af37a36ef745..6596e07ebaca5 100644
--- a/x-pack/plugins/infra/server/infra_server.ts
+++ b/x-pack/plugins/infra/server/infra_server.ts
@@ -15,9 +15,10 @@ import {
initGetLogEntryCategoryDatasetsRoute,
initGetLogEntryCategoryExamplesRoute,
initGetLogEntryRateRoute,
- initGetLogEntryRateExamplesRoute,
+ initGetLogEntryExamplesRoute,
initValidateLogAnalysisDatasetsRoute,
initValidateLogAnalysisIndicesRoute,
+ initGetLogEntryAnomaliesRoute,
} from './routes/log_analysis';
import { initMetricExplorerRoute } from './routes/metrics_explorer';
import { initMetadataRoute } from './routes/metadata';
@@ -51,13 +52,14 @@ export const initInfraServer = (libs: InfraBackendLibs) => {
initGetLogEntryCategoryDatasetsRoute(libs);
initGetLogEntryCategoryExamplesRoute(libs);
initGetLogEntryRateRoute(libs);
+ initGetLogEntryAnomaliesRoute(libs);
initSnapshotRoute(libs);
initNodeDetailsRoute(libs);
initSourceRoute(libs);
initValidateLogAnalysisDatasetsRoute(libs);
initValidateLogAnalysisIndicesRoute(libs);
initLogEntriesRoute(libs);
- initGetLogEntryRateExamplesRoute(libs);
+ initGetLogEntryExamplesRoute(libs);
initLogEntriesHighlightsRoute(libs);
initLogEntriesSummaryRoute(libs);
initLogEntriesSummaryHighlightsRoute(libs);
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/common.ts b/x-pack/plugins/infra/server/lib/log_analysis/common.ts
new file mode 100644
index 0000000000000..0c0b0a0f19982
--- /dev/null
+++ b/x-pack/plugins/infra/server/lib/log_analysis/common.ts
@@ -0,0 +1,29 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import type { MlAnomalyDetectors } from '../../types';
+import { startTracingSpan } from '../../../common/performance_tracing';
+import { NoLogAnalysisMlJobError } from './errors';
+
+export async function fetchMlJob(mlAnomalyDetectors: MlAnomalyDetectors, jobId: string) {
+ const finalizeMlGetJobSpan = startTracingSpan('Fetch ml job from ES');
+ const {
+ jobs: [mlJob],
+ } = await mlAnomalyDetectors.jobs(jobId);
+
+ const mlGetJobSpan = finalizeMlGetJobSpan();
+
+ if (mlJob == null) {
+ throw new NoLogAnalysisMlJobError(`Failed to find ml job ${jobId}.`);
+ }
+
+ return {
+ mlJob,
+ timing: {
+ spans: [mlGetJobSpan],
+ },
+ };
+}
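Editor's note: callers can treat a missing job as non-fatal, as `getLogEntryAnomalies` does below. A sketch (the error handling is the point; the job id would come from `getJobId`):

```ts
try {
  const { mlJob, timing } = await fetchMlJob(mlAnomalyDetectors, jobId);
  // use mlJob, and fold timing.spans into the request's tracing data
} catch (e) {
  // NoLogAnalysisMlJobError: the job is not configured in this space; skip it
}
```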
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/errors.ts b/x-pack/plugins/infra/server/lib/log_analysis/errors.ts
index e07126416f4ce..09fee8844fbc5 100644
--- a/x-pack/plugins/infra/server/lib/log_analysis/errors.ts
+++ b/x-pack/plugins/infra/server/lib/log_analysis/errors.ts
@@ -33,3 +33,10 @@ export class UnknownCategoryError extends Error {
Object.setPrototypeOf(this, new.target.prototype);
}
}
+
+export class InsufficientAnomalyMlJobsConfigured extends Error {
+ constructor(message?: string) {
+ super(message);
+ Object.setPrototypeOf(this, new.target.prototype);
+ }
+}
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/index.ts b/x-pack/plugins/infra/server/lib/log_analysis/index.ts
index 44c2bafce4194..c9a176be0a28f 100644
--- a/x-pack/plugins/infra/server/lib/log_analysis/index.ts
+++ b/x-pack/plugins/infra/server/lib/log_analysis/index.ts
@@ -7,3 +7,4 @@
export * from './errors';
export * from './log_entry_categories_analysis';
export * from './log_entry_rate_analysis';
+export * from './log_entry_anomalies';
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_anomalies.ts b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_anomalies.ts
new file mode 100644
index 0000000000000..12ae516564d66
--- /dev/null
+++ b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_anomalies.ts
@@ -0,0 +1,398 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { RequestHandlerContext } from 'src/core/server';
+import { InfraRequestHandlerContext } from '../../types';
+import { TracingSpan, startTracingSpan } from '../../../common/performance_tracing';
+import { fetchMlJob } from './common';
+import {
+ getJobId,
+ logEntryCategoriesJobTypes,
+ logEntryRateJobTypes,
+ jobCustomSettingsRT,
+} from '../../../common/log_analysis';
+import { Sort, Pagination } from '../../../common/http_api/log_analysis';
+import type { MlSystem } from '../../types';
+import { createLogEntryAnomaliesQuery, logEntryAnomaliesResponseRT } from './queries';
+import {
+ InsufficientAnomalyMlJobsConfigured,
+ InsufficientLogAnalysisMlJobConfigurationError,
+ UnknownCategoryError,
+} from './errors';
+import { decodeOrThrow } from '../../../common/runtime_types';
+import {
+ createLogEntryExamplesQuery,
+ logEntryExamplesResponseRT,
+} from './queries/log_entry_examples';
+import { InfraSource } from '../sources';
+import { KibanaFramework } from '../adapters/framework/kibana_framework_adapter';
+import { fetchLogEntryCategories } from './log_entry_categories_analysis';
+
+interface MappedAnomalyHit {
+ id: string;
+ anomalyScore: number;
+ dataset: string;
+ typical: number;
+ actual: number;
+ jobId: string;
+ startTime: number;
+ duration: number;
+ categoryId?: string;
+}
+
+export async function getLogEntryAnomalies(
+ context: RequestHandlerContext & { infra: Required<InfraRequestHandlerContext> },
+ sourceId: string,
+ startTime: number,
+ endTime: number,
+ sort: Sort,
+ pagination: Pagination
+) {
+ const finalizeLogEntryAnomaliesSpan = startTracingSpan('get log entry anomalies');
+
+ const logRateJobId = getJobId(context.infra.spaceId, sourceId, logEntryRateJobTypes[0]);
+ const logCategoriesJobId = getJobId(
+ context.infra.spaceId,
+ sourceId,
+ logEntryCategoriesJobTypes[0]
+ );
+
+ const jobIds: string[] = [];
+ let jobSpans: TracingSpan[] = [];
+
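+ // Resolve whichever of the two ML jobs exist. Either may be absent when the
+ // user has only set up log rate or only log categorisation analysis.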
+ try {
+ const {
+ timing: { spans },
+ } = await fetchMlJob(context.infra.mlAnomalyDetectors, logRateJobId);
+ jobIds.push(logRateJobId);
+ jobSpans = [...jobSpans, ...spans];
+ } catch (e) {
+ // Job wasn't found
+ }
+
+ try {
+ const {
+ timing: { spans },
+ } = await fetchMlJob(context.infra.mlAnomalyDetectors, logCategoriesJobId);
+ jobIds.push(logCategoriesJobId);
+ jobSpans = [...jobSpans, ...spans];
+ } catch (e) {
+ // Job wasn't found
+ }
+
+ if (jobIds.length === 0) {
+ throw new InsufficientAnomalyMlJobsConfigured(
+ 'Log rate or categorisation ML jobs need to be configured to search anomalies'
+ );
+ }
+
+ const {
+ anomalies,
+ paginationCursors,
+ hasMoreEntries,
+ timing: { spans: fetchLogEntryAnomaliesSpans },
+ } = await fetchLogEntryAnomalies(
+ context.infra.mlSystem,
+ jobIds,
+ startTime,
+ endTime,
+ sort,
+ pagination
+ );
+
+ const data = anomalies.map((anomaly) => {
+ const { jobId } = anomaly;
+
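+ // The job a record belongs to determines whether it is parsed as a log rate
+ // or a log category anomaly.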
+ if (jobId === logRateJobId) {
+ return parseLogRateAnomalyResult(anomaly, logRateJobId);
+ } else {
+ return parseCategoryAnomalyResult(anomaly, logCategoriesJobId);
+ }
+ });
+
+ const logEntryAnomaliesSpan = finalizeLogEntryAnomaliesSpan();
+
+ return {
+ data,
+ paginationCursors,
+ hasMoreEntries,
+ timing: {
+ spans: [logEntryAnomaliesSpan, ...jobSpans, ...fetchLogEntryAnomaliesSpans],
+ },
+ };
+}
+
+const parseLogRateAnomalyResult = (anomaly: MappedAnomalyHit, jobId: string) => {
+ const {
+ id,
+ anomalyScore,
+ dataset,
+ typical,
+ actual,
+ duration,
+ startTime: anomalyStartTime,
+ } = anomaly;
+
+ return {
+ id,
+ anomalyScore,
+ dataset,
+ typical,
+ actual,
+ duration,
+ startTime: anomalyStartTime,
+ type: 'logRate' as const,
+ jobId,
+ };
+};
+
+const parseCategoryAnomalyResult = (anomaly: MappedAnomalyHit, jobId: string) => {
+ const {
+ id,
+ anomalyScore,
+ dataset,
+ typical,
+ actual,
+ duration,
+ startTime: anomalyStartTime,
+ categoryId,
+ } = anomaly;
+
+ return {
+ id,
+ anomalyScore,
+ dataset,
+ typical,
+ actual,
+ duration,
+ startTime: anomalyStartTime,
+ categoryId,
+ type: 'logCategory' as const,
+ jobId,
+ };
+};
+
+async function fetchLogEntryAnomalies(
+ mlSystem: MlSystem,
+ jobIds: string[],
+ startTime: number,
+ endTime: number,
+ sort: Sort,
+ pagination: Pagination
+) {
+ // Request 1 extra entry on top of the pageSize to determine whether there are
+ // more entries to be fetched. Otherwise the client couldn't tell whether
+ // entries.length === pageSize means a further page exists or the results are
+ // simply exhausted.
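+ // For example, with a pageSize of 50 we request 51 entries; receiving 51 hits
+ // means at least one further page exists.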
+ const expandedPagination = { ...pagination, pageSize: pagination.pageSize + 1 };
+
+ const finalizeFetchLogEntryAnomaliesSpan = startTracingSpan('fetch log entry anomalies');
+
+ const results = decodeOrThrow(logEntryAnomaliesResponseRT)(
+ await mlSystem.mlAnomalySearch(
+ createLogEntryAnomaliesQuery(jobIds, startTime, endTime, sort, expandedPagination)
+ )
+ );
+
+ const {
+ hits: { hits },
+ } = results;
+ const hasMoreEntries = hits.length > pagination.pageSize;
+
+ // The extra entry has served its purpose of determining hasMoreEntries, so it can be removed.
+ if (hasMoreEntries) {
+ hits.pop();
+ }
+
+ // To "search_before" the sort order will have been reversed for ES.
+ // The results are now reversed back, to match the requested sort.
+ if (pagination.cursor && 'searchBefore' in pagination.cursor) {
+ hits.reverse();
+ }
+
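+ // The ES sort values of the first and last hits double as the cursors for the
+ // previous and next pages.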
+ const paginationCursors =
+ hits.length > 0
+ ? {
+ previousPageCursor: hits[0].sort,
+ nextPageCursor: hits[hits.length - 1].sort,
+ }
+ : undefined;
+
+ const anomalies = hits.map((result) => {
+ const {
+ job_id,
+ record_score: anomalyScore,
+ typical,
+ actual,
+ partition_field_value: dataset,
+ bucket_span: duration,
+ timestamp: anomalyStartTime,
+ by_field_value: categoryId,
+ } = result._source;
+
+ return {
+ id: result._id,
+ anomalyScore,
+ dataset,
+ typical: typical[0],
+ actual: actual[0],
+ jobId: job_id,
+ startTime: anomalyStartTime,
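+ // bucket_span is reported in seconds; convert to milliseconds to match the
+ // epoch timestamps used elsewhere.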
+ duration: duration * 1000,
+ categoryId,
+ };
+ });
+
+ const fetchLogEntryAnomaliesSpan = finalizeFetchLogEntryAnomaliesSpan();
+
+ return {
+ anomalies,
+ paginationCursors,
+ hasMoreEntries,
+ timing: {
+ spans: [fetchLogEntryAnomaliesSpan],
+ },
+ };
+}
+
+export async function getLogEntryExamples(
+ context: RequestHandlerContext & { infra: Required<InfraRequestHandlerContext> },
+ sourceId: string,
+ startTime: number,
+ endTime: number,
+ dataset: string,
+ exampleCount: number,
+ sourceConfiguration: InfraSource,
+ callWithRequest: KibanaFramework['callWithRequest'],
+ categoryId?: string
+) {
+ const finalizeLogEntryExamplesSpan = startTracingSpan('get log entry examples');
+
+ const jobId = getJobId(
+ context.infra.spaceId,
+ sourceId,
+ categoryId != null ? logEntryCategoriesJobTypes[0] : logEntryRateJobTypes[0]
+ );
+
+ const {
+ mlJob,
+ timing: { spans: fetchMlJobSpans },
+ } = await fetchMlJob(context.infra.mlAnomalyDetectors, jobId);
+
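+ // The ML job records the source's index pattern and timestamp field in its
+ // custom settings when it is created; examples are fetched from those indices.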
+ const customSettings = decodeOrThrow(jobCustomSettingsRT)(mlJob.custom_settings);
+ const indices = customSettings?.logs_source_config?.indexPattern;
+ const timestampField = customSettings?.logs_source_config?.timestampField;
+ const tiebreakerField = sourceConfiguration.configuration.fields.tiebreaker;
+
+ if (indices == null || timestampField == null) {
+ throw new InsufficientLogAnalysisMlJobConfigurationError(
+ `Failed to find index configuration for ml job ${jobId}`
+ );
+ }
+
+ const {
+ examples,
+ timing: { spans: fetchLogEntryExamplesSpans },
+ } = await fetchLogEntryExamples(
+ context,
+ sourceId,
+ indices,
+ timestampField,
+ tiebreakerField,
+ startTime,
+ endTime,
+ dataset,
+ exampleCount,
+ callWithRequest,
+ categoryId
+ );
+
+ const logEntryExamplesSpan = finalizeLogEntryExamplesSpan();
+
+ return {
+ data: examples,
+ timing: {
+ spans: [logEntryExamplesSpan, ...fetchMlJobSpans, ...fetchLogEntryExamplesSpans],
+ },
+ };
+}
+
+export async function fetchLogEntryExamples(
+ context: RequestHandlerContext & { infra: Required<InfraRequestHandlerContext> },
+ sourceId: string,
+ indices: string,
+ timestampField: string,
+ tiebreakerField: string,
+ startTime: number,
+ endTime: number,
+ dataset: string,
+ exampleCount: number,
+ callWithRequest: KibanaFramework['callWithRequest'],
+ categoryId?: string
+) {
+ const finalizeEsSearchSpan = startTracingSpan('Fetch log entry examples from ES');
+
+ let categoryQuery: string | undefined;
+
+ // Examples should be further scoped to a specific ML category
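+ // Look up the category so its terms can be matched against example messages.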
+ if (categoryId) {
+ const parsedCategoryId = parseInt(categoryId, 10);
+
+ const logEntryCategoriesCountJobId = getJobId(
+ context.infra.spaceId,
+ sourceId,
+ logEntryCategoriesJobTypes[0]
+ );
+
+ const { logEntryCategoriesById } = await fetchLogEntryCategories(
+ context,
+ logEntryCategoriesCountJobId,
+ [parsedCategoryId]
+ );
+
+ const category = logEntryCategoriesById[parsedCategoryId];
+
+ if (category == null) {
+ throw new UnknownCategoryError(parsedCategoryId);
+ }
+
+ categoryQuery = category._source.terms;
+ }
+
+ const {
+ hits: { hits },
+ } = decodeOrThrow(logEntryExamplesResponseRT)(
+ await callWithRequest(
+ context,
+ 'search',
+ createLogEntryExamplesQuery(
+ indices,
+ timestampField,
+ tiebreakerField,
+ startTime,
+ endTime,
+ dataset,
+ exampleCount,
+ categoryQuery
+ )
+ )
+ );
+
+ const esSearchSpan = finalizeEsSearchSpan();
+
+ return {
+ examples: hits.map((hit) => ({
+ id: hit._id,
+ dataset: hit._source.event?.dataset ?? '',
+ message: hit._source.message ?? '',
+ timestamp: hit.sort[0],
+ tiebreaker: hit.sort[1],
+ })),
+ timing: {
+ spans: [esSearchSpan],
+ },
+ };
+}
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_categories_analysis.ts b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_categories_analysis.ts
index 4f244d724405e..6d00ba56e0e66 100644
--- a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_categories_analysis.ts
+++ b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_categories_analysis.ts
@@ -17,7 +17,6 @@ import { decodeOrThrow } from '../../../common/runtime_types';
import type { MlAnomalyDetectors, MlSystem } from '../../types';
import {
InsufficientLogAnalysisMlJobConfigurationError,
- NoLogAnalysisMlJobError,
NoLogAnalysisResultsIndexError,
UnknownCategoryError,
} from './errors';
@@ -45,6 +44,7 @@ import {
topLogEntryCategoriesResponseRT,
} from './queries/top_log_entry_categories';
import { InfraSource } from '../sources';
+import { fetchMlJob } from './common';
const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000;
@@ -213,7 +213,7 @@ export async function getLogEntryCategoryExamples(
const {
mlJob,
timing: { spans: fetchMlJobSpans },
- } = await fetchMlJob(context, logEntryCategoriesCountJobId);
+ } = await fetchMlJob(context.infra.mlAnomalyDetectors, logEntryCategoriesCountJobId);
const customSettings = decodeOrThrow(jobCustomSettingsRT)(mlJob.custom_settings);
const indices = customSettings?.logs_source_config?.indexPattern;
@@ -330,7 +330,7 @@ async function fetchTopLogEntryCategories(
};
}
-async function fetchLogEntryCategories(
+export async function fetchLogEntryCategories(
context: { infra: { mlSystem: MlSystem } },
logEntryCategoriesCountJobId: string,
categoryIds: number[]
@@ -452,30 +452,6 @@ async function fetchTopLogEntryCategoryHistograms(
};
}
-async function fetchMlJob(
- context: { infra: { mlAnomalyDetectors: MlAnomalyDetectors } },
- logEntryCategoriesCountJobId: string
-) {
- const finalizeMlGetJobSpan = startTracingSpan('Fetch ml job from ES');
-
- const {
- jobs: [mlJob],
- } = await context.infra.mlAnomalyDetectors.jobs(logEntryCategoriesCountJobId);
-
- const mlGetJobSpan = finalizeMlGetJobSpan();
-
- if (mlJob == null) {
- throw new NoLogAnalysisMlJobError(`Failed to find ml job ${logEntryCategoriesCountJobId}.`);
- }
-
- return {
- mlJob,
- timing: {
- spans: [mlGetJobSpan],
- },
- };
-}
-
async function fetchLogEntryCategoryExamples(
requestContext: { core: { elasticsearch: { legacy: { client: ILegacyScopedClusterClient } } } },
indices: string,
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_rate_analysis.ts b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_rate_analysis.ts
index 290cf03b67365..0323980dcd013 100644
--- a/x-pack/plugins/infra/server/lib/log_analysis/log_entry_rate_analysis.ts
+++ b/x-pack/plugins/infra/server/lib/log_analysis/log_entry_rate_analysis.ts
@@ -7,7 +7,6 @@
import { pipe } from 'fp-ts/lib/pipeable';
import { map, fold } from 'fp-ts/lib/Either';
import { identity } from 'fp-ts/lib/function';
-import { RequestHandlerContext } from 'src/core/server';
import { throwErrors, createPlainError } from '../../../common/runtime_types';
import {
logRateModelPlotResponseRT,
@@ -15,22 +14,9 @@ import {
LogRateModelPlotBucket,
CompositeTimestampPartitionKey,
} from './queries';
-import { startTracingSpan } from '../../../common/performance_tracing';
-import { decodeOrThrow } from '../../../common/runtime_types';
-import { getJobId, jobCustomSettingsRT } from '../../../common/log_analysis';
-import {
- createLogEntryRateExamplesQuery,
- logEntryRateExamplesResponseRT,
-} from './queries/log_entry_rate_examples';
-import {
- InsufficientLogAnalysisMlJobConfigurationError,
- NoLogAnalysisMlJobError,
- NoLogAnalysisResultsIndexError,
-} from './errors';
-import { InfraSource } from '../sources';
+import { getJobId } from '../../../common/log_analysis';
+import { NoLogAnalysisResultsIndexError } from './errors';
import type { MlSystem } from '../../types';
-import { InfraRequestHandlerContext } from '../../types';
-import { KibanaFramework } from '../adapters/framework/kibana_framework_adapter';
const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000;
@@ -143,130 +129,3 @@ export async function getLogEntryRateBuckets(
}
}, []);
}
-
-export async function getLogEntryRateExamples(
- context: RequestHandlerContext & { infra: Required<InfraRequestHandlerContext> },
- sourceId: string,
- startTime: number,
- endTime: number,
- dataset: string,
- exampleCount: number,
- sourceConfiguration: InfraSource,
- callWithRequest: KibanaFramework['callWithRequest']
-) {
- const finalizeLogEntryRateExamplesSpan = startTracingSpan(
- 'get log entry rate example log entries'
- );
-
- const jobId = getJobId(context.infra.spaceId, sourceId, 'log-entry-rate');
-
- const {
- mlJob,
- timing: { spans: fetchMlJobSpans },
- } = await fetchMlJob(context, jobId);
-
- const customSettings = decodeOrThrow(jobCustomSettingsRT)(mlJob.custom_settings);
- const indices = customSettings?.logs_source_config?.indexPattern;
- const timestampField = customSettings?.logs_source_config?.timestampField;
- const tiebreakerField = sourceConfiguration.configuration.fields.tiebreaker;
-
- if (indices == null || timestampField == null) {
- throw new InsufficientLogAnalysisMlJobConfigurationError(
- `Failed to find index configuration for ml job ${jobId}`
- );
- }
-
- const {
- examples,
- timing: { spans: fetchLogEntryRateExamplesSpans },
- } = await fetchLogEntryRateExamples(
- context,
- indices,
- timestampField,
- tiebreakerField,
- startTime,
- endTime,
- dataset,
- exampleCount,
- callWithRequest
- );
-
- const logEntryRateExamplesSpan = finalizeLogEntryRateExamplesSpan();
-
- return {
- data: examples,
- timing: {
- spans: [logEntryRateExamplesSpan, ...fetchMlJobSpans, ...fetchLogEntryRateExamplesSpans],
- },
- };
-}
-
-export async function fetchLogEntryRateExamples(
- context: RequestHandlerContext & { infra: Required<InfraRequestHandlerContext> },
- indices: string,
- timestampField: string,
- tiebreakerField: string,
- startTime: number,
- endTime: number,
- dataset: string,
- exampleCount: number,
- callWithRequest: KibanaFramework['callWithRequest']
-) {
- const finalizeEsSearchSpan = startTracingSpan('Fetch log rate examples from ES');
-
- const {
- hits: { hits },
- } = decodeOrThrow(logEntryRateExamplesResponseRT)(
- await callWithRequest(
- context,
- 'search',
- createLogEntryRateExamplesQuery(
- indices,
- timestampField,
- tiebreakerField,
- startTime,
- endTime,
- dataset,
- exampleCount
- )
- )
- );
-
- const esSearchSpan = finalizeEsSearchSpan();
-
- return {
- examples: hits.map((hit) => ({
- id: hit._id,
- dataset,
- message: hit._source.message ?? '',
- timestamp: hit.sort[0],
- tiebreaker: hit.sort[1],
- })),
- timing: {
- spans: [esSearchSpan],
- },
- };
-}
-
-async function fetchMlJob(
- context: RequestHandlerContext & { infra: Required<InfraRequestHandlerContext> },
- logEntryRateJobId: string
-) {
- const finalizeMlGetJobSpan = startTracingSpan('Fetch ml job from ES');
- const {
- jobs: [mlJob],
- } = await context.infra.mlAnomalyDetectors.jobs(logEntryRateJobId);
-
- const mlGetJobSpan = finalizeMlGetJobSpan();
-
- if (mlJob == null) {
- throw new NoLogAnalysisMlJobError(`Failed to find ml job ${logEntryRateJobId}.`);
- }
-
- return {
- mlJob,
- timing: {
- spans: [mlGetJobSpan],
- },
- };
-}
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/queries/common.ts b/x-pack/plugins/infra/server/lib/log_analysis/queries/common.ts
index eacf29b303db0..87394028095de 100644
--- a/x-pack/plugins/infra/server/lib/log_analysis/queries/common.ts
+++ b/x-pack/plugins/infra/server/lib/log_analysis/queries/common.ts
@@ -21,6 +21,14 @@ export const createJobIdFilters = (jobId: string) => [
},
];
+export const createJobIdsFilters = (jobIds: string[]) => [
+ {
+ terms: {
+ job_id: jobIds,
+ },
+ },
+];
+
export const createTimeRangeFilters = (startTime: number, endTime: number) => [
{
range: {
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/queries/index.ts b/x-pack/plugins/infra/server/lib/log_analysis/queries/index.ts
index 8c470acbf02fb..792c5bf98b538 100644
--- a/x-pack/plugins/infra/server/lib/log_analysis/queries/index.ts
+++ b/x-pack/plugins/infra/server/lib/log_analysis/queries/index.ts
@@ -6,3 +6,4 @@
export * from './log_entry_rate';
export * from './top_log_entry_categories';
+export * from './log_entry_anomalies';
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_anomalies.ts b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_anomalies.ts
new file mode 100644
index 0000000000000..fc72776ea5cac
--- /dev/null
+++ b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_anomalies.ts
@@ -0,0 +1,128 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import * as rt from 'io-ts';
+import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearch_runtime_types';
+import {
+ createJobIdsFilters,
+ createTimeRangeFilters,
+ createResultTypeFilters,
+ defaultRequestParameters,
+} from './common';
+import { Sort, Pagination } from '../../../../common/http_api/log_analysis';
+
+// TODO: Reassess validity of this against ML docs
+const TIEBREAKER_FIELD = '_doc';
+
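+// Maps the API's sort field names onto the underlying ML record fields.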
+const sortToMlFieldMap = {
+ dataset: 'partition_field_value',
+ anomalyScore: 'record_score',
+ startTime: 'timestamp',
+};
+
+export const createLogEntryAnomaliesQuery = (
+ jobIds: string[],
+ startTime: number,
+ endTime: number,
+ sort: Sort,
+ pagination: Pagination
+) => {
+ const { field } = sort;
+ const { pageSize } = pagination;
+
+ const filters = [
+ ...createJobIdsFilters(jobIds),
+ ...createTimeRangeFilters(startTime, endTime),
+ ...createResultTypeFilters(['record']),
+ ];
+
+ const sourceFields = [
+ 'job_id',
+ 'record_score',
+ 'typical',
+ 'actual',
+ 'partition_field_value',
+ 'timestamp',
+ 'bucket_span',
+ 'by_field_value',
+ ];
+
+ const { querySortDirection, queryCursor } = parsePaginationCursor(sort, pagination);
+
+ const sortOptions = [
+ { [sortToMlFieldMap[field]]: querySortDirection },
+ { [TIEBREAKER_FIELD]: querySortDirection }, // Tiebreaker
+ ];
+
+ const resultsQuery = {
+ ...defaultRequestParameters,
+ body: {
+ query: {
+ bool: {
+ filter: filters,
+ },
+ },
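+ // undefined on the first page; JSON serialization drops the key so ES
+ // starts from the beginning of the sorted results.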
+ search_after: queryCursor,
+ sort: sortOptions,
+ size: pageSize,
+ _source: sourceFields,
+ },
+ };
+
+ return resultsQuery;
+};
+
+export const logEntryAnomalyHitRT = rt.type({
+ _id: rt.string,
+ _source: rt.intersection([
+ rt.type({
+ job_id: rt.string,
+ record_score: rt.number,
+ typical: rt.array(rt.number),
+ actual: rt.array(rt.number),
+ partition_field_value: rt.string,
+ bucket_span: rt.number,
+ timestamp: rt.number,
+ }),
+ rt.partial({
+ by_field_value: rt.string,
+ }),
+ ]),
+ sort: rt.tuple([rt.union([rt.string, rt.number]), rt.union([rt.string, rt.number])]),
+});
+
+export type LogEntryAnomalyHit = rt.TypeOf<typeof logEntryAnomalyHitRT>;
+
+export const logEntryAnomaliesResponseRT = rt.intersection([
+ commonSearchSuccessResponseFieldsRT,
+ rt.type({
+ hits: rt.type({
+ hits: rt.array(logEntryAnomalyHitRT),
+ }),
+ }),
+]);
+
+export type LogEntryAnomaliesResponseRT = rt.TypeOf<typeof logEntryAnomaliesResponseRT>;
+
+const parsePaginationCursor = (sort: Sort, pagination: Pagination) => {
+ const { cursor } = pagination;
+ const { direction } = sort;
+
+ if (!cursor) {
+ return { querySortDirection: direction, queryCursor: undefined };
+ }
+
+ // We always use ES's search_after to paginate; to mimic "search_before"
+ // behaviour we reverse the user's chosen sort direction for the ES query.
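+ // For example, sorting by startTime desc with a cursor of
+ // { searchBefore: [1594896000000, 42] } issues an ascending query with
+ // search_after: [1594896000000, 42]; the caller then reverses the returned
+ // hits to restore the requested descending order.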
+ if ('searchBefore' in cursor) {
+ return {
+ querySortDirection: direction === 'desc' ? 'asc' : 'desc',
+ queryCursor: cursor.searchBefore,
+ };
+ } else {
+ return { querySortDirection: direction, queryCursor: cursor.searchAfter };
+ }
+};
diff --git a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_rate_examples.ts b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_examples.ts
similarity index 59%
rename from x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_rate_examples.ts
rename to x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_examples.ts
index ef06641caf797..74a664e78dcd6 100644
--- a/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_rate_examples.ts
+++ b/x-pack/plugins/infra/server/lib/log_analysis/queries/log_entry_examples.ts
@@ -10,14 +10,15 @@ import { commonSearchSuccessResponseFieldsRT } from '../../../utils/elasticsearc
import { defaultRequestParameters } from './common';
import { partitionField } from '../../../../common/log_analysis';
-export const createLogEntryRateExamplesQuery = (
+export const createLogEntryExamplesQuery = (
indices: string,
timestampField: string,
tiebreakerField: string,
startTime: number,
endTime: number,
dataset: string,
- exampleCount: number
+ exampleCount: number,
+ categoryQuery?: string
) => ({
...defaultRequestParameters,
body: {
@@ -32,11 +33,27 @@ export const createLogEntryRateExamplesQuery = (
},
},
},
- {
- term: {
- [partitionField]: dataset,
- },
- },
+ ...(!!dataset
+ ? [
+ {
+ term: {
+ [partitionField]: dataset,
+ },
+ },
+ ]
+ : []),
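+ // When scoped to an ML category, every term of the category's terms string
+ // must appear in the example message (operator: AND).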
+ ...(categoryQuery
+ ? [
+ {
+ match: {
+ message: {
+ query: categoryQuery,
+ operator: 'AND',
+ },
+ },
+ },
+ ]
+ : []),
],
},
},
@@ -47,7 +64,7 @@ export const createLogEntryRateExamplesQuery = (
size: exampleCount,
});
-export const logEntryRateExampleHitRT = rt.type({
+export const logEntryExampleHitRT = rt.type({
_id: rt.string,
_source: rt.partial({
event: rt.partial({
@@ -58,15 +75,15 @@ export const logEntryRateExampleHitRT = rt.type({
sort: rt.tuple([rt.number, rt.number]),
});
-export type LogEntryRateExampleHit = rt.TypeOf<typeof logEntryRateExampleHitRT>;
+export type LogEntryExampleHit = rt.TypeOf<typeof logEntryExampleHitRT>;
-export const logEntryRateExamplesResponseRT = rt.intersection([
+export const logEntryExamplesResponseRT = rt.intersection([
commonSearchSuccessResponseFieldsRT,
rt.type({
hits: rt.type({
- hits: rt.array(logEntryRateExampleHitRT),
+ hits: rt.array(logEntryExampleHitRT),
}),
}),
]);
-export type LogEntryRateExamplesResponse = rt.TypeOf<typeof logEntryRateExamplesResponseRT>;
+export type LogEntryExamplesResponse = rt.TypeOf<typeof logEntryExamplesResponseRT>;
diff --git a/x-pack/plugins/infra/server/routes/log_analysis/results/index.ts b/x-pack/plugins/infra/server/routes/log_analysis/results/index.ts
index 30b6be435837b..cbd89db97236f 100644
--- a/x-pack/plugins/infra/server/routes/log_analysis/results/index.ts
+++ b/x-pack/plugins/infra/server/routes/log_analysis/results/index.ts
@@ -8,4 +8,5 @@ export * from './log_entry_categories';
export * from './log_entry_category_datasets';
export * from './log_entry_category_examples';
export * from './log_entry_rate';
-export * from './log_entry_rate_examples';
+export * from './log_entry_examples';
+export * from './log_entry_anomalies';
diff --git a/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_anomalies.ts b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_anomalies.ts
new file mode 100644
index 0000000000000..f4911658ea496
--- /dev/null
+++ b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_anomalies.ts
@@ -0,0 +1,112 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import Boom from 'boom';
+import { InfraBackendLibs } from '../../../lib/infra_types';
+import {
+ LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH,
+ getLogEntryAnomaliesSuccessReponsePayloadRT,
+ getLogEntryAnomaliesRequestPayloadRT,
+ GetLogEntryAnomaliesRequestPayload,
+ Sort,
+ Pagination,
+} from '../../../../common/http_api/log_analysis';
+import { createValidationFunction } from '../../../../common/runtime_types';
+import { assertHasInfraMlPlugins } from '../../../utils/request_context';
+import { getLogEntryAnomalies } from '../../../lib/log_analysis';
+
+export const initGetLogEntryAnomaliesRoute = ({ framework }: InfraBackendLibs) => {
+ framework.registerRoute(
+ {
+ method: 'post',
+ path: LOG_ANALYSIS_GET_LOG_ENTRY_ANOMALIES_PATH,
+ validate: {
+ body: createValidationFunction(getLogEntryAnomaliesRequestPayloadRT),
+ },
+ },
+ framework.router.handleLegacyErrors(async (requestContext, request, response) => {
+ const {
+ data: {
+ sourceId,
+ timeRange: { startTime, endTime },
+ sort: sortParam,
+ pagination: paginationParam,
+ },
+ } = request.body;
+
+ const { sort, pagination } = getSortAndPagination(sortParam, paginationParam);
+
+ try {
+ assertHasInfraMlPlugins(requestContext);
+
+ const {
+ data: logEntryAnomalies,
+ paginationCursors,
+ hasMoreEntries,
+ timing,
+ } = await getLogEntryAnomalies(
+ requestContext,
+ sourceId,
+ startTime,
+ endTime,
+ sort,
+ pagination
+ );
+
+ return response.ok({
+ body: getLogEntryAnomaliesSuccessReponsePayloadRT.encode({
+ data: {
+ anomalies: logEntryAnomalies,
+ hasMoreEntries,
+ paginationCursors,
+ },
+ timing,
+ }),
+ });
+ } catch (error) {
+ if (Boom.isBoom(error)) {
+ throw error;
+ }
+
+ return response.customError({
+ statusCode: error.statusCode ?? 500,
+ body: {
+ message: error.message ?? 'An unexpected error occurred',
+ },
+ });
+ }
+ })
+ );
+};
+
+const getSortAndPagination = (
+ sort: Partial<GetLogEntryAnomaliesRequestPayload['data']['sort']> = {},
+ pagination: Partial<GetLogEntryAnomaliesRequestPayload['data']['pagination']> = {}
+): {
+ sort: Sort;
+ pagination: Pagination;
+} => {
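+ // Clients may omit sort and pagination settings, so fill in server-side defaults.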
+ const sortDefaults = {
+ field: 'anomalyScore' as const,
+ direction: 'desc' as const,
+ };
+
+ const sortWithDefaults = {
+ ...sortDefaults,
+ ...sort,
+ };
+
+ const paginationDefaults = {
+ pageSize: 50,
+ };
+
+ const paginationWithDefaults = {
+ ...paginationDefaults,
+ ...pagination,
+ };
+
+ return { sort: sortWithDefaults, pagination: paginationWithDefaults };
+};
diff --git a/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_rate_examples.ts b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_examples.ts
similarity index 75%
rename from x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_rate_examples.ts
rename to x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_examples.ts
index b8ebcc66911dc..be4caee769506 100644
--- a/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_rate_examples.ts
+++ b/x-pack/plugins/infra/server/routes/log_analysis/results/log_entry_examples.ts
@@ -7,21 +7,21 @@
import Boom from 'boom';
import { createValidationFunction } from '../../../../common/runtime_types';
import { InfraBackendLibs } from '../../../lib/infra_types';
-import { NoLogAnalysisResultsIndexError, getLogEntryRateExamples } from '../../../lib/log_analysis';
+import { NoLogAnalysisResultsIndexError, getLogEntryExamples } from '../../../lib/log_analysis';
import { assertHasInfraMlPlugins } from '../../../utils/request_context';
import {
- getLogEntryRateExamplesRequestPayloadRT,
- getLogEntryRateExamplesSuccessReponsePayloadRT,
+ getLogEntryExamplesRequestPayloadRT,
+ getLogEntryExamplesSuccessReponsePayloadRT,
LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH,
} from '../../../../common/http_api/log_analysis';
-export const initGetLogEntryRateExamplesRoute = ({ framework, sources }: InfraBackendLibs) => {
+export const initGetLogEntryExamplesRoute = ({ framework, sources }: InfraBackendLibs) => {
framework.registerRoute(
{
method: 'post',
path: LOG_ANALYSIS_GET_LOG_ENTRY_RATE_EXAMPLES_PATH,
validate: {
- body: createValidationFunction(getLogEntryRateExamplesRequestPayloadRT),
+ body: createValidationFunction(getLogEntryExamplesRequestPayloadRT),
},
},
framework.router.handleLegacyErrors(async (requestContext, request, response) => {
@@ -31,6 +31,7 @@ export const initGetLogEntryRateExamplesRoute = ({ framework, sources }: InfraBa
exampleCount,
sourceId,
timeRange: { startTime, endTime },
+ categoryId,
},
} = request.body;
@@ -42,7 +43,7 @@ export const initGetLogEntryRateExamplesRoute = ({ framework, sources }: InfraBa
try {
assertHasInfraMlPlugins(requestContext);
- const { data: logEntryRateExamples, timing } = await getLogEntryRateExamples(
+ const { data: logEntryExamples, timing } = await getLogEntryExamples(
requestContext,
sourceId,
startTime,
@@ -50,13 +51,14 @@ export const initGetLogEntryRateExamplesRoute = ({ framework, sources }: InfraBa
dataset,
exampleCount,
sourceConfiguration,
- framework.callWithRequest
+ framework.callWithRequest,
+ categoryId
);
return response.ok({
- body: getLogEntryRateExamplesSuccessReponsePayloadRT.encode({
+ body: getLogEntryExamplesSuccessReponsePayloadRT.encode({
data: {
- examples: logEntryRateExamples,
+ examples: logEntryExamples,
},
timing,
}),
diff --git a/x-pack/plugins/translations/translations/ja-JP.json b/x-pack/plugins/translations/translations/ja-JP.json
index 92285d8bf72f8..c750d21698a50 100644
--- a/x-pack/plugins/translations/translations/ja-JP.json
+++ b/x-pack/plugins/translations/translations/ja-JP.json
@@ -7472,7 +7472,6 @@
"xpack.infra.logs.analysis.anomaliesSectionLineSeriesName": "15 分ごとのログエントリー (平均)",
"xpack.infra.logs.analysis.anomaliesSectionLoadingAriaLabel": "異常を読み込み中",
"xpack.infra.logs.analysis.anomaliesSectionTitle": "異常",
- "xpack.infra.logs.analysis.anomalySectionNoAnomaliesTitle": "異常が検出されませんでした。",
"xpack.infra.logs.analysis.anomalySectionNoDataBody": "時間範囲を調整する必要があるかもしれません。",
"xpack.infra.logs.analysis.anomalySectionNoDataTitle": "表示するデータがありません。",
"xpack.infra.logs.analysis.jobConfigurationOutdatedCalloutMessage": "異なるソース構成を使用して ML ジョブが作成されました。現在の構成を適用するにはジョブを再作成してください。これにより以前検出された異常が削除されます。",
@@ -7481,14 +7480,6 @@
"xpack.infra.logs.analysis.jobDefinitionOutdatedCalloutTitle": "古い ML ジョブ定義",
"xpack.infra.logs.analysis.jobStoppedCalloutMessage": "ML ジョブが手動またはリソース不足により停止しました。新しいログエントリーはジョブが再起動するまで処理されません。",
"xpack.infra.logs.analysis.jobStoppedCalloutTitle": "ML ジョブが停止しました",
- "xpack.infra.logs.analysis.logRateResultsToolbarText": "{startTime} から {endTime} までの {numberOfLogs} 件のログエントリーを分析しました",
- "xpack.infra.logs.analysis.logRateSectionBucketSpanLabel": "バケットスパン: ",
- "xpack.infra.logs.analysis.logRateSectionBucketSpanValue": "15 分",
- "xpack.infra.logs.analysis.logRateSectionLineSeriesName": "15 分ごとのログエントリー (平均)",
- "xpack.infra.logs.analysis.logRateSectionLoadingAriaLabel": "ログレートの結果を読み込み中",
- "xpack.infra.logs.analysis.logRateSectionNoDataBody": "時間範囲を調整する必要があるかもしれません。",
- "xpack.infra.logs.analysis.logRateSectionNoDataTitle": "表示するデータがありません。",
- "xpack.infra.logs.analysis.logRateSectionTitle": "ログレート",
"xpack.infra.logs.analysis.missingMlResultsPrivilegesBody": "本機能は機械学習ジョブを利用し、そのステータスと結果にアクセスするためには、少なくとも{machineLearningUserRole}ロールが必要です。",
"xpack.infra.logs.analysis.missingMlResultsPrivilegesTitle": "追加の機械学習の権限が必要です",
"xpack.infra.logs.analysis.missingMlSetupPrivilegesBody": "本機能は機械学習ジョブを利用し、設定には{machineLearningAdminRole}ロールが必要です。",
diff --git a/x-pack/plugins/translations/translations/zh-CN.json b/x-pack/plugins/translations/translations/zh-CN.json
index 457f65e89083d..59e78defb04d6 100644
--- a/x-pack/plugins/translations/translations/zh-CN.json
+++ b/x-pack/plugins/translations/translations/zh-CN.json
@@ -7477,7 +7477,6 @@
"xpack.infra.logs.analysis.anomaliesSectionLineSeriesName": "每 15 分钟日志条目数(平均值)",
"xpack.infra.logs.analysis.anomaliesSectionLoadingAriaLabel": "正在加载异常",
"xpack.infra.logs.analysis.anomaliesSectionTitle": "异常",
- "xpack.infra.logs.analysis.anomalySectionNoAnomaliesTitle": "未检测到任何异常。",
"xpack.infra.logs.analysis.anomalySectionNoDataBody": "您可能想调整时间范围。",
"xpack.infra.logs.analysis.anomalySectionNoDataTitle": "没有可显示的数据。",
"xpack.infra.logs.analysis.jobConfigurationOutdatedCalloutMessage": "创建 ML 作业时所使用的源配置不同。重新创建作业以应用当前配置。这将移除以前检测到的异常。",
@@ -7486,14 +7485,6 @@
"xpack.infra.logs.analysis.jobDefinitionOutdatedCalloutTitle": "ML 作业定义已过期",
"xpack.infra.logs.analysis.jobStoppedCalloutMessage": "ML 作业已手动停止或由于缺乏资源而停止。作业重新启动后,才会处理新的日志条目。",
"xpack.infra.logs.analysis.jobStoppedCalloutTitle": "ML 作业已停止",
- "xpack.infra.logs.analysis.logRateResultsToolbarText": "从 {startTime} 到 {endTime} 已分析 {numberOfLogs} 个日志条目",
- "xpack.infra.logs.analysis.logRateSectionBucketSpanLabel": "存储桶跨度: ",
- "xpack.infra.logs.analysis.logRateSectionBucketSpanValue": "15 分钟",
- "xpack.infra.logs.analysis.logRateSectionLineSeriesName": "每 15 分钟日志条目数(平均值)",
- "xpack.infra.logs.analysis.logRateSectionLoadingAriaLabel": "正在加载日志速率结果",
- "xpack.infra.logs.analysis.logRateSectionNoDataBody": "您可能想调整时间范围。",
- "xpack.infra.logs.analysis.logRateSectionNoDataTitle": "没有可显示的数据。",
- "xpack.infra.logs.analysis.logRateSectionTitle": "日志速率",
"xpack.infra.logs.analysis.missingMlResultsPrivilegesBody": "此功能使用 Machine Learning 作业,要访问这些作业的状态和结果,至少需要 {machineLearningUserRole} 角色。",
"xpack.infra.logs.analysis.missingMlResultsPrivilegesTitle": "需要额外的 Machine Learning 权限",
"xpack.infra.logs.analysis.missingMlSetupPrivilegesBody": "此功能使用 Machine Learning 作业,这需要 {machineLearningAdminRole} 角色才能设置。",