[7.17] [ML] Improving empty object creation (#191518) (#191836)
Manual backport of #191518

Also includes changes to server-side and common code to use
`Object.hasOwn` rather than `hasOwnProperty`.

---------

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
jgowdyelastic and elasticmachine authored Sep 3, 2024
1 parent cd86cc9 commit 8e38ea3
Showing 28 changed files with 91 additions and 86 deletions.
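
Background on the two patterns (an illustrative sketch; the `doc` and `shadowed` values below are hypothetical, not taken from the diff): `Object.hasOwn` is a static ES2022 method (Node 16.9+), so it keeps working when a value has a null prototype or carries an own field that shadows `hasOwnProperty`, and `Object.create(null)` builds an empty object with no inherited keys at all.

// A document built without a prototype, as the updated code now does.
const doc: Record<string, unknown> = Object.create(null);
doc.result_type = 'record';

// doc.hasOwnProperty('result_type') would throw a TypeError here:
// the method lives on Object.prototype, which doc does not inherit.

// The static form works regardless of the prototype chain:
Object.hasOwn(doc, 'result_type'); // true

// It is also immune to shadowing by an own property:
const shadowed = { hasOwnProperty: 'not a function' } as any;
Object.hasOwn(shadowed, 'result_type'); // false, no TypeError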
6 changes: 3 additions & 3 deletions x-pack/plugins/ml/common/types/alerts.ts
@@ -75,15 +75,15 @@ export interface InfluencerAnomalyAlertDoc extends BaseAnomalyAlertDoc {
 export type AlertHitDoc = RecordAnomalyAlertDoc | BucketAnomalyAlertDoc | InfluencerAnomalyAlertDoc;
 
 export function isRecordAnomalyAlertDoc(arg: any): arg is RecordAnomalyAlertDoc {
-  return arg.hasOwnProperty('result_type') && arg.result_type === ANOMALY_RESULT_TYPE.RECORD;
+  return Object.hasOwn(arg, 'result_type') && arg.result_type === ANOMALY_RESULT_TYPE.RECORD;
 }
 
 export function isBucketAnomalyAlertDoc(arg: any): arg is BucketAnomalyAlertDoc {
-  return arg.hasOwnProperty('result_type') && arg.result_type === ANOMALY_RESULT_TYPE.BUCKET;
+  return Object.hasOwn(arg, 'result_type') && arg.result_type === ANOMALY_RESULT_TYPE.BUCKET;
 }
 
 export function isInfluencerAnomalyAlertDoc(arg: any): arg is InfluencerAnomalyAlertDoc {
-  return arg.hasOwnProperty('result_type') && arg.result_type === ANOMALY_RESULT_TYPE.INFLUENCER;
+  return Object.hasOwn(arg, 'result_type') && arg.result_type === ANOMALY_RESULT_TYPE.INFLUENCER;
 }
 
 export type MlAnomalyDetectionAlertParams = {
20 changes: 10 additions & 10 deletions x-pack/plugins/ml/common/types/field_histograms.ts
@@ -22,11 +22,11 @@ export interface NumericChartData {
 export const isNumericChartData = (arg: any): arg is NumericChartData => {
   return (
     typeof arg === 'object' &&
-    arg.hasOwnProperty('data') &&
-    arg.hasOwnProperty('id') &&
-    arg.hasOwnProperty('interval') &&
-    arg.hasOwnProperty('stats') &&
-    arg.hasOwnProperty('type') &&
+    Object.hasOwn(arg, 'data') &&
+    Object.hasOwn(arg, 'id') &&
+    Object.hasOwn(arg, 'interval') &&
+    Object.hasOwn(arg, 'stats') &&
+    Object.hasOwn(arg, 'type') &&
     arg.type === 'numeric'
   );
 };
@@ -47,10 +47,10 @@ export interface OrdinalChartData {
 export const isOrdinalChartData = (arg: any): arg is OrdinalChartData => {
   return (
     typeof arg === 'object' &&
-    arg.hasOwnProperty('data') &&
-    arg.hasOwnProperty('cardinality') &&
-    arg.hasOwnProperty('id') &&
-    arg.hasOwnProperty('type') &&
+    Object.hasOwn(arg, 'data') &&
+    Object.hasOwn(arg, 'cardinality') &&
+    Object.hasOwn(arg, 'id') &&
+    Object.hasOwn(arg, 'type') &&
     (arg.type === 'ordinal' || arg.type === 'boolean')
   );
 };
@@ -61,7 +61,7 @@ export interface UnsupportedChartData {
 }
 
 export const isUnsupportedChartData = (arg: any): arg is UnsupportedChartData => {
-  return typeof arg === 'object' && arg.hasOwnProperty('type') && arg.type === 'unsupported';
+  return typeof arg === 'object' && Object.hasOwn(arg, 'type') && arg.type === 'unsupported';
 };
 
 export type ChartDataItem = NumericDataItem | OrdinalDataItem;
2 changes: 1 addition & 1 deletion x-pack/plugins/ml/common/util/group_color_utils.ts
@@ -24,7 +24,7 @@ const COLORS = [
   euiVars.euiColorPrimary,
 ];
 
-const colorMap: Record<string, string> = {};
+const colorMap: Record<string, string> = Object.create(null);
 
 export function tabColor(name: string): string {
   if (colorMap[name] === undefined) {
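
`colorMap` is consulted with a plain `colorMap[name] === undefined` check in `tabColor`, and indexing walks the prototype chain, so the null prototype keeps inherited members from masquerading as cached colors. A short sketch of the pitfall (variable names are illustrative):

const withProto: Record<string, string> = {};
// An unset key can still resolve through the prototype chain:
console.log(typeof withProto['toString']); // 'function' — Object.prototype.toString

const noProto: Record<string, string> = Object.create(null);
// With no prototype, only explicitly assigned keys exist:
console.log(typeof noProto['toString']); // 'undefined'

With `{}`, a group named 'toString' would skip the color assignment and be handed an inherited function instead of a color; with `Object.create(null)` it behaves like any other name.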
4 changes: 2 additions & 2 deletions x-pack/plugins/ml/common/util/job_utils.ts
@@ -280,8 +280,8 @@ export function isModelPlotEnabled(
   // 'partition' field values even though this is supported on the back-end.
   // If supplied, check both the by and partition entities are in the terms.
   const detector = job.analysis_config.detectors[detectorIndex];
-  const detectorHasPartitionField = detector.hasOwnProperty('partition_field_name');
-  const detectorHasByField = detector.hasOwnProperty('by_field_name');
+  const detectorHasPartitionField = Object.hasOwn(detector, 'partition_field_name');
+  const detectorHasByField = Object.hasOwn(detector, 'by_field_name');
   const terms = termsStr.split(',');
 
   if (detectorHasPartitionField) {
2 changes: 1 addition & 1 deletion x-pack/plugins/ml/common/util/validation_utils.ts
@@ -45,7 +45,7 @@ export function findAggField(
       value = returnParent === true ? aggs : aggs[k];
       return true;
     }
-    if (aggs.hasOwnProperty(k) && aggs[k] !== null && typeof aggs[k] === 'object') {
+    if (Object.hasOwn(aggs, k) && aggs[k] !== null && typeof aggs[k] === 'object') {
      value = findAggField(aggs[k], fieldName, returnParent);
      return value !== undefined;
    }
2 changes: 1 addition & 1 deletion x-pack/plugins/ml/common/util/validators.ts
@@ -116,7 +116,7 @@ export function numberValidator(conditions?: { min?: number; max?: number }) {
   }
 
   return (value: number): NumberValidationResult | null => {
-    const result = {} as NumberValidationResult;
+    const result = Object.create(null) as NumberValidationResult;
     if (conditions?.min !== undefined && value < conditions.min) {
       result.min = true;
     }
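
The cast above works because the result is only populated through plain property writes; the one behavioral difference from `{}` is that the object inherits no `Object.prototype` methods, which is also why the ownership checks in this commit moved to `Object.hasOwn`. A minimal sketch with a simplified stand-in for the real `NumberValidationResult` interface:

interface ValidationFlags {
  min?: boolean;
  max?: boolean;
}

const result = Object.create(null) as ValidationFlags;
result.min = true; // ordinary reads and writes are unaffected

Object.hasOwn(result, 'min'); // true
// (result as any).hasOwnProperty('min') would throw: nothing is inherited.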
@@ -130,7 +130,7 @@ export function registerAnomalyDetectionAlertType({
     minimumLicenseRequired: MINIMUM_FULL_LICENSE,
     isExportable: true,
     async executor({ services, params, alertId, state, previousStartedAt, startedAt }) {
-      const fakeRequest = {} as KibanaRequest;
+      const fakeRequest = Object.create(null) as KibanaRequest;
       const { execute } = mlSharedServices.alertingServiceProvider(
         services.savedObjectsClient,
         fakeRequest
@@ -139,7 +139,7 @@ export function registerJobsMonitoringRuleType({
     async executor(options) {
       const { services, name } = options;
 
-      const fakeRequest = {} as KibanaRequest;
+      const fakeRequest = Object.create(null) as KibanaRequest;
       const { getTestsResults } = mlServicesProviders.jobsHealthServiceProvider(
         services.savedObjectsClient,
         fakeRequest,
@@ -13,15 +13,15 @@ import {
 } from '../../../../common/types/capabilities';
 
 export function getAdminCapabilities() {
-  const caps: any = {};
+  const caps: any = Object.create(null);
   Object.keys(adminMlCapabilities).forEach((k) => {
     caps[k] = true;
   });
   return { ...getUserCapabilities(), ...caps } as MlCapabilities;
 }
 
 export function getUserCapabilities() {
-  const caps: any = {};
+  const caps: any = Object.create(null);
   Object.keys(userMlCapabilities).forEach((k) => {
     caps[k] = true;
   });
@@ -66,8 +66,8 @@ const cardinalityCheckProvider = (client: IScopedClusterClient) => {
 
   const { detectors, influencers, bucket_span: bucketSpan } = analysisConfig;
 
-  let overallCardinality = {};
-  let maxBucketCardinality = {};
+  let overallCardinality = Object.create(null);
+  let maxBucketCardinality = Object.create(null);
 
   // Get fields required for the model memory estimation
   const overallCardinalityFields: Set<string> = detectors.reduce(
@@ -268,7 +268,7 @@ export class AnalyticsManager {
   async getInitialElementsModelRoot(modelId: string): Promise<InitialElementsReturnType> {
     const resultElements = [];
     const modelElements = [];
-    const details: any = {};
+    const details: any = Object.create(null);
     // fetch model data and create model elements
     let data = await this.getAnalyticsModelData(modelId);
     const modelNodeId = `${data.model_id}-${JOB_MAP_NODE_TYPES.TRAINED_MODEL}`;
@@ -338,7 +338,7 @@ export class AnalyticsManager {
   async getInitialElementsJobRoot(jobId: string): Promise<InitialElementsReturnType> {
     const resultElements = [];
     const modelElements = [];
-    const details: any = {};
+    const details: any = Object.create(null);
     const data = await this.getAnalyticsData(jobId);
     // @ts-expect-error @elastic-elasticsearch Data frame types incomplete
     const nextLinkId = data?.source?.index[0];
@@ -389,7 +389,7 @@
     try {
       await Promise.all([this.setInferenceModels(), this.setJobStats()]);
       // Create first node for incoming analyticsId or modelId
-      let initialData: InitialElementsReturnType = {} as InitialElementsReturnType;
+      let initialData: InitialElementsReturnType = Object.create(null) as InitialElementsReturnType;
       if (analyticsId !== undefined) {
         initialData = await this.getInitialElementsJobRoot(analyticsId);
       } else if (modelId !== undefined) {
@@ -333,7 +333,7 @@ export class DataRecognizer {
 
     const jobs: ModuleJob[] = [];
     const datafeeds: ModuleDatafeed[] = [];
-    const kibana: KibanaObjects = {};
+    const kibana: KibanaObjects = Object.create(null);
     // load all of the job configs
     if (isModule(module)) {
       const tempJobs: ModuleJob[] = module.jobs.map((j) => ({
@@ -573,7 +573,7 @@
   }
 
   public async dataRecognizerJobsExist(moduleId: string): Promise<JobExistResult> {
-    const results = {} as JobExistResult;
+    const results = Object.create(null) as JobExistResult;
 
     // Load the module with the specified ID and check if the jobs
     // in the module have been created.
@@ -828,7 +828,7 @@
     start?: number,
     end?: number
   ): Promise<{ [key: string]: DatafeedResponse }> {
-    const results = {} as { [key: string]: DatafeedResponse };
+    const results = Object.create(null) as { [key: string]: DatafeedResponse };
     for (const datafeed of datafeeds) {
       results[datafeed.id] = await this._startDatafeed(datafeed, start, end);
     }
@@ -939,7 +939,9 @@
   // creates an empty results object,
   // listing each job/datafeed/savedObject with a save success boolean
   private _createResultsTemplate(moduleConfig: Module): DataRecognizerConfigResponse {
-    const results: DataRecognizerConfigResponse = {} as DataRecognizerConfigResponse;
+    const results: DataRecognizerConfigResponse = Object.create(
+      null
+    ) as DataRecognizerConfigResponse;
     const reducedConfig = {
       jobs: moduleConfig.jobs,
       datafeeds: moduleConfig.datafeeds,
@@ -964,7 +966,7 @@
       if (Array.isArray(reducedConfig[i])) {
         createResultsItems(reducedConfig[i] as any[], results, i);
       } else {
-        results[i] = {} as any;
+        results[i] = Object.create(null) as any;
         Object.keys(reducedConfig[i]).forEach((k) => {
           createResultsItems((reducedConfig[i] as Module['kibana'])[k] as any[], results[i], k);
         });
@@ -1156,7 +1158,7 @@
       );
 
       if (!job.config.analysis_limits) {
-        job.config.analysis_limits = {} as AnalysisLimits;
+        job.config.analysis_limits = Object.create(null) as AnalysisLimits;
       }
 
       job.config.analysis_limits.model_memory_limit = modelMemoryLimit;
@@ -1190,7 +1192,7 @@
       // so set the jobs mml to be the max
 
       if (!job.config.analysis_limits) {
-        job.config.analysis_limits = {} as AnalysisLimits;
+        job.config.analysis_limits = Object.create(null) as AnalysisLimits;
       }
 
       job.config.analysis_limits.model_memory_limit = maxMml;
@@ -479,7 +479,7 @@ export class DataVisualizer {
   ): Promise<BatchStats[]> {
     // Batch up fields by type, getting stats for multiple fields at a time.
     const batches: Field[][] = [];
-    const batchedFields: { [key: string]: Field[][] } = {};
+    const batchedFields: { [key: string]: Field[][] } = Object.create(null);
     each(fields, (field) => {
       if (field.fieldName === undefined) {
         // undefined fieldName is used for a document count request.
@@ -808,7 +808,7 @@
       body: searchBody,
     });
 
-    const buckets: { [key: string]: number } = {};
+    const buckets: { [key: string]: number } = Object.create(null);
     const dataByTimeBucket: Array<{ key: string; doc_count: number }> = get(
       body,
       ['aggregations', 'eventRate', 'buckets'],
@@ -852,7 +852,7 @@
       () => (count += PERCENTILE_SPACING)
     );
 
-    const aggs: { [key: string]: any } = {};
+    const aggs: { [key: string]: any } = Object.create(null);
     fields.forEach((field, i) => {
       const safeFieldName = getSafeAggregationName(field.fieldName, i);
       aggs[`${safeFieldName}_field_stats`] = {
@@ -991,7 +991,7 @@
     const size = 0;
     const filterCriteria = buildBaseFilterCriteria(timeFieldName, earliestMs, latestMs, query);
 
-    const aggs: Aggs = {};
+    const aggs: Aggs = Object.create(null);
     fields.forEach((field, i) => {
       const safeFieldName = getSafeAggregationName(field.fieldName, i);
       const top = {
@@ -1083,7 +1083,7 @@
     const size = 0;
     const filterCriteria = buildBaseFilterCriteria(timeFieldName, earliestMs, latestMs, query);
 
-    const aggs: Aggs = {};
+    const aggs: Aggs = Object.create(null);
     fields.forEach((field, i) => {
       const safeFieldName = getSafeAggregationName(field.fieldName, i);
       aggs[`${safeFieldName}_field_stats`] = {
@@ -1151,7 +1151,7 @@
     const size = 0;
     const filterCriteria = buildBaseFilterCriteria(timeFieldName, earliestMs, latestMs, query);
 
-    const aggs: Aggs = {};
+    const aggs: Aggs = Object.create(null);
     fields.forEach((field, i) => {
       const safeFieldName = getSafeAggregationName(field.fieldName, i);
       aggs[`${safeFieldName}_value_count`] = {
14 changes: 7 additions & 7 deletions x-pack/plugins/ml/server/models/fields_service/fields_service.ts
@@ -55,13 +55,13 @@ export function fieldsServiceProvider({ asCurrentUser }: IScopedClusterClient) {
     fieldNames.forEach((fieldName) => {
       if (
         typeof datafeedConfig?.script_fields === 'object' &&
-        datafeedConfig.script_fields.hasOwnProperty(fieldName)
+        Object.hasOwn(datafeedConfig.script_fields, fieldName)
       ) {
         aggregatableFields.push(fieldName);
       }
       if (
         typeof datafeedConfig?.runtime_mappings === 'object' &&
-        datafeedConfig.runtime_mappings.hasOwnProperty(fieldName)
+        Object.hasOwn(datafeedConfig.runtime_mappings, fieldName)
       ) {
         aggregatableFields.push(fieldName);
       }
@@ -118,7 +118,7 @@
       ) ?? {};
 
     // No need to perform aggregation over the cached fields
-    const fieldsToAgg = aggregatableFields.filter((field) => !cachedValues.hasOwnProperty(field));
+    const fieldsToAgg = aggregatableFields.filter((field) => !Object.hasOwn(cachedValues, field));
 
     if (fieldsToAgg.length === 0) {
       return cachedValues;
@@ -142,17 +142,17 @@
       mustCriteria.push(query);
     }
 
-    const runtimeMappings: any = {};
+    const runtimeMappings: any = Object.create(null);
     const aggs = fieldsToAgg.reduce(
       (obj, field) => {
         if (
           typeof datafeedConfig?.script_fields === 'object' &&
-          datafeedConfig.script_fields.hasOwnProperty(field)
+          Object.hasOwn(datafeedConfig.script_fields, field)
         ) {
           obj[field] = { cardinality: { script: datafeedConfig.script_fields[field].script } };
         } else if (
           typeof datafeedConfig?.runtime_mappings === 'object' &&
-          datafeedConfig.runtime_mappings.hasOwnProperty(field)
+          Object.hasOwn(datafeedConfig.runtime_mappings, field)
         ) {
           obj[field] = { cardinality: { field } };
           runtimeMappings.runtime_mappings = datafeedConfig.runtime_mappings;
@@ -339,7 +339,7 @@
       ) ?? {};
 
     // No need to perform aggregation over the cached fields
-    const fieldsToAgg = aggregatableFields.filter((field) => !cachedValues.hasOwnProperty(field));
+    const fieldsToAgg = aggregatableFields.filter((field) => !Object.hasOwn(cachedValues, field));
 
     if (fieldsToAgg.length === 0) {
       return cachedValues;
6 changes: 3 additions & 3 deletions x-pack/plugins/ml/server/models/filter/filter_manager.ts
@@ -68,7 +68,7 @@ export class FilterManager {
       results[FILTERS] &&
       (results[FILTERS].body as estypes.MlGetFiltersResponse).filters.length
     ) {
-      let filtersInUse: FiltersInUse = {};
+      let filtersInUse: FiltersInUse = Object.create(null);
       if (results[JOBS] && (results[JOBS].body as estypes.MlGetJobsResponse).jobs) {
         filtersInUse = this.buildFiltersInUse(
           (results[JOBS].body as estypes.MlGetJobsResponse).jobs
@@ -107,7 +107,7 @@
       ]);
 
       // Build a map of filter_ids against jobs and detectors using that filter.
-      let filtersInUse: FiltersInUse = {};
+      let filtersInUse: FiltersInUse = Object.create(null);
       if (results[JOBS] && (results[JOBS].body as estypes.MlGetJobsResponse).jobs) {
         filtersInUse = this.buildFiltersInUse(
           (results[JOBS].body as estypes.MlGetJobsResponse).jobs
@@ -182,7 +182,7 @@
 
   buildFiltersInUse(jobsList: Job[]) {
     // Build a map of filter_ids against jobs and detectors using that filter.
-    const filtersInUse: FiltersInUse = {};
+    const filtersInUse: FiltersInUse = Object.create(null);
     jobsList.forEach((job) => {
       const detectors = job.analysis_config.detectors;
       detectors.forEach((detector) => {
@@ -89,7 +89,7 @@ export function jobAuditMessagesProvider(
       gte = `now-${from}`;
     }
 
-    let timeFilter = {};
+    let timeFilter = Object.create(null);
     if (from !== null) {
       timeFilter = {
         range: {
4 changes: 2 additions & 2 deletions x-pack/plugins/ml/server/models/job_service/datafeeds.ts
@@ -40,7 +40,7 @@ export function datafeedsProvider(client: IScopedClusterClient, mlClient: MlClie
       return acc;
     }, {} as { [id: string]: boolean });
 
-    const results: Results = {};
+    const results: Results = Object.create(null);
 
     async function doStart(datafeedId: string): Promise<{ started: boolean; error?: string }> {
       if (doStartsCalled[datafeedId] === false) {
@@ -113,7 +113,7 @@
   }
 
   async function stopDatafeeds(datafeedIds: string[]) {
-    const results: Results = {};
+    const results: Results = Object.create(null);
 
     for (const datafeedId of datafeedIds) {
       try {
