From 1ae02893c04952a64641ca40a79a30abecdcd77e Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Wed, 2 Dec 2020 09:32:40 +0100 Subject: [PATCH 01/23] You need to start somewhere --- .../server/logging/appenders/appenders.ts | 15 +- .../appenders/rolling_file/policies/index.ts | 63 ++++++++ .../appenders/rolling_file/policies/policy.ts | 30 ++++ .../rolling_file/policies/size_limit/index.ts | 24 +++ .../policies/size_limit/size_limit_policy.ts | 55 +++++++ .../get_next_rolling_time.test.ts | 86 +++++++++++ .../time_interval/get_next_rolling_time.ts | 61 ++++++++ .../policies/time_interval/index.ts | 24 +++ .../time_interval/time_interval_policy.ts | 83 +++++++++++ .../rolling_file/rolling_file_appender.ts | 141 ++++++++++++++++++ .../rolling_file/rolling_file_context.ts | 32 ++++ .../rolling_file/rolling_file_manager.ts | 69 +++++++++ .../strategies/default_strategy.ts | 113 ++++++++++++++ .../appenders/rolling_file/strategies/fs.ts | 25 ++++ .../rolling_file/strategies/index.ts | 40 +++++ .../strategies/numeric_pattern_matcher.ts | 65 ++++++++ .../strategies/pattern_matcher.test.ts | 48 ++++++ .../rolling_file/strategies/strategy.ts | 22 +++ src/core/server/logging/logging_config.ts | 2 + src/core/server/logging/logging_system.ts | 9 +- src/core/server/root/index.ts | 2 +- 21 files changed, 1004 insertions(+), 5 deletions(-) create mode 100644 src/core/server/logging/appenders/rolling_file/policies/index.ts create mode 100644 src/core/server/logging/appenders/rolling_file/policies/policy.ts create mode 100644 src/core/server/logging/appenders/rolling_file/policies/size_limit/index.ts create mode 100644 src/core/server/logging/appenders/rolling_file/policies/size_limit/size_limit_policy.ts create mode 100644 src/core/server/logging/appenders/rolling_file/policies/time_interval/get_next_rolling_time.test.ts create mode 100644 src/core/server/logging/appenders/rolling_file/policies/time_interval/get_next_rolling_time.ts create mode 100644 src/core/server/logging/appenders/rolling_file/policies/time_interval/index.ts create mode 100644 src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts create mode 100644 src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts create mode 100644 src/core/server/logging/appenders/rolling_file/rolling_file_context.ts create mode 100644 src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts create mode 100644 src/core/server/logging/appenders/rolling_file/strategies/default_strategy.ts create mode 100644 src/core/server/logging/appenders/rolling_file/strategies/fs.ts create mode 100644 src/core/server/logging/appenders/rolling_file/strategies/index.ts create mode 100644 src/core/server/logging/appenders/rolling_file/strategies/numeric_pattern_matcher.ts create mode 100644 src/core/server/logging/appenders/rolling_file/strategies/pattern_matcher.test.ts create mode 100644 src/core/server/logging/appenders/rolling_file/strategies/strategy.ts diff --git a/src/core/server/logging/appenders/appenders.ts b/src/core/server/logging/appenders/appenders.ts index 4e6920c50686c..aace9ed2b5db7 100644 --- a/src/core/server/logging/appenders/appenders.ts +++ b/src/core/server/logging/appenders/appenders.ts @@ -28,6 +28,10 @@ import { import { Layouts } from '../layouts/layouts'; import { ConsoleAppender, ConsoleAppenderConfig } from './console/console_appender'; import { FileAppender, FileAppenderConfig } from './file/file_appender'; +import { + RollingFileAppender, + RollingFileAppenderConfig, +} from 
'./rolling_file/rolling_file_appender'; /** * Config schema for validting the shape of the `appenders` key in in {@link LoggerContextConfigType} or @@ -39,10 +43,15 @@ export const appendersSchema = schema.oneOf([ ConsoleAppender.configSchema, FileAppender.configSchema, LegacyAppender.configSchema, + RollingFileAppender.configSchema, ]); /** @public */ -export type AppenderConfigType = ConsoleAppenderConfig | FileAppenderConfig | LegacyAppenderConfig; +export type AppenderConfigType = + | ConsoleAppenderConfig + | FileAppenderConfig + | LegacyAppenderConfig + | RollingFileAppenderConfig; /** @internal */ export class Appenders { @@ -57,10 +66,10 @@ export class Appenders { switch (config.kind) { case 'console': return new ConsoleAppender(Layouts.create(config.layout)); - case 'file': return new FileAppender(Layouts.create(config.layout), config.path); - + case 'rolling-file': + return new RollingFileAppender(config); case 'legacy-appender': return new LegacyAppender(config.legacyLoggingConfig); diff --git a/src/core/server/logging/appenders/rolling_file/policies/index.ts b/src/core/server/logging/appenders/rolling_file/policies/index.ts new file mode 100644 index 0000000000000..2e0a43bd98590 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/policies/index.ts @@ -0,0 +1,63 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { schema } from '@kbn/config-schema'; +import { assertNever } from '@kbn/std'; +import { TriggeringPolicy } from './policy'; +import { RollingFileContext } from '../rolling_file_context'; +import { + sizedLimitTriggeringPolicyConfigSchema, + SizedLimitTriggeringPolicyConfig, + SizedLimitTriggeringPolicy, +} from './size_limit'; +import { + TimeIntervalTriggeringPolicyConfig, + TimeIntervalTriggeringPolicy, + timeIntervalTriggeringPolicyConfigSchema, +} from './time_interval'; + +export { TriggeringPolicy } from './policy'; + +/** + * Any of the existing policy's configuration + * + * See {@link SizedLimitTriggeringPolicyConfig} and {@link TimeIntervalTriggeringPolicyConfig} + */ +export type TriggeringPolicyConfig = + | SizedLimitTriggeringPolicyConfig + | TimeIntervalTriggeringPolicyConfig; + +export const rollingPolicyConfigSchema = schema.oneOf([ + sizedLimitTriggeringPolicyConfigSchema, + timeIntervalTriggeringPolicyConfigSchema, +]); + +export const createTriggeringPolicy = ( + config: TriggeringPolicyConfig, + context: RollingFileContext +): TriggeringPolicy => { + switch (config.kind) { + case 'size-limit': + return new SizedLimitTriggeringPolicy(config, context); + case 'time-interval': + return new TimeIntervalTriggeringPolicy(config, context); + default: + return assertNever(config); + } +}; diff --git a/src/core/server/logging/appenders/rolling_file/policies/policy.ts b/src/core/server/logging/appenders/rolling_file/policies/policy.ts new file mode 100644 index 0000000000000..6af3aabe09635 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/policies/policy.ts @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { LogRecord } from '@kbn/logging'; + +/** + * A policy used to determinate when a rollout should be performed. + */ +export interface TriggeringPolicy { + /** + * Determines whether a rollover should occur. + **/ + isTriggeringEvent(record: LogRecord): boolean; +} diff --git a/src/core/server/logging/appenders/rolling_file/policies/size_limit/index.ts b/src/core/server/logging/appenders/rolling_file/policies/size_limit/index.ts new file mode 100644 index 0000000000000..17624adbe94da --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/policies/size_limit/index.ts @@ -0,0 +1,24 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +export { + SizedLimitTriggeringPolicy, + SizedLimitTriggeringPolicyConfig, + sizedLimitTriggeringPolicyConfigSchema, +} from './size_limit_policy'; diff --git a/src/core/server/logging/appenders/rolling_file/policies/size_limit/size_limit_policy.ts b/src/core/server/logging/appenders/rolling_file/policies/size_limit/size_limit_policy.ts new file mode 100644 index 0000000000000..7886b3798ce6b --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/policies/size_limit/size_limit_policy.ts @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { schema, ByteSizeValue } from '@kbn/config-schema'; +import { LogRecord } from '@kbn/logging'; +import { RollingFileContext } from '../../rolling_file_context'; +import { TriggeringPolicy } from '../policy'; + +export interface SizedLimitTriggeringPolicyConfig { + kind: 'size-limit'; + + /** + * The minimum size the file must have to roll over. + */ + size: ByteSizeValue; +} + +export const sizedLimitTriggeringPolicyConfigSchema = schema.object({ + kind: schema.literal('size-limit'), + size: schema.byteSize({ min: '1b' }), +}); + +/** + * A triggering policy based on fixed size limit. + */ +export class SizedLimitTriggeringPolicy implements TriggeringPolicy { + private readonly maxFileSize: number; + + constructor( + config: SizedLimitTriggeringPolicyConfig, + private readonly context: RollingFileContext + ) { + this.maxFileSize = config.size.getValueInBytes(); + } + + isTriggeringEvent(record: LogRecord): boolean { + return this.context.currentFileSize > this.maxFileSize; + } +} diff --git a/src/core/server/logging/appenders/rolling_file/policies/time_interval/get_next_rolling_time.test.ts b/src/core/server/logging/appenders/rolling_file/policies/time_interval/get_next_rolling_time.test.ts new file mode 100644 index 0000000000000..5163637390645 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/policies/time_interval/get_next_rolling_time.test.ts @@ -0,0 +1,86 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. 
licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import moment from 'moment'; +import { schema } from '@kbn/config-schema'; +import { getNextRollingTime } from './get_next_rolling_time'; + +const format = 'YYYY-MM-DD HH:mm:ss:SSS'; + +const formattedRollingTime = (date: string, duration: string, modulate: boolean) => + moment( + getNextRollingTime( + moment(date, format).toDate().getTime(), + schema.duration().validate(duration), + modulate + ) + ).format(format); + +describe('getNextRollingTime', () => { + describe('when `modulate` is false', () => { + it('increments the current time by the interval', () => { + expect(formattedRollingTime('2010-10-20 04:27:12:000', '15m', false)).toEqual( + '2010-10-20 04:42:12:000' + ); + + expect(formattedRollingTime('2010-02-12 04:27:12:000', '24h', false)).toEqual( + '2010-02-13 04:27:12:000' + ); + + expect(formattedRollingTime('2010-02-17 06:34:55', '2d', false)).toEqual( + '2010-02-19 06:34:55:000' + ); + }); + }); + + describe('when `modulate` is true', () => { + it('increments the current time to reach the next boundary', () => { + expect(formattedRollingTime('2010-10-20 04:27:12:512', '30m', true)).toEqual( + '2010-10-20 04:30:00:000' + ); + expect(formattedRollingTime('2010-10-20 04:27:12:512', '6h', true)).toEqual( + '2010-10-20 06:00:00:000' + ); + expect(formattedRollingTime('2010-10-20 04:27:12:512', '1w', true)).toEqual( + '2010-10-24 00:00:00:000' + ); + }); + + it('works when on the edge of a boundary', () => { + expect(formattedRollingTime('2010-10-20 06:00:00:000', '6h', true)).toEqual( + '2010-10-20 12:00:00:000' + ); + expect(formattedRollingTime('2010-10-14 00:00:00:000', '1d', true)).toEqual( + '2010-10-15 00:00:00:000' + ); + expect(formattedRollingTime('2010-01-10 00:00:00:000', '2w', true)).toEqual( + '2010-01-24 00:00:00:000' + ); + }); + + it('increments a higher field when necessary', () => { + expect(formattedRollingTime('2010-10-20 21:00:00:000', '9h', true)).toEqual( + '2010-10-21 03:00:00:000' + ); + expect(formattedRollingTime('2010-12-31 21:00:00:000', '4d', true)).toEqual( + '2011-01-03 00:00:00:000' + ); + }); + }); +}); diff --git a/src/core/server/logging/appenders/rolling_file/policies/time_interval/get_next_rolling_time.ts b/src/core/server/logging/appenders/rolling_file/policies/time_interval/get_next_rolling_time.ts new file mode 100644 index 0000000000000..484352bd9b4b1 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/policies/time_interval/get_next_rolling_time.ts @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import moment, { Duration, unitOfTime } from 'moment-timezone'; + +/** + * Return the next potential rollover time, given current time and rollover interval + */ +export const getNextRollingTime = ( + currentTime: number, + interval: Duration, + modulate: boolean +): number => { + if (modulate) { + const field = getHighestField(interval); + const currentMoment = moment(currentTime); + const increment = interval.get(field) - (currentMoment.get(field) % interval.get(field)); + const incrementInMs = moment.duration(increment, field).asMilliseconds(); + return currentMoment.startOf(field).toDate().getTime() + incrementInMs; + } else { + return currentTime + interval.asMilliseconds(); + } +}; + +const getHighestField = (duration: Duration): unitOfTime.Base => { + if (duration.asYears() >= 1) { + return 'year'; + } + if (duration.asMonths() >= 1) { + return 'month'; + } + if (duration.asDays() >= 1) { + return 'day'; + } + if (duration.asHours() >= 1) { + return 'hour'; + } + if (duration.asMinutes() >= 1) { + return 'minute'; + } + if (duration.asSeconds() >= 1) { + return 'second'; + } + return 'millisecond'; +}; diff --git a/src/core/server/logging/appenders/rolling_file/policies/time_interval/index.ts b/src/core/server/logging/appenders/rolling_file/policies/time_interval/index.ts new file mode 100644 index 0000000000000..481b7a77d8463 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/policies/time_interval/index.ts @@ -0,0 +1,24 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +export { + TimeIntervalTriggeringPolicy, + TimeIntervalTriggeringPolicyConfig, + timeIntervalTriggeringPolicyConfigSchema, +} from './time_interval_policy'; diff --git a/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts b/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts new file mode 100644 index 0000000000000..8590b437aceda --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts @@ -0,0 +1,83 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. 
licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { Duration } from 'moment-timezone'; +import { schema } from '@kbn/config-schema'; +import { LogRecord } from '@kbn/logging'; +import { RollingFileContext } from '../../rolling_file_context'; +import { TriggeringPolicy } from '../policy'; +import { getNextRollingTime } from './get_next_rolling_time'; + +export interface TimeIntervalTriggeringPolicyConfig { + kind: 'time-interval'; + + /** + * How often a rollover should occur. + */ + interval: Duration; + /** + * Indicates whether the interval should be adjusted to cause the next rollover to occur on the interval boundary. + * + * For example, if the item is hours, the current hour is 3 am and the interval is 4 then + * the first rollover will occur at 4 am and then next ones will occur at 8 am, noon, 4pm, etc. + * The default value is true. + */ + modulate: boolean; +} + +export const timeIntervalTriggeringPolicyConfigSchema = schema.object({ + kind: schema.literal('time-interval'), + interval: schema.duration({ defaultValue: '24h' }), + modulate: schema.boolean({ defaultValue: true }), +}); + +/** + * A triggering policy based on a fixed time interval + */ +export class TimeIntervalTriggeringPolicy implements TriggeringPolicy { + /** + * millisecond timestamp of when the next rollover should occur. + */ + private nextRolloverTime: number; + + constructor( + private readonly config: TimeIntervalTriggeringPolicyConfig, + context: RollingFileContext + ) { + this.nextRolloverTime = getNextRollingTime( + context.currentFileTime || Date.now(), + config.interval, + config.modulate + ); + } + + isTriggeringEvent(record: LogRecord): boolean { + const eventTime = record.timestamp.getTime(); + if (eventTime >= this.nextRolloverTime) { + this.nextRolloverTime = getNextRollingTime( + eventTime, + this.config.interval, + this.config.modulate + ); + // TODO: update context.currentFileTime ? + return true; + } + return false; + } +} diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts new file mode 100644 index 0000000000000..abd0299fdb749 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts @@ -0,0 +1,141 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { schema } from '@kbn/config-schema'; +import { LogRecord, Layout, DisposableAppender } from '@kbn/logging'; +import { Layouts, LayoutConfigType } from '../../layouts/layouts'; +import { BufferAppender } from '../buffer/buffer_appender'; +import { + TriggeringPolicyConfig, + createTriggeringPolicy, + rollingPolicyConfigSchema, + TriggeringPolicy, +} from './policies'; +import { + RollingStrategy, + createRollingStrategy, + RollingStrategyConfig, + rollingStrategyConfigSchema, +} from './strategies'; +import { RollingFileManager } from './rolling_file_manager'; +import { RollingFileContext } from './rolling_file_context'; + +export interface RollingFileAppenderConfig { + kind: 'rolling-file'; + /** + * The layout to use when writing log entries + */ + layout: LayoutConfigType; + /** + * The absolute path of the file to write to. + * If the file, or any of its parent directories, do not exist, they will be created. + */ + path: string; + /** + * The policy to use to determine if a rollover should occur. + */ + policy: TriggeringPolicyConfig; + /** + * The rollout strategy to use for rolling. + */ + strategy: RollingStrategyConfig; +} + +/** + * Appender that formats all the `LogRecord` instances it receives and writes them to the specified file. + * @internal + */ +export class RollingFileAppender implements DisposableAppender { + public static configSchema = schema.object({ + kind: schema.literal('rolling-file'), + layout: Layouts.configSchema, + path: schema.string(), + policy: rollingPolicyConfigSchema, + strategy: rollingStrategyConfigSchema, + }); + + private isRolling = false; + private layout: Layout; + private context: RollingFileContext; + private fileManager: RollingFileManager; + private policy: TriggeringPolicy; + private strategy: RollingStrategy; + private buffer: BufferAppender; + + /** + * Creates FileAppender instance with specified layout and file path. + */ + constructor(config: RollingFileAppenderConfig) { + this.context = new RollingFileContext(); + this.fileManager = new RollingFileManager(config.path, this.context); + this.layout = Layouts.create(config.layout); + this.policy = createTriggeringPolicy(config.policy, this.context); // TODO: rename to TriggeringPolicy + this.strategy = createRollingStrategy(config.path, config.strategy, this.context); + this.buffer = new BufferAppender(); + } + + /** + * Formats specified `record` and writes them to the specified file. + * @param record `LogRecord` instance to be logged. + */ + public append(record: LogRecord) { + // if we are currently rolling the files, push the log record + // into the buffer, which will be flushed once rolling is complete + if (this.isRolling) { + this.buffer.append(record); + } + if (this.needRollover(record)) { + this.buffer.append(record); + this.performRollover(); + } else { + this.fileManager.write(`${this.layout.format(record)}\n`); + } + } + + /** + * Disposes `FileAppender`. Waits for the underlying file stream to be completely flushed and closed. 
+ */ + public async dispose() { + await this.buffer.dispose(); + await this.fileManager.closeStream(); + } + + private async performRollover() { + try { + this.isRolling = true; + await this.strategy.rollout(); + await this.fileManager.closeStream(); + this.isRolling = false; + const pendingLogs = this.buffer.flush(); + for (const log of pendingLogs) { + this.append(log); + } + } catch (e) { + // eslint-disable-next-line no-console + console.log('Error while rolling file: ', e); + } + } + + /** + * Checks if the current even should trigger a rollover + */ + private needRollover(record: LogRecord) { + return this.policy.isTriggeringEvent(record); + } +} diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_context.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_context.ts new file mode 100644 index 0000000000000..d4080e47674b7 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/rolling_file_context.ts @@ -0,0 +1,32 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Context shared between the rolling file manager, policy and strategy. + */ +export class RollingFileContext { + /** + * The size of the currently opened file. + */ + public currentFileSize: number; + /** + * The time the currently opened file was created. + */ + public currentFileTime: number; +} diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts new file mode 100644 index 0000000000000..60265b6840af2 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts @@ -0,0 +1,69 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { createWriteStream, WriteStream, statSync } from 'fs'; +import { RollingFileContext } from './rolling_file_context'; + +export class RollingFileManager { + private readonly filePath; + private outputStream?: WriteStream; + + constructor(path: string, private readonly context: RollingFileContext) { + this.filePath = path; + } + + write(chunk: string) { + const stream = this.ensureStreamOpen(); + this.context.currentFileSize += chunk.length; + stream.write(chunk); + } + + private ensureStreamOpen() { + if (this.outputStream === undefined) { + this.outputStream = createWriteStream(this.filePath, { + encoding: 'utf8', + flags: 'a', + }); + // TODO: should move elsewhere? + this.refreshInitialTime(); + } + return this.outputStream!; + } + + private refreshInitialTime() { + try { + const { birthtime, size } = statSync(this.filePath); + this.context.initialTime = birthtime.getTime(); + } catch (e) { + this.context.initialTime = Date.now(); + } + } + + public async closeStream() { + return new Promise((resolve) => { + if (this.outputStream === undefined) { + return resolve(); + } + this.outputStream.end(() => { + this.outputStream = undefined; + resolve(); + }); + }); + } +} diff --git a/src/core/server/logging/appenders/rolling_file/strategies/default_strategy.ts b/src/core/server/logging/appenders/rolling_file/strategies/default_strategy.ts new file mode 100644 index 0000000000000..dad5525a8b40d --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/strategies/default_strategy.ts @@ -0,0 +1,113 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { basename, dirname, join } from 'path'; +import { schema } from '@kbn/config-schema'; +import { readdir, unlink, rename } from './fs'; +import { RollingStrategy } from './strategy'; +import { RollingFileContext } from '../rolling_file_context'; +import { getNumericMatcher, getNumericFileName } from './numeric_pattern_matcher'; + +// TODO: rename to `numeric` & create `date` +export interface DefaultRollingStrategyConfig { + kind: 'default'; + /** + * The suffix pattern to apply when renaming a file. The suffix will be applied + * after the `appender.path` file name, but before the file extension. + * + * @example + * ```yaml + * logging: + * appenders: + * rolling-file: + * kind: rolling-file + * path: /var/logs/kibana.log + * strategy: + * type: default + * pattern: "-%i" + * max: 5 + * ``` + * + * will create `/var/logs/kibana-1.log`, `/var/logs/kibana-2.log`, and so on. + * + * Defaults to '-%i'. + */ + pattern: string; + /** + * The maximum number of files to keep. Once this number is reached, oldest + * files will be deleted. 
Defaults to `7` + */ + max: number; +} + +// %d{MM-dd-yyyy} +// %i + +export const defaultRollingStrategyConfigSchema = schema.object({ + kind: schema.literal('default'), + pattern: schema.string({ defaultValue: '-%i' }), // TODO: validate + max: schema.number({ min: 1, defaultValue: 7 }), +}); + +export class DefaultRollingStrategy implements RollingStrategy { + constructor( + private readonly filepath: string, + private readonly config: DefaultRollingStrategyConfig, + context: RollingFileContext + ) {} + + async rollout() { + // console.log('***** performing rolling'); + + const logFileBaseName = basename(this.filepath); + const logFileFolder = dirname(this.filepath); + + const matcher = getNumericMatcher(logFileBaseName, this.config.pattern); + const dirContent = await readdir(logFileFolder); + + const orderedFiles = dirContent + .map((fileName) => ({ + fileName, + index: matcher(fileName), + })) + .filter(({ index }) => index !== undefined) + .sort((a, b) => a.index! - b.index!) + .map(({ fileName }) => fileName); + + const filesToRoll = orderedFiles.slice(0, this.config.max - 1); + const filesToDelete = orderedFiles.slice(filesToRoll.length, orderedFiles.length); + + for (const fileToDelete of filesToDelete) { + // console.log('*** will delete ', fileToDelete); + + await unlink(join(logFileFolder, fileToDelete)); + } + + for (let i = filesToRoll.length - 1; i >= 0; i--) { + const oldFileName = filesToRoll[i]; + const newFileName = getNumericFileName(logFileBaseName, this.config.pattern, i + 2); + // console.log('*** will roll ', oldFileName, newFileName); + await rename(join(logFileFolder, oldFileName), join(logFileFolder, newFileName)); + } + + const currentFileNewName = getNumericFileName(logFileBaseName, this.config.pattern, 1); + // console.log('*** will roll ', logFileBaseName, currentFileNewName); + await rename(this.filepath, join(logFileFolder, currentFileNewName)); + } +} diff --git a/src/core/server/logging/appenders/rolling_file/strategies/fs.ts b/src/core/server/logging/appenders/rolling_file/strategies/fs.ts new file mode 100644 index 0000000000000..ddfbdc298d012 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/strategies/fs.ts @@ -0,0 +1,25 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import Fs from 'fs'; +import { promisify } from 'util'; + +export const readdir = promisify(Fs.readdir); +export const unlink = promisify(Fs.unlink); +export const rename = promisify(Fs.rename); diff --git a/src/core/server/logging/appenders/rolling_file/strategies/index.ts b/src/core/server/logging/appenders/rolling_file/strategies/index.ts new file mode 100644 index 0000000000000..8477e24c4e280 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/strategies/index.ts @@ -0,0 +1,40 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { schema } from '@kbn/config-schema'; +import { RollingStrategy } from './strategy'; +import { + DefaultRollingStrategy, + DefaultRollingStrategyConfig, + defaultRollingStrategyConfigSchema, +} from './default_strategy'; +import { RollingFileContext } from '../rolling_file_context'; + +export { RollingStrategy } from './strategy'; +export type RollingStrategyConfig = DefaultRollingStrategyConfig; + +export const rollingStrategyConfigSchema = schema.oneOf([defaultRollingStrategyConfigSchema]); + +export const createRollingStrategy = ( + filepath: string, + config: RollingStrategyConfig, + context: RollingFileContext +): RollingStrategy => { + return new DefaultRollingStrategy(filepath, config, context); +}; diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric_pattern_matcher.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric_pattern_matcher.ts new file mode 100644 index 0000000000000..dfe7e1379c693 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric_pattern_matcher.ts @@ -0,0 +1,65 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +const createNumericMatcher = (fileBaseName: string, pattern: string): RegExp => { + let suffixStart = fileBaseName.indexOf('.'); + if (suffixStart === -1) { + suffixStart = fileBaseName.length; + } + const baseNameWithoutSuffix = fileBaseName + .substr(0, suffixStart) + // escape special characters in the pattern + .replace(/[.*+\-?^${}()|[\]\\]/g, '\\$&'); + const suffix = fileBaseName + .substr(suffixStart, fileBaseName.length) + // escape special characters in the pattern + .replace(/[.*+\-?^${}()|[\]\\]/g, '\\$&'); + const processedPattern = pattern + // escape special characters in the pattern + .replace(/[.*+\-?^${}()|[\]\\]/g, '\\$&') + // create matching group for `%i` + .replace(/%i/g, '(?\\d+)'); + return new RegExp(`^${baseNameWithoutSuffix}${processedPattern}${suffix}$`); +}; + +export const getNumericMatcher = (logFileName: string, pattern: string) => { + const matcher = createNumericMatcher(logFileName, pattern); + return (fileName: string): number | undefined => { + const match = matcher.exec(fileName); + if (!match) { + return undefined; + } + return parseInt(match.groups!.counter, 10); + }; +}; + +export const getNumericFileName = ( + fileBaseName: string, + pattern: string, + index: number +): string => { + let suffixStart = fileBaseName.indexOf('.'); + if (suffixStart === -1) { + suffixStart = fileBaseName.length; + } + const baseNameWithoutSuffix = fileBaseName.substr(0, suffixStart); + const suffix = fileBaseName.substr(suffixStart, fileBaseName.length); + const interpolatedPattern = pattern.replace('%i', String(index)); + return `${baseNameWithoutSuffix}${interpolatedPattern}${suffix}`; +}; diff --git a/src/core/server/logging/appenders/rolling_file/strategies/pattern_matcher.test.ts b/src/core/server/logging/appenders/rolling_file/strategies/pattern_matcher.test.ts new file mode 100644 index 0000000000000..47b7c44f9319c --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/strategies/pattern_matcher.test.ts @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { getNumericMatcher } from './numeric_pattern_matcher'; + +describe('getNumericIndex', () => { + it('returns the file index when the file matches the pattern', () => { + const matcher = getNumericMatcher('log.json', '.%i'); + expect(matcher('log.1.json')).toEqual(1); + expect(matcher('log.12.json')).toEqual(12); + }); + it('handles special characters in the pattern', () => { + const matcher = getNumericMatcher('kibana.log', '-{%i}'); + expect(matcher('kibana-{1}.log')).toEqual(1); + }); + it('returns undefined when the file does not match the pattern', () => { + const matcher = getNumericMatcher('log.json', '.%i'); + expect(matcher('log.1.text')).toBeUndefined(); + expect(matcher('log*1.json')).toBeUndefined(); + expect(matcher('log.2foo.json')).toBeUndefined(); + }); + it('handles multiple extensions', () => { + const matcher = getNumericMatcher('log.foo.bar', '.%i'); + expect(matcher('log.1.foo.bar')).toEqual(1); + expect(matcher('log.12.foo.bar')).toEqual(12); + }); + it('handles files with no extensions', () => { + const matcher = getNumericMatcher('log', '.%i'); + expect(matcher('log.1')).toEqual(1); + expect(matcher('log.42')).toEqual(42); + }); +}); diff --git a/src/core/server/logging/appenders/rolling_file/strategies/strategy.ts b/src/core/server/logging/appenders/rolling_file/strategies/strategy.ts new file mode 100644 index 0000000000000..5418c47e2ea2c --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/strategies/strategy.ts @@ -0,0 +1,22 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +export interface RollingStrategy { + rollout(): Promise; +} diff --git a/src/core/server/logging/logging_config.ts b/src/core/server/logging/logging_config.ts index a6ab15dc29fdf..ef3ca36124176 100644 --- a/src/core/server/logging/logging_config.ts +++ b/src/core/server/logging/logging_config.ts @@ -182,6 +182,8 @@ export class LoggingConfig { this.fillLoggersConfig(configType); } + // if (!this.env.isDevCliParent) { + /** * Returns a new LoggingConfig that merges the existing config with the specified config. 
* diff --git a/src/core/server/logging/logging_system.ts b/src/core/server/logging/logging_system.ts index 8bc22bdf537af..9bdd1c96264da 100644 --- a/src/core/server/logging/logging_system.ts +++ b/src/core/server/logging/logging_system.ts @@ -19,6 +19,7 @@ import type { PublicMethodsOf } from '@kbn/utility-types'; import { DisposableAppender, LogLevel, Logger, LoggerFactory } from '@kbn/logging'; +import { Env } from '@kbn/config'; import { Appenders } from './appenders/appenders'; import { BufferAppender } from './appenders/buffer/buffer_appender'; import { BaseLogger } from './logger'; @@ -30,6 +31,7 @@ import { LoggerContextConfigType, LoggerContextConfigInput, loggerContextConfigSchema, + config as loggingConfig, } from './logging_config'; export type ILoggingSystem = PublicMethodsOf; @@ -48,6 +50,8 @@ export class LoggingSystem implements LoggerFactory { private readonly loggers: Map = new Map(); private readonly contextConfigs = new Map(); + constructor(private readonly env: Env) {} + public get(...contextParts: string[]): Logger { const context = LoggingConfig.getLoggerContext(contextParts); if (!this.loggers.has(context)) { @@ -68,7 +72,10 @@ export class LoggingSystem implements LoggerFactory { * @param rawConfig New config instance. */ public upgrade(rawConfig: LoggingConfigType) { - const config = new LoggingConfig(rawConfig)!; + // We only want the console appender for the CLI process, + // so we use the 'default' configuration as defined by the schema. + const usedConfig = this.env.isDevCliParent ? loggingConfig.schema.validate({}) : rawConfig; + const config = new LoggingConfig(usedConfig)!; this.applyBaseConfig(config); } diff --git a/src/core/server/root/index.ts b/src/core/server/root/index.ts index 5e9722de03dee..ccc88f67b4182 100644 --- a/src/core/server/root/index.ts +++ b/src/core/server/root/index.ts @@ -39,7 +39,7 @@ export class Root { env: Env, private readonly onShutdown?: (reason?: Error | string) => void ) { - this.loggingSystem = new LoggingSystem(); + this.loggingSystem = new LoggingSystem(env); this.logger = this.loggingSystem.asLoggerFactory(); this.log = this.logger.get('root'); this.server = new Server(rawConfigProvider, env, this.loggingSystem); From 864c5874ae4702f5930f0cdc690c8867ca63b779 Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Wed, 2 Dec 2020 09:41:45 +0100 Subject: [PATCH 02/23] revert comment --- src/core/server/logging/logging_config.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/core/server/logging/logging_config.ts b/src/core/server/logging/logging_config.ts index ef3ca36124176..a6ab15dc29fdf 100644 --- a/src/core/server/logging/logging_config.ts +++ b/src/core/server/logging/logging_config.ts @@ -182,8 +182,6 @@ export class LoggingConfig { this.fillLoggersConfig(configType); } - // if (!this.env.isDevCliParent) { - /** * Returns a new LoggingConfig that merges the existing config with the specified config. 
* From 004436e07f155ab8c20442bbf28d37b3b119d99a Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Wed, 2 Dec 2020 10:15:00 +0100 Subject: [PATCH 03/23] rename default strategy to numeric --- .../appenders/rolling_file/policies/policy.ts | 2 +- .../rolling_file/rolling_file_appender.ts | 18 +++---- .../rolling_file/rolling_file_context.ts | 18 ++++++- .../rolling_file/rolling_file_manager.ts | 16 ++---- .../rolling_file/strategies/index.ts | 15 +++--- .../rolling_file/strategies/numeric/index.ts | 24 +++++++++ .../{ => numeric}/numeric_pattern_matcher.ts | 0 .../numeric_strategy.ts} | 53 +++++++++++++------ .../{ => numeric}/pattern_matcher.test.ts | 0 .../rolling_file/strategies/strategy.ts | 6 +++ 10 files changed, 105 insertions(+), 47 deletions(-) create mode 100644 src/core/server/logging/appenders/rolling_file/strategies/numeric/index.ts rename src/core/server/logging/appenders/rolling_file/strategies/{ => numeric}/numeric_pattern_matcher.ts (100%) rename src/core/server/logging/appenders/rolling_file/strategies/{default_strategy.ts => numeric/numeric_strategy.ts} (65%) rename src/core/server/logging/appenders/rolling_file/strategies/{ => numeric}/pattern_matcher.test.ts (100%) diff --git a/src/core/server/logging/appenders/rolling_file/policies/policy.ts b/src/core/server/logging/appenders/rolling_file/policies/policy.ts index 6af3aabe09635..eeded68711829 100644 --- a/src/core/server/logging/appenders/rolling_file/policies/policy.ts +++ b/src/core/server/logging/appenders/rolling_file/policies/policy.ts @@ -24,7 +24,7 @@ import { LogRecord } from '@kbn/logging'; */ export interface TriggeringPolicy { /** - * Determines whether a rollover should occur. + * Determines whether a rollover should occur before logging given record. **/ isTriggeringEvent(record: LogRecord): boolean; } diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts index abd0299fdb749..2d80b02ac1a3e 100644 --- a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts +++ b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts @@ -71,22 +71,22 @@ export class RollingFileAppender implements DisposableAppender { }); private isRolling = false; - private layout: Layout; - private context: RollingFileContext; - private fileManager: RollingFileManager; - private policy: TriggeringPolicy; - private strategy: RollingStrategy; - private buffer: BufferAppender; + private readonly layout: Layout; + private readonly context: RollingFileContext; + private readonly fileManager: RollingFileManager; + private readonly policy: TriggeringPolicy; + private readonly strategy: RollingStrategy; + private readonly buffer: BufferAppender; /** * Creates FileAppender instance with specified layout and file path. 
*/ constructor(config: RollingFileAppenderConfig) { - this.context = new RollingFileContext(); + this.context = new RollingFileContext(config.path); this.fileManager = new RollingFileManager(config.path, this.context); this.layout = Layouts.create(config.layout); - this.policy = createTriggeringPolicy(config.policy, this.context); // TODO: rename to TriggeringPolicy - this.strategy = createRollingStrategy(config.path, config.strategy, this.context); + this.policy = createTriggeringPolicy(config.policy, this.context); + this.strategy = createRollingStrategy(config.strategy, this.context); this.buffer = new BufferAppender(); } diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_context.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_context.ts index d4080e47674b7..c01291aa8c75e 100644 --- a/src/core/server/logging/appenders/rolling_file/rolling_file_context.ts +++ b/src/core/server/logging/appenders/rolling_file/rolling_file_context.ts @@ -17,16 +17,30 @@ * under the License. */ +import { statSync } from 'fs'; + /** * Context shared between the rolling file manager, policy and strategy. */ export class RollingFileContext { + constructor(public readonly filePath: string) {} /** * The size of the currently opened file. */ - public currentFileSize: number; + public currentFileSize: number = 0; /** * The time the currently opened file was created. */ - public currentFileTime: number; + public currentFileTime: number = 0; + + public refreshFileInfo() { + try { + const { birthtime, size } = statSync(this.filePath); + this.currentFileTime = birthtime.getTime(); + this.currentFileSize = size; + } catch (e) { + this.currentFileTime = Date.now(); + this.currentFileSize = 0; + } + } } diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts index 60265b6840af2..3c500f4723124 100644 --- a/src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts +++ b/src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts @@ -17,9 +17,12 @@ * under the License. */ -import { createWriteStream, WriteStream, statSync } from 'fs'; +import { createWriteStream, WriteStream } from 'fs'; import { RollingFileContext } from './rolling_file_context'; +/** + * Delegate of the {@link RollingFileAppender} used to manage the log file access + */ export class RollingFileManager { private readonly filePath; private outputStream?: WriteStream; @@ -40,21 +43,10 @@ export class RollingFileManager { encoding: 'utf8', flags: 'a', }); - // TODO: should move elsewhere? 
- this.refreshInitialTime(); } return this.outputStream!; } - private refreshInitialTime() { - try { - const { birthtime, size } = statSync(this.filePath); - this.context.initialTime = birthtime.getTime(); - } catch (e) { - this.context.initialTime = Date.now(); - } - } - public async closeStream() { return new Promise((resolve) => { if (this.outputStream === undefined) { diff --git a/src/core/server/logging/appenders/rolling_file/strategies/index.ts b/src/core/server/logging/appenders/rolling_file/strategies/index.ts index 8477e24c4e280..7668daf05345b 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/index.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/index.ts @@ -20,21 +20,20 @@ import { schema } from '@kbn/config-schema'; import { RollingStrategy } from './strategy'; import { - DefaultRollingStrategy, - DefaultRollingStrategyConfig, - defaultRollingStrategyConfigSchema, -} from './default_strategy'; + NumericRollingStrategy, + NumericRollingStrategyConfig, + numericRollingStrategyConfigSchema, +} from './numeric'; import { RollingFileContext } from '../rolling_file_context'; export { RollingStrategy } from './strategy'; -export type RollingStrategyConfig = DefaultRollingStrategyConfig; +export type RollingStrategyConfig = NumericRollingStrategyConfig; -export const rollingStrategyConfigSchema = schema.oneOf([defaultRollingStrategyConfigSchema]); +export const rollingStrategyConfigSchema = schema.oneOf([numericRollingStrategyConfigSchema]); export const createRollingStrategy = ( - filepath: string, config: RollingStrategyConfig, context: RollingFileContext ): RollingStrategy => { - return new DefaultRollingStrategy(filepath, config, context); + return new NumericRollingStrategy(config, context); }; diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/index.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/index.ts new file mode 100644 index 0000000000000..f5b6ae740b155 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/index.ts @@ -0,0 +1,24 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +export { + NumericRollingStrategy, + NumericRollingStrategyConfig, + numericRollingStrategyConfigSchema, +} from './numeric_strategy'; diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric_pattern_matcher.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_pattern_matcher.ts similarity index 100% rename from src/core/server/logging/appenders/rolling_file/strategies/numeric_pattern_matcher.ts rename to src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_pattern_matcher.ts diff --git a/src/core/server/logging/appenders/rolling_file/strategies/default_strategy.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts similarity index 65% rename from src/core/server/logging/appenders/rolling_file/strategies/default_strategy.ts rename to src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts index dad5525a8b40d..be2e7a3400cb4 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/default_strategy.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts @@ -19,14 +19,13 @@ import { basename, dirname, join } from 'path'; import { schema } from '@kbn/config-schema'; -import { readdir, unlink, rename } from './fs'; -import { RollingStrategy } from './strategy'; -import { RollingFileContext } from '../rolling_file_context'; +import { readdir, unlink, rename } from '../fs'; +import { RollingStrategy } from '../strategy'; +import { RollingFileContext } from '../../rolling_file_context'; import { getNumericMatcher, getNumericFileName } from './numeric_pattern_matcher'; -// TODO: rename to `numeric` & create `date` -export interface DefaultRollingStrategyConfig { - kind: 'default'; +export interface NumericRollingStrategyConfig { + kind: 'numeric'; /** * The suffix pattern to apply when renaming a file. The suffix will be applied * after the `appender.path` file name, but before the file extension. @@ -59,24 +58,48 @@ export interface DefaultRollingStrategyConfig { // %d{MM-dd-yyyy} // %i -export const defaultRollingStrategyConfigSchema = schema.object({ - kind: schema.literal('default'), +export const numericRollingStrategyConfigSchema = schema.object({ + kind: schema.literal('numeric'), pattern: schema.string({ defaultValue: '-%i' }), // TODO: validate max: schema.number({ min: 1, defaultValue: 7 }), }); -export class DefaultRollingStrategy implements RollingStrategy { +/** + * A rolling strategy that will suffix the file with a given pattern when rolling, + * and will only retains a fixed amount of rolled file. + * + * @example + * ```yaml + * logging: + * appenders: + * rolling-file: + * kind: rolling-file + * path: /kibana.log + * strategy: + * type: numeric + * pattern: "-%i" + * max: 2 + * ``` + * - During the first rollover kibana.log is renamed to kibana-1.log. A new kibana.log file is created and starts + * being written to. + * - During the second rollover kibana-1.log is renamed to kibana-2.log and kibana.log is renamed to kibana-1.log. + * A new kibana.log file is created and starts being written to. + * - During the third and subsequent rollovers, kibana-2.log is deleted, kibana-1.log is renamed to kibana-2.log and + * kibana.log is renamed to kibana-1.log. A new kibana.log file is created and starts being written to. + * + * See {@link NumericRollingStrategyConfig} for more details. 
+ */ +export class NumericRollingStrategy implements RollingStrategy { constructor( - private readonly filepath: string, - private readonly config: DefaultRollingStrategyConfig, - context: RollingFileContext + private readonly config: NumericRollingStrategyConfig, + private readonly context: RollingFileContext ) {} async rollout() { // console.log('***** performing rolling'); - const logFileBaseName = basename(this.filepath); - const logFileFolder = dirname(this.filepath); + const logFileBaseName = basename(this.context.filePath); + const logFileFolder = dirname(this.context.filePath); const matcher = getNumericMatcher(logFileBaseName, this.config.pattern); const dirContent = await readdir(logFileFolder); @@ -108,6 +131,6 @@ export class DefaultRollingStrategy implements RollingStrategy { const currentFileNewName = getNumericFileName(logFileBaseName, this.config.pattern, 1); // console.log('*** will roll ', logFileBaseName, currentFileNewName); - await rename(this.filepath, join(logFileFolder, currentFileNewName)); + await rename(this.context.filePath, join(logFileFolder, currentFileNewName)); } } diff --git a/src/core/server/logging/appenders/rolling_file/strategies/pattern_matcher.test.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.test.ts similarity index 100% rename from src/core/server/logging/appenders/rolling_file/strategies/pattern_matcher.test.ts rename to src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.test.ts diff --git a/src/core/server/logging/appenders/rolling_file/strategies/strategy.ts b/src/core/server/logging/appenders/rolling_file/strategies/strategy.ts index 5418c47e2ea2c..ad075e1e1c109 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/strategy.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/strategy.ts @@ -17,6 +17,12 @@ * under the License. */ +/** + * A strategy to perform the log file rollout. 
+ */ export interface RollingStrategy { + /** + * Performs the rollout + */ rollout(): Promise; } From f59bca6e6e94b4630a6ab3a31ac821e763f4aace Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Wed, 2 Dec 2020 11:53:07 +0100 Subject: [PATCH 04/23] add some tests --- .../logging/appenders/appenders.test.ts | 11 ++ .../appenders/rolling_file/policies/index.ts | 14 +- .../rolling_file/policies/size_limit/index.ts | 6 +- .../size_limit/size_limit_policy.test.ts | 76 ++++++++++ .../policies/size_limit/size_limit_policy.ts | 15 +- .../time_interval/get_next_rolling_time.ts | 37 ++--- .../time_interval_policy.test.mocks.ts | 21 +++ .../time_interval_policy.test.ts | 130 ++++++++++++++++++ .../time_interval/time_interval_policy.ts | 16 ++- .../policies/time_interval/utils.test.ts | 78 +++++++++++ .../policies/time_interval/utils.ts | 70 ++++++++++ .../rolling_file/rolling_file_appender.ts | 1 + .../rolling_file/rolling_file_manager.ts | 2 + .../strategies/numeric/numeric_strategy.ts | 2 +- ..._pattern_matcher.ts => pattern_matcher.ts} | 0 15 files changed, 432 insertions(+), 47 deletions(-) create mode 100644 src/core/server/logging/appenders/rolling_file/policies/size_limit/size_limit_policy.test.ts create mode 100644 src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.test.mocks.ts create mode 100644 src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.test.ts create mode 100644 src/core/server/logging/appenders/rolling_file/policies/time_interval/utils.test.ts create mode 100644 src/core/server/logging/appenders/rolling_file/policies/time_interval/utils.ts rename src/core/server/logging/appenders/rolling_file/strategies/numeric/{numeric_pattern_matcher.ts => pattern_matcher.ts} (100%) diff --git a/src/core/server/logging/appenders/appenders.test.ts b/src/core/server/logging/appenders/appenders.test.ts index 7cfd2158be338..831dbc9aa2707 100644 --- a/src/core/server/logging/appenders/appenders.test.ts +++ b/src/core/server/logging/appenders/appenders.test.ts @@ -19,10 +19,12 @@ import { mockCreateLayout } from './appenders.test.mocks'; +import { ByteSizeValue } from '@kbn/config-schema'; import { LegacyAppender } from '../../legacy/logging/appenders/legacy_appender'; import { Appenders } from './appenders'; import { ConsoleAppender } from './console/console_appender'; import { FileAppender } from './file/file_appender'; +import { RollingFileAppender } from './rolling_file/rolling_file_appender'; beforeEach(() => { mockCreateLayout.mockReset(); @@ -83,4 +85,13 @@ test('`create()` creates correct appender.', () => { }); expect(legacyAppender).toBeInstanceOf(LegacyAppender); + + const rollingFileAppender = Appenders.create({ + kind: 'rolling-file', + path: 'path', + layout: { highlight: true, kind: 'pattern', pattern: '' }, + strategy: { kind: 'numeric', max: 5, pattern: '%i' }, + policy: { kind: 'size-limit', size: ByteSizeValue.parse('15b') }, + }); + expect(rollingFileAppender).toBeInstanceOf(RollingFileAppender); }); diff --git a/src/core/server/logging/appenders/rolling_file/policies/index.ts b/src/core/server/logging/appenders/rolling_file/policies/index.ts index 2e0a43bd98590..9c32a0f9e13b1 100644 --- a/src/core/server/logging/appenders/rolling_file/policies/index.ts +++ b/src/core/server/logging/appenders/rolling_file/policies/index.ts @@ -22,9 +22,9 @@ import { assertNever } from '@kbn/std'; import { TriggeringPolicy } from './policy'; import { RollingFileContext } from '../rolling_file_context'; import { - 
sizedLimitTriggeringPolicyConfigSchema, - SizedLimitTriggeringPolicyConfig, - SizedLimitTriggeringPolicy, + sizeLimitTriggeringPolicyConfigSchema, + SizeLimitTriggeringPolicyConfig, + SizeLimitTriggeringPolicy, } from './size_limit'; import { TimeIntervalTriggeringPolicyConfig, @@ -37,14 +37,14 @@ export { TriggeringPolicy } from './policy'; /** * Any of the existing policy's configuration * - * See {@link SizedLimitTriggeringPolicyConfig} and {@link TimeIntervalTriggeringPolicyConfig} + * See {@link SizeLimitTriggeringPolicyConfig} and {@link TimeIntervalTriggeringPolicyConfig} */ export type TriggeringPolicyConfig = - | SizedLimitTriggeringPolicyConfig + | SizeLimitTriggeringPolicyConfig | TimeIntervalTriggeringPolicyConfig; export const rollingPolicyConfigSchema = schema.oneOf([ - sizedLimitTriggeringPolicyConfigSchema, + sizeLimitTriggeringPolicyConfigSchema, timeIntervalTriggeringPolicyConfigSchema, ]); @@ -54,7 +54,7 @@ export const createTriggeringPolicy = ( ): TriggeringPolicy => { switch (config.kind) { case 'size-limit': - return new SizedLimitTriggeringPolicy(config, context); + return new SizeLimitTriggeringPolicy(config, context); case 'time-interval': return new TimeIntervalTriggeringPolicy(config, context); default: diff --git a/src/core/server/logging/appenders/rolling_file/policies/size_limit/index.ts b/src/core/server/logging/appenders/rolling_file/policies/size_limit/index.ts index 17624adbe94da..7502eb4fb90c0 100644 --- a/src/core/server/logging/appenders/rolling_file/policies/size_limit/index.ts +++ b/src/core/server/logging/appenders/rolling_file/policies/size_limit/index.ts @@ -18,7 +18,7 @@ */ export { - SizedLimitTriggeringPolicy, - SizedLimitTriggeringPolicyConfig, - sizedLimitTriggeringPolicyConfigSchema, + SizeLimitTriggeringPolicy, + SizeLimitTriggeringPolicyConfig, + sizeLimitTriggeringPolicyConfigSchema, } from './size_limit_policy'; diff --git a/src/core/server/logging/appenders/rolling_file/policies/size_limit/size_limit_policy.test.ts b/src/core/server/logging/appenders/rolling_file/policies/size_limit/size_limit_policy.test.ts new file mode 100644 index 0000000000000..f54ca8d2f1f8a --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/policies/size_limit/size_limit_policy.test.ts @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+import { ByteSizeValue } from '@kbn/config-schema';
+import { LogRecord, LogLevel } from '@kbn/logging';
+import { SizeLimitTriggeringPolicy } from './size_limit_policy';
+import { RollingFileContext } from '../../rolling_file_context';
+
+describe('SizeLimitTriggeringPolicy', () => {
+  let context: RollingFileContext;
+
+  const createPolicy = (size: ByteSizeValue) =>
+    new SizeLimitTriggeringPolicy({ kind: 'size-limit', size }, context);
+
+  const createLogRecord = (parts: Partial<LogRecord> = {}): LogRecord => ({
+    timestamp: new Date(),
+    level: LogLevel.Info,
+    context: 'context',
+    message: 'just a log',
+    pid: 42,
+    ...parts,
+  });
+
+  const isTriggering = ({ fileSize, maxSize }: { maxSize: string; fileSize: string }) => {
+    const policy = createPolicy(ByteSizeValue.parse(maxSize));
+    context.currentFileSize = ByteSizeValue.parse(fileSize).getValueInBytes();
+    return policy.isTriggeringEvent(createLogRecord());
+  };
+
+  beforeEach(() => {
+    context = new RollingFileContext('foo.log');
+  });
+
+  it('triggers a rollover when the file size exceeds the max size', () => {
+    expect(
+      isTriggering({
+        fileSize: '70b',
+        maxSize: '50b',
+      })
+    ).toBeTruthy();
+  });
+
+  it('triggers a rollover when the file size equals the max size', () => {
+    expect(
+      isTriggering({
+        fileSize: '20b',
+        maxSize: '20b',
+      })
+    ).toBeTruthy();
+  });
+
+  it('does not trigger a rollover when the file size did not reach the max size', () => {
+    expect(
+      isTriggering({
+        fileSize: '20b',
+        maxSize: '50b',
+      })
+    ).toBeFalsy();
+  });
+});
diff --git a/src/core/server/logging/appenders/rolling_file/policies/size_limit/size_limit_policy.ts b/src/core/server/logging/appenders/rolling_file/policies/size_limit/size_limit_policy.ts
index 7886b3798ce6b..975b09be6e078 100644
--- a/src/core/server/logging/appenders/rolling_file/policies/size_limit/size_limit_policy.ts
+++ b/src/core/server/logging/appenders/rolling_file/policies/size_limit/size_limit_policy.ts
@@ -22,7 +22,7 @@ import { LogRecord } from '@kbn/logging';
 import { RollingFileContext } from '../../rolling_file_context';
 import { TriggeringPolicy } from '../policy';
 
-export interface SizedLimitTriggeringPolicyConfig {
+export interface SizeLimitTriggeringPolicyConfig {
   kind: 'size-limit';
 
   /**
@@ -31,25 +31,28 @@ export interface SizedLimitTriggeringPolicyConfig {
   size: ByteSizeValue;
 }
 
-export const sizedLimitTriggeringPolicyConfigSchema = schema.object({
+export const sizeLimitTriggeringPolicyConfigSchema = schema.object({
   kind: schema.literal('size-limit'),
   size: schema.byteSize({ min: '1b' }),
 });
 
 /**
- * A triggering policy based on fixed size limit.
+ * A triggering policy based on a fixed size limit.
+ *
+ * Will trigger a rollover when the current log size exceeds the
+ * given {@link SizeLimitTriggeringPolicyConfig.size | size}.
*/ -export class SizedLimitTriggeringPolicy implements TriggeringPolicy { +export class SizeLimitTriggeringPolicy implements TriggeringPolicy { private readonly maxFileSize: number; constructor( - config: SizedLimitTriggeringPolicyConfig, + config: SizeLimitTriggeringPolicyConfig, private readonly context: RollingFileContext ) { this.maxFileSize = config.size.getValueInBytes(); } isTriggeringEvent(record: LogRecord): boolean { - return this.context.currentFileSize > this.maxFileSize; + return this.context.currentFileSize >= this.maxFileSize; } } diff --git a/src/core/server/logging/appenders/rolling_file/policies/time_interval/get_next_rolling_time.ts b/src/core/server/logging/appenders/rolling_file/policies/time_interval/get_next_rolling_time.ts index 484352bd9b4b1..11cbace5ce043 100644 --- a/src/core/server/logging/appenders/rolling_file/policies/time_interval/get_next_rolling_time.ts +++ b/src/core/server/logging/appenders/rolling_file/policies/time_interval/get_next_rolling_time.ts @@ -17,10 +17,11 @@ * under the License. */ -import moment, { Duration, unitOfTime } from 'moment-timezone'; +import moment, { Duration } from 'moment-timezone'; +import { getHighestTimeUnit } from './utils'; /** - * Return the next potential rollover time, given current time and rollover interval + * Return the next rollout time, given current time and rollout interval */ export const getNextRollingTime = ( currentTime: number, @@ -28,34 +29,14 @@ export const getNextRollingTime = ( modulate: boolean ): number => { if (modulate) { - const field = getHighestField(interval); + const incrementedUnit = getHighestTimeUnit(interval); const currentMoment = moment(currentTime); - const increment = interval.get(field) - (currentMoment.get(field) % interval.get(field)); - const incrementInMs = moment.duration(increment, field).asMilliseconds(); - return currentMoment.startOf(field).toDate().getTime() + incrementInMs; + const increment = + interval.get(incrementedUnit) - + (currentMoment.get(incrementedUnit) % interval.get(incrementedUnit)); + const incrementInMs = moment.duration(increment, incrementedUnit).asMilliseconds(); + return currentMoment.startOf(incrementedUnit).toDate().getTime() + incrementInMs; } else { return currentTime + interval.asMilliseconds(); } }; - -const getHighestField = (duration: Duration): unitOfTime.Base => { - if (duration.asYears() >= 1) { - return 'year'; - } - if (duration.asMonths() >= 1) { - return 'month'; - } - if (duration.asDays() >= 1) { - return 'day'; - } - if (duration.asHours() >= 1) { - return 'hour'; - } - if (duration.asMinutes() >= 1) { - return 'minute'; - } - if (duration.asSeconds() >= 1) { - return 'second'; - } - return 'millisecond'; -}; diff --git a/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.test.mocks.ts b/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.test.mocks.ts new file mode 100644 index 0000000000000..5383f55bb19e5 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.test.mocks.ts @@ -0,0 +1,21 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +export const getNextRollingTimeMock = jest.fn(); +jest.doMock('./get_next_rolling_time', () => ({ getNextRollingTime: getNextRollingTimeMock })); diff --git a/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.test.ts b/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.test.ts new file mode 100644 index 0000000000000..79561173087bc --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.test.ts @@ -0,0 +1,130 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { getNextRollingTimeMock } from './time_interval_policy.test.mocks'; +import moment from 'moment-timezone'; +import { LogLevel, LogRecord } from '@kbn/logging'; +import { schema } from '@kbn/config-schema'; +import { + TimeIntervalTriggeringPolicy, + TimeIntervalTriggeringPolicyConfig, +} from './time_interval_policy'; +import { RollingFileContext } from '../../rolling_file_context'; + +const format = 'YYYY-MM-DD HH:mm:ss'; + +describe('TimeIntervalTriggeringPolicy', () => { + afterEach(() => { + getNextRollingTimeMock.mockReset(); + }); + + const createLogRecord = (timestamp: Date): LogRecord => ({ + timestamp, + level: LogLevel.Info, + context: 'context', + message: 'just a log', + pid: 42, + }); + + const createContext = (currentFileTime: number = Date.now()): RollingFileContext => { + const context = new RollingFileContext('foo.log'); + context.currentFileTime = currentFileTime; + return context; + }; + + const createConfig = ( + interval: string = '15m', + modulate: boolean = false + ): TimeIntervalTriggeringPolicyConfig => ({ + kind: 'time-interval', + interval: schema.duration().validate(interval), + modulate, + }); + + it('calls `getNextRollingTime` during construction with the correct parameters', () => { + const date = moment('2010-10-20 04:27:12', format).toDate(); + const context = createContext(date.getTime()); + const config = createConfig('15m', true); + + new TimeIntervalTriggeringPolicy(config, context); + + expect(getNextRollingTimeMock).toHaveBeenCalledTimes(1); + expect(getNextRollingTimeMock).toHaveBeenCalledWith( + context.currentFileTime, + config.interval, + config.modulate + ); + }); + + describe('#isTriggeringEvent', () => { + it('returns true if the event time is after the nextRolloverTime', () => { + const eventDate = moment('2010-10-20 04:43:12', format).toDate(); + const nextRolloverDate = moment('2010-10-20 04:00:00', format).toDate(); + + getNextRollingTimeMock.mockReturnValue(nextRolloverDate.getTime()); + + const policy = new TimeIntervalTriggeringPolicy(createConfig(), createContext()); + + expect(policy.isTriggeringEvent(createLogRecord(eventDate))).toBeTruthy(); + }); + + it('returns true if the event time is exactly the nextRolloverTime', () => { + const eventDate = moment('2010-10-20 04:00:00', format).toDate(); + const nextRolloverDate = moment('2010-10-20 04:00:00', format).toDate(); + + getNextRollingTimeMock.mockReturnValue(nextRolloverDate.getTime()); + + const policy = new TimeIntervalTriggeringPolicy(createConfig(), createContext()); + + expect(policy.isTriggeringEvent(createLogRecord(eventDate))).toBeTruthy(); + }); + + it('returns false if the event time is before the nextRolloverTime', () => { + const eventDate = moment('2010-10-20 03:47:12', format).toDate(); + const nextRolloverDate = moment('2010-10-20 04:00:00', format).toDate(); + + getNextRollingTimeMock.mockReturnValue(nextRolloverDate.getTime()); + + const policy = new TimeIntervalTriggeringPolicy(createConfig(), createContext()); + + expect(policy.isTriggeringEvent(createLogRecord(eventDate))).toBeFalsy(); + }); + + it('refreshes its `nextRolloverTime` when returning true', () => { + const eventDate = moment('2010-10-20 04:43:12', format).toDate(); + const firstRollOverDate = moment('2010-10-20 04:00:00', format).toDate(); + const nextRollOverDate = moment('2010-10-20 08:00:00', format).toDate(); + + getNextRollingTimeMock + // constructor call + .mockReturnValueOnce(firstRollOverDate.getTime()) + // call performed during `isTriggeringEvent` to refresh the rolling 
time
+        .mockReturnValueOnce(nextRollOverDate.getTime());
+
+      const policy = new TimeIntervalTriggeringPolicy(createConfig(), createContext());
+
+      const logRecord = createLogRecord(eventDate);
+
+      // rollingDate is firstRollOverDate
+      expect(policy.isTriggeringEvent(logRecord)).toBeTruthy();
+      // rollingDate should be nextRollOverDate
+      expect(policy.isTriggeringEvent(logRecord)).toBeFalsy();
+    });
+  });
+});
diff --git a/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts b/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts
index 8590b437aceda..de7e2a851783c 100644
--- a/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts
+++ b/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts
@@ -23,12 +23,18 @@ import { LogRecord } from '@kbn/logging';
 import { RollingFileContext } from '../../rolling_file_context';
 import { TriggeringPolicy } from '../policy';
 import { getNextRollingTime } from './get_next_rolling_time';
+import { isValidRolloverInterval } from './utils';
 
 export interface TimeIntervalTriggeringPolicyConfig {
   kind: 'time-interval';
 
   /**
    * How often a rollover should occur.
+   *
+   * @remarks
+   * Due to how modulate rolling works, the highest time unit of the duration is required to have an integer value
+   * (you can't overflow to a higher unit).
+   * For example, `15m` and `4h` are valid values, but `90m` is not (as it is `1.5h`).
    */
   interval: Duration;
   /**
@@ -43,7 +49,14 @@ export interface TimeIntervalTriggeringPolicyConfig {
 
 export const timeIntervalTriggeringPolicyConfigSchema = schema.object({
   kind: schema.literal('time-interval'),
-  interval: schema.duration({ defaultValue: '24h' }),
+  interval: schema.duration({
+    defaultValue: '24h',
+    validate: (interval) => {
+      if (!isValidRolloverInterval(interval)) {
+        return 'Interval value cannot overflow to a higher field.';
+      }
+    },
+  }),
   modulate: schema.boolean({ defaultValue: true }),
 });
 
@@ -75,7 +88,6 @@ export class TimeIntervalTriggeringPolicy implements TriggeringPolicy {
         this.config.interval,
         this.config.modulate
       );
-      // TODO: update context.currentFileTime ?
       return true;
     }
     return false;
diff --git a/src/core/server/logging/appenders/rolling_file/policies/time_interval/utils.test.ts b/src/core/server/logging/appenders/rolling_file/policies/time_interval/utils.test.ts
new file mode 100644
index 0000000000000..1b9517f6ade3c
--- /dev/null
+++ b/src/core/server/logging/appenders/rolling_file/policies/time_interval/utils.test.ts
@@ -0,0 +1,78 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */ + +import { schema } from '@kbn/config-schema'; +import { getHighestTimeUnit, isValidRolloverInterval } from './utils'; + +const duration = (raw: string) => schema.duration().validate(raw); + +describe('getHighestTimeUnit', () => { + it('returns the highest time unit of the duration', () => { + expect(getHighestTimeUnit(duration('500ms'))).toEqual('millisecond'); + expect(getHighestTimeUnit(duration('30s'))).toEqual('second'); + expect(getHighestTimeUnit(duration('15m'))).toEqual('minute'); + expect(getHighestTimeUnit(duration('12h'))).toEqual('hour'); + expect(getHighestTimeUnit(duration('4d'))).toEqual('day'); + expect(getHighestTimeUnit(duration('3w'))).toEqual('week'); + expect(getHighestTimeUnit(duration('7M'))).toEqual('month'); + expect(getHighestTimeUnit(duration('7Y'))).toEqual('year'); + }); + + it('handles overflows', () => { + expect(getHighestTimeUnit(duration('2000ms'))).toEqual('second'); + expect(getHighestTimeUnit(duration('90s'))).toEqual('minute'); + expect(getHighestTimeUnit(duration('75m'))).toEqual('hour'); + expect(getHighestTimeUnit(duration('36h'))).toEqual('day'); + expect(getHighestTimeUnit(duration('9d'))).toEqual('week'); + expect(getHighestTimeUnit(duration('15w'))).toEqual('month'); + expect(getHighestTimeUnit(duration('23M'))).toEqual('year'); + }); +}); + +describe('isValidRolloverInterval', () => { + it('returns true if the interval does not overflow', () => { + expect(isValidRolloverInterval(duration('500ms'))).toEqual(true); + expect(isValidRolloverInterval(duration('30s'))).toEqual(true); + expect(isValidRolloverInterval(duration('15m'))).toEqual(true); + expect(isValidRolloverInterval(duration('12h'))).toEqual(true); + expect(isValidRolloverInterval(duration('4d'))).toEqual(true); + expect(isValidRolloverInterval(duration('3w'))).toEqual(true); + expect(isValidRolloverInterval(duration('7M'))).toEqual(true); + expect(isValidRolloverInterval(duration('7Y'))).toEqual(true); + }); + + it('returns false if the interval overflows to a non integer value', () => { + expect(isValidRolloverInterval(duration('2500ms'))).toEqual(false); + expect(isValidRolloverInterval(duration('90s'))).toEqual(false); + expect(isValidRolloverInterval(duration('75m'))).toEqual(false); + expect(isValidRolloverInterval(duration('36h'))).toEqual(false); + expect(isValidRolloverInterval(duration('9d'))).toEqual(false); + expect(isValidRolloverInterval(duration('15w'))).toEqual(false); + expect(isValidRolloverInterval(duration('23M'))).toEqual(false); + }); + + it('returns true if the interval overflows to an integer value', () => { + expect(isValidRolloverInterval(duration('2000ms'))).toEqual(true); + expect(isValidRolloverInterval(duration('120s'))).toEqual(true); + expect(isValidRolloverInterval(duration('240m'))).toEqual(true); + expect(isValidRolloverInterval(duration('48h'))).toEqual(true); + expect(isValidRolloverInterval(duration('14d'))).toEqual(true); + expect(isValidRolloverInterval(duration('24M'))).toEqual(true); + }); +}); diff --git a/src/core/server/logging/appenders/rolling_file/policies/time_interval/utils.ts b/src/core/server/logging/appenders/rolling_file/policies/time_interval/utils.ts new file mode 100644 index 0000000000000..ca2cbf31dfc6f --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/policies/time_interval/utils.ts @@ -0,0 +1,70 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { Duration, unitOfTime } from 'moment-timezone';
+
+/**
+ * Returns the highest time unit of the given duration
+ * (the highest unit with a value higher or equal to 1)
+ *
+ * @example
+ * ```
+ * getHighestTimeUnit(moment.duration(4, 'day'))
+ * // 'day'
+ * getHighestTimeUnit(moment.duration(90, 'minute'))
+ * // 'hour' - 90min = 1.5h
+ * getHighestTimeUnit(moment.duration(30, 'minute'))
+ * // 'minute' - 30min = 0.5h
+ * ```
+ */
+export const getHighestTimeUnit = (duration: Duration): unitOfTime.Base => {
+  if (duration.asYears() >= 1) {
+    return 'year';
+  }
+  if (duration.asMonths() >= 1) {
+    return 'month';
+  }
+  if (duration.asWeeks() >= 1) {
+    return 'week';
+  }
+  if (duration.asDays() >= 1) {
+    return 'day';
+  }
+  if (duration.asHours() >= 1) {
+    return 'hour';
+  }
+  if (duration.asMinutes() >= 1) {
+    return 'minute';
+  }
+  if (duration.asSeconds() >= 1) {
+    return 'second';
+  }
+  return 'millisecond';
+};
+
+/**
+ * Returns true if the given duration is valid to be used by the {@link TimeIntervalTriggeringPolicy | policy}
+ *
+ * See {@link TimeIntervalTriggeringPolicyConfig.interval} for rules and reasons around this validation.
+ */
+export const isValidRolloverInterval = (duration: Duration): boolean => {
+  const highestUnit = getHighestTimeUnit(duration);
+  const asHighestUnit = duration.as(highestUnit);
+  return Number.isInteger(asHighestUnit);
+};
diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts
index 2d80b02ac1a3e..aa23cd26a0fae 100644
--- a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts
+++ b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts
@@ -83,6 +83,7 @@ export class RollingFileAppender implements DisposableAppender {
    */
   constructor(config: RollingFileAppenderConfig) {
     this.context = new RollingFileContext(config.path);
+    this.context.refreshFileInfo();
     this.fileManager = new RollingFileManager(config.path, this.context);
     this.layout = Layouts.create(config.layout);
     this.policy = createTriggeringPolicy(config.policy, this.context);
diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts
index 3c500f4723124..0fb3068e69908 100644
--- a/src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts
+++ b/src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts
@@ -43,6 +43,8 @@ export class RollingFileManager {
       encoding: 'utf8',
       flags: 'a',
     });
+    // refresh the file meta in case it was not initialized yet.
+ this.context.refreshFileInfo(); } return this.outputStream!; } diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts index be2e7a3400cb4..e5d0da44428a3 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts @@ -22,7 +22,7 @@ import { schema } from '@kbn/config-schema'; import { readdir, unlink, rename } from '../fs'; import { RollingStrategy } from '../strategy'; import { RollingFileContext } from '../../rolling_file_context'; -import { getNumericMatcher, getNumericFileName } from './numeric_pattern_matcher'; +import { getNumericMatcher, getNumericFileName } from './pattern_matcher'; export interface NumericRollingStrategyConfig { kind: 'numeric'; diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_pattern_matcher.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.ts similarity index 100% rename from src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_pattern_matcher.ts rename to src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.ts From b5f098ffb4cbd1828adb4d4a96a637b2d32f70c0 Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Wed, 2 Dec 2020 12:00:35 +0100 Subject: [PATCH 05/23] fix some tests --- .../policies/time_interval/get_next_rolling_time.test.ts | 8 ++++---- .../strategies/numeric/pattern_matcher.test.ts | 2 +- src/core/server/logging/logging_system.test.ts | 7 ++++++- 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/src/core/server/logging/appenders/rolling_file/policies/time_interval/get_next_rolling_time.test.ts b/src/core/server/logging/appenders/rolling_file/policies/time_interval/get_next_rolling_time.test.ts index 5163637390645..66de78a89d7f8 100644 --- a/src/core/server/logging/appenders/rolling_file/policies/time_interval/get_next_rolling_time.test.ts +++ b/src/core/server/logging/appenders/rolling_file/policies/time_interval/get_next_rolling_time.test.ts @@ -17,7 +17,7 @@ * under the License. */ -import moment from 'moment'; +import moment from 'moment-timezone'; import { schema } from '@kbn/config-schema'; import { getNextRollingTime } from './get_next_rolling_time'; @@ -69,12 +69,12 @@ describe('getNextRollingTime', () => { expect(formattedRollingTime('2010-10-14 00:00:00:000', '1d', true)).toEqual( '2010-10-15 00:00:00:000' ); - expect(formattedRollingTime('2010-01-10 00:00:00:000', '2w', true)).toEqual( - '2010-01-24 00:00:00:000' + expect(formattedRollingTime('2010-01-03 00:00:00:000', '2w', true)).toEqual( + '2010-01-17 00:00:00:000' ); }); - it('increments a higher field when necessary', () => { + it('increments a higher unit when necessary', () => { expect(formattedRollingTime('2010-10-20 21:00:00:000', '9h', true)).toEqual( '2010-10-21 03:00:00:000' ); diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.test.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.test.ts index 47b7c44f9319c..00e08b9ff478d 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.test.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.test.ts @@ -17,7 +17,7 @@ * under the License. 
*/ -import { getNumericMatcher } from './numeric_pattern_matcher'; +import { getNumericMatcher } from './pattern_matcher'; describe('getNumericIndex', () => { it('returns the file index when the file matches the pattern', () => { diff --git a/src/core/server/logging/logging_system.test.ts b/src/core/server/logging/logging_system.test.ts index 2fca2f35cb032..29e0fef5f9f94 100644 --- a/src/core/server/logging/logging_system.test.ts +++ b/src/core/server/logging/logging_system.test.ts @@ -19,6 +19,7 @@ const mockStreamWrite = jest.fn(); jest.mock('fs', () => ({ + ...(jest.requireActual('fs') as any), constants: {}, createWriteStream: jest.fn(() => ({ write: mockStreamWrite })), })); @@ -32,16 +33,20 @@ jest.mock('@kbn/legacy-logging', () => ({ const timestamp = new Date(Date.UTC(2012, 1, 1, 14, 33, 22, 11)); let mockConsoleLog: jest.SpyInstance; +import { REPO_ROOT } from '@kbn/dev-utils'; import { createWriteStream } from 'fs'; const mockCreateWriteStream = (createWriteStream as unknown) as jest.Mock; +import { Env } from '../config'; +import { getEnvOptions } from '../config/mocks'; import { LoggingSystem, config } from '.'; let system: LoggingSystem; beforeEach(() => { mockConsoleLog = jest.spyOn(global.console, 'log').mockReturnValue(undefined); jest.spyOn(global, 'Date').mockImplementation(() => timestamp); - system = new LoggingSystem(); + const env = Env.createDefault(REPO_ROOT, getEnvOptions()); + system = new LoggingSystem(env); }); afterEach(() => { From d5d3fd132f5b0c8753b84f6b3ab5e8738a0f8a66 Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Wed, 2 Dec 2020 12:36:30 +0100 Subject: [PATCH 06/23] update documentation --- src/core/server/logging/README.md | 136 ++++++++++++++++++ .../policies/size_limit/size_limit_policy.ts | 2 +- .../time_interval/time_interval_policy.ts | 2 +- .../strategies/numeric/numeric_strategy.ts | 2 + 4 files changed, 140 insertions(+), 2 deletions(-) diff --git a/src/core/server/logging/README.md b/src/core/server/logging/README.md index 553dc7c36e824..1bc5b1dfc52fb 100644 --- a/src/core/server/logging/README.md +++ b/src/core/server/logging/README.md @@ -5,6 +5,10 @@ - [Layouts](#layouts) - [Pattern layout](#pattern-layout) - [JSON layout](#json-layout) +- [Appenders](#appenders) + - [Rolling File Appender](#rolling-file-appender) + - [Triggering Policies](#triggering-policies) + - [Rolling strategies](#rolling-strategies) - [Configuration](#configuration) - [Usage](#usage) - [Logging config migration](#logging-config-migration) @@ -127,6 +131,138 @@ Outputs the process ID. With `json` layout log messages will be formatted as JSON strings that include timestamp, log level, context, message text and any other metadata that may be associated with the log message itself. +## Appenders + +### Rolling File Appender + +Similar to Log4j's `RollingFileAppender`, this appender will log into a file, and rotate it following a rolling +strategy when the configured policy triggers. + +#### Triggering Policies + +The triggering policy determines when a rolling should occur. + +There are currently two policies supported: `size-limit` and `time-interval`. + +##### SizeLimitTriggeringPolicy + +This policy will rotate the file when it reaches a predetermined size. + +```yaml +logging: + appenders: + rolling-file: + kind: rolling-file + path: /var/logs/kibana.log + policy: + kind: size-limit + size: 50mb + strategy: + //... + layout: + kind: pattern +``` + +The options are: + +- `size` + +the maximum size the log file should reach before a rollover will be performed. 
+
+The default value is `100mb`.
+
+##### TimeIntervalTriggeringPolicy
+
+This policy will rotate the file every given interval of time.
+
+```yaml
+logging:
+  appenders:
+    rolling-file:
+      kind: rolling-file
+      path: /var/logs/kibana.log
+      policy:
+        kind: time-interval
+        interval: 10s
+        modulate: true
+      strategy:
+        //...
+      layout:
+        kind: pattern
+```
+
+The options are:
+
+- `interval`
+
+How often a rollover should occur.
+
+The default value is `24h`.
+
+- `modulate`
+
+Whether the interval should be adjusted to cause the next rollover to occur on the interval boundary.
+
+For example, when true, if the interval is `4h` and the current hour is 3 am then the first rollover will occur at 4 am
+and the next ones will occur at 8 am, noon, 4pm, etc.
+
+The default value is true.
+
+#### Rolling strategies
+
+The rolling strategies determine how the rollover should occur: both the naming of the rolled files,
+and their retention policy.
+
+There is currently only one strategy supported: `numeric`.
+
+##### NumericRollingStrategy
+
+This strategy will suffix the file with a given pattern when rolling,
+and will retain a fixed number of rolled files.
+
+```yaml
+logging:
+  appenders:
+    rolling-file:
+      kind: rolling-file
+      path: /var/logs/kibana.log
+      policy:
+        // ...
+      strategy:
+        kind: numeric
+        pattern: '-%i'
+        max: 2
+      layout:
+        kind: pattern
+```
+
+For example, with this configuration:
+
+- During the first rollover kibana.log is renamed to kibana-1.log. A new kibana.log file is created and starts
+  being written to.
+- During the second rollover kibana-1.log is renamed to kibana-2.log and kibana.log is renamed to kibana-1.log.
+  A new kibana.log file is created and starts being written to.
+- During the third and subsequent rollovers, kibana-2.log is deleted, kibana-1.log is renamed to kibana-2.log and
+  kibana.log is renamed to kibana-1.log. A new kibana.log file is created and starts being written to.
+
+The options are:
+
+- `pattern`
+
+The suffix to append to the file path when rolling. Must include `%i`, as this is the value
+that will be converted to the file index.
+
+For example, with `path: /var/logs/kibana.log` and `pattern: '-%i'`, the created rolling files
+will be `/var/logs/kibana-1.log`, `/var/logs/kibana-2.log`, and so on.
+
+The default value is `-%i`.
+
+- `max`
+
+The maximum number of files to keep.
Once this number is reached, oldest files will be deleted + +The default value is `7` + ## Configuration As any configuration in the platform, logging configuration is validated against the predefined schema and if there are diff --git a/src/core/server/logging/appenders/rolling_file/policies/size_limit/size_limit_policy.ts b/src/core/server/logging/appenders/rolling_file/policies/size_limit/size_limit_policy.ts index 975b09be6e078..cf3e90d0fbce1 100644 --- a/src/core/server/logging/appenders/rolling_file/policies/size_limit/size_limit_policy.ts +++ b/src/core/server/logging/appenders/rolling_file/policies/size_limit/size_limit_policy.ts @@ -33,7 +33,7 @@ export interface SizeLimitTriggeringPolicyConfig { export const sizeLimitTriggeringPolicyConfigSchema = schema.object({ kind: schema.literal('size-limit'), - size: schema.byteSize({ min: '1b' }), + size: schema.byteSize({ min: '1b', defaultValue: '100mb' }), }); /** diff --git a/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts b/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts index de7e2a851783c..e500c6f50ceed 100644 --- a/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts +++ b/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts @@ -40,7 +40,7 @@ export interface TimeIntervalTriggeringPolicyConfig { /** * Indicates whether the interval should be adjusted to cause the next rollover to occur on the interval boundary. * - * For example, if the item is hours, the current hour is 3 am and the interval is 4 then + * For example, if the interval is `4h` and the current hour is 3 am then * the first rollover will occur at 4 am and then next ones will occur at 8 am, noon, 4pm, etc. * The default value is true. */ diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts index e5d0da44428a3..c014a6d9b7373 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts @@ -30,6 +30,8 @@ export interface NumericRollingStrategyConfig { * The suffix pattern to apply when renaming a file. The suffix will be applied * after the `appender.path` file name, but before the file extension. 
* + * Must include `%i`, as it is the value that will be converted to the file index + * * @example * ```yaml * logging: From a56c61e9ff5f3e4172ca589d15a196152c4838ec Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Wed, 2 Dec 2020 12:43:01 +0100 Subject: [PATCH 07/23] update generated doc --- .../server/kibana-plugin-core-server.appenderconfigtype.md | 2 +- .../rolling_file/strategies/numeric/numeric_strategy.ts | 2 +- src/core/server/server.api.md | 5 ++++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/docs/development/core/server/kibana-plugin-core-server.appenderconfigtype.md b/docs/development/core/server/kibana-plugin-core-server.appenderconfigtype.md index 0838572f26f49..a50df950628b3 100644 --- a/docs/development/core/server/kibana-plugin-core-server.appenderconfigtype.md +++ b/docs/development/core/server/kibana-plugin-core-server.appenderconfigtype.md @@ -8,5 +8,5 @@ Signature: ```typescript -export declare type AppenderConfigType = ConsoleAppenderConfig | FileAppenderConfig | LegacyAppenderConfig; +export declare type AppenderConfigType = ConsoleAppenderConfig | FileAppenderConfig | LegacyAppenderConfig | RollingFileAppenderConfig; ``` diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts index c014a6d9b7373..5237031b67a97 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts @@ -47,7 +47,7 @@ export interface NumericRollingStrategyConfig { * * will create `/var/logs/kibana-1.log`, `/var/logs/kibana-2.log`, and so on. * - * Defaults to '-%i'. + * Defaults to `-%i`. 
*/ pattern: string; /** diff --git a/src/core/server/server.api.md b/src/core/server/server.api.md index 8dddff07a0e4c..a6257f5e933a2 100644 --- a/src/core/server/server.api.md +++ b/src/core/server/server.api.md @@ -7,6 +7,7 @@ import { ApiResponse } from '@elastic/elasticsearch/lib/Transport'; import Boom from '@hapi/boom'; import { BulkIndexDocumentsParams } from 'elasticsearch'; +import { ByteSizeValue } from '@kbn/config-schema'; import { CatAliasesParams } from 'elasticsearch'; import { CatAllocationParams } from 'elasticsearch'; import { CatCommonParams } from 'elasticsearch'; @@ -47,6 +48,7 @@ import { DeleteScriptParams } from 'elasticsearch'; import { DeleteTemplateParams } from 'elasticsearch'; import { DetailedPeerCertificate } from 'tls'; import { Duration } from 'moment'; +import { Duration as Duration_2 } from 'moment-timezone'; import { EnvironmentMode } from '@kbn/config'; import { ExistsParams } from 'elasticsearch'; import { ExplainParams } from 'elasticsearch'; @@ -177,9 +179,10 @@ export interface AppCategory { // Warning: (ae-forgotten-export) The symbol "ConsoleAppenderConfig" needs to be exported by the entry point index.d.ts // Warning: (ae-forgotten-export) The symbol "FileAppenderConfig" needs to be exported by the entry point index.d.ts // Warning: (ae-forgotten-export) The symbol "LegacyAppenderConfig" needs to be exported by the entry point index.d.ts +// Warning: (ae-forgotten-export) The symbol "RollingFileAppenderConfig" needs to be exported by the entry point index.d.ts // // @public (undocumented) -export type AppenderConfigType = ConsoleAppenderConfig | FileAppenderConfig | LegacyAppenderConfig; +export type AppenderConfigType = ConsoleAppenderConfig | FileAppenderConfig | LegacyAppenderConfig | RollingFileAppenderConfig; // @public @deprecated (undocumented) export interface AssistanceAPIResponse { From 6b6fc49779127fcd80498dea539c421e1019c1dd Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Wed, 2 Dec 2020 16:51:19 +0100 Subject: [PATCH 08/23] change applyBaseConfig to be async --- packages/kbn-logging/src/appenders.ts | 2 +- .../rolling_file/rolling_file_appender.ts | 61 +++++++++++++++---- .../strategies/numeric/numeric_strategy.ts | 15 +++-- .../server/logging/logging_system.mock.ts | 1 + .../server/logging/logging_system.test.ts | 52 ++++++++-------- src/core/server/logging/logging_system.ts | 20 +++--- src/core/server/root/index.test.ts | 1 + src/core/server/root/index.ts | 4 +- 8 files changed, 103 insertions(+), 53 deletions(-) diff --git a/packages/kbn-logging/src/appenders.ts b/packages/kbn-logging/src/appenders.ts index 346d3d6dd1068..a82a95b6b0f8a 100644 --- a/packages/kbn-logging/src/appenders.ts +++ b/packages/kbn-logging/src/appenders.ts @@ -35,5 +35,5 @@ export interface Appender { * @internal */ export interface DisposableAppender extends Appender { - dispose: () => void; + dispose: () => void | Promise; } diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts index aa23cd26a0fae..2edc2e1d97e01 100644 --- a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts +++ b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts @@ -71,6 +71,9 @@ export class RollingFileAppender implements DisposableAppender { }); private isRolling = false; + private disposed = false; + private rollingPromise?: Promise; + private readonly layout: Layout; private readonly context: RollingFileContext; private readonly 
fileManager: RollingFileManager; @@ -100,43 +103,77 @@ export class RollingFileAppender implements DisposableAppender { // into the buffer, which will be flushed once rolling is complete if (this.isRolling) { this.buffer.append(record); + return; } - if (this.needRollover(record)) { + if (this.needRollout(record)) { this.buffer.append(record); - this.performRollover(); - } else { - this.fileManager.write(`${this.layout.format(record)}\n`); + this.rollingPromise = this.performRollout(); + return; } + + this._writeToFile(record); + } + + private _writeToFile(record: LogRecord) { + this.fileManager.write(`${this.layout.format(record)}\n`); } /** * Disposes `FileAppender`. Waits for the underlying file stream to be completely flushed and closed. */ public async dispose() { + if (this.disposed) { + return; + } + this.disposed = true; + if (this.rollingPromise) { + await this.rollingPromise; + } await this.buffer.dispose(); await this.fileManager.closeStream(); } - private async performRollover() { + private async performRollout() { + if (this.isRolling) { + return; + } + this.isRolling = true; try { - this.isRolling = true; await this.strategy.rollout(); await this.fileManager.closeStream(); + this.rollingPromise = undefined; this.isRolling = false; - const pendingLogs = this.buffer.flush(); - for (const log of pendingLogs) { - this.append(log); - } } catch (e) { // eslint-disable-next-line no-console console.log('Error while rolling file: ', e); + throw e; + } finally { + this.flushBuffer(); + } + } + + private flushBuffer() { + const pendingLogs = this.buffer.flush(); + // in some extreme rare scenario, `dispose` can be called during a rollover + // where the internal buffered logs would trigger another rollover + // (rollover started, logs keep coming and got buffered, dispose is called, rollover ends and we then flush) + // this would cause a second rollover that would not be awaited + // for, and could result in a race with the newly created appender + // that would also be performing a rollover. + // so if we are disposed, we just flush the buffer directly to the file instead to avoid loosing the entries. 
+ for (const log of pendingLogs) { + if (this.disposed) { + this._writeToFile(log); + } else { + this.append(log); + } } } /** - * Checks if the current even should trigger a rollover + * Checks if the current even should trigger a rollout */ - private needRollover(record: LogRecord) { + private needRollout(record: LogRecord) { return this.policy.isTriggeringEvent(record); } } diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts index 5237031b67a97..946ff367cf2c4 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts @@ -57,12 +57,16 @@ export interface NumericRollingStrategyConfig { max: number; } -// %d{MM-dd-yyyy} -// %i - export const numericRollingStrategyConfigSchema = schema.object({ kind: schema.literal('numeric'), - pattern: schema.string({ defaultValue: '-%i' }), // TODO: validate + pattern: schema.string({ + defaultValue: '-%i', + validate: (pattern) => { + if (!pattern.includes('%i')) { + return `pattern must include '%i'`; + } + }, + }), max: schema.number({ min: 1, defaultValue: 7 }), }); @@ -134,5 +138,8 @@ export class NumericRollingStrategy implements RollingStrategy { const currentFileNewName = getNumericFileName(logFileBaseName, this.config.pattern, 1); // console.log('*** will roll ', logFileBaseName, currentFileNewName); await rename(this.context.filePath, join(logFileFolder, currentFileNewName)); + + // updates the context file info to mirror the new size and date + this.context.refreshFileInfo(); } } diff --git a/src/core/server/logging/logging_system.mock.ts b/src/core/server/logging/logging_system.mock.ts index 6ea784be5411f..35d7caf0914e7 100644 --- a/src/core/server/logging/logging_system.mock.ts +++ b/src/core/server/logging/logging_system.mock.ts @@ -42,6 +42,7 @@ const createLoggingSystemMock = () => { context, })); mocked.asLoggerFactory.mockImplementation(() => mocked); + mocked.upgrade.mockResolvedValue(undefined); mocked.stop.mockResolvedValue(); return mocked; }; diff --git a/src/core/server/logging/logging_system.test.ts b/src/core/server/logging/logging_system.test.ts index 29e0fef5f9f94..c0b731fe1836a 100644 --- a/src/core/server/logging/logging_system.test.ts +++ b/src/core/server/logging/logging_system.test.ts @@ -72,7 +72,7 @@ test('uses default memory buffer logger until config is provided', () => { expect(bufferAppendSpy.mock.calls[1][0]).toMatchSnapshot({ pid: expect.any(Number) }); }); -test('flushes memory buffer logger and switches to real logger once config is provided', () => { +test('flushes memory buffer logger and switches to real logger once config is provided', async () => { const logger = system.get('test', 'context'); logger.trace('buffered trace message'); @@ -82,7 +82,7 @@ test('flushes memory buffer logger and switches to real logger once config is pr const bufferAppendSpy = jest.spyOn((system as any).bufferAppender, 'append'); // Switch to console appender with `info` level, so that `trace` message won't go through. 
- system.upgrade( + await system.upgrade( config.schema.validate({ appenders: { default: { kind: 'console', layout: { kind: 'json' } } }, root: { level: 'info' }, @@ -101,7 +101,7 @@ test('flushes memory buffer logger and switches to real logger once config is pr expect(bufferAppendSpy).not.toHaveBeenCalled(); }); -test('appends records via multiple appenders.', () => { +test('appends records via multiple appenders.', async () => { const loggerWithoutConfig = system.get('some-context'); const testsLogger = system.get('tests'); const testsChildLogger = system.get('tests', 'child'); @@ -114,7 +114,7 @@ test('appends records via multiple appenders.', () => { expect(mockConsoleLog).not.toHaveBeenCalled(); expect(mockCreateWriteStream).not.toHaveBeenCalled(); - system.upgrade( + await system.upgrade( config.schema.validate({ appenders: { default: { kind: 'console', layout: { kind: 'pattern' } }, @@ -136,8 +136,8 @@ test('appends records via multiple appenders.', () => { expect(mockStreamWrite.mock.calls[1][0]).toMatchSnapshot('file logs'); }); -test('uses `root` logger if context is not specified.', () => { - system.upgrade( +test('uses `root` logger if context is not specified.', async () => { + await system.upgrade( config.schema.validate({ appenders: { default: { kind: 'console', layout: { kind: 'pattern' } } }, }) @@ -150,7 +150,7 @@ test('uses `root` logger if context is not specified.', () => { }); test('`stop()` disposes all appenders.', async () => { - system.upgrade( + await system.upgrade( config.schema.validate({ appenders: { default: { kind: 'console', layout: { kind: 'json' } } }, root: { level: 'info' }, @@ -166,10 +166,10 @@ test('`stop()` disposes all appenders.', async () => { expect(consoleDisposeSpy).toHaveBeenCalledTimes(1); }); -test('asLoggerFactory() only allows to create new loggers.', () => { +test('asLoggerFactory() only allows to create new loggers.', async () => { const logger = system.asLoggerFactory().get('test', 'context'); - system.upgrade( + await system.upgrade( config.schema.validate({ appenders: { default: { kind: 'console', layout: { kind: 'json' } } }, root: { level: 'all' }, @@ -188,19 +188,19 @@ test('asLoggerFactory() only allows to create new loggers.', () => { expect(JSON.parse(mockConsoleLog.mock.calls[2][0])).toMatchSnapshot(dynamicProps); }); -test('setContextConfig() updates config with relative contexts', () => { +test('setContextConfig() updates config with relative contexts', async () => { const testsLogger = system.get('tests'); const testsChildLogger = system.get('tests', 'child'); const testsGrandchildLogger = system.get('tests', 'child', 'grandchild'); - system.upgrade( + await system.upgrade( config.schema.validate({ appenders: { default: { kind: 'console', layout: { kind: 'json' } } }, root: { level: 'info' }, }) ); - system.setContextConfig(['tests', 'child'], { + await system.setContextConfig(['tests', 'child'], { appenders: new Map([ [ 'custom', @@ -243,19 +243,19 @@ test('setContextConfig() updates config with relative contexts', () => { ); }); -test('setContextConfig() updates config for a root context', () => { +test('setContextConfig() updates config for a root context', async () => { const testsLogger = system.get('tests'); const testsChildLogger = system.get('tests', 'child'); const testsGrandchildLogger = system.get('tests', 'child', 'grandchild'); - system.upgrade( + await system.upgrade( config.schema.validate({ appenders: { default: { kind: 'console', layout: { kind: 'json' } } }, root: { level: 'info' }, }) ); - 
system.setContextConfig(['tests', 'child'], { + await system.setContextConfig(['tests', 'child'], { appenders: new Map([ [ 'custom', @@ -288,8 +288,8 @@ test('setContextConfig() updates config for a root context', () => { ); }); -test('custom context configs are applied on subsequent calls to update()', () => { - system.setContextConfig(['tests', 'child'], { +test('custom context configs are applied on subsequent calls to update()', async () => { + await system.setContextConfig(['tests', 'child'], { appenders: new Map([ [ 'custom', @@ -300,7 +300,7 @@ test('custom context configs are applied on subsequent calls to update()', () => }); // Calling upgrade after setContextConfig should not throw away the context-specific config - system.upgrade( + await system.upgrade( config.schema.validate({ appenders: { default: { kind: 'console', layout: { kind: 'json' } } }, root: { level: 'info' }, @@ -325,15 +325,15 @@ test('custom context configs are applied on subsequent calls to update()', () => ); }); -test('subsequent calls to setContextConfig() for the same context override the previous config', () => { - system.upgrade( +test('subsequent calls to setContextConfig() for the same context override the previous config', async () => { + await system.upgrade( config.schema.validate({ appenders: { default: { kind: 'console', layout: { kind: 'json' } } }, root: { level: 'info' }, }) ); - system.setContextConfig(['tests', 'child'], { + await system.setContextConfig(['tests', 'child'], { appenders: new Map([ [ 'custom', @@ -344,7 +344,7 @@ test('subsequent calls to setContextConfig() for the same context override the p }); // Call again, this time with level: 'warn' and a different pattern - system.setContextConfig(['tests', 'child'], { + await system.setContextConfig(['tests', 'child'], { appenders: new Map([ [ 'custom', @@ -375,15 +375,15 @@ test('subsequent calls to setContextConfig() for the same context override the p ); }); -test('subsequent calls to setContextConfig() for the same context can disable the previous config', () => { - system.upgrade( +test('subsequent calls to setContextConfig() for the same context can disable the previous config', async () => { + await system.upgrade( config.schema.validate({ appenders: { default: { kind: 'console', layout: { kind: 'json' } } }, root: { level: 'info' }, }) ); - system.setContextConfig(['tests', 'child'], { + await system.setContextConfig(['tests', 'child'], { appenders: new Map([ [ 'custom', @@ -394,7 +394,7 @@ test('subsequent calls to setContextConfig() for the same context can disable th }); // Call again, this time no customizations (effectively disabling) - system.setContextConfig(['tests', 'child'], {}); + await system.setContextConfig(['tests', 'child'], {}); const logger = system.get('tests', 'child', 'grandchild'); logger.debug('this should not show anywhere!'); diff --git a/src/core/server/logging/logging_system.ts b/src/core/server/logging/logging_system.ts index 9bdd1c96264da..adb493dbbe4fa 100644 --- a/src/core/server/logging/logging_system.ts +++ b/src/core/server/logging/logging_system.ts @@ -71,12 +71,12 @@ export class LoggingSystem implements LoggerFactory { * Updates all current active loggers with the new config values. * @param rawConfig New config instance. */ - public upgrade(rawConfig: LoggingConfigType) { + public async upgrade(rawConfig: LoggingConfigType) { // We only want the console appender for the CLI process, // so we use the 'default' configuration as defined by the schema. 
const usedConfig = this.env.isDevCliParent ? loggingConfig.schema.validate({}) : rawConfig;
     const config = new LoggingConfig(usedConfig)!;
-    this.applyBaseConfig(config);
+    await this.applyBaseConfig(config);
   }
 
   /**
@@ -100,7 +100,7 @@ export class LoggingSystem implements LoggerFactory {
    * @param baseContextParts
    * @param rawConfig
    */
-  public setContextConfig(baseContextParts: string[], rawConfig: LoggerContextConfigInput) {
+  public async setContextConfig(baseContextParts: string[], rawConfig: LoggerContextConfigInput) {
     const context = LoggingConfig.getLoggerContext(baseContextParts);
     const contextConfig = loggerContextConfigSchema.validate(rawConfig);
     this.contextConfigs.set(context, {
@@ -117,7 +117,7 @@
     // If we already have a base config, apply the config. If not, custom context configs
     // will be picked up on next call to `upgrade`.
     if (this.baseConfig) {
-      this.applyBaseConfig(this.baseConfig);
+      await this.applyBaseConfig(this.baseConfig);
     }
   }
 
@@ -161,17 +161,21 @@
     return this.getLoggerConfigByContext(config, LoggingConfig.getParentLoggerContext(context));
   }
 
-  private applyBaseConfig(newBaseConfig: LoggingConfig) {
+  private async applyBaseConfig(newBaseConfig: LoggingConfig) {
     const computedConfig = [...this.contextConfigs.values()].reduce(
       (baseConfig, contextConfig) => baseConfig.extend(contextConfig),
       newBaseConfig
     );
 
+    // reconfigure all the loggers without configuration to have them use the buffer
+    // appender while we are waiting for the appenders to be disposed.
+    for (const [loggerKey, loggerAdapter] of this.loggers) {
+      loggerAdapter.updateLogger(this.createLogger(loggerKey, undefined));
+    }
+
     // Appenders must be reset, so we first dispose of the current ones, then
     // build up a new set of appenders. 
- for (const appender of this.appenders.values()) { - appender.dispose(); - } + await Promise.all([...this.appenders.values()].map((a) => a.dispose())); this.appenders.clear(); for (const [appenderKey, appenderConfig] of computedConfig.appenders) { diff --git a/src/core/server/root/index.test.ts b/src/core/server/root/index.test.ts index 4d3fe24c7ba83..1ad0bcde0ca0e 100644 --- a/src/core/server/root/index.test.ts +++ b/src/core/server/root/index.test.ts @@ -33,6 +33,7 @@ let mockConsoleError: jest.SpyInstance; beforeEach(() => { jest.spyOn(global.process, 'exit').mockReturnValue(undefined as never); mockConsoleError = jest.spyOn(console, 'error').mockReturnValue(undefined); + logger.upgrade.mockResolvedValue(undefined); rawConfigService.getConfig$.mockReturnValue(new BehaviorSubject({ someValue: 'foo' })); configService.atPath.mockReturnValue(new BehaviorSubject({ someValue: 'foo' })); }); diff --git a/src/core/server/root/index.ts b/src/core/server/root/index.ts index ccc88f67b4182..a68eeec9d2307 100644 --- a/src/core/server/root/index.ts +++ b/src/core/server/root/index.ts @@ -18,7 +18,7 @@ */ import { ConnectableObservable, Subscription } from 'rxjs'; -import { first, map, publishReplay, switchMap, tap } from 'rxjs/operators'; +import { first, publishReplay, switchMap, concatMap, tap } from 'rxjs/operators'; import { Env, RawConfigurationProvider } from '../config'; import { Logger, LoggerFactory, LoggingConfigType, LoggingSystem } from '../logging'; @@ -99,7 +99,7 @@ export class Root { const update$ = configService.getConfig$().pipe( // always read the logging config when the underlying config object is re-read switchMap(() => configService.atPath('logging')), - map((config) => this.loggingSystem.upgrade(config)), + concatMap((config) => this.loggingSystem.upgrade(config)), // This specifically console.logs because we were not able to configure the logger. // eslint-disable-next-line no-console tap({ error: (err) => console.error('Configuring logger failed:', err) }), From a04d0c750955ab8063f13f5f5a982b89bdaf15a5 Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Wed, 2 Dec 2020 20:09:22 +0100 Subject: [PATCH 09/23] fix integ tests --- .../logging/integration_tests/logging.test.ts | 26 ++++++++++++------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/src/core/server/logging/integration_tests/logging.test.ts b/src/core/server/logging/integration_tests/logging.test.ts index 7f6059567c46e..bf9934b64a419 100644 --- a/src/core/server/logging/integration_tests/logging.test.ts +++ b/src/core/server/logging/integration_tests/logging.test.ts @@ -146,12 +146,18 @@ describe('logging service', () => { ], }; + const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)); + let root: ReturnType; let setup: InternalCoreSetup; let mockConsoleLog: jest.SpyInstance; const loggingConfig$ = new Subject(); - const setContextConfig = (enable: boolean) => - enable ? loggingConfig$.next(CUSTOM_LOGGING_CONFIG) : loggingConfig$.next({}); + const setContextConfig = async (enable: boolean) => { + loggingConfig$.next(enable ? CUSTOM_LOGGING_CONFIG : {}); + // need to wait for config to reload. 
nextTick is enough, using delay just to be sure + await delay(10); + }; + beforeAll(async () => { mockConsoleLog = jest.spyOn(global.console, 'log'); root = kbnTestServer.createRoot(); @@ -171,12 +177,12 @@ describe('logging service', () => { it('does not write to custom appenders when not configured', async () => { const logger = root.logger.get('plugins.myplugin.debug_pattern'); - setContextConfig(false); + await setContextConfig(false); logger.info('log1'); - setContextConfig(true); + await setContextConfig(true); logger.debug('log2'); logger.info('log3'); - setContextConfig(false); + await setContextConfig(false); logger.info('log4'); expect(mockConsoleLog).toHaveBeenCalledTimes(2); expect(mockConsoleLog).toHaveBeenCalledWith( @@ -188,7 +194,7 @@ describe('logging service', () => { }); it('writes debug_json context to custom JSON appender', async () => { - setContextConfig(true); + await setContextConfig(true); const logger = root.logger.get('plugins.myplugin.debug_json'); logger.debug('log1'); logger.info('log2'); @@ -214,7 +220,7 @@ describe('logging service', () => { }); it('writes info_json context to custom JSON appender', async () => { - setContextConfig(true); + await setContextConfig(true); const logger = root.logger.get('plugins.myplugin.info_json'); logger.debug('i should not be logged!'); logger.info('log2'); @@ -230,7 +236,7 @@ describe('logging service', () => { }); it('writes debug_pattern context to custom pattern appender', async () => { - setContextConfig(true); + await setContextConfig(true); const logger = root.logger.get('plugins.myplugin.debug_pattern'); logger.debug('log1'); logger.info('log2'); @@ -245,7 +251,7 @@ describe('logging service', () => { }); it('writes info_pattern context to custom pattern appender', async () => { - setContextConfig(true); + await setContextConfig(true); const logger = root.logger.get('plugins.myplugin.info_pattern'); logger.debug('i should not be logged!'); logger.info('log2'); @@ -256,7 +262,7 @@ describe('logging service', () => { }); it('writes all context to both appenders', async () => { - setContextConfig(true); + await setContextConfig(true); const logger = root.logger.get('plugins.myplugin.all'); logger.debug('log1'); logger.info('log2'); From 91f14202017c13e85f3f3d83c8b5a1235ad54fde Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Wed, 2 Dec 2020 21:58:32 +0100 Subject: [PATCH 10/23] add integration tests --- .../rolling_file/rolling_file_appender.ts | 5 +- .../appenders/rolling_file/strategies/fs.ts | 1 + .../strategies/numeric/numeric_strategy.ts | 42 ++-- .../rolling_file_appender.test.ts | 220 ++++++++++++++++++ 4 files changed, 247 insertions(+), 21 deletions(-) create mode 100644 src/core/server/logging/integration_tests/rolling_file_appender.test.ts diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts index 2edc2e1d97e01..e35704064d02c 100644 --- a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts +++ b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts @@ -146,7 +146,6 @@ export class RollingFileAppender implements DisposableAppender { } catch (e) { // eslint-disable-next-line no-console console.log('Error while rolling file: ', e); - throw e; } finally { this.flushBuffer(); } @@ -154,11 +153,11 @@ export class RollingFileAppender implements DisposableAppender { private flushBuffer() { const pendingLogs = this.buffer.flush(); - // in some extreme rare scenario, `dispose` 
can be called during a rollover + // in some extreme scenarios, `dispose` can be called during a rollover // where the internal buffered logs would trigger another rollover // (rollover started, logs keep coming and got buffered, dispose is called, rollover ends and we then flush) // this would cause a second rollover that would not be awaited - // for, and could result in a race with the newly created appender + // and could result in a race with the newly created appender // that would also be performing a rollover. // so if we are disposed, we just flush the buffer directly to the file instead to avoid loosing the entries. for (const log of pendingLogs) { diff --git a/src/core/server/logging/appenders/rolling_file/strategies/fs.ts b/src/core/server/logging/appenders/rolling_file/strategies/fs.ts index ddfbdc298d012..5b089cc108de9 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/fs.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/fs.ts @@ -23,3 +23,4 @@ import { promisify } from 'util'; export const readdir = promisify(Fs.readdir); export const unlink = promisify(Fs.unlink); export const rename = promisify(Fs.rename); +export const exists = promisify(Fs.exists); diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts index 946ff367cf2c4..afb1bd831b72e 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts @@ -19,7 +19,7 @@ import { basename, dirname, join } from 'path'; import { schema } from '@kbn/config-schema'; -import { readdir, unlink, rename } from '../fs'; +import { readdir, unlink, rename, exists } from '../fs'; import { RollingStrategy } from '../strategy'; import { RollingFileContext } from '../../rolling_file_context'; import { getNumericMatcher, getNumericFileName } from './pattern_matcher'; @@ -72,7 +72,7 @@ export const numericRollingStrategyConfigSchema = schema.object({ /** * A rolling strategy that will suffix the file with a given pattern when rolling, - * and will only retains a fixed amount of rolled file. + * and will only retain a fixed amount of rolled files. * * @example * ```yaml @@ -96,19 +96,29 @@ export const numericRollingStrategyConfigSchema = schema.object({ * See {@link NumericRollingStrategyConfig} for more details. */ export class NumericRollingStrategy implements RollingStrategy { + private readonly logFilePath; + private readonly logFileBaseName; + private readonly logFileFolder; + constructor( private readonly config: NumericRollingStrategyConfig, private readonly context: RollingFileContext - ) {} + ) { + this.logFilePath = this.context.filePath; + this.logFileBaseName = basename(this.context.filePath); + this.logFileFolder = dirname(this.context.filePath); + } async rollout() { - // console.log('***** performing rolling'); - - const logFileBaseName = basename(this.context.filePath); - const logFileFolder = dirname(this.context.filePath); + // in case of time-interval triggering policy, we can have an entire + // interval without any log event. 
In that case, the log file is not even + // present, and we should not perform the rollout + if (!(await exists(this.logFilePath))) { + return; + } - const matcher = getNumericMatcher(logFileBaseName, this.config.pattern); - const dirContent = await readdir(logFileFolder); + const matcher = getNumericMatcher(this.logFileBaseName, this.config.pattern); + const dirContent = await readdir(this.logFileFolder); const orderedFiles = dirContent .map((fileName) => ({ @@ -123,21 +133,17 @@ export class NumericRollingStrategy implements RollingStrategy { const filesToDelete = orderedFiles.slice(filesToRoll.length, orderedFiles.length); for (const fileToDelete of filesToDelete) { - // console.log('*** will delete ', fileToDelete); - - await unlink(join(logFileFolder, fileToDelete)); + await unlink(join(this.logFileFolder, fileToDelete)); } for (let i = filesToRoll.length - 1; i >= 0; i--) { const oldFileName = filesToRoll[i]; - const newFileName = getNumericFileName(logFileBaseName, this.config.pattern, i + 2); - // console.log('*** will roll ', oldFileName, newFileName); - await rename(join(logFileFolder, oldFileName), join(logFileFolder, newFileName)); + const newFileName = getNumericFileName(this.logFileBaseName, this.config.pattern, i + 2); + await rename(join(this.logFileFolder, oldFileName), join(this.logFileFolder, newFileName)); } - const currentFileNewName = getNumericFileName(logFileBaseName, this.config.pattern, 1); - // console.log('*** will roll ', logFileBaseName, currentFileNewName); - await rename(this.context.filePath, join(logFileFolder, currentFileNewName)); + const currentFileNewName = getNumericFileName(this.logFileBaseName, this.config.pattern, 1); + await rename(this.context.filePath, join(this.logFileFolder, currentFileNewName)); // updates the context file info to mirror the new size and date this.context.refreshFileInfo(); diff --git a/src/core/server/logging/integration_tests/rolling_file_appender.test.ts b/src/core/server/logging/integration_tests/rolling_file_appender.test.ts new file mode 100644 index 0000000000000..b83e1d2821520 --- /dev/null +++ b/src/core/server/logging/integration_tests/rolling_file_appender.test.ts @@ -0,0 +1,220 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { join } from 'path'; +import { rmdirSync, mkdtempSync, readFileSync, readdirSync } from 'fs'; +import moment from 'moment-timezone'; +import * as kbnTestServer from '../../../test_helpers/kbn_server'; +import { getNextRollingTime } from '../appenders/rolling_file/policies/time_interval/get_next_rolling_time'; + +const flushDelay = 250; +const delay = (waitInMs: number) => new Promise((resolve) => setTimeout(resolve, waitInMs)); +const flush = async () => delay(flushDelay); + +function createRoot(appenderConfig: any) { + return kbnTestServer.createRoot({ + logging: { + silent: true, // set "true" in kbnTestServer + appenders: { + 'rolling-file': appenderConfig, + }, + loggers: [ + { + context: 'test.rolling.file', + appenders: ['rolling-file'], + level: 'debug', + }, + ], + }, + }); +} + +describe('RollingFileAppender', () => { + let root: ReturnType; + let testDir: string; + let logFile: string; + + const getFileContent = (basename: string) => + readFileSync(join(testDir, basename)).toString('utf-8'); + + beforeEach(async () => { + testDir = mkdtempSync('rolling-test'); + logFile = join(testDir, 'kibana.log'); + }); + + afterEach(async () => { + try { + rmdirSync(testDir); + } catch (e) { + /* trap */ + } + if (root) { + await root.shutdown(); + } + }); + + const message = (index: number) => `some message of around 40 bytes number ${index}`; + const expectedFileContent = (indices: number[]) => indices.map(message).join('\n') + '\n'; + + describe('`size-limit` policy with `numeric` strategy', () => { + it('rolls the log file in the correct order', async () => { + root = createRoot({ + kind: 'rolling-file', + path: logFile, + layout: { + kind: 'pattern', + pattern: '%message', + }, + policy: { + kind: 'size-limit', + size: '100b', + }, + strategy: { + kind: 'numeric', + max: 5, + pattern: '.%i', + }, + }); + await root.setup(); + + const logger = root.logger.get('test.rolling.file'); + + // size = 100b, message.length ~= 40b, should roll every 3 message + + // last file - 'kibana.2.log' + logger.info(message(1)); + logger.info(message(2)); + logger.info(message(3)); + // roll - 'kibana.1.log' + logger.info(message(4)); + logger.info(message(5)); + logger.info(message(6)); + // roll - 'kibana.log' + logger.info(message(7)); + + await flush(); + + const files = readdirSync(testDir).sort(); + + expect(files).toEqual(['kibana.1.log', 'kibana.2.log', 'kibana.log']); + expect(getFileContent('kibana.log')).toEqual(expectedFileContent([7])); + expect(getFileContent('kibana.1.log')).toEqual(expectedFileContent([4, 5, 6])); + expect(getFileContent('kibana.2.log')).toEqual(expectedFileContent([1, 2, 3])); + }); + + it('only keep the correct number of files', async () => { + root = createRoot({ + kind: 'rolling-file', + path: logFile, + layout: { + kind: 'pattern', + pattern: '%message', + }, + policy: { + kind: 'size-limit', + size: '60b', + }, + strategy: { + kind: 'numeric', + max: 2, + pattern: '-%i', + }, + }); + await root.setup(); + + const logger = root.logger.get('test.rolling.file'); + + // size = 60b, message.length ~= 40b, should roll every 2 message + + // last file - 'kibana-3.log' (which will be removed during the last rolling) + logger.info(message(1)); + logger.info(message(2)); + // roll - 'kibana-2.log' + logger.info(message(3)); + logger.info(message(4)); + // roll - 'kibana-1.log' + logger.info(message(5)); + logger.info(message(6)); + // roll - 'kibana.log' + logger.info(message(7)); + logger.info(message(8)); + + await flush(); + + const files = 
readdirSync(testDir).sort(); + + expect(files).toEqual(['kibana-1.log', 'kibana-2.log', 'kibana.log']); + expect(getFileContent('kibana.log')).toEqual(expectedFileContent([7, 8])); + expect(getFileContent('kibana-1.log')).toEqual(expectedFileContent([5, 6])); + expect(getFileContent('kibana-2.log')).toEqual(expectedFileContent([3, 4])); + }); + }); + + describe('`time-interval` policy with `numeric` strategy', () => { + it('rolls the log file at the given interval', async () => { + root = createRoot({ + kind: 'rolling-file', + path: logFile, + layout: { + kind: 'pattern', + pattern: '%message', + }, + policy: { + kind: 'time-interval', + interval: '1s', + modulate: true, + }, + strategy: { + kind: 'numeric', + max: 2, + pattern: '-%i', + }, + }); + await root.setup(); + + const logger = root.logger.get('test.rolling.file'); + + const waitForNextRollingTime = () => { + const now = Date.now(); + const nextRolling = getNextRollingTime(now, moment.duration(1, 'second'), true); + return delay(nextRolling - now + 1); + }; + + // wait for a rolling time boundary to minimize the risk to have logs emitted in different intervals + // the `1s` interval should be way more than enough to log 2 messages + await waitForNextRollingTime(); + + logger.info(message(1)); + logger.info(message(2)); + + await waitForNextRollingTime(); + + logger.info(message(3)); + logger.info(message(4)); + + await flush(); + + const files = readdirSync(testDir).sort(); + + expect(files).toEqual(['kibana-1.log', 'kibana.log']); + expect(getFileContent('kibana.log')).toEqual(expectedFileContent([3, 4])); + expect(getFileContent('kibana-1.log')).toEqual(expectedFileContent([1, 2])); + }); + }); +}); From 8165f353840b01e68334b261b3ce9bf7e0ee36fd Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Thu, 3 Dec 2020 08:55:40 +0100 Subject: [PATCH 11/23] some renames --- .../strategies/numeric/numeric_strategy.ts | 8 +-- .../numeric/pattern_matcher.test.ts | 33 +++++++++--- .../strategies/numeric/pattern_matcher.ts | 50 ++++++++++++------- 3 files changed, 62 insertions(+), 29 deletions(-) diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts index afb1bd831b72e..2d1d4d1ad7979 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts @@ -22,7 +22,7 @@ import { schema } from '@kbn/config-schema'; import { readdir, unlink, rename, exists } from '../fs'; import { RollingStrategy } from '../strategy'; import { RollingFileContext } from '../../rolling_file_context'; -import { getNumericMatcher, getNumericFileName } from './pattern_matcher'; +import { getFileNameMatcher, getRollingFileName } from './pattern_matcher'; export interface NumericRollingStrategyConfig { kind: 'numeric'; @@ -117,7 +117,7 @@ export class NumericRollingStrategy implements RollingStrategy { return; } - const matcher = getNumericMatcher(this.logFileBaseName, this.config.pattern); + const matcher = getFileNameMatcher(this.logFileBaseName, this.config.pattern); const dirContent = await readdir(this.logFileFolder); const orderedFiles = dirContent @@ -138,11 +138,11 @@ export class NumericRollingStrategy implements RollingStrategy { for (let i = filesToRoll.length - 1; i >= 0; i--) { const oldFileName = filesToRoll[i]; - const newFileName = getNumericFileName(this.logFileBaseName, this.config.pattern, i + 2); + 
const newFileName = getRollingFileName(this.logFileBaseName, this.config.pattern, i + 2); await rename(join(this.logFileFolder, oldFileName), join(this.logFileFolder, newFileName)); } - const currentFileNewName = getNumericFileName(this.logFileBaseName, this.config.pattern, 1); + const currentFileNewName = getRollingFileName(this.logFileBaseName, this.config.pattern, 1); await rename(this.context.filePath, join(this.logFileFolder, currentFileNewName)); // updates the context file info to mirror the new size and date diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.test.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.test.ts index 00e08b9ff478d..8f29ff3346130 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.test.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.test.ts @@ -17,32 +17,49 @@ * under the License. */ -import { getNumericMatcher } from './pattern_matcher'; +import { getFileNameMatcher, getRollingFileName } from './pattern_matcher'; -describe('getNumericIndex', () => { +describe('getFileNameMatcher', () => { it('returns the file index when the file matches the pattern', () => { - const matcher = getNumericMatcher('log.json', '.%i'); + const matcher = getFileNameMatcher('log.json', '.%i'); expect(matcher('log.1.json')).toEqual(1); expect(matcher('log.12.json')).toEqual(12); }); it('handles special characters in the pattern', () => { - const matcher = getNumericMatcher('kibana.log', '-{%i}'); + const matcher = getFileNameMatcher('kibana.log', '-{%i}'); expect(matcher('kibana-{1}.log')).toEqual(1); }); it('returns undefined when the file does not match the pattern', () => { - const matcher = getNumericMatcher('log.json', '.%i'); + const matcher = getFileNameMatcher('log.json', '.%i'); expect(matcher('log.1.text')).toBeUndefined(); expect(matcher('log*1.json')).toBeUndefined(); expect(matcher('log.2foo.json')).toBeUndefined(); }); it('handles multiple extensions', () => { - const matcher = getNumericMatcher('log.foo.bar', '.%i'); + const matcher = getFileNameMatcher('log.foo.bar', '.%i'); expect(matcher('log.1.foo.bar')).toEqual(1); expect(matcher('log.12.foo.bar')).toEqual(12); }); - it('handles files with no extensions', () => { - const matcher = getNumericMatcher('log', '.%i'); + it('handles files without extension', () => { + const matcher = getFileNameMatcher('log', '.%i'); expect(matcher('log.1')).toEqual(1); expect(matcher('log.42')).toEqual(42); }); }); + +describe('getRollingFileName', () => { + it('returns the correct file name', () => { + expect(getRollingFileName('kibana.json', '.%i', 5)).toEqual('kibana.5.json'); + expect(getRollingFileName('log.txt', '-%i', 3)).toEqual('log-3.txt'); + }); + + it('handles multiple extensions', () => { + expect(getRollingFileName('kibana.foo.bar', '.%i', 5)).toEqual('kibana.5.foo.bar'); + expect(getRollingFileName('log.foo.bar', '-%i', 3)).toEqual('log-3.foo.bar'); + }); + + it('handles files without extension', () => { + expect(getRollingFileName('kibana', '.%i', 12)).toEqual('kibana.12'); + expect(getRollingFileName('log', '-%i', 7)).toEqual('log-7'); + }); +}); diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.ts index dfe7e1379c693..e379745cb9aa0 100644 --- 
a/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.ts @@ -17,28 +17,35 @@ * under the License. */ +const escape = (string: string) => string.replace(/[.*+\-?^${}()|[\]\\]/g, '\\$&'); + const createNumericMatcher = (fileBaseName: string, pattern: string): RegExp => { - let suffixStart = fileBaseName.indexOf('.'); - if (suffixStart === -1) { - suffixStart = fileBaseName.length; + let extStart = fileBaseName.indexOf('.'); + if (extStart === -1) { + extStart = fileBaseName.length; } - const baseNameWithoutSuffix = fileBaseName - .substr(0, suffixStart) - // escape special characters in the pattern - .replace(/[.*+\-?^${}()|[\]\\]/g, '\\$&'); - const suffix = fileBaseName - .substr(suffixStart, fileBaseName.length) - // escape special characters in the pattern - .replace(/[.*+\-?^${}()|[\]\\]/g, '\\$&'); - const processedPattern = pattern - // escape special characters in the pattern - .replace(/[.*+\-?^${}()|[\]\\]/g, '\\$&') + const baseNameWithoutExt = escape(fileBaseName.substr(0, extStart)); + const extension = escape(fileBaseName.substr(extStart, fileBaseName.length)); + const processedPattern = escape(pattern) // create matching group for `%i` .replace(/%i/g, '(?\\d+)'); - return new RegExp(`^${baseNameWithoutSuffix}${processedPattern}${suffix}$`); + return new RegExp(`^${baseNameWithoutExt}${processedPattern}${extension}$`); }; -export const getNumericMatcher = (logFileName: string, pattern: string) => { +/** + * Builds a matcher that can be used to match a filename against the rolling + * file name pattern associated with given `logFileName` and `pattern` + * + * @example + * ```ts + * const matcher = getFileNameMatcher('kibana.log', '-%i'); + * matcher('kibana-1.log') // `1` + * matcher('kibana-5.log') // `5` + * matcher('kibana-A.log') // undefined + * matcher('kibana.log') // `undefined + * ``` + */ +export const getFileNameMatcher = (logFileName: string, pattern: string) => { const matcher = createNumericMatcher(logFileName, pattern); return (fileName: string): number | undefined => { const match = matcher.exec(fileName); @@ -49,7 +56,16 @@ export const getNumericMatcher = (logFileName: string, pattern: string) => { }; }; -export const getNumericFileName = ( +/** + * Returns the rolling file name associated with given basename and pattern for given index. 
+ * + * @example + * ```ts + * getNumericFileName('foo.log', '.%i', 4) // -> `foo.4.log` + * getNumericFileName('kibana.log', '-{%i}', 12) // -> `kibana-{12}.log` + * ``` + */ +export const getRollingFileName = ( fileBaseName: string, pattern: string, index: number From 30ba0d4c616676e38a59f31489029233de5102f4 Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Thu, 3 Dec 2020 13:28:26 +0100 Subject: [PATCH 12/23] more tests --- .../logging/appenders/rolling_file/mocks.ts | 35 ++++ .../rolling_file/rolling_file_manager.ts | 26 +-- .../numeric/numeric_strategy.test.mocks.ts | 40 ++++ .../numeric/numeric_strategy.test.ts | 172 ++++++++++++++++++ .../strategies/numeric/numeric_strategy.ts | 52 +++--- .../numeric/rolling_tasks.test.mocks.ts | 37 ++++ .../strategies/numeric/rolling_tasks.test.ts | 166 +++++++++++++++++ .../strategies/numeric/rolling_tasks.ts | 93 ++++++++++ 8 files changed, 581 insertions(+), 40 deletions(-) create mode 100644 src/core/server/logging/appenders/rolling_file/mocks.ts create mode 100644 src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.test.mocks.ts create mode 100644 src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.test.ts create mode 100644 src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.test.mocks.ts create mode 100644 src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.test.ts create mode 100644 src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.ts diff --git a/src/core/server/logging/appenders/rolling_file/mocks.ts b/src/core/server/logging/appenders/rolling_file/mocks.ts new file mode 100644 index 0000000000000..3eef0dfdfce67 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/mocks.ts @@ -0,0 +1,35 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import type { RollingFileContext } from './rolling_file_context'; + +const createContextMock = (filePath: string) => { + const mock: jest.Mocked = { + currentFileSize: 0, + currentFileTime: 0, + filePath, + refreshFileInfo: jest.fn(), + }; + + return mock; +}; + +export const rollingFileAppenderMocks = { + createContext: createContextMock, +}; diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts index 0fb3068e69908..bcda2abb7922c 100644 --- a/src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts +++ b/src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts @@ -33,10 +33,22 @@ export class RollingFileManager { write(chunk: string) { const stream = this.ensureStreamOpen(); - this.context.currentFileSize += chunk.length; + this.context.currentFileSize += Buffer.byteLength(chunk, 'utf8'); stream.write(chunk); } + async closeStream() { + return new Promise((resolve) => { + if (this.outputStream === undefined) { + return resolve(); + } + this.outputStream.end(() => { + this.outputStream = undefined; + resolve(); + }); + }); + } + private ensureStreamOpen() { if (this.outputStream === undefined) { this.outputStream = createWriteStream(this.filePath, { @@ -48,16 +60,4 @@ export class RollingFileManager { } return this.outputStream!; } - - public async closeStream() { - return new Promise((resolve) => { - if (this.outputStream === undefined) { - return resolve(); - } - this.outputStream.end(() => { - this.outputStream = undefined; - resolve(); - }); - }); - } } diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.test.mocks.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.test.mocks.ts new file mode 100644 index 0000000000000..661ca87874e08 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.test.mocks.ts @@ -0,0 +1,40 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +export const getOrderedRolledFilesMock = jest.fn(); +export const deleteFilesMock = jest.fn(); +export const rollPreviousFilesInOrderMock = jest.fn(); +export const rollCurrentFileMock = jest.fn(); +export const shouldSkipRolloutMock = jest.fn(); + +jest.doMock('./rolling_tasks', () => ({ + getOrderedRolledFiles: getOrderedRolledFilesMock, + deleteFiles: deleteFilesMock, + rollPreviousFilesInOrder: rollPreviousFilesInOrderMock, + rollCurrentFile: rollCurrentFileMock, + shouldSkipRollout: shouldSkipRolloutMock, +})); + +export const resetAllMock = () => { + shouldSkipRolloutMock.mockReset(); + getOrderedRolledFilesMock.mockReset(); + deleteFilesMock.mockReset(); + rollPreviousFilesInOrderMock.mockReset(); + rollCurrentFileMock.mockReset(); +}; diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.test.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.test.ts new file mode 100644 index 0000000000000..386b551aee377 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.test.ts @@ -0,0 +1,172 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { join } from 'path'; +import { + resetAllMock, + shouldSkipRolloutMock, + deleteFilesMock, + getOrderedRolledFilesMock, + rollCurrentFileMock, + rollPreviousFilesInOrderMock, +} from './numeric_strategy.test.mocks'; +import { rollingFileAppenderMocks } from '../../mocks'; +import { NumericRollingStrategy, NumericRollingStrategyConfig } from './numeric_strategy'; + +const logFileFolder = 'log-file-folder'; +const logFileBaseName = 'kibana.log'; +const pattern = '.%i'; +const logFilePath = join(logFileFolder, logFileBaseName); + +describe('NumericRollingStrategy', () => { + let context: ReturnType; + let strategy: NumericRollingStrategy; + + const createStrategy = (config: Omit) => + new NumericRollingStrategy({ ...config, kind: 'numeric' }, context); + + beforeEach(() => { + context = rollingFileAppenderMocks.createContext(logFilePath); + strategy = createStrategy({ pattern, max: 3 }); + shouldSkipRolloutMock.mockResolvedValue(false); + getOrderedRolledFilesMock.mockResolvedValue([]); + }); + + afterEach(() => { + resetAllMock(); + }); + + it('calls `getOrderedRolledFiles` with the correct parameters', async () => { + await strategy.rollout(); + + expect(getOrderedRolledFilesMock).toHaveBeenCalledTimes(1); + expect(getOrderedRolledFilesMock).toHaveBeenCalledWith({ + logFileFolder, + logFileBaseName, + pattern, + }); + }); + + it('calls `deleteFiles` with the correct files', async () => { + getOrderedRolledFilesMock.mockResolvedValue([ + 'kibana.1.log', + 'kibana.2.log', + 'kibana.3.log', + 'kibana.4.log', + ]); + + await strategy.rollout(); + + expect(deleteFilesMock).toHaveBeenCalledTimes(1); + expect(deleteFilesMock).toHaveBeenCalledWith({ + filesToDelete: ['kibana.3.log', 'kibana.4.log'], + logFileFolder, + }); + }); + + it('calls `rollPreviousFilesInOrder` with the correct files', async () => { + getOrderedRolledFilesMock.mockResolvedValue([ + 'kibana.1.log', + 'kibana.2.log', + 'kibana.3.log', + 'kibana.4.log', + ]); + + await strategy.rollout(); + + expect(rollPreviousFilesInOrderMock).toHaveBeenCalledTimes(1); + expect(rollPreviousFilesInOrderMock).toHaveBeenCalledWith({ + filesToRoll: ['kibana.1.log', 'kibana.2.log'], + logFileFolder, + logFileBaseName, + pattern, + }); + }); + + it('calls `rollCurrentFile` with the correct parameters', async () => { + await strategy.rollout(); + + expect(rollCurrentFileMock).toHaveBeenCalledTimes(1); + expect(rollCurrentFileMock).toHaveBeenCalledWith({ + pattern, + logFileBaseName, + logFileFolder, + }); + }); + + it('calls `context.refreshFileInfo` with the correct parameters', async () => { + await strategy.rollout(); + + expect(context.refreshFileInfo).toHaveBeenCalledTimes(1); + }); + + it('calls the tasks in the correct order', async () => { + getOrderedRolledFilesMock.mockResolvedValue([ + 'kibana.1.log', + 'kibana.2.log', + 'kibana.3.log', + 'kibana.4.log', + ]); + + await strategy.rollout(); + + const deleteFilesCall = deleteFilesMock.mock.invocationCallOrder[0]; + const rollPreviousFilesInOrderCall = rollPreviousFilesInOrderMock.mock.invocationCallOrder[0]; + const rollCurrentFileCall = rollCurrentFileMock.mock.invocationCallOrder[0]; + const refreshFileInfoCall = context.refreshFileInfo.mock.invocationCallOrder[0]; + + expect(deleteFilesCall).toBeLessThan(rollPreviousFilesInOrderCall); + expect(rollPreviousFilesInOrderCall).toBeLessThan(rollCurrentFileCall); + expect(rollCurrentFileCall).toBeLessThan(refreshFileInfoCall); + }); + + it('do not calls `deleteFiles` if no file should be deleted', async () => { + 
getOrderedRolledFilesMock.mockResolvedValue(['kibana.1.log', 'kibana.2.log']);
+
+    await strategy.rollout();
+
+    expect(deleteFilesMock).not.toHaveBeenCalled();
+  });
+
+  it('does not call `rollPreviousFilesInOrder` if no file should be rolled', async () => {
+    getOrderedRolledFilesMock.mockResolvedValue([]);
+
+    await strategy.rollout();
+
+    expect(rollPreviousFilesInOrderMock).not.toHaveBeenCalled();
+  });
+
+  it('skips the rollout if `shouldSkipRollout` returns true', async () => {
+    shouldSkipRolloutMock.mockResolvedValue(true);
+    getOrderedRolledFilesMock.mockResolvedValue([
+      'kibana.1.log',
+      'kibana.2.log',
+      'kibana.3.log',
+      'kibana.4.log',
+    ]);
+
+    await strategy.rollout();
+
+    expect(getOrderedRolledFilesMock).not.toHaveBeenCalled();
+    expect(deleteFilesMock).not.toHaveBeenCalled();
+    expect(rollPreviousFilesInOrderMock).not.toHaveBeenCalled();
+    expect(rollCurrentFileMock).not.toHaveBeenCalled();
+    expect(context.refreshFileInfo).not.toHaveBeenCalled();
+  });
+});
diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts
index 2d1d4d1ad7979..75ba2fe7c772d 100644
--- a/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts
+++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts
@@ -17,12 +17,17 @@
  * under the License.
  */
 
-import { basename, dirname, join } from 'path';
+import { basename, dirname } from 'path';
 import { schema } from '@kbn/config-schema';
-import { readdir, unlink, rename, exists } from '../fs';
 import { RollingStrategy } from '../strategy';
 import { RollingFileContext } from '../../rolling_file_context';
-import { getFileNameMatcher, getRollingFileName } from './pattern_matcher';
+import {
+  shouldSkipRollout,
+  getOrderedRolledFiles,
+  deleteFiles,
+  rollCurrentFile,
+  rollPreviousFilesInOrder,
+} from './rolling_tasks';
 
 export interface NumericRollingStrategyConfig {
   kind: 'numeric';
@@ -110,40 +115,33 @@ export class NumericRollingStrategy implements RollingStrategy {
   }
 
   async rollout() {
-    // in case of time-interval triggering policy, we can have an entire
-    // interval without any log event. In that case, the log file is not even
-    // present, and we should not perform the rollout
-    if (!(await exists(this.logFilePath))) {
+    const logFilePath = this.logFilePath;
+    const logFileBaseName = this.logFileBaseName;
+    const logFileFolder = this.logFileFolder;
+    const pattern = this.config.pattern;
+
+    if (await shouldSkipRollout({ logFilePath })) {
       return;
     }
 
-    const matcher = getFileNameMatcher(this.logFileBaseName, this.config.pattern);
-    const dirContent = await readdir(this.logFileFolder);
-
-    const orderedFiles = dirContent
-      .map((fileName) => ({
-        fileName,
-        index: matcher(fileName),
-      }))
-      .filter(({ index }) => index !== undefined)
-      .sort((a, b) => a.index! - b.index!) 
- .map(({ fileName }) => fileName); - + // get the files matching the pattern in the folder, and sort them by `%i` value + const orderedFiles = await getOrderedRolledFiles({ + logFileFolder, + logFileBaseName, + pattern, + }); const filesToRoll = orderedFiles.slice(0, this.config.max - 1); const filesToDelete = orderedFiles.slice(filesToRoll.length, orderedFiles.length); - for (const fileToDelete of filesToDelete) { - await unlink(join(this.logFileFolder, fileToDelete)); + if (filesToDelete.length > 0) { + await deleteFiles({ logFileFolder, filesToDelete }); } - for (let i = filesToRoll.length - 1; i >= 0; i--) { - const oldFileName = filesToRoll[i]; - const newFileName = getRollingFileName(this.logFileBaseName, this.config.pattern, i + 2); - await rename(join(this.logFileFolder, oldFileName), join(this.logFileFolder, newFileName)); + if (filesToRoll.length > 0) { + await rollPreviousFilesInOrder({ filesToRoll, logFileFolder, logFileBaseName, pattern }); } - const currentFileNewName = getRollingFileName(this.logFileBaseName, this.config.pattern, 1); - await rename(this.context.filePath, join(this.logFileFolder, currentFileNewName)); + await rollCurrentFile({ pattern, logFileBaseName, logFileFolder }); // updates the context file info to mirror the new size and date this.context.refreshFileInfo(); diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.test.mocks.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.test.mocks.ts new file mode 100644 index 0000000000000..16ae8264edcf8 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.test.mocks.ts @@ -0,0 +1,37 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +export const readdirMock = jest.fn(); +export const unlinkMock = jest.fn(); +export const renameMock = jest.fn(); +export const existsMock = jest.fn(); + +jest.doMock('../fs', () => ({ + readdir: readdirMock, + unlink: unlinkMock, + rename: renameMock, + exists: existsMock, +})); + +export const clearAllMocks = () => { + readdirMock.mockClear(); + unlinkMock.mockClear(); + renameMock.mockClear(); + existsMock.mockClear(); +}; diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.test.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.test.ts new file mode 100644 index 0000000000000..e730e00d98757 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.test.ts @@ -0,0 +1,166 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. 
licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { join } from 'path';
+import {
+  existsMock,
+  readdirMock,
+  renameMock,
+  unlinkMock,
+  clearAllMocks,
+} from './rolling_tasks.test.mocks';
+import {
+  shouldSkipRollout,
+  rollCurrentFile,
+  rollPreviousFilesInOrder,
+  deleteFiles,
+  getOrderedRolledFiles,
+} from './rolling_tasks';
+
+describe('NumericRollingStrategy tasks', () => {
+  afterEach(() => {
+    clearAllMocks();
+  });
+
+  describe('shouldSkipRollout', () => {
+    it('calls `exists` with the correct parameters', async () => {
+      await shouldSkipRollout({ logFilePath: 'some-file' });
+
+      expect(existsMock).toHaveBeenCalledTimes(1);
+      expect(existsMock).toHaveBeenCalledWith('some-file');
+    });
+    it('returns `true` if the current log file does not exist', async () => {
+      existsMock.mockResolvedValue(false);
+
+      expect(await shouldSkipRollout({ logFilePath: 'some-file' })).toEqual(true);
+    });
+    it('returns `false` if the current log file exists', async () => {
+      existsMock.mockResolvedValue(true);
+
+      expect(await shouldSkipRollout({ logFilePath: 'some-file' })).toEqual(false);
+    });
+  });
+
+  describe('rollCurrentFile', () => {
+    it('calls `rename` with the correct parameters', async () => {
+      await rollCurrentFile({
+        logFileFolder: 'log-folder',
+        logFileBaseName: 'kibana.log',
+        pattern: '.%i',
+      });
+
+      expect(renameMock).toHaveBeenCalledTimes(1);
+      expect(renameMock).toHaveBeenCalledWith(
+        join('log-folder', 'kibana.log'),
+        join('log-folder', 'kibana.1.log')
+      );
+    });
+  });
+
+  describe('rollPreviousFilesInOrder', () => {
+    it('calls `rename` once for each file', async () => {
+      await rollPreviousFilesInOrder({
+        filesToRoll: ['file-1', 'file-2', 'file-3'],
+        logFileFolder: 'log-folder',
+        logFileBaseName: 'file',
+        pattern: '-%i',
+      });
+
+      expect(renameMock).toHaveBeenCalledTimes(3);
+    });
+
+    it('calls `rename` with the correct parameters', async () => {
+      await rollPreviousFilesInOrder({
+        filesToRoll: ['file-1', 'file-2'],
+        logFileFolder: 'log-folder',
+        logFileBaseName: 'file',
+        pattern: '-%i',
+      });
+
+      expect(renameMock).toHaveBeenNthCalledWith(
+        1,
+        join('log-folder', 'file-2'),
+        join('log-folder', 'file-3')
+      );
+      expect(renameMock).toHaveBeenNthCalledWith(
+        2,
+        join('log-folder', 'file-1'),
+        join('log-folder', 'file-2')
+      );
+    });
+  });
+
+  describe('deleteFiles', () => {
+    it('calls `unlink` once for each file', async () => {
+      await deleteFiles({
+        logFileFolder: 'log-folder',
+        filesToDelete: ['file-a', 'file-b', 'file-c'],
+      });
+
+      expect(unlinkMock).toHaveBeenCalledTimes(3);
+    });
+    it('calls `unlink` with the correct parameters', async () => {
+      await deleteFiles({
+        logFileFolder: 'log-folder',
+        filesToDelete: ['file-a', 'file-b'],
+      });
+
+      expect(unlinkMock).toHaveBeenNthCalledWith(1, join('log-folder', 'file-a'));
+      expect(unlinkMock).toHaveBeenNthCalledWith(2, join('log-folder', 'file-b'));
+    });
+  });
+
+  describe('getOrderedRolledFiles', () => {
+    it('returns the rolled files matching 
the pattern in order', async () => { + readdirMock.mockResolvedValue(['kibana-3.log', 'kibana-1.log', 'kibana-2.log']); + + const files = await getOrderedRolledFiles({ + logFileFolder: 'log-folder', + logFileBaseName: 'kibana.log', + pattern: '-%i', + }); + + expect(files).toEqual(['kibana-1.log', 'kibana-2.log', 'kibana-3.log']); + }); + + it('ignores files that do no match the pattern', async () => { + readdirMock.mockResolvedValue(['kibana.2.log', 'kibana.1.log', 'kibana-3.log', 'foo.log']); + + const files = await getOrderedRolledFiles({ + logFileFolder: 'log-folder', + logFileBaseName: 'kibana.log', + pattern: '.%i', + }); + + expect(files).toEqual(['kibana.1.log', 'kibana.2.log']); + }); + + it('does not return the base log file', async () => { + readdirMock.mockResolvedValue(['kibana.log', 'kibana-1.log', 'kibana-2.log']); + + const files = await getOrderedRolledFiles({ + logFileFolder: 'log-folder', + logFileBaseName: 'kibana.log', + pattern: '-%i', + }); + + expect(files).toEqual(['kibana-1.log', 'kibana-2.log']); + }); + }); +}); diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.ts new file mode 100644 index 0000000000000..50d671fac2114 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.ts @@ -0,0 +1,93 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { join } from 'path'; +import { getFileNameMatcher, getRollingFileName } from './pattern_matcher'; +import { readdir, rename, unlink, exists } from '../fs'; + +export const shouldSkipRollout = async ({ logFilePath }: { logFilePath: string }) => { + // in case of time-interval triggering policy, we can have an entire + // interval without any log event. In that case, the log file is not even + // present, and we should not perform the rollout + return !(await exists(logFilePath)); +}; + +export const getOrderedRolledFiles = async ({ + logFileBaseName, + logFileFolder, + pattern, +}: { + logFileFolder: string; + logFileBaseName: string; + pattern: string; +}): Promise => { + const matcher = getFileNameMatcher(logFileBaseName, pattern); + const dirContent = await readdir(logFileFolder); + return dirContent + .map((fileName) => ({ + fileName, + index: matcher(fileName), + })) + .filter(({ index }) => index !== undefined) + .sort((a, b) => a.index! - b.index!) 
+ .map(({ fileName }) => fileName); +}; + +export const deleteFiles = async ({ + logFileFolder, + filesToDelete, +}: { + logFileFolder: string; + filesToDelete: string[]; +}) => { + for (const fileToDelete of filesToDelete) { + await unlink(join(logFileFolder, fileToDelete)); + } +}; + +export const rollPreviousFilesInOrder = async ({ + filesToRoll, + logFileFolder, + logFileBaseName, + pattern, +}: { + logFileFolder: string; + logFileBaseName: string; + pattern: string; + filesToRoll: string[]; +}) => { + for (let i = filesToRoll.length - 1; i >= 0; i--) { + const oldFileName = filesToRoll[i]; + const newFileName = getRollingFileName(logFileBaseName, pattern, i + 2); + await rename(join(logFileFolder, oldFileName), join(logFileFolder, newFileName)); + } +}; + +export const rollCurrentFile = async ({ + logFileFolder, + logFileBaseName, + pattern, +}: { + logFileFolder: string; + logFileBaseName: string; + pattern: string; +}) => { + const rolledBaseName = getRollingFileName(logFileBaseName, pattern, 1); + await rename(join(logFileFolder, logFileBaseName), join(logFileFolder, rolledBaseName)); +}; From 8f4581b50ec03221c2b1ececc0629b96576908c5 Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Thu, 3 Dec 2020 15:30:16 +0100 Subject: [PATCH 13/23] more tests --- .../logging/appenders/rolling_file/mocks.ts | 37 +++ .../appenders/rolling_file/policies/index.ts | 2 +- .../rolling_file_appender.test.mocks.ts | 58 ++++ .../rolling_file_appender.test.ts | 275 ++++++++++++++++++ .../rolling_file/rolling_file_appender.ts | 6 +- .../rolling_file/rolling_file_manager.ts | 4 +- 6 files changed, 376 insertions(+), 6 deletions(-) create mode 100644 src/core/server/logging/appenders/rolling_file/rolling_file_appender.test.mocks.ts create mode 100644 src/core/server/logging/appenders/rolling_file/rolling_file_appender.test.ts diff --git a/src/core/server/logging/appenders/rolling_file/mocks.ts b/src/core/server/logging/appenders/rolling_file/mocks.ts index 3eef0dfdfce67..2944235438688 100644 --- a/src/core/server/logging/appenders/rolling_file/mocks.ts +++ b/src/core/server/logging/appenders/rolling_file/mocks.ts @@ -17,7 +17,12 @@ * under the License. 
*/ +import { PublicMethodsOf } from '@kbn/utility-types'; +import type { Layout } from '@kbn/logging'; import type { RollingFileContext } from './rolling_file_context'; +import type { RollingFileManager } from './rolling_file_manager'; +import type { TriggeringPolicy } from './policies/policy'; +import type { RollingStrategy } from './strategies/strategy'; const createContextMock = (filePath: string) => { const mock: jest.Mocked = { @@ -26,10 +31,42 @@ const createContextMock = (filePath: string) => { filePath, refreshFileInfo: jest.fn(), }; + return mock; +}; + +const createStrategyMock = () => { + const mock: jest.Mocked = { + rollout: jest.fn(), + }; + return mock; +}; +const createPolicyMock = () => { + const mock: jest.Mocked = { + isTriggeringEvent: jest.fn(), + }; + return mock; +}; + +const createLayoutMock = () => { + const mock: jest.Mocked = { + format: jest.fn(), + }; + return mock; +}; + +const createFileManagerMock = () => { + const mock: jest.Mocked> = { + write: jest.fn(), + closeStream: jest.fn(), + }; return mock; }; export const rollingFileAppenderMocks = { createContext: createContextMock, + createStrategy: createStrategyMock, + createPolicy: createPolicyMock, + createLayout: createLayoutMock, + createFileManager: createFileManagerMock, }; diff --git a/src/core/server/logging/appenders/rolling_file/policies/index.ts b/src/core/server/logging/appenders/rolling_file/policies/index.ts index 9c32a0f9e13b1..3604420a69597 100644 --- a/src/core/server/logging/appenders/rolling_file/policies/index.ts +++ b/src/core/server/logging/appenders/rolling_file/policies/index.ts @@ -43,7 +43,7 @@ export type TriggeringPolicyConfig = | SizeLimitTriggeringPolicyConfig | TimeIntervalTriggeringPolicyConfig; -export const rollingPolicyConfigSchema = schema.oneOf([ +export const triggeringPolicyConfigSchema = schema.oneOf([ sizeLimitTriggeringPolicyConfigSchema, timeIntervalTriggeringPolicyConfigSchema, ]); diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.test.mocks.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.test.mocks.ts new file mode 100644 index 0000000000000..c84cf09fffe89 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.test.mocks.ts @@ -0,0 +1,58 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { schema } from '@kbn/config-schema'; + +export const LayoutsMock = { + create: jest.fn(), + configSchema: schema.any(), +}; +jest.doMock('../../layouts/layouts', () => ({ + Layouts: LayoutsMock, +})); + +export const createTriggeringPolicyMock = jest.fn(); +jest.doMock('./policies', () => ({ + triggeringPolicyConfigSchema: schema.any(), + createTriggeringPolicy: createTriggeringPolicyMock, +})); + +export const createRollingStrategyMock = jest.fn(); +jest.doMock('./strategies', () => ({ + rollingStrategyConfigSchema: schema.any(), + createRollingStrategy: createRollingStrategyMock, +})); + +export const RollingFileManagerMock = jest.fn(); +jest.doMock('./rolling_file_manager', () => ({ + RollingFileManager: RollingFileManagerMock, +})); + +export const RollingFileContextMock = jest.fn(); +jest.doMock('./rolling_file_context', () => ({ + RollingFileContext: RollingFileContextMock, +})); + +export const resetAllMocks = () => { + LayoutsMock.create.mockReset(); + createTriggeringPolicyMock.mockReset(); + createRollingStrategyMock.mockReset(); + RollingFileManagerMock.mockReset(); + RollingFileContextMock.mockReset(); +}; diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.test.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.test.ts new file mode 100644 index 0000000000000..96051903e16e2 --- /dev/null +++ b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.test.ts @@ -0,0 +1,275 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { + createRollingStrategyMock, + createTriggeringPolicyMock, + LayoutsMock, + resetAllMocks, + RollingFileContextMock, + RollingFileManagerMock, +} from './rolling_file_appender.test.mocks'; +import { rollingFileAppenderMocks } from './mocks'; +import moment from 'moment-timezone'; +import { LogLevel, LogRecord } from '@kbn/logging'; +import { RollingFileAppender, RollingFileAppenderConfig } from './rolling_file_appender'; + +const config: RollingFileAppenderConfig = { + kind: 'rolling-file', + path: '/var/log/kibana.log', + layout: { + kind: 'pattern', + pattern: '%message', + highlight: false, + }, + policy: { + kind: 'time-interval', + interval: moment.duration(4, 'hour'), + modulate: true, + }, + strategy: { + kind: 'numeric', + max: 5, + pattern: '-%i', + }, +}; + +const createLogRecord = (parts: Partial = {}): LogRecord => ({ + timestamp: new Date(), + level: LogLevel.Info, + context: 'context', + message: 'just a log', + pid: 42, + ...parts, +}); + +const nextTick = () => new Promise((resolve) => setTimeout(resolve, 10)); + +const createPromiseResolver = () => { + let resolve: () => void; + let reject: () => void; + const promise = new Promise((_resolve, _reject) => { + resolve = _resolve; + reject = _reject; + }); + + return { + promise, + resolve: resolve!, + reject: reject!, + }; +}; + +describe('RollingFileAppender', () => { + let appender: RollingFileAppender; + + let layout: ReturnType; + let strategy: ReturnType; + let policy: ReturnType; + let context: ReturnType; + let fileManager: ReturnType; + + beforeEach(() => { + layout = rollingFileAppenderMocks.createLayout(); + LayoutsMock.create.mockReturnValue(layout); + + policy = rollingFileAppenderMocks.createPolicy(); + createTriggeringPolicyMock.mockReturnValue(policy); + + strategy = rollingFileAppenderMocks.createStrategy(); + createRollingStrategyMock.mockReturnValue(strategy); + + context = rollingFileAppenderMocks.createContext('file-path'); + RollingFileContextMock.mockImplementation(() => context); + + fileManager = rollingFileAppenderMocks.createFileManager(); + RollingFileManagerMock.mockImplementation(() => fileManager); + + appender = new RollingFileAppender(config); + }); + + afterAll(() => { + resetAllMocks(); + }); + + it('constructs its delegates with the correct parameters', () => { + expect(RollingFileContextMock).toHaveBeenCalledTimes(1); + expect(RollingFileContextMock).toHaveBeenCalledWith(config.path); + + expect(RollingFileManagerMock).toHaveBeenCalledTimes(1); + expect(RollingFileManagerMock).toHaveBeenCalledWith(context); + + expect(LayoutsMock.create).toHaveBeenCalledTimes(1); + expect(LayoutsMock.create).toHaveBeenCalledWith(config.layout); + + expect(createTriggeringPolicyMock).toHaveBeenCalledTimes(1); + expect(createTriggeringPolicyMock).toHaveBeenCalledWith(config.policy, context); + + expect(createRollingStrategyMock).toHaveBeenCalledTimes(1); + expect(createRollingStrategyMock).toHaveBeenCalledWith(config.strategy, context); + }); + + describe('#append', () => { + describe('when rollout is not needed', () => { + beforeEach(() => { + policy.isTriggeringEvent.mockReturnValue(false); + }); + + it('calls `layout.format` with the message', () => { + const log1 = createLogRecord({ message: '1' }); + const log2 = createLogRecord({ message: '2' }); + + appender.append(log1); + + expect(layout.format).toHaveBeenCalledTimes(1); + expect(layout.format).toHaveBeenCalledWith(log1); + + appender.append(log2); + + expect(layout.format).toHaveBeenCalledTimes(2); + 
expect(layout.format).toHaveBeenCalledWith(log2); + }); + + it('calls `fileManager.write` with the formatted message', () => { + layout.format.mockImplementation(({ message }) => message); + + const log1 = createLogRecord({ message: '1' }); + const log2 = createLogRecord({ message: '2' }); + + appender.append(log1); + + expect(fileManager.write).toHaveBeenCalledTimes(1); + expect(fileManager.write).toHaveBeenCalledWith('1\n'); + + appender.append(log2); + + expect(fileManager.write).toHaveBeenCalledTimes(2); + expect(fileManager.write).toHaveBeenCalledWith('2\n'); + }); + }); + + describe('when rollout is needed', () => { + beforeEach(() => { + policy.isTriggeringEvent.mockReturnValueOnce(true).mockReturnValue(false); + }); + + it('does not log the event triggering the rollout', () => { + const log = createLogRecord({ message: '1' }); + appender.append(log); + + expect(layout.format).not.toHaveBeenCalled(); + expect(fileManager.write).not.toHaveBeenCalled(); + }); + + it('triggers the rollout', () => { + const log = createLogRecord({ message: '1' }); + appender.append(log); + + expect(strategy.rollout).toHaveBeenCalledTimes(1); + }); + + it('closes the manager stream once the rollout is complete', async () => { + const { promise, resolve } = createPromiseResolver(); + strategy.rollout.mockReturnValue(promise); + + const log = createLogRecord({ message: '1' }); + appender.append(log); + + expect(fileManager.closeStream).not.toHaveBeenCalled(); + + resolve(); + await nextTick(); + + expect(fileManager.closeStream).toHaveBeenCalledTimes(1); + }); + + it('logs the event once the rollout is complete', async () => { + const { promise, resolve } = createPromiseResolver(); + strategy.rollout.mockReturnValue(promise); + + const log = createLogRecord({ message: '1' }); + appender.append(log); + + expect(fileManager.write).not.toHaveBeenCalled(); + + resolve(); + await nextTick(); + + expect(fileManager.write).toHaveBeenCalledTimes(1); + }); + + it('logs any pending events once the rollout is complete', async () => { + const { promise, resolve } = createPromiseResolver(); + strategy.rollout.mockReturnValue(promise); + + appender.append(createLogRecord({ message: '1' })); + appender.append(createLogRecord({ message: '2' })); + appender.append(createLogRecord({ message: '3' })); + + expect(fileManager.write).not.toHaveBeenCalled(); + + resolve(); + await nextTick(); + + expect(fileManager.write).toHaveBeenCalledTimes(3); + }); + }); + }); + + describe('#dispose', () => { + it('closes the file manager', async () => { + await appender.dispose(); + + expect(fileManager.closeStream).toHaveBeenCalledTimes(1); + }); + + it('noops if called multiple times', async () => { + await appender.dispose(); + + expect(fileManager.closeStream).toHaveBeenCalledTimes(1); + + await appender.dispose(); + + expect(fileManager.closeStream).toHaveBeenCalledTimes(1); + }); + + it('waits until the rollout completes if a rollout was in progress', async () => { + expect.assertions(1); + + const { promise, resolve } = createPromiseResolver(); + let rolloutComplete = false; + + strategy.rollout.mockReturnValue( + promise.then(() => { + rolloutComplete = true; + }) + ); + + appender.append(createLogRecord({ message: '1' })); + + const dispose = appender.dispose().then(() => { + expect(rolloutComplete).toEqual(true); + }); + + resolve(); + + await Promise.all([dispose, promise]); + }); + }); +}); diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts 
b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts index e35704064d02c..1dee21941264c 100644 --- a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts +++ b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts @@ -24,7 +24,7 @@ import { BufferAppender } from '../buffer/buffer_appender'; import { TriggeringPolicyConfig, createTriggeringPolicy, - rollingPolicyConfigSchema, + triggeringPolicyConfigSchema, TriggeringPolicy, } from './policies'; import { @@ -66,7 +66,7 @@ export class RollingFileAppender implements DisposableAppender { kind: schema.literal('rolling-file'), layout: Layouts.configSchema, path: schema.string(), - policy: rollingPolicyConfigSchema, + policy: triggeringPolicyConfigSchema, strategy: rollingStrategyConfigSchema, }); @@ -87,7 +87,7 @@ export class RollingFileAppender implements DisposableAppender { constructor(config: RollingFileAppenderConfig) { this.context = new RollingFileContext(config.path); this.context.refreshFileInfo(); - this.fileManager = new RollingFileManager(config.path, this.context); + this.fileManager = new RollingFileManager(this.context); this.layout = Layouts.create(config.layout); this.policy = createTriggeringPolicy(config.policy, this.context); this.strategy = createRollingStrategy(config.strategy, this.context); diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts index bcda2abb7922c..c2224de7db6fb 100644 --- a/src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts +++ b/src/core/server/logging/appenders/rolling_file/rolling_file_manager.ts @@ -27,8 +27,8 @@ export class RollingFileManager { private readonly filePath; private outputStream?: WriteStream; - constructor(path: string, private readonly context: RollingFileContext) { - this.filePath = path; + constructor(private readonly context: RollingFileContext) { + this.filePath = context.filePath; } write(chunk: string) { From b226a63af1b8ff6d04272011c7c8cd250d2ffc04 Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Thu, 3 Dec 2020 15:50:47 +0100 Subject: [PATCH 14/23] nits on README --- src/core/server/logging/README.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/core/server/logging/README.md b/src/core/server/logging/README.md index 1bc5b1dfc52fb..8cb704f09ce8c 100644 --- a/src/core/server/logging/README.md +++ b/src/core/server/logging/README.md @@ -167,7 +167,7 @@ The options are: - `size` -the maximum size the log file should reach before a rollover will be performed. +the maximum size the log file should reach before a rollover should be performed. The default value is `100mb` @@ -206,14 +206,14 @@ Whether the interval should be adjusted to cause the next rollover to occur on t For example, when true, if the interval is `4h` and the current hour is 3 am then the first rollover will occur at 4 am and then next ones will occur at 8 am, noon, 4pm, etc. -The default value is true. +The default value is `true`. #### Rolling strategies -The rolling strategies determine how the rollover should occur: both the naming of the rolled files, +The rolling strategy determines how the rollover should occur: both the naming of the rolled files, and their retention policy. -There is currently only one strategy supported: `numeric`. +There is currently one strategy supported: `numeric`. ##### NumericRollingStrategy @@ -253,13 +253,13 @@ The suffix to append to the file path when rolling. 
Must include `%i`, as this i that will be converted to the file index. for example, with `path: /var/logs/kibana.log` and `pattern: '-%i'`, the created rolling files -will be `/var/logs/kibana-1.log`, `/var/logs/kibana-2.log` +will be `/var/logs/kibana-1.log`, `/var/logs/kibana-2.log`, and so on. The default value is `-%i` - `max` -The maximum number of files to keep. Once this number is reached, oldest files will be deleted +The maximum number of files to keep. Once this number is reached, oldest files will be deleted. The default value is `7` From 2634c764a7f224197ddcf97a82843f3585be4750 Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Thu, 3 Dec 2020 19:37:13 +0100 Subject: [PATCH 15/23] some self review --- .../time_interval/time_interval_policy.test.ts | 17 +++++++++++++++++ .../time_interval/time_interval_policy.ts | 7 ++++--- .../rolling_file/rolling_file_appender.ts | 18 +++++++----------- 3 files changed, 28 insertions(+), 14 deletions(-) diff --git a/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.test.ts b/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.test.ts index 79561173087bc..3f06883da8884 100644 --- a/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.test.ts +++ b/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.test.ts @@ -32,6 +32,7 @@ const format = 'YYYY-MM-DD HH:mm:ss'; describe('TimeIntervalTriggeringPolicy', () => { afterEach(() => { getNextRollingTimeMock.mockReset(); + jest.restoreAllMocks(); }); const createLogRecord = (timestamp: Date): LogRecord => ({ @@ -72,6 +73,22 @@ describe('TimeIntervalTriggeringPolicy', () => { ); }); + it('calls `getNextRollingTime` with the current time if `context.currentFileTime` is not set', () => { + const currentTime = moment('2018-06-15 04:27:12', format).toDate().getTime(); + jest.spyOn(Date, 'now').mockReturnValue(currentTime); + const context = createContext(0); + const config = createConfig('15m', true); + + new TimeIntervalTriggeringPolicy(config, context); + + expect(getNextRollingTimeMock).toHaveBeenCalledTimes(1); + expect(getNextRollingTimeMock).toHaveBeenCalledWith( + currentTime, + config.interval, + config.modulate + ); + }); + describe('#isTriggeringEvent', () => { it('returns true if the event time is after the nextRolloverTime', () => { const eventDate = moment('2010-10-20 04:43:12', format).toDate(); diff --git a/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts b/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts index e500c6f50ceed..352e124875845 100644 --- a/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts +++ b/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts @@ -32,11 +32,12 @@ export interface TimeIntervalTriggeringPolicyConfig { * How often a rollover should occur. * * @remarks - * Due to of modulate rolling works, it is required to have an integer value for the highest time unit + * Due to how modulate rolling works, it is required to have an integer value for the highest time unit * of the duration (you can't overflow to a higher unit). - * For example, `15m` and `4h` are valid values , but `90m` is not (as it is `1.5h`),. + * For example, `15m` and `4h` are valid values , but `90m` is not (as it is `1.5h`). 
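Putting the policy and strategy options described above together, a complete appender configuration could look like the following sketch. It mirrors the shape of the unit-test fixture; the values and the import paths are illustrative, not defaults.

import moment from 'moment-timezone';
import { RollingFileAppenderConfig } from './rolling_file_appender';

// Illustrative configuration: roll over every 24h, aligned on the day boundary
// (modulate: true), keeping at most 5 rolled files named kibana-1.log ... kibana-5.log.
export const exampleConfig: RollingFileAppenderConfig = {
  kind: 'rolling-file',
  path: '/var/logs/kibana.log',
  layout: { kind: 'pattern', pattern: '%message', highlight: false },
  policy: { kind: 'time-interval', interval: moment.duration(24, 'hour'), modulate: true },
  strategy: { kind: 'numeric', pattern: '-%i', max: 5 },
};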
*/ interval: Duration; + /** * Indicates whether the interval should be adjusted to cause the next rollover to occur on the interval boundary. * @@ -65,7 +66,7 @@ export const timeIntervalTriggeringPolicyConfigSchema = schema.object({ */ export class TimeIntervalTriggeringPolicy implements TriggeringPolicy { /** - * millisecond timestamp of when the next rollover should occur. + * milliseconds timestamp of when the next rollover should occur. */ private nextRolloverTime: number; diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts index 1dee21941264c..194be9612bd3e 100644 --- a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts +++ b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts @@ -44,7 +44,6 @@ export interface RollingFileAppenderConfig { layout: LayoutConfigType; /** * The absolute path of the file to write to. - * If the file, or any of its parent directories, do not exist, they will be created. */ path: string; /** @@ -81,9 +80,6 @@ export class RollingFileAppender implements DisposableAppender { private readonly strategy: RollingStrategy; private readonly buffer: BufferAppender; - /** - * Creates FileAppender instance with specified layout and file path. - */ constructor(config: RollingFileAppenderConfig) { this.context = new RollingFileContext(config.path); this.context.refreshFileInfo(); @@ -95,8 +91,8 @@ export class RollingFileAppender implements DisposableAppender { } /** - * Formats specified `record` and writes them to the specified file. - * @param record `LogRecord` instance to be logged. + * Formats specified `record` and writes it to the specified file. If the record + * would trigger a rollover, then it will be performed before writing to the file. */ public append(record: LogRecord) { // if we are currently rolling the files, push the log record @@ -119,7 +115,8 @@ export class RollingFileAppender implements DisposableAppender { } /** - * Disposes `FileAppender`. Waits for the underlying file stream to be completely flushed and closed. + * Disposes the appender. + * If a rollout is currently in progress, it will ve awaited. */ public async dispose() { if (this.disposed) { @@ -141,14 +138,13 @@ export class RollingFileAppender implements DisposableAppender { try { await this.strategy.rollout(); await this.fileManager.closeStream(); - this.rollingPromise = undefined; - this.isRolling = false; } catch (e) { // eslint-disable-next-line no-console console.log('Error while rolling file: ', e); - } finally { - this.flushBuffer(); } + this.rollingPromise = undefined; + this.isRolling = false; + this.flushBuffer(); } private flushBuffer() { From a9e21d598fe302a1e76c0b2be21f556321252842 Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Thu, 3 Dec 2020 19:39:45 +0100 Subject: [PATCH 16/23] doc nits --- .../appenders/rolling_file/rolling_file_appender.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts index 194be9612bd3e..5657d0860f94e 100644 --- a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts +++ b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts @@ -47,11 +47,11 @@ export interface RollingFileAppenderConfig { */ path: string; /** - * The policy to use to determine if a rollover should occur. 
+ * The {@link TriggeringPolicy | policy} to use to determine if a rollover should occur. */ policy: TriggeringPolicyConfig; /** - * The rollout strategy to use for rolling. + * The {@link RollingStrategy | rollout strategy} to use for rolling. */ strategy: RollingStrategyConfig; } @@ -92,7 +92,7 @@ export class RollingFileAppender implements DisposableAppender { /** * Formats specified `record` and writes it to the specified file. If the record - * would trigger a rollover, then it will be performed before writing to the file. + * would trigger a rollover, it will be performed before the effective write operation. */ public append(record: LogRecord) { // if we are currently rolling the files, push the log record @@ -116,7 +116,7 @@ export class RollingFileAppender implements DisposableAppender { /** * Disposes the appender. - * If a rollout is currently in progress, it will ve awaited. + * If a rollout is currently in progress, it will be awaited. */ public async dispose() { if (this.disposed) { From 3e740c6ef656fdc7b625f275976d339adeb73c7e Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Thu, 3 Dec 2020 20:23:56 +0100 Subject: [PATCH 17/23] self review --- .../logging/appenders/rolling_file/rolling_file_appender.ts | 2 +- .../rolling_file/strategies/numeric/pattern_matcher.ts | 2 +- .../appenders/rolling_file/strategies/numeric/rolling_tasks.ts | 3 +++ .../logging/appenders/rolling_file/strategies/strategy.ts | 2 +- 4 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts index 5657d0860f94e..0309196933d22 100644 --- a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts +++ b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts @@ -166,7 +166,7 @@ export class RollingFileAppender implements DisposableAppender { } /** - * Checks if the current even should trigger a rollout + * Checks if the current event should trigger a rollout */ private needRollout(record: LogRecord) { return this.policy.isTriggeringEvent(record); diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.ts index e379745cb9aa0..e1b2f914bfbb7 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.ts @@ -42,7 +42,7 @@ const createNumericMatcher = (fileBaseName: string, pattern: string): RegExp => * matcher('kibana-1.log') // `1` * matcher('kibana-5.log') // `5` * matcher('kibana-A.log') // undefined - * matcher('kibana.log') // `undefined + * matcher('kibana.log') // undefined * ``` */ export const getFileNameMatcher = (logFileName: string, pattern: string) => { diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.ts index 50d671fac2114..d14f24dd44fb3 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.ts @@ -28,6 +28,9 @@ export const shouldSkipRollout = async ({ logFilePath }: { logFilePath: string } return !(await exists(logFilePath)); }; +/** + * Returns the rolled file basenames, from the most recent to the oldest. 
+ */ export const getOrderedRolledFiles = async ({ logFileBaseName, logFileFolder, diff --git a/src/core/server/logging/appenders/rolling_file/strategies/strategy.ts b/src/core/server/logging/appenders/rolling_file/strategies/strategy.ts index ad075e1e1c109..fb5984dfb5df3 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/strategy.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/strategy.ts @@ -18,7 +18,7 @@ */ /** - * A strategy to perform the log file rollout. + * A strategy to perform the log file rollover. */ export interface RollingStrategy { /** From 6694e24a1cf6bff9d3f2f467cbe7f220a7f2cc7e Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Mon, 7 Dec 2020 09:34:18 +0100 Subject: [PATCH 18/23] use `escapeRegExp` from lodash --- .../rolling_file/strategies/numeric/pattern_matcher.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.ts index e1b2f914bfbb7..91004cca94e26 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/pattern_matcher.ts @@ -17,16 +17,16 @@ * under the License. */ -const escape = (string: string) => string.replace(/[.*+\-?^${}()|[\]\\]/g, '\\$&'); +import { escapeRegExp } from 'lodash'; const createNumericMatcher = (fileBaseName: string, pattern: string): RegExp => { let extStart = fileBaseName.indexOf('.'); if (extStart === -1) { extStart = fileBaseName.length; } - const baseNameWithoutExt = escape(fileBaseName.substr(0, extStart)); - const extension = escape(fileBaseName.substr(extStart, fileBaseName.length)); - const processedPattern = escape(pattern) + const baseNameWithoutExt = escapeRegExp(fileBaseName.substr(0, extStart)); + const extension = escapeRegExp(fileBaseName.substr(extStart, fileBaseName.length)); + const processedPattern = escapeRegExp(pattern) // create matching group for `%i` .replace(/%i/g, '(?\\d+)'); return new RegExp(`^${baseNameWithoutExt}${processedPattern}${extension}$`); From 3aa0bc700443fa2323b61e4f5955cc3974863505 Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Tue, 8 Dec 2020 17:13:25 +0100 Subject: [PATCH 19/23] address some review comments --- .../appenders/rolling_file/strategies/fs.ts | 26 ------------- .../numeric/rolling_tasks.test.mocks.ts | 8 ++-- .../strategies/numeric/rolling_tasks.test.ts | 21 ++++++---- .../strategies/numeric/rolling_tasks.ts | 13 ++++--- .../rolling_file_appender.test.ts | 38 +++++++++---------- 5 files changed, 45 insertions(+), 61 deletions(-) delete mode 100644 src/core/server/logging/appenders/rolling_file/strategies/fs.ts diff --git a/src/core/server/logging/appenders/rolling_file/strategies/fs.ts b/src/core/server/logging/appenders/rolling_file/strategies/fs.ts deleted file mode 100644 index 5b089cc108de9..0000000000000 --- a/src/core/server/logging/appenders/rolling_file/strategies/fs.ts +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import Fs from 'fs'; -import { promisify } from 'util'; - -export const readdir = promisify(Fs.readdir); -export const unlink = promisify(Fs.unlink); -export const rename = promisify(Fs.rename); -export const exists = promisify(Fs.exists); diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.test.mocks.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.test.mocks.ts index 16ae8264edcf8..4355ec7ffb2ec 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.test.mocks.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.test.mocks.ts @@ -20,18 +20,18 @@ export const readdirMock = jest.fn(); export const unlinkMock = jest.fn(); export const renameMock = jest.fn(); -export const existsMock = jest.fn(); +export const accessMock = jest.fn(); -jest.doMock('../fs', () => ({ +jest.doMock('fs/promises', () => ({ readdir: readdirMock, unlink: unlinkMock, rename: renameMock, - exists: existsMock, + access: accessMock, })); export const clearAllMocks = () => { readdirMock.mockClear(); unlinkMock.mockClear(); renameMock.mockClear(); - existsMock.mockClear(); + accessMock.mockClear(); }; diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.test.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.test.ts index e730e00d98757..469ea450485a1 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.test.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.test.ts @@ -19,7 +19,7 @@ import { join } from 'path'; import { - existsMock, + accessMock, readdirMock, renameMock, unlinkMock, @@ -42,16 +42,18 @@ describe('NumericRollingStrategy tasks', () => { it('calls `exists` with the correct parameters', async () => { await shouldSkipRollout({ logFilePath: 'some-file' }); - expect(existsMock).toHaveBeenCalledTimes(1); - expect(existsMock).toHaveBeenCalledWith('some-file'); + expect(accessMock).toHaveBeenCalledTimes(1); + expect(accessMock).toHaveBeenCalledWith('some-file'); }); it('returns `true` if the file is current log file does not exist', async () => { - existsMock.mockResolvedValue(false); + accessMock.mockImplementation(() => { + throw new Error('ENOENT'); + }); expect(await shouldSkipRollout({ logFilePath: 'some-file' })).toEqual(true); }); it('returns `false` if the file is current log file exists', async () => { - existsMock.mockResolvedValue(true); + accessMock.mockResolvedValue(undefined); expect(await shouldSkipRollout({ logFilePath: 'some-file' })).toEqual(false); }); @@ -128,7 +130,12 @@ describe('NumericRollingStrategy tasks', () => { describe('getOrderedRolledFiles', () => { it('returns the rolled files matching the pattern in order', async () => { - readdirMock.mockResolvedValue(['kibana-3.log', 'kibana-1.log', 'kibana-2.log']); + readdirMock.mockResolvedValue([ + 'kibana-10.log', + 'kibana-1.log', + 'kibana-12.log', + 'kibana-2.log', + ]); const files = await 
getOrderedRolledFiles({ logFileFolder: 'log-folder', @@ -136,7 +143,7 @@ describe('NumericRollingStrategy tasks', () => { pattern: '-%i', }); - expect(files).toEqual(['kibana-1.log', 'kibana-2.log', 'kibana-3.log']); + expect(files).toEqual(['kibana-1.log', 'kibana-2.log', 'kibana-10.log', 'kibana-12.log']); }); it('ignores files that do no match the pattern', async () => { diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.ts index d14f24dd44fb3..6fe065c5c1561 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/rolling_tasks.ts @@ -18,14 +18,19 @@ */ import { join } from 'path'; +import { readdir, rename, unlink, access } from 'fs/promises'; import { getFileNameMatcher, getRollingFileName } from './pattern_matcher'; -import { readdir, rename, unlink, exists } from '../fs'; export const shouldSkipRollout = async ({ logFilePath }: { logFilePath: string }) => { // in case of time-interval triggering policy, we can have an entire // interval without any log event. In that case, the log file is not even // present, and we should not perform the rollout - return !(await exists(logFilePath)); + try { + await access(logFilePath); + return false; + } catch (e) { + return true; + } }; /** @@ -59,9 +64,7 @@ export const deleteFiles = async ({ logFileFolder: string; filesToDelete: string[]; }) => { - for (const fileToDelete of filesToDelete) { - await unlink(join(logFileFolder, fileToDelete)); - } + await Promise.all(filesToDelete.map((fileToDelete) => unlink(join(logFileFolder, fileToDelete)))); }; export const rollPreviousFilesInOrder = async ({ diff --git a/src/core/server/logging/integration_tests/rolling_file_appender.test.ts b/src/core/server/logging/integration_tests/rolling_file_appender.test.ts index b83e1d2821520..4680740195b44 100644 --- a/src/core/server/logging/integration_tests/rolling_file_appender.test.ts +++ b/src/core/server/logging/integration_tests/rolling_file_appender.test.ts @@ -18,7 +18,7 @@ */ import { join } from 'path'; -import { rmdirSync, mkdtempSync, readFileSync, readdirSync } from 'fs'; +import { rmdir, mkdtemp, readFile, readdir } from 'fs/promises'; import moment from 'moment-timezone'; import * as kbnTestServer from '../../../test_helpers/kbn_server'; import { getNextRollingTime } from '../appenders/rolling_file/policies/time_interval/get_next_rolling_time'; @@ -50,17 +50,17 @@ describe('RollingFileAppender', () => { let testDir: string; let logFile: string; - const getFileContent = (basename: string) => - readFileSync(join(testDir, basename)).toString('utf-8'); + const getFileContent = async (basename: string) => + (await readFile(join(testDir, basename))).toString('utf-8'); beforeEach(async () => { - testDir = mkdtempSync('rolling-test'); + testDir = await mkdtemp('rolling-test'); logFile = join(testDir, 'kibana.log'); }); afterEach(async () => { try { - rmdirSync(testDir); + await rmdir(testDir); } catch (e) { /* trap */ } @@ -110,12 +110,12 @@ describe('RollingFileAppender', () => { await flush(); - const files = readdirSync(testDir).sort(); + const files = await readdir(testDir); - expect(files).toEqual(['kibana.1.log', 'kibana.2.log', 'kibana.log']); - expect(getFileContent('kibana.log')).toEqual(expectedFileContent([7])); - expect(getFileContent('kibana.1.log')).toEqual(expectedFileContent([4, 5, 6])); - 
expect(getFileContent('kibana.2.log')).toEqual(expectedFileContent([1, 2, 3])); + expect(files.sort()).toEqual(['kibana.1.log', 'kibana.2.log', 'kibana.log']); + expect(await getFileContent('kibana.log')).toEqual(expectedFileContent([7])); + expect(await getFileContent('kibana.1.log')).toEqual(expectedFileContent([4, 5, 6])); + expect(await getFileContent('kibana.2.log')).toEqual(expectedFileContent([1, 2, 3])); }); it('only keep the correct number of files', async () => { @@ -157,12 +157,12 @@ describe('RollingFileAppender', () => { await flush(); - const files = readdirSync(testDir).sort(); + const files = await readdir(testDir); - expect(files).toEqual(['kibana-1.log', 'kibana-2.log', 'kibana.log']); - expect(getFileContent('kibana.log')).toEqual(expectedFileContent([7, 8])); - expect(getFileContent('kibana-1.log')).toEqual(expectedFileContent([5, 6])); - expect(getFileContent('kibana-2.log')).toEqual(expectedFileContent([3, 4])); + expect(files.sort()).toEqual(['kibana-1.log', 'kibana-2.log', 'kibana.log']); + expect(await getFileContent('kibana.log')).toEqual(expectedFileContent([7, 8])); + expect(await getFileContent('kibana-1.log')).toEqual(expectedFileContent([5, 6])); + expect(await getFileContent('kibana-2.log')).toEqual(expectedFileContent([3, 4])); }); }); @@ -210,11 +210,11 @@ describe('RollingFileAppender', () => { await flush(); - const files = readdirSync(testDir).sort(); + const files = await readdir(testDir); - expect(files).toEqual(['kibana-1.log', 'kibana.log']); - expect(getFileContent('kibana.log')).toEqual(expectedFileContent([3, 4])); - expect(getFileContent('kibana-1.log')).toEqual(expectedFileContent([1, 2])); + expect(files.sort()).toEqual(['kibana-1.log', 'kibana.log']); + expect(await getFileContent('kibana.log')).toEqual(expectedFileContent([3, 4])); + expect(await getFileContent('kibana-1.log')).toEqual(expectedFileContent([1, 2])); }); }); }); From 2cf122f2b128d02e0021d5bd685e8f86ff45c095 Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Wed, 9 Dec 2020 08:04:18 +0100 Subject: [PATCH 20/23] a few more nits --- .../rolling_file/strategies/numeric/numeric_strategy.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts index 75ba2fe7c772d..009f34f4a6203 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/numeric/numeric_strategy.ts @@ -72,7 +72,7 @@ export const numericRollingStrategyConfigSchema = schema.object({ } }, }), - max: schema.number({ min: 1, defaultValue: 7 }), + max: schema.number({ min: 1, max: 100, defaultValue: 7 }), }); /** @@ -144,6 +144,9 @@ export class NumericRollingStrategy implements RollingStrategy { await rollCurrentFile({ pattern, logFileBaseName, logFileFolder }); // updates the context file info to mirror the new size and date + // this is required for the time based policy, as the next time check + // will be performed before the file manager updates the context itself by reopening + // a writer to the new file. 
this.context.refreshFileInfo(); } } From a8d26ef9ffc029286e20285ddb1d74ea8b5f9147 Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Wed, 9 Dec 2020 08:14:10 +0100 Subject: [PATCH 21/23] extract `isDevCliParent` check outside of LoggingSystem.upgrade --- src/core/server/logging/logging_system.test.ts | 6 +----- src/core/server/logging/logging_system.ts | 14 ++++++-------- src/core/server/root/index.ts | 11 +++++++---- 3 files changed, 14 insertions(+), 17 deletions(-) diff --git a/src/core/server/logging/logging_system.test.ts b/src/core/server/logging/logging_system.test.ts index c0b731fe1836a..171a88f28e128 100644 --- a/src/core/server/logging/logging_system.test.ts +++ b/src/core/server/logging/logging_system.test.ts @@ -33,20 +33,16 @@ jest.mock('@kbn/legacy-logging', () => ({ const timestamp = new Date(Date.UTC(2012, 1, 1, 14, 33, 22, 11)); let mockConsoleLog: jest.SpyInstance; -import { REPO_ROOT } from '@kbn/dev-utils'; import { createWriteStream } from 'fs'; const mockCreateWriteStream = (createWriteStream as unknown) as jest.Mock; -import { Env } from '../config'; -import { getEnvOptions } from '../config/mocks'; import { LoggingSystem, config } from '.'; let system: LoggingSystem; beforeEach(() => { mockConsoleLog = jest.spyOn(global.console, 'log').mockReturnValue(undefined); jest.spyOn(global, 'Date').mockImplementation(() => timestamp); - const env = Env.createDefault(REPO_ROOT, getEnvOptions()); - system = new LoggingSystem(env); + system = new LoggingSystem(); }); afterEach(() => { diff --git a/src/core/server/logging/logging_system.ts b/src/core/server/logging/logging_system.ts index adb493dbbe4fa..d9e7eb70dc4ff 100644 --- a/src/core/server/logging/logging_system.ts +++ b/src/core/server/logging/logging_system.ts @@ -19,7 +19,6 @@ import type { PublicMethodsOf } from '@kbn/utility-types'; import { DisposableAppender, LogLevel, Logger, LoggerFactory } from '@kbn/logging'; -import { Env } from '@kbn/config'; import { Appenders } from './appenders/appenders'; import { BufferAppender } from './appenders/buffer/buffer_appender'; import { BaseLogger } from './logger'; @@ -50,7 +49,7 @@ export class LoggingSystem implements LoggerFactory { private readonly loggers: Map = new Map(); private readonly contextConfigs = new Map(); - constructor(private readonly env: Env) {} + constructor() {} public get(...contextParts: string[]): Logger { const context = LoggingConfig.getLoggerContext(contextParts); @@ -69,13 +68,12 @@ export class LoggingSystem implements LoggerFactory { /** * Updates all current active loggers with the new config values. - * @param rawConfig New config instance. + * @param rawConfig New config instance. if unspecified, the default logging configuration + * will be used. */ - public async upgrade(rawConfig: LoggingConfigType) { - // We only want the console appender for the CLI process, - // so we use the 'default' configuration as defined by the schema. - const usedConfig = this.env.isDevCliParent ? loggingConfig.schema.validate({}) : rawConfig; - const config = new LoggingConfig(usedConfig)!; + public async upgrade(rawConfig?: LoggingConfigType) { + const usedConfig = rawConfig ?? loggingConfig.schema.validate({}); + const config = new LoggingConfig(usedConfig); await this.applyBaseConfig(config); } diff --git a/src/core/server/root/index.ts b/src/core/server/root/index.ts index a68eeec9d2307..1f3aa87498922 100644 --- a/src/core/server/root/index.ts +++ b/src/core/server/root/index.ts @@ -17,7 +17,7 @@ * under the License. 
*/ -import { ConnectableObservable, Subscription } from 'rxjs'; +import { ConnectableObservable, Subscription, of } from 'rxjs'; import { first, publishReplay, switchMap, concatMap, tap } from 'rxjs/operators'; import { Env, RawConfigurationProvider } from '../config'; @@ -36,10 +36,10 @@ export class Root { constructor( rawConfigProvider: RawConfigurationProvider, - env: Env, + private readonly env: Env, private readonly onShutdown?: (reason?: Error | string) => void ) { - this.loggingSystem = new LoggingSystem(env); + this.loggingSystem = new LoggingSystem(); this.logger = this.loggingSystem.asLoggerFactory(); this.log = this.logger.get('root'); this.server = new Server(rawConfigProvider, env, this.loggingSystem); @@ -98,7 +98,10 @@ export class Root { // Stream that maps config updates to logger updates, including update failures. const update$ = configService.getConfig$().pipe( // always read the logging config when the underlying config object is re-read - switchMap(() => configService.atPath('logging')), + // except for the CLI process where we only apply the default logging config once + switchMap(() => + this.env.isDevCliParent ? of(undefined) : configService.atPath('logging') + ), concatMap((config) => this.loggingSystem.upgrade(config)), // This specifically console.logs because we were not able to configure the logger. // eslint-disable-next-line no-console From e76a08906478b4b23b5f590b094173f8aa2fe3f4 Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Wed, 9 Dec 2020 08:28:14 +0100 Subject: [PATCH 22/23] log errors from context --- .../logging/appenders/rolling_file/rolling_file_appender.ts | 2 +- .../logging/appenders/rolling_file/rolling_file_context.ts | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts index 0309196933d22..3ec5c62aec3bb 100644 --- a/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts +++ b/src/core/server/logging/appenders/rolling_file/rolling_file_appender.ts @@ -140,7 +140,7 @@ export class RollingFileAppender implements DisposableAppender { await this.fileManager.closeStream(); } catch (e) { // eslint-disable-next-line no-console - console.log('Error while rolling file: ', e); + console.error('[RollingFileAppender]: error while rolling file: ', e); } this.rollingPromise = undefined; this.isRolling = false; diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_context.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_context.ts index c01291aa8c75e..376878ab55cc6 100644 --- a/src/core/server/logging/appenders/rolling_file/rolling_file_context.ts +++ b/src/core/server/logging/appenders/rolling_file/rolling_file_context.ts @@ -41,6 +41,10 @@ export class RollingFileContext { } catch (e) { this.currentFileTime = Date.now(); this.currentFileSize = 0; + if (e.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log('[RollingFileAppender] error accessing the log file', e); + } } } } From bbb96f4d2b45282b9b72dfe96882d98082f6a30f Mon Sep 17 00:00:00 2001 From: pgayvallet Date: Thu, 10 Dec 2020 08:54:33 +0100 Subject: [PATCH 23/23] add defaults for policy/strategy --- .../appenders/rolling_file/policies/index.ts | 15 +++++++++++---- .../time_interval/time_interval_policy.ts | 2 +- .../rolling_file/rolling_file_context.ts | 6 +++--- .../appenders/rolling_file/strategies/index.ts | 10 +++++++++- 4 files changed, 24 insertions(+), 9 
deletions(-) diff --git a/src/core/server/logging/appenders/rolling_file/policies/index.ts b/src/core/server/logging/appenders/rolling_file/policies/index.ts index 3604420a69597..66eb7f039d37b 100644 --- a/src/core/server/logging/appenders/rolling_file/policies/index.ts +++ b/src/core/server/logging/appenders/rolling_file/policies/index.ts @@ -18,6 +18,7 @@ */ import { schema } from '@kbn/config-schema'; +import moment from 'moment-timezone'; import { assertNever } from '@kbn/std'; import { TriggeringPolicy } from './policy'; import { RollingFileContext } from '../rolling_file_context'; @@ -43,10 +44,16 @@ export type TriggeringPolicyConfig = | SizeLimitTriggeringPolicyConfig | TimeIntervalTriggeringPolicyConfig; -export const triggeringPolicyConfigSchema = schema.oneOf([ - sizeLimitTriggeringPolicyConfigSchema, - timeIntervalTriggeringPolicyConfigSchema, -]); +const defaultPolicy: TimeIntervalTriggeringPolicyConfig = { + kind: 'time-interval', + interval: moment.duration(24, 'hour'), + modulate: true, +}; + +export const triggeringPolicyConfigSchema = schema.oneOf( + [sizeLimitTriggeringPolicyConfigSchema, timeIntervalTriggeringPolicyConfigSchema], + { defaultValue: defaultPolicy } +); export const createTriggeringPolicy = ( config: TriggeringPolicyConfig, diff --git a/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts b/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts index 352e124875845..330a74b03f20e 100644 --- a/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts +++ b/src/core/server/logging/appenders/rolling_file/policies/time_interval/time_interval_policy.ts @@ -54,7 +54,7 @@ export const timeIntervalTriggeringPolicyConfigSchema = schema.object({ defaultValue: '24h', validate: (interval) => { if (!isValidRolloverInterval(interval)) { - return 'Interval value cannot overflow to a higher field.'; + return 'Interval value cannot overflow to a higher time unit.'; } }, }), diff --git a/src/core/server/logging/appenders/rolling_file/rolling_file_context.ts b/src/core/server/logging/appenders/rolling_file/rolling_file_context.ts index 376878ab55cc6..ed3b30cea2330 100644 --- a/src/core/server/logging/appenders/rolling_file/rolling_file_context.ts +++ b/src/core/server/logging/appenders/rolling_file/rolling_file_context.ts @@ -39,12 +39,12 @@ export class RollingFileContext { this.currentFileTime = birthtime.getTime(); this.currentFileSize = size; } catch (e) { - this.currentFileTime = Date.now(); - this.currentFileSize = 0; if (e.code !== 'ENOENT') { // eslint-disable-next-line no-console - console.log('[RollingFileAppender] error accessing the log file', e); + console.error('[RollingFileAppender] error accessing the log file', e); } + this.currentFileTime = Date.now(); + this.currentFileSize = 0; } } } diff --git a/src/core/server/logging/appenders/rolling_file/strategies/index.ts b/src/core/server/logging/appenders/rolling_file/strategies/index.ts index 7668daf05345b..e51a16a0026a8 100644 --- a/src/core/server/logging/appenders/rolling_file/strategies/index.ts +++ b/src/core/server/logging/appenders/rolling_file/strategies/index.ts @@ -29,7 +29,15 @@ import { RollingFileContext } from '../rolling_file_context'; export { RollingStrategy } from './strategy'; export type RollingStrategyConfig = NumericRollingStrategyConfig; -export const rollingStrategyConfigSchema = schema.oneOf([numericRollingStrategyConfigSchema]); +const defaultStrategy: 
NumericRollingStrategyConfig = { + kind: 'numeric', + pattern: '-%i', + max: 7, +}; + +export const rollingStrategyConfigSchema = schema.oneOf([numericRollingStrategyConfigSchema], { + defaultValue: defaultStrategy, +}); export const createRollingStrategy = ( config: RollingStrategyConfig,
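With these defaults in place, both `policy` and `strategy` become optional in the appender configuration: when omitted, they should resolve to the 24h `time-interval` policy and the `-%i` / `max: 7` numeric strategy declared above. The following sketch assumes the usual `schema.oneOf(..., { defaultValue })` behavior and a pattern layout that accepts only its `kind`; it is an illustration, not part of the patch.

import { RollingFileAppender } from './rolling_file_appender';

// Minimal configuration: only the required keys are provided...
const validated = RollingFileAppender.configSchema.validate({
  kind: 'rolling-file',
  path: '/var/logs/kibana.log',
  layout: { kind: 'pattern' },
});

// ...and the validated config is expected to carry the defaults declared above:
// validated.policy   -> { kind: 'time-interval', interval: 24h, modulate: true }
// validated.strategy -> { kind: 'numeric', pattern: '-%i', max: 7 }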