Skip to content

Commit

Permalink
PR3 - Persistent logging
Browse files Browse the repository at this point in the history
Signed-off-by: jackyalbo <jacky.albo@gmail.com>
  • Loading branch information
jackyalbo committed Jul 10, 2024
1 parent bc4eff6 commit 767db73
Show file tree
Hide file tree
Showing 27 changed files with 471 additions and 52 deletions.
4 changes: 4 additions & 0 deletions config.js
Original file line number Diff line number Diff line change
Expand Up @@ -659,6 +659,10 @@ config.BUCKET_DIFF_FOR_REPLICATION = true;

config.BUCKET_LOG_UPLOAD_ENABLED = true;
config.BUCKET_LOG_UPLOADER_DELAY = 5 * 60 * 1000;
// Bucket logging mode: 'PERSISTENT' (log records appended to a PersistentLogger file
// and exported later by the CLI/uploader) when GUARANTEED_LOGS_PATH is set in the env,
// otherwise 'BEST_EFFORT' (records sent to syslog over UDP).
config.BUCKET_LOG_TYPE = process.env.GUARANTEED_LOGS_PATH ? 'PERSISTENT' : 'BEST_EFFORT';
// Directory holding the persistent bucket-log files; undefined in BEST_EFFORT mode.
config.PERSISTENT_BUCKET_LOG_DIR = process.env.GUARANTEED_LOGS_PATH;
// Namespace passed to PersistentLogger for bucket-logging log files.
config.PERSISTENT_BUCKET_LOG_NS = 'bucket_logging';
// NOTE(review): presumably bounds concurrent log-export work; not used in the
// code visible here — confirm against the uploader implementation.
config.BUCKET_LOG_CONCURRENCY = 10;

///////////////////////////
// KEY ROTATOR //
Expand Down
15 changes: 8 additions & 7 deletions src/api/bucket_api.js
Original file line number Diff line number Diff line change
Expand Up @@ -782,16 +782,13 @@ module.exports = {
method: 'PUT',
params: {
type: 'object',
required: ['name', 'log_bucket', 'log_prefix'],
required: ['name', 'logging'],
properties: {
name: {
$ref: 'common_api#/definitions/bucket_name'
},
log_bucket: {
$ref: 'common_api#/definitions/bucket_name'
},
log_prefix: {
type: 'string',
logging: {
$ref: 'common_api#/definitions/bucket_logging'
},
},
},
Expand Down Expand Up @@ -828,7 +825,11 @@ module.exports = {
},
},
reply: {
$ref: 'common_api#/definitions/bucket_logging',
oneOf: [{
$ref: 'common_api#/definitions/bucket_logging',
}, {
type: 'null'
}]
},
auth: {
system: ['admin', 'user']
Expand Down
7 changes: 7 additions & 0 deletions src/cmd/manage_nsfs.js
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ const SensitiveString = require('../util/sensitive_string');
const ManageCLIError = require('../manage_nsfs/manage_nsfs_cli_errors').ManageCLIError;
const ManageCLIResponse = require('../manage_nsfs/manage_nsfs_cli_responses').ManageCLIResponse;
const manage_nsfs_glacier = require('../manage_nsfs/manage_nsfs_glacier');
const manage_nsfs_logging = require('../manage_nsfs/manage_nsfs_logging');
const nsfs_schema_utils = require('../manage_nsfs/nsfs_schema_utils');
const { print_usage } = require('../manage_nsfs/manage_nsfs_help_utils');
const { TYPES, ACTIONS, CONFIG_SUBDIRS,
Expand Down Expand Up @@ -74,6 +75,8 @@ async function main(argv = minimist(process.argv.slice(2))) {
await whitelist_ips_management(argv);
} else if (type === TYPES.GLACIER) {
await glacier_management(argv);
} else if (type === TYPES.LOGGING) {
await logging_management(user_input);
} else {
// we should not get here (we check it before)
throw_cli_error(ManageCLIError.InvalidType);
Expand Down Expand Up @@ -713,5 +716,9 @@ async function manage_glacier_operations(action, argv) {
}
}

/**
 * Handle the `logging` CLI type: delegate to manage_nsfs_logging to export
 * accumulated bucket logs to their target buckets.
 * @param {object} user_input - parsed/validated CLI arguments
 */
async function logging_management(user_input) {
    await manage_nsfs_logging.export_bucket_logging(user_input);
}

exports.main = main;
if (require.main === module) main();
14 changes: 7 additions & 7 deletions src/cmd/nsfs.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,6 @@
'use strict';
/* eslint-disable complexity */

require('../util/dotenv').load();
require('aws-sdk/lib/maintenance_mode_message').suppress = true;

const dbg = require('../util/debug_module')(__filename);
if (!dbg.get_process_name()) dbg.set_process_name('nsfs');
dbg.original_console();

// DO NOT PUT NEW REQUIREMENTS BEFORE SETTING process.env.NC_NSFS_NO_DB_ENV = 'true'
// NC nsfs deployments specifying process.env.LOCAL_MD_SERVER=true deployed together with a db
// when a system_store object is initialized VaccumAnalyzer is being called once a day.
Expand All @@ -17,6 +10,13 @@ dbg.original_console();
if (process.env.LOCAL_MD_SERVER !== 'true') {
process.env.NC_NSFS_NO_DB_ENV = 'true';
}
require('../util/dotenv').load();
require('aws-sdk/lib/maintenance_mode_message').suppress = true;

const dbg = require('../util/debug_module')(__filename);
if (!dbg.get_process_name()) dbg.set_process_name('nsfs');
dbg.original_console();

const config = require('../../config');

const os = require('os');
Expand Down
18 changes: 15 additions & 3 deletions src/endpoint/endpoint.js
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ const endpoint_stats_collector = require('../sdk/endpoint_stats_collector');
const { NamespaceMonitor } = require('../server/bg_services/namespace_monitor');
const { SemaphoreMonitor } = require('../server/bg_services/semaphore_monitor');
const prom_reporting = require('../server/analytic_services/prometheus_reporting');
const { PersistentLogger } = require('../util/persistent_logger');
const NoobaaEvent = require('../manage_nsfs/manage_nsfs_events_utils').NoobaaEvent;
const cluster = /** @type {import('node:cluster').Cluster} */ (
/** @type {unknown} */ (require('node:cluster'))
Expand All @@ -63,6 +64,7 @@ dbg.log0('endpoint: replacing old umask: ', old_umask.toString(8), 'with new uma
* func_sdk?: FuncSDK;
* sts_sdk?: StsSDK;
* virtual_hosts?: readonly string[];
* bucket_logger?: PersistentLogger;
* }} EndpointRequest
*/

Expand Down Expand Up @@ -97,6 +99,7 @@ async function create_https_server(ssl_cert_info, honorCipherOrder, endpoint_han
*/
/* eslint-disable max-statements */
async function main(options = {}) {
let bucket_logger;
try {
// setting process title needed for letting GPFS to identify the noobaa endpoint processes see issue #8039.
if (config.ENDPOINT_PROCESS_TITLE) {
Expand Down Expand Up @@ -127,6 +130,12 @@ async function main(options = {}) {
dbg.log0('Configured Virtual Hosts:', virtual_hosts);
dbg.log0('Configured Location Info:', location_info);

bucket_logger = config.BUCKET_LOG_TYPE === 'PERSISTENT' &&
new PersistentLogger(config.PERSISTENT_BUCKET_LOG_DIR, config.PERSISTENT_BUCKET_LOG_NS, {
locking: 'SHARED',
poll_interval: config.NSFS_GLACIER_LOGS_POLL_INTERVAL,
});

process.on('warning', e => dbg.warn(e.stack));

let internal_rpc_client;
Expand Down Expand Up @@ -164,8 +173,8 @@ async function main(options = {}) {
init_request_sdk = create_init_request_sdk(rpc, internal_rpc_client, object_io);
}

const endpoint_request_handler = create_endpoint_handler(init_request_sdk, virtual_hosts);
const endpoint_request_handler_sts = create_endpoint_handler(init_request_sdk, virtual_hosts, true);
const endpoint_request_handler = create_endpoint_handler(init_request_sdk, virtual_hosts, /*is_sts?*/ false, bucket_logger);
const endpoint_request_handler_sts = create_endpoint_handler(init_request_sdk, virtual_hosts, /*is_sts?*/ true);

const ssl_cert_info = await ssl_utils.get_ssl_cert_info('S3', options.nsfs_config_root);
const https_server = await create_https_server(ssl_cert_info, true, endpoint_request_handler);
Expand Down Expand Up @@ -243,6 +252,8 @@ async function main(options = {}) {
//noobaa crashed event
new NoobaaEvent(NoobaaEvent.ENDPOINT_CRASHED).create_event(undefined, undefined, err);
handle_server_error(err);
} finally {
if (bucket_logger) bucket_logger.close();
}
}

Expand All @@ -251,14 +262,15 @@ async function main(options = {}) {
* @param {readonly string[]} virtual_hosts
* @returns {EndpointHandler}
*/
function create_endpoint_handler(init_request_sdk, virtual_hosts, sts) {
function create_endpoint_handler(init_request_sdk, virtual_hosts, sts, logger) {
const blob_rest_handler = process.env.ENDPOINT_BLOB_ENABLED === 'true' ? blob_rest : unavailable_handler;
const lambda_rest_handler = config.DB_TYPE === 'mongodb' ? lambda_rest : unavailable_handler;

/** @type {EndpointHandler} */
const endpoint_request_handler = (req, res) => {
endpoint_utils.prepare_rest_request(req);
req.virtual_hosts = virtual_hosts;
if (logger) req.bucket_logger = logger;
init_request_sdk(req, res);
if (req.url.startsWith('/2015-03-31/functions')) {
return lambda_rest_handler(req, res);
Expand Down
4 changes: 3 additions & 1 deletion src/endpoint/s3/ops/s3_get_bucket_logging.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
/* Copyright (C) 2016 NooBaa */
'use strict';

const SensitiveString = require("../../../util/sensitive_string");

/**
* http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGETlogging.html
*/
Expand All @@ -11,7 +13,7 @@ async function get_bucket_logging(req) {
return {
BucketLoggingStatus: logging ? {
LoggingEnabled: {
TargetBucket: logging.log_bucket,
TargetBucket: logging.log_bucket instanceof SensitiveString ? logging.log_bucket.unwrap() : logging.log_bucket,
TargetPrefix: logging.log_prefix,
}
} : ''
Expand Down
26 changes: 19 additions & 7 deletions src/endpoint/s3/s3_bucket_logging.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,16 +5,16 @@ const dbg = require('../../util/debug_module')(__filename);
const http_utils = require('../../util/http_utils');
const dgram = require('node:dgram');
const { Buffer } = require('node:buffer');

const config = require('../../../config');

/**
 * Emit a bucket-access log record for the current S3 request if the target
 * bucket has logging enabled. Called once before the op (intent, no `res`)
 * and once after (with `res`) by s3_rest.
 *
 * NOTE: the scraped diff left both the pre- and post-change lines of this
 * function in place (two `if` headers, awaited and un-awaited call); this is
 * the de-duplicated post-change version.
 *
 * @param {object} req - S3 request; needs params.bucket, op_name, object_sdk
 * @param {object} [res] - HTTP response, absent when logging the intent only
 */
async function send_bucket_op_logs(req, res) {
    // No bucket param -> nothing to log. put_bucket is skipped because the
    // bucket's sdk config info does not exist yet at creation time -- TODO confirm.
    if (!req.params || !req.params.bucket || req.op_name === 'put_bucket') return;

    const bucket_info = await req.object_sdk.read_bucket_sdk_config_info(req.params.bucket);
    dbg.log2("read_bucket_sdk_config_info = ", bucket_info);

    if (is_bucket_logging_enabled(bucket_info)) {
        dbg.log2("Bucket logging is enabled for Bucket : ", req.params.bucket);
        // Awaited so PERSISTENT-mode append failures propagate to the caller.
        await endpoint_bucket_op_logs(req.op_name, req, res, bucket_info);
    }
}
Expand Down Expand Up @@ -54,15 +54,28 @@ const create_syslog_udp_socket = (() => {
})();


/**
 * Build one bucket-op log record and dispatch it according to
 * config.BUCKET_LOG_TYPE: 'PERSISTENT' appends to the per-request
 * PersistentLogger; anything else sends the record to syslog over UDP.
 *
 * Fixes vs. the scraped original: removes the leftover pre-change (non-async)
 * signature line the stripped diff kept, and guards req.bucket_logger so a
 * missing logger fails with a clear error instead of an opaque TypeError
 * (handlers can be created without a logger, e.g. the STS handler).
 *
 * @param {string} op_name - S3 op name (e.g. 'get_object')
 * @param {object} req - S3 request (carries bucket_logger and object_sdk)
 * @param {object} [res] - HTTP response, absent when logging intent
 * @param {object} source_bucket - bucket sdk config info of the logged bucket
 */
async function endpoint_bucket_op_logs(op_name, req, res, source_bucket) {
    // 1 - Get all the information to be logged in a log message.
    // 2 - Format it and send it to the persistent log or to syslog.
    const s3_log = get_bucket_log_record(op_name, source_bucket, req, res);
    dbg.log1("Bucket operation logs = ", s3_log);

    switch (config.BUCKET_LOG_TYPE) {
        case 'PERSISTENT': {
            // The logger is attached per-request by the endpoint only when
            // PERSISTENT mode is configured; fail loudly if it is absent.
            if (!req.bucket_logger) {
                throw new Error('BUCKET_LOG_TYPE is PERSISTENT but req.bucket_logger is not attached');
            }
            await req.bucket_logger.append(JSON.stringify(s3_log));
            break;
        }
        default: {
            send_op_logs_to_syslog(req.object_sdk.rpc_client.rpc.router.syslog, s3_log);
        }
    }
}

function send_op_logs_to_syslog(syslog, s3_log) {
const buffer = Buffer.from(JSON.stringify(s3_log));
const {client, port, hostname} = create_syslog_udp_socket(req.object_sdk.rpc_client.rpc.router.syslog);
const {client, port, hostname} = create_syslog_udp_socket(syslog);
if (client && port && hostname) {
client.send(buffer, port, hostname, err => {
if (err) {
Expand All @@ -72,13 +85,12 @@ function endpoint_bucket_op_logs(op_name, req, res, source_bucket) {
} else {
dbg.log0(`Could not send bucket logs: client: ${client} port: ${port} hostname:${hostname}`);
}

}

function get_bucket_log_record(op_name, source_bucket, req, res) {

const client_ip = http_utils.parse_client_ip(req);
let status_code;
let status_code = 102;
if (res && res.statusCode) {
status_code = res.statusCode;
}
Expand Down
16 changes: 14 additions & 2 deletions src/endpoint/s3/s3_rest.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ const s3_logging = require('./s3_bucket_logging');
const time_utils = require('../../util/time_utils');
const http_utils = require('../../util/http_utils');
const signature_utils = require('../../util/signature_utils');
const config = require('../../../config');

const S3_MAX_BODY_LEN = 4 * 1024 * 1024;

Expand Down Expand Up @@ -68,6 +69,11 @@ async function s3_rest(req, res) {
await handle_website_error(req, res, err);
} else {
handle_error(req, res, err);
try {
await s3_logging.send_bucket_op_logs(req, res); // logging again with error
} catch (err1) {
dbg.error("Could not log bucket operation:", err1);
}
}
}
}
Expand Down Expand Up @@ -118,7 +124,13 @@ async function handle_request(req, res) {
usage_report.s3_usage_info.total_calls += 1;
usage_report.s3_usage_info[op_name] = (usage_report.s3_usage_info[op_name] || 0) + 1;


if (config.BUCKET_LOG_TYPE === 'PERSISTENT') {
try {
await s3_logging.send_bucket_op_logs(req); // logging intension - no result
} catch (err) {
dbg.error("Could not log bucket operation:", err);
}
}

if (req.query && req.query.versionId) {
const caching = await req.object_sdk.read_bucket_sdk_caching_info(req.params.bucket);
Expand Down Expand Up @@ -151,7 +163,7 @@ async function handle_request(req, res) {
http_utils.send_reply(req, res, reply, options);
collect_bucket_usage(op, req, res);
try {
await s3_logging.send_bucket_op_logs(req);
await s3_logging.send_bucket_op_logs(req, res); // logging again with result
} catch (err) {
dbg.error("Could not log bucket operation:", err);
}
Expand Down
13 changes: 12 additions & 1 deletion src/manage_nsfs/manage_nsfs_cli_errors.js
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ ManageCLIError.InvalidArgumentType = Object.freeze({

ManageCLIError.InvalidType = Object.freeze({
code: 'InvalidType',
message: 'Invalid type, available types are account, bucket or whitelist',
message: 'Invalid type, available types are account, bucket, logging or whitelist',
http_code: 400,
});

Expand Down Expand Up @@ -157,6 +157,16 @@ ManageCLIError.InvalidMasterKey = Object.freeze({
http_code: 500,
});

//////////////////////////////
//// BUCKET LOGGING ERRORS ///
//////////////////////////////

// Raised when the `manage_nsfs logging` export flow fails; mapped to
// NoobaaEvent.LOGGING_FAILED via NSFS_CLI_ERROR_EVENT_MAP below.
ManageCLIError.LoggingExportFailed = Object.freeze({
    code: 'LoggingExportFailed',
    // typo fix in user-facing message: "attmept" -> "attempt"
    message: 'Logging export attempt failed',
    http_code: 500,
});

////////////////////////
//// ACCOUNT ERRORS ////
////////////////////////
Expand Down Expand Up @@ -423,6 +433,7 @@ const NSFS_CLI_ERROR_EVENT_MAP = {
AccountDeleteForbiddenHasBuckets: NoobaaEvent.ACCOUNT_DELETE_FORBIDDEN,
BucketAlreadyExists: NoobaaEvent.BUCKET_ALREADY_EXISTS,
BucketSetForbiddenNoBucketOwner: NoobaaEvent.UNAUTHORIZED,
LoggingExportFailed: NoobaaEvent.LOGGING_FAILED,
};

exports.ManageCLIError = ManageCLIError;
Expand Down
9 changes: 9 additions & 0 deletions src/manage_nsfs/manage_nsfs_cli_responses.js
Original file line number Diff line number Diff line change
Expand Up @@ -102,12 +102,21 @@ ManageCLIResponse.BucketList = Object.freeze({
list: {}
});

///////////////////////////////
// LOGGING RESPONSES ///
///////////////////////////////
// Success response for the `manage_nsfs logging` export flow; mapped to
// NoobaaEvent.LOGGING_EXPORTED via NSFS_CLI_SUCCESS_EVENT_MAP below.
// `status` is filled by the caller with per-run export details -- TODO confirm.
ManageCLIResponse.LoggingExported = Object.freeze({
    code: 'LoggingExported',
    status: {}
});

const NSFS_CLI_SUCCESS_EVENT_MAP = {
AccountCreated: NoobaaEvent.ACCOUNT_CREATED,
AccountDeleted: NoobaaEvent.ACCOUNT_DELETED,
BucketCreated: NoobaaEvent.BUCKET_CREATED,
BucketDeleted: NoobaaEvent.BUCKET_DELETE,
WhiteListIPUpdated: NoobaaEvent.WHITELIST_UPDATED,
LoggingExported: NoobaaEvent.LOGGING_EXPORTED,
};

exports.ManageCLIResponse = ManageCLIResponse;
Expand Down
3 changes: 2 additions & 1 deletion src/manage_nsfs/manage_nsfs_constants.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@ const TYPES = {
BUCKET: 'bucket',
IP_WHITELIST: 'whitelist',
GLACIER: 'glacier',
HEALTH: 'health'
HEALTH: 'health',
LOGGING: 'logging',
};

const ACTIONS = {
Expand Down
22 changes: 22 additions & 0 deletions src/manage_nsfs/manage_nsfs_events_utils.js
Original file line number Diff line number Diff line change
Expand Up @@ -325,4 +325,26 @@ NoobaaEvent.INVALID_BUCKET_STATE = Object.freeze({
state: 'HEALTHY',
});

// Event emitted when bucket logs are successfully exported to their target buckets.
NoobaaEvent.LOGGING_EXPORTED = Object.freeze({
    event_code: 'bucket_logging_exported',
    entity_type: 'NODE',
    event_type: 'INFO',
    // grammar fix in user-facing text: "logs was" -> "logs were"
    message: 'Bucket logs were exported to target buckets',
    description: 'Bucket logs were successfully exported to target buckets',
    scope: 'NODE',
    severity: 'INFO',
    state: 'HEALTHY',
});

// Event emitted when the bucket logging export flow fails; paired with
// ManageCLIError.LoggingExportFailed in NSFS_CLI_ERROR_EVENT_MAP.
NoobaaEvent.LOGGING_FAILED = Object.freeze({
    event_code: 'bucket_logging_export_failed',
    entity_type: 'NODE',
    event_type: 'ERROR',
    message: 'Bucket logging export failed.',
    description: 'Bucket logging export failed due to error',
    scope: 'NODE',
    severity: 'ERROR',
    state: 'DEGRADED',
});

exports.NoobaaEvent = NoobaaEvent;
Loading

0 comments on commit 767db73

Please sign in to comment.