Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

NSFS | NC | Add Schema Validation to Bucket and Account Add and Update (NC NSFS CLI) #7702

Merged
merged 1 commit into from
Jan 10, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
37 changes: 33 additions & 4 deletions src/cmd/manage_nsfs.js
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ const SensitiveString = require('../util/sensitive_string');
const ManageCLIError = require('../manage_nsfs/manage_nsfs_cli_errors').ManageCLIError;
const ManageCLIResponse = require('../manage_nsfs/manage_nsfs_cli_responses').ManageCLIResponse;
const bucket_policy_utils = require('../endpoint/s3/s3_bucket_policy_utils');
const nsfs_schema_utils = require('../manage_nsfs/nsfs_schema_utils');

const TYPES = {
ACCOUNT: 'account',
Expand Down Expand Up @@ -199,6 +200,7 @@ async function main(argv = minimist(process.argv.slice(2))) {
dbg.log1('NSFS Manage command: exit on error', err.stack || err);
const manage_err = ((err instanceof ManageCLIError) && err) ||
new ManageCLIError(ManageCLIError.FS_ERRORS_TO_MANAGE[err.code] ||
ManageCLIError.RPC_ERROR_TO_MANAGE[err.rpc_code] ||
ManageCLIError.InternalError);
throw_cli_error(manage_err, err.stack || err);
}
Expand Down Expand Up @@ -254,7 +256,10 @@ async function fetch_bucket_data(argv, from_file) {
bucket_owner: new SensitiveString(String(data.bucket_owner)),
// update bucket identifier
new_name: data.new_name && new SensitiveString(String(data.new_name)),
fs_backend: data.fs_backend || undefined
// fs_backend deletion is specified with an empty string '' (but '' itself is not part of the schema)
fs_backend: data.fs_backend || undefined,
// s3_policy deletion is specified with an empty string '' (but '' itself is not part of the schema)
s3_policy: data.s3_policy || undefined,
};

return data;
Expand Down Expand Up @@ -288,6 +293,10 @@ async function add_bucket(data) {
if (exists) throw_cli_error(ManageCLIError.BucketAlreadyExists, data.name.unwrap());

const data_json = JSON.stringify(data);
// We take an object that was stringified
// (stringifying unwraps the sensitive strings, converts creation_date to a string and removes undefined properties);
// to validate against the schema we need an object, hence we parse it back to an object
nsfs_schema_utils.validate_bucket_schema(JSON.parse(data_json));
await native_fs_utils.create_config_file(fs_context, buckets_dir_path, bucket_conf_path, data_json);
write_stdout_response(ManageCLIResponse.BucketCreated, data_json);
}
Expand Down Expand Up @@ -316,6 +325,10 @@ async function update_bucket(data) {
if (!update_name) {
const bucket_config_path = get_config_file_path(buckets_dir_path, data.name);
data = JSON.stringify(data);
// We take an object that was stringified
// (stringifying unwraps the sensitive strings, converts creation_date to a string and removes undefined properties);
// to validate against the schema we need an object, hence we parse it back to an object
nsfs_schema_utils.validate_bucket_schema(JSON.parse(data));
await native_fs_utils.update_config_file(fs_context, buckets_dir_path, bucket_config_path, data);
write_stdout_response(ManageCLIResponse.BucketUpdated, data);
return;
Expand All @@ -330,7 +343,10 @@ async function update_bucket(data) {
if (exists) throw_cli_error(ManageCLIError.BucketAlreadyExists, data.name.unwrap());

data = JSON.stringify(_.omit(data, ['new_name']));

// We take an object that was stringified
// (stringifying unwraps the sensitive strings, converts creation_date to a string and removes undefined properties);
// to validate against the schema we need an object, hence we parse it back to an object
nsfs_schema_utils.validate_bucket_schema(JSON.parse(data));
await native_fs_utils.create_config_file(fs_context, buckets_dir_path, new_bucket_config_path, data);
await native_fs_utils.delete_config_file(fs_context, buckets_dir_path, cur_bucket_config_path);
write_stdout_response(ManageCLIResponse.BucketUpdated, data);
Expand Down Expand Up @@ -430,8 +446,8 @@ async function fetch_account_data(argv, from_file) {
access_keys,
nsfs_account_config: {
distinguished_name: argv.user,
uid: !argv.user && argv.uid,
gid: !argv.user && argv.gid,
uid: argv.user ? undefined : argv.uid,
gid: argv.user ? undefined : argv.gid,
new_buckets_path: argv.new_buckets_path,
fs_backend: argv.fs_backend ? String(argv.fs_backend) : config.NSFS_NC_STORAGE_BACKEND
}
Expand All @@ -456,6 +472,7 @@ async function fetch_account_data(argv, from_file) {
uid: data.nsfs_account_config.uid && Number(data.nsfs_account_config.uid),
gid: data.nsfs_account_config.gid && Number(data.nsfs_account_config.gid),
new_buckets_path: data.nsfs_account_config.new_buckets_path,
// fs_backend deletion is specified with an empty string '' (but '' itself is not part of the schema)
fs_backend: data.nsfs_account_config.fs_backend || undefined
},
allow_bucket_creation: !is_undefined(data.nsfs_account_config.new_buckets_path),
Expand Down Expand Up @@ -503,6 +520,10 @@ async function add_account(data) {
}

data = JSON.stringify(data);
// We take an object that was stringified
// (stringifying unwraps the sensitive strings, converts creation_date to a string and removes undefined properties);
// to validate against the schema we need an object, hence we parse it back to an object
nsfs_schema_utils.validate_account_schema(JSON.parse(data));
await native_fs_utils.create_config_file(fs_context, accounts_dir_path, account_config_path, data);
await native_fs_utils._create_path(access_keys_dir_path, fs_context, config.BASE_MODE_CONFIG_DIR);
await nb_native().fs.symlink(fs_context, account_config_path, account_config_access_key_path);
Expand All @@ -522,6 +543,10 @@ async function update_account(data) {
if (!update_name && !update_access_key) {
const account_config_path = get_config_file_path(accounts_dir_path, data.name);
data = JSON.stringify(data);
// We take an object that was stringified
// (stringifying unwraps the sensitive strings, converts creation_date to a string and removes undefined properties);
// to validate against the schema we need an object, hence we parse it back to an object
nsfs_schema_utils.validate_account_schema(JSON.parse(data));
await native_fs_utils.update_config_file(fs_context, accounts_dir_path, account_config_path, data);
write_stdout_response(ManageCLIResponse.AccountUpdated, data);
return;
Expand All @@ -540,6 +565,10 @@ async function update_account(data) {
throw_cli_error(err_code);
}
data = JSON.stringify(_.omit(data, ['new_name', 'new_access_key']));
// We take an object that was stringified
// (stringifying unwraps the sensitive strings, converts creation_date to a string and removes undefined properties);
// to validate against the schema we need an object, hence we parse it back to an object
nsfs_schema_utils.validate_account_schema(JSON.parse(data));
if (update_name) {
await native_fs_utils.create_config_file(fs_context, accounts_dir_path, new_account_config_path, data);
await native_fs_utils.delete_config_file(fs_context, accounts_dir_path, cur_account_config_path);
Expand Down
9 changes: 9 additions & 0 deletions src/manage_nsfs/manage_nsfs_cli_errors.js
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,11 @@ ManageCLIError.MissingConfigDirPath = Object.freeze({
message: 'Config dir path should not be empty',
http_code: 400,
});
// Returned when a bucket/account config object fails JSON-schema validation
// (mapped from the INVALID_SCHEMA rpc_code via RPC_ERROR_TO_MANAGE).
ManageCLIError.InvalidSchema = Object.freeze({
code: 'InvalidSchema',
message: 'Schema invalid, please use required properties',
http_code: 400,
});

//////////////////////////////
//// IP WHITE LIST ERRORS ////
Expand Down Expand Up @@ -301,4 +306,8 @@ ManageCLIError.FS_ERRORS_TO_MANAGE = Object.freeze({
// EEXIST: ManageCLIError.BucketAlreadyExists,
});

// Maps rpc_code values thrown by lower layers (e.g. nsfs_schema_utils throws
// RpcError('INVALID_SCHEMA', ...)) to their ManageCLIError equivalents.
ManageCLIError.RPC_ERROR_TO_MANAGE = Object.freeze({
INVALID_SCHEMA: ManageCLIError.InvalidSchema,
});

exports.ManageCLIError = ManageCLIError;
43 changes: 43 additions & 0 deletions src/manage_nsfs/nsfs_schema_utils.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
/* Copyright (C) 2023 NooBaa */
'use strict';

// Module-level schema validation helpers for NC NSFS bucket/account configs.
// Builds a single shared Ajv instance configured with NooBaa's custom schema
// keywords and the common_api schema, then strictifies the NSFS schemas so
// unknown properties are rejected.
const RpcError = require('../rpc/rpc_error');
const { default: Ajv } = require('ajv');
const ajv = new Ajv({ verbose: true, allErrors: true });
const { KEYWORDS } = require('../util/schema_keywords');
const common_api = require('../api/common_api');
const schema_utils = require('../util/schema_utils');

// Register NooBaa's custom Ajv keywords so schemas using them compile.
ajv.addKeyword(KEYWORDS.methods);
ajv.addKeyword(KEYWORDS.doc);
ajv.addKeyword(KEYWORDS.date);
ajv.addKeyword(KEYWORDS.idate);
ajv.addKeyword(KEYWORDS.objectid);
ajv.addKeyword(KEYWORDS.binary);
ajv.addKeyword(KEYWORDS.wrapper);
// common_api is referenced by $ref from the NSFS schemas.
ajv.addSchema(common_api);

const bucket_schema = require('../server/object_services/schemas/nsfs_bucket_schema');
const account_schema = require('../server/object_services/schemas/nsfs_account_schema');

// Reject properties not declared in the schemas (strict validation).
schema_utils.strictify(bucket_schema, {
additionalProperties: false
});

schema_utils.strictify(account_schema, {
additionalProperties: false
});
/**
 * Validates a parsed account config object against the strict NSFS account schema.
 * @param {object} account - plain account config object (already JSON.parse'd,
 *   so SensitiveString values are unwrapped and undefined properties removed)
 * @throws {RpcError} INVALID_SCHEMA carrying the first ajv error message
 */
function validate_account_schema(account) {
    if (!ajv.validate(account_schema, account)) {
        throw new RpcError('INVALID_SCHEMA', ajv.errors[0]?.message);
    }
}

/**
 * Validates a parsed bucket config object against the strict NSFS bucket schema.
 * @param {object} bucket - plain bucket config object (already JSON.parse'd,
 *   so SensitiveString values are unwrapped and undefined properties removed)
 * @throws {RpcError} INVALID_SCHEMA carrying the first ajv error message
 */
function validate_bucket_schema(bucket) {
    if (!ajv.validate(bucket_schema, bucket)) {
        throw new RpcError('INVALID_SCHEMA', ajv.errors[0]?.message);
    }
}

//EXPORTS
exports.validate_account_schema = validate_account_schema;
exports.validate_bucket_schema = validate_bucket_schema;
29 changes: 3 additions & 26 deletions src/sdk/bucketspace_fs.js
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,7 @@ const BucketSpaceSimpleFS = require('./bucketspace_simple_fs');
const _ = require('lodash');
const util = require('util');
const bucket_policy_utils = require('../endpoint/s3/s3_bucket_policy_utils');
const { default: Ajv } = require('ajv');
const bucket_schema = require('../server/object_services/schemas/nsfs_bucket_schema');
const account_schema = require('../server/object_services/schemas/nsfs_account_schema');
const { KEYWORDS } = require('../util/schema_keywords');
const common_api = require('../api/common_api');
const nsfs_schema_utils = require('../manage_nsfs/nsfs_schema_utils');

const KeysSemaphore = require('../util/keys_semaphore');
const native_fs_utils = require('../util/native_fs_utils');
Expand All @@ -27,15 +23,6 @@ const dbg = require('../util/debug_module')(__filename);
const BUCKET_PATH = 'buckets';
const ACCOUNT_PATH = 'accounts';
const ACCESS_KEYS_PATH = 'access_keys';
const ajv = new Ajv({ verbose: true, allErrors: true });
ajv.addKeyword(KEYWORDS.methods);
ajv.addKeyword(KEYWORDS.doc);
ajv.addKeyword(KEYWORDS.date);
ajv.addKeyword(KEYWORDS.idate);
ajv.addKeyword(KEYWORDS.objectid);
ajv.addKeyword(KEYWORDS.binary);
ajv.addKeyword(KEYWORDS.wrapper);
ajv.addSchema(common_api);
const bucket_semaphore = new KeysSemaphore(1);

//TODO: dup from namespace_fs - need to handle and not dup code
Expand Down Expand Up @@ -109,7 +96,7 @@ class BucketSpaceFS extends BucketSpaceSimpleFS {
const iam_path = this._get_access_keys_config_path(access_key);
const { data } = await nb_native().fs.readFile(this.fs_context, iam_path);
const account = JSON.parse(data.toString());
this.validate_account_schema(account);
nsfs_schema_utils.validate_account_schema(account);
account.name = new SensitiveString(account.name);
account.email = new SensitiveString(account.email);
for (const k of account.access_keys) {
Expand Down Expand Up @@ -137,23 +124,13 @@ class BucketSpaceFS extends BucketSpaceSimpleFS {
}
}

validate_account_schema(account) {
const valid = ajv.validate(account_schema, account);
if (!valid) throw new RpcError('INVALID_SCHEMA', ajv.errors[0]?.message);
}

validate_bucket_schema(bucket) {
const valid = ajv.validate(bucket_schema, bucket);
if (!valid) throw new RpcError('INVALID_SCHEMA', ajv.errors[0]?.message);
}

async read_bucket_sdk_info({ name }) {
try {
const bucket_config_path = this._get_bucket_config_path(name);
dbg.log0('BucketSpaceFS.read_bucket_sdk_info: bucket_config_path', bucket_config_path);
const { data } = await nb_native().fs.readFile(this.fs_context, bucket_config_path);
const bucket = JSON.parse(data.toString());
this.validate_bucket_schema(bucket);
nsfs_schema_utils.validate_bucket_schema(bucket);
const is_valid = await this.check_bucket_config(bucket);
if (!is_valid) {
dbg.warn('BucketSpaceFS: one or more bucket config check is failed for bucket : ', name);
Expand Down
3 changes: 3 additions & 0 deletions src/server/object_services/schemas/nsfs_bucket_schema.js
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,9 @@ module.exports = {
},
versioning: {
type: 'string',
enum: ['DISABLED', 'SUSPENDED', 'ENABLED']
// GAP would like to use $ref: 'bucket_api#/definitions/versioning'
// but currently it creates an error Error: reference "bucket_api" resolves to more than one schema
},
path: {
type: 'string',
Expand Down
Loading