
Add export-rules command #639

Merged
merged 19 commits into main from cli/export-rule on Feb 9, 2021
Changes from 12 commits
19 commits
b1ca536
Add export-rule command to CLI
brokensound77 Nov 25, 2020
0da2e55
move export rule to packaging method
brokensound77 Nov 26, 2020
6b5ee7a
Merge branch 'main' into cli/export-rule
rw-access Nov 30, 2020
7de7f43
add checks for missing rule ID and duplicate rules
brokensound77 Dec 1, 2020
a038841
Merge remote-tracking branch 'origin/cli/export-rule' into cli/export…
brokensound77 Dec 1, 2020
133f19b
Merge branch 'main' into cli/export-rule
brokensound77 Dec 1, 2020
8593897
Merge branch 'main' into cli/export-rule
brokensound77 Dec 3, 2020
9b89219
Merge branch 'main' into cli/export-rule
brokensound77 Dec 7, 2020
c368f71
Merge remote-tracking branch 'upstream/main' into cli/export-rule
brokensound77 Dec 18, 2020
65cf195
add --downgrade-version option
brokensound77 Dec 19, 2020
23b4491
Merge remote-tracking branch 'upstream/main' into cli/export-rule
brokensound77 Dec 19, 2020
c90e4b5
open file more lazily to avoid erroring out with open handle
brokensound77 Dec 19, 2020
bf3c389
Merge branch 'main' into cli/export-rule
threat-punter Jan 5, 2021
4b3197f
Merge remote-tracking branch 'upstream/main' into cli/export-rule
brokensound77 Feb 3, 2021
3681b7c
tweaks varname changes from feedback
brokensound77 Feb 3, 2021
d54fa3e
Merge remote-tracking branch 'origin/cli/export-rule' into cli/export…
brokensound77 Feb 3, 2021
0d1cefb
Merge remote-tracking branch 'upstream/main' into cli/export-rule
brokensound77 Feb 4, 2021
7b2f1ea
buffer exported rules and save
brokensound77 Feb 9, 2021
c58d2b9
Merge branch 'main' into cli/export-rule
brokensound77 Feb 9, 2021
2 changes: 2 additions & 0 deletions .gitignore
@@ -108,3 +108,5 @@ ENV/
# Siem rules
releases/
collections/
exports/
surveys/
22 changes: 22 additions & 0 deletions CLI.md
@@ -167,6 +167,28 @@ Options:
-h, --help Show this message and exit.
```

Alternatively, rules can be exported into a consolidated ndjson file, which can be imported directly into the Kibana security app.

```console
Usage: detection_rules export-rules [OPTIONS] [RULE_ID]...

Export rule(s) into an importable ndjson file.

Options:
-f, --rule-file FILE Export specified rule files
-d, --directory DIRECTORY Recursively export rules from a directory
-o, --outfile FILE Name of file for exported rules
-r, --replace-id Replace rule IDs with new IDs before export
--downgrade-version [7.8|7.9|7.10|7.11]
Downgrade a rule version to be compatible
with older instances of Kibana
-s, --skip-unsupported If `--downgrade-version` is passed, skip
rule types which are unsupported (an error
will be raised otherwise)
-h, --help Show this message and exit.
```
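
As an illustration, a hypothetical invocation might look like the following (the directory, output path, and target version are placeholder values):

```console
python -m detection_rules export-rules -d rules/windows -o exports/windows_rules --downgrade-version 7.10 -s
```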

_*To load a custom rule, the proper index must be set up first. The simplest way to do this is to click
the `Load prebuilt detection rules and timeline templates` button on the `detections` page in the Kibana security app._

14 changes: 6 additions & 8 deletions detection_rules/kbwrap.py
@@ -52,9 +52,8 @@ def kibana_group(ctx: click.Context, **kibana_kwargs):
@click.pass_context
def upload_rule(ctx, toml_files):
"""Upload a list of rule .toml files to Kibana."""
from uuid import uuid4
from .packaging import manage_versions
from .schemas import downgrade
from .rule import downgrade_contents_from_rule

kibana = ctx.obj['kibana']
file_lookup = load_rule_files(paths=toml_files)
@@ -68,12 +67,11 @@ def upload_rule(ctx, toml_files):
api_payloads = []

for rule in rules:
payload = rule.contents.copy()
meta = payload.setdefault("meta", {})
meta["original"] = dict(id=rule.id, **rule.metadata)
payload["rule_id"] = str(uuid4())
payload = downgrade(payload, kibana.version)
rule = RuleResource(payload)
try:
rule = RuleResource(downgrade_contents_from_rule(rule, kibana.version))
except ValueError as e:
client_error(f'{e} in version:{kibana.version}, for rule: {rule.name}', e, ctx=ctx)

api_payloads.append(rule)

with kibana:
71 changes: 67 additions & 4 deletions detection_rules/main.py
@@ -7,6 +7,8 @@
import json
import os
import re
import time
from pathlib import Path

import click
import jsonschema
@@ -16,7 +18,7 @@
from .misc import client_error, nested_set, parse_config
from .rule import Rule
from .rule_formatter import toml_write
from .schemas import CurrentSchema
from .schemas import CurrentSchema, schema_map
from .utils import get_path, clear_caches, load_rule_contents


@@ -130,7 +132,7 @@ def mass_update(ctx, query, metadata, language, field):
@click.option('--rule-file', '-f', type=click.Path(dir_okay=False), help='Optionally view a rule from a specified file')
@click.option('--api-format/--rule-format', default=True, help='Print the rule in final api or rule format')
@click.pass_context
def view_rule(ctx, rule_id, rule_file, api_format):
def view_rule(ctx, rule_id, rule_file, api_format, verbose=True):
"""View an internal rule or specified rule file."""
rule = None

@@ -149,12 +151,73 @@ def view_rule(ctx, rule_id, rule_file, api_format):
if not rule:
client_error('Unknown format!')

click.echo(toml_write(rule.rule_format()) if not api_format else
json.dumps(rule.contents, indent=2, sort_keys=True))
if verbose:
click.echo(toml_write(rule.rule_format()) if not api_format else
json.dumps(rule.contents, indent=2, sort_keys=True))

return rule


@root.command('export-rules')
@click.argument('rule-id', nargs=-1, required=False)
@click.option('--rule-file', '-f', multiple=True, type=click.Path(dir_okay=False), help='Export specified rule files')
@click.option('--directory', '-d', multiple=True, type=click.Path(file_okay=False),
help='Recursively export rules from a directory')
@click.option('--outfile', '-o', default=get_path('exports', f'{time.strftime("%Y%m%dT%H%M%SL")}.ndjson'),
type=click.Path(dir_okay=False), help='Name of file for exported rules')
@click.option('--replace-id', '-r', is_flag=True, help='Replace rule IDs with new IDs before export')
@click.option('--downgrade-version', type=click.Choice(list(schema_map)),
help='Downgrade a rule version to be compatible with older instances of Kibana')
@click.option('--skip-unsupported', '-s', is_flag=True,
help='If `--downgrade-version` is passed, skip rule types which are unsupported '
'(an error will be raised otherwise)')
def export_rules(rule_id, rule_file, directory, outfile, replace_id, downgrade_version, skip_unsupported):
"""Export rule(s) into an importable ndjson file."""
from .packaging import Package

if not (rule_id or rule_file or directory):
client_error('Required: at least one of --rule-id, --rule-file, or --directory')

if rule_id:
all_rules = {r.id: r for r in rule_loader.load_rules(verbose=False).values()}
missing = [rid for rid in rule_id if rid not in all_rules]

if missing:
client_error(f'Unknown rules for rule IDs: {", ".join(missing)}')

rules = [r for r in all_rules.values() if r.id in rule_id]
rule_ids = [r.id for r in rules]
else:
rules = []
rule_ids = []

rule_files = list(rule_file)
for dirpath in directory:
rule_files.extend(list(Path(dirpath).rglob('*.toml')))

file_lookup = rule_loader.load_rule_files(verbose=False, paths=rule_files)
rules_from_files = rule_loader.load_rules(file_lookup=file_lookup).values()

# rule_loader.load_rules handles checks for duplicate rule IDs - this means rules loaded by ID are de-duped and
# rules loaded from files and directories are de-duped from each other, so this check is to ensure that there is
# no overlap between the two sets of rules
duplicates = [r.id for r in rules_from_files if r.id in rule_ids]
if duplicates:
client_error(f'Duplicate rules for rule IDs: {", ".join(duplicates)}')

rules.extend(rules_from_files)

if replace_id:
from uuid import uuid4
for rule in rules:
rule.contents['rule_id'] = str(uuid4())

Path(outfile).parent.mkdir(exist_ok=True)
package = Package(rules, '_', verbose=False)
package.export(outfile, downgrade_version=downgrade_version, skip_unsupported=skip_unsupported)
return package.rules


@root.command('validate-rule')
@click.argument('rule-id', required=False)
@click.option('--rule-name', '-n')
51 changes: 44 additions & 7 deletions detection_rules/packaging.py
@@ -10,12 +10,14 @@
import os
import shutil
from collections import defaultdict, OrderedDict
from pathlib import Path
from typing import List

import click

from . import rule_loader
from .misc import JS_LICENSE
from .rule import Rule # noqa: F401
from .rule import Rule, downgrade_contents_from_rule # noqa: F401
from .utils import get_path, get_etc_path, load_etc_dump, save_etc_dump

RELEASE_DIR = get_path("releases")
@@ -137,24 +139,25 @@ class Package(object):
"""Packaging object for siem rules and releases."""

def __init__(self, rules, name, deprecated_rules=None, release=False, current_versions=None, min_version=None,
max_version=None, update_version_lock=False):
max_version=None, update_version_lock=False, verbose=True):
"""Initialize a package."""
self.rules = [r.copy() for r in rules] # type: list[Rule]
self.rules: List[Rule] = [r.copy() for r in rules]
self.name = name
self.deprecated_rules = [r.copy() for r in deprecated_rules or []] # type: list[Rule]
self.deprecated_rules: List[Rule] = [r.copy() for r in deprecated_rules or []]
self.release = release

self.changed_rule_ids, self.new_rules_ids, self.removed_rule_ids = self._add_versions(current_versions,
update_version_lock)
update_version_lock,
verbose=verbose)

if min_version or max_version:
self.rules = [r for r in self.rules
if (min_version or 0) <= r.contents['version'] <= (max_version or r.contents['version'])]

def _add_versions(self, current_versions, update_versions_lock=False):
def _add_versions(self, current_versions, update_versions_lock=False, verbose=True):
"""Add versions to rules at load time."""
return manage_versions(self.rules, deprecated_rules=self.deprecated_rules, current_versions=current_versions,
save_changes=update_versions_lock)
save_changes=update_versions_lock, verbose=verbose)

@staticmethod
def _package_notice_file(save_dir):
@@ -245,6 +248,40 @@ def save(self, verbose=True):
if verbose:
click.echo('Package saved to: {}'.format(save_dir))

def export(self, outfile, downgrade_version=None, verbose=True, skip_unsupported=False):
"""Export rules into a consolidated ndjson file."""
outfile = Path(outfile).with_suffix('.ndjson')
unsupported = []

if downgrade_version:
if skip_unsupported:
export_str = ''

for rule in self.rules:
try:
export_str += json.dumps(downgrade_contents_from_rule(rule, downgrade_version),
sort_keys=True) + '\n'
except ValueError as e:
unsupported.append(f'{e}: {rule.id} - {rule.name}')
continue

with open(outfile, 'w') as f:
f.write(export_str)
else:
with open(outfile, 'w') as f:
f.write('\n'.join(json.dumps(downgrade_contents_from_rule(r, downgrade_version), sort_keys=True)
for r in self.rules))
else:
with open(outfile, 'w') as f:
f.write('\n'.join(json.dumps(r.contents, sort_keys=True) for r in self.rules))

if verbose:
click.echo(f'Exported {len(self.rules) - len(unsupported)} rules into {outfile}')

if skip_unsupported and unsupported:
unsupported_str = '\n- '.join(unsupported)
click.echo(f'Skipped {len(unsupported)} unsupported rules: \n- {unsupported_str}')

def get_package_hash(self, as_api=True, verbose=True):
"""Get hash of package contents."""
contents = base64.b64encode(self.get_consolidated(as_api=as_api).encode('utf-8'))
13 changes: 12 additions & 1 deletion detection_rules/rule.py
@@ -7,6 +7,7 @@
import hashlib
import json
import os
from uuid import uuid4

import click
import kql
@@ -15,7 +16,7 @@
from . import ecs, beats
from .attack import tactics, build_threat_map_entry, technique_lookup
from .rule_formatter import nested_normalize, toml_write
from .schemas import CurrentSchema, TomlMetadata # RULE_TYPES, metadata_schema, schema_validate, get_schema
from .schemas import CurrentSchema, TomlMetadata, downgrade
from .utils import get_path, clear_caches, cached


@@ -439,3 +440,13 @@ def build(cls, path=None, rule_type=None, required_only=True, save=True, verbose
click.echo(' - to have a rule validate against a specific beats schema, add it to metadata->beats_version')

return rule


def downgrade_contents_from_rule(rule: Rule, target_version: str) -> dict:
"""Generate the downgraded contents from a rule."""
payload = rule.contents.copy()
meta = payload.setdefault("meta", {})
meta["original"] = dict(id=rule.id, **rule.metadata)
payload["rule_id"] = str(uuid4())
payload = downgrade(payload, target_version)
return payload
14 changes: 8 additions & 6 deletions detection_rules/schemas/__init__.py
@@ -17,15 +17,17 @@
"downgrade",
"CurrentSchema",
"validate_rta_mapping",
"schema_map",
"TomlMetadata",
)

all_schemas = [
ApiSchema78,
ApiSchema79,
ApiSchema710,
ApiSchema711,
]
schema_map = {
'7.8': ApiSchema78,
'7.9': ApiSchema79,
'7.10': ApiSchema710,
'7.11': ApiSchema711
}
all_schemas = list(schema_map.values())

CurrentSchema = all_schemas[-1]

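
For context, a minimal sketch of how the new `schema_map` ties together the CLI's `--downgrade-version` choices and the downgrade path (illustrative only; the names used are those imported in the diffs above):

```python
from detection_rules.schemas import schema_map, downgrade

# the --downgrade-version choices come directly from the mapping keys
assert list(schema_map) == ['7.8', '7.9', '7.10', '7.11']

# a rule's API payload can then be converted for an older Kibana, e.g.:
# downgraded_contents = downgrade(rule.contents, '7.10')
```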