Add a RuleCollection object instead of a "loader" module #1063

Merged · 20 commits · Apr 5, 2021
25 changes: 15 additions & 10 deletions CLI.md
@@ -71,7 +71,7 @@ and will accept any valid rule in the following formats:
#### `import-rules`

```console
-Usage: detection_rules import-rules [OPTIONS] [INFILE]...
+Usage: detection_rules import-rules [OPTIONS] [INPUT_FILE]...

Import rules from json, toml, or Kibana exported rule file(s).

@@ -159,34 +159,39 @@ Options:
--cloud-id TEXT
-k, --kibana-url TEXT

-Usage: detection_rules kibana upload-rule [OPTIONS] TOML_FILES...
+Usage: detection_rules kibana upload-rule [OPTIONS]

Upload a list of rule .toml files to Kibana.

Options:
-  -r, --replace-id  Replace rule IDs with new IDs before export
-  -h, --help        Show this message and exit.
+  -f, --rule-file FILE
+  -d, --directory DIRECTORY  Recursively export rules from a directory
+  -id, --rule-id TEXT
+  -r, --replace-id           Replace rule IDs with new IDs before export
+  -h, --help                 Show this message and exit.
```

Alternatively, rules can be exported into a consolidated ndjson file which can be imported in the Kibana security app
directly.

```console
-Usage: detection_rules export-rules [OPTIONS] [RULE_ID]...
+Usage: detection_rules export-rules [OPTIONS]

Export rule(s) into an importable ndjson file.

Options:
-  -f, --rule-file FILE       Export specified rule files
+  -f, --rule-file FILE
+  -d, --directory DIRECTORY  Recursively export rules from a directory
+  -id, --rule-id TEXT
-o, --outfile FILE Name of file for exported rules
-r, --replace-id Replace rule IDs with new IDs before export
-  --stack-version [7.8|7.9|7.10|7.11]
+  --stack-version [7.8|7.9|7.10|7.11|7.12]
Downgrade a rule version to be compatible
with older instances of Kibana
-  -s, --skip-unsupported     If `--stack-version` is passed, skip
-                             rule types which are unsupported (an error
-                             will be raised otherwise)
+  -s, --skip-unsupported     If `--stack-version` is passed, skip rule
+                             types which are unsupported (an error will
+                             be raised otherwise)
-h, --help Show this message and exit.
```

76 changes: 76 additions & 0 deletions detection_rules/cli_utils.py
@@ -7,19 +7,95 @@
import datetime
import os
from pathlib import Path
from typing import List

import click

import kql
import functools
from . import ecs
from .attack import matrix, tactics, build_threat_map_entry
from .rule import TOMLRule, TOMLRuleContents
from .rule_loader import RuleCollection, DEFAULT_RULES_DIR, dict_filter
from .schemas import CurrentSchema
from .utils import clear_caches, get_path

RULES_DIR = get_path("rules")


def single_collection(f):
"""Add arguments to get a RuleCollection by file, directory or a list of IDs"""
from .misc import client_error

@click.option('--rule-file', '-f', multiple=False, required=False, type=click.Path(dir_okay=False))
@click.option('--rule-id', '-id', multiple=False, required=False)
@functools.wraps(f)
    def get_collection(*args, **kwargs):
        rule_name: List[str] = kwargs.pop("rule_name", [])
        rule_id: List[str] = kwargs.pop("rule_id", [])
        rule_file: str = kwargs.pop("rule_file", None)
        rule_files: List[str] = [rule_file] if rule_file else []
        # no --directory option is defined here, so default to an empty list
        directories: List[str] = kwargs.pop("directory", [])

        rules = RuleCollection()

        if bool(rule_name) + bool(rule_id) + bool(rule_files) != 1:
            client_error('Required: exactly one of --rule-id or --rule-file')

        rules.load_files(Path(p) for p in rule_files)
        rules.load_directories(Path(d) for d in directories)

        if rule_id:
            rules.load_directory(DEFAULT_RULES_DIR, toml_filter=dict_filter(rule__rule_id=rule_id))

            if len(rules) != 1:
                client_error(f"Could not find rule with ID {rule_id}")

        # hand the single loaded rule to the wrapped command
        kwargs["rule"] = list(rules)[0]
        return f(*args, **kwargs)

return get_collection


def multi_collection(f):
"""Add arguments to get a RuleCollection by file, directory or a list of IDs"""
from .misc import client_error

@click.option('--rule-file', '-f', multiple=True, type=click.Path(dir_okay=False), required=False)
@click.option('--directory', '-d', multiple=True, type=click.Path(file_okay=False), required=False,
help='Recursively export rules from a directory')
@click.option('--rule-id', '-id', multiple=True, required=False)
@functools.wraps(f)
def get_collection(*args, **kwargs):
rule_name: List[str] = kwargs.pop("rule_name", [])
rule_id: List[str] = kwargs.pop("rule_id", [])
rule_files: List[str] = kwargs.pop("rule_file")
directories: List[str] = kwargs.pop("directory")

rules = RuleCollection()

        if not (rule_name or rule_id or rule_files or directories):
client_error('Required: at least one of --rule-id, --rule-file, or --directory')

rules.load_files(Path(p) for p in rule_files)
rules.load_directories(Path(d) for d in directories)

if rule_id:
rules.load_directory(DEFAULT_RULES_DIR, toml_filter=dict_filter(rule__rule_id=rule_id))
found_ids = {rule.id for rule in rules}
missing = set(rule_id).difference(found_ids)

if missing:
client_error(f'Could not find rules with IDs: {", ".join(missing)}')

if len(rules) == 0:
client_error("No rules found")

kwargs["rules"] = rules
return f(*args, **kwargs)

return get_collection


def rule_prompt(path=None, rule_type=None, required_only=True, save=True, verbose=False, **kwargs) -> TOMLRule:
"""Prompt loop to build a rule."""
from .misc import schema_prompt
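Note: the two decorators above are what the commands in the files below consume. A minimal sketch of the intended usage, assuming `RuleCollection` is iterable over `TOMLRule` objects (it is used that way in `cli_utils.py` itself); the `list-rules` command is hypothetical and not part of this PR:

```python
import click

from detection_rules.cli_utils import multi_collection
from detection_rules.main import root


@root.command('list-rules')  # hypothetical command, for illustration only
@multi_collection
def list_rules(rules):
    """Print the ID and name of every rule gathered from the CLI flags."""
    # `rules` is the RuleCollection assembled from --rule-file, --directory,
    # and/or --rule-id before the command body runs.
    for rule in rules:
        click.echo(f'{rule.id} - {rule.name}')
```

The `dict_filter(rule__rule_id=rule_id)` call appears to build a nested-key predicate (double underscores as path separators, Django style), so only TOML files whose `rule.rule_id` matches are deserialized when loading by ID.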
47 changes: 20 additions & 27 deletions detection_rules/devtools.py
@@ -17,15 +17,16 @@
import click
from elasticsearch import Elasticsearch
from eql import load_dump
-from kibana.connector import Kibana

+from kibana.connector import Kibana
from . import rule_loader
+from .cli_utils import single_collection
from .eswrap import CollectEvents, add_range_to_dsl
from .main import root
from .misc import PYTHON_LICENSE, add_client, GithubClient, Manifest, client_error, getdefault
from .packaging import PACKAGE_FILE, Package, manage_versions, RELEASE_DIR
-from .rule import TOMLRule, TOMLRuleContents, BaseQueryRuleData
-from .rule_loader import get_rule
+from .rule import TOMLRule, BaseQueryRuleData
+from .rule_loader import production_filter, RuleCollection
from .utils import get_path, dict_hash

RULES_DIR = get_path('rules')
@@ -68,7 +69,7 @@ def update_lock_versions(rule_ids):
if not click.confirm('Are you sure you want to update hashes without a version bump?'):
return

-    rules = [r for r in rule_loader.load_rules(verbose=False).values() if r.id in rule_ids]
+    rules = RuleCollection.default().filter(lambda r: r.id in rule_ids)
changed, new = manage_versions(rules, exclude_version_update=True, add_new=False, save_changes=True)

if not changed:
@@ -86,10 +87,12 @@ def kibana_diff(rule_id, repo, branch, threads):
"""Diff rules against their version represented in kibana if exists."""
from .misc import get_kibana_rules

+    rules = RuleCollection.default()

if rule_id:
-        rules = {r.id: r for r in rule_loader.load_rules(verbose=False).values() if r.id in rule_id}
+        rules = rules.filter(lambda r: r.id in rule_id)
else:
-        rules = {r.id: r for r in rule_loader.get_production_rules()}
+        rules = rules.filter(production_filter)

# add versions to the rules
manage_versions(list(rules.values()), verbose=False)
@@ -102,13 +105,13 @@ def kibana_diff(rule_id, repo, branch, threads):
missing_from_kibana = list(set(repo_hashes).difference(set(kibana_hashes)))

rule_diff = []
-    for rid, rhash in repo_hashes.items():
-        if rid in missing_from_kibana:
+    for rule_id, rule_hash in repo_hashes.items():
+        if rule_id in missing_from_kibana:
continue
-        if rhash != kibana_hashes[rid]:
+        if rule_hash != kibana_hashes[rule_id]:
rule_diff.append(
-                f'versions - repo: {rules[rid].contents["version"]}, kibana: {kibana_rules[rid]["version"]} -> '
-                f'{rid} - {rules[rid].name}'
+                f'versions - repo: {rules[rule_id].contents["version"]}, kibana: {kibana_rules[rule_id]["version"]} -> '
+                f'{rule_id} - {rules[rule_id].name}'
)

diff = {
@@ -373,26 +376,17 @@


@test_group.command('rule-event-search')
-@click.argument('rule-file', type=click.Path(dir_okay=False), required=False)
-@click.option('--rule-id', '-id')
+@single_collection
@click.option('--date-range', '-d', type=(str, str), default=('now-7d', 'now'), help='Date range to scope search')
@click.option('--count', '-c', is_flag=True, help='Return count of results only')
@click.option('--max-results', '-m', type=click.IntRange(1, 1000), default=100,
help='Max results to return (capped at 1000)')
@click.option('--verbose', '-v', is_flag=True)
@click.pass_context
@add_client('elasticsearch')
-def rule_event_search(ctx, rule_file, rule_id, date_range, count, max_results, verbose,
+def rule_event_search(ctx, rule, date_range, count, max_results, verbose,
elasticsearch_client: Elasticsearch = None):
"""Search using a rule file against an Elasticsearch instance."""
-    rule: TOMLRule
-
-    if rule_id:
-        rule = get_rule(rule_id, verbose=False)
-    elif rule_file:
-        rule = TOMLRule(path=rule_file, contents=TOMLRuleContents.from_dict(load_dump(rule_file)))
-    else:
-        client_error('Must specify a rule file or rule ID')

if isinstance(rule.contents.data, BaseQueryRuleData):
if verbose:
Expand Down Expand Up @@ -431,18 +425,17 @@ def rule_survey(ctx: click.Context, query, date_range, dump_file, hide_zero_coun
"""Survey rule counts."""
from eql.table import Table
from kibana.resources import Signal
-    from . import rule_loader
from .main import search_rules

survey_results = []
start_time, end_time = date_range

if query:
-        rule_paths = [r['file'] for r in ctx.invoke(search_rules, query=query, verbose=False)]
-        rules = rule_loader.load_rules(rule_loader.load_rule_files(paths=rule_paths, verbose=False), verbose=False)
-        rules = rules.values()
+        rules = RuleCollection()
+        paths = [Path(r['file']) for r in ctx.invoke(search_rules, query=query, verbose=False)]
+        rules.load_files(paths)
    else:
-        rules = rule_loader.load_rules(verbose=False).values()
+        rules = RuleCollection.default().filter(production_filter)

click.echo(f'Running survey against {len(rules)} rules')
click.echo(f'Saving detailed dump to: {dump_file}')
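The recurring pattern in this file is `RuleCollection.default()` plus `.filter()`, replacing the old dict-returning loaders. A rough sketch of the semantics implied by the calls above, assuming `.default()` loads the shipped rules once (and caches them) and `.filter()` takes a predicate over rules and returns a new collection:

```python
from detection_rules.rule_loader import RuleCollection, production_filter

# Load every rule shipped in rules/ (assumed cached after the first call).
rules = RuleCollection.default()

# Narrow the collection with predicates instead of rebuilding dicts by hand,
# mirroring update_lock_versions and kibana_diff above.
prod_rules = rules.filter(production_filter)
rule_ids = {'9a1a2dae-0b5f-4c3d-8305-a268d404c306'}  # placeholder ID, for illustration
subset = prod_rules.filter(lambda r: r.id in rule_ids)

print(f'{len(rules)} loaded, {len(prod_rules)} production, {len(subset)} selected')
```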
10 changes: 5 additions & 5 deletions detection_rules/eswrap.py
@@ -7,8 +7,8 @@
import json
import os
import time
-from contextlib import contextmanager
from collections import defaultdict
+from contextlib import contextmanager
from pathlib import Path
from typing import Union

@@ -20,10 +20,9 @@
import kql
from .main import root
from .misc import add_params, client_error, elasticsearch_options
-from .utils import format_command_options, normalize_timing_and_sort, unix_time_to_formatted, get_path
from .rule import TOMLRule
-from .rule_loader import get_rule, rta_mappings
+from .rule_loader import rta_mappings, RuleCollection
+from .utils import format_command_options, normalize_timing_and_sort, unix_time_to_formatted, get_path

COLLECTION_DIR = get_path('collections')
MATCH_ALL = {'bool': {'filter': [{'match_all': {}}]}}
@@ -88,7 +87,8 @@ def evaluate_against_rule_and_update_mapping(self, rule_id, rta_name, verbose=True):
"""Evaluate a rule against collected events and update mapping."""
from .utils import combine_sources, evaluate

-        rule = get_rule(rule_id, verbose=False)
+        rule = next((rule for rule in RuleCollection.default() if rule.id == rule_id), None)
+        assert rule is not None, f"Unable to find rule with ID {rule_id}"
merged_events = combine_sources(*self.events.values())
filtered = evaluate(rule, merged_events)

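The removed `get_rule()` helper is replaced by an inline scan of the default collection. If the lookup is needed elsewhere, it could be factored into a helper; a sketch under the same assumptions (`find_rule_by_id` is hypothetical, not part of this PR):

```python
from typing import Optional

from detection_rules.rule import TOMLRule
from detection_rules.rule_loader import RuleCollection


def find_rule_by_id(rule_id: str) -> Optional[TOMLRule]:
    """First-match lookup over the default collection (stand-in for get_rule)."""
    # next() with a default returns None instead of raising StopIteration
    # when no rule carries the requested ID.
    return next((rule for rule in RuleCollection.default() if rule.id == rule_id), None)
```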
31 changes: 16 additions & 15 deletions detection_rules/kbwrap.py
@@ -4,13 +4,16 @@
# 2.0.

"""Kibana cli commands."""
+import uuid

import click

import kql
from kibana import Kibana, Signal, RuleResource

+from .cli_utils import multi_collection
from .main import root
from .misc import add_params, client_error, kibana_options
-from .rule_loader import load_rule_files, load_rules
+from .schemas import downgrade
from .utils import format_command_options


@@ -49,30 +52,28 @@ def kibana_group(ctx: click.Context, **kibana_kwargs):


@kibana_group.command("upload-rule")
@click.argument("toml-files", nargs=-1, required=True)
@multi_collection
@click.option('--replace-id', '-r', is_flag=True, help='Replace rule IDs with new IDs before export')
@click.pass_context
-def upload_rule(ctx, toml_files, replace_id):
+def upload_rule(ctx, rules, replace_id):
"""Upload a list of rule .toml files to Kibana."""
from .packaging import manage_versions

kibana = ctx.obj['kibana']
-    file_lookup = load_rule_files(paths=toml_files)
-    rules = list(load_rules(file_lookup=file_lookup).values())

# assign the versions from etc/versions.lock.json
# rules that have changed in hash get incremented, others stay as-is.
# rules that aren't in the lookup default to version 1
manage_versions(rules, verbose=False)

api_payloads = []

for rule in rules:
try:
-            payload = rule.get_payload(include_version=True, replace_id=replace_id, embed_metadata=True,
-                                       target_version=kibana.version)
+            payload = rule.contents.to_api_format()
+            payload.setdefault("meta", {}).update(rule.contents.metadata.to_dict())
+
+            if replace_id:
+                payload["rule_id"] = str(uuid.uuid4())
+
+            payload = downgrade(payload, target_version=kibana.version)

except ValueError as e:
client_error(f'{e} in version:{kibana.version}, for rule: {rule.name}', e, ctx=ctx)

rule = RuleResource(payload)
api_payloads.append(rule)

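The new upload path separates payload construction from the rule object: `to_api_format()` emits the API-shaped dict, the TOML metadata is tucked under `meta`, and `downgrade()` adapts the payload to the connected stack version. A sketch of the per-rule steps as a standalone function (`build_kibana_payload` is a hypothetical helper mirroring the loop body above):

```python
import uuid

from detection_rules.schemas import downgrade


def build_kibana_payload(rule, kibana_version: str, replace_id: bool = False) -> dict:
    """Mirror the per-rule transformation in upload_rule (illustration only)."""
    payload = rule.contents.to_api_format()
    payload.setdefault('meta', {}).update(rule.contents.metadata.to_dict())

    if replace_id:
        # Detach the upload from the repo's rule_id so Kibana treats it as new.
        payload['rule_id'] = str(uuid.uuid4())

    # downgrade() raises ValueError when the rule type cannot be represented
    # in the target stack version; upload_rule surfaces that as a client error.
    return downgrade(payload, target_version=kibana_version)
```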