Skip to content

Refresh Kibana module with API updates #3466

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
25 commits
Select commit Hold shift + click to select a range
8f826e1
Refresh Kibana module with API updates
brokensound77 Feb 22, 2024
4f18a52
add import/export commands
brokensound77 Feb 22, 2024
aa1a824
Merge branch 'main' into refresh-kibana-module-with-new-APIs
brokensound77 Mar 14, 2024
4d031b0
rename repo commands
brokensound77 Mar 14, 2024
9a3d809
merge upstream updates
brokensound77 Mar 14, 2024
29710a9
add RawRuleCollection and DictRule objects
brokensound77 Apr 4, 2024
6cff113
save exported rules to files; rule.from_rule_resource
brokensound77 Apr 18, 2024
167cb9f
Merge remote-tracking branch 'upstream/main' into refresh-kibana-modu…
brokensound77 Apr 18, 2024
223363b
lint
brokensound77 Apr 18, 2024
9d3f6a0
strip unknown fields in schema
brokensound77 Apr 20, 2024
5f755cd
remove object requirements
brokensound77 Apr 21, 2024
00dfc83
Merge branch 'main' into refresh-kibana-module-with-new-APIs
brokensound77 Apr 21, 2024
0033ab0
add remote cli test
brokensound77 Apr 21, 2024
5d7bc1c
docstrings
brokensound77 Apr 24, 2024
9c61f00
fix remote call double python
brokensound77 Apr 24, 2024
73c37ae
update unknown values
brokensound77 Apr 24, 2024
f9a69a4
small updates from feedback
brokensound77 Apr 24, 2024
4fdfdab
update import-rules results and output
brokensound77 Apr 25, 2024
8d5796a
refined the bulk action methods; added TypedDict for values
brokensound77 Apr 25, 2024
4d77a18
update docs
brokensound77 Apr 25, 2024
a6ba7ec
Merge branch 'main' into refresh-kibana-module-with-new-APIs
brokensound77 Apr 25, 2024
b415ddc
create tmp dir prior to running
brokensound77 Apr 25, 2024
f708482
moar docstrings
brokensound77 Apr 25, 2024
eaf7b65
lint
brokensound77 Apr 26, 2024
96aa750
bump kibana lib version
brokensound77 Apr 26, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
266 changes: 266 additions & 0 deletions CLI.md

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions detection_rules/cli_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ def get_collection(*args, **kwargs):

if rule_id:
rules.load_directories((DEFAULT_RULES_DIR, DEFAULT_BBR_DIR),
toml_filter=dict_filter(rule__rule_id=rule_id))
obj_filter=dict_filter(rule__rule_id=rule_id))
if len(rules) != 1:
client_error(f"Could not find rule with ID {rule_id}")

Expand All @@ -66,7 +66,7 @@ def multi_collection(f):

@click.option('--rule-file', '-f', multiple=True, type=click.Path(dir_okay=False), required=False)
@click.option('--directory', '-d', multiple=True, type=click.Path(file_okay=False), required=False,
help='Recursively export rules from a directory')
help='Recursively load rules from a directory')
@click.option('--rule-id', '-id', multiple=True, required=False)
@functools.wraps(f)
def get_collection(*args, **kwargs):
Expand All @@ -84,7 +84,7 @@ def get_collection(*args, **kwargs):

if rule_id:
rules.load_directories((DEFAULT_RULES_DIR, DEFAULT_BBR_DIR),
toml_filter=dict_filter(rule__rule_id=rule_id))
obj_filter=dict_filter(rule__rule_id=rule_id))
found_ids = {rule.id for rule in rules}
missing = set(rule_id).difference(found_ids)

Expand Down
7 changes: 7 additions & 0 deletions detection_rules/etc/test_remote_cli.bash
Original file line number Diff line number Diff line change
Expand Up @@ -12,4 +12,11 @@ echo "Performing a quick rule alerts search..."
echo "Requires .detection-rules-cfg.json credentials file set."
python -m detection_rules kibana search-alerts

echo "Performing a rule export..."
mkdir tmp-export 2>/dev/null
python -m detection_rules kibana export-rules -d tmp-export --skip-errors
ls tmp-export
echo "Removing generated files..."
rm -rf tmp-export

echo "Detection-rules CLI tests completed!"
103 changes: 98 additions & 5 deletions detection_rules/kbwrap.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,17 +5,20 @@

"""Kibana cli commands."""
import sys
from pathlib import Path
from typing import Iterable, List, Optional, Tuple

import click


import kql
from kibana import Signal, RuleResource

from .cli_utils import multi_collection
from .main import root
from .misc import add_params, client_error, kibana_options, get_kibana_client, nested_set
from .rule import downgrade_contents_from_rule
from .utils import format_command_options
from .rule import downgrade_contents_from_rule, TOMLRuleContents, TOMLRule
from .rule_loader import RuleCollection
from .utils import format_command_options, rulename_to_filename


@root.group('kibana')
Expand All @@ -38,7 +41,7 @@ def kibana_group(ctx: click.Context, **kibana_kwargs):
@multi_collection
@click.option('--replace-id', '-r', is_flag=True, help='Replace rule IDs with new IDs before export')
@click.pass_context
def upload_rule(ctx, rules, replace_id):
def upload_rule(ctx, rules: RuleCollection, replace_id):
"""Upload a list of rule .toml files to Kibana."""
kibana = ctx.obj['kibana']
api_payloads = []
Expand All @@ -53,7 +56,7 @@ def upload_rule(ctx, rules, replace_id):
api_payloads.append(rule)

with kibana:
results = RuleResource.bulk_create(api_payloads)
results = RuleResource.bulk_create_legacy(api_payloads)

success = []
errors = []
Expand All @@ -71,6 +74,96 @@ def upload_rule(ctx, rules, replace_id):
return results


@kibana_group.command('import-rules')
@multi_collection
@click.option('--overwrite', '-o', is_flag=True, help='Overwrite existing rules')
@click.option('--overwrite-exceptions', '-e', is_flag=True, help='Overwrite exceptions in existing rules')
@click.option('--overwrite-action-connectors', '-a', is_flag=True,
              help='Overwrite action connectors in existing rules')
@click.pass_context
def kibana_import_rules(ctx: click.Context, rules: RuleCollection, overwrite: bool = False,
                        overwrite_exceptions: bool = False,
                        overwrite_action_connectors: bool = False) -> Tuple[dict, List[RuleResource]]:
    """Import custom rules into Kibana."""
    # The Kibana client is attached to the click context by the parent `kibana` group.
    kibana = ctx.obj['kibana']
    # Convert each TOML rule in the collection to the API (ndjson-compatible) dict form.
    rule_dicts = [r.contents.to_api_format() for r in rules]
    with kibana:
        response, successful_rule_ids, results = RuleResource.import_rules(
            rule_dicts,
            overwrite=overwrite,
            overwrite_exceptions=overwrite_exceptions,
            overwrite_action_connectors=overwrite_action_connectors
        )

    # Report successes first, then per-rule failures from the API response.
    if successful_rule_ids:
        click.echo(f'{len(successful_rule_ids)} rule(s) successfully imported')
        rule_str = '\n - '.join(successful_rule_ids)
        # click.echo (not bare print) keeps output consistent and testable via click runners.
        click.echo(f' - {rule_str}')
    if response['errors']:
        click.echo(f'{len(response["errors"])} rule(s) failed to import!')
        for error in response['errors']:
            click.echo(f' - {error["rule_id"]}: ({error["error"]["status_code"]}) {error["error"]["message"]}')

    # Returns the raw API response dict and the imported RuleResource objects.
    return response, results


@kibana_group.command('export-rules')
@click.option('--directory', '-d', required=True, type=Path, help='Directory to export rules to')
@click.option('--rule-id', '-r', multiple=True, help='Optional Rule IDs to restrict export to')
@click.option('--skip-errors', '-s', is_flag=True, help='Skip errors when exporting rules')
@click.pass_context
def kibana_export_rules(ctx: click.Context, directory: Path,
                        rule_id: Optional[Iterable[str]] = None, skip_errors: bool = False) -> List[TOMLRule]:
    """Export custom rules from Kibana."""
    kibana = ctx.obj['kibana']
    with kibana:
        results = RuleResource.export_rules(list(rule_id))

    # Only create the output directory when there is something to write.
    if results:
        directory.mkdir(parents=True, exist_ok=True)

    errors = []

    # Phase 1: convert each exported API resource into a TOMLRule with an on-disk path.
    exported = []
    for rule_resource in results:
        try:
            contents = TOMLRuleContents.from_rule_resource(rule_resource, maturity='production')
            threat = contents.data.get('threat')
            # First tactic name feeds the filename convention; empty when no threat mapping.
            first_tactic = threat[0].tactic.name if threat else ''
            rule_name = rulename_to_filename(contents.data.name, tactic_name=first_tactic)
            # rulename_to_filename already returns the filename string - no f-string needed.
            rule = TOMLRule(contents=contents, path=directory / rule_name)
        except Exception as e:
            if skip_errors:
                # click.echo (not bare print) keeps CLI output consistent.
                click.echo(f'- skipping {rule_resource.get("name")} - {type(e).__name__}')
                errors.append(f'- {rule_resource.get("name")} - {e}')
                continue
            raise
        exported.append(rule)

    # Phase 2: persist the converted rules; failures here are tracked separately so a
    # single bad rule does not lose the rest of the export when --skip-errors is set.
    saved = []
    for rule in exported:
        try:
            rule.save_toml()
        except Exception as e:
            if skip_errors:
                click.echo(f'- skipping {rule.contents.data.name} - {type(e).__name__}')
                errors.append(f'- {rule.contents.data.name} - {e}')
                continue
            raise
        saved.append(rule)

    click.echo(f'{len(results)} rules exported')
    click.echo(f'{len(exported)} rules converted')
    click.echo(f'{len(saved)} saved to {directory}')
    if errors:
        err_file = directory / '_errors.txt'
        err_file.write_text('\n'.join(errors))
        click.echo(f'{len(errors)} errors saved to {err_file}')

    return exported


@kibana_group.command('search-alerts')
@click.argument('query', required=False)
@click.option('--date-range', '-d', type=(str, str), default=('now-7d', 'now'), help='Date range to scope search')
Expand Down
18 changes: 7 additions & 11 deletions detection_rules/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
import glob
import json
import os
import re
import time
from datetime import datetime

Expand All @@ -29,7 +28,7 @@
from .rule_formatter import toml_write
from .rule_loader import RuleCollection
from .schemas import all_versions, definitions, get_incompatible_fields, get_schema_file
from .utils import Ndjson, get_path, get_etc_path, clear_caches, load_dump, load_rule_contents
from .utils import Ndjson, get_path, get_etc_path, clear_caches, load_dump, load_rule_contents, rulename_to_filename

RULES_DIR = get_path('rules')

Expand Down Expand Up @@ -92,11 +91,11 @@ def generate_rules_index(ctx: click.Context, query, overwrite, save_files=True):
return bulk_upload_docs, importable_rules_docs


@root.command('import-rules')
@root.command('import-rules-to-repo')
@click.argument('input-file', type=click.Path(dir_okay=False, exists=True), nargs=-1, required=False)
@click.option('--required-only', is_flag=True, help='Only prompt for required fields')
@click.option('--directory', '-d', type=click.Path(file_okay=False, exists=True), help='Load files from a directory')
def import_rules(input_file, required_only, directory):
def import_rules_into_repo(input_file, required_only, directory):
"""Import rules from json, toml, yaml, or Kibana exported rule file(s)."""
rule_files = glob.glob(os.path.join(directory, '**', '*.*'), recursive=True) if directory else []
rule_files = sorted(set(rule_files + list(input_file)))
Expand All @@ -108,12 +107,9 @@ def import_rules(input_file, required_only, directory):
if not rule_contents:
click.echo('Must specify at least one file!')

def name_to_filename(name):
return re.sub(r'[^_a-z0-9]+', '_', name.strip().lower()).strip('_') + '.toml'

for contents in rule_contents:
base_path = contents.get('name') or contents.get('rule', {}).get('name')
base_path = name_to_filename(base_path) if base_path else base_path
base_path = rulename_to_filename(base_path) if base_path else base_path
rule_path = os.path.join(RULES_DIR, base_path) if base_path else None
additional = ['index'] if not contents.get('data_view_id') else ['data_view_id']
rule_prompt(rule_path, required_only=required_only, save=True, verbose=True,
Expand Down Expand Up @@ -274,7 +270,7 @@ def _export_rules(rules: RuleCollection, outfile: Path, downgrade_version: Optio
click.echo(f'Skipped {len(unsupported)} unsupported rules: \n- {unsupported_str}')


@root.command('export-rules')
@root.command('export-rules-from-repo')
@multi_collection
@click.option('--outfile', '-o', default=Path(get_path('exports', f'{time.strftime("%Y%m%dT%H%M%SL")}.ndjson')),
type=Path, help='Name of file for exported rules')
Expand All @@ -285,8 +281,8 @@ def _export_rules(rules: RuleCollection, outfile: Path, downgrade_version: Optio
help='If `--stack-version` is passed, skip rule types which are unsupported '
'(an error will be raised otherwise)')
@click.option('--include-metadata', type=bool, is_flag=True, default=False, help='Add metadata to the exported rules')
def export_rules(rules, outfile: Path, replace_id, stack_version,
skip_unsupported, include_metadata: bool) -> RuleCollection:
def export_rules_from_repo(rules, outfile: Path, replace_id, stack_version,
skip_unsupported, include_metadata: bool) -> RuleCollection:
"""Export rule(s) into an importable ndjson file."""
assert len(rules) > 0, "No rules found"

Expand Down
47 changes: 40 additions & 7 deletions detection_rules/mixins.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,17 @@

"""Generic mixin classes."""

import dataclasses
from pathlib import Path
from typing import Any, Optional, TypeVar, Type
from typing import Any, Optional, TypeVar, Type, Literal

import json
import marshmallow_dataclass
import marshmallow_dataclass.union_field
import marshmallow_jsonschema
import marshmallow_union
from marshmallow import Schema, ValidationError, fields, validates_schema
import marshmallow
from marshmallow import Schema, ValidationError, validates_schema, fields as marshmallow_fields

from .misc import load_current_package_version
from .schemas import definitions
Expand All @@ -23,6 +25,7 @@

T = TypeVar('T')
ClassT = TypeVar('ClassT') # bound=dataclass?
UNKNOWN_VALUES = Literal['raise', 'exclude', 'include']


def _strip_none_from_dict(obj: T) -> T:
Expand Down Expand Up @@ -81,14 +84,44 @@ def dive(child: dict) -> dict:
return patched


class BaseSchema(Schema):
    """Base schema for marshmallow dataclasses with unknown.

    Used as the default ``base_schema`` for ``exclude_class_schema`` /
    ``recursive_class_schema``, which set an ``unknown`` attribute on ``Meta``
    at runtime to control handling of unknown fields during ``load``.
    """
    class Meta:
        """Meta class for marshmallow schema."""
        # NOTE(review): intentionally empty; ``unknown`` is assigned onto this
        # class by exclude_class_schema() before schema generation.


def exclude_class_schema(
    clazz, base_schema: type[Schema] = BaseSchema, unknown: UNKNOWN_VALUES = marshmallow.EXCLUDE, **kwargs
) -> type[Schema]:
    """Get a marshmallow schema for a dataclass with the given `unknown` field policy.

    :param clazz: the dataclass to generate a schema for
    :param base_schema: schema class to derive from (must expose a ``Meta`` inner class)
    :param unknown: marshmallow policy for unknown fields ('raise', 'exclude', or 'include')
    :return: the generated marshmallow Schema subclass
    """
    # Build a per-call subclass instead of assigning base_schema.Meta.unknown in place:
    # mutating the shared BaseSchema.Meta leaked the most recent `unknown` value into
    # every schema generated afterwards, regardless of what that caller requested.
    configured_meta = type('Meta', (base_schema.Meta,), {'unknown': unknown})
    configured_base = type(base_schema.__name__, (base_schema,), {'Meta': configured_meta})
    return marshmallow_dataclass.class_schema(clazz, base_schema=configured_base, **kwargs)


def recursive_class_schema(
    clazz, base_schema: type[Schema] = BaseSchema, unknown: UNKNOWN_VALUES = marshmallow.EXCLUDE, **kwargs
) -> type[Schema]:
    """Recursively apply the unknown parameter for nested schemas.

    :param clazz: the dataclass to generate a schema for
    :param base_schema: schema class passed through to nested schema generation
    :param unknown: marshmallow policy for unknown fields ('raise', 'exclude', or 'include')
    :return: the generated marshmallow Schema subclass
    """
    schema = exclude_class_schema(clazz, base_schema=base_schema, unknown=unknown, **kwargs)
    for field in dataclasses.fields(clazz):
        # NOTE(review): field.type is the annotation object; under postponed evaluation
        # (string annotations) it would be a str and never register as a dataclass here
        # - TODO confirm these models use concrete (non-string) annotations.
        if dataclasses.is_dataclass(field.type):
            nested_cls = field.type
            # Fix: forward `unknown` to the recursive call. Previously nested dataclass
            # schemas silently reverted to the default EXCLUDE policy even when the
            # caller asked for 'raise' or 'include'.
            nested_schema = recursive_class_schema(nested_cls, base_schema=base_schema, unknown=unknown, **kwargs)
            # Presumably consumers look up the nested schema via this attribute;
            # setattr does not alter the generated field itself - verify downstream use.
            setattr(schema, field.name, nested_schema)
    return schema


class MarshmallowDataclassMixin:
"""Mixin class for marshmallow serialization."""

@classmethod
@cached
def __schema(cls: ClassT) -> Schema:
def __schema(cls: ClassT, unknown: Optional[UNKNOWN_VALUES] = None) -> Schema:
"""Get the marshmallow schema for the data class"""
return marshmallow_dataclass.class_schema(cls)()
if unknown:
return recursive_class_schema(cls, unknown=unknown)()
else:
return marshmallow_dataclass.class_schema(cls)()

def get(self, key: str, default: Optional[Any] = None):
"""Get a key from the query data without raising attribute errors."""
Expand All @@ -103,9 +136,9 @@ def jsonschema(cls):
return jsonschema

@classmethod
def from_dict(cls: Type[ClassT], obj: dict) -> ClassT:
def from_dict(cls: Type[ClassT], obj: dict, unknown: Optional[UNKNOWN_VALUES] = None) -> ClassT:
"""Deserialize and validate a dataclass from a dict using marshmallow."""
schema = cls.__schema()
schema = cls.__schema(unknown=unknown)
return schema.load(obj)

def to_dict(self, strip_none_values=True) -> dict:
Expand Down Expand Up @@ -199,7 +232,7 @@ class PatchedJSONSchema(marshmallow_jsonschema.JSONSchema):
# Patch marshmallow-jsonschema to support marshmallow-dataclass[union]
def _get_schema_for_field(self, obj, field):
"""Patch marshmallow_jsonschema.base.JSONSchema to support marshmallow-dataclass[union]."""
if isinstance(field, fields.Raw) and field.allow_none and not field.validate:
if isinstance(field, marshmallow_fields.Raw) and field.allow_none and not field.validate:
# raw fields shouldn't be type string but type any. bug in marshmallow_dataclass:__init__.py:
# if typ is Any:
# metadata.setdefault("allow_none", True)
Expand Down
Loading