Skip to content

Commit dbb0529

Browse files
brokensound77 and github-actions[bot]
authored and committed
Refresh Kibana module with API updates (#3466)
* Refresh Kibana module with API updates
* add import/export commands
* rename repo commands
* add RawRuleCollection and DictRule objects
* save exported rules to files; rule.from_rule_resource
* strip unknown fields in schema
* add remote cli test
* update docs
* bump kibana lib version

---------

Co-authored-by: brokensound77 <brokensound77@users.noreply.github.com>
(cherry picked from commit c567d37)
1 parent 6648e09 commit dbb0529

File tree

15 files changed

+914
-44
lines changed

15 files changed

+914
-44
lines changed

CLI.md

Lines changed: 266 additions & 0 deletions
Large diffs are not rendered by default.

detection_rules/cli_utils.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ def get_collection(*args, **kwargs):
5050

5151
if rule_id:
5252
rules.load_directories((DEFAULT_RULES_DIR, DEFAULT_BBR_DIR),
53-
toml_filter=dict_filter(rule__rule_id=rule_id))
53+
obj_filter=dict_filter(rule__rule_id=rule_id))
5454
if len(rules) != 1:
5555
client_error(f"Could not find rule with ID {rule_id}")
5656

@@ -66,7 +66,7 @@ def multi_collection(f):
6666

6767
@click.option('--rule-file', '-f', multiple=True, type=click.Path(dir_okay=False), required=False)
6868
@click.option('--directory', '-d', multiple=True, type=click.Path(file_okay=False), required=False,
69-
help='Recursively export rules from a directory')
69+
help='Recursively load rules from a directory')
7070
@click.option('--rule-id', '-id', multiple=True, required=False)
7171
@functools.wraps(f)
7272
def get_collection(*args, **kwargs):
@@ -84,7 +84,7 @@ def get_collection(*args, **kwargs):
8484

8585
if rule_id:
8686
rules.load_directories((DEFAULT_RULES_DIR, DEFAULT_BBR_DIR),
87-
toml_filter=dict_filter(rule__rule_id=rule_id))
87+
obj_filter=dict_filter(rule__rule_id=rule_id))
8888
found_ids = {rule.id for rule in rules}
8989
missing = set(rule_id).difference(found_ids)
9090

detection_rules/etc/test_remote_cli.bash

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,4 +12,11 @@ echo "Performing a quick rule alerts search..."
1212
echo "Requires .detection-rules-cfg.json credentials file set."
1313
python -m detection_rules kibana search-alerts
1414

15+
echo "Performing a rule export..."
16+
mkdir tmp-export 2>/dev/null
17+
python -m detection_rules kibana export-rules -d tmp-export --skip-errors
18+
ls tmp-export
19+
echo "Removing generated files..."
20+
rm -rf tmp-export
21+
1522
echo "Detection-rules CLI tests completed!"

detection_rules/kbwrap.py

Lines changed: 98 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -5,17 +5,20 @@
55

66
"""Kibana cli commands."""
77
import sys
8+
from pathlib import Path
9+
from typing import Iterable, List, Optional
810

911
import click
1012

11-
1213
import kql
1314
from kibana import Signal, RuleResource
15+
1416
from .cli_utils import multi_collection
1517
from .main import root
1618
from .misc import add_params, client_error, kibana_options, get_kibana_client, nested_set
17-
from .rule import downgrade_contents_from_rule
18-
from .utils import format_command_options
19+
from .rule import downgrade_contents_from_rule, TOMLRuleContents, TOMLRule
20+
from .rule_loader import RuleCollection
21+
from .utils import format_command_options, rulename_to_filename
1922

2023

2124
@root.group('kibana')
@@ -38,7 +41,7 @@ def kibana_group(ctx: click.Context, **kibana_kwargs):
3841
@multi_collection
3942
@click.option('--replace-id', '-r', is_flag=True, help='Replace rule IDs with new IDs before export')
4043
@click.pass_context
41-
def upload_rule(ctx, rules, replace_id):
44+
def upload_rule(ctx, rules: RuleCollection, replace_id):
4245
"""Upload a list of rule .toml files to Kibana."""
4346
kibana = ctx.obj['kibana']
4447
api_payloads = []
@@ -53,7 +56,7 @@ def upload_rule(ctx, rules, replace_id):
5356
api_payloads.append(rule)
5457

5558
with kibana:
56-
results = RuleResource.bulk_create(api_payloads)
59+
results = RuleResource.bulk_create_legacy(api_payloads)
5760

5861
success = []
5962
errors = []
@@ -71,6 +74,96 @@ def upload_rule(ctx, rules, replace_id):
7174
return results
7275

7376

77+
@kibana_group.command('import-rules')
78+
@multi_collection
79+
@click.option('--overwrite', '-o', is_flag=True, help='Overwrite existing rules')
80+
@click.option('--overwrite-exceptions', '-e', is_flag=True, help='Overwrite exceptions in existing rules')
81+
@click.option('--overwrite-action-connectors', '-a', is_flag=True,
82+
help='Overwrite action connectors in existing rules')
83+
@click.pass_context
84+
def kibana_import_rules(ctx: click.Context, rules: RuleCollection, overwrite: Optional[bool] = False,
85+
overwrite_exceptions: Optional[bool] = False,
86+
overwrite_action_connectors: Optional[bool] = False) -> (dict, List[RuleResource]):
87+
"""Import custom rules into Kibana."""
88+
kibana = ctx.obj['kibana']
89+
rule_dicts = [r.contents.to_api_format() for r in rules]
90+
with kibana:
91+
response, successful_rule_ids, results = RuleResource.import_rules(
92+
rule_dicts,
93+
overwrite=overwrite,
94+
overwrite_exceptions=overwrite_exceptions,
95+
overwrite_action_connectors=overwrite_action_connectors
96+
)
97+
98+
if successful_rule_ids:
99+
click.echo(f'{len(successful_rule_ids)} rule(s) successfully imported')
100+
rule_str = '\n - '.join(successful_rule_ids)
101+
print(f' - {rule_str}')
102+
if response['errors']:
103+
click.echo(f'{len(response["errors"])} rule(s) failed to import!')
104+
for error in response['errors']:
105+
click.echo(f' - {error["rule_id"]}: ({error["error"]["status_code"]}) {error["error"]["message"]}')
106+
107+
return response, results
108+
109+
110+
@kibana_group.command('export-rules')
111+
@click.option('--directory', '-d', required=True, type=Path, help='Directory to export rules to')
112+
@click.option('--rule-id', '-r', multiple=True, help='Optional Rule IDs to restrict export to')
113+
@click.option('--skip-errors', '-s', is_flag=True, help='Skip errors when exporting rules')
114+
@click.pass_context
115+
def kibana_export_rules(ctx: click.Context, directory: Path,
116+
rule_id: Optional[Iterable[str]] = None, skip_errors: bool = False) -> List[TOMLRule]:
117+
"""Export custom rules from Kibana."""
118+
kibana = ctx.obj['kibana']
119+
with kibana:
120+
results = RuleResource.export_rules(list(rule_id))
121+
122+
if results:
123+
directory.mkdir(parents=True, exist_ok=True)
124+
125+
errors = []
126+
exported = []
127+
for rule_resource in results:
128+
try:
129+
contents = TOMLRuleContents.from_rule_resource(rule_resource, maturity='production')
130+
threat = contents.data.get('threat')
131+
first_tactic = threat[0].tactic.name if threat else ''
132+
rule_name = rulename_to_filename(contents.data.name, tactic_name=first_tactic)
133+
rule = TOMLRule(contents=contents, path=directory / f'{rule_name}')
134+
except Exception as e:
135+
if skip_errors:
136+
print(f'- skipping {rule_resource.get("name")} - {type(e).__name__}')
137+
errors.append(f'- {rule_resource.get("name")} - {e}')
138+
continue
139+
raise
140+
141+
exported.append(rule)
142+
143+
saved = []
144+
for rule in exported:
145+
try:
146+
rule.save_toml()
147+
except Exception as e:
148+
if skip_errors:
149+
print(f'- skipping {rule.contents.data.name} - {type(e).__name__}')
150+
errors.append(f'- {rule.contents.data.name} - {e}')
151+
continue
152+
raise
153+
154+
saved.append(rule)
155+
156+
click.echo(f'{len(results)} rules exported')
157+
click.echo(f'{len(exported)} rules converted')
158+
click.echo(f'{len(saved)} saved to {directory}')
159+
if errors:
160+
err_file = directory / '_errors.txt'
161+
err_file.write_text('\n'.join(errors))
162+
click.echo(f'{len(errors)} errors saved to {err_file}')
163+
164+
return exported
165+
166+
74167
@kibana_group.command('search-alerts')
75168
@click.argument('query', required=False)
76169
@click.option('--date-range', '-d', type=(str, str), default=('now-7d', 'now'), help='Date range to scope search')

detection_rules/main.py

Lines changed: 7 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,6 @@
88
import glob
99
import json
1010
import os
11-
import re
1211
import time
1312
from datetime import datetime
1413

@@ -29,7 +28,7 @@
2928
from .rule_formatter import toml_write
3029
from .rule_loader import RuleCollection
3130
from .schemas import all_versions, definitions, get_incompatible_fields, get_schema_file
32-
from .utils import Ndjson, get_path, get_etc_path, clear_caches, load_dump, load_rule_contents
31+
from .utils import Ndjson, get_path, get_etc_path, clear_caches, load_dump, load_rule_contents, rulename_to_filename
3332

3433
RULES_DIR = get_path('rules')
3534

@@ -92,11 +91,11 @@ def generate_rules_index(ctx: click.Context, query, overwrite, save_files=True):
9291
return bulk_upload_docs, importable_rules_docs
9392

9493

95-
@root.command('import-rules')
94+
@root.command('import-rules-to-repo')
9695
@click.argument('input-file', type=click.Path(dir_okay=False, exists=True), nargs=-1, required=False)
9796
@click.option('--required-only', is_flag=True, help='Only prompt for required fields')
9897
@click.option('--directory', '-d', type=click.Path(file_okay=False, exists=True), help='Load files from a directory')
99-
def import_rules(input_file, required_only, directory):
98+
def import_rules_into_repo(input_file, required_only, directory):
10099
"""Import rules from json, toml, yaml, or Kibana exported rule file(s)."""
101100
rule_files = glob.glob(os.path.join(directory, '**', '*.*'), recursive=True) if directory else []
102101
rule_files = sorted(set(rule_files + list(input_file)))
@@ -108,12 +107,9 @@ def import_rules(input_file, required_only, directory):
108107
if not rule_contents:
109108
click.echo('Must specify at least one file!')
110109

111-
def name_to_filename(name):
112-
return re.sub(r'[^_a-z0-9]+', '_', name.strip().lower()).strip('_') + '.toml'
113-
114110
for contents in rule_contents:
115111
base_path = contents.get('name') or contents.get('rule', {}).get('name')
116-
base_path = name_to_filename(base_path) if base_path else base_path
112+
base_path = rulename_to_filename(base_path) if base_path else base_path
117113
rule_path = os.path.join(RULES_DIR, base_path) if base_path else None
118114
additional = ['index'] if not contents.get('data_view_id') else ['data_view_id']
119115
rule_prompt(rule_path, required_only=required_only, save=True, verbose=True,
@@ -274,7 +270,7 @@ def _export_rules(rules: RuleCollection, outfile: Path, downgrade_version: Optio
274270
click.echo(f'Skipped {len(unsupported)} unsupported rules: \n- {unsupported_str}')
275271

276272

277-
@root.command('export-rules')
273+
@root.command('export-rules-from-repo')
278274
@multi_collection
279275
@click.option('--outfile', '-o', default=Path(get_path('exports', f'{time.strftime("%Y%m%dT%H%M%SL")}.ndjson')),
280276
type=Path, help='Name of file for exported rules')
@@ -285,8 +281,8 @@ def _export_rules(rules: RuleCollection, outfile: Path, downgrade_version: Optio
285281
help='If `--stack-version` is passed, skip rule types which are unsupported '
286282
'(an error will be raised otherwise)')
287283
@click.option('--include-metadata', type=bool, is_flag=True, default=False, help='Add metadata to the exported rules')
288-
def export_rules(rules, outfile: Path, replace_id, stack_version,
289-
skip_unsupported, include_metadata: bool) -> RuleCollection:
284+
def export_rules_from_repo(rules, outfile: Path, replace_id, stack_version,
285+
skip_unsupported, include_metadata: bool) -> RuleCollection:
290286
"""Export rule(s) into an importable ndjson file."""
291287
assert len(rules) > 0, "No rules found"
292288

detection_rules/mixins.py

Lines changed: 40 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -5,15 +5,17 @@
55

66
"""Generic mixin classes."""
77

8+
import dataclasses
89
from pathlib import Path
9-
from typing import Any, Optional, TypeVar, Type
10+
from typing import Any, Optional, TypeVar, Type, Literal
1011

1112
import json
1213
import marshmallow_dataclass
1314
import marshmallow_dataclass.union_field
1415
import marshmallow_jsonschema
1516
import marshmallow_union
16-
from marshmallow import Schema, ValidationError, fields, validates_schema
17+
import marshmallow
18+
from marshmallow import Schema, ValidationError, validates_schema, fields as marshmallow_fields
1719

1820
from .misc import load_current_package_version
1921
from .schemas import definitions
@@ -23,6 +25,7 @@
2325

2426
T = TypeVar('T')
2527
ClassT = TypeVar('ClassT') # bound=dataclass?
28+
UNKNOWN_VALUES = Literal['raise', 'exclude', 'include']
2629

2730

2831
def _strip_none_from_dict(obj: T) -> T:
@@ -81,14 +84,44 @@ def dive(child: dict) -> dict:
8184
return patched
8285

8386

87+
class BaseSchema(Schema):
88+
"""Base schema for marshmallow dataclasses with unknown."""
89+
class Meta:
90+
"""Meta class for marshmallow schema."""
91+
92+
93+
def exclude_class_schema(
94+
clazz, base_schema: type[Schema] = BaseSchema, unknown: UNKNOWN_VALUES = marshmallow.EXCLUDE, **kwargs
95+
) -> type[Schema]:
96+
"""Get a marshmallow schema for a dataclass with unknown=EXCLUDE."""
97+
base_schema.Meta.unknown = unknown
98+
return marshmallow_dataclass.class_schema(clazz, base_schema=base_schema, **kwargs)
99+
100+
101+
def recursive_class_schema(
102+
clazz, base_schema: type[Schema] = BaseSchema, unknown: UNKNOWN_VALUES = marshmallow.EXCLUDE, **kwargs
103+
) -> type[Schema]:
104+
"""Recursively apply the unknown parameter for nested schemas."""
105+
schema = exclude_class_schema(clazz, base_schema=base_schema, unknown=unknown, **kwargs)
106+
for field in dataclasses.fields(clazz):
107+
if dataclasses.is_dataclass(field.type):
108+
nested_cls = field.type
109+
nested_schema = recursive_class_schema(nested_cls, base_schema=base_schema, **kwargs)
110+
setattr(schema, field.name, nested_schema)
111+
return schema
112+
113+
84114
class MarshmallowDataclassMixin:
85115
"""Mixin class for marshmallow serialization."""
86116

87117
@classmethod
88118
@cached
89-
def __schema(cls: ClassT) -> Schema:
119+
def __schema(cls: ClassT, unknown: Optional[UNKNOWN_VALUES] = None) -> Schema:
90120
"""Get the marshmallow schema for the data class"""
91-
return marshmallow_dataclass.class_schema(cls)()
121+
if unknown:
122+
return recursive_class_schema(cls, unknown=unknown)()
123+
else:
124+
return marshmallow_dataclass.class_schema(cls)()
92125

93126
def get(self, key: str, default: Optional[Any] = None):
94127
"""Get a key from the query data without raising attribute errors."""
@@ -103,9 +136,9 @@ def jsonschema(cls):
103136
return jsonschema
104137

105138
@classmethod
106-
def from_dict(cls: Type[ClassT], obj: dict) -> ClassT:
139+
def from_dict(cls: Type[ClassT], obj: dict, unknown: Optional[UNKNOWN_VALUES] = None) -> ClassT:
107140
"""Deserialize and validate a dataclass from a dict using marshmallow."""
108-
schema = cls.__schema()
141+
schema = cls.__schema(unknown=unknown)
109142
return schema.load(obj)
110143

111144
def to_dict(self, strip_none_values=True) -> dict:
@@ -199,7 +232,7 @@ class PatchedJSONSchema(marshmallow_jsonschema.JSONSchema):
199232
# Patch marshmallow-jsonschema to support marshmallow-dataclass[union]
200233
def _get_schema_for_field(self, obj, field):
201234
"""Patch marshmallow_jsonschema.base.JSONSchema to support marshmallow-dataclass[union]."""
202-
if isinstance(field, fields.Raw) and field.allow_none and not field.validate:
235+
if isinstance(field, marshmallow_fields.Raw) and field.allow_none and not field.validate:
203236
# raw fields shouldn't be type string but type any. bug in marshmallow_dataclass:__init__.py:
204237
# if typ is Any:
205238
# metadata.setdefault("allow_none", True)

0 commit comments

Comments (0)