feat: add support for Data[x] notation and Provider_Name (field mapping feature's config) #1352

Merged · 3 commits · May 25, 2024
1 change: 1 addition & 0 deletions CHANGELOG-Japanese.md
@@ -17,6 +17,7 @@
- REST APIからエクスポートされたSplunkログを分析できるようになった。 (#1083) (@hitenkoku)
- `count`で複数のグループを指定できるようにした。例: `count() by IpAddress,SubStatus,LogonType >= 2`。また、出力される結果を更新した。例: `[condition] count(TargetUserName) by IpAddress > 3 in timeframe [result] count: 4 TargetUserName:tanaka/Administrator/adsyncadmin/suzuki IpAddress:- timeframe:5m` -> `Count: 4 ¦ TargetUserName: tanaka/Administrator/adsyncadmin/suzuki ¦ IpAddress: -` (#1339) (@fukusuket)
- リリースモードでのオーバーフローチェックを有効にした。(#1348) (@YamatoSecurity)
- フィールドデータマッピングファイル(`rules/config/data_mapping/*.yaml`)で任意の`Provider_Name`フィールドを指定できるようにし、`Data[x]`表記に対応した。(#1350) (@fukusuket)

## 2.15.0 [2024/04/20] "Sonic Release"

1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -17,6 +17,7 @@
- You can now analyze Splunk logs exported from the REST API. (#1083) (@hitenkoku)
- You can now specify multiple groups with `count`. Ex: `count() by IpAddress,SubStatus,LogonType >= 2` Also, the output has been updated. Ex: `[condition] count(TargetUserName) by IpAddress > 3 in timeframe [result] count: 4 TargetUserName:tanaka/Administrator/adsyncadmin/suzuki IpAddress:- timeframe:5m` -> `Count: 4 ¦ TargetUserName: tanaka/Administrator/adsyncadmin/suzuki ¦ IpAddress: -` (#1339) (@fukusuket)
- Enabled overflow checks in release mode. (#1348) (@YamatoSecurity)
- Added support for specifying an optional `Provider_Name` field in field data mapping files (`rules/config/data_mapping/*.yaml`) as well as support for `Data[x]` notation. (#1350) (@fukusuket)

## 2.15.0 [2024/04/20] "Sonic Release"

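For context, a mapping file that uses both new capabilities could look like the sketch below. The key layout (`Channel`, `EventID`, `RewriteFieldData`) follows the existing files under `rules/config/data_mapping/`; the provider and replacement values here are purely illustrative and do not come from this PR, and `Data[x]` indexes are 1-based.

```yaml
# Illustrative sketch only; values are not taken from this PR.
Channel: Security
EventID: 4625
# New: optional Provider_Name, given as a single string or a list.
Provider_Name: Microsoft-Windows-Security-Auditing
RewriteFieldData:
  # New: Data[x] notation targets the x-th element (1-based) of the Data array.
  Data[3]:
    - '%%1842': 'YES'
    - '%%1843': 'NO'
```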
46 changes: 37 additions & 9 deletions src/detections/field_data_map.rs
@@ -1,8 +1,10 @@
use crate::detections::field_data_map::FieldDataConverter::{HexToDecimal, ReplaceStr};
use crate::detections::message::AlertMessage;
use crate::detections::utils::get_serde_number_to_string;
use aho_corasick::AhoCorasick;
use compact_str::CompactString;
use hashbrown::HashMap;
use hashbrown::{HashMap, HashSet};
use serde_json::Value;
use std::fs;
use std::path::Path;
use std::string::String;
@@ -14,7 +16,7 @@ pub type FieldDataMapEntry = HashMap<String, FieldDataConverter>;
#[derive(Debug, Clone)]
pub enum FieldDataConverter {
HexToDecimal,
ReplaceStr((AhoCorasick, Vec<String>)),
ReplaceStr((AhoCorasick, Vec<String>), HashSet<String>),
}

#[derive(Debug, Eq, Hash, PartialEq, Default, Clone)]
@@ -52,6 +54,14 @@ fn build_field_data_map(yaml_data: Yaml) -> (FieldDataMapKey, FieldDataMapEntry)
if rewrite_field_data.is_none() && hex2decimal.is_none() {
return (FieldDataMapKey::default(), FieldDataMapEntry::default());
}
let mut providers = HashSet::new();
if let Some(providers_yaml) = yaml_data["Provider_Name"].as_vec() {
for provider in providers_yaml {
providers.insert(provider.as_str().unwrap_or_default().to_string());
}
} else if let Some(provider_name) = yaml_data["Provider_Name"].as_str() {
providers.insert(provider_name.to_string());
}
let mut mapping = HashMap::new();
if let Some(x) = rewrite_field_data {
for (key_yaml, val_yaml) in x.iter() {
@@ -78,7 +88,7 @@ fn build_field_data_map(yaml_data: Yaml) -> (FieldDataMapKey, FieldDataMapEntry)
}
mapping.insert(
field.to_string().to_lowercase(),
ReplaceStr((ac.unwrap(), reps)),
ReplaceStr((ac.unwrap(), reps), providers.clone()),
);
}
}
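A minimal sketch of the parsing added above, assuming the same `yaml-rust` and `hashbrown` crates: `Provider_Name` is accepted as either a scalar or a list, and an absent key simply leaves the set empty, which means no provider filtering.

```rust
use hashbrown::HashSet;
use yaml_rust::YamlLoader;

// Collect Provider_Name entries from a mapping file, scalar or list form.
fn providers_from(yaml_str: &str) -> HashSet<String> {
    let doc = &YamlLoader::load_from_str(yaml_str).unwrap()[0];
    let mut providers = HashSet::new();
    if let Some(list) = doc["Provider_Name"].as_vec() {
        for p in list {
            providers.insert(p.as_str().unwrap_or_default().to_string());
        }
    } else if let Some(p) = doc["Provider_Name"].as_str() {
        providers.insert(p.to_string());
    }
    providers
}

fn main() {
    assert_eq!(providers_from("Provider_Name: Foo").len(), 1);
    assert_eq!(providers_from("Provider_Name: [Foo, Bar]").len(), 2);
    assert!(providers_from("EventID: 4625").is_empty()); // no key -> no filtering
}
```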
@@ -98,12 +108,23 @@ pub fn convert_field_data(
data_map_key: &FieldDataMapKey,
field: &str,
field_data_str: &str,
record: &Value,
) -> Option<CompactString> {
match data_map.get(data_map_key) {
None => None,
Some(data_map_entry) => match data_map_entry.get(field) {
None => None,
Some(ReplaceStr(x)) => {
Some(ReplaceStr(x, providers)) => {
if !providers.is_empty() {
let provider = get_serde_number_to_string(
&record["Event"]["System"]["Provider_attributes"]["Name"],
false,
)
.unwrap_or_default();
if !providers.contains(&provider.to_string()) {
return Some(CompactString::from(field_data_str));
}
};
let (ac, rep) = x;
let mut wtr = vec![];
let _ = ac.try_stream_replace_all(field_data_str.as_bytes(), &mut wtr, rep);
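The provider gate above reduces to a simple rule: a non-empty provider set restricts conversion to matching records, and non-matching records get their original field value back unchanged. A standalone sketch under those assumptions — the closure stands in for the Aho-Corasick rewrite, and the record path mirrors the lookup above:

```rust
use hashbrown::HashSet;
use serde_json::{json, Value};

// Apply a field conversion only when the record's provider is allowed.
fn apply_if_provider_matches(
    providers: &HashSet<String>,
    record: &Value,
    field_data: &str,
    convert: impl Fn(&str) -> String,
) -> String {
    let provider = record["Event"]["System"]["Provider_attributes"]["Name"]
        .as_str()
        .unwrap_or_default();
    if !providers.is_empty() && !providers.contains(provider) {
        return field_data.to_string(); // provider mismatch: pass through unchanged
    }
    convert(field_data)
}

fn main() {
    let record = json!({"Event": {"System": {"Provider_attributes":
        {"Name": "Microsoft-Windows-Security-Auditing"}}}});
    let allowed: HashSet<String> =
        HashSet::from_iter(["Microsoft-Windows-Security-Auditing".to_string()]);
    let out = apply_if_provider_matches(&allowed, &record, "%%1842",
        |s| s.replace("%%1842", "YES"));
    assert_eq!(out, "YES");
}
```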
@@ -160,6 +181,7 @@ mod tests {
use crate::detections::utils;
use compact_str::CompactString;
use hashbrown::HashMap;
use serde_json::Value;
use std::path::Path;
use yaml_rust::{Yaml, YamlLoader};

@@ -179,7 +201,13 @@

#[test]
fn test_convert_field_data_empty_data1() {
let r = convert_field_data(&HashMap::new(), &FieldDataMapKey::default(), "", "");
let r = convert_field_data(
&HashMap::new(),
&FieldDataMapKey::default(),
"",
"",
&Value::Null,
);
assert!(r.is_none());
}

@@ -191,7 +219,7 @@
event_id: CompactString::from("4625".to_string()),
};
map.insert(key.clone(), HashMap::new());
let r = convert_field_data(&map, &key, "", "");
let r = convert_field_data(&map, &key, "", "", &Value::Null);
assert!(r.is_none());
}

@@ -208,7 +236,7 @@
let (key, entry) = build_field_data_map(build_yaml(s));
let mut map = HashMap::new();
map.insert(key.clone(), entry);
let r = convert_field_data(&map, &key, "logontype", "Foo 0");
let r = convert_field_data(&map, &key, "logontype", "Foo 0", &Value::Null);
assert_eq!(r.unwrap(), "Foo 0 - SYSTEM");
}

@@ -273,15 +301,15 @@
let mut wtr = vec![];
match r.1.get("elevatedtoken").unwrap() {
FieldDataConverter::HexToDecimal => panic!(),
FieldDataConverter::ReplaceStr(x) => {
FieldDataConverter::ReplaceStr(x, _) => {
let (ac, rp) = x;
let _ = ac.try_stream_replace_all("foo, %%1842, %%1843".as_bytes(), &mut wtr, rp);
assert_eq!(b"foo, YES, NO".to_vec(), wtr);
}
}
match r.1.get("impersonationlevel").unwrap() {
FieldDataConverter::HexToDecimal => panic!(),
FieldDataConverter::ReplaceStr(x) => {
FieldDataConverter::ReplaceStr(x, _) => {
let mut wtr = vec![];
let (ac, rp) = x;
let _ = ac.try_stream_replace_all("foo, %%1832, %%1833".as_bytes(), &mut wtr, rp);
2 changes: 2 additions & 0 deletions src/detections/message.rs
@@ -314,6 +314,7 @@ pub fn parse_message(
.unwrap_or(tmp_event_record)
.get((suffix - 1) as usize)
.unwrap_or(tmp_event_record);
field = target_str;
}
let hash_value = get_serde_number_to_string(tmp_event_record, false);
if hash_value.is_some() {
@@ -326,6 +327,7 @@
field_data_map_key,
field.to_lowercase().as_str(),
hash_value.as_str(),
event_record,
);
converted_str.unwrap_or(hash_value)
};
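For context, the `Data[x]` suffix handling above resolves a 1-based index into the record's `Data` array, falling back to the whole node when the index is out of range. A self-contained sketch of that lookup; the record layout is an assumption inferred from the code above:

```rust
use serde_json::{json, Value};

// Resolve `Data[suffix]` (1-based) against a record, mirroring parse_message.
fn data_x(record: &Value, suffix: usize) -> &Value {
    let data = &record["Event"]["EventData"]["Data"];
    data.as_array()
        .and_then(|arr| arr.get(suffix - 1))
        .unwrap_or(data) // out of range: fall back to the whole node
}

fn main() {
    let record = json!({"Event": {"EventData": {"Data": ["a", "b", "c"]}}});
    assert_eq!(data_x(&record, 2), &json!("b"));
    assert_eq!(data_x(&record, 9), &json!(["a", "b", "c"]));
}
```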
54 changes: 45 additions & 9 deletions src/detections/utils.rs
@@ -393,7 +393,15 @@ pub fn create_recordinfos(
field_data_map: &Option<FieldDataMap>,
) -> Vec<CompactString> {
let mut output = HashSet::new();
_collect_recordinfo(&mut vec![], "", record, &mut output);
_collect_recordinfo(
&mut vec![],
"",
0,
record,
record,
&mut output,
(field_data_map, field_data_map_key),
);

let mut output_vec: Vec<&(String, String)> = output.iter().collect();
// Sort so that the same record always produces the same output
@@ -411,7 +419,7 @@
.map(|(key, value)| {
if let Some(map) = field_data_map.as_ref() {
if let Some(converted_str) =
convert_field_data(map, field_data_map_key, &key.to_lowercase(), value)
convert_field_data(map, field_data_map_key, &key.to_lowercase(), value, record)
{
let val = remove_sp_char(converted_str);
return format!("{key}: {val}",).into();
@@ -429,13 +437,24 @@
fn _collect_recordinfo<'a>(
keys: &mut Vec<&'a str>,
parent_key: &'a str,
value: &'a Value,
arr_index: usize,
org_value: &'a Value,
cur_value: &'a Value,
output: &mut HashSet<(String, String)>,
filed_data_converter: (&Option<FieldDataMap>, &FieldDataMapKey),
) {
match value {
match cur_value {
Value::Array(ary) => {
for sub_value in ary {
_collect_recordinfo(keys, parent_key, sub_value, output);
for (i, sub_value) in ary.iter().enumerate() {
_collect_recordinfo(
keys,
parent_key,
i,
org_value,
sub_value,
output,
filed_data_converter,
);
}
}
Value::Object(obj) => {
@@ -452,7 +471,7 @@ fn _collect_recordinfo<'a>(
continue;
}

_collect_recordinfo(keys, key, value, output);
_collect_recordinfo(keys, key, 0, org_value, value, output, filed_data_converter);
}
if !parent_key.is_empty() {
keys.pop();
@@ -461,9 +480,9 @@
Value::Null => (),
_ => {
// Only collect values from the innermost (leaf) elements
let strval = value_to_string(value);
let strval = value_to_string(cur_value);
if let Some(strval) = strval {
let strval = strval.chars().fold(String::default(), |mut acc, c| {
let mut strval = strval.chars().fold(String::default(), |mut acc, c| {
if (c.is_control() || c.is_ascii_whitespace())
&& !['\r', '\n', '\t'].contains(&c)
{
Expand All @@ -473,6 +492,23 @@ fn _collect_recordinfo<'a>(
};
acc
});
if arr_index > 0 {
let (field_data_map, field_data_map_key) = filed_data_converter;
let i = arr_index + 1;
let field = format!("{parent_key}[{i}]",).to_lowercase();
if let Some(map) = field_data_map {
let converted_str = convert_field_data(
map,
field_data_map_key,
field.as_str(),
strval.as_str(),
org_value,
);
if let Some(converted_str) = converted_str {
strval = converted_str.to_string();
}
}
}
output.insert((parent_key.to_string(), strval));
}
}
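One subtlety in the `_collect_recordinfo` change above: the converter lookup key for array elements is only built when `arr_index > 0`, and it is 1-based, so the element at position 1 is looked up as `data[2]`. A hypothetical helper (not in the PR) stating that rule:

```rust
// Hypothetical helper mirroring the lookup-key convention in _collect_recordinfo:
// only elements past the first get a `field[i]` converter key, with i = index + 1.
fn converter_key(parent_key: &str, arr_index: usize) -> Option<String> {
    if arr_index == 0 {
        None // first element: no Data[x] conversion in this pass
    } else {
        Some(format!("{parent_key}[{}]", arr_index + 1).to_lowercase())
    }
}

fn main() {
    assert_eq!(converter_key("Data", 0), None);
    assert_eq!(converter_key("Data", 1), Some("data[2]".to_string()));
    assert_eq!(converter_key("Data", 2), Some("data[3]".to_string()));
}
```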