enhancement(elasticsearch sink): Allow empty data_stream fields (#18193)

enhancement(elasticsearch sink): Allow empty data_stream fields

 Add support for ES data streams with an empty namespace or dataset.

 closes: #17883

 Signed-off-by: Artur Malchanau <artur.molchanov@bolt.eu>
Hexta authored Aug 10, 2023
1 parent 7603d28 commit 96d8063
Showing 2 changed files with 169 additions and 7 deletions.
9 changes: 8 additions & 1 deletion src/sinks/elasticsearch/config.rs
@@ -460,7 +460,14 @@ impl DataStreamConfig {
                 .or_else(|| self.namespace(log))?;
             (dtype, dataset, namespace)
         };
-        Some(format!("{}-{}-{}", dtype, dataset, namespace))
+
+        let name = [dtype, dataset, namespace]
+            .into_iter()
+            .filter(|s| !s.is_empty())
+            .collect::<Vec<_>>()
+            .join("-");
+
+        Some(name)
     }
 }

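To see the effect of the config.rs change at a glance, here is a minimal, self-contained sketch; the free function data_stream_name below is illustrative only, not the sink's actual API, and simply mirrors the filter-then-join approach from the hunk above. Fields that are missing entirely (rather than set to an empty string) are still filled in by the existing fallbacks in DataStreamConfig, which is why the tests below expect names such as logs-generic-default.

fn data_stream_name(dtype: &str, dataset: &str, namespace: &str) -> String {
    // Skip empty components so the name never contains doubled dashes;
    // the removed format!("{}-{}-{}", ...) call would have produced e.g. "logs--default".
    [dtype, dataset, namespace]
        .into_iter()
        .filter(|s| !s.is_empty())
        .collect::<Vec<_>>()
        .join("-")
}

fn main() {
    assert_eq!(data_stream_name("logs", "nginx", "prod"), "logs-nginx-prod");
    assert_eq!(data_stream_name("logs", "", "prod"), "logs-prod");
    assert_eq!(data_stream_name("", "", ""), "");
}
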
167 changes: 161 additions & 6 deletions src/sinks/elasticsearch/tests.rs
@@ -65,10 +65,25 @@ async fn sets_create_action_when_configured() {
     assert_eq!(encoded.len(), encoded_size);
 }
 
-fn data_stream_body() -> BTreeMap<String, Value> {
+fn data_stream_body(
+    dtype: Option<String>,
+    dataset: Option<String>,
+    namespace: Option<String>,
+) -> BTreeMap<String, Value> {
     let mut ds = BTreeMap::<String, Value>::new();
-    ds.insert("type".into(), Value::from("synthetics"));
-    ds.insert("dataset".into(), Value::from("testing"));
+
+    if let Some(dtype) = dtype {
+        ds.insert("type".into(), Value::from(dtype));
+    }
+
+    if let Some(dataset) = dataset {
+        ds.insert("dataset".into(), Value::from(dataset));
+    }
+
+    if let Some(namespace) = namespace {
+        ds.insert("namespace".into(), Value::from(namespace));
+    }
+
     ds
 }

@@ -100,7 +115,14 @@ async fn encode_datastream_mode() {
             .single()
             .expect("invalid timestamp"),
     );
-    log.insert("data_stream", data_stream_body());
+    log.insert(
+        "data_stream",
+        data_stream_body(
+            Some("synthetics".to_string()),
+            Some("testing".to_string()),
+            None,
+        ),
+    );
 
     let mut encoded = vec![];
     let (encoded_size, _json_size) = es
@@ -143,7 +165,14 @@ async fn encode_datastream_mode_no_routing() {
     let es = ElasticsearchCommon::parse_single(&config).await.unwrap();
 
     let mut log = LogEvent::from("hello there");
-    log.insert("data_stream", data_stream_body());
+    log.insert(
+        "data_stream",
+        data_stream_body(
+            Some("synthetics".to_string()),
+            Some("testing".to_string()),
+            None,
+        ),
+    );
     log.insert(
         (
             lookup::PathPrefix::Event,
@@ -287,7 +316,14 @@ async fn encode_datastream_mode_no_sync() {
     let es = ElasticsearchCommon::parse_single(&config).await.unwrap();
 
     let mut log = LogEvent::from("hello there");
-    log.insert("data_stream", data_stream_body());
+    log.insert(
+        "data_stream",
+        data_stream_body(
+            Some("synthetics".to_string()),
+            Some("testing".to_string()),
+            None,
+        ),
+    );
     log.insert(
         (
             lookup::PathPrefix::Event,
@@ -389,3 +425,122 @@ async fn allows_using_only_fields() {
     assert_eq!(std::str::from_utf8(&encoded).unwrap(), expected);
     assert_eq!(encoded.len(), encoded_size);
 }
+
+#[tokio::test]
+async fn datastream_index_name() {
+    #[derive(Clone, Debug)]
+    struct TestCase {
+        dtype: Option<String>,
+        namespace: Option<String>,
+        dataset: Option<String>,
+        want: String,
+    }
+
+    let config = ElasticsearchConfig {
+        bulk: BulkConfig {
+            index: parse_template("vector"),
+            ..Default::default()
+        },
+        endpoints: vec![String::from("https://example.com")],
+        mode: ElasticsearchMode::DataStream,
+        api_version: ElasticsearchApiVersion::V6,
+        ..Default::default()
+    };
+    let es = ElasticsearchCommon::parse_single(&config).await.unwrap();
+
+    let test_cases = [
+        TestCase {
+            dtype: Some("type".to_string()),
+            dataset: Some("dataset".to_string()),
+            namespace: Some("namespace".to_string()),
+            want: "type-dataset-namespace".to_string(),
+        },
+        TestCase {
+            dtype: Some("type".to_string()),
+            dataset: Some("".to_string()),
+            namespace: Some("namespace".to_string()),
+            want: "type-namespace".to_string(),
+        },
+        TestCase {
+            dtype: Some("type".to_string()),
+            dataset: None,
+            namespace: Some("namespace".to_string()),
+            want: "type-generic-namespace".to_string(),
+        },
+        TestCase {
+            dtype: Some("type".to_string()),
+            dataset: Some("".to_string()),
+            namespace: Some("".to_string()),
+            want: "type".to_string(),
+        },
+        TestCase {
+            dtype: Some("type".to_string()),
+            dataset: None,
+            namespace: None,
+            want: "type-generic-default".to_string(),
+        },
+        TestCase {
+            dtype: Some("".to_string()),
+            dataset: Some("".to_string()),
+            namespace: Some("".to_string()),
+            want: "".to_string(),
+        },
+        TestCase {
+            dtype: None,
+            dataset: None,
+            namespace: None,
+            want: "logs-generic-default".to_string(),
+        },
+        TestCase {
+            dtype: Some("".to_string()),
+            dataset: Some("dataset".to_string()),
+            namespace: Some("namespace".to_string()),
+            want: "dataset-namespace".to_string(),
+        },
+        TestCase {
+            dtype: None,
+            dataset: Some("dataset".to_string()),
+            namespace: Some("namespace".to_string()),
+            want: "logs-dataset-namespace".to_string(),
+        },
+        TestCase {
+            dtype: Some("".to_string()),
+            dataset: Some("".to_string()),
+            namespace: Some("namespace".to_string()),
+            want: "namespace".to_string(),
+        },
+        TestCase {
+            dtype: None,
+            dataset: None,
+            namespace: Some("namespace".to_string()),
+            want: "logs-generic-namespace".to_string(),
+        },
+        TestCase {
+            dtype: Some("".to_string()),
+            dataset: Some("dataset".to_string()),
+            namespace: Some("".to_string()),
+            want: "dataset".to_string(),
+        },
+        TestCase {
+            dtype: None,
+            dataset: Some("dataset".to_string()),
+            namespace: None,
+            want: "logs-dataset-default".to_string(),
+        },
+    ];
+
+    for test_case in test_cases {
+        let mut log = LogEvent::from("hello there");
+        log.insert(
+            "data_stream",
+            data_stream_body(
+                test_case.dtype.clone(),
+                test_case.dataset.clone(),
+                test_case.namespace.clone(),
+            ),
+        );
+
+        let processed_event = process_log(log, &es.mode, &None, &config.encoding).unwrap();
+        assert_eq!(processed_event.index, test_case.want, "{test_case:?}");
+    }
+}
