enhancement(enterprise): hide secrets when enterprise mode is enabled (#14305)

* create sensitive string struct

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace api key for datadog logs sink by sensitive string

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace api key for datadog metrics sink by sensitive string

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace api key for datadog traces sink by sensitive string

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace api key for datadog events sink by sensitive string

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace api key for apex sink by sensitive string

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace access key and secret access key for aws auth by sensitive string

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace keys by sensitive string in azure_blob sink

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace keys by sensitive string in es, axiom, logdna, websocket sinks

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace keys by sensitive string in humio and splunk_hec sinks

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace keys by sensitive string in influxdb sink

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace keys by sensitive string in kafka sink

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace keys by sensitive string in new_relic sink

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace keys by sensitive string in sematext sinks

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace keys by sensitive string in splunk_hec metric sink

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace keys by sensitive string in axiom sink

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace keys by sensitive string in azure_monitor_logs sink

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace keys by sensitive string in honeycomb sink

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace keys by sensitive string in logdna sink

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace keys by sensitive string in nats sink

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace keys by sensitive string in pulsar sink

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace keys by sensitive string in aws_kinesis_firehose source

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace keys by sensitive string in splunk_hec source

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace keys by sensitive string in gcp source

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* replace keys by sensitive string in heroku_logs source

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* fix integration tests

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* apply clippy suggestions

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* apply requested fixes

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

* make SensitiveString configurable

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>

Signed-off-by: Jeremie Drouet <jeremie.drouet@datadoghq.com>
jdrouet authored Sep 16, 2022
1 parent d94bc95 commit e5516e9
Showing 51 changed files with 308 additions and 157 deletions.
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default.

6 changes: 5 additions & 1 deletion lib/vector-common/Cargo.toml
@@ -11,7 +11,8 @@ default = [
"btreemap",
"conversion",
"tokenize",
"encoding"
"encoding",
"sensitive_string"
]

aws_cloudwatch_logs_subscription = [
@@ -36,6 +37,8 @@ encoding = [
"btreemap"
]

sensitive_string = []

test = []

tokenize = [
@@ -63,6 +66,7 @@ tokio = { version = "1.20.1", default-features = false, features = ["macros", "t
tracing = { version = "0.1.34", default-features = false }
value = { path = "../value", features = ["json"] }
vector-config = { path = "../vector-config" }
vector-config-common = { path = "../vector-config-common" }
vector-config-macros = { path = "../vector-config-macros" }

[dev-dependencies]
3 changes: 3 additions & 0 deletions lib/vector-common/src/lib.rs
@@ -49,6 +49,9 @@ pub mod internal_event;

pub mod shutdown;

#[cfg(feature = "sensitive_string")]
pub mod sensitive_string;

#[cfg(feature = "tokenize")]
pub mod tokenize;

68 changes: 68 additions & 0 deletions lib/vector-common/src/sensitive_string.rs
@@ -0,0 +1,68 @@
use vector_config::{configurable_component, ConfigurableString};

/// Wrapper for sensitive strings containing credentials
#[configurable_component(no_deser, no_ser)]
#[cfg_attr(
    feature = "serde",
    derive(::serde::Deserialize, ::serde::Serialize),
    serde(from = "String", into = "String")
)]
#[configurable(metadata(sensitive))]
#[derive(Clone, Default, PartialEq, Eq)]
pub struct SensitiveString(String);

impl From<String> for SensitiveString {
    fn from(value: String) -> Self {
        Self(value)
    }
}

impl From<SensitiveString> for String {
    fn from(value: SensitiveString) -> Self {
        value.0
    }
}

impl ConfigurableString for SensitiveString {}

impl std::fmt::Display for SensitiveString {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "**REDACTED**")
    }
}

impl std::fmt::Debug for SensitiveString {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // we keep the double quotes here to keep the String behavior
        write!(f, "\"**REDACTED**\"")
    }
}

impl SensitiveString {
    #[must_use]
    pub fn inner(&self) -> &str {
        self.0.as_str()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn serialization() {
        let json_value = "\"foo\"";
        let value: SensitiveString = serde_json::from_str(json_value).unwrap();
        let result: String = serde_json::to_string(&value).unwrap();
        assert_eq!(result, json_value);
    }

    #[test]
    fn hide_content() {
        let value = SensitiveString("hello world".to_string());
        let display = format!("{}", value);
        assert_eq!(display, "**REDACTED**");
        let debug = format!("{:?}", value);
        assert_eq!(debug, "\"**REDACTED**\"");
    }
}
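For context, the behavior the new type gives every field that adopts it can be seen in a minimal, hypothetical sketch (the `ExampleConfig` struct and the `serde_json` round-trip are illustrative only and assume vector-common's `serde` feature is enabled; they are not part of this change):

```rust
use serde::{Deserialize, Serialize};
use vector_common::sensitive_string::SensitiveString;

#[derive(Debug, Deserialize, Serialize)]
struct ExampleConfig {
    api_key: SensitiveString,
}

fn main() {
    // Deserialization is transparent: a plain string becomes a SensitiveString.
    let config: ExampleConfig = serde_json::from_str(r#"{"api_key":"abc123"}"#).unwrap();

    // Debug (and Display) output is redacted, so config dumps and logs no longer leak the key.
    assert_eq!(
        format!("{:?}", config),
        r#"ExampleConfig { api_key: "**REDACTED**" }"#
    );

    // Serialization round-trips the real value, so re-emitting the config still works.
    assert_eq!(
        serde_json::to_string(&config).unwrap(),
        r#"{"api_key":"abc123"}"#
    );

    // Components that need the raw secret must ask for it explicitly.
    assert_eq!(config.api_key.inner(), "abc123");
}
```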
13 changes: 7 additions & 6 deletions src/aws/auth.rs
@@ -4,6 +4,7 @@ use aws_config::{
default_provider::credentials::DefaultCredentialsChain, sts::AssumeRoleProviderBuilder,
};
use aws_types::{credentials::SharedCredentialsProvider, region::Region, Credentials};
use vector_common::sensitive_string::SensitiveString;
use vector_config::configurable_component;

// matches default load timeout from the SDK as of 0.10.1, but lets us confidently document the
@@ -19,10 +20,10 @@ pub enum AwsAuthentication {
/// Authenticate using a fixed access key and secret pair.
Static {
/// The AWS access key ID.
access_key_id: String,
access_key_id: SensitiveString,

/// The AWS secret access key.
secret_access_key: String,
secret_access_key: SensitiveString,
},

/// Authenticate using credentials stored in a file.
@@ -69,8 +70,8 @@ impl AwsAuthentication {
access_key_id,
secret_access_key,
} => Ok(SharedCredentialsProvider::new(Credentials::from_keys(
access_key_id,
secret_access_key,
access_key_id.inner(),
secret_access_key.inner(),
None,
))),
AwsAuthentication::File { .. } => {
@@ -97,8 +98,8 @@ impl AwsAuthentication {
#[cfg(test)]
pub fn test_auth() -> AwsAuthentication {
AwsAuthentication::Static {
access_key_id: "dummy".to_string(),
secret_access_key: "dummy".to_string(),
access_key_id: "dummy".to_string().into(),
secret_access_key: "dummy".to_string().into(),
}
}
}
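The call-site pattern in this hunk recurs across the rest of the diff: construction sites convert plain strings with `.into()` (as in `test_auth()` above), and the few consumers that genuinely need the raw credentials call `.inner()` (as in the `Credentials::from_keys` call above). A hedged sketch of the two directions, written as if it lived alongside src/aws/auth.rs (the helper functions are hypothetical, not part of the change):

```rust
// Hypothetical helpers; `AwsAuthentication` is the enum defined in this file.

/// Builds the static variant from plain strings, relying on `From<String>`.
fn static_auth(key_id: String, secret: String) -> AwsAuthentication {
    AwsAuthentication::Static {
        access_key_id: key_id.into(),
        secret_access_key: secret.into(),
    }
}

/// Extracts the raw key material only where an API actually requires `&str`.
fn raw_static_keys(auth: &AwsAuthentication) -> Option<(&str, &str)> {
    match auth {
        AwsAuthentication::Static {
            access_key_id,
            secret_access_key,
        } => Some((access_key_id.inner(), secret_access_key.inner())),
        // Other variants (e.g. `File`) carry no inline keys.
        _ => None,
    }
}
```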
4 changes: 2 additions & 2 deletions src/config/enterprise.rs
@@ -478,7 +478,7 @@ fn setup_logs_reporting(

// Create a Datadog logs sink to consume and emit internal logs.
let datadog_logs = DatadogLogsConfig {
default_api_key: api_key,
default_api_key: api_key.into(),
endpoint: datadog.endpoint.clone(),
site: datadog.site.clone(),
region: datadog.region,
@@ -577,7 +577,7 @@ fn setup_metrics_reporting(

// Create a Datadog metrics sink to consume and emit internal + host metrics.
let datadog_metrics = DatadogMetricsConfig {
default_api_key: api_key,
default_api_key: api_key.into(),
endpoint: datadog.endpoint.clone(),
site: datadog.site.clone(),
region: datadog.region,
5 changes: 3 additions & 2 deletions src/gcp.rs
@@ -13,6 +13,7 @@ use once_cell::sync::Lazy;
use smpl_jwt::Jwt;
use snafu::{ResultExt, Snafu};
use tokio::{sync::watch, time::Instant};
use vector_common::sensitive_string::SensitiveString;
use vector_config::configurable_component;

use crate::{config::ProxyConfig, http::HttpClient, http::HttpError};
@@ -71,7 +72,7 @@ pub struct GcpAuthConfig {
/// filename is named, Vector will attempt to fetch an instance service account for the compute instance the program is
/// running on. If Vector is not running on a GCE instance, then you must define either an API key or service account
/// credentials JSON file.
pub api_key: Option<String>,
pub api_key: Option<SensitiveString>,

/// Path to a service account credentials JSON file. ([documentation](https://cloud.google.com/docs/authentication/production#manually))
///
@@ -97,7 +98,7 @@ impl GcpAuthConfig {
let creds_path = self.credentials_path.as_ref().or(gap.as_ref());
match (&creds_path, &self.api_key) {
(Some(path), _) => GcpAuthenticator::from_file(path, scope).await?,
(None, Some(api_key)) => GcpAuthenticator::from_api_key(api_key)?,
(None, Some(api_key)) => GcpAuthenticator::from_api_key(api_key.inner())?,
(None, None) => GcpAuthenticator::new_implicit().await?,
}
})
9 changes: 5 additions & 4 deletions src/http.rs
@@ -16,6 +16,7 @@ use hyper_proxy::ProxyConnector;
use snafu::{ResultExt, Snafu};
use tower::Service;
use tracing::Instrument;
use vector_common::sensitive_string::SensitiveString;
use vector_config::configurable_component;

use crate::{
@@ -243,15 +244,15 @@ pub enum Auth {
user: String,

/// The password to send.
password: String,
password: SensitiveString,
},

/// Bearer authentication.
///
/// A bearer token (OAuth2, JWT, etc) is passed as-is.
Bearer {
/// The bearer token to send.
token: String,
token: SensitiveString,
},
}

@@ -284,10 +285,10 @@ impl Auth {
pub fn apply_headers_map(&self, map: &mut HeaderMap) {
match &self {
Auth::Basic { user, password } => {
let auth = Authorization::basic(user, password);
let auth = Authorization::basic(user.as_str(), password.inner());
map.typed_insert(auth);
}
Auth::Bearer { token } => match Authorization::bearer(token) {
Auth::Bearer { token } => match Authorization::bearer(token.inner()) {
Ok(auth) => map.typed_insert(auth),
Err(error) => error!(message = "Invalid bearer token.", token = %token, %error),
},
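One practical effect here: tracing's `%` sigil records a field via `Display`, so the `token = %token` field in the `error!` call above now logs `**REDACTED**` rather than the raw bearer token, while header construction keeps using the real value through `.inner()`. A standalone, illustrative sketch of that distinction (not part of the change):

```rust
use vector_common::sensitive_string::SensitiveString;

fn main() {
    let token = SensitiveString::from("my-bearer-token".to_string());

    // What `%token` records in the `error!` macro: Display output, redacted.
    assert_eq!(token.to_string(), "**REDACTED**");

    // What `Authorization::bearer(token.inner())` receives: the raw value.
    assert_eq!(token.inner(), "my-bearer-token");
}
```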
7 changes: 4 additions & 3 deletions src/kafka.rs
@@ -2,6 +2,7 @@ use std::path::{Path, PathBuf};

use rdkafka::{consumer::ConsumerContext, ClientConfig, ClientContext, Statistics};
use snafu::Snafu;
use vector_common::sensitive_string::SensitiveString;
use vector_config::configurable_component;

use crate::{internal_events::KafkaStatisticsReceived, tls::TlsEnableableConfig};
@@ -65,7 +66,7 @@ pub struct KafkaSaslConfig {
pub(crate) username: Option<String>,

/// The SASL password.
pub(crate) password: Option<String>,
pub(crate) password: Option<SensitiveString>,

/// The SASL mechanism to use.
pub(crate) mechanism: Option<String>,
@@ -87,10 +88,10 @@ impl KafkaAuthConfig {
if sasl_enabled {
let sasl = self.sasl.as_ref().unwrap();
if let Some(username) = &sasl.username {
client.set("sasl.username", username);
client.set("sasl.username", username.as_str());
}
if let Some(password) = &sasl.password {
client.set("sasl.password", password);
client.set("sasl.password", password.inner());
}
if let Some(mechanism) = &sasl.mechanism {
client.set("sasl.mechanism", mechanism);
18 changes: 12 additions & 6 deletions src/nats.rs
@@ -1,5 +1,6 @@
use nkeys::error::Error as NKeysError;
use snafu::{ResultExt, Snafu};
use vector_common::sensitive_string::SensitiveString;
use vector_config::configurable_component;

use crate::tls::TlsEnableableConfig;
@@ -70,7 +71,7 @@ pub(crate) struct NatsAuthUserPassword {
pub(crate) user: String,

/// Password.
pub(crate) password: String,
pub(crate) password: SensitiveString,
}

/// Token configuration.
@@ -79,7 +80,7 @@ pub(crate) struct NatsAuthUserPassword {
#[serde(deny_unknown_fields)]
pub(crate) struct NatsAuthToken {
/// Token.
pub(crate) value: String,
pub(crate) value: SensitiveString,
}

/// Credentials file configuration.
@@ -110,9 +111,12 @@ pub(crate) struct NatsAuthNKey {
impl NatsAuthConfig {
pub(crate) fn to_nats_options(&self) -> Result<nats::asynk::Options, NatsConfigError> {
match self {
NatsAuthConfig::UserPassword { user_password } => Ok(
nats::asynk::Options::with_user_pass(&user_password.user, &user_password.password),
),
NatsAuthConfig::UserPassword { user_password } => {
Ok(nats::asynk::Options::with_user_pass(
user_password.user.as_str(),
user_password.password.inner(),
))
}
NatsAuthConfig::CredentialsFile { credentials_file } => Ok(
nats::asynk::Options::with_credentials(&credentials_file.path),
),
@@ -126,7 +130,9 @@ impl NatsAuthConfig {
kp.sign(nonce).unwrap()
})
}),
NatsAuthConfig::Token { token } => Ok(nats::asynk::Options::with_token(&token.value)),
NatsAuthConfig::Token { token } => {
Ok(nats::asynk::Options::with_token(token.value.inner()))
}
}
}
}
13 changes: 10 additions & 3 deletions src/sinks/apex/mod.rs
@@ -4,6 +4,7 @@ use futures_util::SinkExt;
use http::{Request, StatusCode, Uri};
use hyper::Body;
use serde_json::json;
use vector_common::sensitive_string::SensitiveString;
use vector_config::configurable_component;

#[cfg(all(test, feature = "apex-integration-tests"))]
@@ -35,7 +36,7 @@ pub struct ApexSinkConfig {
project_id: String,

/// The API token to use to authenticate with Apex.
api_token: String,
api_token: SensitiveString,

#[configurable(derived)]
#[serde(default)]
Expand Down Expand Up @@ -131,7 +132,10 @@ impl HttpSink for ApexSinkConfig {
async fn build_request(&self, events: Self::Output) -> crate::Result<http::Request<Bytes>> {
let uri: Uri = self.uri.append_path("/add_events")?.uri;
let request = Request::post(uri)
.header("Authorization", format!("Bearer {}", &self.api_token))
.header(
"Authorization",
format!("Bearer {}", self.api_token.inner()),
)
.header("Content-Type", "application/json");

let full_body_string = json!({
@@ -149,7 +153,10 @@ impl HttpSink for ApexSinkConfig {
async fn healthcheck(config: ApexSinkConfig, client: HttpClient) -> crate::Result<()> {
let uri = config.uri.with_default_parts();
let request = Request::head(&uri.uri)
.header("Authorization", format!("Bearer {}", &config.api_token))
.header(
"Authorization",
format!("Bearer {}", config.api_token.inner()),
)
.body(Body::empty())
.unwrap();
let response = client.send(request).await?;
5 changes: 3 additions & 2 deletions src/sinks/axiom.rs
@@ -1,5 +1,6 @@
use std::collections::HashMap;

use vector_common::sensitive_string::SensitiveString;
use vector_config::configurable_component;

use crate::{
@@ -33,7 +34,7 @@ pub struct AxiomConfig {
org_id: Option<String>,

/// The Axiom API token.
token: String,
token: SensitiveString,

/// The Axiom dataset to write to.
dataset: String,
@@ -335,7 +336,7 @@ mod integration_tests {

let config = AxiomConfig {
url: Some(url.clone()),
token: token.clone(),
token: token.clone().into(),
dataset: dataset.clone(),
..Default::default()
};
