Skip to content

Commit

Permalink
Merge branch 'main' into vianney/inplace-trace-normalization
Browse files Browse the repository at this point in the history
  • Loading branch information
VianneyRuhlmann authored Jul 4, 2024
2 parents a76f22f + ce2afd9 commit ab18944
Show file tree
Hide file tree
Showing 11 changed files with 149 additions and 56 deletions.
11 changes: 6 additions & 5 deletions bin_tests/tests/crashtracker_bin_test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -226,12 +226,13 @@ fn setup_test_fixtures<'a>(crates: &[&'a ArtifactsBuild]) -> TestFixtures<'a> {
let artifacts = build_artifacts(crates).unwrap();

let tmpdir = tempfile::TempDir::new().unwrap();
let dirpath = tmpdir.path();
TestFixtures {
crash_profile_path: extend_path(tmpdir.path(), "crash"),
crash_telemetry_path: extend_path(tmpdir.path(), "crash.telemetry"),
stdout_path: extend_path(tmpdir.path(), "out.stdout"),
stderr_path: extend_path(tmpdir.path(), "out.stderr"),
unix_socket_path: extend_path(tmpdir.path(), "crashtracker.socket"),
crash_profile_path: extend_path(dirpath, "crash"),
crash_telemetry_path: extend_path(dirpath, "crash.telemetry"),
stdout_path: extend_path(dirpath, "out.stdout"),
stderr_path: extend_path(dirpath, "out.stderr"),
unix_socket_path: extend_path(dirpath, "crashtracker.socket"),

artifacts,
tmpdir,
Expand Down
18 changes: 8 additions & 10 deletions crashtracker/src/crash_info.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ use chrono::{DateTime, Utc};
use ddcommon::tag::Tag;
use serde::{Deserialize, Serialize};
use std::io::BufRead;
use std::path::Path;
use std::{collections::HashMap, fs::File, io::BufReader};
use uuid::Uuid;

Expand Down Expand Up @@ -241,24 +242,21 @@ impl CrashInfo {
/// Emit the CrashInfo as structured json in file `path`.
/// SIGNAL SAFETY:
/// I believe but have not verified this is signal safe.
pub fn to_file(&self, path: &str) -> anyhow::Result<()> {
let file = File::create(path).with_context(|| format!("Failed to create {path}"))?;
pub fn to_file(&self, path: &Path) -> anyhow::Result<()> {
let file =
File::create(path).with_context(|| format!("Failed to create {}", path.display()))?;
serde_json::to_writer_pretty(file, self)
.with_context(|| format!("Failed to write json to {path}"))?;
.with_context(|| format!("Failed to write json to {}", path.display()))?;
Ok(())
}

pub fn upload_to_endpoint(&self, config: &CrashtrackerConfiguration) -> anyhow::Result<()> {
// If we're debugging to a file, dump the actual crashinfo into a json
if let Some(endpoint) = &config.endpoint {
if Some("file") == endpoint.url.scheme_str() {
self.to_file(
endpoint
.url
.path_and_query()
.ok_or_else(|| anyhow::format_err!("empty path for upload to file"))?
.as_str(),
)?;
let path = ddcommon::decode_uri_path_in_authority(&endpoint.url)
.context("crash output file was not correctly formatted")?;
self.to_file(&path)?;
}
}
self.upload_to_telemetry(config)
Expand Down
6 changes: 4 additions & 2 deletions crashtracker/src/telemetry.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ use std::fmt::Write;
use std::time::{self, SystemTime};

use super::{CrashInfo, CrashtrackerConfiguration, CrashtrackerMetadata, StackFrame};
use anyhow::Ok;
use anyhow::{Context, Ok};
use ddtelemetry::{
build_host,
data::{self, Application, LogLevel},
Expand Down Expand Up @@ -70,7 +70,9 @@ impl TelemetryCrashUploader {

// ignore result because what are we going to do?
let _ = if endpoint.url.scheme_str() == Some("file") {
cfg.set_host_from_url(&format!("file://{}.telemetry", endpoint.url.path()))
let path = ddcommon::decode_uri_path_in_authority(&endpoint.url)
.context("file path is not valid")?;
cfg.set_host_from_url(&format!("file://{}.telemetry", path.display()))
} else {
cfg.set_endpoint(endpoint.clone())
};
Expand Down
25 changes: 25 additions & 0 deletions ddcommon-ffi/src/endpoint.rs
Original file line number Diff line number Diff line change
Expand Up @@ -51,3 +51,28 @@ pub extern "C" fn ddog_endpoint_from_api_key_and_site(

#[no_mangle]
pub extern "C" fn ddog_endpoint_drop(_: Box<Endpoint>) {}

#[cfg(test)]
mod tests {
    use super::ddog_endpoint_from_url;
    use crate::CharSlice;

    /// Verifies that endpoint construction accepts well-formed URLs and
    /// rejects empty or malformed ones.
    #[test]
    fn test_ddog_endpoint_from_url() {
        let parses = |url: &str| ddog_endpoint_from_url(CharSlice::from(url)).is_some();

        // Inputs expected to be rejected.
        assert!(!parses(""));
        assert!(!parses("http:// /hey"));
        assert!(!parses("file://"));

        // Inputs expected to be accepted, including file paths with
        // spaces and non-ASCII characters.
        assert!(parses("http://localhost:8383/hello"));
        assert!(parses("file:/// file / with/weird chars 🤡"));
        assert!(parses("file://./"));
        assert!(parses("unix://./"));
    }
}
55 changes: 54 additions & 1 deletion ddcommon/src/azure_app_services.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ const INSTANCE_NAME: &str = "COMPUTERNAME";
const INSTANCE_ID: &str = "WEBSITE_INSTANCE_ID";
const SERVICE_CONTEXT: &str = "DD_AZURE_APP_SERVICES";
const FUNCTIONS_WORKER_RUNTIME: &str = "FUNCTIONS_WORKER_RUNTIME";
const FUNCTIONS_WORKER_RUNTIME_VERSION: &str = "FUNCTIONS_WORKER_RUNTIME_VERSION";
const FUNCTIONS_EXTENSION_VERSION: &str = "FUNCTIONS_EXTENSION_VERSION";

const UNKNOWN_VALUE: &str = "unknown";
Expand All @@ -25,7 +26,10 @@ enum AzureContext {

macro_rules! get_trimmed_env_var {
($name:expr) => {
env::var($name).ok().map(|v| v.trim().to_string())
env::var($name)
.ok()
.map(|v| v.trim().to_string())
.filter(|s| !s.is_empty())
};
}

Expand Down Expand Up @@ -72,6 +76,9 @@ pub struct AzureMetadata {
instance_id: Option<String>,
site_kind: String,
site_type: String,
runtime: Option<String>,
runtime_version: Option<String>,
function_runtime_version: Option<String>,
}

impl AzureMetadata {
Expand Down Expand Up @@ -146,6 +153,10 @@ impl AzureMetadata {
let instance_name = query.get_var(INSTANCE_NAME);
let instance_id = query.get_var(INSTANCE_ID);

let runtime = query.get_var(FUNCTIONS_WORKER_RUNTIME);
let runtime_version = query.get_var(FUNCTIONS_WORKER_RUNTIME_VERSION);
let function_runtime_version = query.get_var(FUNCTIONS_EXTENSION_VERSION);

Some(AzureMetadata {
resource_id,
subscription_id,
Expand All @@ -157,6 +168,9 @@ impl AzureMetadata {
instance_id,
site_kind,
site_type,
runtime,
runtime_version,
function_runtime_version,
})
}

Expand Down Expand Up @@ -222,6 +236,18 @@ impl AzureMetadata {
pub fn get_site_kind(&self) -> &str {
self.site_kind.as_str()
}

    /// Returns the Azure Functions worker runtime (from the
    /// FUNCTIONS_WORKER_RUNTIME env var captured at construction).
    // NOTE(review): `get_value_or_unknown!` is defined elsewhere in this file;
    // presumably it falls back to UNKNOWN_VALUE ("unknown") when the field is
    // None — confirm against the macro definition.
    pub fn get_runtime(&self) -> &str {
        get_value_or_unknown!(self.runtime)
    }

    /// Returns the Azure Functions worker runtime version (from the
    /// FUNCTIONS_WORKER_RUNTIME_VERSION env var captured at construction).
    // NOTE(review): `get_value_or_unknown!` is defined elsewhere in this file;
    // presumably it falls back to UNKNOWN_VALUE ("unknown") when the field is
    // None — confirm against the macro definition.
    pub fn get_runtime_version(&self) -> &str {
        get_value_or_unknown!(self.runtime_version)
    }

    /// Returns the Azure Functions host/extension version (from the
    /// FUNCTIONS_EXTENSION_VERSION env var captured at construction).
    // NOTE(review): `get_value_or_unknown!` is defined elsewhere in this file;
    // presumably it falls back to UNKNOWN_VALUE ("unknown") when the field is
    // None — confirm against the macro definition.
    pub fn get_function_runtime_version(&self) -> &str {
        get_value_or_unknown!(self.function_runtime_version)
    }
}

pub fn get_metadata() -> &'static Option<AzureMetadata> {
Expand Down Expand Up @@ -583,6 +609,9 @@ mod tests {
let expected_operating_system = "FreeBSD".to_owned();
let expected_instance_name = "my_instance_name".to_owned();
let expected_instance_id = "my_instance_id".to_owned();
let expected_function_extension_version = "~4".to_owned();
let expected_runtime = "node".to_owned();
let expected_runtime_version = "18".to_owned();

let mocked_env = MockEnv::new(&[
(WEBSITE_SITE_NAME, expected_site_name.as_str()),
Expand All @@ -592,6 +621,15 @@ mod tests {
(INSTANCE_NAME, expected_instance_name.as_str()),
(INSTANCE_ID, expected_instance_id.as_str()),
(SERVICE_CONTEXT, "1"),
(
FUNCTIONS_EXTENSION_VERSION,
expected_function_extension_version.as_str(),
),
(FUNCTIONS_WORKER_RUNTIME, expected_runtime.as_str()),
(
FUNCTIONS_WORKER_RUNTIME_VERSION,
expected_runtime_version.as_str(),
),
]);

let metadata = AzureMetadata::new(mocked_env).unwrap();
Expand All @@ -602,5 +640,20 @@ mod tests {
assert_eq!(expected_operating_system, metadata.get_operating_system());
assert_eq!(expected_instance_name, metadata.get_instance_name());
assert_eq!(expected_instance_id, metadata.get_instance_id());
assert_eq!(
expected_function_extension_version,
metadata.get_function_runtime_version()
);
assert_eq!(expected_runtime, metadata.get_runtime());
assert_eq!(expected_runtime_version, metadata.get_runtime_version());
}

#[test]
fn test_get_trimmed_env_var_empty_string() {
env::remove_var("TEST_VAR_NONE");
assert_eq!(get_trimmed_env_var!("TEST_VAR_NONE"), None);

env::set_var("TEST_VAR_EMPTY_STRING", "");
assert_eq!(get_trimmed_env_var!("TEST_VAR_EMPTY_STRING"), None);
}
}
34 changes: 22 additions & 12 deletions ddcommon/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
// Copyright 2021-Present Datadog, Inc. https://www.datadoghq.com/
// SPDX-License-Identifier: Apache-2.0

use std::{borrow::Cow, ops::Deref, str::FromStr};
use std::{borrow::Cow, ops::Deref, path::PathBuf, str::FromStr};

use hyper::{
header::HeaderValue,
Expand Down Expand Up @@ -95,17 +95,7 @@ pub fn parse_uri(uri: &str) -> anyhow::Result<hyper::Uri> {
} else if let Some(path) = uri.strip_prefix("windows:") {
encode_uri_path_in_authority("windows", path)
} else if let Some(path) = uri.strip_prefix("file://") {
let mut parts = uri::Parts::default();
parts.scheme = uri::Scheme::from_str("file").ok();
parts.authority = Some(uri::Authority::from_static("localhost"));

// TODO: handle edge cases like improperly escaped url strings
//
// this is eventually user configurable field
// anything we can do to ensure invalid input becomes valid - will improve usability
parts.path_and_query = uri::PathAndQuery::from_str(path).ok();

Ok(hyper::Uri::from_parts(parts)?)
encode_uri_path_in_authority("file", path)
} else {
Ok(hyper::Uri::from_str(uri)?)
}
Expand All @@ -122,6 +112,26 @@ fn encode_uri_path_in_authority(scheme: &str, path: &str) -> anyhow::Result<hype
Ok(hyper::Uri::from_parts(parts)?)
}

/// Decode a filesystem path that was hex-encoded into the authority
/// component of a `file://`-style URI (the inverse of
/// `encode_uri_path_in_authority`).
///
/// On unix the decoded bytes are used directly as an `OsString`, since unix
/// paths are arbitrary byte sequences; on other platforms the bytes must be
/// valid UTF-8.
///
/// # Errors
/// Fails when the URI has no authority component, when the authority is not
/// valid hex, or (non-unix only) when the decoded bytes are not valid UTF-8.
pub fn decode_uri_path_in_authority(uri: &hyper::Uri) -> anyhow::Result<PathBuf> {
    let path = hex::decode(
        uri.authority()
            .ok_or_else(|| anyhow::anyhow!("missing uri authority"))?
            .as_str(),
    )?;
    #[cfg(unix)]
    {
        use std::os::unix::ffi::OsStringExt;
        Ok(PathBuf::from(std::ffi::OsString::from_vec(path)))
    }
    #[cfg(not(unix))]
    {
        // Build the PathBuf from the owned String directly to avoid the
        // extra allocation the previous `PathBuf::from(s.as_str())` incurred.
        String::from_utf8(path)
            .map(PathBuf::from)
            .map_err(|_| anyhow::anyhow!("file uri should be utf-8"))
    }
}

impl Endpoint {
/// Return a request builder with the following headers:
/// - User agent
Expand Down
17 changes: 8 additions & 9 deletions ddtelemetry/src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -267,6 +267,8 @@ impl Config {

#[cfg(test)]
mod tests {
use std::path::Path;

#[cfg(unix)]
use ddcommon::connector::uds;

Expand Down Expand Up @@ -388,6 +390,10 @@ mod tests {
("file:///absolute/path", "/absolute/path"),
("file://./relative/path", "./relative/path"),
("file://relative/path", "relative/path"),
(
"file://c://temp//with space\\foo.json",
"c://temp//with space\\foo.json",
),
];

for (input, expected) in cases {
Expand All @@ -405,15 +411,8 @@ mod tests {
.to_string()
);
assert_eq!(
expected,
cfg.clone()
.endpoint
.unwrap()
.url
.into_parts()
.path_and_query
.unwrap()
.as_str()
Path::new(expected),
ddcommon::decode_uri_path_in_authority(&cfg.endpoint.unwrap().url).unwrap(),
);
}
}
Expand Down
18 changes: 7 additions & 11 deletions ddtelemetry/src/worker/http_client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
// SPDX-License-Identifier: Apache-2.0

use ddcommon::HttpRequestBuilder;
use http::uri::Parts;
use http::{Request, Response};
use hyper::Body;
use std::{
Expand Down Expand Up @@ -41,21 +40,18 @@ pub fn request_builder(c: &Config) -> anyhow::Result<HttpRequestBuilder> {
}

pub fn from_config(c: &Config) -> Box<dyn HttpClient + Sync + Send> {
if let Some(Parts {
scheme: Some(scheme),
path_and_query: Some(path),
..
}) = c.endpoint.as_ref().map(|e| e.url.clone().into_parts())
{
if scheme.as_str() == "file" {
match &c.endpoint {
Some(e) if e.url.scheme_str() == Some("file") => {
let file_path = ddcommon::decode_uri_path_in_authority(&e.url)
.expect("file urls should always have been encoded in authority");
return Box::new(MockClient {
file: Arc::new(Mutex::new(Box::new(
File::create(path.path()).expect("Couldn't open mock client file"),
File::create(file_path).expect("Couldn't open mock client file"),
))),
});
}
}

Some(_) | None => {}
};
Box::new(HyperClient {
inner: hyper::Client::builder()
.pool_idle_timeout(std::time::Duration::from_secs(30))
Expand Down
10 changes: 5 additions & 5 deletions ruby/Rakefile
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ require "rubygems/package"

RSpec::Core::RakeTask.new(:spec)

LIB_VERSION_TO_PACKAGE = "9.0.0"
LIB_VERSION_TO_PACKAGE = "10.0.0"
unless LIB_VERSION_TO_PACKAGE.start_with?(Libdatadog::LIB_VERSION)
raise "`LIB_VERSION_TO_PACKAGE` setting in <Rakefile> (#{LIB_VERSION_TO_PACKAGE}) does not match " \
"`LIB_VERSION` setting in <lib/libdatadog/version.rb> (#{Libdatadog::LIB_VERSION})"
Expand All @@ -20,22 +20,22 @@ end
LIB_GITHUB_RELEASES = [
{
file: "libdatadog-aarch64-alpine-linux-musl.tar.gz",
sha256: "c2c6c32f612e8c1682131e72bd50492d809bac973f516e56e163e797435eef75",
sha256: "61249c5a2a3c4c80e6f54a24251b5035a49123b1664d28cc21645fa8c7271432",
ruby_platform: "aarch64-linux-musl"
},
{
file: "libdatadog-aarch64-unknown-linux-gnu.tar.gz",
sha256: "97c4fc46f92580b8929e8fcc3f51b47226836e29bce0b57ac8d3387a27a81ce1",
sha256: "14df33b816e12533b95bad64ae0df049bb1fce6b4dc0fe7df4add6ce3ce531e7",
ruby_platform: "aarch64-linux"
},
{
file: "libdatadog-x86_64-alpine-linux-musl.tar.gz",
sha256: "68e67c5e87616f830289bc85626d2062277bef54694cc6dbb445105c66fe8885",
sha256: "7c5dcf51fec39c7fc0cfca47ee1788630e15682f0a5f9580e94518163300f221",
ruby_platform: "x86_64-linux-musl"
},
{
file: "libdatadog-x86_64-unknown-linux-gnu.tar.gz",
sha256: "cd89cbe480db0b828a43afd161ddd83e57319dbe3d412fa4a2d096daae244595",
sha256: "ec3a8582f8be34edd3b9b89aed7d0642645b41f8e7c9d5b4d1d6ecdcaa8f31f0",
ruby_platform: "x86_64-linux"
}
]
Expand Down
2 changes: 1 addition & 1 deletion ruby/lib/libdatadog/version.rb
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

module Libdatadog
# Current libdatadog version
LIB_VERSION = "9.0.0"
LIB_VERSION = "10.0.0"

GEM_MAJOR_VERSION = "1"
GEM_MINOR_VERSION = "0"
Expand Down
Loading

0 comments on commit ab18944

Please sign in to comment.