Merge branch 'main' into dsn/crashtracker-common-endpoint
danielsn authored Jul 25, 2024
2 parents de4bfb6 + 32d5eae commit 370f8d1
Showing 18 changed files with 46 additions and 16 deletions.
1 change: 1 addition & 0 deletions crashtracker/src/receiver.rs
@@ -232,6 +232,7 @@ enum CrashReportStatus {
/// Listens to `stream`, reading it line by line, until
/// 1. A crash-report is received, in which case it is processed for upload
/// 2. `stdin` closes without a crash report (i.e. if the parent terminated normally)
///
/// In the case where the parent failed to transfer a full crash-report
/// (for instance if it crashed while calculating the crash-report), we return
/// a PartialCrashReport.
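Note: the doc comment above describes a small line-oriented protocol. The sketch below is illustrative only (the delimiter strings, type names, and `receive_report` helper are hypothetical, not the crate's actual API); it simply mirrors the documented behavior of reading until a full report arrives or the stream closes.

use std::io::{BufRead, BufReader, Read};

enum Outcome {
    NoCrash,            // stream closed before any report was started
    PartialCrashReport, // parent died while sending the report
    CrashReport(String),
}

fn receive_report<R: Read>(stream: R) -> std::io::Result<Outcome> {
    let mut report = String::new();
    let mut in_report = false;
    for line in BufReader::new(stream).lines() {
        let line = line?;
        match line.as_str() {
            "BEGIN_REPORT" => in_report = true, // hypothetical delimiter
            "END_REPORT" => return Ok(Outcome::CrashReport(report)),
            _ if in_report => report.push_str(&line),
            _ => {}
        }
    }
    // The stream closed: either nothing was sent, or the report was cut short.
    Ok(if in_report {
        Outcome::PartialCrashReport
    } else {
        Outcome::NoCrash
    })
}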
1 change: 1 addition & 0 deletions ddcommon/src/tag.rs
@@ -14,6 +14,7 @@ pub struct Tag {
/// - "language:native"
/// - "src_library:libdatadog"
/// - "type:timeout"
///
/// So being able to save allocations is nice.
value: Cow<'static, str>,
}
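The `Cow<'static, str>` value mentioned above is what makes the allocation savings possible: well-known tag strings can be borrowed as 'static literals, while runtime-built values are stored owned in the same field. A minimal standalone sketch (not the `Tag` API itself):

use std::borrow::Cow;

fn main() {
    // Well-known tag values borrow a 'static literal: no heap allocation.
    let constant: Cow<'static, str> = Cow::Borrowed("language:native");
    // Values computed at runtime are stored as an owned String in the same field.
    let dynamic: Cow<'static, str> = Cow::Owned(format!("pid:{}", std::process::id()));
    assert!(matches!(constant, Cow::Borrowed(_)));
    assert!(matches!(dynamic, Cow::Owned(_)));
}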
3 changes: 1 addition & 2 deletions ddsketch/src/lib.rs
@@ -21,8 +21,7 @@ pub mod pb;
///
/// This implementation only supports a part of the standard (which is also only the parts dd
/// backend supports :shrug:)
/// - max length contiguous bin store, with lower bin
/// collapse behavior.
/// - max length contiguous bin store, with lower bin collapse behavior.
/// - Positive or zero values
///
/// The default sketch has a 1% relative accuracy, and only accepts positive points
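For context, the 1% relative accuracy comes from the standard DDSketch logarithmic mapping. The snippet below is a generic worked example of that mapping, not this crate's API; the representative-value formula is one common choice.

fn main() {
    // With relative accuracy alpha, buckets are powers of gamma = (1+alpha)/(1-alpha).
    let alpha = 0.01_f64; // 1% relative accuracy, as in the default sketch
    let gamma = (1.0 + alpha) / (1.0 - alpha);

    // A positive value maps to bucket ceil(log_gamma(v)).
    let v = 1234.5_f64;
    let bucket = (v.ln() / gamma.ln()).ceil() as i32;

    // Reporting the bucket's midpoint-style representative keeps the error within alpha * v.
    let estimate = 2.0 * gamma.powi(bucket) / (gamma + 1.0);
    assert!((estimate - v).abs() <= alpha * v);
}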
3 changes: 3 additions & 0 deletions ipc/Cargo.toml
@@ -59,3 +59,6 @@ bench = false
harness = false
name = "ipc"
path = "benches/ipc.rs"

[lints.rust]
unexpected_cfgs = { level = "warn", check-cfg = ['cfg(polyfill_glibc_memfd)'] }
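The `[lints.rust]` table added above declares `polyfill_glibc_memfd` as an expected custom cfg, so rustc's `unexpected_cfgs` lint will not warn on code gated behind it while still flagging typos. Illustrative Rust only; the function name is hypothetical and the cfg is assumed to be set elsewhere, for example by a build script:

// Compiled only when the build emits `--cfg polyfill_glibc_memfd`.
#[cfg(polyfill_glibc_memfd)]
fn memfd_create() -> i32 {
    // polyfill path (hypothetical body)
    -1
}

#[cfg(not(polyfill_glibc_memfd))]
fn memfd_create() -> i32 {
    // default path (hypothetical body)
    0
}

fn main() {
    let _ = memfd_create();
}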
1 change: 1 addition & 0 deletions ipc/src/platform/mem_handle.rs
@@ -15,6 +15,7 @@ pub struct ShmHandle {
}

#[derive(Debug)]
#[allow(dead_code)]
pub struct AnonHandle {
pub(crate) size: usize,
}
2 changes: 1 addition & 1 deletion ipc/tarpc/tarpc/src/client.rs
@@ -133,7 +133,7 @@ impl<Req, Resp> Channel<Req, Resp> {
);
ctx.trace_context.new_child()
});
span.record("rpc.trace_id", &tracing::field::display(ctx.trace_id()));
span.record("rpc.trace_id", tracing::field::display(ctx.trace_id()));
let (response_completion, mut response) = oneshot::channel();
let request_id =
u64::try_from(self.next_request_id.fetch_add(1, Ordering::Relaxed)).unwrap();
3 changes: 2 additions & 1 deletion ipc/tarpc/tarpc/src/serde_transport.rs
@@ -631,10 +631,11 @@ mod tests {
);
}

#[cfg(tcp)]
#[cfg(feature = "tcp")]
#[tokio::test]
async fn tcp() -> io::Result<()> {
use super::tcp;
use futures::{SinkExt, StreamExt};

let mut listener = tcp::listen("0.0.0.0:0", SymmetricalJson::<String>::default).await?;
let addr = listener.local_addr();
3 changes: 2 additions & 1 deletion profiling/src/collections/string_table/mod.rs
@@ -236,11 +236,12 @@ mod tests {
/// This is a fuzz test for the allocation optimized `StringTable`.
/// It checks both safety (lack of crashes / sanitizer failures),
/// as well as functional correctness (the table should behave like an
/// ordered set).
/// ordered set).
/// Limitations:
/// - The crate used here to generate Strings internally has a default range for the length of
/// a string, (0..=64) We should experiment with longer strings to see what happens. https://github.com/camshaft/bolero/blob/f401669697ffcbe7f34cbfd09fd57b93d5df734c/lib/bolero-generator/src/alloc/mod.rs#L17
/// - Since iterating is destructive, can only check the string values once.
///
/// `cargo +nightly bolero test collections::string_table::tests::fuzz_string_table -T 1min`
#[test]
fn fuzz_string_table() {
@@ -5,6 +5,7 @@
/// profiler version generated it. It was made from setting up a WordPress
/// demo app of some kind. It's extracted from this file (relative to root):
/// * `profiling/tests/wordpress.pprof.lz4`
///
/// For various tests such as using MIRI, it's too slow to decompress, open,
/// parse, and extract the strings on-demand.
pub const WORDPRESS_STRINGS: [&str; 1059] = [
1 change: 1 addition & 0 deletions profiling/src/internal/profile/mod.rs
@@ -142,6 +142,7 @@ impl Profile {
/// - "" (the empty string)
/// - "local root span id"
/// - "trace endpoint"
///
/// All other fields are default.
#[inline]
pub fn new(
1 change: 1 addition & 0 deletions profiling/src/pprof/sliced_proto.rs
@@ -40,6 +40,7 @@
//! 1. repeatedly emitting a sliced message with a "required" field,
//! 2. repeatedly emitting a sliced message using a "repeated" field,
//! 3. Emitting once the message with the repeated field containing all values.
//!
//! In other words, we get the same bytes from "required" as "repeated", but
//! with fewer allocations (since we don't need a `Vec` for the elements).
//!
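The equivalence the module doc relies on is a protobuf wire-format property: a repeated length-delimited field is encoded simply as its tag repeated once per element, so emitting elements one at a time produces the same bytes as emitting them all in one repeated field. A small generic illustration (not this module's code):

fn main() {
    // A protobuf tag is (field_number << 3) | wire_type; wire type 2 is
    // "length-delimited", used for nested messages, strings and bytes.
    // (For field numbers up to 15 the tag fits in a single byte.)
    let field_number: u32 = 1; // hypothetical field number
    let wire_type_len_delimited: u32 = 2;
    let tag = (field_number << 3) | wire_type_len_delimited;

    // Every element of a repeated length-delimited field starts with this same
    // tag, whether the elements are written separately or all at once.
    assert_eq!(tag, 0x0A);
}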
8 changes: 7 additions & 1 deletion sidecar-ffi/Cargo.toml
@@ -23,4 +23,10 @@ libc = "0.2"

[dev-dependencies]
hyper = { version = "0.14", default-features = false }
tempfile = {version = "3.3"}
tempfile = { version = "3.3" }

[lints.rust]
unexpected_cfgs = { level = "warn", check-cfg = [
"cfg(coverage_nightly)",
'cfg(feature, values("prefer_dynamic"))',
] }
15 changes: 13 additions & 2 deletions sidecar/Cargo.toml
@@ -44,7 +44,13 @@ rmp-serde = "1.1.1"
spawn_worker = { path = "../spawn_worker" }
zwohash = "0.1.2"
sys-info = { version = "0.9.0" }
tokio = { version = "1.35.1", features = ["fs", "sync", "io-util", "signal", "rt-multi-thread"] }
tokio = { version = "1.35.1", features = [
"fs",
"sync",
"io-util",
"signal",
"rt-multi-thread",
] }
tokio-util = { version = "0.7", features = ["codec"] }

prctl = "1.0.0"
@@ -71,7 +77,7 @@ features = [
"Win32_Foundation",
"Wdk_Storage_FileSystem",
"Win32_System_IO",
"Win32_System_WindowsProgramming"
"Win32_System_WindowsProgramming",
]
version = "0.51.0"

@@ -95,3 +101,8 @@ tempfile = { version = "3.3" }
httpmock = "0.7.0"
datadog-trace-utils = { path = "../trace-utils", features = ["test-utils"] }

[lints.rust]
unexpected_cfgs = { level = "warn", check-cfg = [
'cfg(tokio_taskdump,windows_seh_wrapper)',
'cfg(feature, values("logging"))',
] }
1 change: 1 addition & 0 deletions sidecar/src/service/tracing/trace_flusher.rs
@@ -146,6 +146,7 @@ impl TraceFlusher {
///
/// * A `Result` which is `Ok` if the flusher task successfully joins, or `Err` if the flusher
/// task panics.
///
/// If the flusher task is not running, it returns `Ok`.
pub(crate) async fn join(&self) -> anyhow::Result<(), JoinError> {
let flusher = {
4 changes: 4 additions & 0 deletions spawn_worker/Cargo.toml
@@ -39,3 +39,7 @@ tempfile = { version = "3.3" }

[target.'cfg(not(windows))'.dev-dependencies]
rlimit = {version = "0.9"}

[lints.rust]
unexpected_cfgs = { level = "warn", check-cfg = ['cfg(coverage,coverage_nightly)'] }

6 changes: 3 additions & 3 deletions tests/spawn_from_lib/tests/spawn.rs
@@ -3,10 +3,10 @@
// #![cfg(feature = "prefer-dynamic")]
// use test_spawn_from_lib::spawn_self;

#[cfg(feature = "prefer_dynamic")]
#[cfg(feature = "prefer-dynamic")]
use std::io::{Read, Seek};

#[cfg(feature = "prefer_dynamic")]
#[cfg(feature = "prefer-dynamic")]
fn rewind_and_read(file: &mut std::fs::File) -> anyhow::Result<String> {
file.rewind()?;
let mut buf = String::new();
@@ -19,7 +19,7 @@ fn rewind_and_read(file: &mut std::fs::File) -> anyhow::Result<String> {
/// prefer-dynamic -- --ignored
#[test]
#[ignore = "requires -C prefer-dynamic"]
#[cfg(feature = "prefer_dynamic")]
#[cfg(feature = "prefer-dynamic")]
fn test_spawning_trampoline_worker() {
let mut stdout = tempfile::tempfile().unwrap();
let mut stderr = tempfile::tempfile().unwrap();
3 changes: 1 addition & 2 deletions trace-mini-agent/src/http_utils.rs
@@ -11,8 +11,7 @@ use serde_json::json;

/// Does two things:
/// 1. Logs the given message. A success status code (within 200-299) will cause an info log to be
/// written,
/// otherwise error will be written.
/// written, otherwise error will be written.
/// 2. Returns the given message in the body of JSON response with the given status code.
///
/// Response body format:
5 changes: 2 additions & 3 deletions trace-normalization/src/normalizer.rs
@@ -73,9 +73,8 @@ pub fn normalize_trace(trace: &mut [pb::Span]) -> anyhow::Result<()> {

/// normalize_chunk takes a trace chunk and
/// * populates origin field if it wasn't populated
/// * populates priority field if it wasn't populated
/// the root span is used to populate these fields, and it's index in TraceChunk spans vec must be
/// passed.
/// * populates priority field if it wasn't populated the root span is used to populate these
/// fields, and it's index in TraceChunk spans vec must be passed.
pub fn normalize_chunk(chunk: &mut pb::TraceChunk, root_span_index: usize) -> anyhow::Result<()> {
// check if priority is not populated
let root_span = match chunk.spans.get(root_span_index) {
