diff --git a/.github/ISSUE_TEMPLATE/minor-release.md b/.github/ISSUE_TEMPLATE/minor-release.md index 0ea25a30c0432..71ba54de40652 100644 --- a/.github/ISSUE_TEMPLATE/minor-release.md +++ b/.github/ISSUE_TEMPLATE/minor-release.md @@ -41,8 +41,8 @@ On the day of release: - [ ] Merge release preparation branch into the release branch - `git co v0. && git merge --ff-only prepare-v0.` - [ ] Tag new release - - [ ] `git tag v0..0 -a -m v0..0`` - - [ ] `git push origin v0..0 + - [ ] `git tag v0..0 -a -m v0..0` + - [ ] `git push origin v0..0` - [ ] Wait for release workflow to complete - Discoverable via [https://github.com/timberio/vector/actions/workflows/release.yml](https://github.com/timberio/vector/actions/workflows/release.yml) - [ ] Release updated Helm chart. See [releasing Helm chart](https://github.com/vectordotdev/helm-charts#releasing). diff --git a/.github/actions/spelling/allow.txt b/.github/actions/spelling/allow.txt index d9b13fcf742c0..aaa3f302b430a 100644 --- a/.github/actions/spelling/allow.txt +++ b/.github/actions/spelling/allow.txt @@ -36,6 +36,7 @@ Celkon Ceph Chromecast Citrix +Cloudflare Cloudfone Cmx Coby @@ -52,6 +53,7 @@ Enot Evercoss Explay FAQs +FDO FQDNs Fabro Figma @@ -165,6 +167,7 @@ Samsung Sega Segoe Shopify +SIGINTs Simvalley Skype Skytex @@ -244,6 +247,7 @@ compiletime coredns corejs coreutils +curta daemonset databend datacenter @@ -295,6 +299,9 @@ gpg gql grafana graphiql +greptime +greptimecloud +greptimedb gvisor gws hadoop @@ -313,6 +320,7 @@ https humungus icecream ifeq +ifneq imobile influxd ionik @@ -418,6 +426,8 @@ userguide webhdfs winapi workarounds +XCHACHA +XSALSA yandex zeek zookeeper diff --git a/.github/actions/spelling/excludes.txt b/.github/actions/spelling/excludes.txt index fd124acd37b4d..620bf03ad6410 100644 --- a/.github/actions/spelling/excludes.txt +++ b/.github/actions/spelling/excludes.txt @@ -48,6 +48,7 @@ \.otf$ \.p12$ \.pattern$ +\.pb$ \.pdf$ \.pem$ \.png$ @@ -68,11 +69,13 @@ \.zst$ ^\.github/actions/spelling/ ^\Q.cargo/config.toml\E$ -^\Q.github/workflows/spelling.yml\E$ ^\Q.github/workflows/gardener_issue_comment.yml\E$ +^\Q.github/workflows/spelling.yml\E$ ^\Qbenches/codecs/moby_dick.txt\E$ ^\Qbenches/dnstap/mod.rs\E$ ^\Qbenches/transform/route.rs\E$ +^\Qlib/codecs/tests/data/decoding/protobuf/test_protobuf.desc\E$ +^\Qlib/codecs/tests/data/decoding/protobuf/test_protobuf3.desc\E$ ^\Qlib/dnsmsg-parser/benches/benches.rs\E$ ^\Qlib/dnsmsg-parser/src/dns_message_parser.rs\E$ ^\Qlib/lookup/tests/fixtures/lookup/quoted\E$ diff --git a/.github/actions/spelling/expect.txt b/.github/actions/spelling/expect.txt index ed5c241d38b0f..eeae2da48b292 100644 --- a/.github/actions/spelling/expect.txt +++ b/.github/actions/spelling/expect.txt @@ -1,9 +1,7 @@ abcd abcdefghijklm abcdefghijklmnopqrstuvwxyzand -abced abortable -acb ack'ing acking Acq @@ -37,10 +35,8 @@ ARNOTAREALIDD arshiyasolei asdf asdfasdf -ASMS assertverify Asterix -asynk atag atx aty @@ -140,7 +136,6 @@ casttype castvalue cbe CBOR -cbs cddl cdeab cdylib @@ -224,6 +219,7 @@ cwl Dailywarehousing daschl dashmap +datadir dataflows datafuselabs datasources @@ -234,6 +230,7 @@ datname dbkind dbreader DBserver +DCMAKE ddagent ddev ddmetric @@ -304,7 +301,6 @@ ebfcee edenhill edns eeyun -efg efgh Elhage emca @@ -325,7 +321,6 @@ ENVARS envsubst EOIG EOL'ed -Erfxl Err'ing errorf Errorsfor @@ -464,12 +459,12 @@ gty Guangzhou guenter gzip'ed +hadolint halfsies hannes Hashbang hashbrown hashindex -hashlink hashring hashset hashsum @@ -574,7 +569,6 @@ kernelmode keybase keyclock keyid 
-keypair keyxxxxx khvzak kib @@ -778,6 +772,7 @@ NQTP nresamples nullishness numbackends +oahd oap OKD omfwd @@ -821,6 +816,7 @@ pathgen peekable PEMS pgmajfault +pgrep PII Pitbull pkc @@ -873,6 +869,7 @@ qwe raboof rande RANDFILE +ratatui rawconfig rawstring rdkafka @@ -1053,7 +1050,6 @@ supertrait suser sustainability svalue -Sya sysfs sysinit syslogng @@ -1114,7 +1110,6 @@ Tomola tonydanza toolbars toolchains -TOOLSDIRECTORY toolset toor topdir @@ -1201,6 +1196,7 @@ watchexec watchlogs wayfor webgraphviz +webpki webservers websites weee diff --git a/.github/workflows/changes.yml.upstream b/.github/workflows/changes.yml.upstream index db6c4225073e0..653d3395bdfda 100644 --- a/.github/workflows/changes.yml.upstream +++ b/.github/workflows/changes.yml.upstream @@ -70,6 +70,8 @@ on: value: ${{ jobs.int_tests.outputs.fluent }} gcp: value: ${{ jobs.int_tests.outputs.gcp }} + greptimedb: + value: ${{ jobs.int_tests.outputs.greptimedb }} humio: value: ${{ jobs.int_tests.outputs.humio }} http-client: @@ -194,6 +196,7 @@ jobs: eventstoredb: ${{ steps.filter.outputs.eventstoredb }} fluent: ${{ steps.filter.outputs.fluent }} gcp: ${{ steps.filter.outputs.gcp }} + greptimedb: ${{ steps.filter.outputs.greptimedb }} humio: ${{ steps.filter.outputs.humio }} http-client: ${{ steps.filter.outputs.http-client }} influxdb: ${{ steps.filter.outputs.influxdb }} diff --git a/.github/workflows/comment-trigger.yml.upstream b/.github/workflows/comment-trigger.yml.upstream index 84cabd825ccf3..8cb2c3780887e 100644 --- a/.github/workflows/comment-trigger.yml.upstream +++ b/.github/workflows/comment-trigger.yml.upstream @@ -15,6 +15,7 @@ # /ci-run-unit-windows : runs Unit - Windows # /ci-run-environment : runs Environment Suite # /ci-run-regression : runs Regression Detection Suite +# /ci-run-k8s : runs K8s E2E Suite name: Comment Trigger @@ -55,15 +56,22 @@ jobs: || contains(github.event.comment.body, '/ci-run-unit-windows') || contains(github.event.comment.body, '/ci-run-environment') || contains(github.event.comment.body, '/ci-run-regression') + || contains(github.event.comment.body, '/ci-run-k8s') ) steps: + - name: Generate authentication token + id: generate_token + uses: tibdex/github-app-token@b62528385c34dbc9f38e5f4225ac829252d1ea92 + with: + app_id: ${{ secrets.GH_APP_DATADOG_VECTOR_CI_APP_ID }} + private_key: ${{ secrets.GH_APP_DATADOG_VECTOR_CI_APP_PRIVATE_KEY }} - name: Get PR comment author id: comment uses: tspascoal/get-user-teams-membership@v2 with: username: ${{ github.actor }} team: 'Vector' - GITHUB_TOKEN: ${{ secrets.GH_PAT_ORG }} + GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }} - name: Validate author membership if: steps.comment.outputs.isTeamMember == 'false' @@ -116,3 +124,9 @@ jobs: if: contains(github.event.comment.body, '/ci-run-all') || contains(github.event.comment.body, '/ci-run-regression') uses: ./.github/workflows/regression.yml secrets: inherit + + k8s: + needs: validate + if: contains(github.event.comment.body, '/ci-run-all') || contains(github.event.comment.body, '/ci-run-k8s') + uses: ./.github/workflows/k8s_e2e.yml + secrets: inherit diff --git a/.github/workflows/environment.yml.upstream b/.github/workflows/environment.yml.upstream index cdddb0a980db8..7aa76a5cc2bbb 100644 --- a/.github/workflows/environment.yml.upstream +++ b/.github/workflows/environment.yml.upstream @@ -42,7 +42,7 @@ jobs: - name: Set up QEMU uses: docker/setup-qemu-action@v2.2.0 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2.8.0 + uses: docker/setup-buildx-action@v2.9.1 - 
name: Login to DockerHub uses: docker/login-action@v2.1.0 if: github.ref == 'refs/heads/master' diff --git a/.github/workflows/gardener_issue_comment.yml.upstream b/.github/workflows/gardener_issue_comment.yml.upstream index 8ea89fb315145..f25dc9da5e02a 100644 --- a/.github/workflows/gardener_issue_comment.yml.upstream +++ b/.github/workflows/gardener_issue_comment.yml.upstream @@ -20,50 +20,74 @@ jobs: GH_TOKEN: ${{ secrets.GH_PROJECT_PAT }} run: | issue_id=${{ github.event.issue.node_id }} + echo "issue_id: $issue_id" # IDs fetched from https://docs.github.com/en/graphql/overview/explorer project_id="PVT_kwDOAQFeYs4AAsTr" # Gardener status_field_id="PVTF_lADOAQFeYs4AAsTrzgAXRuU" # Status triage_option_id="2a08fafa" - # ensures that the issue is already on board but also seems to be the only way to fetch - # the item id - item_id="$(gh api graphql -f query=' - mutation($project_id: ID!, $content_id: ID!) { - addProjectV2ItemById(input: {projectId: $project_id, contentId: $content_id}) { - item { - id - } - } - }' -f project_id="$project_id" -f content_id="$issue_id" -q '.data.addProjectV2ItemById.item.id' - )" - - echo "item_id: $item_id" - - if [ -z "$item_id" ] ; then - echo "Issue not found in Gardener board" - exit 0 - else - echo "Found issue on Gardener board" - fi - - current_status="$(gh api graphql -f query=' + # Query for project items for the given issue + project_items="$(gh api graphql -f query=' query($item_id: ID!) { node(id: $item_id) { - ... on ProjectV2Item { - fieldValueByName(name: "Status") { - ... on ProjectV2ItemFieldSingleSelectValue { - name + ... on Issue { + projectItems(first: 50) { + ... on ProjectV2ItemConnection { + nodes { + fieldValueByName(name: "Status") { + ... on ProjectV2ItemFieldSingleSelectValue { + name + } + } + ... on ProjectV2Item { + id + project { + ... on ProjectV2 { + id + } + } + } + } + } + } + } + ... on PullRequest { + projectItems(first: 50) { + ... on ProjectV2ItemConnection { + nodes { + fieldValueByName(name: "Status") { + ... on ProjectV2ItemFieldSingleSelectValue { + name + } + } + ... on ProjectV2Item { + id + project { + ... on ProjectV2 { + id + } + } + } + } } } } } - }' -f item_id="$item_id" + }' -f item_id="$issue_id" )" - current_status=$(echo $current_status | jq -c -r '.["data"]["node"]["fieldValueByName"]["name"]') + # Extract the item in the Gardener project + project=$(echo $project_items | jq -c -r --arg project_id $project_id '.data.node.projectItems.nodes[] | select(.project.id == $project_id)') + current_status=$(echo $project | jq -c -r '.fieldValueByName.name') + item_id=$(echo $project | jq -c '.id') - echo "Current issue status is: '${current_status}'" + if [ -z "$current_status" ] ; then + echo "Issue not found in Gardener board" + exit 0 + else + echo "Found issue on Gardener board. 
Current issue status is: '${current_status}'" + fi if [ "$current_status" = "Blocked / Waiting" ] ; then echo "Moving issue from 'Blocked / Waiting' to 'Triage'" diff --git a/.github/workflows/gardener_open_pr.yml.upstream b/.github/workflows/gardener_open_pr.yml.upstream index e9b0fef67ba05..4fc84c07cc5ea 100644 --- a/.github/workflows/gardener_open_pr.yml.upstream +++ b/.github/workflows/gardener_open_pr.yml.upstream @@ -13,12 +13,18 @@ jobs: runs-on: ubuntu-latest if: ${{ github.actor != 'dependabot[bot]' }} steps: + - name: Generate authentication token + id: generate_token + uses: tibdex/github-app-token@b62528385c34dbc9f38e5f4225ac829252d1ea92 + with: + app_id: ${{ secrets.GH_APP_DATADOG_VECTOR_CI_APP_ID }} + private_key: ${{ secrets.GH_APP_DATADOG_VECTOR_CI_APP_PRIVATE_KEY }} - uses: tspascoal/get-user-teams-membership@v2 id: checkVectorMember with: username: ${{ github.actor }} team: vector - GITHUB_TOKEN: ${{ secrets.GH_PAT_ORG }} + GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }} - uses: actions/add-to-project@v0.5.0 if: ${{ steps.checkVectorMember.outputs.isTeamMember == 'false' }} with: diff --git a/.github/workflows/integration-comment.yml.upstream b/.github/workflows/integration-comment.yml.upstream index d915a956e2bb8..f6ed21bda71b5 100644 --- a/.github/workflows/integration-comment.yml.upstream +++ b/.github/workflows/integration-comment.yml.upstream @@ -47,13 +47,19 @@ jobs: runs-on: ubuntu-latest if: contains(github.event.comment.body, '/ci-run-integration') || contains(github.event.comment.body, '/ci-run-all') steps: + - name: Generate authentication token + id: generate_token + uses: tibdex/github-app-token@b62528385c34dbc9f38e5f4225ac829252d1ea92 + with: + app_id: ${{ secrets.GH_APP_DATADOG_VECTOR_CI_APP_ID }} + private_key: ${{ secrets.GH_APP_DATADOG_VECTOR_CI_APP_PRIVATE_KEY }} - name: Get PR comment author id: comment uses: tspascoal/get-user-teams-membership@v2 with: username: ${{ github.actor }} team: 'Vector' - GITHUB_TOKEN: ${{ secrets.GH_PAT_ORG }} + GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }} - name: Validate author membership if: steps.comment.outputs.isTeamMember == 'false' @@ -70,107 +76,330 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} status: pending - test-integration: - uses: ./.github/workflows/integration-test.yml - with: - if: ${{ matrix.run.if }} - test_name: ${{ matrix.run.test_name }} + integration-tests: needs: prep-pr - secrets: inherit - strategy: - fail-fast: false - matrix: - run: - - test_name: 'amqp' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-amqp') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'appsignal' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-appsignal') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'aws' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-aws') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'axiom' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-axiom') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'azure' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-azure') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'clickhouse' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-clickhouse') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'databend' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-databend') || 
contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'datadog-agent' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-datadog') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'datadog-logs' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-datadog') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'datadog-metrics' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-datadog') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'datadog-traces' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-datadog') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'dnstap' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-dnstap') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'docker-logs' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-docker-logs') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'elasticsearch' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-elasticsearch') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'eventstoredb' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-eventstoredb') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'fluent' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-fluent') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'gcp' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-gcp') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'humio' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-humio') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'http-client' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-http-client') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'influxdb' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-influxdb') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'kafka' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-kafka') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'logstash' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-logstash') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'loki' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-loki') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'mongodb' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-mongodb') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'nats' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-nats') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'nginx' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-nginx') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'opentelemetry' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-opentelemetry') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'postgres' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-postgres') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'prometheus' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-prometheus') || 
contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'pulsar' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-pulsar') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'redis' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-redis') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'shutdown' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-shutdown') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'splunk' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-splunk') || contains(github.event.comment.body, '/ci-run-all') }} - - test_name: 'webhdfs' - if: ${{ contains(github.event.comment.body, '/ci-run-integration-webhdfs') || contains(github.event.comment.body, '/ci-run-all') }} + runs-on: [linux, ubuntu-20.04-4core] + steps: + - uses: actions/checkout@v3 + with: + submodules: "recursive" + + - run: sudo npm -g install @datadog/datadog-ci + + - run: docker image prune -af ; docker container prune -f + + - name: amqp + if: ${{ contains(github.event.comment.body, '/ci-run-integration-amqp') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + # First one requires more time, as we need to build the image from scratch + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh amqp + + - name: appsignal + if: ${{ contains(github.event.comment.body, '/ci-run-integration-appsignal') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh appsignal + + - name: aws + if: ${{ contains(github.event.comment.body, '/ci-run-integration-aws') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh aws + + - name: axiom + if: ${{ contains(github.event.comment.body, '/ci-run-integration-axiom') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh axiom + + - name: azure + if: ${{ contains(github.event.comment.body, '/ci-run-integration-azure') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh azure + + - name: clickhouse + if: ${{ contains(github.event.comment.body, '/ci-run-integration-clickhouse') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh clickhouse + + - name: databend + if: ${{ contains(github.event.comment.body, '/ci-run-integration-databend') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh databend + + - name: datadog-agent + if: ${{ contains(github.event.comment.body, '/ci-run-integration-datadog-agent') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh datadog-agent + + - name: datadog-logs + if: ${{ contains(github.event.comment.body, '/ci-run-integration-datadog-logs') || 
contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh datadog-logs + + - name: datadog-metrics + if: ${{ contains(github.event.comment.body, '/ci-run-integration-datadog-metrics') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh datadog-metrics + + - name: datadog-traces + if: ${{ contains(github.event.comment.body, '/ci-run-integration-datadog-traces') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh datadog-traces + + - name: dnstap + if: ${{ contains(github.event.comment.body, '/ci-run-integration-dnstap') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh dnstap + + - run: docker image prune -af --filter=label!=vector-test-runner=true ; docker container prune -f + + - name: docker-logs + if: ${{ contains(github.event.comment.body, '/ci-run-integration-docker-logs') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh docker-logs + + - name: elasticsearch + if: ${{ contains(github.event.comment.body, '/ci-run-integration-elasticsearch') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh elasticsearch + + - name: eventstoredb + if: ${{ contains(github.event.comment.body, '/ci-run-integration-eventstoredb') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh eventstoredb + + - name: fluent + if: ${{ contains(github.event.comment.body, '/ci-run-integration-fluent') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh fluent + + - name: gcp + if: ${{ contains(github.event.comment.body, '/ci-run-integration-gcp') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh gcp + + - name: greptimedb + if: ${{ contains(github.event.comment.body, '/ci-run-integration-greptimedb') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh greptimedb + + - name: humio + if: ${{ contains(github.event.comment.body, '/ci-run-integration-humio') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh humio + + - name: http-client + if: ${{ contains(github.event.comment.body, '/ci-run-integration-http-client') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh http-client + + - name: influxdb + if: 
${{ contains(github.event.comment.body, '/ci-run-integration-influxdb') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh influxdb + + - name: kafka + if: ${{ contains(github.event.comment.body, '/ci-run-integration-kafka') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh kafka + + - name: logstash + if: ${{ contains(github.event.comment.body, '/ci-run-integration-logstash') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh logstash + + - name: loki + if: ${{ contains(github.event.comment.body, '/ci-run-integration-loki') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh loki + + - name: mongodb + if: ${{ contains(github.event.comment.body, '/ci-run-integration-mongodb') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh mongodb + + - run: docker image prune -af --filter=label!=vector-test-runner=true ; docker container prune -f + + - name: nats + if: ${{ contains(github.event.comment.body, '/ci-run-integration-nats') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh nats + + - name: nginx + if: ${{ contains(github.event.comment.body, '/ci-run-integration-nginx') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh nginx + + - name: opentelemetry + if: ${{ contains(github.event.comment.body, '/ci-run-integration-opentelemetry') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh opentelemetry + + - name: postgres + if: ${{ contains(github.event.comment.body, '/ci-run-integration-postgres') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh postgres + + - name: prometheus + if: ${{ contains(github.event.comment.body, '/ci-run-integration-prometheus') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh prometheus + + - name: pulsar + if: ${{ contains(github.event.comment.body, '/ci-run-integration-pulsar') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh pulsar + + - name: redis + if: ${{ contains(github.event.comment.body, '/ci-run-integration-redis') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh redis + + - name: shutdown + if: ${{ 
contains(github.event.comment.body, '/ci-run-integration-shutdown') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh shutdown + + - name: splunk + if: ${{ contains(github.event.comment.body, '/ci-run-integration-splunk') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh splunk + + - name: webhdfs + if: ${{ contains(github.event.comment.body, '/ci-run-integration-webhdfs') || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh webhdfs update-pr-status: name: Signal result to PR runs-on: ubuntu-latest - needs: test-integration + needs: integration-tests if: always() && (contains(github.event.comment.body, '/ci-run-integration') || contains(github.event.comment.body, '/ci-run-all')) steps: + - name: Generate authentication token + id: generate_token + uses: tibdex/github-app-token@b62528385c34dbc9f38e5f4225ac829252d1ea92 + with: + app_id: ${{ secrets.GH_APP_DATADOG_VECTOR_CI_APP_ID }} + private_key: ${{ secrets.GH_APP_DATADOG_VECTOR_CI_APP_PRIVATE_KEY }} - name: Validate issue comment if: github.event_name == 'issue_comment' uses: tspascoal/get-user-teams-membership@v2 with: username: ${{ github.actor }} team: 'Vector' - GITHUB_TOKEN: ${{ secrets.GH_PAT_ORG }} + GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }} - name: (PR comment) Get PR branch uses: xt0rted/pull-request-comment-branch@v2 id: comment-branch - - name: (PR comment) Submit PR result as ${{ needs.test-integration.result }} + - name: (PR comment) Submit PR result as ${{ needs.integration-tests.result }} uses: myrotvorets/set-commit-status-action@v1.1.7 with: sha: ${{ steps.comment-branch.outputs.head_sha }} token: ${{ secrets.GITHUB_TOKEN }} - status: ${{ needs.test-integration.result }} + status: ${{ needs.integration-tests.result }} diff --git a/.github/workflows/integration-test.yml.upstream b/.github/workflows/integration-test.yml.upstream index 4d8d635fcb477..ce0dbb195ab4b 100644 --- a/.github/workflows/integration-test.yml.upstream +++ b/.github/workflows/integration-test.yml.upstream @@ -1,22 +1,12 @@ -# This workflow is used to run an integration test. -# The most common use case is that it is triggered by another workflow, -# such as the Master Merge Queue Suite, or the Integration Comment. +# Integration Test # -# It can also be triggered on manual dispatch in CI however. -# In that use case, an input for the test name needs to be provided. +# This workflow is used to run an integration test on demand. +# An input for the test name needs to be provided. # TODO: check if the input is "all" , and run all, without a timeout? 
name: Integration Test on: - workflow_call: - inputs: - if: - required: false - type: boolean - test_name: - required: true - type: string workflow_dispatch: inputs: test_name: diff --git a/.github/workflows/integration.yml.upstream b/.github/workflows/integration.yml.upstream index 7f85870e7dff4..01a2a7dabca3a 100644 --- a/.github/workflows/integration.yml.upstream +++ b/.github/workflows/integration.yml.upstream @@ -22,6 +22,9 @@ env: CONTAINER_TOOL: "docker" DD_ENV: "ci" DD_API_KEY: ${{ secrets.DD_API_KEY }} + TEST_DATADOG_API_KEY: ${{ secrets.CI_TEST_DATADOG_API_KEY }} + TEST_APPSIGNAL_PUSH_API_KEY: ${{ secrets.TEST_APPSIGNAL_PUSH_API_KEY }} + AXIOM_TOKEN: ${{ secrets.AXIOM_TOKEN }} RUST_BACKTRACE: full TEST_LOG: vector=debug VERBOSE: true @@ -34,6 +37,7 @@ env: jobs: changes: + if: github.event_name == 'pull_request' uses: ./.github/workflows/changes.yml with: base_ref: ${{ github.event.pull_request.base.ref }} @@ -42,94 +46,365 @@ jobs: int_tests: true secrets: inherit - # Calls the Integration Test workflow for each integration that was detected to have files changed that impact it. - integration-matrix: - uses: ./.github/workflows/integration-test.yml - with: - if: ${{ matrix.run.if }} - test_name: ${{ matrix.run.test_name }} - secrets: inherit + integration-tests: + name: Integration Tests + runs-on: [linux, ubuntu-20.04-4core] needs: changes - strategy: - fail-fast: false - matrix: - run: - - test_name: 'amqp' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.amqp == 'true' }} - - test_name: 'appsignal' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.appsignal == 'true' }} - - test_name: 'aws' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.aws == 'true' }} - - test_name: 'axiom' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.axiom == 'true' }} - - test_name: 'azure' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.azure == 'true' }} - - test_name: 'clickhouse' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.clickhouse == 'true' }} - - test_name: 'databend' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.databend == 'true' }} - - test_name: 'datadog-agent' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.datadog == 'true' }} - - test_name: 'datadog-logs' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.datadog == 'true' }} - - test_name: 'datadog-metrics' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.datadog == 'true' }} - - test_name: 'datadog-traces' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.datadog == 'true' }} - - test_name: 'dnstap' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.dnstap == 'true' }} - - test_name: 'docker-logs' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.docker-logs == 'true' }} - - test_name: 'elasticsearch' - if: ${{ 
github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.elasticsearch == 'true' }} - - test_name: 'eventstoredb' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.eventstoredb == 'true' }} - - test_name: 'fluent' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.fluent == 'true' }} - - test_name: 'gcp' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.gcp == 'true' }} - - test_name: 'humio' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.humio == 'true' }} - - test_name: 'http-client' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.http-client == 'true' }} - - test_name: 'influxdb' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.influxdb == 'true' }} - - test_name: 'kafka' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.kafka == 'true' }} - - test_name: 'logstash' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.logstash == 'true' }} - - test_name: 'loki' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.loki == 'true' }} - - test_name: 'mongodb' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.mongodb == 'true' }} - - test_name: 'nats' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.nats == 'true' }} - - test_name: 'nginx' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.nginx == 'true' }} - - test_name: 'opentelemetry' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.opentelemetry == 'true' }} - - test_name: 'postgres' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.postgres == 'true' }} - - test_name: 'prometheus' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.prometheus == 'true' }} - - test_name: 'pulsar' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.pulsar == 'true' }} - - test_name: 'redis' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.redis == 'true' }} - - test_name: 'shutdown' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' }} - - test_name: 'splunk' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.splunk == 'true' }} - - test_name: 'webhdfs' - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.int-all == 'true' || needs.changes.outputs.webhdfs == 'true' }} - - # This is a required status check, so it always needs to run if prior jobs failed, in order to mark the status correctly. 
- integration: + if: always() && ( + github.event_name == 'merge_group' || ( + needs.changes.outputs.all-int == 'true' + || needs.changes.outputs.amqp == 'true' + || needs.changes.outputs.appsignal == 'true' + || needs.changes.outputs.aws == 'true' + || needs.changes.outputs.axiom == 'true' + || needs.changes.outputs.azure == 'true' + || needs.changes.outputs.clickhouse == 'true' + || needs.changes.outputs.databend == 'true' + || needs.changes.outputs.datadog == 'true' + || needs.changes.outputs.dnstap == 'true' + || needs.changes.outputs.docker-logs == 'true' + || needs.changes.outputs.elasticsearch == 'true' + || needs.changes.outputs.eventstoredb == 'true' + || needs.changes.outputs.fluent == 'true' + || needs.changes.outputs.gcp == 'true' + || needs.changes.outputs.greptimedb == 'true' + || needs.changes.outputs.humio == 'true' + || needs.changes.outputs.http-client == 'true' + || needs.changes.outputs.influxdb == 'true' + || needs.changes.outputs.kafka == 'true' + || needs.changes.outputs.logstash == 'true' + || needs.changes.outputs.loki == 'true' + || needs.changes.outputs.mongodb == 'true' + || needs.changes.outputs.nats == 'true' + || needs.changes.outputs.nginx == 'true' + || needs.changes.outputs.opentelemetry == 'true' + || needs.changes.outputs.postgres == 'true' + || needs.changes.outputs.prometheus == 'true' + || needs.changes.outputs.pulsar == 'true' + || needs.changes.outputs.redis == 'true' + || needs.changes.outputs.splunk == 'true' + || needs.changes.outputs.webhdfs == 'true' + ) + ) + timeout-minutes: 75 + steps: + - uses: actions/checkout@v3 + with: + submodules: "recursive" + + - run: sudo npm -g install @datadog/datadog-ci + + - run: docker image prune -af ; docker container prune -f + + - name: Determine if secrets are defined (PR author is team member). 
+ if: github.event_name == 'pull_request' + env: + GH_APP_DATADOG_VECTOR_CI_APP_ID: ${{ secrets.GH_APP_DATADOG_VECTOR_CI_APP_ID }} + run: | + if [[ "$GH_APP_DATADOG_VECTOR_CI_APP_ID" != "" ]] ; then + echo "PR_HAS_ACCESS_TO_SECRETS=true" >> "$GITHUB_ENV" + else + echo "PR_HAS_ACCESS_TO_SECRETS=false" >> "$GITHUB_ENV" + fi + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.amqp == 'true' }} + name: amqp + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh amqp + + - if: (github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.appsignal == 'true') && + (github.event_name != 'pull_request' || env.PR_HAS_ACCESS_TO_SECRETS == 'true') + name: appsignal + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh appsignal + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.aws == 'true' }} + name: aws + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh aws + + - if: (github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.axiom == 'true') && + (github.event_name != 'pull_request' || env.PR_HAS_ACCESS_TO_SECRETS == 'true') + name: axiom + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh axiom + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.azure == 'true' }} + name: azure + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh azure + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.clickhouse == 'true' }} + name: clickhouse + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh clickhouse + + - if: (github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.databend == 'true') && + (github.event_name != 'pull_request' || env.PR_HAS_ACCESS_TO_SECRETS == 'true') + name: databend + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh databend + + - if: (github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.datadog == 'true') && + (github.event_name != 'pull_request' || env.PR_HAS_ACCESS_TO_SECRETS == 'true') + name: datadog-agent + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh datadog-agent + + - if: (github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.datadog == 'true') && + (github.event_name != 'pull_request' || env.PR_HAS_ACCESS_TO_SECRETS == 'true') + name: datadog-logs + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh datadog-logs + + - if: (github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.datadog == 'true') && + (github.event_name != 'pull_request' || env.PR_HAS_ACCESS_TO_SECRETS == 'true') + name: datadog-metrics + uses: 
nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh datadog-metrics + + - if: (github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.datadog == 'true') && + (github.event_name != 'pull_request' || env.PR_HAS_ACCESS_TO_SECRETS == 'true') + name: datadog-traces + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh datadog-traces + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.dnstap == 'true' }} + name: dnstap + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh dnstap + + - run: docker image prune -af --filter=label!=vector-test-runner=true ; docker container prune -f + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.docker-logs == 'true' }} + name: docker-logs + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh docker-logs + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.elasticsearch == 'true' }} + name: elasticsearch + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh elasticsearch + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.eventstoredb == 'true' }} + name: eventstoredb + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh eventstoredb + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.fluent == 'true' }} + name: fluent + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh fluent + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.gcp == 'true' }} + name: gcp + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh gcp + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.greptimedb == 'true' }} + name: greptimedb + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh greptimedb + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.humio == 'true' }} + name: humio + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh humio + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.http-client == 'true' }} + name: http-client + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh http-client + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.influxdb == 'true' }} + name: influxdb + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh influxdb + + - if: ${{ github.event_name == 
'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.kafka == 'true' }} + name: kafka + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh kafka + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.logstash == 'true' }} + name: logstash + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh logstash + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.loki == 'true' }} + name: loki + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh loki + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.mongodb == 'true' }} + name: mongodb + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh mongodb + + - run: docker image prune -af --filter=label!=vector-test-runner=true ; docker container prune -f + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.nats == 'true' }} + name: nats + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh nats + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.nginx == 'true' }} + name: nginx + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh nginx + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.opentelemetry == 'true' }} + name: opentelemetry + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh opentelemetry + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.postgres == 'true' }} + name: postgres + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh postgres + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.prometheus == 'true' }} + name: prometheus + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh prometheus + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.pulsar == 'true' }} + name: pulsar + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh pulsar + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.redis == 'true' }} + name: redis + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh redis + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' }} + name: shutdown + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh shutdown + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || 
needs.changes.outputs.splunk == 'true' }} + name: splunk + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh splunk + + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.webhdfs == 'true' }} + name: webhdfs + uses: nick-fields/retry@v2 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-integration-test.sh webhdfs + + integration-test-suite: name: Integration Test Suite runs-on: ubuntu-latest if: always() needs: - - integration-matrix + - changes + - integration-tests env: FAILED: ${{ contains(needs.*.result, 'failure') }} steps: diff --git a/.github/workflows/k8s_e2e.yml.upstream b/.github/workflows/k8s_e2e.yml.upstream index e7faa3a59555a..a2c09a760a8cf 100644 --- a/.github/workflows/k8s_e2e.yml.upstream +++ b/.github/workflows/k8s_e2e.yml.upstream @@ -16,9 +16,8 @@ name: K8S E2E Suite on: workflow_dispatch: + workflow_call: pull_request: - issue_comment: - types: [created] merge_group: types: [checks_requested] schedule: @@ -27,7 +26,6 @@ on: concurrency: group: ${{ github.workflow }}-${{ github.event.number || github.event.comment.html_url || github.event.merge_group.head_sha || github.event.schedule || github.sha }} - cancel-in-progress: true env: @@ -42,31 +40,9 @@ env: PROFILE: debug jobs: - validate: - name: Validate comment - runs-on: ubuntu-latest - if: | - github.event_name != 'issue_comment' || - ( github.event.issue.pull_request && - ( contains(github.event.comment.body, '/ci-run-all') || - contains(github.event.comment.body, '/ci-run-k8s') - ) - ) - steps: - - name: Get PR comment author - id: comment - uses: tspascoal/get-user-teams-membership@v2 - with: - username: ${{ github.actor }} - team: 'Vector' - GITHUB_TOKEN: ${{ secrets.GH_PAT_ORG }} - - - name: Validate author membership - if: steps.comment.outputs.isTeamMember == 'false' - run: exit 1 - changes: - needs: validate + # Only evaluate files changed on pull request trigger + if: github.event_name == 'pull_request' uses: ./.github/workflows/changes.yml with: base_ref: ${{ github.event.pull_request.base.ref }} @@ -76,8 +52,9 @@ jobs: build-x86_64-unknown-linux-gnu: name: Build - x86_64-unknown-linux-gnu runs-on: [linux, ubuntu-20.04-4core] - needs: [changes, validate] - if: github.event_name != 'pull_request' || needs.changes.outputs.k8s == 'true' + needs: changes + # Run this job even if `changes` job is skipped (non- pull request trigger) + if: ${{ !failure() && !cancelled() && (github.event_name != 'pull_request' || needs.changes.outputs.k8s == 'true') }} # cargo-deb requires a release build, but we don't need optimizations for tests env: CARGO_PROFILE_RELEASE_OPT_LEVEL: 0 @@ -141,8 +118,9 @@ jobs: compute-k8s-test-plan: name: Compute K8s test plan runs-on: ubuntu-latest - needs: [changes, validate] - if: github.event_name != 'pull_request' || needs.changes.outputs.k8s == 'true' + needs: changes + # Run this job even if `changes` job is skipped + if: ${{ !failure() && !cancelled() && (github.event_name != 'pull_request' || needs.changes.outputs.k8s == 'true') }} outputs: matrix: ${{ steps.set-matrix.outputs.matrix }} steps: @@ -195,9 +173,10 @@ jobs: name: K8s ${{ matrix.kubernetes_version.version }} / ${{ matrix.container_runtime }} (${{ matrix.kubernetes_version.role }}) runs-on: [linux, ubuntu-20.04-4core] needs: - - validate - build-x86_64-unknown-linux-gnu - compute-k8s-test-plan + # because `changes` job might be skipped + if: always() && 
needs.build-x86_64-unknown-linux-gnu.result == 'success' && needs.compute-k8s-test-plan.result == 'success' strategy: matrix: ${{ fromJson(needs.compute-k8s-test-plan.outputs.matrix) }} fail-fast: false @@ -246,20 +225,22 @@ jobs: final-result: name: K8s E2E Suite runs-on: ubuntu-latest - needs: [test-e2e-kubernetes, validate] - if: | - always() && (github.event_name != 'issue_comment' || (github.event.issue.pull_request - && (contains(github.event.comment.body, '/ci-run-k8s') || contains(github.event.comment.body, '/ci-run-all')))) + needs: + - changes + - build-x86_64-unknown-linux-gnu + - compute-k8s-test-plan + - test-e2e-kubernetes + if: always() env: FAILED: ${{ contains(needs.*.result, 'failure') }} steps: - name: (PR comment) Get PR branch - if: success() && github.event_name == 'issue_comment' + if: github.event_name == 'issue_comment' && env.FAILED != 'true' uses: xt0rted/pull-request-comment-branch@v2 id: comment-branch - name: (PR comment) Submit PR result as success - if: success() && github.event_name == 'issue_comment' + if: github.event_name == 'issue_comment' && env.FAILED != 'true' uses: myrotvorets/set-commit-status-action@v1.1.7 with: sha: ${{ steps.comment-branch.outputs.head_sha }} diff --git a/.github/workflows/publish.yml.upstream b/.github/workflows/publish.yml.upstream index 8265f10e6f5c0..20644e2bba50e 100644 --- a/.github/workflows/publish.yml.upstream +++ b/.github/workflows/publish.yml.upstream @@ -296,10 +296,10 @@ jobs: - ubuntu:18.04 - ubuntu:20.04 - ubuntu:22.04 - - ubuntu:22.10 - ubuntu:23.04 - debian:10 - debian:11 + - debian:12 container: image: ${{ matrix.container }} steps: @@ -441,7 +441,7 @@ jobs: platforms: all - name: Set up Docker Buildx id: buildx - uses: docker/setup-buildx-action@v2.8.0 + uses: docker/setup-buildx-action@v2.9.1 with: version: latest install: true diff --git a/.github/workflows/regression.yml.upstream b/.github/workflows/regression.yml.upstream index 4d770d496486f..d50a5d4969ea8 100644 --- a/.github/workflows/regression.yml.upstream +++ b/.github/workflows/regression.yml.upstream @@ -37,6 +37,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.event.merge_group.head_sha || github.sha }} cancel-in-progress: true +env: + SINGLE_MACHINE_PERFORMANCE_API: ${{ secrets.SINGLE_MACHINE_PERFORMANCE_API }} + jobs: # Only run this workflow if files changed in areas that could possibly introduce a regression @@ -229,8 +232,8 @@ jobs: export REPLICAS="10" export TOTAL_SAMPLES="600" export P_VALUE="0.1" - export SMP_CRATE_VERSION="0.7.3" - export LADING_VERSION="0.12.0" + export SMP_CRATE_VERSION="0.10.0" + export LADING_VERSION="0.18.0" echo "warmup seconds: ${WARMUP_SECONDS}" echo "replicas: ${REPLICAS}" @@ -295,7 +298,7 @@ jobs: - name: Set up Docker Buildx id: buildx - uses: docker/setup-buildx-action@v2.8.0 + uses: docker/setup-buildx-action@v2.9.1 - name: Build 'vector' target image uses: docker/build-push-action@v4.1.1 @@ -332,7 +335,7 @@ jobs: - name: Set up Docker Buildx id: buildx - uses: docker/setup-buildx-action@v2.8.0 + uses: docker/setup-buildx-action@v2.9.1 - name: Build 'vector' target image uses: docker/build-push-action@v4.1.1 diff --git a/.github/workflows/test.yml.upstream b/.github/workflows/test.yml.upstream index 918880c6b8ada..6d68eaf5ffb38 100644 --- a/.github/workflows/test.yml.upstream +++ b/.github/workflows/test.yml.upstream @@ -124,7 +124,7 @@ jobs: name: Test Suite runs-on: ubuntu-20.04 if: always() - needs: checks + needs: [changes, checks] env: FAILED: ${{ contains(needs.*.result, 'failure') }} steps: 
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c993c8de4dc1a..fe68054dd30dc 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -224,13 +224,13 @@ Integration tests are not run by default when running ### Deprecations -When deprecating functionality in Vector, see [DEPRECATION.md](DEPRECATION.md). +When deprecating functionality in Vector, see [DEPRECATION.md](docs/DEPRECATION.md). ### Dependencies When adding, modifying, or removing a dependency in Vector you may find that you need to update the inventory of third-party licenses maintained in `LICENSE-3rdparty.csv`. This file is generated using -[rust-license-tool](https://github.com/DataDog/rust-license-tool.git) and can be updated using +[dd-rust-license-tool](https://github.com/DataDog/rust-license-tool.git) and can be updated using `cargo vdev build licenses`. ## Next steps @@ -238,9 +238,9 @@ inventory of third-party licenses maintained in `LICENSE-3rdparty.csv`. This fil As discussed in the [`README`](README.md), you should continue to the following documents: -1. **[DEVELOPING.md](DEVELOPING.md)** - Everything necessary to develop -2. **[DOCUMENTING.md](DOCUMENTING.md)** - Preparing your change for Vector users -3. **[DEPRECATION.md](DEPRECATION.md)** - Deprecating functionality in Vector +1. **[DEVELOPING.md](docs/DEVELOPING.md)** - Everything necessary to develop +2. **[DOCUMENTING.md](docs/DOCUMENTING.md)** - Preparing your change for Vector users +3. **[DEPRECATION.md](docs/DEPRECATION.md)** - Deprecating functionality in Vector ## Legal diff --git a/Cargo.lock b/Cargo.lock index 1a08d3073263d..efa09ba0b000b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -20,9 +20,9 @@ checksum = "8b5ace29ee3216de37c0546865ad08edef58b0f9e76838ed8959a84a990e58c5" [[package]] name = "addr2line" -version = "0.21.0" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" dependencies = [ "gimli", ] @@ -39,6 +39,16 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234" +[[package]] +name = "aead" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" +dependencies = [ + "crypto-common", + "generic-array", +] + [[package]] name = "aes" version = "0.8.2" @@ -63,24 +73,14 @@ dependencies = [ [[package]] name = "ahash" -version = "0.8.6" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" +checksum = "bf6ccdb167abbf410dcb915cabd428929d7f6a04980b54a11f26a39f1c7f7107" dependencies = [ "cfg-if", "getrandom 0.2.10", "once_cell", "version_check", - "zerocopy 0.7.31", -] - -[[package]] -name = "aho-corasick" -version = "0.7.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac" -dependencies = [ - "memchr", ] [[package]] @@ -170,6 +170,21 @@ dependencies = [ "winapi", ] +[[package]] +name = "anstream" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163" +dependencies = [ + "anstyle 1.0.0", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", 
+ "colorchoice", + "is-terminal", + "utf8parse", +] + [[package]] name = "anstyle" version = "0.3.1" @@ -182,11 +197,39 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41ed9a86bf92ae6580e0a31281f65a1b1d867c0cc68d5346e2ae128dddfa6a7d" +[[package]] +name = "anstyle-parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" +dependencies = [ + "windows-sys 0.48.0", +] + +[[package]] +name = "anstyle-wincon" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188" +dependencies = [ + "anstyle 1.0.0", + "windows-sys 0.48.0", +] + [[package]] name = "anyhow" -version = "1.0.71" +version = "1.0.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8" +checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854" [[package]] name = "anymap" @@ -196,12 +239,12 @@ checksum = "8f1f8f5a6f3d50d89e3797d7593a50f96bb2aaa20ca0cc7be1fb673232c91d72" [[package]] name = "apache-avro" -version = "0.14.0" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cf4144857f9e4d7dd6cc4ba4c78efd2a46bad682b029bd0d91e76a021af1b2a" +checksum = "9c0fdddc3fdac97394ffcc5c89c634faa9c1c166ced54189af34e407c97b6ee7" dependencies = [ "byteorder", - "digest 0.10.7", + "digest", "lazy_static", "libflate", "log", @@ -214,9 +257,9 @@ dependencies = [ "strum", "strum_macros", "thiserror", - "typed-builder", + "typed-builder 0.14.0", "uuid", - "zerocopy 0.6.1", + "zerocopy", ] [[package]] @@ -261,7 +304,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c6368f9ae5c6ec403ca910327ae0c9437b0a85255b6950c90d497e6177f6e5e" dependencies = [ "proc-macro-hack", - "quote 1.0.33", + "quote 1.0.32", "syn 1.0.109", ] @@ -311,7 +354,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d94121b572ccf1d1b38a1004155e59c64f4c6ff7793070d84a8807e0550881e" dependencies = [ - "quote 1.0.33", + "quote 1.0.32", "syn 1.0.109", ] @@ -338,12 +381,12 @@ dependencies = [ [[package]] name = "assert_cmd" -version = "2.0.11" +version = "2.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86d6b683edf8d1119fe420a94f8a7e389239666aa72e65495d91c00462510151" +checksum = "88903cb14723e4d4003335bb7f8a14f27691649105346a0f0957466c096adfe6" dependencies = [ "anstyle 1.0.0", - "bstr 1.5.0", + "bstr 1.6.0", "doc-comment", "predicates 3.0.1", "predicates-core", @@ -377,9 +420,9 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b0122885821398cc923ece939e24d1056a2384ee719432397fa9db87230ff11" +checksum = "62b74f44609f0f91493e3082d3734d98497e094777144380ea4db9f9905dd5b6" dependencies = [ "flate2", "futures-core", @@ -448,14 +491,13 @@ version = "5.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b35ef8f9be23ee30fe1eb1cf175c689bc33517c6c6d0fd0669dade611e5ced7f" 
dependencies = [ - "async-graphql-derive", - "async-graphql-parser", - "async-graphql-value", + "async-graphql-derive 5.0.10", + "async-graphql-parser 5.0.10", + "async-graphql-value 5.0.10", "async-stream", "async-trait", "base64 0.13.1", "bytes 1.4.0", - "chrono", "fnv", "futures-util", "http", @@ -473,6 +515,37 @@ dependencies = [ "thiserror", ] +[[package]] +name = "async-graphql" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d1f56ef571e325930c41685502269651505160ae0d7e0d7413dd84afe86432c" +dependencies = [ + "async-graphql-derive 6.0.0", + "async-graphql-parser 6.0.0", + "async-graphql-value 6.0.0", + "async-stream", + "async-trait", + "base64 0.13.1", + "bytes 1.4.0", + "chrono", + "fnv", + "futures-util", + "http", + "indexmap 2.0.0", + "mime", + "multer", + "num-traits", + "once_cell", + "pin-project-lite", + "regex", + "serde", + "serde_json", + "serde_urlencoded", + "static_assertions", + "thiserror", +] + [[package]] name = "async-graphql-derive" version = "5.0.10" @@ -480,22 +553,51 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a0f6ceed3640b4825424da70a5107e79d48d9b2bc6318dfc666b2fc4777f8c4" dependencies = [ "Inflector", - "async-graphql-parser", + "async-graphql-parser 5.0.10", "darling 0.14.2", "proc-macro-crate 1.2.1", - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", "thiserror", ] +[[package]] +name = "async-graphql-derive" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a3a4c50aafce65a48d1aba749aaa946173a52d274abb5b9f76360a966ce17c6" +dependencies = [ + "Inflector", + "async-graphql-parser 6.0.0", + "darling 0.20.3", + "proc-macro-crate 1.2.1", + "proc-macro2 1.0.66", + "quote 1.0.32", + "strum", + "syn 2.0.28", + "thiserror", +] + [[package]] name = "async-graphql-parser" version = "5.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ecc308cd3bc611ee86c9cf19182d2b5ee583da40761970e41207f088be3db18f" dependencies = [ - "async-graphql-value", + "async-graphql-value 5.0.10", + "pest", + "serde", + "serde_json", +] + +[[package]] +name = "async-graphql-parser" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a64488a0f0afd284f829977437a2e49e9f62cb72ea5fbd96aec19f87351576df" +dependencies = [ + "async-graphql-value 6.0.0", "pest", "serde", "serde_json", @@ -513,13 +615,25 @@ dependencies = [ "serde_json", ] +[[package]] +name = "async-graphql-value" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86046bbced96c0fab3ff5d2b3c769c0c55b0b3a7d67f9e2f2044f349f2e7d501" +dependencies = [ + "bytes 1.4.0", + "indexmap 2.0.0", + "serde", + "serde_json", +] + [[package]] name = "async-graphql-warp" -version = "5.0.10" +version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce971f92675defe1adf14f9e70b8798d797db9f454463b611a552bffd5532188" +checksum = "4de785650dd90a223b5f5968c5345215160a36d196c9295c6c9a316cb29cba04" dependencies = [ - "async-graphql", + "async-graphql 6.0.0", "futures-util", "serde_json", "warp", @@ -555,6 +669,40 @@ dependencies = [ "futures-lite", ] +[[package]] +name = "async-nats" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8257238e2a3629ee5618502a75d1b91f8017c24638c75349fc8d2d80cf1f7c4c" +dependencies = [ + "base64 0.21.2", + "bytes 1.4.0", + "futures 0.3.28", + 
"http", + "itoa", + "memchr", + "nkeys", + "nuid", + "once_cell", + "rand 0.8.5", + "regex", + "ring", + "rustls-native-certs", + "rustls-pemfile", + "rustls-webpki", + "serde", + "serde_json", + "serde_nanos", + "serde_repr", + "thiserror", + "time", + "tokio", + "tokio-retry", + "tokio-rustls 0.24.0", + "tracing 0.1.37", + "url", +] + [[package]] name = "async-net" version = "1.7.0" @@ -603,9 +751,9 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e97ce7de6cf12de5d7226c73f5ba9811622f4db3a5b91b55c53e987e5f91cba" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 2.0.41", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] @@ -625,9 +773,9 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 2.0.41", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] @@ -638,13 +786,13 @@ checksum = "7a40729d2133846d9ed0ea60a8b9541bccddab49cd30f0715a1da672fe9a2524" [[package]] name = "async-trait" -version = "0.1.74" +version = "0.1.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9" +checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 2.0.41", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] @@ -993,7 +1141,7 @@ dependencies = [ "once_cell", "percent-encoding", "regex", - "sha2 0.10.7", + "sha2", "time", "tracing 0.1.37", ] @@ -1025,7 +1173,7 @@ dependencies = [ "md-5", "pin-project-lite", "sha1", - "sha2 0.10.7", + "sha2", "tracing 0.1.37", ] @@ -1167,9 +1315,9 @@ dependencies = [ [[package]] name = "axum" -version = "0.6.18" +version = "0.6.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8175979259124331c1d7bf6586ee7e0da434155e4b2d48ec2c8386281d8df39" +checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" dependencies = [ "async-trait", "axum-core", @@ -1213,11 +1361,12 @@ dependencies = [ [[package]] name = "azure_core" -version = "0.5.0" -source = "git+https://github.com/Azure/azure-sdk-for-rust.git?rev=b4544d4920fa3064eb921340054cd9cc130b7664#b4544d4920fa3064eb921340054cd9cc130b7664" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b0f0eea648347e40f5f7f7e6bfea4553bcefad0fbf52044ea339e5ce3aba61" dependencies = [ "async-trait", - "base64 0.13.1", + "base64 0.21.2", "bytes 1.4.0", "dyn-clone", "futures 0.3.28", @@ -1226,11 +1375,11 @@ dependencies = [ "log", "paste", "pin-project", + "quick-xml 0.29.0", "rand 0.8.5", "reqwest", "rustc_version 0.4.0", "serde", - "serde-xml-rs", "serde_json", "time", "url", @@ -1239,17 +1388,18 @@ dependencies = [ [[package]] name = "azure_identity" -version = "0.6.0" -source = "git+https://github.com/Azure/azure-sdk-for-rust.git?rev=b4544d4920fa3064eb921340054cd9cc130b7664#b4544d4920fa3064eb921340054cd9cc130b7664" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61712538f43d64b56725f335bc931d0eb42d2b082fb157056465bbadfdeb5dd3" dependencies = [ "async-lock", "async-trait", "azure_core", - "base64 0.13.1", "fix-hidden-lifetime-bug", "futures 0.3.28", "log", "oauth2", + "pin-project", "serde", "serde_json", "time", @@ -1259,23 
+1409,22 @@ dependencies = [ [[package]] name = "azure_storage" -version = "0.6.0" -source = "git+https://github.com/Azure/azure-sdk-for-rust.git?rev=b4544d4920fa3064eb921340054cd9cc130b7664#b4544d4920fa3064eb921340054cd9cc130b7664" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32d9cfa13ed9acb51cd663e04f343bd550a92b455add96c90de387a9a6bc4dbc" dependencies = [ "RustyXML", "async-trait", "azure_core", - "base64 0.13.1", "bytes 1.4.0", "futures 0.3.28", "hmac", "log", "once_cell", "serde", - "serde-xml-rs", "serde_derive", "serde_json", - "sha2 0.10.7", + "sha2", "time", "url", "uuid", @@ -1283,19 +1432,17 @@ dependencies = [ [[package]] name = "azure_storage_blobs" -version = "0.6.0" -source = "git+https://github.com/Azure/azure-sdk-for-rust.git?rev=b4544d4920fa3064eb921340054cd9cc130b7664#b4544d4920fa3064eb921340054cd9cc130b7664" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57cb0fe58af32a3fb49e560613cb1e4937f9f13161a2c1caf1bba0224435f2af" dependencies = [ "RustyXML", "azure_core", "azure_storage", - "base64 0.13.1", "bytes 1.4.0", "futures 0.3.28", "log", - "md5", "serde", - "serde-xml-rs", "serde_derive", "serde_json", "time", @@ -1328,15 +1475,15 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.69" +version = "0.3.67" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca" dependencies = [ "addr2line", "cc", "cfg-if", "libc", - "miniz_oxide", + "miniz_oxide 0.6.2", "object", "rustc-demangle", ] @@ -1374,15 +1521,6 @@ dependencies = [ "simd-abstraction", ] -[[package]] -name = "base64-url" -version = "1.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67a99c239d0c7e77c85dddfa9cebce48704b3c49550fcd3b84dd637e4484899f" -dependencies = [ - "base64 0.13.1", -] - [[package]] name = "base64ct" version = "1.5.3" @@ -1395,15 +1533,9 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" dependencies = [ - "bit-vec 0.6.3", + "bit-vec", ] -[[package]] -name = "bit-vec" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02b4ff8b16e6076c3e14220b39fbc1fabb6737522281a388998046859400895f" - [[package]] name = "bit-vec" version = "0.6.3" @@ -1418,18 +1550,18 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.1" +version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" +checksum = "6dbe3c979c178231552ecba20214a8272df4e09f232a87aef4320cf06539aded" [[package]] name = "bitmask-enum" -version = "2.1.0" +version = "2.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd9e32d7420c85055e8107e5b2463c4eeefeaac18b52359fe9f9c08a18f342b2" +checksum = "49fb8528abca6895a5ada33d62aedd538a5c33e77068256483b44a3230270163" dependencies = [ - "quote 1.0.33", - "syn 1.0.109", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] @@ -1450,16 +1582,7 @@ version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" dependencies = [ - "digest 0.10.7", -] - 
-[[package]] -name = "block-buffer" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" -dependencies = [ - "generic-array", + "digest", ] [[package]] @@ -1495,12 +1618,12 @@ dependencies = [ ] [[package]] -name = "bloom" -version = "0.3.2" +name = "bloomy" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d00ac8e5056d6d65376a3c1aa5c7c34850d6949ace17f0266953a254eb3d6fe8" +checksum = "489d2af57852b78a86478273ac6a1ef912061b6af3a439694c49f309f6ea3bdd" dependencies = [ - "bit-vec 0.4.4", + "siphasher", ] [[package]] @@ -1524,8 +1647,8 @@ dependencies = [ "log", "pin-project-lite", "rustls 0.20.7", - "rustls-native-certs 0.6.2", - "rustls-pemfile 1.0.1", + "rustls-native-certs", + "rustls-pemfile", "serde", "serde_derive", "serde_json", @@ -1535,7 +1658,7 @@ dependencies = [ "tokio", "tokio-util", "url", - "webpki 0.22.0", + "webpki", "webpki-roots", "winapi", ] @@ -1558,7 +1681,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "822462c1e7b17b31961798a6874b36daea6818e99e0cb7d3b7b0fa3c477751c3" dependencies = [ "borsh-derive", - "hashbrown 0.13.2", + "hashbrown 0.13.1", ] [[package]] @@ -1570,7 +1693,7 @@ dependencies = [ "borsh-derive-internal", "borsh-schema-derive-internal", "proc-macro-crate 0.1.5", - "proc-macro2 1.0.70", + "proc-macro2 1.0.66", "syn 1.0.109", ] @@ -1580,8 +1703,8 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61820b4c5693eafb998b1e67485423c923db4a75f72585c247bdee32bad81e7b" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -1591,8 +1714,8 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c76cdbfa13def20d1f8af3ae7b3c6771f06352a74221d8851262ac384c122b8e" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -1630,13 +1753,12 @@ dependencies = [ [[package]] name = "bstr" -version = "1.5.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a246e68bb43f6cd9db24bea052a53e40405417c5fb372e3d1a8a7f770a564ef5" +checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05" dependencies = [ "memchr", - "once_cell", - "regex-automata 0.1.10", + "regex-automata 0.3.6", "serde", ] @@ -1662,8 +1784,8 @@ version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13e576ebe98e605500b3c8041bb888e966653577172df6dd97398714eb30b9bf" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -1736,7 +1858,7 @@ dependencies = [ "cached_proc_macro", "cached_proc_macro_types", "futures 0.3.28", - "hashbrown 0.13.2", + "hashbrown 0.13.1", "instant", "once_cell", "thiserror", @@ -1751,8 +1873,8 @@ checksum = "b48814962d2fd604c50d2b9433c2a41a0ab567779ee2c02f7fba6eca1221f082" dependencies = [ "cached_proc_macro_types", "darling 0.14.2", - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -1788,7 +1910,7 @@ checksum = "4acbb09d9ee8e23699b9634375c72795d095bf268439da88562cf9b501f181fa" dependencies = [ "camino", "cargo-platform", - "semver 1.0.17", + "semver 1.0.18", "serde", "serde_json", ] @@ -1816,11 +1938,12 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.77" 
+version = "1.0.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9f73505338f7d905b19d18738976aae232eb46b8efc15554ffc56deb5d9ebe4" +checksum = "305fe645edc1442a0fa8b6726ba61d422798d37a52e12eaecf4b022ebbb88f01" dependencies = [ "jobserver", + "libc", ] [[package]] @@ -1844,6 +1967,30 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "chacha20" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7fc89c7c5b9e7a02dfe45cd2367bae382f9ed31c61ca8debe5f827c420a2f08" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", +] + +[[package]] +name = "chacha20poly1305" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10cd79432192d1c0f4e1a0fef9527696cc039165d729fb41b3f4f4f354c2dc35" +dependencies = [ + "aead", + "chacha20", + "cipher", + "poly1305", + "zeroize", +] + [[package]] name = "charset" version = "0.1.3" @@ -1870,9 +2017,9 @@ dependencies = [ [[package]] name = "chrono-tz" -version = "0.8.4" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e23185c0e21df6ed832a12e2bda87c7d1def6842881fb634a8511ced741b0d76" +checksum = "f1369bc6b9e9a7dfdae2055f6ec151fe9c554a9d23d357c0237cee2e25eaabb7" dependencies = [ "chrono", "chrono-tz-build", @@ -1882,9 +2029,9 @@ dependencies = [ [[package]] name = "chrono-tz-build" -version = "0.2.1" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "433e39f13c9a060046954e0592a8d0a4bcb1040125cbf91cb8ee58964cfb350f" +checksum = "e2f5ebdc942f57ed96d560a6d1a459bae5851102a25d5bf89dc04ae453e31ecf" dependencies = [ "parse-zoneinfo", "phf", @@ -1939,6 +2086,7 @@ checksum = "d1873270f8f7942c191139cb8a40fd228da6c3fd2fc376d7e92d47aa14aeb59e" dependencies = [ "crypto-common", "inout", + "zeroize", ] [[package]] @@ -1958,9 +2106,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.1.14" +version = "4.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "906f7fe1da4185b7a282b2bc90172a496f9def1aca4545fe7526810741591e14" +checksum = "c27cdf28c0f604ba3f512b0c9a409f8de8513e4816705deb0498b627e7c3a3fd" dependencies = [ "clap_builder", "clap_derive", @@ -1973,50 +2121,49 @@ version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1eef05769009513df2eb1c3b4613e7fad873a14c600ff025b08f250f59fee7de" dependencies = [ - "clap 4.1.14", + "clap 4.3.21", "log", ] [[package]] name = "clap_builder" -version = "4.1.14" +version = "4.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "351f9ad9688141ed83dfd8f5fb998a06225ef444b48ff4dc43de6d409b7fd10b" +checksum = "08a9f1ab5e9f01a9b81f202e8562eb9a10de70abf9eaeac1be465c28b75aa4aa" dependencies = [ - "bitflags 1.3.2", + "anstream", + "anstyle 1.0.0", "clap_lex", - "is-terminal", "strsim 0.10.0", - "termcolor", "terminal_size 0.2.2", ] [[package]] name = "clap_complete" -version = "4.3.1" +version = "4.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f6b5c519bab3ea61843a7923d074b04245624bb84a64a8c150f5deb014e388b" +checksum = "5fc443334c81a804575546c5a8a79b4913b50e28d69232903604cada1de817ce" dependencies = [ - "clap 4.1.14", + "clap 4.3.21", ] [[package]] name = "clap_derive" -version = "4.1.14" +version = "4.3.12" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "81d7dc0031c3a59a04fc2ba395c8e2dd463cba1859275f065d225f6122221b45" +checksum = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050" dependencies = [ - "heck 0.4.0", - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 2.0.41", + "heck 0.4.1", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] name = "clap_lex" -version = "0.4.1" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a2dd5a6fe8c6e3502f568a6353e5273bbb15193ad9a89e457b9970798efbea1" +checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" [[package]] name = "clipboard-win" @@ -2057,6 +2204,7 @@ dependencies = [ "ordered-float 3.7.0", "prometheus-remote-write", "prost", + "prost-reflect", "quick-protobuf", "regex", "serde", @@ -2088,15 +2236,21 @@ dependencies = [ "unicode-width", ] +[[package]] +name = "colorchoice" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" + [[package]] name = "colored" -version = "2.0.0" +version = "2.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3616f750b84d8f0de8a58bda93e08e2a81ad3f523089b05f1dffecab48c6cbd" +checksum = "2674ec482fbc38012cf31e6c42ba0177b431a0cb6f15fe40efa5aab1bda516f6" dependencies = [ - "atty", + "is-terminal", "lazy_static", - "winapi", + "windows-sys 0.48.0", ] [[package]] @@ -2184,9 +2338,9 @@ dependencies = [ [[package]] name = "console-subscriber" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57ab2224a0311582eb03adba4caaf18644f7b1f10a760803a803b9b605187fc7" +checksum = "d4cf42660ac07fcebed809cfe561dd8730bcd35b075215e6479c516bcd0d11cb" dependencies = [ "console-api", "crossbeam-channel", @@ -2208,9 +2362,9 @@ dependencies = [ [[package]] name = "const-oid" -version = "0.6.2" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d6f2aa4d0537bcc1c74df8755072bd31c1ef1a3a1b85a68e8404a8c353b7b8b" +checksum = "28c122c3980598d243d63d9a704629a2d748d101f278052ff068be5a4423ab6f" [[package]] name = "convert_case" @@ -2251,9 +2405,9 @@ checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" [[package]] name = "cpufeatures" -version = "0.2.11" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0" +checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" dependencies = [ "libc", ] @@ -2300,7 +2454,7 @@ dependencies = [ "anes", "cast", "ciborium", - "clap 4.1.14", + "clap 4.3.21", "criterion-plot", "futures 0.3.28", "is-terminal", @@ -2382,22 +2536,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "crossterm" -version = "0.25.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e64e6c0fbe2c17357405f7c758c1ef960fce08bdfb2c03d88d2a18d7e09c4b67" -dependencies = [ - "bitflags 1.3.2", - "crossterm_winapi", - "libc", - "mio", - "parking_lot", - "signal-hook", - "signal-hook-mio", - "winapi", -] - [[package]] name = "crossterm" version = "0.26.1" @@ -2437,9 +2575,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", + "rand_core 0.6.4", "typenum", ] +[[package]] 
+name = "crypto_secretbox" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d6cf87adf719ddf43a805e92c6870a531aedda35ff640442cbaf8674e141e1" +dependencies = [ + "aead", + "cipher", + "generic-array", + "poly1305", + "salsa20", + "subtle", + "zeroize", +] + [[package]] name = "csv" version = "1.2.2" @@ -2461,6 +2615,16 @@ dependencies = [ "memchr", ] +[[package]] +name = "ctor" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096" +dependencies = [ + "quote 1.0.32", + "syn 1.0.109", +] + [[package]] name = "ctr" version = "0.9.2" @@ -2478,15 +2642,29 @@ checksum = "b365fabc795046672053e29c954733ec3b05e4be654ab130fe8f1f94d7051f35" [[package]] name = "curve25519-dalek" -version = "3.2.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b9fdf9972b2bd6af2d913799d9ebc165ea4d2e65878e329d9c6b372c4491b61" +checksum = "f711ade317dd348950a9910f81c5947e3d8907ebd2b83f76203ff1807e6a2bc2" dependencies = [ - "byteorder", - "digest 0.9.0", - "rand_core 0.5.1", + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest", + "fiat-crypto", + "platforms 3.0.2", + "rustc_version 0.4.0", "subtle", - "zeroize", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fdaf97f4804dcebfa5862639bc9ce4121e82140bec2a987ac5140294865b5b" +dependencies = [ + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] @@ -2510,8 +2688,8 @@ dependencies = [ "cc", "codespan-reporting", "once_cell", - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "scratch", "syn 1.0.109", ] @@ -2528,8 +2706,8 @@ version = "1.0.82" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a08a6e2fcc370a089ad3b4aaf54db3b1b4cee38ddabce5896b33eb693275f470" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -2553,6 +2731,16 @@ dependencies = [ "darling_macro 0.14.2", ] +[[package]] +name = "darling" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0209d94da627ab5605dcccf08bb18afa5009cfbef48d8a8b7d7bdbc79be25c5e" +dependencies = [ + "darling_core 0.20.3", + "darling_macro 0.20.3", +] + [[package]] name = "darling_core" version = "0.13.4" @@ -2561,8 +2749,8 @@ checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "strsim 0.10.0", "syn 1.0.109", ] @@ -2575,12 +2763,26 @@ checksum = "a784d2ccaf7c98501746bf0be29b2022ba41fd62a2e622af997a03e9f972859f" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "strsim 0.10.0", "syn 1.0.109", ] +[[package]] +name = "darling_core" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "177e3443818124b357d8e76f53be906d60937f0d3a90773a664fa63fa253e621" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2 1.0.66", + "quote 1.0.32", + "strsim 0.10.0", + "syn 2.0.28", +] + [[package]] name = "darling_macro" version = "0.13.4" @@ -2588,7 +2790,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" dependencies = [ "darling_core 0.13.4", - "quote 1.0.33", + "quote 1.0.32", "syn 1.0.109", ] @@ -2599,18 +2801,29 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7618812407e9402654622dd402b0a89dff9ba93badd6540781526117b92aab7e" dependencies = [ "darling_core 0.14.2", - "quote 1.0.33", + "quote 1.0.32", "syn 1.0.109", ] +[[package]] +name = "darling_macro" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" +dependencies = [ + "darling_core 0.20.3", + "quote 1.0.32", + "syn 2.0.28", +] + [[package]] name = "dashmap" -version = "5.4.0" +version = "5.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" +checksum = "6943ae99c34386c84a470c499d3414f66502a41340aa895406e0d2e4a207b91d" dependencies = [ "cfg-if", - "hashbrown 0.12.3", + "hashbrown 0.14.0", "lock_api", "once_cell", "parking_lot_core", @@ -2676,11 +2889,13 @@ checksum = "f578e8e2c440e7297e008bb5486a3a8a194775224bbc23729b0dbdfaeebf162e" [[package]] name = "der" -version = "0.4.5" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79b71cca7d95d7681a4b3b9cdf63c8dbc3730d0584c2c74e31416d64a90493f4" +checksum = "fffa369a668c8af7dbf8b5e56c9f744fbd399949ed171606040001947de40b1c" dependencies = [ "const-oid", + "pem-rfc7468", + "zeroize", ] [[package]] @@ -2689,20 +2904,20 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] [[package]] name = "derive_arbitrary" -version = "1.3.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3cdeb9ec472d588e539a818b2dee436825730da08ad0017c4b1a17676bdc8b7" +checksum = "53e0efad4403bfc52dc201159c4b842a246a14b98c64b55dfd0f2d89729dfeb8" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 1.0.109", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] @@ -2712,8 +2927,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ "convert_case 0.4.0", - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "rustc_version 0.4.0", "syn 1.0.109", ] @@ -2736,22 +2951,13 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" -[[package]] -name = "digest" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" -dependencies = [ - "generic-array", -] - [[package]] name = "digest" version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ - "block-buffer 0.10.3", + "block-buffer", "crypto-common", "subtle", ] @@ -2829,9 +3035,9 @@ dependencies = [ [[package]] name = "dns-lookup" -version = "2.0.4" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e5766087c2235fec47fafa4cfecc81e494ee679d0fd4a59887ea0919bfb0e4fc" +checksum = "8f332aa79f9e9de741ac013237294ef42ce2e9c6394dc7d766725812f1238812" dependencies = [ "cfg-if", "libc", @@ -2895,29 +3101,29 @@ checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" [[package]] name = "dyn-clone" -version = "1.0.11" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68b0cf012f1230e43cd00ebb729c6bb58707ecfa8ad08b52ef3a4ccd2697fc30" +checksum = "304e6508efa593091e97a9abbc10f90aa7ca635b6d2784feff3c89d41dd12272" [[package]] name = "ed25519" -version = "1.5.3" +version = "2.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91cff35c70bba8a626e3185d8cd48cc11b5437e1a5bcd15b9b5fa3c64b6dfee7" +checksum = "60f6d271ca33075c88028be6f04d502853d63a5ece419d269c15315d4fc1cf1d" dependencies = [ "signature", ] [[package]] name = "ed25519-dalek" -version = "1.0.1" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" +checksum = "7277392b266383ef8396db7fdeb1e77b6c52fed775f5df15bb24f35b72156980" dependencies = [ "curve25519-dalek", "ed25519", - "sha2 0.9.9", - "zeroize", + "sha2", + "signature", ] [[package]] @@ -2979,9 +3185,9 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21cdad81446a7f7dc43f6a77409efeb9733d2fa65553efef6018ef257c959b73" dependencies = [ - "heck 0.4.0", - "proc-macro2 1.0.70", - "quote 1.0.33", + "heck 0.4.1", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -2991,22 +3197,22 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c9720bba047d567ffc8a3cba48bf19126600e249ab7f128e9233e6376976a116" dependencies = [ - "heck 0.4.0", - "proc-macro2 1.0.70", - "quote 1.0.33", + "heck 0.4.1", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] [[package]] name = "enum_dispatch" -version = "0.3.11" +version = "0.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11f36e95862220b211a6e2aa5eca09b4fa391b13cd52ceb8035a24bf65a79de2" +checksum = "8f33313078bb8d4d05a2733a94ac4c2d8a0df9a2b84424ebf4f33bfc224a890e" dependencies = [ "once_cell", - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 1.0.109", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] @@ -3024,9 +3230,9 @@ version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e9a1f9f7d83e59740248a6e14ecf93929ade55027844dfcea78beafccc15745" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 2.0.41", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] @@ -3073,15 +3279,15 @@ dependencies = [ [[package]] name = "equivalent" -version = "1.0.1" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +checksum = "88bffebc5d80432c9b140ee17875ff173a8ab62faad5b257da912bd2f6c1c0a1" [[package]] name = "erased-serde" -version = "0.3.23" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54558e0ba96fbe24280072642eceb9d7d442e32c7ec0ea9e7ecd7b4ea2cf4e11" +checksum = "f94c0e13118e7d7533271f754a168ae8400e6a1cc043f2bfd53cc7290f1a1de3" dependencies = [ "serde", ] @@ -3165,8 +3371,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"f47da3a72ec598d9c8937a7ebca8962a5c7a1f28444e38c2b33c771ba3f55f05" dependencies = [ "proc-macro-error", - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -3235,11 +3441,17 @@ dependencies = [ "instant", ] +[[package]] +name = "fiat-crypto" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e825f6987101665dea6ec934c09ec6d721de7bc1bf92248e1d5810c8cd636b77" + [[package]] name = "file-source" version = "0.1.0" dependencies = [ - "bstr 1.5.0", + "bstr 1.6.0", "bytes 1.4.0", "chrono", "crc", @@ -3248,7 +3460,7 @@ dependencies = [ "flate2", "futures 0.3.28", "glob", - "indexmap 2.0.2", + "indexmap 2.0.0", "libc", "quickcheck", "scan_fmt", @@ -3291,8 +3503,8 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e4c81935e123ab0741c4c4f0d9b8377e5fb21d3de7e062fa4b1263b1fbcba1ea" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -3315,7 +3527,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743" dependencies = [ "crc32fast", - "miniz_oxide", + "miniz_oxide 0.7.1", ] [[package]] @@ -3487,9 +3699,9 @@ version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 2.0.41", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] @@ -3532,12 +3744,13 @@ dependencies = [ [[package]] name = "generic-array" -version = "0.14.6" +version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", + "zeroize", ] [[package]] @@ -3574,22 +3787,11 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "ghost" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb19fe8de3ea0920d282f7b77dd4227aea6b8b999b42cdf0ca41b2472b14443a" -dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 1.0.109", -] - [[package]] name = "gimli" -version = "0.28.1" +version = "0.27.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" +checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" [[package]] name = "glob" @@ -3599,9 +3801,9 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "gloo-utils" -version = "0.1.7" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "037fcb07216cb3a30f7292bd0176b050b7b9a052ba830ef7d5d65f6dc64ba58e" +checksum = "0b5555354113b18c547c1d3a98fbf7fb32a9ff4f6fa112ce823a21641a0ba3aa" dependencies = [ "js-sys", "serde", @@ -3631,9 +3833,9 @@ dependencies = [ [[package]] name = "governor" -version = "0.5.1" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c390a940a5d157878dd057c78680a33ce3415bcd05b4799509ea44210914b4d5" +checksum = "821239e5672ff23e2a7060901fa622950bbd80b649cdaadd78d1c1767ed14eb4" dependencies = [ "cfg-if", "dashmap", @@ -3684,10 +3886,10 @@ checksum = 
"a40f793251171991c4eb75bd84bc640afa8b68ff6907bc89d3b712a22f700506" dependencies = [ "graphql-introspection-query", "graphql-parser", - "heck 0.4.0", + "heck 0.4.1", "lazy_static", - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "serde", "serde_json", "syn 1.0.109", @@ -3700,10 +3902,43 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "00bda454f3d313f909298f626115092d348bc231025699f557b27e248475f48c" dependencies = [ "graphql_client_codegen", - "proc-macro2 1.0.70", + "proc-macro2 1.0.66", "syn 1.0.109", ] +[[package]] +name = "greptime-proto" +version = "0.1.0" +source = "git+https://github.com/GreptimeTeam/greptime-proto.git?tag=0.2.1#4398d20c56d5f7939cc2960789cb1fa7dd18e6fe" +dependencies = [ + "prost", + "serde", + "serde_json", + "tonic", + "tonic-build", +] + +[[package]] +name = "greptimedb-client" +version = "0.1.0" +source = "git+https://github.com/GreptimeTeam/greptimedb-client-rust.git?rev=bc32362adf0df17a41a95bae4221d6d8f1775656#bc32362adf0df17a41a95bae4221d6d8f1775656" +dependencies = [ + "dashmap", + "enum_dispatch", + "futures 0.3.28", + "futures-util", + "greptime-proto", + "parking_lot", + "prost", + "rand 0.8.5", + "snafu", + "tokio", + "tokio-stream", + "tonic", + "tonic-build", + "tower", +] + [[package]] name = "grok" version = "2.0.0" @@ -3716,9 +3951,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.22" +version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d6250322ef6e60f93f9a2162799302cd6f68f79f6e5d85c8c16f14d1d958178" +checksum = "97ec8491ebaf99c8eaa73058b045fe58073cd6be7f596ac993ced0b0a0c01049" dependencies = [ "bytes 1.4.0", "fnv", @@ -3726,7 +3961,7 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap 2.0.2", + "indexmap 1.9.3", "slab", "tokio", "tokio-util", @@ -3756,20 +3991,20 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.13.2" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" +checksum = "33ff8ae62cd3a9102e5637afc8452c55acf3844001bd5374e0b0bd7b6616c038" dependencies = [ - "ahash 0.8.6", + "ahash 0.8.2", ] [[package]] name = "hashbrown" -version = "0.14.3" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" +checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" dependencies = [ - "ahash 0.8.6", + "ahash 0.8.2", ] [[package]] @@ -3822,9 +4057,9 @@ dependencies = [ [[package]] name = "heck" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] name = "heim" @@ -3905,7 +4140,7 @@ dependencies = [ "log", "mach", "ntapi", - "platforms", + "platforms 1.1.0", "winapi", ] @@ -3987,7 +4222,7 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" dependencies = [ - "digest 0.10.7", + "digest", ] [[package]] @@ -4168,7 +4403,7 @@ dependencies = [ "hyper", "log", "rustls 0.20.7", - "rustls-native-certs 0.6.2", + "rustls-native-certs", "tokio", "tokio-rustls 0.23.4", ] @@ -4181,7 +4416,7 @@ checksum = 
"0646026eb1b3eea4cd9ba47912ea5ce9cc07713d105b1a14698f4e6433d348b7" dependencies = [ "http", "hyper", - "rustls 0.21.0", + "rustls 0.21.6", "tokio", "tokio-rustls 0.24.0", ] @@ -4294,20 +4529,20 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.0.2" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8adf3ddd720272c6ea8bf59463c04e0f93d0bbf7c5439b691bca2987e0270897" +checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" dependencies = [ "equivalent", - "hashbrown 0.14.3", + "hashbrown 0.14.0", "serde", ] [[package]] name = "indicatif" -version = "0.17.5" +version = "0.17.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ff8cc23a7393a397ed1d7f56e6365cba772aba9f9912ab968b03043c395d057" +checksum = "0b297dc40733f23a0e52728a58fa9489a5b7638a324932de16b41adc3ef80730" dependencies = [ "console", "instant", @@ -4319,9 +4554,9 @@ dependencies = [ [[package]] name = "indoc" -version = "2.0.1" +version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f2cb48b81b1dc9f39676bf99f5499babfec7cd8fe14307f7b3d747208fb5690" +checksum = "2c785eefb63ebd0e33416dfcb8d6da0bf27ce752843a45632a67bf10d4d4b5c4" [[package]] name = "infer" @@ -4331,9 +4566,9 @@ checksum = "64e9829a50b42bb782c1df523f78d332fe371b10c661e78b7a3c34b0198e9fac" [[package]] name = "infer" -version = "0.14.0" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb78f4c4a058ef30a9ff77322e758f7e60f871274b602d7fdc1b0956b0cb88e" +checksum = "cb33622da908807a06f9513c19b3c1ad50fab3e4137d82a78107d502075aa199" [[package]] name = "inotify" @@ -4376,12 +4611,9 @@ dependencies = [ [[package]] name = "inventory" -version = "0.3.6" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0539b5de9241582ce6bd6b0ba7399313560151e58c9aaf8b74b711b1bdce644" -dependencies = [ - "ghost", -] +checksum = "a53088c87cf71c9d4f3372a2cb9eea1e7b8a0b1bf8b7f7d23fe5b76dbb07e63b" [[package]] name = "io-lifetimes" @@ -4518,12 +4750,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "json" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "078e285eafdfb6c4b434e0d31e8cfcb5115b651496faca5749b88fafd4f23bfd" - [[package]] name = "json-patch" version = "1.0.0" @@ -4682,7 +4908,7 @@ dependencies = [ "secrecy", "serde", "serde_json", - "serde_yaml 0.9.27", + "serde_yaml 0.9.25", "thiserror", "tokio", "tokio-util", @@ -4714,7 +4940,7 @@ version = "0.82.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed8442b2f1d6c1d630677ade9e5d5ebe793dec099a75fb582d56d77b8eb8cee8" dependencies = [ - "ahash 0.8.6", + "ahash 0.8.2", "async-trait", "backoff", "derivative", @@ -4748,7 +4974,7 @@ dependencies = [ "lalrpop-util", "petgraph", "regex", - "regex-syntax 0.7.2", + "regex-syntax 0.7.4", "string_cache", "term", "tiny-keccak", @@ -4763,9 +4989,9 @@ checksum = "3f35c735096c0293d313e8f2a641627472b83d01b937177fe76e5e2708d31e0d" [[package]] name = "lapin" -version = "2.2.1" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acc13beaa09eed710f406201f46b961345b4d061dd90ec3d3ccc70721e70342a" +checksum = "5f3067a1fcfbc3fc46455809c023e69b8f6602463201010f4ae5a3b572adb9dc" dependencies = [ "amq-protocol", "async-global-executor-trait", @@ -4791,9 +5017,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" 
[[package]] name = "libc" -version = "0.2.151" +version = "0.2.147" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" +checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" [[package]] name = "libflate" @@ -4882,9 +5108,9 @@ dependencies = [ [[package]] name = "lock_api" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" dependencies = [ "autocfg", "scopeguard", @@ -4898,9 +5124,9 @@ checksum = "8166fbddef141acbea89cf3425ed97d4c22d14a68161977fc01c301175a4fb89" [[package]] name = "log" -version = "0.4.19" +version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" [[package]] name = "logfmt" @@ -4922,9 +5148,9 @@ dependencies = [ [[package]] name = "lru" -version = "0.10.1" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "718e8fae447df0c7e1ba7f5189829e63fd536945c8988d61444c19039f16b670" +checksum = "eedb2bdbad7e0634f83989bf596f497b070130daaa398ab22d84c39e266deec5" [[package]] name = "lru-cache" @@ -5060,20 +5286,14 @@ version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca" dependencies = [ - "digest 0.10.7", + "digest", ] -[[package]] -name = "md5" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" - [[package]] name = "memchr" -version = "2.6.4" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "memmap2" @@ -5104,11 +5324,11 @@ dependencies = [ [[package]] name = "metrics" -version = "0.21.0" +version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa8ebbd1a9e57bbab77b9facae7f5136aea44c356943bf9a198f647da64285d6" +checksum = "fde3af1a009ed76a778cb84fdef9e7dbbdf5775ae3e4cc1f434a6a307f6f76c5" dependencies = [ - "ahash 0.8.6", + "ahash 0.8.2", "metrics-macros", "portable-atomic", ] @@ -5119,9 +5339,9 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ddece26afd34c31585c74a4db0630c376df271c285d682d1e55012197830b6df" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 2.0.41", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] @@ -5142,14 +5362,14 @@ dependencies = [ [[package]] name = "metrics-util" -version = "0.15.0" +version = "0.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "111cb375987443c3de8d503580b536f77dc8416d32db62d9456db5d93bd7ac47" +checksum = "4de2ed6e491ed114b40b732e4d1659a9d53992ebd87490c44a6ffe23739d973e" dependencies = [ - "aho-corasick 0.7.20", + "aho-corasick", "crossbeam-epoch", "crossbeam-utils", - "hashbrown 0.13.2", + "hashbrown 0.13.1", "indexmap 1.9.3", "metrics", "num_cpus", @@ -5181,6 +5401,15 @@ version = "0.2.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" +[[package]] +name = "miniz_oxide" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa" +dependencies = [ + "adler", +] + [[package]] name = "miniz_oxide" version = "0.7.1" @@ -5192,9 +5421,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.10" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" +checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" dependencies = [ "libc", "log", @@ -5246,8 +5475,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22ce75669015c4f47b289fd4d4f56e894e4c96003ffdf3ac51313126f94c6cbb" dependencies = [ "cfg-if", - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -5275,9 +5504,9 @@ dependencies = [ [[package]] name = "mongodb" -version = "2.6.1" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16928502631c0db72214720aa479c722397fe5aed6bf1c740a3830b3fe4bfcfe" +checksum = "ebcd85ec209a5b84fd9f54b9e381f6fa17462bc74160d018fc94fd8b9f61faa8" dependencies = [ "async-trait", "base64 0.13.1", @@ -5299,12 +5528,12 @@ dependencies = [ "rand 0.8.5", "rustc_version_runtime", "rustls 0.20.7", - "rustls-pemfile 1.0.1", + "rustls-pemfile", "serde", "serde_bytes", "serde_with 1.14.0", "sha-1", - "sha2 0.10.7", + "sha2", "socket2 0.4.9", "stringprep", "strsim 0.10.0", @@ -5315,7 +5544,7 @@ dependencies = [ "tokio-util", "trust-dns-proto 0.21.2", "trust-dns-resolver", - "typed-builder", + "typed-builder 0.10.0", "uuid", "webpki-roots", ] @@ -5362,42 +5591,6 @@ dependencies = [ "tempfile", ] -[[package]] -name = "nats" -version = "0.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2eebb39ba0555bcf232817d42ed3346499f14f6f8d4de1c0ca4517bda99c1a7b" -dependencies = [ - "base64 0.13.1", - "base64-url", - "blocking", - "crossbeam-channel", - "fastrand", - "itoa", - "json", - "lazy_static", - "libc", - "log", - "memchr", - "nkeys 0.2.0", - "nuid", - "once_cell", - "parking_lot", - "regex", - "ring", - "rustls 0.19.1", - "rustls-native-certs 0.5.0", - "rustls-pemfile 0.2.1", - "serde", - "serde_json", - "serde_nanos", - "serde_repr", - "time", - "url", - "webpki 0.21.4", - "winapi", -] - [[package]] name = "ndarray" version = "0.15.6" @@ -5474,27 +5667,13 @@ dependencies = [ [[package]] name = "nkeys" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e66a7cd1358277b2a6f77078e70aea7315ff2f20db969cc61153103ec162594" -dependencies = [ - "byteorder", - "data-encoding", - "ed25519-dalek", - "getrandom 0.2.10", - "log", - "rand 0.8.5", - "signatory", -] - -[[package]] -name = "nkeys" -version = "0.3.0" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2d151f6ece2f3d1077f6c779268de2516653d8344ddde65addd785cce764fe5" +checksum = "aad178aad32087b19042ee36dfd450b73f5f934fbfb058b59b198684dfec4c47" dependencies = [ "byteorder", "data-encoding", + "ed25519", "ed25519-dalek", "getrandom 0.2.10", "log", @@ -5504,9 +5683,9 @@ dependencies = [ [[package]] name = "no-proxy" -version = "0.3.2" +version = "0.3.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b24b826bdb92c7a2c6f22ed4cf649001bd237f936587ee0b76cd9dea86003d01" +checksum = "5cc5956cc74e3574924e108ad12e14340a64183e1cd1d69a7e41e9680c109e67" dependencies = [ "cidr-utils", "serde", @@ -5685,9 +5864,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" dependencies = [ "autocfg", "libm", @@ -5728,8 +5907,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" dependencies = [ "proc-macro-crate 1.2.1", - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -5740,9 +5919,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6" dependencies = [ "proc-macro-crate 1.2.1", - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 2.0.41", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] @@ -5762,9 +5941,9 @@ checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" [[package]] name = "oauth2" -version = "4.4.2" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c38841cdd844847e3e7c8d29cef9dcfed8877f8f56f9071f77843ecf3baf937f" +checksum = "eeaf26a72311c087f8c5ba617c96fac67a5c04f430e716ac8d8ab2de62e23368" dependencies = [ "base64 0.13.1", "chrono", @@ -5775,7 +5954,7 @@ dependencies = [ "serde", "serde_json", "serde_path_to_error", - "sha2 0.10.7", + "sha2", "thiserror", "url", ] @@ -5791,9 +5970,9 @@ dependencies = [ [[package]] name = "object" -version = "0.32.1" +version = "0.30.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0" +checksum = "03b4680b86d9cfafba8fc491dc9b6df26b68cf40e9e6cd73909194759a63c385" dependencies = [ "memchr", ] @@ -5870,7 +6049,7 @@ dependencies = [ "parking_lot", "percent-encoding", "pin-project", - "quick-xml", + "quick-xml 0.27.1", "reqwest", "serde", "serde_json", @@ -5880,9 +6059,9 @@ dependencies = [ [[package]] name = "openidconnect" -version = "2.5.1" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98dd5b7049bac4fdd2233b8c9767d42c05da8006fdb79cc903258556d2b18009" +checksum = "87af7097640fedbe64718ac1c9b0549d72da747a3f527cd089215f96c6f691d5" dependencies = [ "base64 0.13.1", "chrono", @@ -5898,18 +6077,15 @@ dependencies = [ "serde_derive", "serde_json", "serde_path_to_error", - "serde_plain", - "serde_with 1.14.0", - "subtle", "thiserror", "url", ] [[package]] name = "openssl" -version = "0.10.55" +version = "0.10.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "345df152bc43501c5eb9e4654ff05f794effb78d4efe3d53abc158baddc0703d" +checksum = "729b745ad4a5575dd06a3e1af1414bd330ee561c01b3899eb584baeaa8def17e" dependencies = [ "bitflags 1.3.2", "cfg-if", @@ -5926,8 +6102,8 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b501e44f11665960c7e7fcf062c7d96a14ade4aa98116c004b2e37b5be7d736c" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ 
-5939,18 +6115,16 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-src" -version = "111.25.0+1.1.1t" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3173cd3626c43e3854b1b727422a276e568d9ec5fe8cec197822cf52cfb743d6" +version = "300.1.3+3.1.2" +source = "git+https://github.com/vectordotdev/openssl-src-rs.git?tag=release-300-force-engine+3.1.2#98b1172bcef15358ad7bbf4baa3a3aa59d831e81" dependencies = [ "cc", ] [[package]] name = "openssl-sys" -version = "0.9.90" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "374533b0e45f3a7ced10fcaeccca020e66656bc03dac384f852e4e5a7a8104a6" +version = "0.9.91" +source = "git+https://github.com/vectordotdev/rust-openssl.git?tag=openssl-sys-v0.9.91+3.0.0#c3a8b494e0a8ab88db692c239d30c903353b56a3" dependencies = [ "cc", "libc", @@ -6034,6 +6208,15 @@ dependencies = [ "winapi", ] +[[package]] +name = "output_vt100" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "628223faebab4e3e40667ee0b2336d34a5b960ff60ea743ddfdbcf7770bcfb66" +dependencies = [ + "winapi", +] + [[package]] name = "outref" version = "0.1.0" @@ -6082,15 +6265,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.4" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dc9e0dc2adc1c69d09143aff38d3d30c5c3f0df0dad82e6d25547af174ebec0" +checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.2.16", + "redox_syscall 0.3.5", "smallvec", - "windows-sys 0.42.0", + "windows-targets 0.48.0", ] [[package]] @@ -6104,9 +6287,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.12" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f746c4065a8fa3fe23974dd82f15431cc8d40779821001404d10d2e79ca7d79" +checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" [[package]] name = "pb-rs" @@ -6126,7 +6309,7 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" dependencies = [ - "digest 0.10.7", + "digest", ] [[package]] @@ -6146,9 +6329,9 @@ dependencies = [ [[package]] name = "pem-rfc7468" -version = "0.2.3" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f22eb0e3c593294a99e9ff4b24cf6b752d43f193aa4415fe5077c159996d497" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" dependencies = [ "base64ct", ] @@ -6161,9 +6344,9 @@ checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" [[package]] name = "pest" -version = "2.5.6" +version = "2.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cbd939b234e95d72bc393d51788aec68aeeb5d51e748ca08ff3aad58cb722f7" +checksum = "1acb4a4365a13f749a93f1a094a7805e5cfa0955373a9de860d962eaa3a5fe5a" dependencies = [ "thiserror", "ucd-trie", @@ -6171,9 +6354,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.5.6" +version = "2.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a81186863f3d0a27340815be8f2078dd8050b14cd71913db9fbda795e5f707d7" +checksum = "666d00490d4ac815001da55838c500eafb0320019bbaa44444137c48b443a853" dependencies = [ "pest", "pest_generator", @@ -6181,26 +6364,26 @@ dependencies = [ [[package]] name = 
"pest_generator" -version = "2.5.6" +version = "2.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75a1ef20bf3193c15ac345acb32e26b3dc3223aff4d77ae4fc5359567683796b" +checksum = "68ca01446f50dbda87c1786af8770d535423fa8a53aec03b8f4e3d7eb10e0929" dependencies = [ "pest", "pest_meta", - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 1.0.109", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] name = "pest_meta" -version = "2.5.6" +version = "2.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e3b284b1f13a20dc5ebc90aff59a51b8d7137c221131b52a7260c08cbc1cc80" +checksum = "56af0a30af74d0445c0bf6d9d051c979b516a1a5af790d251daee76005420a48" dependencies = [ "once_cell", "pest", - "sha2 0.10.7", + "sha2", ] [[package]] @@ -6275,16 +6458,16 @@ version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 2.0.41", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] name = "pin-project-lite" -version = "0.2.13" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" +checksum = "12cc1b0bf1727a77a54b6654e7b5f1af8604923edc8b81885f8ec92f9e3f0a05" [[package]] name = "pin-utils" @@ -6306,14 +6489,12 @@ dependencies = [ [[package]] name = "pkcs8" -version = "0.7.6" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee3ef9b64d26bad0536099c816c6734379e45bbd5f14798def6809e5cc350447" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" dependencies = [ "der", - "pem-rfc7468", "spki", - "zeroize", ] [[package]] @@ -6328,6 +6509,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "989d43012e2ca1c4a02507c67282691a0a3207f9dc67cec596b43fe925b3d325" +[[package]] +name = "platforms" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3d7ddaed09e0eb771a79ab0fd64609ba0afb0a8366421957936ad14cbd13630" + [[package]] name = "plotters" version = "0.3.4" @@ -6370,6 +6557,17 @@ dependencies = [ "windows-sys 0.42.0", ] +[[package]] +name = "poly1305" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8159bd90725d2df49889a078b54f4f79e87f1f8a8444194cdca81d38f5393abf" +dependencies = [ + "cpufeatures", + "opaque-debug", + "universal-hash", +] + [[package]] name = "portable-atomic" version = "1.3.1" @@ -6410,7 +6608,7 @@ dependencies = [ "md-5", "memchr", "rand 0.8.5", - "sha2 0.10.7", + "sha2", "stringprep", ] @@ -6485,11 +6683,13 @@ dependencies = [ [[package]] name = "pretty_assertions" -version = "1.4.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66" +checksum = "a25e9bcb20aa780fd0bb16b72403a9064d6b3f22f026946029acb941a50af755" dependencies = [ + "ctor", "diff", + "output_vt100", "yansi", ] @@ -6511,7 +6711,7 @@ version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c142c0e46b57171fe0c528bee8c5b7569e80f0c17e377cd0e30ea57dbc11bb51" dependencies = [ - "proc-macro2 1.0.70", + "proc-macro2 1.0.66", "syn 1.0.109", ] @@ -6556,8 +6756,8 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", "version_check", ] @@ -6568,8 +6768,8 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "version_check", ] @@ -6596,9 +6796,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.70" +version = "1.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b" +checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" dependencies = [ "unicode-ident", ] @@ -6607,7 +6807,7 @@ dependencies = [ name = "prometheus-parser" version = "0.1.0" dependencies = [ - "indexmap 2.0.2", + "indexmap 2.0.0", "nom", "num_enum 0.6.1", "prost", @@ -6668,7 +6868,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" dependencies = [ "bytes 1.4.0", - "heck 0.4.0", + "heck 0.4.1", "itertools 0.10.5", "lazy_static", "log", @@ -6691,20 +6891,23 @@ checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" dependencies = [ "anyhow", "itertools 0.10.5", - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] [[package]] name = "prost-reflect" -version = "0.11.5" +version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b823de344848e011658ac981009100818b322421676740546f8b52ed5249428" +checksum = "000e1e05ebf7b26e1eba298e66fe4eee6eb19c567d0ffb35e0dd34231cdac4c8" dependencies = [ + "base64 0.21.2", "once_cell", "prost", "prost-types", + "serde", + "serde-value", ] [[package]] @@ -6731,8 +6934,8 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -6754,7 +6957,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6eb95b2e36b92d3e0536be87eaf7accb17db39f5a44452759b43f1328e82dc9" dependencies = [ "async-trait", - "bit-vec 0.6.3", + "bit-vec", "bytes 1.4.0", "chrono", "crc", @@ -6833,6 +7036,16 @@ dependencies = [ "serde", ] +[[package]] +name = "quick-xml" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81b9228215d82c7b61490fec1de287136b5de6f5700f6e58ea9ad61a7964ca51" +dependencies = [ + "memchr", + "serde", +] + [[package]] name = "quickcheck" version = "1.0.3" @@ -6850,8 +7063,8 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b22a693222d716a9587786f37ac3f6b4faedb5b80c23914e7303ff5a1d8016e9" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -6866,11 +7079,11 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.33" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +checksum = 
"50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965" dependencies = [ - "proc-macro2 1.0.70", + "proc-macro2 1.0.66", ] [[package]] @@ -6985,6 +7198,21 @@ dependencies = [ "rand_core 0.6.4", ] +[[package]] +name = "ratatui" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8285baa38bdc9f879d92c0e37cb562ef38aa3aeefca22b3200186bc39242d3d5" +dependencies = [ + "bitflags 2.3.2", + "cassowary", + "crossterm", + "indoc", + "paste", + "unicode-segmentation", + "unicode-width", +] + [[package]] name = "raw-cpuid" version = "10.6.0" @@ -7034,9 +7262,9 @@ dependencies = [ [[package]] name = "rdkafka" -version = "0.32.2" +version = "0.33.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8733bc5dc0b192d1a4b28073f9bff1326ad9e4fecd4d9b025d6fc358d1c3e79" +checksum = "da18026aad1c24033da3da726200de7e911e75c2e2cc2f77ffb9b4502720faae" dependencies = [ "futures-channel", "futures-util", @@ -7079,9 +7307,9 @@ dependencies = [ [[package]] name = "redis" -version = "0.23.0" +version = "0.23.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ea8c51b5dc1d8e5fd3350ec8167f464ec0995e79f2e90a075b63371500d557f" +checksum = "ffd6543a7bc6428396845f6854ccf3d1ae8823816592e2cbe74f20f50f209d02" dependencies = [ "arc-swap", "async-trait", @@ -7096,6 +7324,7 @@ dependencies = [ "ryu", "tokio", "tokio-native-tls", + "tokio-retry", "tokio-util", "url", ] @@ -7131,14 +7360,14 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.2" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" +checksum = "81bc1d4caf89fac26a70747fe603c130093b53c773888797a6329091246d651a" dependencies = [ - "aho-corasick 1.0.1", + "aho-corasick", "memchr", - "regex-automata 0.4.3", - "regex-syntax 0.8.2", + "regex-automata 0.3.6", + "regex-syntax 0.7.4", ] [[package]] @@ -7152,13 +7381,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.3" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" +checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69" dependencies = [ - "aho-corasick 1.0.1", + "aho-corasick", "memchr", - "regex-syntax 0.8.2", + "regex-syntax 0.7.4", ] [[package]] @@ -7169,15 +7398,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78" - -[[package]] -name = "regex-syntax" -version = "0.8.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" +checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" [[package]] name = "rend" @@ -7213,8 +7436,8 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls 0.21.0", - "rustls-pemfile 1.0.1", + "rustls 0.21.6", + "rustls-pemfile", "serde", "serde_json", "serde_urlencoded", @@ -7283,8 +7506,8 @@ version = "0.7.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff26ed6c7c4dfc2aa9480b86a60e3c7233543a270a680e10758a507c5a4ce476" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 
1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -7307,9 +7530,9 @@ dependencies = [ [[package]] name = "rmp-serde" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5b13be192e0220b8afb7222aa5813cb62cc269ebb5cac346ca6487681d2913e" +checksum = "bffea85eea980d8a74453e5d02a8d93028f3c34725de143085a844ebe953258a" dependencies = [ "byteorder", "rmp", @@ -7330,9 +7553,9 @@ dependencies = [ [[package]] name = "roaring" -version = "0.10.1" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef0fb5e826a8bde011ecae6a8539dd333884335c57ff0f003fbe27c25bbe8f71" +checksum = "6106b5cf8587f5834158895e9715a3c6c9716c8aefab57f1f7680917191c7873" dependencies = [ "bytemuck", "byteorder", @@ -7402,7 +7625,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.17", + "semver 1.0.18", ] [[package]] @@ -7443,19 +7666,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "rustls" -version = "0.19.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35edb675feee39aec9c99fa5ff985081995a06d594114ae14cbe797ad7b7a6d7" -dependencies = [ - "base64 0.13.1", - "log", - "ring", - "sct 0.6.1", - "webpki 0.21.4", -] - [[package]] name = "rustls" version = "0.20.7" @@ -7464,69 +7674,48 @@ checksum = "539a2bfe908f471bfa933876bd1eb6a19cf2176d375f82ef7f99530a40e48c2c" dependencies = [ "log", "ring", - "sct 0.7.0", - "webpki 0.22.0", + "sct", + "webpki", ] [[package]] name = "rustls" -version = "0.21.0" +version = "0.21.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07180898a28ed6a7f7ba2311594308f595e3dd2e3c3812fa0a80a47b45f17e5d" +checksum = "1d1feddffcfcc0b33f5c6ce9a29e341e4cd59c3f78e7ee45f4a40c038b1d6cbb" dependencies = [ "log", "ring", "rustls-webpki", - "sct 0.7.0", -] - -[[package]] -name = "rustls-native-certs" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a07b7c1885bd8ed3831c289b7870b13ef46fe0e856d288c30d9cc17d75a2092" -dependencies = [ - "openssl-probe", - "rustls 0.19.1", - "schannel", - "security-framework", -] - -[[package]] -name = "rustls-native-certs" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0167bac7a9f490495f3c33013e7722b53cb087ecbe082fb0c6387c96f634ea50" -dependencies = [ - "openssl-probe", - "rustls-pemfile 1.0.1", - "schannel", - "security-framework", + "sct", ] [[package]] -name = "rustls-pemfile" -version = "0.2.1" +name = "rustls-native-certs" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eebeaeb360c87bfb72e84abdb3447159c0eaececf1bef2aecd65a8be949d1c9" +checksum = "0167bac7a9f490495f3c33013e7722b53cb087ecbe082fb0c6387c96f634ea50" dependencies = [ - "base64 0.13.1", + "openssl-probe", + "rustls-pemfile", + "schannel", + "security-framework", ] [[package]] name = "rustls-pemfile" -version = "1.0.1" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0864aeff53f8c05aa08d86e5ef839d3dfcf07aeba2db32f12db0ef716e87bd55" +checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" dependencies = [ - "base64 0.13.1", + "base64 0.21.2", ] [[package]] name = "rustls-webpki" -version = "0.100.1" +version = "0.101.4" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "d6207cd5ed3d8dca7816f8f3725513a34609c0c765bf652b8c3cb4cfd87db46b" +checksum = "7d93931baf2d282fff8d3a532bbfd7653f734643161b87e3e01e59a04439bf0d" dependencies = [ "ring", "untrusted", @@ -7556,7 +7745,7 @@ version = "12.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "994eca4bca05c87e86e15d90fc7a91d1be64b4482b38cb2d27474568fe7c9db9" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.3.2", "cfg-if", "clipboard-win", "libc", @@ -7572,9 +7761,18 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.13" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" + +[[package]] +name = "salsa20" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" +checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213" +dependencies = [ + "cipher", +] [[package]] name = "same-file" @@ -7609,11 +7807,11 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.21" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "713cfb06c7059f3588fb8044c0fad1d09e3c01d225e25b9220dbfdcf16dbb1b3" +checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" dependencies = [ - "windows-sys 0.42.0", + "windows-sys 0.48.0", ] [[package]] @@ -7643,16 +7841,6 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8132065adcfd6e02db789d9285a0deb2f3fcb04002865ab67d5fb103533898" -[[package]] -name = "sct" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b362b83898e0e69f38515b82ee15aa80636befe47c3b6d3d89a911e78fc228ce" -dependencies = [ - "ring", - "untrusted", -] - [[package]] name = "sct" version = "0.7.0" @@ -7681,9 +7869,9 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.9.1" +version = "2.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fc758eb7bffce5b308734e9b0c1468893cae9ff70ebf13e7090be8dcbcc83a8" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" dependencies = [ "bitflags 1.3.2", "core-foundation", @@ -7713,9 +7901,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.17" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" +checksum = "b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918" dependencies = [ "serde", ] @@ -7741,7 +7929,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a78072b550e5c20bc4a9d1384be28809cbdb7b25b2b4707ddc6d908b7e6de3bf" dependencies = [ - "toml 0.7.8", + "toml 0.7.6", ] [[package]] @@ -7765,23 +7953,11 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "serde-xml-rs" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb3aa78ecda1ebc9ec9847d5d3aba7d618823446a049ba2491940506da6e2782" -dependencies = [ - "log", - "serde", - "thiserror", - "xml-rs", -] - [[package]] name = "serde_bytes" -version = "0.11.9" +version = "0.11.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "416bda436f9aab92e02c8e10d49a15ddd339cea90b6e340fe51ed97abb548294" +checksum = 
"ab33ec92f677585af6d88c65593ae2375adde54efdbf16d597f2cbc7a6d368ff" dependencies = [ "serde", ] @@ -7792,29 +7968,29 @@ version = "1.0.193" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 2.0.41", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] name = "serde_derive_internals" -version = "0.26.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85bf8229e7920a9f636479437026331ce11aa132b4dde37d121944a44d6e5f3c" +checksum = "e578a843d40b4189a4d66bba51d7684f57da5bd7c304c64e14bd63efbef49509" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 1.0.109", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] name = "serde_json" -version = "1.0.108" +version = "1.0.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b" +checksum = "076066c5f1078eac5b722a31827a8832fe108bed65dfa75e233c89f8206e976c" dependencies = [ - "indexmap 2.0.2", + "indexmap 2.0.0", "itoa", "ryu", "serde", @@ -7822,9 +7998,9 @@ dependencies = [ [[package]] name = "serde_nanos" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e44969a61f5d316be20a42ff97816efb3b407a924d06824c3d8a49fa8450de0e" +checksum = "8ae801b7733ca8d6a2b580debe99f67f36826a0f5b8a36055dc6bc40f8d6bc71" dependencies = [ "serde", ] @@ -7838,15 +8014,6 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_plain" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce1fc6db65a611022b23a0dec6975d63fb80a302cb3388835ff02c097258d50" -dependencies = [ - "serde", -] - [[package]] name = "serde_qs" version = "0.8.5" @@ -7860,20 +8027,20 @@ dependencies = [ [[package]] name = "serde_repr" -version = "0.1.9" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fe39d9fbb0ebf5eb2c7cb7e2a47e4f462fad1379f1166b8ae49ad9eae89a7ca" +checksum = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 1.0.109", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] name = "serde_spanned" -version = "0.6.4" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12022b835073e5b11e90a14f86838ceb1c8fb0325b72416845c487ac0fa95e80" +checksum = "96426c9936fd7a0124915f9185ea1d20aa9445cc9821142f0a73bc9207a2e186" dependencies = [ "serde", ] @@ -7912,7 +8079,23 @@ dependencies = [ "indexmap 1.9.3", "serde", "serde_json", - "serde_with_macros 2.3.2", + "time", +] + +[[package]] +name = "serde_with" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1402f54f9a3b9e2efe71c1cea24e648acce55887983553eeb858cf3115acfd49" +dependencies = [ + "base64 0.21.2", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.0.0", + "serde", + "serde_json", + "serde_with_macros 3.2.0", "time", ] @@ -7923,21 +8106,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" dependencies = [ "darling 0.13.4", - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] [[package]] name = "serde_with_macros" 
-version = "2.3.2" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "859011bddcc11f289f07f467cc1fe01c7a941daa4d8f6c40d4d1c92eb6d9319c" +checksum = "9197f1ad0e3c173a0222d3c4404fb04c3afe87e962bcb327af73e8301fa203c7" dependencies = [ - "darling 0.14.2", - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 1.0.109", + "darling 0.20.3", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] @@ -7954,11 +8137,11 @@ dependencies = [ [[package]] name = "serde_yaml" -version = "0.9.27" +version = "0.9.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cc7a1570e38322cfe4154732e5110f887ea57e22b76f4bfd32b5bdd3368666c" +checksum = "1a49e178e4452f45cb61d0cd8cebc1b0fafd3e41929e996cef79aa3aca91f574" dependencies = [ - "indexmap 2.0.2", + "indexmap 2.0.0", "itoa", "ryu", "serde", @@ -7985,8 +8168,8 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "079a83df15f85d89a68d64ae1238f142f172b1fa915d0d76b26a7cba1b659a69" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -7998,7 +8181,7 @@ checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c" dependencies = [ "cfg-if", "cpufeatures", - "digest 0.10.7", + "digest", ] [[package]] @@ -8009,20 +8192,7 @@ checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" dependencies = [ "cfg-if", "cpufeatures", - "digest 0.10.7", -] - -[[package]] -name = "sha2" -version = "0.9.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" -dependencies = [ - "block-buffer 0.9.0", - "cfg-if", - "cpufeatures", - "digest 0.9.0", - "opaque-debug", + "digest", ] [[package]] @@ -8033,7 +8203,7 @@ checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" dependencies = [ "cfg-if", "cpufeatures", - "digest 0.10.7", + "digest", ] [[package]] @@ -8042,7 +8212,7 @@ version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bdf0c33fae925bdc080598b84bc15c55e7b9a4a43b3c704da051f977469691c9" dependencies = [ - "digest 0.10.7", + "digest", "keccak", ] @@ -8097,9 +8267,9 @@ dependencies = [ [[package]] name = "signatory" -version = "0.23.2" +version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfecc059e81632eef1dd9b79e22fc28b8fe69b30d3357512a77a0ad8ee3c782" +checksum = "c1e303f8205714074f6068773f0e29527e0453937fe837c9717d066635b65f31" dependencies = [ "pkcs8", "rand_core 0.6.4", @@ -8109,9 +8279,12 @@ dependencies = [ [[package]] name = "signature" -version = "1.6.4" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" +checksum = "8fe458c98333f9c8152221191a77e2a44e8325d0193484af2e9421a53019e57d" +dependencies = [ + "digest", +] [[package]] name = "simd-abstraction" @@ -8186,9 +8359,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" +checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" dependencies = [ "serde", ] @@ -8229,9 +8402,9 @@ dependencies = [ [[package]] name = "snafu" -version = "0.7.4" +version = "0.7.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb0656e7e3ffb70f6c39b3c2a86332bb74aa3c679da781642590f3c1118c5045" +checksum = "e4de37ad025c587a29e8f3f5605c00f70b98715ef90b9061a815b9e59e9042d6" dependencies = [ "doc-comment", "futures-core", @@ -8241,13 +8414,13 @@ dependencies = [ [[package]] name = "snafu-derive" -version = "0.7.4" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "475b3bbe5245c26f2d8a6f62d67c1f30eb9fffeccee721c45d162c3ebbdf81b2" +checksum = "990079665f075b699031e9c08fd3ab99be5029b96f3b78dc0709e8f77e4efebf" dependencies = [ - "heck 0.4.0", - "proc-macro2 1.0.70", - "quote 1.0.33", + "heck 0.4.1", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -8294,10 +8467,11 @@ dependencies = [ [[package]] name = "spki" -version = "0.4.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c01a0c15da1b0b0e1494112e7af814a678fec9bd157881b49beac661e9b6f32" +checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a" dependencies = [ + "base64ct", "der", ] @@ -8353,7 +8527,16 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "011cbb39cf7c1f62871aea3cc46e5817b0937b49e9447370c93cacbe93a766d8" dependencies = [ - "vte", + "vte 0.10.1", +] + +[[package]] +name = "strip-ansi-escapes" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55ff8ef943b384c414f54aefa961dd2bd853add74ec75e7ac74cf91dba62bcfa" +dependencies = [ + "vte 0.11.1", ] [[package]] @@ -8387,28 +8570,31 @@ checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" dependencies = [ "heck 0.3.3", "proc-macro-error", - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] [[package]] name = "strum" -version = "0.24.1" +version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" +checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" +dependencies = [ + "strum_macros", +] [[package]] name = "strum_macros" -version = "0.24.3" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" +checksum = "6069ca09d878a33f883cc06aaa9718ede171841d3832450354410b718b097232" dependencies = [ - "heck 0.4.0", - "proc-macro2 1.0.70", - "quote 1.0.33", + "heck 0.4.1", + "proc-macro2 1.0.66", + "quote 1.0.32", "rustversion", - "syn 1.0.109", + "syn 2.0.28", ] [[package]] @@ -8453,19 +8639,19 @@ version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "unicode-ident", ] [[package]] name = "syn" -version = "2.0.41" +version = "2.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44c8b28c477cc3bf0e7966561e3460130e1255f7a1cf71931075f1c5e7a7e269" +checksum = "04361975b3f5e348b2189d8dc55bc942f278b2d482a6a0365de5bdd62d351567" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "unicode-ident", ] @@ -8475,18 +8661,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"20518fe4a4c9acf048008599e464deb21beeae3d3578418951a189c235a7a9a8" -[[package]] -name = "synstructure" -version = "0.12.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" -dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 1.0.109", - "unicode-xid 0.2.4", -] - [[package]] name = "syslog" version = "6.1.0" @@ -8502,9 +8676,9 @@ dependencies = [ [[package]] name = "syslog_loose" -version = "0.18.0" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97fb75f176928530867b2a659e470f9c9ff71904695bab6556f7ad30f9039efd" +checksum = "acf5252d1adec0a489a0225f867c1a7fd445e41674530a396d0629cff0c4b211" dependencies = [ "chrono", "nom", @@ -8636,22 +8810,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.40" +version = "1.0.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" +checksum = "611040a08a0439f8248d1990b111c95baa9c704c805fa1f62104b39655fd7f90" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.40" +version = "1.0.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" +checksum = "090198534930841fab3a5d1bb637cde49e339654e606195f8d9c76eeb081dc96" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 2.0.41", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] @@ -8676,9 +8850,9 @@ dependencies = [ [[package]] name = "tikv-jemallocator" -version = "0.5.0" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20612db8a13a6c06d57ec83953694185a367e16945f66565e8028d2c0bd76979" +checksum = "965fe0c26be5c56c94e38ba547249074803efd52adfb66de62107d95aab3eaca" dependencies = [ "libc", "tikv-jemalloc-sys", @@ -8686,9 +8860,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.17" +version = "0.3.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376" +checksum = "59e399c068f43a5d116fedaf73b203fa4f9c519f17e2b34f63221d3792f81446" dependencies = [ "itoa", "libc", @@ -8700,15 +8874,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd" +checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" [[package]] name = "time-macros" -version = "0.2.6" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2" +checksum = "96ba15a897f3c86766b757e5ac7221554c6750054d74d5b28844fce5fb36a6c4" dependencies = [ "time-core", ] @@ -8749,9 +8923,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.33.0" +version = "1.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f38200e3ef7995e5ef13baec2f432a6da0aa9ac495b2c0e8f3b7eec2c92d653" +checksum = "2d3ce25f50619af8b0aec2eb23deebe84249e19e2ddd393a6e16e3300a6dadfd" dependencies = [ "backtrace", "bytes 1.4.0", @@ -8794,16 +8968,16 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 2.0.41", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] name = "tokio-native-tls" -version = "0.3.1" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +checksum = "f7d995660bd2b7f8c1568414c1126076c13fbb725c40112dc0120b78eb9b717b" dependencies = [ "native-tls", "tokio", @@ -8845,6 +9019,17 @@ dependencies = [ "tokio-util", ] +[[package]] +name = "tokio-retry" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f57eb36ecbe0fc510036adff84824dd3c24bb781e21bfa67b69d556aa85214f" +dependencies = [ + "pin-project", + "rand 0.8.5", + "tokio", +] + [[package]] name = "tokio-rustls" version = "0.23.4" @@ -8853,7 +9038,7 @@ checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" dependencies = [ "rustls 0.20.7", "tokio", - "webpki 0.22.0", + "webpki", ] [[package]] @@ -8862,7 +9047,7 @@ version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e0d409377ff5b1e3ca6437aa86c1eb7d40c134bfec254e44c830defa92669db5" dependencies = [ - "rustls 0.21.0", + "rustls 0.21.6", "tokio", ] @@ -8903,19 +9088,6 @@ dependencies = [ "tungstenite 0.18.0", ] -[[package]] -name = "tokio-tungstenite" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec509ac96e9a0c43427c74f003127d953a265737636129424288d27cb5c4b12c" -dependencies = [ - "futures-util", - "log", - "rustls 0.21.0", - "tokio", - "tungstenite 0.19.0", -] - [[package]] name = "tokio-tungstenite" version = "0.20.1" @@ -8924,14 +9096,15 @@ checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" dependencies = [ "futures-util", "log", + "rustls 0.21.6", "tokio", "tungstenite 0.20.1", ] [[package]] name = "tokio-util" -version = "0.7.4" -source = "git+https://github.com/vectordotdev/tokio?branch=tokio-util-0.7.4-framed-read-continue-on-error#53a17f257b599a9d18bd75249de98d0b6fc28cfa" +version = "0.7.8" +source = "git+https://github.com/vectordotdev/tokio?branch=tokio-util-0.7.8-framed-read-continue-on-error#3747655f8f0443e13fe20da3f613ea65c23347c2" dependencies = [ "bytes 1.4.0", "futures-core", @@ -8954,9 +9127,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.7.8" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" +checksum = "c17e963a819c331dcacd7ab957d80bc2b9a9c1e71c804826d2f283dd65306542" dependencies = [ "serde", "serde_spanned", @@ -8966,20 +9139,20 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.5" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" +checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" dependencies = [ "serde", ] [[package]] name = "toml_edit" -version = "0.19.15" +version = "0.19.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" +checksum = "c500344a19072298cd05a7224b3c0c629348b78692bf48466c5238656e315a78" dependencies = [ - "indexmap 2.0.2", + "indexmap 2.0.0", "serde", "serde_spanned", "toml_datetime", @@ -9008,8 
+9181,8 @@ dependencies = [ "percent-encoding", "pin-project", "prost", - "rustls-native-certs 0.6.2", - "rustls-pemfile 1.0.1", + "rustls-native-certs", + "rustls-pemfile", "tokio", "tokio-rustls 0.24.0", "tokio-stream", @@ -9026,9 +9199,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a6fdaae4c2c638bb70fe42803a26fbd6fc6ac8c72f5c59f67ecc2a2dcabf4b07" dependencies = [ "prettyplease", - "proc-macro2 1.0.70", + "proc-macro2 1.0.66", "prost-build", - "quote 1.0.33", + "quote 1.0.32", "syn 1.0.109", ] @@ -9054,13 +9227,13 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.4.4" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140" +checksum = "55ae70283aba8d2a8b411c695c437fe25b8b5e44e23e780662002fc72fb47a82" dependencies = [ "async-compression", "base64 0.21.2", - "bitflags 2.4.1", + "bitflags 2.3.2", "bytes 1.4.0", "futures-core", "futures-util", @@ -9131,8 +9304,8 @@ version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -9341,19 +9514,6 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" -[[package]] -name = "tui" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccdd26cbd674007e649a272da4475fb666d3aa0ad0531da7136db6fab0e5bad1" -dependencies = [ - "bitflags 1.3.2", - "cassowary", - "crossterm 0.25.0", - "unicode-segmentation", - "unicode-width", -] - [[package]] name = "tungstenite" version = "0.18.0" @@ -9373,25 +9533,6 @@ dependencies = [ "utf-8", ] -[[package]] -name = "tungstenite" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15fba1a6d6bb030745759a9a2a588bfe8490fc8b4751a277db3a0be1c9ebbf67" -dependencies = [ - "byteorder", - "bytes 1.4.0", - "data-encoding", - "http", - "httparse", - "log", - "rand 0.8.5", - "sha1", - "thiserror", - "url", - "utf-8", -] - [[package]] name = "tungstenite" version = "0.20.1" @@ -9427,8 +9568,19 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89851716b67b937e393b3daa8423e67ddfc4bbbf1654bcf05488e95e0828db0c" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 1.0.109", +] + +[[package]] +name = "typed-builder" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64cba322cb9b7bc6ca048de49e83918223f35e7a86311267013afff257004870" +dependencies = [ + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -9440,9 +9592,9 @@ checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" [[package]] name = "typetag" -version = "0.2.8" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6898cc6f6a32698cc3e14d5632a14d2b23ed9f7b11e6b8e05ce685990acc22" +checksum = "aec6850cc671cd0cfb3ab285465e48a3b927d9de155051c35797446b32f9169f" dependencies = [ "erased-serde", "inventory", @@ -9453,20 +9605,20 @@ dependencies = [ [[package]] name = "typetag-impl" -version = "0.2.8" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2c3e1c30cedd24fc597f7d37a721efdbdc2b1acae012c1ef1218f4c7c2c0f3e7" +checksum = "30c49a6815b4f8379c36f06618bc1b80ca77aaf8a3fd4d8549dca6fdb016000f" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 2.0.41", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", ] [[package]] name = "uaparser" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3d705ae455d32248d299de9af5316a79ce9dc502c0b533aaeaf5f1c2fc02cc5" +checksum = "cf694e7b0434d4fad6c879e984e8fdc3a62f5533c3d421762244f9e9d03f6927" dependencies = [ "derive_more", "lazy_static", @@ -9542,6 +9694,16 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" +[[package]] +name = "universal-hash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" +dependencies = [ + "crypto-common", + "subtle", +] + [[package]] name = "unreachable" version = "1.0.0" @@ -9606,15 +9768,15 @@ checksum = "5190c9442dcdaf0ddd50f37420417d219ae5261bbf5db120d0f9bab996c9cba1" [[package]] name = "utf8parse" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "936e4b492acfd135421d8dca4b1aa80a7bfc26e702ef3af710e0752684df5372" +checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "uuid" -version = "1.6.1" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e395fcf16a7a3d8127ec99782007af141946b4795001f876d54fb0d55978560" +checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" dependencies = [ "getrandom 0.2.10", "md-5", @@ -9643,7 +9805,7 @@ dependencies = [ "atty", "cached", "chrono", - "clap 4.1.14", + "clap 4.3.21", "clap-verbosity-flag", "clap_complete", "confy", @@ -9651,7 +9813,7 @@ dependencies = [ "dunce", "glob", "hex", - "indexmap 2.0.2", + "indexmap 2.0.0", "indicatif", "itertools 0.11.0", "log", @@ -9663,10 +9825,10 @@ dependencies = [ "reqwest", "serde", "serde_json", - "serde_yaml 0.9.27", - "sha2 0.10.7", + "serde_yaml 0.9.25", + "sha2", "tempfile", - "toml 0.7.8", + "toml 0.7.6", ] [[package]] @@ -9677,7 +9839,7 @@ checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" [[package]] name = "vector" -version = "0.31.0" +version = "0.32.2" dependencies = [ "apache-avro", "approx", @@ -9686,8 +9848,9 @@ dependencies = [ "assay", "assert_cmd", "async-compression", - "async-graphql", + "async-graphql 6.0.0", "async-graphql-warp", + "async-nats", "async-stream", "async-trait", "atty", @@ -9714,18 +9877,18 @@ dependencies = [ "azure_storage_blobs", "base64 0.21.2", "blake2", - "bloom", + "bloomy", "bollard", "bytes 1.4.0", "bytesize", "chrono", "cidr-utils", - "clap 4.1.14", + "clap 4.3.21", "codecs", "colored", "console-subscriber", "criterion", - "crossterm 0.26.1", + "crossterm", "csv", "deadpool-postgres", "derivative", @@ -9745,10 +9908,11 @@ dependencies = [ "glob", "goauth", "governor", + "greptimedb-client", "grok", "h2", "hash_hasher", - "hashbrown 0.14.3", + "hashbrown 0.14.0", "headers", "heim", "hex", @@ -9759,9 +9923,9 @@ dependencies = [ "hyper", "hyper-openssl", "hyper-proxy", - "indexmap 2.0.2", + "indexmap 2.0.0", "indoc", - "infer 0.14.0", + "infer 0.15.0", "inventory", "itertools 0.11.0", "k8s-openapi 0.18.0", @@ -9780,9 +9944,8 @@ dependencies = [ 
"mockall", "moka", "mongodb", - "nats", "nix 0.26.2", - "nkeys 0.3.0", + "nkeys", "nom", "notify", "num", @@ -9810,6 +9973,7 @@ dependencies = [ "quickcheck", "rand 0.8.5", "rand_distr", + "ratatui", "rdkafka", "redis", "regex", @@ -9818,15 +9982,15 @@ dependencies = [ "rmpv", "roaring", "seahash", - "semver 1.0.17", + "semver 1.0.18", "serde", "serde-toml-merge", "serde_bytes", "serde_json", - "serde_with 2.3.2", - "serde_yaml 0.9.27", + "serde_with 3.2.0", + "serde_yaml 0.9.25", "serial_test", - "sha2 0.10.7", + "sha2", "similar-asserts", "smallvec", "smpl_jwt", @@ -9834,7 +9998,7 @@ dependencies = [ "snap", "socket2 0.5.3", "stream-cancel", - "strip-ansi-escapes", + "strip-ansi-escapes 0.2.0", "syslog", "temp-env", "tempfile", @@ -9847,7 +10011,7 @@ dependencies = [ "tokio-test", "tokio-tungstenite 0.20.1", "tokio-util", - "toml 0.7.8", + "toml 0.7.6", "tonic", "tonic-build", "tower", @@ -9860,7 +10024,6 @@ dependencies = [ "tracing-subscriber", "tracing-tower", "trust-dns-proto 0.22.0", - "tui", "typetag", "url", "urlencoding", @@ -9888,7 +10051,7 @@ dependencies = [ "anyhow", "async-trait", "chrono", - "clap 4.1.14", + "clap 4.3.21", "futures 0.3.28", "graphql_client", "indoc", @@ -9897,7 +10060,7 @@ dependencies = [ "serde_json", "tokio", "tokio-stream", - "tokio-tungstenite 0.19.0", + "tokio-tungstenite 0.20.1", "url", "uuid", ] @@ -9911,7 +10074,7 @@ dependencies = [ "async-trait", "bytecheck", "bytes 1.4.0", - "clap 4.1.14", + "clap 4.3.21", "crc32fast", "criterion", "crossbeam-queue", @@ -9931,7 +10094,7 @@ dependencies = [ "rand 0.8.5", "rkyv", "serde", - "serde_yaml 0.9.27", + "serde_yaml 0.9.25", "snafu", "temp-dir", "tokio", @@ -9957,7 +10120,7 @@ dependencies = [ "crossbeam-utils", "derivative", "futures 0.3.28", - "indexmap 2.0.2", + "indexmap 2.0.0", "metrics", "nom", "ordered-float 3.7.0", @@ -9987,16 +10150,16 @@ dependencies = [ "chrono", "chrono-tz", "encoding_rs", - "indexmap 2.0.2", + "indexmap 2.0.0", "inventory", "no-proxy", "num-traits", "once_cell", "serde", "serde_json", - "serde_with 2.3.2", + "serde_with 3.2.0", "snafu", - "toml 0.7.8", + "toml 0.7.6", "tracing 0.1.37", "url", "vector-config-common", @@ -10009,13 +10172,13 @@ name = "vector-config-common" version = "0.1.0" dependencies = [ "convert_case 0.6.0", - "darling 0.13.4", + "darling 0.20.3", "once_cell", - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "serde", "serde_json", - "syn 1.0.109", + "syn 2.0.28", "tracing 0.1.37", ] @@ -10023,12 +10186,12 @@ dependencies = [ name = "vector-config-macros" version = "0.1.0" dependencies = [ - "darling 0.13.4", - "proc-macro2 1.0.70", - "quote 1.0.33", + "darling 0.20.3", + "proc-macro2 1.0.66", + "quote 1.0.32", "serde", "serde_derive_internals", - "syn 1.0.109", + "syn 2.0.28", "vector-config", "vector-config-common", ] @@ -10037,7 +10200,7 @@ dependencies = [ name = "vector-core" version = "0.1.0" dependencies = [ - "async-graphql", + "async-graphql 5.0.10", "async-trait", "base64 0.21.2", "bitmask-enum", @@ -10059,7 +10222,7 @@ dependencies = [ "http", "httptest", "hyper-proxy", - "indexmap 2.0.2", + "indexmap 2.0.0", "metrics", "metrics-tracing-context", "metrics-util", @@ -10090,7 +10253,7 @@ dependencies = [ "security-framework", "serde", "serde_json", - "serde_with 2.3.2", + "serde_with 3.2.0", "similar-asserts", "smallvec", "snafu", @@ -10101,7 +10264,7 @@ dependencies = [ "tokio-stream", "tokio-test", "tokio-util", - "toml 0.7.8", + "toml 0.7.6", "tonic", "tower", "tracing 0.1.37", @@ -10136,7 +10299,7 @@ 
dependencies = [ name = "vector-vrl-cli" version = "0.1.0" dependencies = [ - "clap 4.1.14", + "clap 4.3.21", "vector-vrl-functions", "vrl", ] @@ -10155,7 +10318,7 @@ dependencies = [ "ansi_term", "chrono", "chrono-tz", - "clap 4.1.14", + "clap 4.3.21", "enrichment", "glob", "prettydiff", @@ -10196,8 +10359,8 @@ checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" [[package]] name = "vrl" -version = "0.5.0" -source = "git+ssh://git@github.com/answerbook/vrl.git?rev=v0.7.0#2457db0764e0bac16e4da3e41b032507dc509d62" +version = "0.6.0" +source = "git+ssh://git@github.com/answerbook/vrl.git?rev=v0.8.0#e729f555a66908108ecc2d9c3ea0423cc578c276" dependencies = [ "aes", "ansi_term", @@ -10209,12 +10372,14 @@ dependencies = [ "cbc", "cfb-mode", "cfg-if", + "chacha20poly1305", "charset", "chrono", "chrono-tz", "cidr-utils", - "clap 4.1.14", + "clap 4.3.21", "codespan-reporting", + "crypto_secretbox", "csv", "ctr", "data-encoding", @@ -10226,7 +10391,7 @@ dependencies = [ "hex", "hmac", "hostname", - "indexmap 2.0.2", + "indexmap 2.0.0", "indoc", "itertools 0.11.0", "lalrpop", @@ -10255,10 +10420,10 @@ dependencies = [ "serde", "serde_json", "sha-1", - "sha2 0.10.7", + "sha2", "sha3", "snafu", - "strip-ansi-escapes", + "strip-ansi-escapes 0.1.1", "substring", "syslog_loose", "termcolor", @@ -10284,14 +10449,24 @@ dependencies = [ "vte_generate_state_changes", ] +[[package]] +name = "vte" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5022b5fbf9407086c180e9557be968742d839e68346af7792b8592489732197" +dependencies = [ + "utf8parse", + "vte_generate_state_changes", +] + [[package]] name = "vte_generate_state_changes" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d257817081c7dffcdbab24b9e62d2def62e2ff7d00b1c20062551e6cccc145ff" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", ] [[package]] @@ -10347,7 +10522,7 @@ dependencies = [ "mime_guess", "percent-encoding", "pin-project", - "rustls-pemfile 1.0.1", + "rustls-pemfile", "scoped-tls", "serde", "serde_json", @@ -10391,9 +10566,9 @@ dependencies = [ "bumpalo", "log", "once_cell", - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 2.0.41", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", "wasm-bindgen-shared", ] @@ -10415,7 +10590,7 @@ version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ - "quote 1.0.33", + "quote 1.0.32", "wasm-bindgen-macro-support", ] @@ -10425,9 +10600,9 @@ version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 2.0.41", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.28", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -10478,16 +10653,6 @@ dependencies = [ "web-sys", ] -[[package]] -name = "webpki" -version = "0.21.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8e38c0608262c46d4a56202ebabdeb094cef7e560ca7a226c6bf055188aa4ea" -dependencies = [ - "ring", - "untrusted", -] - [[package]] name = "webpki" version = "0.22.0" @@ -10504,7 +10669,7 @@ version = "0.22.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "368bfe657969fb01238bb756d351dcade285e0f6fcbd36dcb23359a5169975be" dependencies = 
[ - "webpki 0.22.0", + "webpki", ] [[package]] @@ -10736,9 +10901,9 @@ checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" [[package]] name = "winnow" -version = "0.5.28" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c830786f7720c2fd27a1a0e27a709dbd3c4d009b56d098fc742d4f4eab91fe2" +checksum = "61de7bac303dc551fe038e2b3cef0f571087a47571ea6e79a87692ac99b99699" dependencies = [ "memchr", ] @@ -10793,12 +10958,6 @@ dependencies = [ "tap", ] -[[package]] -name = "xml-rs" -version = "0.8.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52839dc911083a8ef63efa4d039d1f58b5e409f923e44c80828f206f66e5541c" - [[package]] name = "xmlparser" version = "0.13.5" @@ -10827,16 +10986,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "332f188cc1bcf1fe1064b8c58d150f497e697f49774aa846f2dc949d9a25f236" dependencies = [ "byteorder", - "zerocopy-derive 0.3.2", -] - -[[package]] -name = "zerocopy" -version = "0.7.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c4061bedbb353041c12f413700357bec76df2c7e2ca8e4df8bac24c6bf68e3d" -dependencies = [ - "zerocopy-derive 0.7.31", + "zerocopy-derive", ] [[package]] @@ -10845,42 +10995,16 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6505e6815af7de1746a08f69c69606bb45695a17149517680f3b2149713b19a3" dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] -[[package]] -name = "zerocopy-derive" -version = "0.7.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3c129550b3e6de3fd0ba67ba5c81818f9805e58b8d7fee80a3a59d2c9fc601a" -dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 2.0.41", -] - [[package]] name = "zeroize" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" -dependencies = [ - "zeroize_derive", -] - -[[package]] -name = "zeroize_derive" -version = "1.3.2" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f8f187641dad4f680d25c4bfc4225b418165984179f26ca76ec4fb6441d3a17" -dependencies = [ - "proc-macro2 1.0.70", - "quote 1.0.33", - "syn 1.0.109", - "synstructure", -] +checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f" [[package]] name = "zstd" diff --git a/Cargo.toml b/Cargo.toml index c45bbfb12a14c..6c6453f6831b3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "vector" -version = "0.31.0" +version = "0.32.2" authors = ["Vector Contributors "] edition = "2021" description = "A lightweight and ultra-fast tool for building observability pipelines" @@ -57,7 +57,10 @@ assets = [ ["config/vector.toml", "/etc/vector/vector.toml", "644"], ["config/examples/*", "/etc/vector/examples/", "644"], ["distribution/systemd/vector.service", "/lib/systemd/system/vector.service", "644"], - ["distribution/systemd/vector.default", "/etc/default/vector", "600"] + ["distribution/systemd/vector.default", "/etc/default/vector", "600"], + ["licenses/*", "/usr/share/vector/licenses/", "644"], + ["NOTICE", "/usr/share/vector/NOTICE", "644"], + ["LICENSE-3rdparty.csv", "/usr/share/vector/LICENSE-3rdparty.csv", "644"], ] license-file = ["target/debian-license.txt"] extended-description-file = "target/debian-extended-description.txt" @@ -118,7 +121,7 @@ 
members = [ ] [workspace.dependencies] -vrl = { git = "ssh://git@github.com/answerbook/vrl.git", rev = "v0.7.0", features = ["cli", "test", "test_framework", "arbitrary"] } +vrl = { git = "ssh://git@github.com/answerbook/vrl.git", rev = "v0.8.0", features = ["cli", "test", "test_framework", "arbitrary"] } [dependencies] vrl.workspace = true @@ -146,13 +149,13 @@ loki-logproto = { path = "lib/loki-logproto", optional = true } # Tokio / Futures async-stream = { version = "0.3.5", default-features = false } -async-trait = { version = "0.1.68", default-features = false } +async-trait = { version = "0.1.73", default-features = false } futures = { version = "0.3.28", default-features = false, features = ["compat", "io-compat"], package = "futures" } -tokio = { version = "1.29.0", default-features = false, features = ["full"] } +tokio = { version = "1.30.0", default-features = false, features = ["full"] } tokio-openssl = { version = "0.6.3", default-features = false } tokio-stream = { version = "0.1.14", default-features = false, features = ["net", "sync", "time"] } tokio-util = { version = "0.7", default-features = false, features = ["io", "time"] } -console-subscriber = { version = "0.1.9", default-features = false, optional = true } +console-subscriber = { version = "0.1.10", default-features = false, optional = true } # Tracing tracing = { version = "0.1.34", default-features = false } @@ -162,7 +165,7 @@ tracing-subscriber = { version = "0.3.17", default-features = false, features = tracing-tower = { git = "https://github.com/tokio-rs/tracing", default-features = false, rev = "e0642d949891546a3bb7e47080365ee7274f05cd" } # Metrics -metrics = "0.21.0" +metrics = "0.21.1" metrics-tracing-context = { version = "0.14.0", default-features = false } # AWS - Official SDK @@ -185,27 +188,27 @@ aws-smithy-http-tower = { git = "https://github.com/vectordotdev/aws-sdk-rust", aws-smithy-types = { git = "https://github.com/vectordotdev/aws-sdk-rust", rev = "3d6aefb7fcfced5fc2a7e761a87e4ddbda1ee670", default-features = false, optional = true } # Azure -azure_core = { git = "https://github.com/Azure/azure-sdk-for-rust.git", rev = "b4544d4920fa3064eb921340054cd9cc130b7664", default-features = false, features = ["enable_reqwest"], optional = true } -azure_identity = { git = "https://github.com/Azure/azure-sdk-for-rust.git", rev = "b4544d4920fa3064eb921340054cd9cc130b7664", default-features = false, features = ["enable_reqwest"], optional = true } -azure_storage = { git = "https://github.com/Azure/azure-sdk-for-rust.git", rev = "b4544d4920fa3064eb921340054cd9cc130b7664", default-features = false, optional = true } -azure_storage_blobs = { git = "https://github.com/Azure/azure-sdk-for-rust.git", rev = "b4544d4920fa3064eb921340054cd9cc130b7664", default-features = false, optional = true } +azure_core = { version = "0.13", default-features = false, features = ["enable_reqwest"], optional = true } +azure_identity = { version = "0.13", default-features = false, features = ["enable_reqwest"], optional = true } +azure_storage = { version = "0.13", default-features = false, optional = true } +azure_storage_blobs = { version = "0.13", default-features = false, optional = true } # OpenDAL opendal = {version = "0.38", default-features = false, features = ["native-tls", "services-webhdfs"], optional = true} # Tower tower = { version = "0.4.13", default-features = false, features = ["buffer", "limit", "retry", "timeout", "util", "balance", "discover"] } -tower-http = { version = "0.4.1", default-features = false, features 
= ["decompression-gzip"]} +tower-http = { version = "0.4.3", default-features = false, features = ["decompression-gzip"]} # Serde -serde = { version = "1.0.164", default-features = false, features = ["derive"] } +serde = { version = "1.0.183", default-features = false, features = ["derive"] } serde-toml-merge = { version = "0.3.0", default-features = false } -serde_bytes = { version = "0.11.9", default-features = false, features = ["std"], optional = true } -serde_json = { version = "1.0.99", default-features = false, features = ["raw_value"] } -serde_with = { version = "2.3.2", default-features = false, features = ["macros", "std"] } -serde_yaml = { version = "0.9.22", default-features = false } +serde_bytes = { version = "0.11.12", default-features = false, features = ["std"], optional = true } +serde_json = { version = "1.0.104", default-features = false, features = ["raw_value"] } +serde_with = { version = "3.2.0", default-features = false, features = ["macros", "std"] } +serde_yaml = { version = "0.9.25", default-features = false } # Messagepack -rmp-serde = { version = "1.1.1", default-features = false, optional = true } +rmp-serde = { version = "1.1.2", default-features = false, optional = true } rmpv = { version = "1.0.0", default-features = false, features = ["with-serde"], optional = true } # Prost / Protocol Buffers @@ -219,18 +222,18 @@ smpl_jwt = { version = "0.7.1", default-features = false, optional = true } # AMQP # Mezmo: we have enabled openssl instead of native-tls (upstream) LOG-16435 -lapin = { version = "2.1.1", default-features = false, features = ["openssl"], optional = true } +lapin = { version = "2.3.1", default-features = false, features = ["openssl"], optional = true } # API -async-graphql = { version = "5.0.10", default-features = false, optional = true, features = ["chrono"] } -async-graphql-warp = { version = "5.0.10", default-features = false, optional = true } +async-graphql = { version = "6.0.0", default-features = false, optional = true, features = ["chrono", "playground"] } +async-graphql-warp = { version = "6.0.0", default-features = false, optional = true } itertools = { version = "0.11.0", default-features = false, optional = true } # API client crossterm = { version = "0.26.1", default-features = false, features = ["event-stream"], optional = true } num-format = { version = "0.4.4", default-features = false, features = ["with-num-bigint"], optional = true } number_prefix = { version = "0.4.0", default-features = false, features = ["std"], optional = true } -tui = { version = "0.19.0", optional = true, default-features = false, features = ["crossterm"] } +ratatui = { version = "0.22.0", optional = true, default-features = false, features = ["crossterm"] } # Datadog Pipelines # datadog-filter = { package = "datadog-filter", git = "ssh://git@github.com/answerbook/vrl.git", rev = "next" } @@ -239,33 +242,36 @@ tui = { version = "0.19.0", optional = true, default-features = false, features hex = { version = "0.4.3", default-features = false} sha2 = { version = "0.10.7", default-features = false} +# GreptimeDB +greptimedb-client = { git = "https://github.com/GreptimeTeam/greptimedb-client-rust.git", rev = "bc32362adf0df17a41a95bae4221d6d8f1775656", optional = true } + # External libs arc-swap = { version = "1.6", default-features = false, optional = true } -async-compression = { version = "0.4.0", default-features = false, features = ["tokio", "gzip", "zstd"], optional = true } -apache-avro = { version = "0.14.0", default-features = false, optional = true 
} -axum = { version = "0.6.18", default-features = false } +async-compression = { version = "0.4.1", default-features = false, features = ["tokio", "gzip", "zstd"], optional = true } +apache-avro = { version = "0.15.0", default-features = false, optional = true } +axum = { version = "0.6.20", default-features = false } base64 = { version = "0.21.2", default-features = false, optional = true } -bloom = { version = "0.3.2", default-features = false, optional = true } +bloomy = { version = "1.2.0", default-features = false, optional = true } bollard = { version = "0.14.0", default-features = false, features = ["ssl", "chrono"], optional = true } bytes = { version = "1.4.0", default-features = false, features = ["serde"] } blake2 = { version = "0.10.6", default-features = false, optional = true } bytesize = { version = "1.2.0", default-features = false } chrono = { version = "0.4.26", default-features = false, features = ["serde"] } cidr-utils = { version = "0.5.10", default-features = false } -clap = { version = "4.1.14", default-features = false, features = ["derive", "error-context", "env", "help", "std", "string", "usage", "wrap_help"] } -colored = { version = "2.0.0", default-features = false } +clap = { version = "4.3.21", default-features = false, features = ["derive", "error-context", "env", "help", "std", "string", "usage", "wrap_help"] } +colored = { version = "2.0.4", default-features = false } csv = { version = "1.2", default-features = false } deadpool-postgres = { version = "0.10.2"} derivative = { version = "2.2.0", default-features = false } dirs-next = { version = "2.0.0", default-features = false, optional = true } -dyn-clone = { version = "1.0.11", default-features = false } +dyn-clone = { version = "1.0.12", default-features = false } encoding_rs = { version = "0.8.32", default-features = false, features = ["serde"] } -enum_dispatch = { version = "0.3.11", default-features = false } +enum_dispatch = { version = "0.3.12", default-features = false } exitcode = { version = "1.1.2", default-features = false } flate2 = { version = "1.0.26", default-features = false, features = ["default"] } futures-util = { version = "0.3.28", default-features = false } glob = { version = "0.3.1", default-features = false } -governor = { version = "0.5.1", default-features = false, features = ["dashmap", "jitter", "std"], optional = true } +governor = { version = "0.6.0", default-features = false, features = ["dashmap", "jitter", "std"], optional = true } grok = { version = "2.0.0", default-features = false, optional = true } h2 = { version = "0.3.20", default-features = false, optional = true } hash_hasher = { version = "2.0.0", default-features = false } @@ -278,58 +284,58 @@ hyper = { version = "0.14.27", default-features = false, features = ["client", " hyper-openssl = { version = "0.9.2", default-features = false } hyper-proxy = { version = "0.9.1", default-features = false, features = ["openssl-tls"] } indexmap = { version = "~2.0.0", default-features = false, features = ["serde", "std"] } -infer = { version = "0.14.0", default-features = false, optional = true} -indoc = { version = "2.0.1", default-features = false } -inventory = { version = "0.3.6", default-features = false } +infer = { version = "0.15.0", default-features = false, optional = true} +indoc = { version = "2.0.3", default-features = false } +inventory = { version = "0.3.11", default-features = false } k8s-openapi = { version = "0.18.0", default-features = false, features = ["api", "v1_26"], optional = true } kube = { 
version = "0.82.0", default-features = false, features = ["client", "openssl-tls", "runtime"], optional = true } listenfd = { version = "1.0.1", default-features = false, optional = true } logfmt = { version = "0.0.2", default-features = false, optional = true } -lru = { version = "0.10.1", default-features = false, optional = true } +lru = { version = "0.11.0", default-features = false, optional = true } maxminddb = { version = "0.23.0", default-features = false, optional = true } md-5 = { version = "0.10", default-features = false, optional = true } mongodb = { version = "2.6.0", default-features = false, features = ["tokio-runtime"], optional = true } -nats = { version = "0.24.0", default-features = false, optional = true } -nkeys = { version = "0.3.0", default-features = false, optional = true } +async-nats = { version = "0.31.0", default-features = false, optional = true } +nkeys = { version = "0.3.1", default-features = false, optional = true } nom = { version = "7.1.3", default-features = false, optional = true } notify = { version = "6.0.1", default-features = false, features = ["macos_fsevent"] } once_cell = { version = "1.18", default-features = false } -openssl = { version = "0.10.55", default-features = false, features = ["vendored"] } +openssl = { version = "0.10.56", default-features = false, features = ["vendored"] } openssl-probe = { version = "0.1.5", default-features = false } ordered-float = { version = "3.7.0", default-features = false } -paste = "1.0.12" +paste = "1.0.14" percent-encoding = { version = "2.3.0", default-features = false } -pin-project = { version = "1.1.1", default-features = false } +pin-project = { version = "1.1.3", default-features = false } postgres-openssl = { version = "0.5.0", default-features = false, features = ["runtime"], optional = true } pulsar = { version = "6.0.1", default-features = false, features = ["tokio-runtime", "auth-oauth2", "flate2", "lz4", "snap", "zstd"], optional = true } rand = { version = "0.8.5", default-features = false, features = ["small_rng"] } rand_distr = { version = "0.4.3", default-features = false } reqwest = { version = "0.11", features = ["json"] } -rdkafka = { version = "0.32.2", default-features = false, features = ["tokio", "libz", "ssl", "zstd"], optional = true } -redis = { version = "0.23.0", default-features = false, features = ["connection-manager", "tokio-comp", "tokio-native-tls-comp"], optional = true } -regex = { version = "1.8.4", default-features = false, features = ["std", "perf"] } -roaring = { version = "0.10.1", default-features = false, optional = true } +rdkafka = { version = "0.33.2", default-features = false, features = ["tokio", "libz", "ssl", "zstd"], optional = true } +redis = { version = "0.23.2", default-features = false, features = ["connection-manager", "tokio-comp", "tokio-native-tls-comp"], optional = true } +regex = { version = "1.9.3", default-features = false, features = ["std", "perf"] } +roaring = { version = "0.10.2", default-features = false, optional = true } seahash = { version = "4.1.0", default-features = false } -semver = { version = "1.0.17", default-features = false, features = ["serde", "std"], optional = true } +semver = { version = "1.0.18", default-features = false, features = ["serde", "std"], optional = true } smallvec = { version = "1", default-features = false, features = ["union", "serde"] } -snafu = { version = "0.7.4", default-features = false, features = ["futures"] } +snafu = { version = "0.7.5", default-features = false, features = ["futures"] } snap = 
{ version = "1.1.0", default-features = false, optional = true } socket2 = { version = "0.5.3", default-features = false } stream-cancel = { version = "0.8.1", default-features = false } -strip-ansi-escapes = { version = "0.1.1", default-features = false } +strip-ansi-escapes = { version = "0.2.0", default-features = false } syslog = { version = "6.1.0", default-features = false, optional = true } urlencoding = { version = "2.1.0", default-features = false } -tikv-jemallocator = { version = "0.5.0", default-features = false, optional = true } +tikv-jemallocator = { version = "0.5.4", default-features = false, optional = true } tokio-postgres = { version = "0.7.7", default-features = false, features = ["runtime", "with-chrono-0_4"], optional = true } tokio-tungstenite = {version = "0.20.1", default-features = false, features = ["connect"], optional = true} -toml = { version = "0.7.5", default-features = false, features = ["parse", "display"] } +toml = { version = "0.7.6", default-features = false, features = ["parse", "display"] } tonic = { version = "0.9", optional = true, default-features = false, features = ["transport", "codegen", "prost", "tls", "tls-roots", "gzip"] } trust-dns-proto = { version = "0.22.0", default-features = false, features = ["dnssec"], optional = true } -typetag = { version = "0.2.8", default-features = false } +typetag = { version = "0.2.12", default-features = false } url = { version = "2.4.0", default-features = false, features = ["serde"] } uuid = { version = "1", default-features = false, features = ["serde", "v4"] } warp = { version = "0.3.5", default-features = false } -zstd = { version = "0.12.3", default-features = false } +zstd = { version = "0.12.4", default-features = false } arr_macro = { version = "0.2.1" } moka = { version = "0.11" } num = { version = "0.4.0" } @@ -354,15 +360,15 @@ nix = { version = "0.26.2", default-features = false, features = ["socket", "sig [build-dependencies] prost-build = { version = "0.11", default-features = false, optional = true } tonic-build = { version = "0.9", default-features = false, features = ["transport", "prost"], optional = true } -openssl-src = { version = "111", default-features = false, features = ["force-engine"] } +openssl-src = { version = "300", default-features = false, features = ["force-engine", "legacy"] } [dev-dependencies] approx = "0.5.1" -assert_cmd = { version = "2.0.11", default-features = false } -azure_core = { git = "https://github.com/Azure/azure-sdk-for-rust.git", rev = "b4544d4920fa3064eb921340054cd9cc130b7664", default-features = false, features = ["enable_reqwest", "azurite_workaround"] } -azure_identity = { git = "https://github.com/Azure/azure-sdk-for-rust.git", rev = "b4544d4920fa3064eb921340054cd9cc130b7664", default-features = false, features = ["enable_reqwest"] } -azure_storage_blobs = { git = "https://github.com/Azure/azure-sdk-for-rust.git", rev = "b4544d4920fa3064eb921340054cd9cc130b7664", default-features = false, features = ["azurite_workaround"] } -azure_storage = { git = "https://github.com/Azure/azure-sdk-for-rust.git", rev = "b4544d4920fa3064eb921340054cd9cc130b7664", default-features = false, features = ["azurite_workaround"] } +assert_cmd = { version = "2.0.12", default-features = false } +azure_core = { version = "0.13", default-features = false, features = ["enable_reqwest", "azurite_workaround"] } +azure_identity = { version = "0.13", default-features = false, features = ["enable_reqwest"] } +azure_storage_blobs = { version = "0.13", default-features = false, features 
= ["azurite_workaround"] } +azure_storage = { version = "0.13", default-features = false } mockall = "0.11.4" httptest = "0.15.4" base64 = "0.21.2" @@ -376,12 +382,12 @@ serial_test = "1.0.0" tempfile = "3.6.0" test-generator = "0.3.1" tokio-test = "0.4.2" -tokio = { version = "1.29.0", features = ["test-util"] } +tokio = { version = "1.30.0", features = ["test-util"] } tower-test = "0.4.0" vector-core = { path = "lib/vector-core", default-features = false, features = ["vrl", "test"] } snap = "1" wiremock = "0.5.19" -zstd = { version = "0.12.3", default-features = false } +zstd = { version = "0.12.4", default-features = false } assay = "0.1.1" temp-env = "0.3.1" @@ -390,11 +396,17 @@ temp-env = "0.3.1" # https://github.com/chronotope/chrono/pull/578 chrono = { git = "https://github.com/vectordotdev/chrono.git", tag = "v0.4.26-no-default-time-1" } # The upgrade for `tokio-util` >= 0.6.9 is blocked on https://github.com/vectordotdev/vector/issues/11257. -tokio-util = { git = "https://github.com/vectordotdev/tokio", branch = "tokio-util-0.7.4-framed-read-continue-on-error" } +tokio-util = { git = "https://github.com/vectordotdev/tokio", branch = "tokio-util-0.7.8-framed-read-continue-on-error" } nix = { git = "https://github.com/vectordotdev/nix.git", branch = "memfd/gnu/musl" } # The `heim` crates depend on `ntapi` 0.3.7 on Windows, but that version has an # unaligned access bug fixed in the following revision. ntapi = { git = "https://github.com/MSxDOS/ntapi.git", rev = "24fc1e47677fc9f6e38e5f154e6011dc9b270da6" } +# The current `openssl-sys` crate will vendor the OpenSSL sources via +# `openssl-src` at version 1.1.1*, but we want version 3.1.*. Bring in forked +# version of that crate with the appropriate dependency patched in. +openssl-sys = { git = "https://github.com/vectordotdev/rust-openssl.git", tag = "openssl-sys-v0.9.91+3.0.0" } +openssl-src = { git = "https://github.com/vectordotdev/openssl-src-rs.git", tag = "release-300-force-engine+3.1.2"} + [features] # Default features for *-unknown-linux-gnu and *-apple-darwin @@ -452,7 +464,7 @@ api-client = [ "dep:crossterm", "dep:num-format", "dep:number_prefix", - "dep:tui", + "dep:ratatui", "vector-core/api", "dep:vector-api-client", ] @@ -576,7 +588,7 @@ sources-kafka = ["dep:rdkafka"] sources-kubernetes_logs = ["dep:file-source", "kubernetes", "transforms-reduce"] sources-logstash = ["sources-utils-net-tcp", "tokio-util/net"] sources-mongodb_metrics = ["dep:mongodb"] -sources-nats = ["dep:nats", "dep:nkeys"] +sources-nats = ["dep:async-nats", "dep:nkeys"] sources-nginx_metrics = ["dep:nom"] sources-opentelemetry = ["dep:opentelemetry-proto", "dep:prost-types", "sources-http_server", "sources-utils-http", "sources-vector"] sources-postgresql_metrics = ["dep:postgres-openssl", "dep:tokio-postgres"] @@ -668,11 +680,11 @@ transforms-mezmo_aggregate = [] transforms-mezmo_log_to_metric = [] transforms-mezmo_log_clustering = ["dep:lru", "dep:blake2", "dep:base64", "dep:tokio-postgres"] transforms-mezmo_log_classification = ["dep:grok"] -transforms-mezmo_tag_cardinality_limit = ["dep:bloom", "dep:hashbrown"] +transforms-mezmo_tag_cardinality_limit = ["dep:bloomy", "dep:hashbrown"] transforms-remap = [] transforms-route = [] transforms-sample = [] -transforms-tag_cardinality_limit = ["dep:bloom", "dep:hashbrown"] +transforms-tag_cardinality_limit = ["dep:bloomy", "dep:hashbrown"] transforms-throttle = ["dep:governor"] transforms-protobuf_to_metric = [] transforms-protobuf_to_log = [] @@ -756,6 +768,7 @@ sinks-metrics = [ "sinks-blackhole", 
"sinks-console", "sinks-datadog_metrics", + "sinks-greptimedb", "sinks-humio", "sinks-influxdb", "sinks-kafka", @@ -800,6 +813,7 @@ sinks-datadog_traces = ["protobuf-build", "dep:rmpv", "dep:rmp-serde", "dep:serd sinks-elasticsearch = ["aws-core", "transforms-metric_to_log"] sinks-file = ["dep:async-compression"] sinks-gcp = ["dep:base64", "gcp"] +sinks-greptimedb = ["dep:greptimedb-client"] sinks-honeycomb = [] sinks-http = [] sinks-humio = ["sinks-splunk_hec", "transforms-metric_to_log"] @@ -807,7 +821,7 @@ sinks-influxdb = [] sinks-kafka = ["dep:rdkafka"] sinks-mezmo = [] sinks-loki = ["loki-logproto"] -sinks-nats = ["dep:nats", "dep:nkeys"] +sinks-nats = ["dep:async-nats", "dep:nkeys"] sinks-new_relic_logs = ["sinks-http"] sinks-new_relic = [] sinks-papertrail = ["dep:syslog"] @@ -862,6 +876,7 @@ all-integration-tests = [ "gcp-cloud-storage-integration-tests", "gcp-integration-tests", "gcp-pubsub-integration-tests", + "greptimedb-integration-tests", "http-client-integration-tests", "humio-integration-tests", "influxdb-integration-tests", @@ -924,6 +939,7 @@ fluent-integration-tests = ["docker", "sources-fluent"] gcp-cloud-storage-integration-tests = ["sinks-gcp"] gcp-integration-tests = ["sinks-gcp"] gcp-pubsub-integration-tests = ["sinks-gcp", "sources-gcp_pubsub"] +greptimedb-integration-tests = ["sinks-greptimedb"] humio-integration-tests = ["sinks-humio"] http-client-integration-tests = ["sources-http_client"] influxdb-integration-tests = ["sinks-influxdb"] diff --git a/Jenkinsfile b/Jenkinsfile index 75fdd5c9b21bf..b63118d5f4fc3 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -151,7 +151,9 @@ pipeline { } stage('Feature build and publish') { when { - branch pattern: "(feature\\/LOG-\\d+)", comparator: "REGEXP" + expression { + CURRENT_BRANCH ==~ /feature\/LOG-\d+/ + } } steps { script { diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index fd759a75ab82a..33d1067e89910 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -4,6 +4,7 @@ RustyXML,https://github.com/Florob/RustyXML,MIT OR Apache-2.0,Florian Zeitz adler32,https://github.com/remram44/adler32-rs,Zlib,Remi Rampin +aead,https://github.com/RustCrypto/traits,MIT OR Apache-2.0,RustCrypto Developers aes,https://github.com/RustCrypto/block-ciphers,MIT OR Apache-2.0,RustCrypto Developers ahash,https://github.com/tkaitchuck/ahash,MIT OR Apache-2.0,Tom Kaitchuck aho-corasick,https://github.com/BurntSushi/aho-corasick,Unlicense OR MIT,Andrew Gallant @@ -11,6 +12,11 @@ amq-protocol,https://github.com/amqp-rs/amq-protocol,BSD-2-Clause,Marc-Antoine P android-tzdata,https://github.com/RumovZ/android-tzdata,MIT OR Apache-2.0,RumovZ android_system_properties,https://github.com/nical/android_system_properties,MIT OR Apache-2.0,Nicolas Silva ansi_term,https://github.com/ogham/rust-ansi-term,MIT,"ogham@bsago.me, Ryan Scheel (Havvy) , Josh Triplett " +anstream,https://github.com/rust-cli/anstyle,MIT OR Apache-2.0,The anstream Authors +anstyle,https://github.com/rust-cli/anstyle,MIT OR Apache-2.0,The anstyle Authors +anstyle-parse,https://github.com/rust-cli/anstyle,MIT OR Apache-2.0,The anstyle-parse Authors +anstyle-query,https://github.com/rust-cli/anstyle,MIT OR Apache-2.0,The anstyle-query Authors +anstyle-wincon,https://github.com/rust-cli/anstyle,MIT OR Apache-2.0,The anstyle-wincon Authors anyhow,https://github.com/dtolnay/anyhow,MIT OR Apache-2.0,David Tolnay anymap,https://github.com/chris-morgan/anymap,BlueOak-1.0.0 OR MIT OR Apache-2.0,Chris Morgan apache-avro,https://github.com/apache/avro,Apache-2.0,Apache Avro 
team @@ -22,7 +28,6 @@ ascii,https://github.com/tomprogrammer/rust-ascii,Apache-2.0 OR MIT,"Thomas Ba assert-json-diff,https://github.com/davidpdrsn/assert-json-diff,MIT,David Pedersen async-channel,https://github.com/smol-rs/async-channel,Apache-2.0 OR MIT,Stjepan Glavina async-compat,https://github.com/smol-rs/async-compat,Apache-2.0 OR MIT,Stjepan Glavina -async-compression,https://github.com/Nemo157/async-compression,MIT OR Apache-2.0,"Wim Looman , Allen Bui " async-compression,https://github.com/Nullus157/async-compression,MIT OR Apache-2.0,"Wim Looman , Allen Bui " async-executor,https://github.com/smol-rs/async-executor,Apache-2.0 OR MIT,Stjepan Glavina async-fs,https://github.com/smol-rs/async-fs,Apache-2.0 OR MIT,Stjepan Glavina @@ -30,6 +35,7 @@ async-global-executor,https://github.com/Keruspe/async-global-executor,Apache-2. async-graphql,https://github.com/async-graphql/async-graphql,MIT OR Apache-2.0,"sunli , Koxiaet" async-io,https://github.com/smol-rs/async-io,Apache-2.0 OR MIT,Stjepan Glavina async-lock,https://github.com/smol-rs/async-lock,Apache-2.0 OR MIT,Stjepan Glavina +async-nats,https://github.com/nats-io/nats.rs,Apache-2.0,"Tomasz Pietrek , Casper Beyer " async-net,https://github.com/smol-rs/async-net,Apache-2.0 OR MIT,Stjepan Glavina async-process,https://github.com/smol-rs/async-process,Apache-2.0 OR MIT,Stjepan Glavina async-reactor-trait,https://github.com/amqp-rs/reactor-trait,Apache-2.0 OR MIT,Marc-Antoine Perennou @@ -77,7 +83,6 @@ backtrace,https://github.com/rust-lang/backtrace-rs,MIT OR Apache-2.0,The Rust P base16,https://github.com/thomcc/rust-base16,CC0-1.0,Thom Chiovoloni base64,https://github.com/marshallpierce/rust-base64,MIT OR Apache-2.0,"Alice Maz , Marshall Pierce " base64-simd,https://github.com/Nugine/simd,MIT,The base64-simd Authors -base64-url,https://github.com/magiclen/base64-url,MIT,Magic Len base64ct,https://github.com/RustCrypto/formats/tree/master/base64ct,Apache-2.0 OR MIT,RustCrypto Developers bit-set,https://github.com/contain-rs/bit-set,MIT OR Apache-2.0,Alexis Beingessner bit-vec,https://github.com/contain-rs/bit-vec,MIT OR Apache-2.0,Alexis Beingessner @@ -87,7 +92,7 @@ bitvec,https://github.com/bitvecto-rs/bitvec,MIT,The bitvec Authors block-buffer,https://github.com/RustCrypto/utils,MIT OR Apache-2.0,RustCrypto Developers block-padding,https://github.com/RustCrypto/utils,MIT OR Apache-2.0,RustCrypto Developers blocking,https://github.com/smol-rs/blocking,Apache-2.0 OR MIT,Stjepan Glavina -bloom,https://github.com/nicklan/bloom-rs,GPL-2.0,Nick Lanham +bloomy,https://docs.rs/bloomy/,MIT,"Aleksandr Bezobchuk , Alexis Sellier " bollard,https://github.com/fussybeaver/bollard,Apache-2.0,Bollard contributors borsh,https://github.com/near/borsh-rs,MIT OR Apache-2.0,Near Inc borsh-derive,https://github.com/nearprotocol/borsh,Apache-2.0,Near Inc @@ -109,6 +114,8 @@ cbc,https://github.com/RustCrypto/block-modes,MIT OR Apache-2.0,RustCrypto Devel cesu8,https://github.com/emk/cesu8-rs,Apache-2.0 OR MIT,Eric Kidd cfb-mode,https://github.com/RustCrypto/block-modes,MIT OR Apache-2.0,RustCrypto Developers cfg-if,https://github.com/alexcrichton/cfg-if,MIT OR Apache-2.0,Alex Crichton +chacha20,https://github.com/RustCrypto/stream-ciphers,Apache-2.0 OR MIT,RustCrypto Developers +chacha20poly1305,https://github.com/RustCrypto/AEADs/tree/master/chacha20poly1305,Apache-2.0 OR MIT,RustCrypto Developers charset,https://github.com/hsivonen/charset,MIT OR Apache-2.0,Henri Sivonen chrono,https://github.com/chronotope/chrono,MIT OR Apache-2.0,The chrono 
Authors chrono-tz,https://github.com/chronotope/chrono-tz,MIT OR Apache-2.0,The chrono-tz Authors @@ -121,6 +128,7 @@ clap_derive,https://github.com/clap-rs/clap/tree/master/clap_derive,MIT OR Apach clap_lex,https://github.com/clap-rs/clap/tree/master/clap_lex,MIT OR Apache-2.0,The clap_lex Authors clipboard-win,https://github.com/DoumanAsh/clipboard-win,BSL-1.0,Douman codespan-reporting,https://github.com/brendanzab/codespan,Apache-2.0,Brendan Zabarauskas +colorchoice,https://github.com/rust-cli/anstyle,MIT OR Apache-2.0,The colorchoice Authors colored,https://github.com/mackwic/colored,MPL-2.0,Thomas Wickham combine,https://github.com/Marwes/combine,MIT,Markus Westerlind concurrent-queue,https://github.com/smol-rs/concurrent-queue,Apache-2.0 OR MIT,Stjepan Glavina @@ -135,18 +143,19 @@ crc,https://github.com/mrhooray/crc-rs,MIT OR Apache-2.0,"Rui Hu crc32c,https://github.com/zowens/crc32c,Apache-2.0 OR MIT,Zack Owens crc32fast,https://github.com/srijs/rust-crc32fast,MIT OR Apache-2.0,"Sam Rijs , Alex Crichton " -crossbeam-channel,https://github.com/crossbeam-rs/crossbeam,MIT OR Apache-2.0,The crossbeam-channel Authors crossbeam-epoch,https://github.com/crossbeam-rs/crossbeam,MIT OR Apache-2.0,The crossbeam-epoch Authors crossbeam-queue,https://github.com/crossbeam-rs/crossbeam,MIT OR Apache-2.0,The crossbeam-queue Authors crossbeam-utils,https://github.com/crossbeam-rs/crossbeam,MIT OR Apache-2.0,The crossbeam-utils Authors crossterm,https://github.com/crossterm-rs/crossterm,MIT,T. Post crossterm_winapi,https://github.com/crossterm-rs/crossterm-winapi,MIT,T. Post crypto-common,https://github.com/RustCrypto/traits,MIT OR Apache-2.0,RustCrypto Developers +crypto_secretbox,https://github.com/RustCrypto/nacl-compat/tree/master/crypto_secretbox,Apache-2.0 OR MIT,RustCrypto Developers csv,https://github.com/BurntSushi/rust-csv,Unlicense OR MIT,Andrew Gallant ctor,https://github.com/mmastrac/rust-ctor,Apache-2.0 OR MIT,Matt Mastracci ctr,https://github.com/RustCrypto/block-modes,MIT OR Apache-2.0,RustCrypto Developers cty,https://github.com/japaric/cty,MIT OR Apache-2.0,Jorge Aparicio curve25519-dalek,https://github.com/dalek-cryptography/curve25519-dalek,BSD-3-Clause,"Isis Lovecruft , Henry de Valence " +curve25519-dalek-derive,https://github.com/dalek-cryptography/curve25519-dalek,MIT OR Apache-2.0,The curve25519-dalek-derive Authors cxx,https://github.com/dtolnay/cxx,MIT OR Apache-2.0,David Tolnay darling,https://github.com/TedDriggs/darling,MIT,Ted Driggs dashmap,https://github.com/xacrimon/dashmap,MIT,Acrimon @@ -167,7 +176,7 @@ dns-lookup,https://github.com/keeperofdakeys/dns-lookup,MIT OR Apache-2.0,Josh D doc-comment,https://github.com/GuillaumeGomez/doc-comment,MIT,Guillaume Gomez dyn-clone,https://github.com/dtolnay/dyn-clone,MIT OR Apache-2.0,David Tolnay ed25519,https://github.com/RustCrypto/signatures/tree/master/ed25519,Apache-2.0 OR MIT,RustCrypto Developers -ed25519-dalek,https://github.com/dalek-cryptography/ed25519-dalek,BSD-3-Clause,isis lovecruft +ed25519-dalek,https://github.com/dalek-cryptography/ed25519-dalek,BSD-3-Clause,"isis lovecruft , Tony Arcieri , Michael Rosenberg " either,https://github.com/bluss/either,MIT OR Apache-2.0,bluss encode_unicode,https://github.com/tormol/encode_unicode,Apache-2.0 OR MIT,Torbjørn Birch Moltu encoding_rs,https://github.com/hsivonen/encoding_rs,(Apache-2.0 OR MIT) AND BSD-3-Clause,Henri Sivonen @@ -189,6 +198,7 @@ extend,https://github.com/davidpdrsn/ext,MIT,David Pedersen 
fallible-iterator,https://github.com/sfackler/rust-fallible-iterator,MIT OR Apache-2.0,Steven Fackler fastrand,https://github.com/smol-rs/fastrand,Apache-2.0 OR MIT,Stjepan Glavina +fiat-crypto,https://github.com/mit-plv/fiat-crypto,MIT OR Apache-2.0 OR BSD-1-Clause,Fiat Crypto library authors filetime,https://github.com/alexcrichton/filetime,MIT OR Apache-2.0,Alex Crichton fix-hidden-lifetime-bug,https://github.com/danielhenrymantilla/fix-hidden-lifetime-bug.rs,Zlib OR MIT OR Apache-2.0,Daniel Henry-Mantilla fix-hidden-lifetime-bug-proc_macros,https://github.com/danielhenrymantilla/fix-hidden-lifetime-bug.rs,Zlib OR MIT OR Apache-2.0,Daniel Henry-Mantilla @@ -215,7 +225,6 @@ futures-timer,https://github.com/async-rs/futures-timer,MIT OR Apache-2.0,Alex C futures-util,https://github.com/rust-lang/futures-rs,MIT OR Apache-2.0,The futures-util Authors generic-array,https://github.com/fizyk20/generic-array,MIT,"Bartłomiej Kamiński , Aaron Trent " getrandom,https://github.com/rust-random/getrandom,MIT OR Apache-2.0,The Rand Project Developers -ghost,https://github.com/dtolnay/ghost,MIT OR Apache-2.0,David Tolnay gimli,https://github.com/gimli-rs/gimli,MIT OR Apache-2.0,The gimli Authors glob,https://github.com/rust-lang/glob,MIT OR Apache-2.0,The Rust Project Developers goauth,https://github.com/durch/rust-goauth,MIT,Drazen Urch @@ -225,6 +234,8 @@ graphql-parser,https://github.com/graphql-rust/graphql-parser,MIT OR Apache-2.0, graphql_client,https://github.com/graphql-rust/graphql-client,Apache-2.0 OR MIT,Tom Houlé graphql_client_codegen,https://github.com/graphql-rust/graphql-client,Apache-2.0 OR MIT,Tom Houlé graphql_query_derive,https://github.com/graphql-rust/graphql-client,Apache-2.0 OR MIT,Tom Houlé +greptime-proto,https://github.com/GreptimeTeam/greptime-proto,Apache-2.0,The greptime-proto Authors +greptimedb-client,https://github.com/GreptimeTeam/greptimedb-client-rust,Apache-2.0,The greptimedb-client Authors grok,https://github.com/daschl/grok,Apache-2.0,Michael Nitschinger h2,https://github.com/hyperium/h2,MIT,"Carl Lerche , Sean McArthur " hash_hasher,https://github.com/Fraser999/Hash-Hasher,Apache-2.0 OR MIT,Fraser Hutchison @@ -273,7 +284,6 @@ itoa,https://github.com/dtolnay/itoa,MIT OR Apache-2.0,David Tolnay jni-sys,https://github.com/sfackler/rust-jni-sys,MIT OR Apache-2.0,Steven Fackler js-sys,https://github.com/rustwasm/wasm-bindgen/tree/master/crates/js-sys,MIT OR Apache-2.0,The wasm-bindgen Developers -json,https://github.com/maciejhirsz/json-rust,MIT OR Apache-2.0,Maciej Hirsz json-patch,https://github.com/idubrov/json-patch,MIT OR Apache-2.0,Ivan Dubrov jsonpath_lib,https://github.com/freestrings/jsonpath,MIT,Changseok Han k8s-openapi,https://github.com/Arnavion/k8s-openapi,Apache-2.0,Arnavion @@ -311,7 +321,6 @@ matches,https://github.com/SimonSapin/rust-std-candidates,MIT,Simon Sapin maxminddb,https://github.com/oschwald/maxminddb-rust,ISC,Gregory J. 
Oschwald md-5,https://github.com/RustCrypto/hashes,MIT OR Apache-2.0,RustCrypto Developers -md5,https://github.com/stainless-steel/md5,Apache-2.0 OR MIT,"Ivan Ukhov , Kamal Ahmad , Konstantin Stepanov , Lukas Kalbertodt , Nathan Musoke , Scott Mabin , Tony Arcieri , Wim de With , Yosef Dinerstein " memchr,https://github.com/BurntSushi/memchr,Unlicense OR MIT,"Andrew Gallant , bluss" memmap2,https://github.com/RazrFalcon/memmap2-rs,MIT OR Apache-2.0,"Dan Burkert , Yevhenii Reizner " memoffset,https://github.com/Gilnaa/memoffset,MIT,Gilad Naaman @@ -326,7 +335,6 @@ mlua,https://github.com/khvzak/mlua,MIT,"Aleksandr Orlenko , kyren mongodb,https://github.com/mongodb/mongo-rust-driver,Apache-2.0,"Saghm Rossi , Patrick Freed , Isabel Atkinson , Abraham Egnor , Kaitlin Mahar " multer,https://github.com/rousan/multer-rs,MIT,Rousan Ali native-tls,https://github.com/sfackler/rust-native-tls,MIT OR Apache-2.0,Steven Fackler -nats,https://github.com/nats-io/nats.rs,Apache-2.0,"Derek Collison , Tyler Neely , Stjepan Glavina " ndk-context,https://github.com/rust-windowing/android-ndk-rs,MIT OR Apache-2.0,The Rust Windowing contributors nibble_vec,https://github.com/michaelsproul/rust_nibble_vec,MIT,Michael Sproul nix,https://github.com/nix-rust/nix,MIT,The nix-rust Project Developers @@ -383,6 +391,7 @@ pinky-swear,https://github.com/amqp-rs/pinky-swear,BSD-2-Clause,Marc-Antoine Per pkcs8,https://github.com/RustCrypto/formats/tree/master/pkcs8,Apache-2.0 OR MIT,RustCrypto Developers platforms,https://github.com/RustSec/platforms-crate,Apache-2.0 OR MIT,Tony Arcieri polling,https://github.com/smol-rs/polling,Apache-2.0 OR MIT,Stjepan Glavina +poly1305,https://github.com/RustCrypto/universal-hashes,Apache-2.0 OR MIT,RustCrypto Developers portable-atomic,https://github.com/taiki-e/portable-atomic,Apache-2.0 OR MIT,The portable-atomic Authors postgres-openssl,https://github.com/sfackler/rust-postgres,MIT OR Apache-2.0,Steven Fackler postgres-protocol,https://github.com/sfackler/rust-postgres,MIT OR Apache-2.0,Steven Fackler @@ -415,14 +424,18 @@ rand_chacha,https://github.com/rust-random/rand,MIT OR Apache-2.0,"The Rand Proj rand_distr,https://github.com/rust-random/rand,MIT OR Apache-2.0,The Rand Project Developers rand_hc,https://github.com/rust-random/rand,MIT OR Apache-2.0,The Rand Project Developers rand_xorshift,https://github.com/rust-random/rngs,MIT OR Apache-2.0,"The Rand Project Developers, The Rust Project Developers" +ratatui,https://github.com/tui-rs-revival/ratatui,MIT,"Florian Dehau , The Ratatui Developers" raw-cpuid,https://github.com/gz/rust-cpuid,MIT,Gerd Zellweger raw-window-handle,https://github.com/rust-windowing/raw-window-handle,MIT OR Apache-2.0 OR Zlib,Osspial rdkafka,https://github.com/fede1024/rust-rdkafka,MIT,Federico Giraud redis,https://github.com/redis-rs/redis-rs,BSD-3-Clause,The redis Authors redox_syscall,https://gitlab.redox-os.org/redox-os/syscall,MIT,Jeremy Soller redox_users,https://gitlab.redox-os.org/redox-os/users,MIT,"Jose Narvaez , Wesley Hershberger " -regex,https://github.com/rust-lang/regex,MIT OR Apache-2.0,The Rust Project Developers +regex,https://github.com/rust-lang/regex,MIT OR Apache-2.0,"The Rust Project Developers, Andrew Gallant " regex-automata,https://github.com/BurntSushi/regex-automata,Unlicense OR MIT,Andrew Gallant +regex-automata,https://github.com/rust-lang/regex/tree/master/regex-automata,MIT OR Apache-2.0,"The Rust Project Developers, Andrew Gallant " +regex-syntax,https://github.com/rust-lang/regex,MIT OR Apache-2.0,The Rust Project 
Developers +regex-syntax,https://github.com/rust-lang/regex/tree/master/regex-syntax,MIT OR Apache-2.0,"The Rust Project Developers, Andrew Gallant " rend,https://github.com/djkoloski/rend,MIT,David Koloski reqwest,https://github.com/seanmonstar/reqwest,MIT OR Apache-2.0,Sean McArthur resolv-conf,http://github.com/tailhook/resolv-conf,MIT OR Apache-2.0,paul@colomiets.name @@ -442,16 +455,15 @@ rustc-hash,https://github.com/rust-lang-nursery/rustc-hash,Apache-2.0 OR MIT,The rustc_version,https://github.com/Kimundi/rustc-version-rs,MIT OR Apache-2.0,Marvin Löbel rustc_version_runtime,https://github.com/seppo0010/rustc-version-runtime-rs,MIT,Sebastian Waisbrot rustix,https://github.com/bytecodealliance/rustix,Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT,"Dan Gohman , Jakub Konka " -rustls,https://github.com/ctz/rustls,Apache-2.0 OR ISC OR MIT,Joseph Birr-Pixton rustls,https://github.com/rustls/rustls,Apache-2.0 OR ISC OR MIT,The rustls Authors rustls-native-certs,https://github.com/ctz/rustls-native-certs,Apache-2.0 OR ISC OR MIT,Joseph Birr-Pixton -rustls-pemfile,https://github.com/rustls/pemfile,Apache-2.0 OR ISC OR MIT,Joseph Birr-Pixton rustls-pemfile,https://github.com/rustls/pemfile,Apache-2.0 OR ISC OR MIT,The rustls-pemfile Authors rustls-webpki,https://github.com/rustls/webpki,ISC,The rustls-webpki Authors rustversion,https://github.com/dtolnay/rustversion,MIT OR Apache-2.0,David Tolnay rusty-fork,https://github.com/altsysrq/rusty-fork,MIT OR Apache-2.0,Jason Lingle rustyline,https://github.com/kkawakam/rustyline,MIT,Katsu Kawakami ryu,https://github.com/dtolnay/ryu,Apache-2.0 OR BSL-1.0,David Tolnay +salsa20,https://github.com/RustCrypto/stream-ciphers,MIT OR Apache-2.0,RustCrypto Developers same-file,https://github.com/BurntSushi/same-file,Unlicense OR MIT,Andrew Gallant sasl2-sys,https://github.com/MaterializeInc/rust-sasl,Apache-2.0,"Materialize, Inc." 
scan_fmt,https://github.com/wlentz/scan_fmt,MIT,wlentz @@ -468,8 +480,8 @@ semver-parser,https://github.com/steveklabnik/semver-parser,MIT OR Apache-2.0,St serde,https://github.com/serde-rs/serde,MIT OR Apache-2.0,"Erick Tryzelaar , David Tolnay " serde-toml-merge,https://github.com/jdrouet/serde-toml-merge,MIT,Jeremie Drouet serde-value,https://github.com/arcnmx/serde-value,MIT,arcnmx -serde-xml-rs,https://github.com/RReverser/serde-xml-rs,MIT,Ingvar Stepanyan serde_bytes,https://github.com/serde-rs/bytes,MIT OR Apache-2.0,David Tolnay +serde_derive,https://github.com/serde-rs/serde,MIT OR Apache-2.0,David Tolnay serde_json,https://github.com/serde-rs/json,MIT OR Apache-2.0,"Erick Tryzelaar , David Tolnay " serde_nanos,https://github.com/caspervonb/serde_nanos,MIT OR Apache-2.0,Casper Beyer serde_path_to_error,https://github.com/dtolnay/path-to-error,MIT OR Apache-2.0,David Tolnay @@ -516,7 +528,6 @@ strum,https://github.com/Peternator7/strum,MIT,Peter Glotfelty , Henry de Valence " syn,https://github.com/dtolnay/syn,MIT OR Apache-2.0,David Tolnay sync_wrapper,https://github.com/Actyx/sync_wrapper,Apache-2.0,Actyx AG -synstructure,https://github.com/mystor/synstructure,MIT,Nika Layzell syslog,https://github.com/Geal/rust-syslog,MIT,contact@geoffroycouprie.com syslog_loose,https://github.com/FungusHumungus/syslog-loose,MIT,Stephen Wakely take_mut,https://github.com/Sgeo/take_mut,MIT,Sgeo @@ -540,6 +551,7 @@ tokio-io-timeout,https://github.com/sfackler/tokio-io-timeout,MIT OR Apache-2.0, tokio-native-tls,https://github.com/tokio-rs/tls,MIT,Tokio Contributors tokio-openssl,https://github.com/sfackler/tokio-openssl,MIT OR Apache-2.0,Alex Crichton tokio-postgres,https://github.com/sfackler/rust-postgres,MIT OR Apache-2.0,Steven Fackler +tokio-retry,https://github.com/srijs/rust-tokio-retry,MIT,Sam Rijs tokio-rustls,https://github.com/tokio-rs/tls,MIT OR Apache-2.0,quininer kel tokio-tungstenite,https://github.com/snapview/tokio-tungstenite,MIT,"Daniel Abramov , Alexey Galakhov " toml,https://github.com/toml-rs/toml,MIT OR Apache-2.0,Alex Crichton @@ -558,7 +570,6 @@ treediff,https://github.com/Byron/treediff-rs,MIT OR Apache-2.0,Sebastian Thiel trust-dns-proto,https://github.com/bluejekyll/trust-dns,MIT OR Apache-2.0,Benjamin Fry trust-dns-resolver,https://github.com/bluejekyll/trust-dns,MIT OR Apache-2.0,Benjamin Fry try-lock,https://github.com/seanmonstar/try-lock,MIT,Sean McArthur -tui,https://github.com/fdehau/tui-rs,MIT,Florian Dehau tungstenite,https://github.com/snapview/tungstenite-rs,MIT OR Apache-2.0,"Alexey Galakhov, Daniel Abramov" twox-hash,https://github.com/shepmaster/twox-hash,MIT,Jake Goulding typed-builder,https://github.com/idanarye/rust-typed-builder,MIT OR Apache-2.0,"IdanArye , Chris Morgan " @@ -573,7 +584,7 @@ unicode-ident,https://github.com/dtolnay/unicode-ident,(MIT OR Apache-2.0) AND U unicode-normalization,https://github.com/unicode-rs/unicode-normalization,MIT OR Apache-2.0,"kwantam , Manish Goregaokar " unicode-segmentation,https://github.com/unicode-rs/unicode-segmentation,MIT OR Apache-2.0,"kwantam , Manish Goregaokar " unicode-width,https://github.com/unicode-rs/unicode-width,MIT OR Apache-2.0,"kwantam , Manish Goregaokar " -unicode-xid,https://github.com/unicode-rs/unicode-xid,MIT OR Apache-2.0,"erick.tryzelaar , kwantam , Manish Goregaokar " +universal-hash,https://github.com/RustCrypto/traits,MIT OR Apache-2.0,RustCrypto Developers unreachable,https://github.com/reem/rust-unreachable,MIT OR Apache-2.0,Jonathan Reem 
unsafe-libyaml,https://github.com/dtolnay/unsafe-libyaml,MIT,David Tolnay untrusted,https://github.com/briansmith/untrusted,ISC,Brian Smith @@ -582,7 +593,6 @@ url,https://github.com/servo/rust-url,MIT OR Apache-2.0,The rust-url developers urlencoding,https://github.com/kornelski/rust_urlencoding,MIT,"Kornel , Bertram Truong " utf-8,https://github.com/SimonSapin/rust-utf8,MIT OR Apache-2.0,Simon Sapin utf8-width,https://github.com/magiclen/utf8-width,MIT,Magic Len -utf8parse,https://github.com/jwilm/vte,Apache-2.0 OR MIT,"Joe Wilm , Christian Duerr " uuid,https://github.com/uuid-rs/uuid,Apache-2.0 OR MIT,"Ashley Mannix, Christopher Armstrong, Dylan DPC, Hunar Roop Kahlon" valuable,https://github.com/tokio-rs/valuable,MIT,The valuable Authors vec_map,https://github.com/contain-rs/vec-map,MIT OR Apache-2.0,"Alex Crichton , Jorge Aparicio , Alexis Beingessner , Brian Anderson <>, tbu- <>, Manish Goregaokar <>, Aaron Turon , Adolfo Ochagavía <>, Niko Matsakis <>, Steven Fackler <>, Chase Southwood , Eduard Burtescu <>, Florian Wilkens <>, Félix Raimundo <>, Tibor Benke <>, Markus Siemens , Josh Branchaud , Huon Wilson , Corey Farwell , Aaron Liblong <>, Nick Cameron , Patrick Walton , Felix S Klock II <>, Andrew Paseltiner , Sean McArthur , Vadim Petrochenkov <>" @@ -626,14 +636,12 @@ winnow,https://github.com/winnow-rs/winnow,MIT,The winnow Authors winreg,https://github.com/gentoo90/winreg-rs,MIT,Igor Shaula woothee,https://github.com/woothee/woothee-rust,Apache-2.0,hhatto wyz,https://github.com/myrrlyn/wyz,MIT,myrrlyn -xml-rs,https://github.com/kornelski/xml-rs,MIT,Vladimir Matveev xmlparser,https://github.com/RazrFalcon/xmlparser,MIT OR Apache-2.0,Evgeniy Reizner yaml-rust,https://github.com/chyh1990/yaml-rust,MIT OR Apache-2.0,Yuheng Chen yansi,https://github.com/SergioBenitez/yansi,MIT OR Apache-2.0,Sergio Benitez zerocopy,https://fuchsia.googlesource.com/fuchsia/+/HEAD/src/lib/zerocopy,BSD-2-Clause,Joshua Liebow-Feeser zerocopy-derive,https://github.com/google/zerocopy,BSD-2-Clause,Joshua Liebow-Feeser zeroize,https://github.com/RustCrypto/utils/tree/master/zeroize,Apache-2.0 OR MIT,The RustCrypto Project Developers -zeroize_derive,https://github.com/RustCrypto/utils/tree/master/zeroize/derive,Apache-2.0 OR MIT,The RustCrypto Project Developers zstd,https://github.com/gyscos/zstd-rs,MIT,Alexandre Bury zstd-safe,https://github.com/gyscos/zstd-rs,MIT OR Apache-2.0,Alexandre Bury zstd-sys,https://github.com/gyscos/zstd-rs,MIT OR Apache-2.0,Alexandre Bury diff --git a/Makefile b/Makefile index 5ababc8144521..a6d4b3c580977 100644 --- a/Makefile +++ b/Makefile @@ -56,7 +56,13 @@ export VERBOSE ?= false # Override the container tool. Tries docker first and then tries podman. export CONTAINER_TOOL ?= auto ifeq ($(CONTAINER_TOOL),auto) - override CONTAINER_TOOL = $(shell docker version >/dev/null 2>&1 && echo docker || echo podman) + ifeq ($(shell docker version >/dev/null 2>&1 && echo docker), docker) + override CONTAINER_TOOL = docker + else ifeq ($(shell podman version >/dev/null 2>&1 && echo podman), podman) + override CONTAINER_TOOL = podman + else + override CONTAINER_TOOL = unknown + endif endif # If we're using podman create pods else if we're using docker create networks. 
export CURRENT_DIR = $(shell pwd) @@ -76,7 +82,7 @@ export AWS_ACCESS_KEY_ID ?= "dummy" export AWS_SECRET_ACCESS_KEY ?= "dummy" # Set version -export VERSION ?= $(shell cargo vdev version) +export VERSION ?= $(shell command -v cargo >/dev/null && cargo vdev version || echo unknown) # Set if you are on the CI and actually want the things to happen. (Non-CI users should never set this.) export CI ?= false @@ -157,6 +163,7 @@ define ENVIRONMENT_EXEC endef +ifneq ($(CONTAINER_TOOL), unknown) ifeq ($(ENVIRONMENT_AUTOBUILD), true) define ENVIRONMENT_PREPARE @echo "Building the environment. (ENVIRONMENT_AUTOBUILD=true) This may take a few minutes..." @@ -171,6 +178,11 @@ define ENVIRONMENT_PREPARE $(CONTAINER_TOOL) pull $(ENVIRONMENT_UPSTREAM) endef endif +else +define ENVIRONMENT_PREPARE +$(error "Please install a container tool such as Docker or Podman") +endef +endif .PHONY: check-container-tool check-container-tool: ## Checks what container tool is installed @@ -237,9 +249,11 @@ build-graphql-schema: ## Generate the `schema.json` for Vector's GraphQL API .PHONY: check-build-tools check-build-tools: -ifeq (, $(shell which cargo)) +ifneq ($(ENVIRONMENT), true) +ifeq ($(shell command -v cargo >/dev/null || echo not-found), not-found) $(error "Please install Rust: https://www.rust-lang.org/tools/install") endif +endif ##@ Cross Compiling .PHONY: cross-enable @@ -299,6 +313,9 @@ target/%/vector.tar.gz: target/%/vector CARGO_HANDLES_FRESHNESS cp -R -f -v \ README.md \ LICENSE \ + licenses \ + NOTICE \ + LICENSE-3rdparty.csv \ config \ target/scratch/vector-${TRIPLE}/ cp -R -f -v \ @@ -361,7 +378,7 @@ test-behavior: test-behavior-transforms test-behavior-formats test-behavior-conf test-integration: ## Runs all integration tests test-integration: test-integration-amqp test-integration-appsignal test-integration-aws test-integration-axiom test-integration-azure test-integration-chronicle test-integration-clickhouse test-integration: test-integration-databend test-integration-docker-logs test-integration-elasticsearch -test-integration: test-integration-eventstoredb test-integration-fluent test-integration-gcp test-integration-humio test-integration-http-client test-integration-influxdb +test-integration: test-integration-eventstoredb test-integration-fluent test-integration-gcp test-integration-greptimedb test-integration-humio test-integration-http-client test-integration-influxdb test-integration: test-integration-kafka test-integration-logstash test-integration-loki test-integration-mongodb test-integration-nats test-integration: test-integration-nginx test-integration-opentelemetry test-integration-postgres test-integration-prometheus test-integration-pulsar test-integration: test-integration-redis test-integration-splunk test-integration-dnstap test-integration-datadog-agent test-integration-datadog-logs diff --git a/STYLE.md b/STYLE.md index 4e3e70818f708..b817dbac18793 100644 --- a/STYLE.md +++ b/STYLE.md @@ -229,7 +229,7 @@ would know that the queue size was _currently_ zero but we'd also know that we j [Component Specification](https://github.com/vectordotdev/vector/blob/master/docs/specs/component.md). - **Don't** emit metrics in tight loops. Each metric emission carries an overhead, and emitting them - in tight loops can cause that overhead to become noticable in terms of CPU usage and throughput + in tight loops can cause that overhead to become noticeable in terms of CPU usage and throughput reduction. 
Instead of incrementing a counter every time a loop iteration occurs, you might consider incrementing a local variable instead, and then emitting that sum after the loop is over. - **Don't** update a counter to measure the total number of operations/events/etc if you're already @@ -327,3 +327,22 @@ you'll need to use an asynchronous-specific synchronization primitives, namely t itself. The documentation on `tokio`'s own [`Mutex`](https://docs.rs/tokio/latest/tokio/sync/struct.Mutex.html), for example, calls out the specifics of when and where you might need to use it vs the one from `std::sync`. + + +## New Configuration Fields vs CLI flags + +Vector makes the distinction between configuration items that are essential to understand data +pipelines and runtime flags that determine the details of the runtime behavior. The main configuration +generally lives in a file in the current directory or in `/etc/vector`. + +Examples of main configuration fields are source, transformation, and sink declaration, as well as +information about where any disk buffers should be persisted. + +For configuration items that purely inform details of Vector's runtime behavior, CLI flags without +corresponding configuration fields should be used. + +An example of a runtime flag is +`vector run --no-graceful-shutdown-limit`, which tells Vector to ignore SIGINTs and to continue running +as normal until a SIGKILL is received. In this case, as the configuration describes the desired runtime +behavior in a specific environment and not to the underlying data pipeline, no corresponding field in +the configuration file should exist. diff --git a/Tiltfile b/Tiltfile index 6c0c9246042b4..9050565afe259 100644 --- a/Tiltfile +++ b/Tiltfile @@ -7,7 +7,7 @@ load('ext://helm_resource', 'helm_resource', 'helm_repo') docker_build( ref='timberio/vector', context='.', - build_args={'RUST_VERSION': '1.70.0'}, + build_args={'RUST_VERSION': '1.71.1'}, dockerfile='tilt/Dockerfile' ) diff --git a/benches/codecs/character_delimited_bytes.rs b/benches/codecs/character_delimited_bytes.rs index 9e8774edacf3e..90e6f9d996f3c 100644 --- a/benches/codecs/character_delimited_bytes.rs +++ b/benches/codecs/character_delimited_bytes.rs @@ -55,7 +55,7 @@ fn decoding(c: &mut Criterion) { .map(|ml| CharacterDelimitedDecoder::new_with_max_length(b'a', ml)) .unwrap_or(CharacterDelimitedDecoder::new(b'a')), ); - let deserializer = Deserializer::Bytes(BytesDeserializer::new()); + let deserializer = Deserializer::Bytes(BytesDeserializer); let decoder = vector::codecs::Decoder::new(framer, deserializer); (Box::new(decoder), param.input.clone()) diff --git a/benches/codecs/newline_bytes.rs b/benches/codecs/newline_bytes.rs index e7b4bce8a0392..a3fc1751a6df7 100644 --- a/benches/codecs/newline_bytes.rs +++ b/benches/codecs/newline_bytes.rs @@ -53,7 +53,7 @@ fn decoding(c: &mut Criterion) { .map(|ml| NewlineDelimitedDecoder::new_with_max_length(ml)) .unwrap_or(NewlineDelimitedDecoder::new()), ); - let deserializer = Deserializer::Bytes(BytesDeserializer::new()); + let deserializer = Deserializer::Bytes(BytesDeserializer); let decoder = vector::codecs::Decoder::new(framer, deserializer); (Box::new(decoder), param.input.clone()) diff --git a/benches/dnstap/mod.rs b/benches/dnstap/mod.rs index e4dda4b5f3d4b..37839b41ace06 100644 --- a/benches/dnstap/mod.rs +++ b/benches/dnstap/mod.rs @@ -1,14 +1,10 @@ use bytes::Bytes; use criterion::{criterion_group, criterion_main, BatchSize, Criterion, Throughput}; -use vector::{ - event::LogEvent, - 
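
As an illustration of the STYLE.md guidance added in the hunk above (accumulate in a local variable inside hot loops and emit a single counter update after the loop), a minimal sketch follows. It is an aside, not part of the patched sources: it assumes the `metrics` 0.21 macro form pinned in the Cargo.toml changes earlier in this diff, and the metric name, function, and data are hypothetical.

// Illustrative sketch only; requires the `metrics` crate (0.21.x) as a dependency.
fn process_batch(events: &[String]) {
    let mut discarded: u64 = 0;

    for event in events {
        // per-event work would happen here
        if event.is_empty() {
            // accumulate locally instead of emitting a metric on every iteration
            discarded += 1;
        }
    }

    // emit the sum once, after the loop is over, per the style guidance above
    if discarded > 0 {
        metrics::counter!("discarded_events_total", discarded);
    }
}

fn main() {
    let batch = vec!["a".to_string(), String::new(), "b".to_string()];
    process_batch(&batch);
}
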
sources::dnstap::{schema::DnstapEventSchema, DnstapParser}, -}; +use vector::event::LogEvent; +use vector::sources::dnstap::parser::DnstapParser; fn benchmark_query_parsing(c: &mut Criterion) { let mut event = LogEvent::default(); - let schema = DnstapEventSchema::new(); - let mut parser = DnstapParser::new(&schema, &mut event); let raw_dnstap_data = "ChVqYW1lcy1WaXJ0dWFsLU1hY2hpbmUSC0JJTkQgOS4xNi4zcnoIAxACGAEiEAAAAAAAAA\ AAAAAAAAAAAAAqECABBQJwlAAAAAAAAAAAADAw8+0CODVA7+zq9wVNMU3WNlI2kwIAAAABAAAAAAABCWZhY2Vib29rMQNjb\ 20AAAEAAQAAKQIAAACAAAAMAAoACOxjCAG9zVgzWgUDY29tAHgB"; @@ -19,7 +15,7 @@ fn benchmark_query_parsing(c: &mut Criterion) { group.bench_function("dns_query_parsing", |b| { b.iter_batched( || dnstap_data.clone(), - |dnstap_data| parser.parse_dnstap_data(Bytes::from(dnstap_data)).unwrap(), + |dnstap_data| DnstapParser::parse(&mut event, Bytes::from(dnstap_data)).unwrap(), BatchSize::SmallInput, ) }); @@ -29,8 +25,6 @@ fn benchmark_query_parsing(c: &mut Criterion) { fn benchmark_update_parsing(c: &mut Criterion) { let mut event = LogEvent::default(); - let schema = DnstapEventSchema::new(); - let mut parser = DnstapParser::new(&schema, &mut event); let raw_dnstap_data = "ChVqYW1lcy1WaXJ0dWFsLU1hY2hpbmUSC0JJTkQgOS4xNi4zcmsIDhABGAEiBH8AAA\ EqBH8AAAEwrG44AEC+iu73BU14gfofUh1wi6gAAAEAAAAAAAAHZXhhbXBsZQNjb20AAAYAAWC+iu73BW0agDwvch1wi6gAA\ AEAAAAAAAAHZXhhbXBsZQNjb20AAAYAAXgB"; @@ -41,7 +35,7 @@ fn benchmark_update_parsing(c: &mut Criterion) { group.bench_function("dns_update_parsing", |b| { b.iter_batched( || dnstap_data.clone(), - |dnstap_data| parser.parse_dnstap_data(Bytes::from(dnstap_data)).unwrap(), + |dnstap_data| DnstapParser::parse(&mut event, Bytes::from(dnstap_data)).unwrap(), BatchSize::SmallInput, ) }); diff --git a/benches/event.rs b/benches/event.rs index c28d0756a1a37..0f8322da087fd 100644 --- a/benches/event.rs +++ b/benches/event.rs @@ -1,6 +1,7 @@ use bytes::Bytes; use criterion::{criterion_group, BatchSize, Criterion}; use vector::event::LogEvent; +use vrl::event_path; fn benchmark_event_iterate(c: &mut Criterion) { let mut group = c.benchmark_group("event/iterate"); @@ -9,9 +10,9 @@ fn benchmark_event_iterate(c: &mut Criterion) { b.iter_batched_ref( || { let mut log = LogEvent::default(); - log.insert("key1", Bytes::from("value1")); - log.insert("key2", Bytes::from("value2")); - log.insert("key3", Bytes::from("value3")); + log.insert(event_path!("key1"), Bytes::from("value1")); + log.insert(event_path!("key2"), Bytes::from("value2")); + log.insert(event_path!("key3"), Bytes::from("value3")); log }, |e| e.all_fields().unwrap().count(), @@ -23,9 +24,15 @@ fn benchmark_event_iterate(c: &mut Criterion) { b.iter_batched_ref( || { let mut log = LogEvent::default(); - log.insert("key1.nested1.nested2", Bytes::from("value1")); - log.insert("key1.nested1.nested3", Bytes::from("value4")); - log.insert("key3", Bytes::from("value3")); + log.insert( + event_path!("key1", "nested1", "nested2"), + Bytes::from("value1"), + ); + log.insert( + event_path!("key1", "nested1", "nested3"), + Bytes::from("value4"), + ); + log.insert(event_path!("key3"), Bytes::from("value3")); log }, |e| e.all_fields().unwrap().count(), @@ -37,8 +44,8 @@ fn benchmark_event_iterate(c: &mut Criterion) { b.iter_batched_ref( || { let mut log = LogEvent::default(); - log.insert("key1.nested1[0]", Bytes::from("value1")); - log.insert("key1.nested1[1]", Bytes::from("value2")); + log.insert(event_path!("key1", "nested1", 0), Bytes::from("value1")); + log.insert(event_path!("key1", "nested1", 1), 
Bytes::from("value2")); log }, |e| e.all_fields().unwrap().count(), @@ -53,25 +60,31 @@ fn benchmark_event_create(c: &mut Criterion) { group.bench_function("single-level", |b| { b.iter(|| { let mut log = LogEvent::default(); - log.insert("key1", Bytes::from("value1")); - log.insert("key2", Bytes::from("value2")); - log.insert("key3", Bytes::from("value3")); + log.insert(event_path!("key1"), Bytes::from("value1")); + log.insert(event_path!("key2"), Bytes::from("value2")); + log.insert(event_path!("key3"), Bytes::from("value3")); }) }); group.bench_function("nested-keys", |b| { b.iter(|| { let mut log = LogEvent::default(); - log.insert("key1.nested1.nested2", Bytes::from("value1")); - log.insert("key1.nested1.nested3", Bytes::from("value4")); - log.insert("key3", Bytes::from("value3")); + log.insert( + event_path!("key1", "nested1", "nested2"), + Bytes::from("value1"), + ); + log.insert( + event_path!("key1", "nested1", "nested3"), + Bytes::from("value4"), + ); + log.insert(event_path!("key3"), Bytes::from("value3")); }) }); group.bench_function("array", |b| { b.iter(|| { let mut log = LogEvent::default(); - log.insert("key1.nested1[0]", Bytes::from("value1")); - log.insert("key1.nested1[1]", Bytes::from("value2")); + log.insert(event_path!("key1", "nested1", 0), Bytes::from("value1")); + log.insert(event_path!("key1", "nested1", 1), Bytes::from("value2")); }) }); } diff --git a/benches/lua.rs b/benches/lua.rs index 3eda8a9572d98..7aabba2996890 100644 --- a/benches/lua.rs +++ b/benches/lua.rs @@ -9,6 +9,7 @@ use vector::{ test_util::collect_ready, transforms::{self, OutputBuffer, Transform}, }; +use vrl::event_path; fn bench_add_fields(c: &mut Criterion) { let event = Event::from(LogEvent::default()); @@ -87,7 +88,7 @@ fn bench_field_filter(c: &mut Criterion) { let events = (0..num_events) .map(|i| { let mut event = LogEvent::default(); - event.insert("the_field", (i % 10).to_string()); + event.insert(event_path!("the_field"), (i % 10).to_string()); Event::from(event) }) .collect::>(); diff --git a/benches/remap.rs b/benches/remap.rs index 2f371d93db1a6..8fd2629c17b8e 100644 --- a/benches/remap.rs +++ b/benches/remap.rs @@ -12,6 +12,7 @@ use vector::{ }, }; use vector_common::TimeZone; +use vrl::event_path; use vrl::prelude::*; criterion_group!( @@ -35,9 +36,18 @@ fn benchmark_remap(c: &mut Criterion) { let result = outputs.take_primary(); let output_1 = result.first().unwrap().as_log(); - debug_assert_eq!(output_1.get("foo").unwrap().to_string_lossy(), "bar"); - debug_assert_eq!(output_1.get("bar").unwrap().to_string_lossy(), "baz"); - debug_assert_eq!(output_1.get("copy").unwrap().to_string_lossy(), "buz"); + debug_assert_eq!( + output_1.get(event_path!("foo")).unwrap().to_string_lossy(), + "bar" + ); + debug_assert_eq!( + output_1.get(event_path!("bar")).unwrap().to_string_lossy(), + "baz" + ); + debug_assert_eq!( + output_1.get(event_path!("copy")).unwrap().to_string_lossy(), + "buz" + ); result }; @@ -67,7 +77,9 @@ fn benchmark_remap(c: &mut Criterion) { let event = { let mut event = Event::Log(LogEvent::from("augment me")); - event.as_mut_log().insert("copy_from", "buz".to_owned()); + event + .as_mut_log() + .insert(event_path!("copy_from"), "buz".to_owned()); event }; @@ -88,11 +100,11 @@ fn benchmark_remap(c: &mut Criterion) { let output_1 = result.first().unwrap().as_log(); debug_assert_eq!( - output_1.get("foo").unwrap().to_string_lossy(), + output_1.get(event_path!("foo")).unwrap().to_string_lossy(), r#"{"key": "value"}"# ); debug_assert_eq!( - 
output_1.get("bar").unwrap().to_string_lossy(), + output_1.get(event_path!("bar")).unwrap().to_string_lossy(), r#"{"key":"value"}"# ); @@ -141,10 +153,16 @@ fn benchmark_remap(c: &mut Criterion) { let result = outputs.take_primary(); let output_1 = result.first().unwrap().as_log(); - debug_assert_eq!(output_1.get("number").unwrap(), &Value::Integer(1234)); - debug_assert_eq!(output_1.get("bool").unwrap(), &Value::Boolean(true)); debug_assert_eq!( - output_1.get("timestamp").unwrap(), + output_1.get(event_path!("number")).unwrap(), + &Value::Integer(1234) + ); + debug_assert_eq!( + output_1.get(event_path!("bool")).unwrap(), + &Value::Boolean(true) + ); + debug_assert_eq!( + output_1.get(event_path!("timestamp")).unwrap(), &Value::Timestamp(timestamp), ); @@ -176,7 +194,7 @@ fn benchmark_remap(c: &mut Criterion) { ("bool", "yes"), ("timestamp", "19/06/2019:17:20:49 -0400"), ] { - event.as_mut_log().insert(key, value.to_owned()); + event.as_mut_log().insert(event_path!(key), value.to_owned()); } let timestamp = diff --git a/benches/template.rs b/benches/template.rs index 949f0beeb52b5..008df426c98bc 100644 --- a/benches/template.rs +++ b/benches/template.rs @@ -13,10 +13,7 @@ fn bench_elasticsearch_index(c: &mut Criterion) { let index = Template::try_from("index-%Y.%m.%d").unwrap(); let mut event = Event::Log(LogEvent::from("hello world")); event.as_mut_log().insert( - ( - lookup::PathPrefix::Event, - log_schema().timestamp_key().unwrap(), - ), + log_schema().timestamp_key_target_path().unwrap(), Utc::now(), ); @@ -31,10 +28,7 @@ fn bench_elasticsearch_index(c: &mut Criterion) { let index = Template::try_from("index").unwrap(); let mut event = Event::Log(LogEvent::from("hello world")); event.as_mut_log().insert( - ( - lookup::PathPrefix::Event, - log_schema().timestamp_key().unwrap(), - ), + log_schema().timestamp_key_target_path().unwrap(), Utc::now(), ); diff --git a/build.rs b/build.rs index 42316c102da33..4e8c4f4986304 100644 --- a/build.rs +++ b/build.rs @@ -1,8 +1,5 @@ use std::{collections::HashSet, env, fs::File, io::Write, path::Path, process::Command}; -#[cfg(feature = "protobuf-build")] -use std::path::PathBuf; - struct TrackedEnv { tracked: HashSet, } @@ -133,7 +130,7 @@ fn main() { // in a type-safe way, which is necessary for incrementally building certain payloads, like // the ones generated in the `datadog_metrics` sink. let protobuf_fds_path = - PathBuf::from(std::env::var("OUT_DIR").expect("OUT_DIR environment variable not set")) + Path::new(&std::env::var("OUT_DIR").expect("OUT_DIR environment variable not set")) .join("protobuf-fds.bin"); let mut prost_build = prost_build::Config::new(); diff --git a/deny.toml b/deny.toml index 22181e1e0727b..726a94cf04e89 100644 --- a/deny.toml +++ b/deny.toml @@ -1,22 +1,33 @@ [licenses] +unlicensed = "deny" +default = "deny" +copyleft = "deny" allow = [ - "MIT", + "0BSD", + "Apache-2.0", + "BSD-2-Clause", + "BSD-3-Clause", + "BSL-1.0", "CC0-1.0", "ISC", + "MIT", "OpenSSL", - "Unlicense", - "BSD-2-Clause", - "BSD-3-Clause", - "Apache-2.0", - "Apache-2.0 WITH LLVM-exception", - "Zlib", + "Unicode-DFS-2016", + "Zlib" ] -unlicensed = "warn" -default = "warn" - private = { ignore = true } +exceptions = [ + # MPL-2.0 are added case-by-case to make sure we are in compliance. To be in + # compliance we cannot be modifying the source files. 
+ { allow = ["MPL-2.0"], name = "colored", version = "*" }, + { allow = ["MPL-2.0"], name = "webpki-roots", version = "*" }, + { allow = ["MPL-2.0"], name = "vector-config-common", version = "*" }, + { allow = ["MPL-2.0"], name = "vector-config-macros", version = "*" }, + { allow = ["MPL-2.0"], name = "vrl", version = "*" }, +] + [[licenses.clarify]] name = "ring" version = "*" diff --git a/distribution/docker/alpine/Dockerfile b/distribution/docker/alpine/Dockerfile index c3479f1001191..c749e9fc0540a 100644 --- a/distribution/docker/alpine/Dockerfile +++ b/distribution/docker/alpine/Dockerfile @@ -8,6 +8,8 @@ RUN tar -xvf vector-0*-"$(cat /etc/apk/arch)"-unknown-linux-musl*.tar.gz --strip RUN mkdir -p /var/lib/vector FROM docker.io/alpine:3.18 +# we want the latest versions of these +# hadolint ignore=DL3018 RUN apk --no-cache add ca-certificates tzdata COPY --from=builder /vector/bin/* /usr/local/bin/ diff --git a/distribution/docker/debian/Dockerfile b/distribution/docker/debian/Dockerfile index 46afabb2480e4..c678dfd3080ee 100644 --- a/distribution/docker/debian/Dockerfile +++ b/distribution/docker/debian/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/debian:bullseye-slim AS builder +FROM docker.io/debian:bookworm-slim AS builder WORKDIR /vector @@ -7,11 +7,14 @@ RUN dpkg -i vector_*_"$(dpkg --print-architecture)".deb RUN mkdir -p /var/lib/vector -FROM docker.io/debian:bullseye-slim +FROM docker.io/debian:bookworm-slim +# we want the latest versions of these +# hadolint ignore=DL3008 RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates tzdata systemd && rm -rf /var/lib/apt/lists/* COPY --from=builder /usr/bin/vector /usr/bin/vector +COPY --from=builder /usr/share/vector /usr/share/vector COPY --from=builder /usr/share/doc/vector /usr/share/doc/vector COPY --from=builder /etc/vector /etc/vector COPY --from=builder /var/lib/vector /var/lib/vector diff --git a/distribution/docker/distroless-libc/Dockerfile b/distribution/docker/distroless-libc/Dockerfile index 6c164dcd07dfe..773aeadcbdc93 100644 --- a/distribution/docker/distroless-libc/Dockerfile +++ b/distribution/docker/distroless-libc/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/debian:bullseye-slim AS builder +FROM docker.io/debian:bookworm-slim AS builder WORKDIR /vector @@ -7,10 +7,13 @@ RUN dpkg -i vector_*_"$(dpkg --print-architecture)".deb RUN mkdir -p /var/lib/vector -FROM gcr.io/distroless/cc-debian11 +# distroless doesn't use static tags +# hadolint ignore=DL3007 +FROM gcr.io/distroless/cc-debian11:latest COPY --from=builder /usr/bin/vector /usr/bin/vector COPY --from=builder /usr/share/doc/vector /usr/share/doc/vector +COPY --from=builder /usr/share/vector /usr/share/vector COPY --from=builder /etc/vector /etc/vector COPY --from=builder /var/lib/vector /var/lib/vector diff --git a/distribution/docker/distroless-static/Dockerfile b/distribution/docker/distroless-static/Dockerfile index 874c165d64457..280c7d6be25d0 100644 --- a/distribution/docker/distroless-static/Dockerfile +++ b/distribution/docker/distroless-static/Dockerfile @@ -7,7 +7,9 @@ RUN tar -xvf vector-0*-"$(cat /etc/apk/arch)"-unknown-linux-musl*.tar.gz --strip RUN mkdir -p /var/lib/vector -FROM gcr.io/distroless/static +# distroless doesn't use static tags +# hadolint ignore=DL3007 +FROM gcr.io/distroless/static:latest COPY --from=builder /vector/bin/* /usr/local/bin/ COPY --from=builder /vector/config/vector.toml /etc/vector/vector.toml diff --git a/distribution/install.sh b/distribution/install.sh index 4fbfb8f10b059..bc639ddab3320 100755 
--- a/distribution/install.sh +++ b/distribution/install.sh @@ -12,7 +12,7 @@ set -u # If PACKAGE_ROOT is unset or empty, default it. PACKAGE_ROOT="${PACKAGE_ROOT:-"https://packages.timber.io/vector"}" -VECTOR_VERSION="0.31.0" +VECTOR_VERSION="0.32.2" _divider="--------------------------------------------------------------------------------" _prompt=">>>" _indent=" " @@ -152,6 +152,16 @@ install_from_archive() { x86_64-*linux*-musl) _archive_arch="x86_64-unknown-linux-musl" ;; + aarch64-apple-darwin) + # This if statement can be removed when Vector publishes aarch64-apple-darwin builds + if /usr/bin/pgrep oahd >/dev/null 2>&1; then + echo "Rosetta is installed, installing x86_64-apple-darwin archive" + _archive_arch="x86_64-apple-darwin" + else + echo "Builds for Apple Silicon are not published today, please install Rosetta" + err "unsupported arch: $_arch" + fi + ;; aarch64-*linux*) _archive_arch="aarch64-unknown-linux-musl" ;; diff --git a/distribution/kubernetes/vector-agent/README.md b/distribution/kubernetes/vector-agent/README.md index 58bc8ff221193..531ce3abac357 100644 --- a/distribution/kubernetes/vector-agent/README.md +++ b/distribution/kubernetes/vector-agent/README.md @@ -1,6 +1,6 @@ The kubernetes manifests found in this directory have been automatically generated from the [helm chart `vector/vector`](https://github.com/vectordotdev/helm-charts/tree/master/charts/vector) -version 0.22.0 with the following `values.yaml`: +version 0.23.0 with the following `values.yaml`: ```yaml role: Agent diff --git a/distribution/kubernetes/vector-agent/configmap.yaml b/distribution/kubernetes/vector-agent/configmap.yaml index d7c928b159f7c..cc4d0a701a446 100644 --- a/distribution/kubernetes/vector-agent/configmap.yaml +++ b/distribution/kubernetes/vector-agent/configmap.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.30.0-distroless-libc" + app.kubernetes.io/version: "0.31.0-distroless-libc" data: agent.yaml: | data_dir: /vector-data-dir diff --git a/distribution/kubernetes/vector-agent/daemonset.yaml b/distribution/kubernetes/vector-agent/daemonset.yaml index 8508420956313..5deb05f8a834b 100644 --- a/distribution/kubernetes/vector-agent/daemonset.yaml +++ b/distribution/kubernetes/vector-agent/daemonset.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.30.0-distroless-libc" + app.kubernetes.io/version: "0.31.0-distroless-libc" annotations: {} spec: selector: @@ -16,6 +16,7 @@ spec: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent + minReadySeconds: 0 template: metadata: annotations: {} @@ -29,7 +30,7 @@ spec: dnsPolicy: ClusterFirst containers: - name: vector - image: "timberio/vector:0.30.0-distroless-libc" + image: "timberio/vector:0.31.0-distroless-libc" imagePullPolicy: IfNotPresent args: - --config-dir @@ -51,6 +52,8 @@ spec: value: "/host/proc" - name: SYSFS_ROOT value: "/host/sys" + - name: VECTOR_LOG + value: "info" ports: - name: prom-exporter containerPort: 9090 @@ -61,17 +64,17 @@ spec: - name: config mountPath: "/etc/vector/" readOnly: true - - name: var-log - mountPath: "/var/log/" + - mountPath: /var/log/ + name: var-log readOnly: true - - name: var-lib - mountPath: "/var/lib" + - mountPath: /var/lib + name: var-lib readOnly: true - - name: procfs - mountPath: "/host/proc" + - mountPath: /host/proc 
+ name: procfs readOnly: true - - name: sysfs - mountPath: "/host/sys" + - mountPath: /host/sys + name: sysfs readOnly: true terminationGracePeriodSeconds: 60 volumes: @@ -83,15 +86,15 @@ spec: - name: data hostPath: path: "/var/lib/vector" - - name: var-log - hostPath: - path: "/var/log/" - - name: var-lib - hostPath: - path: "/var/lib/" - - name: procfs - hostPath: - path: "/proc" - - name: sysfs - hostPath: - path: "/sys" + - hostPath: + path: /var/log/ + name: var-log + - hostPath: + path: /var/lib/ + name: var-lib + - hostPath: + path: /proc + name: procfs + - hostPath: + path: /sys + name: sysfs diff --git a/distribution/kubernetes/vector-agent/rbac.yaml b/distribution/kubernetes/vector-agent/rbac.yaml index 2ad572473e7d7..2309c07b8b9d2 100644 --- a/distribution/kubernetes/vector-agent/rbac.yaml +++ b/distribution/kubernetes/vector-agent/rbac.yaml @@ -10,7 +10,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.30.0-distroless-libc" + app.kubernetes.io/version: "0.31.0-distroless-libc" rules: - apiGroups: - "" @@ -31,7 +31,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.30.0-distroless-libc" + app.kubernetes.io/version: "0.31.0-distroless-libc" roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole diff --git a/distribution/kubernetes/vector-agent/service-headless.yaml b/distribution/kubernetes/vector-agent/service-headless.yaml index 18ea854b8f9e7..1ef8fb122e0e3 100644 --- a/distribution/kubernetes/vector-agent/service-headless.yaml +++ b/distribution/kubernetes/vector-agent/service-headless.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.30.0-distroless-libc" + app.kubernetes.io/version: "0.31.0-distroless-libc" annotations: spec: clusterIP: None diff --git a/distribution/kubernetes/vector-agent/serviceaccount.yaml b/distribution/kubernetes/vector-agent/serviceaccount.yaml index 18d7093b583f8..389773e9101a3 100644 --- a/distribution/kubernetes/vector-agent/serviceaccount.yaml +++ b/distribution/kubernetes/vector-agent/serviceaccount.yaml @@ -8,5 +8,5 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.30.0-distroless-libc" + app.kubernetes.io/version: "0.31.0-distroless-libc" automountServiceAccountToken: true diff --git a/distribution/kubernetes/vector-aggregator/README.md b/distribution/kubernetes/vector-aggregator/README.md index 194d80cc3ebb1..4c5a3b4fcb1b4 100644 --- a/distribution/kubernetes/vector-aggregator/README.md +++ b/distribution/kubernetes/vector-aggregator/README.md @@ -1,6 +1,6 @@ The kubernetes manifests found in this directory have been automatically generated from the [helm chart `vector/vector`](https://github.com/vectordotdev/helm-charts/tree/master/charts/vector) -version 0.22.0 with the following `values.yaml`: +version 0.23.0 with the following `values.yaml`: ```yaml diff --git a/distribution/kubernetes/vector-aggregator/configmap.yaml b/distribution/kubernetes/vector-aggregator/configmap.yaml index 028a2f273a075..3f82d196965e4 100644 --- a/distribution/kubernetes/vector-aggregator/configmap.yaml +++ b/distribution/kubernetes/vector-aggregator/configmap.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector 
app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.30.0-distroless-libc" + app.kubernetes.io/version: "0.31.0-distroless-libc" data: aggregator.yaml: | data_dir: /vector-data-dir diff --git a/distribution/kubernetes/vector-aggregator/service-headless.yaml b/distribution/kubernetes/vector-aggregator/service-headless.yaml index 06cad3551b635..643102caccd3c 100644 --- a/distribution/kubernetes/vector-aggregator/service-headless.yaml +++ b/distribution/kubernetes/vector-aggregator/service-headless.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.30.0-distroless-libc" + app.kubernetes.io/version: "0.31.0-distroless-libc" annotations: spec: clusterIP: None diff --git a/distribution/kubernetes/vector-aggregator/service.yaml b/distribution/kubernetes/vector-aggregator/service.yaml index 449a24950bc1e..023e8dfd0f557 100644 --- a/distribution/kubernetes/vector-aggregator/service.yaml +++ b/distribution/kubernetes/vector-aggregator/service.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.30.0-distroless-libc" + app.kubernetes.io/version: "0.31.0-distroless-libc" annotations: spec: ports: diff --git a/distribution/kubernetes/vector-aggregator/serviceaccount.yaml b/distribution/kubernetes/vector-aggregator/serviceaccount.yaml index 0bf2da2d58d3b..024e2f65b3b1b 100644 --- a/distribution/kubernetes/vector-aggregator/serviceaccount.yaml +++ b/distribution/kubernetes/vector-aggregator/serviceaccount.yaml @@ -8,5 +8,5 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.30.0-distroless-libc" + app.kubernetes.io/version: "0.31.0-distroless-libc" automountServiceAccountToken: true diff --git a/distribution/kubernetes/vector-aggregator/statefulset.yaml b/distribution/kubernetes/vector-aggregator/statefulset.yaml index 2eef56ffd5ad9..ad5a0d0086a2b 100644 --- a/distribution/kubernetes/vector-aggregator/statefulset.yaml +++ b/distribution/kubernetes/vector-aggregator/statefulset.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.30.0-distroless-libc" + app.kubernetes.io/version: "0.31.0-distroless-libc" annotations: {} spec: replicas: 1 @@ -32,7 +32,7 @@ spec: dnsPolicy: ClusterFirst containers: - name: vector - image: "timberio/vector:0.30.0-distroless-libc" + image: "timberio/vector:0.31.0-distroless-libc" imagePullPolicy: IfNotPresent args: - --config-dir diff --git a/distribution/kubernetes/vector-stateless-aggregator/README.md b/distribution/kubernetes/vector-stateless-aggregator/README.md index 2703746f5d435..69466743c76ec 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/README.md +++ b/distribution/kubernetes/vector-stateless-aggregator/README.md @@ -1,6 +1,6 @@ The kubernetes manifests found in this directory have been automatically generated from the [helm chart `vector/vector`](https://github.com/vectordotdev/helm-charts/tree/master/charts/vector) -version 0.22.0 with the following `values.yaml`: +version 0.23.0 with the following `values.yaml`: ```yaml role: Stateless-Aggregator diff --git a/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml 
b/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml index 766c693669f7b..38de88fe03a06 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.30.0-distroless-libc" + app.kubernetes.io/version: "0.31.0-distroless-libc" data: aggregator.yaml: | data_dir: /vector-data-dir diff --git a/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml b/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml index 6ff20dc958816..6072b5464fa86 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.30.0-distroless-libc" + app.kubernetes.io/version: "0.31.0-distroless-libc" annotations: {} spec: replicas: 1 @@ -30,7 +30,7 @@ spec: dnsPolicy: ClusterFirst containers: - name: vector - image: "timberio/vector:0.30.0-distroless-libc" + image: "timberio/vector:0.31.0-distroless-libc" imagePullPolicy: IfNotPresent args: - --config-dir diff --git a/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml b/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml index 3230af57fbd76..a7d86afb480c5 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.30.0-distroless-libc" + app.kubernetes.io/version: "0.31.0-distroless-libc" annotations: spec: clusterIP: None diff --git a/distribution/kubernetes/vector-stateless-aggregator/service.yaml b/distribution/kubernetes/vector-stateless-aggregator/service.yaml index a22d86ff0925f..9d9f59d239018 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/service.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/service.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.30.0-distroless-libc" + app.kubernetes.io/version: "0.31.0-distroless-libc" annotations: spec: ports: diff --git a/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml b/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml index 50bba163cf9f5..d46fee95872d7 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml @@ -8,5 +8,5 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.30.0-distroless-libc" + app.kubernetes.io/version: "0.31.0-distroless-libc" automountServiceAccountToken: true diff --git a/distribution/rpm/vector.spec b/distribution/rpm/vector.spec index 04a57d4564b6a..3a895ace15f21 100644 --- a/distribution/rpm/vector.spec +++ b/distribution/rpm/vector.spec @@ -52,6 +52,7 @@ mkdir -p %{buildroot}%{_bindir} mkdir -p 
%{buildroot}%{_sysconfdir}/%{_name} mkdir -p %{buildroot}%{_sysconfdir}/default mkdir -p %{buildroot}%{_sharedstatedir}/%{_name} +mkdir -p %{buildroot}%{_datadir}/%{_name} mkdir -p %{buildroot}%{_unitdir} cp -a %{_builddir}/bin/vector %{buildroot}%{_bindir} @@ -59,6 +60,9 @@ cp -a %{_builddir}/config/vector.toml %{buildroot}%{_sysconfdir}/%{_name}/vector cp -a %{_builddir}/config/examples/. %{buildroot}%{_sysconfdir}/%{_name}/examples cp -a %{_builddir}/systemd/vector.service %{buildroot}%{_unitdir}/vector.service cp -a %{_builddir}/systemd/vector.default %{buildroot}%{_sysconfdir}/default/vector +cp -a %{_builddir}/licenses/. %{buildroot}%{_datadir}/%{_name}/licenses +cp -a %{_builddir}/NOTICE %{buildroot}%{_datadir}/%{_name}/NOTICE +cp -a %{_builddir}/LICENSE-3rdparty.csv %{buildroot}%{_datadir}/%{_name}/LICENSE-3rdparty.csv %post getent passwd %{_username} > /dev/null || \ @@ -80,6 +84,9 @@ rm -rf %{buildroot} %config %{_sysconfdir}/%{_name}/examples/* %dir %{_sharedstatedir}/%{_name} %doc README.md +%doc %{_datadir}/%{_name}/NOTICE +%doc %{_datadir}/%{_name}/licenses/* +%doc %{_datadir}/%{_name}/LICENSE-3rdparty.csv %license LICENSE %changelog diff --git a/docs/DEVELOPING.md b/docs/DEVELOPING.md index d7512a980bd6c..cf66c472587df 100644 --- a/docs/DEVELOPING.md +++ b/docs/DEVELOPING.md @@ -121,9 +121,10 @@ To build Vector on your own host will require a fairly complete development envi Loosely, you'll need the following: - **To build Vector:** Have working Rustup, Protobuf tools, C++/C build tools (LLVM, GCC, or MSVC), Python, and Perl, `make` (the GNU one preferably), `bash`, `cmake`, `GNU coreutils`, and `autotools`. +- **To run `make test`:** Install [`cargo-nextest`](https://nexte.st/) - **To run integration tests:** Have `docker` available, or a real live version of that service. (Use `AUTOSPAWN=false`) - **To run `make check-component-features`:** Have `remarshal` installed. -- **To run `make check-licenses` or `cargo vdev build licenses`:** Have `rust-license-tool` [installed](https://github.com/DataDog/rust-license-tool). +- **To run `make check-licenses` or `cargo vdev build licenses`:** Have `dd-rust-license-tool` [installed](https://github.com/DataDog/rust-license-tool). - **To run `cargo vdev build component-docs`:** Have `cue` [installed](https://cuelang.org/docs/install/). If you find yourself needing to run something inside the Docker environment described above, that's totally fine, they won't collide or hurt each other. In this case, you'd just run `make environment-generate`. @@ -141,19 +142,19 @@ cargo build make build-dev # Validate your test pass cargo test sources::example -make test scope="sources::example" +make test SCOPE="sources::example" # Validate tests (that do not require other services) pass cargo test make test # Validate your tests pass (starting required services in Docker) -make test-integration scope="sources::example" +make test-integration SCOPE="sources::example" # Validate your tests pass against a live service. -make test-integration scope="sources::example" autospawn=false +make test-integration SCOPE="sources::example" autospawn=false cargo test --features docker sources::example # Validate all tests pass (starting required services in Docker) make test-integration # Run your benchmarks -make bench scope="transforms::example" +make bench SCOPE="transforms::example" cargo bench transforms::example # Format your code before pushing! 
make fmt diff --git a/docs/REVIEWING.md b/docs/REVIEWING.md index f8356f7a2f271..26dfd8943d896 100644 --- a/docs/REVIEWING.md +++ b/docs/REVIEWING.md @@ -36,6 +36,42 @@ following items should also be checked: - [ ] Does it comply with [component spec](specs/component.md)? - [ ] Does it comply with the [instrumentation spec](specs/instrumentation.md)? + +### Checklist - new sink + +This checklist is specific for Vector's sink code. + +#### Logic + +- [ ] Does it work? Do you understand what it is supposed to be doing? +- [ ] Does the retry logic make sense? +- [ ] Are the tests testing that the sink is emitting the correct metrics? +- [ ] Are there integration tests? + +#### Code structure + +- [ ] Is it using the sink prelude (`use crate::sinks::prelude::*`)? +- [ ] Is the sink a stream based sink? + Check that the return value from `SinkConfig::build` is the return from `VectorSink::from_event_streamsink`. +- [ ] Is it gated by sensible feature flags? +- [ ] Is the code modularized into `mod.rs`, `config.rs`, `sink.rs`, `request_builder.rs`, `service.rs` +- [ ] Does the code follow our [style guidelines]. + +#### Documentation + +- [ ] Look at the doc preview on Netlify. Does it look good? +- [ ] Is there a `cue` file linking to `base`? +- [ ] Is there a markdown file under `/website/content/en/docs/reference/configuration/sinks/`? +- [ ] Are module comments included in `mod.rs` linking to any relevant areas in the external services documentation? + +#### Configuration + +- [ ] Are TLS settings configurable? +- [ ] Are the Request settings configurable? +- [ ] Should it have proxy settings? If so, are they in place? +- [ ] Does it need batch settings? If so, are they used? + + ## Expectations We endeavour to review all PRs within 2 working days (Monday to Friday) of submission. @@ -131,3 +167,5 @@ your best judgment, some code requires more testing than others depending on its importance. For integrations, consider whether the code could be integration tested. + +[style guidelines]: https://github.com/vectordotdev/vector/blob/master/STYLE.md diff --git a/docs/tutorials/sinks/1_basic_sink.md b/docs/tutorials/sinks/1_basic_sink.md index 18194636eb469..ae49909f91dd2 100644 --- a/docs/tutorials/sinks/1_basic_sink.md +++ b/docs/tutorials/sinks/1_basic_sink.md @@ -33,7 +33,7 @@ is deserialized to the fields in this struct so the user can customise the sink's behaviour. ```rust -#[configurable_component(sink("basic", "Basic sink."))] +#[configurable_component(sink("basic"))] #[derive(Clone, Debug)] /// A basic sink that dumps its output to stdout. pub struct BasicConfig { @@ -273,7 +273,7 @@ emit the event. Change the body of `run_inner` to look like the following: ## EventSent -[`EventSent`][events_sent] is emmitted by each component in Vector to +[`EventSent`][events_sent] is emitted by each component in Vector to instrument how many bytes have been sent to the next downstream component. 
Add the following after emitting `BytesSent`: @@ -284,7 +284,7 @@ Add the following after emitting `BytesSent`: + count: 1, + byte_size: event_byte_size, + output: None, -+ }) ++ }); ``` More details about instrumenting Vector can be found diff --git a/docs/tutorials/sinks/2_http_sink.md b/docs/tutorials/sinks/2_http_sink.md index 66fcb2e4d6f97..179ff4192c8cc 100644 --- a/docs/tutorials/sinks/2_http_sink.md +++ b/docs/tutorials/sinks/2_http_sink.md @@ -16,6 +16,7 @@ use crate::{ http::HttpClient, internal_events::SinkRequestBuildError, }; +use vector_core::config::telemetry; use bytes::Bytes; ``` @@ -81,12 +82,12 @@ struct BasicEncoder; The Encoder must implement the [`Encoder`][encoder] trait: ```rust -impl Encoder for BasicEncoder { +impl encoding::Encoder for BasicEncoder { fn encode_input( &self, input: Event, writer: &mut dyn std::io::Write, - ) -> std::io::Result { + ) -> std::io::Result<(usize, GroupedCountByteSize)> { } } ``` @@ -98,16 +99,25 @@ sending batches of events, or they may send a completely different type if each event is processed in some way prior to encoding. [`encode_input`][encoder_encode_input] serializes the event to a String and -writes these bytes: +writes these bytes. The function also creates a [`GroupedCountByteSize`] +[grouped_count_byte_size] object. This object tracks the size of the event +that is sent by the sink, optionally grouped by the source and service that +originated the event if Vector has been configured to do so. It is necessary to +calculate the sizes in this function since the encode function sometimes drops +fields from the event prior to encoding. We need the size to be calculated after +these fields have been dropped. ```rust fn encode_input( &self, input: Event, writer: &mut dyn std::io::Write, - ) -> std::io::Result { + ) -> std::io::Result<(usize, GroupedCountByteSize)> { + let mut byte_size = telemetry().create_request_count_byte_size(); + byte_size.add_event(&input, input.estimated_json_encoded_size_of()); + let event = serde_json::to_string(&input).unwrap(); - write_all(writer, 1, event.as_bytes()).map(|()| event.len()) + write_all(writer, 1, event.as_bytes()).map(|()| (event.len(), byte_size)) } ``` @@ -152,8 +162,12 @@ We need to implement a number of traits for the request to access these fields: ```rust impl MetaDescriptive for BasicRequest { - fn get_metadata(&self) -> RequestMetadata { - self.metadata + fn get_metadata(&self) -> &RequestMetadata { + &self.metadata + } + + fn metadata_mut(&mut self) -> &mut RequestMetadata { + &mut self.metadata } } @@ -249,7 +263,7 @@ when sending the event to an `amqp` server. mut input: Event, ) -> (Self::Metadata, RequestMetadataBuilder, Self::Events) { let finalizers = input.take_finalizers(); - let metadata_builder = RequestMetadataBuilder::from_events(&input); + let metadata_builder = RequestMetadataBuilder::from_event(&input); (finalizers, metadata_builder, input) } ``` @@ -338,7 +352,12 @@ that will be invoked to send the actual data. 
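The hunk below touches the service's `call` implementation. For readers who have not used Tower before, here is a minimal standalone `tower::Service` implementation, independent of Vector's request and response types (the `PrintService` name and `String` request type are illustrative only):

```rust
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};

use tower::Service;

struct PrintService;

impl Service<String> for PrintService {
    type Response = ();
    type Error = std::convert::Infallible;
    type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>;

    // Report readiness; a real sink service might check rate limits here.
    fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        Poll::Ready(Ok(()))
    }

    // Perform the actual send; the driver awaits the returned future.
    fn call(&mut self, req: String) -> Self::Future {
        Box::pin(async move {
            println!("sending request: {req}");
            Ok::<_, std::convert::Infallible>(())
        })
    }
}
```

The sink's own service follows the same shape, with `BasicRequest` going in and `BasicResponse` coming out.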
match client.call(req).await { Ok(response) => { if response.status().is_success() { - Ok(BasicResponse { byte_size }) + Ok(BasicResponse { + byte_size, + json_size: request + .metadata + .into_events_estimated_json_encoded_byte_size(), + }) } else { Err("received error response") } @@ -359,6 +378,7 @@ The return from our service must be an object that implements the ```rust struct BasicResponse { byte_size: usize, + json_size: GroupedCountByteSize, } impl DriverResponse for BasicResponse { @@ -366,11 +386,13 @@ impl DriverResponse for BasicResponse { EventStatus::Delivered } - fn events_sent(&self) -> RequestCountByteSize { - // (events count, byte size) - CountByteSize(1, self.byte_size).into() + fn events_sent(&self) -> &GroupedCountByteSize { + &self.json_size } -} + + fn bytes_sent(&self) -> Option { + Some(self.byte_size) + }} ``` Vector calls the methods in this trait to determine if the event was delivered successfully. @@ -469,7 +491,7 @@ BODY: {"log":{"host":"computer","message":"zork","source_type":"stdin","timestamp":"2023-01-23T10:21:57.215019942Z"}} ``` -[tutorial_1]: https://github.com/vectordotdev/vector/tree/master/docs/tutorials/sinsk/1_basic_sink.md +[tutorial_1]: https://github.com/vectordotdev/vector/tree/master/docs/tutorials/sinks/1_basic_sink.md [tower]: https://docs.rs/tower/latest/tower/ [tower_service]: https://docs.rs/tower/latest/tower/trait.Service.html [hyper_docs]: https://docs.rs/hyper/latest/hyper/ @@ -492,3 +514,4 @@ BODY: [sinkbuilder_ext_into_driver]: https://rust-doc.vector.dev/vector/sinks/util/builder/trait.sinkbuilderext#method.into_driver [stream_filter_map]: https://docs.rs/futures/latest/futures/stream/trait.StreamExt.html#method.filter_map [driver]: https://rust-doc.vector.dev/vector_core/stream/struct.driver +[grouped_count_byte_size]: https://rust-doc.vector.dev/vector_common/request_metadata/enum.groupedcountbytesize diff --git a/lib/codecs/Cargo.toml b/lib/codecs/Cargo.toml index 612951fc488cc..3914aa99553e5 100644 --- a/lib/codecs/Cargo.toml +++ b/lib/codecs/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" publish = false [dependencies] -apache-avro = { version = "0.14.0", default-features = false } +apache-avro = { version = "0.15.0", default-features = false } bytes = { version = "1", default-features = false } chrono = { version = "0.4", default-features = false } csv = { version = "1.2", default-features = false } @@ -19,13 +19,14 @@ once_cell = { version = "1.18", default-features = false } ordered-float = { version = "3.7.0", default-features = false } prometheus-remote-write = { version = "1.1", tag = "v1.1.3", git = "ssh://git@github.com/answerbook/prometheus-remote-write-rs.git" } prost = { version = "0.11.8", default-features = false, features = ["std"] } -regex = { version = "1.8.4", default-features = false, features = ["std", "perf"] } +prost-reflect = { version = "0.11", default-features = false, features = ["serde"] } +regex = { version = "1.9.3", default-features = false, features = ["std", "perf"] } serde = { version = "1", default-features = false, features = ["derive"] } serde_json = { version = "1", default-features = false } smallvec = { version = "1", default-features = false, features = ["union"] } -snafu = { version = "0.7.4", default-features = false, features = ["futures"] } +snafu = { version = "0.7.5", default-features = false, features = ["futures"] } snap = "1" -syslog_loose = { version = "0.18", default-features = false, optional = true } +syslog_loose = { version = "0.19", default-features = false, optional = true 
} tokio-util = { version = "0.7", default-features = false, features = ["codec"] } tracing = { version = "0.1", default-features = false } vrl.workspace = true diff --git a/lib/codecs/src/decoding/format/bytes.rs b/lib/codecs/src/decoding/format/bytes.rs index e27df861cf1e0..06a97b67c2950 100644 --- a/lib/codecs/src/decoding/format/bytes.rs +++ b/lib/codecs/src/decoding/format/bytes.rs @@ -1,9 +1,9 @@ use bytes::Bytes; -use lookup::lookup_v2::parse_value_path; use lookup::OwnedTargetPath; use serde::{Deserialize, Serialize}; use smallvec::{smallvec, SmallVec}; use vector_core::config::LogNamespace; +use vector_core::schema::meaning; use vector_core::{ config::{log_schema, DataType}, event::{Event, LogEvent}, @@ -25,7 +25,7 @@ impl BytesDeserializerConfig { /// Build the `BytesDeserializer` from this configuration. pub fn build(&self) -> BytesDeserializer { - BytesDeserializer::new() + BytesDeserializer } /// Return the type of event build by this deserializer. @@ -36,11 +36,17 @@ impl BytesDeserializerConfig { /// The schema produced by the deserializer. pub fn schema_definition(&self, log_namespace: LogNamespace) -> schema::Definition { match log_namespace { - LogNamespace::Legacy => schema::Definition::empty_legacy_namespace().with_event_field( - &parse_value_path(log_schema().message_key()).expect("valid message key"), - Kind::bytes(), - Some("message"), - ), + LogNamespace::Legacy => { + let definition = schema::Definition::empty_legacy_namespace(); + if let Some(message_key) = log_schema().message_key() { + return definition.with_event_field( + message_key, + Kind::bytes(), + Some(meaning::MESSAGE), + ); + } + definition + } LogNamespace::Vector => { schema::Definition::new_with_default_metadata(Kind::bytes(), [log_namespace]) .with_meaning(OwnedTargetPath::event_root(), "message") @@ -54,32 +60,16 @@ impl BytesDeserializerConfig { /// This deserializer can be considered as the no-op action for input where no /// further decoding has been specified. #[derive(Debug, Clone)] -pub struct BytesDeserializer { - // Only used with the "Legacy" namespace. The "Vector" namespace decodes the data at the root of the event. - log_schema_message_key: &'static str, -} - -impl Default for BytesDeserializer { - fn default() -> Self { - Self::new() - } -} +pub struct BytesDeserializer; impl BytesDeserializer { - /// Creates a new `BytesDeserializer`. - pub fn new() -> Self { - Self { - log_schema_message_key: log_schema().message_key(), - } - } - /// Deserializes the given bytes, which will always produce a single `LogEvent`. 
pub fn parse_single(&self, bytes: Bytes, log_namespace: LogNamespace) -> LogEvent { match log_namespace { LogNamespace::Vector => log_namespace.new_log_from_data(bytes), LogNamespace::Legacy => { let mut log = LogEvent::default(); - log.insert(self.log_schema_message_key, bytes); + log.maybe_insert(log_schema().message_key_target_path(), bytes); log } } @@ -99,15 +89,13 @@ impl Deserializer for BytesDeserializer { #[cfg(test)] mod tests { - use vector_core::config::log_schema; - use vrl::value::Value; - use super::*; + use vrl::value::Value; #[test] fn deserialize_bytes_legacy_namespace() { let input = Bytes::from("foo"); - let deserializer = BytesDeserializer::new(); + let deserializer = BytesDeserializer; let events = deserializer.parse(input, LogNamespace::Legacy).unwrap(); let mut events = events.into_iter(); @@ -115,7 +103,7 @@ mod tests { { let event = events.next().unwrap(); let log = event.as_log(); - assert_eq!(log[log_schema().message_key()], "foo".into()); + assert_eq!(*log.get_message().unwrap(), "foo".into()); } assert_eq!(events.next(), None); @@ -124,7 +112,7 @@ mod tests { #[test] fn deserialize_bytes_vector_namespace() { let input = Bytes::from("foo"); - let deserializer = BytesDeserializer::new(); + let deserializer = BytesDeserializer; let events = deserializer.parse(input, LogNamespace::Vector).unwrap(); assert_eq!(events.len(), 1); diff --git a/lib/codecs/src/decoding/format/gelf.rs b/lib/codecs/src/decoding/format/gelf.rs index e5b7dbe96c315..eae4c30739880 100644 --- a/lib/codecs/src/decoding/format/gelf.rs +++ b/lib/codecs/src/decoding/format/gelf.rs @@ -1,7 +1,7 @@ use bytes::Bytes; use chrono::{DateTime, NaiveDateTime, Utc}; use derivative::Derivative; -use lookup::{event_path, owned_value_path, PathPrefix}; +use lookup::{event_path, owned_value_path}; use serde::{Deserialize, Serialize}; use smallvec::{smallvec, SmallVec}; use std::collections::HashMap; @@ -17,11 +17,12 @@ use vrl::value::kind::Collection; use vrl::value::{Kind, Value}; use super::{default_lossy, Deserializer}; +use crate::gelf::GELF_TARGET_PATHS; use crate::{gelf_fields::*, VALID_FIELD_REGEX}; /// On GELF decoding behavior: /// Graylog has a relaxed decoding. They are much more lenient than the spec would -/// suggest. We've elected to take a more strict approach to maintain backwards compatability +/// suggest. We've elected to take a more strict approach to maintain backwards compatibility /// in the event that we need to change the behavior to be more relaxed, so that prior versions /// of vector will still work with the new relaxed decoding. 
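The hunk below replaces the string-constant field names (`VERSION`, `HOST`, and friends) with precomputed target paths from `crate::gelf::GELF_TARGET_PATHS`. A hedged sketch of that pattern, using an assumed two-field struct rather than the real GELF field set:

```rust
use lookup::{owned_value_path, OwnedTargetPath};
use once_cell::sync::Lazy;
use vector_core::event::LogEvent;

// Hypothetical subset of the real GELF_TARGET_PATHS struct.
struct GelfTargetPaths {
    version: OwnedTargetPath,
    host: OwnedTargetPath,
}

// The paths are built once at first use instead of being re-parsed for every
// decoded event.
static GELF_TARGET_PATHS: Lazy<GelfTargetPaths> = Lazy::new(|| GelfTargetPaths {
    version: OwnedTargetPath::event(owned_value_path!("version")),
    host: OwnedTargetPath::event(owned_value_path!("host")),
});

fn insert_static_fields(log: &mut LogEvent, version: &str, host: &str) {
    log.insert(&GELF_TARGET_PATHS.version, version.to_string());
    log.insert(&GELF_TARGET_PATHS.host, host.to_string());
}
```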
@@ -123,44 +124,41 @@ impl GelfDeserializer { .into()); } - log.insert(VERSION, parsed.version.to_string()); - log.insert(HOST, parsed.host.to_string()); + log.insert(&GELF_TARGET_PATHS.version, parsed.version.to_string()); + log.insert(&GELF_TARGET_PATHS.host, parsed.host.to_string()); if let Some(full_message) = &parsed.full_message { - log.insert(FULL_MESSAGE, full_message.to_string()); + log.insert(&GELF_TARGET_PATHS.full_message, full_message.to_string()); } - if let Some(timestamp_key) = log_schema().timestamp_key() { + if let Some(timestamp_key) = log_schema().timestamp_key_target_path() { if let Some(timestamp) = parsed.timestamp { let naive = NaiveDateTime::from_timestamp_opt( f64::trunc(timestamp) as i64, f64::fract(timestamp) as u32, ) .expect("invalid timestamp"); - log.insert( - (PathPrefix::Event, timestamp_key), - DateTime::::from_utc(naive, Utc), - ); + log.insert(timestamp_key, DateTime::::from_utc(naive, Utc)); // per GELF spec- add timestamp if not provided } else { - log.insert((PathPrefix::Event, timestamp_key), Utc::now()); + log.insert(timestamp_key, Utc::now()); } } if let Some(level) = parsed.level { - log.insert(LEVEL, level); + log.insert(&GELF_TARGET_PATHS.level, level); } if let Some(facility) = &parsed.facility { - log.insert(FACILITY, facility.to_string()); + log.insert(&GELF_TARGET_PATHS.facility, facility.to_string()); } if let Some(line) = parsed.line { log.insert( - LINE, + &GELF_TARGET_PATHS.line, Value::Float(ordered_float::NotNan::new(line).expect("JSON doesn't allow NaNs")), ); } if let Some(file) = &parsed.file { - log.insert(FILE, file.to_string()); + log.insert(&GELF_TARGET_PATHS.file, file.to_string()); } if let Some(add) = &parsed.additional_fields { @@ -293,7 +291,7 @@ mod tests { Some(&Value::Bytes(Bytes::from_static(b"example.org"))) ); assert_eq!( - log.get(log_schema().message_key()), + log.get(log_schema().message_key_target_path().unwrap()), Some(&Value::Bytes(Bytes::from_static( b"A short message that helps you identify what is going on" ))) @@ -348,7 +346,7 @@ mod tests { let events = deserialize_gelf_input(&input).unwrap(); assert_eq!(events.len(), 1); let log = events[0].as_log(); - assert!(log.contains(log_schema().message_key())); + assert!(log.contains(log_schema().message_key_target_path().unwrap())); } // filter out id diff --git a/lib/codecs/src/decoding/format/json.rs b/lib/codecs/src/decoding/format/json.rs index 67e7bc624bbdf..0906f739b803a 100644 --- a/lib/codecs/src/decoding/format/json.rs +++ b/lib/codecs/src/decoding/format/json.rs @@ -3,7 +3,6 @@ use std::convert::TryInto; use bytes::Bytes; use chrono::Utc; use derivative::Derivative; -use lookup::PathPrefix; use smallvec::{smallvec, SmallVec}; use vector_config::configurable_component; use vector_core::{ @@ -133,11 +132,11 @@ impl Deserializer for JsonDeserializer { LogNamespace::Legacy => { let timestamp = Utc::now(); - if let Some(timestamp_key) = log_schema().timestamp_key() { + if let Some(timestamp_key) = log_schema().timestamp_key_target_path() { for event in &mut events { let log = event.as_mut_log(); - if !log.contains((PathPrefix::Event, timestamp_key)) { - log.insert((PathPrefix::Event, timestamp_key), timestamp); + if !log.contains(timestamp_key) { + log.insert(timestamp_key, timestamp); } } } @@ -218,7 +217,7 @@ mod tests { let log = event.as_log(); assert_eq!(log["bar"], 456.into()); assert_eq!( - log.get((PathPrefix::Event, log_schema().timestamp_key().unwrap())) + log.get(log_schema().timestamp_key_target_path().unwrap()) .is_some(), namespace == 
LogNamespace::Legacy ); diff --git a/lib/codecs/src/decoding/format/mezmo/open_telemetry/log_parser.rs b/lib/codecs/src/decoding/format/mezmo/open_telemetry/log_parser.rs index 5f2aa5432942a..ef1eb37db0ad9 100644 --- a/lib/codecs/src/decoding/format/mezmo/open_telemetry/log_parser.rs +++ b/lib/codecs/src/decoding/format/mezmo/open_telemetry/log_parser.rs @@ -170,7 +170,7 @@ pub fn to_events(log_request: ExportLogsServiceRequest) -> SmallVec<[Event; 1]> Value::from(metadata), ), // Add the actual line - (log_schema().message_key().to_string(), line), + (log_schema().message_key().unwrap().to_string(), line), ]); // Wrap line in mezmo format diff --git a/lib/codecs/src/decoding/format/mezmo/open_telemetry/trace_parser.rs b/lib/codecs/src/decoding/format/mezmo/open_telemetry/trace_parser.rs index ae5e7de5f6385..560eb4b820884 100644 --- a/lib/codecs/src/decoding/format/mezmo/open_telemetry/trace_parser.rs +++ b/lib/codecs/src/decoding/format/mezmo/open_telemetry/trace_parser.rs @@ -260,7 +260,10 @@ pub fn to_events(trace_request: ExportTraceServiceRequest) -> SmallVec<[Event; 1 user_metadata.insert("attributes".to_string(), filtered_attributes); let log_line = BTreeMap::from_iter([ - (log_schema().message_key().to_string(), message.into()), + ( + log_schema().message_key().unwrap().to_string(), + message.into(), + ), ( log_schema().user_metadata_key().to_string(), user_metadata.into(), diff --git a/lib/codecs/src/decoding/format/mod.rs b/lib/codecs/src/decoding/format/mod.rs index 76942d49af244..61a42a0a6eb81 100644 --- a/lib/codecs/src/decoding/format/mod.rs +++ b/lib/codecs/src/decoding/format/mod.rs @@ -8,6 +8,7 @@ mod gelf; mod json; mod native; mod native_json; +mod protobuf; #[cfg(feature = "syslog")] mod syslog; @@ -25,6 +26,7 @@ pub use native::{NativeDeserializer, NativeDeserializerConfig}; pub use native_json::{ NativeJsonDeserializer, NativeJsonDeserializerConfig, NativeJsonDeserializerOptions, }; +pub use protobuf::{ProtobufDeserializer, ProtobufDeserializerConfig}; use smallvec::SmallVec; #[cfg(feature = "syslog")] pub use syslog::{SyslogDeserializer, SyslogDeserializerConfig, SyslogDeserializerOptions}; diff --git a/lib/codecs/src/decoding/format/native.rs b/lib/codecs/src/decoding/format/native.rs index 1f47414e103f5..5258e96e6f282 100644 --- a/lib/codecs/src/decoding/format/native.rs +++ b/lib/codecs/src/decoding/format/native.rs @@ -19,7 +19,7 @@ pub struct NativeDeserializerConfig; impl NativeDeserializerConfig { /// Build the `NativeDeserializer` from this configuration. pub fn build(&self) -> NativeDeserializer { - NativeDeserializer::default() + NativeDeserializer } /// Return the type of event build by this deserializer. diff --git a/lib/codecs/src/decoding/format/protobuf.rs b/lib/codecs/src/decoding/format/protobuf.rs new file mode 100644 index 0000000000000..71bd98c1567fa --- /dev/null +++ b/lib/codecs/src/decoding/format/protobuf.rs @@ -0,0 +1,366 @@ +use std::collections::BTreeMap; +use std::fs; +use std::path::PathBuf; + +use bytes::Bytes; +use chrono::Utc; +use derivative::Derivative; +use ordered_float::NotNan; +use prost_reflect::{DescriptorPool, DynamicMessage, MessageDescriptor, ReflectMessage}; +use smallvec::{smallvec, SmallVec}; +use vector_config::configurable_component; +use vector_core::event::LogEvent; +use vector_core::{ + config::{log_schema, DataType, LogNamespace}, + event::Event, + schema, +}; +use vrl::value::Kind; + +use super::Deserializer; + +/// Config used to build a `ProtobufDeserializer`. 
+#[configurable_component] +#[derive(Debug, Clone, Default)] +pub struct ProtobufDeserializerConfig { + /// Protobuf-specific decoding options. + #[serde( + default, + skip_serializing_if = "vector_core::serde::skip_serializing_if_default" + )] + pub protobuf: ProtobufDeserializerOptions, +} + +impl ProtobufDeserializerConfig { + /// Build the `ProtobufDeserializer` from this configuration. + pub fn build(&self) -> vector_common::Result { + ProtobufDeserializer::try_from(self) + } + + /// Return the type of event build by this deserializer. + pub fn output_type(&self) -> DataType { + DataType::Log + } + + /// The schema produced by the deserializer. + pub fn schema_definition(&self, log_namespace: LogNamespace) -> schema::Definition { + match log_namespace { + LogNamespace::Legacy => { + let mut definition = + schema::Definition::empty_legacy_namespace().unknown_fields(Kind::any()); + + if let Some(timestamp_key) = log_schema().timestamp_key() { + definition = definition.try_with_field( + timestamp_key, + // The protobuf decoder will try to insert a new `timestamp`-type value into the + // "timestamp_key" field, but only if that field doesn't already exist. + Kind::any().or_timestamp(), + Some("timestamp"), + ); + } + definition + } + LogNamespace::Vector => { + schema::Definition::new_with_default_metadata(Kind::any(), [log_namespace]) + } + } + } +} + +/// Protobuf-specific decoding options. +#[configurable_component] +#[derive(Debug, Clone, PartialEq, Eq, Derivative)] +#[derivative(Default)] +pub struct ProtobufDeserializerOptions { + /// Path to desc file + desc_file: PathBuf, + + /// message type. e.g package.message + message_type: String, +} + +/// Deserializer that builds `Event`s from a byte frame containing protobuf. +#[derive(Debug, Clone)] +pub struct ProtobufDeserializer { + message_descriptor: MessageDescriptor, +} + +impl ProtobufDeserializer { + /// Creates a new `ProtobufDeserializer`. 
+ pub fn new(message_descriptor: MessageDescriptor) -> Self { + Self { message_descriptor } + } + + fn get_message_descriptor( + desc_file: &PathBuf, + message_type: String, + ) -> vector_common::Result { + let b = fs::read(desc_file) + .map_err(|e| format!("Failed to open protobuf desc file '{desc_file:?}': {e}",))?; + let pool = DescriptorPool::decode(b.as_slice()) + .map_err(|e| format!("Failed to parse protobuf desc file '{desc_file:?}': {e}"))?; + Ok(pool.get_message_by_name(&message_type).unwrap_or_else(|| { + panic!("The message type '{message_type}' could not be found in '{desc_file:?}'") + })) + } +} + +impl Deserializer for ProtobufDeserializer { + fn parse( + &self, + bytes: Bytes, + log_namespace: LogNamespace, + ) -> vector_common::Result> { + let dynamic_message = DynamicMessage::decode(self.message_descriptor.clone(), bytes) + .map_err(|error| format!("Error parsing protobuf: {:?}", error))?; + + let proto_vrl = to_vrl(&prost_reflect::Value::Message(dynamic_message), None)?; + let mut event = Event::Log(LogEvent::from(proto_vrl)); + let event = match log_namespace { + LogNamespace::Vector => event, + LogNamespace::Legacy => { + let timestamp = Utc::now(); + if let Some(timestamp_key) = log_schema().timestamp_key_target_path() { + let log = event.as_mut_log(); + if !log.contains(timestamp_key) { + log.insert(timestamp_key, timestamp); + } + } + event + } + }; + + Ok(smallvec![event]) + } +} + +impl TryFrom<&ProtobufDeserializerConfig> for ProtobufDeserializer { + type Error = vector_common::Error; + fn try_from(config: &ProtobufDeserializerConfig) -> vector_common::Result { + let message_descriptor = ProtobufDeserializer::get_message_descriptor( + &config.protobuf.desc_file, + config.protobuf.message_type.clone(), + )?; + Ok(Self::new(message_descriptor)) + } +} + +fn to_vrl( + prost_reflect_value: &prost_reflect::Value, + field_descriptor: Option<&prost_reflect::FieldDescriptor>, +) -> vector_common::Result { + let vrl_value = match prost_reflect_value { + prost_reflect::Value::Bool(v) => vrl::value::Value::from(*v), + prost_reflect::Value::I32(v) => vrl::value::Value::from(*v), + prost_reflect::Value::I64(v) => vrl::value::Value::from(*v), + prost_reflect::Value::U32(v) => vrl::value::Value::from(*v), + prost_reflect::Value::U64(v) => vrl::value::Value::from(*v), + prost_reflect::Value::F32(v) => vrl::value::Value::Float( + NotNan::new(f64::from(*v)).map_err(|_e| "Float number cannot be Nan")?, + ), + prost_reflect::Value::F64(v) => { + vrl::value::Value::Float(NotNan::new(*v).map_err(|_e| "F64 number cannot be Nan")?) + } + prost_reflect::Value::String(v) => vrl::value::Value::from(v.as_str()), + prost_reflect::Value::Bytes(v) => vrl::value::Value::from(v.clone()), + prost_reflect::Value::EnumNumber(v) => { + if let Some(field_descriptor) = field_descriptor { + let kind = field_descriptor.kind(); + let enum_desc = kind.as_enum().ok_or_else(|| { + format!( + "Internal error while parsing protobuf enum. Field descriptor: {:?}", + field_descriptor + ) + })?; + vrl::value::Value::from( + enum_desc + .get_value(*v) + .ok_or_else(|| { + format!("The number {} cannot be in '{}'", v, enum_desc.name()) + })? + .name(), + ) + } else { + Err("Expected valid field descriptor")? 
+ } + } + prost_reflect::Value::Message(v) => { + let mut obj_map = BTreeMap::new(); + for field_desc in v.descriptor().fields() { + let field_value = v.get_field(&field_desc); + let out = to_vrl(field_value.as_ref(), Some(&field_desc))?; + obj_map.insert(field_desc.name().to_string(), out); + } + vrl::value::Value::from(obj_map) + } + prost_reflect::Value::List(v) => { + let vec = v + .iter() + .map(|o| to_vrl(o, field_descriptor)) + .collect::, vector_common::Error>>()?; + vrl::value::Value::from(vec) + } + prost_reflect::Value::Map(v) => { + if let Some(field_descriptor) = field_descriptor { + let kind = field_descriptor.kind(); + let message_desc = kind.as_message().ok_or_else(|| { + format!( + "Internal error while parsing protobuf field descriptor: {:?}", + field_descriptor + ) + })?; + vrl::value::Value::from( + v.iter() + .map(|kv| { + Ok(( + kv.0.as_str() + .ok_or_else(|| { + format!( + "Internal error while parsing protobuf map. Field descriptor: {:?}", + field_descriptor + ) + })? + .to_string(), + to_vrl(kv.1, Some(&message_desc.map_entry_value_field()))?, + )) + }) + .collect::>>()?, + ) + } else { + Err("Expected valid field descriptor")? + } + } + }; + Ok(vrl_value) +} + +#[cfg(test)] +mod tests { + // TODO: add test for bad file path & invalid message_type + + use std::path::PathBuf; + use std::{env, fs}; + use vector_core::config::log_schema; + + use super::*; + + fn test_data_dir() -> PathBuf { + PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()) + .join("tests/data/decoding/protobuf") + } + + fn parse_and_validate( + protobuf_bin_message: String, + protobuf_desc_path: PathBuf, + message_type: &str, + validate_log: fn(&LogEvent), + ) { + let input = Bytes::from(protobuf_bin_message); + let message_descriptor = ProtobufDeserializer::get_message_descriptor( + &protobuf_desc_path, + message_type.to_string(), + ) + .unwrap(); + let deserializer = ProtobufDeserializer::new(message_descriptor); + + for namespace in [LogNamespace::Legacy, LogNamespace::Vector] { + let events = deserializer.parse(input.clone(), namespace).unwrap(); + let mut events = events.into_iter(); + + { + let event = events.next().unwrap(); + let log = event.as_log(); + validate_log(log); + assert_eq!( + log.get(log_schema().timestamp_key_target_path().unwrap()) + .is_some(), + namespace == LogNamespace::Legacy + ); + } + + assert_eq!(events.next(), None); + } + } + + #[test] + fn deserialize_protobuf() { + let protobuf_bin_message_path = test_data_dir().join("person_someone.pb"); + let protobuf_desc_path = test_data_dir().join("test_protobuf.desc"); + let message_type = "test_protobuf.Person"; + let validate_log = |log: &LogEvent| { + assert_eq!(log["name"], "someone".into()); + assert_eq!( + log["phones"].as_array().unwrap()[0].as_object().unwrap()["number"] + .as_str() + .unwrap(), + "123456" + ); + }; + + parse_and_validate( + fs::read_to_string(protobuf_bin_message_path).unwrap(), + protobuf_desc_path, + message_type, + validate_log, + ); + } + + #[test] + fn deserialize_protobuf3() { + let protobuf_bin_message_path = test_data_dir().join("person_someone3.pb"); + let protobuf_desc_path = test_data_dir().join("test_protobuf3.desc"); + let message_type = "test_protobuf3.Person"; + let validate_log = |log: &LogEvent| { + assert_eq!(log["name"], "someone".into()); + assert_eq!( + log["phones"].as_array().unwrap()[0].as_object().unwrap()["number"] + .as_str() + .unwrap(), + "1234" + ); + assert_eq!( + log["data"].as_object().unwrap()["data_phone"], + "HOME".into() + ); + }; + + parse_and_validate( + 
fs::read_to_string(protobuf_bin_message_path).unwrap(), + protobuf_desc_path, + message_type, + validate_log, + ); + } + + #[test] + fn deserialize_empty_buffer() { + let protobuf_bin_message = "".to_string(); + let protobuf_desc_path = test_data_dir().join("test_protobuf.desc"); + let message_type = "test_protobuf.Person"; + let validate_log = |log: &LogEvent| { + assert_eq!(log["name"], "".into()); + }; + + parse_and_validate( + protobuf_bin_message, + protobuf_desc_path, + message_type, + validate_log, + ); + } + + #[test] + fn deserialize_error_invalid_protobuf() { + let input = Bytes::from("{ foo"); + let message_descriptor = ProtobufDeserializer::get_message_descriptor( + &test_data_dir().join("test_protobuf.desc"), + "test_protobuf.Person".to_string(), + ) + .unwrap(); + let deserializer = ProtobufDeserializer::new(message_descriptor); + + for namespace in [LogNamespace::Legacy, LogNamespace::Vector] { + assert!(deserializer.parse(input.clone(), namespace).is_err()); + } + } +} diff --git a/lib/codecs/src/decoding/format/syslog.rs b/lib/codecs/src/decoding/format/syslog.rs index 336d7c1aa232f..041d6a4c93cc6 100644 --- a/lib/codecs/src/decoding/format/syslog.rs +++ b/lib/codecs/src/decoding/format/syslog.rs @@ -1,12 +1,11 @@ use bytes::Bytes; use chrono::{DateTime, Datelike, Utc}; use derivative::Derivative; -use lookup::lookup_v2::parse_value_path; -use lookup::{event_path, owned_value_path, OwnedTargetPath, OwnedValuePath, PathPrefix}; +use lookup::{event_path, owned_value_path, OwnedTargetPath, OwnedValuePath}; use smallvec::{smallvec, SmallVec}; use std::borrow::Cow; use std::collections::BTreeMap; -use syslog_loose::{IncompleteDate, Message, ProcId, Protocol}; +use syslog_loose::{IncompleteDate, Message, ProcId, Protocol, Variant}; use vector_config::configurable_component; use vector_core::config::{LegacyKey, LogNamespace}; use vector_core::{ @@ -71,7 +70,7 @@ impl SyslogDeserializerConfig { // The `message` field is always defined. If parsing fails, the entire body becomes the // message. 
.with_event_field( - &parse_value_path(log_schema().message_key()).expect("valid message key"), + log_schema().message_key().expect("valid message key"), Kind::bytes(), Some("message"), ); @@ -283,7 +282,8 @@ impl Deserializer for SyslogDeserializer { false => Cow::from(std::str::from_utf8(&bytes)?), }; let line = line.trim(); - let parsed = syslog_loose::parse_message_with_year_exact(line, resolve_year)?; + let parsed = + syslog_loose::parse_message_with_year_exact(line, resolve_year, Variant::Either)?; let log = match (self.source, log_namespace) { (Some(source), LogNamespace::Vector) => { @@ -429,7 +429,7 @@ fn insert_fields_from_syslog( ) { match log_namespace { LogNamespace::Legacy => { - log.insert(event_path!(log_schema().message_key()), parsed.msg); + log.maybe_insert(log_schema().message_key_target_path(), parsed.msg); } LogNamespace::Vector => { log.insert(event_path!("message"), parsed.msg); @@ -440,9 +440,7 @@ fn insert_fields_from_syslog( let timestamp = DateTime::::from(timestamp); match log_namespace { LogNamespace::Legacy => { - if let Some(timestamp_key) = log_schema().timestamp_key() { - log.insert((PathPrefix::Event, timestamp_key), timestamp); - } + log.maybe_insert(log_schema().timestamp_key_target_path(), timestamp); } LogNamespace::Vector => { log.insert(event_path!("timestamp"), timestamp); @@ -486,9 +484,8 @@ fn insert_fields_from_syslog( #[cfg(test)] mod tests { - use vector_core::config::{init_log_schema, log_schema, LogSchema}; - use super::*; + use vector_core::config::{init_log_schema, log_schema, LogSchema}; #[test] fn deserialize_syslog_legacy_namespace() { @@ -500,7 +497,10 @@ mod tests { let events = deserializer.parse(input, LogNamespace::Legacy).unwrap(); assert_eq!(events.len(), 1); - assert_eq!(events[0].as_log()[log_schema().message_key()], "MSG".into()); + assert_eq!( + events[0].as_log()[log_schema().message_key().unwrap().to_string()], + "MSG".into() + ); assert!( events[0].as_log()[log_schema().timestamp_key().unwrap().to_string()].is_timestamp() ); @@ -522,8 +522,12 @@ mod tests { fn init() { let mut schema = LogSchema::default(); - schema.set_message_key("legacy_message".to_string()); - schema.set_message_key("legacy_timestamp".to_string()); + schema.set_message_key(Some(OwnedTargetPath::event(owned_value_path!( + "legacy_message" + )))); + schema.set_message_key(Some(OwnedTargetPath::event(owned_value_path!( + "legacy_timestamp" + )))); init_log_schema(schema, false); } } diff --git a/lib/codecs/src/decoding/mod.rs b/lib/codecs/src/decoding/mod.rs index 5af56aed89de6..c7acefc17919d 100644 --- a/lib/codecs/src/decoding/mod.rs +++ b/lib/codecs/src/decoding/mod.rs @@ -12,6 +12,7 @@ pub use format::{ GelfDeserializerConfig, GelfDeserializerOptions, JsonDeserializer, JsonDeserializerConfig, JsonDeserializerOptions, MezmoDeserializer, NativeDeserializer, NativeDeserializerConfig, NativeJsonDeserializer, NativeJsonDeserializerConfig, NativeJsonDeserializerOptions, + ProtobufDeserializer, ProtobufDeserializerConfig, }; #[cfg(feature = "syslog")] pub use format::{SyslogDeserializer, SyslogDeserializerConfig, SyslogDeserializerOptions}; @@ -200,6 +201,11 @@ pub enum DeserializerConfig { /// [json]: https://www.json.org/ Json(JsonDeserializerConfig), + /// Decodes the raw bytes as [protobuf][protobuf]. + /// + /// [protobuf]: https://protobuf.dev/ + Protobuf(ProtobufDeserializerConfig), + #[cfg(feature = "syslog")] /// Decodes the raw bytes as a Syslog message. 
 ///
@@ -277,16 +283,19 @@ impl From for DeserializerConfig {
 
 impl DeserializerConfig {
     /// Build the `Deserializer` from this configuration.
-    pub fn build(&self) -> Deserializer {
+    pub fn build(&self) -> vector_common::Result<Deserializer> {
         match self {
-            DeserializerConfig::Bytes => Deserializer::Bytes(BytesDeserializerConfig.build()),
-            DeserializerConfig::Json(config) => Deserializer::Json(config.build()),
+            DeserializerConfig::Bytes => Ok(Deserializer::Bytes(BytesDeserializerConfig.build())),
+            DeserializerConfig::Json(config) => Ok(Deserializer::Json(config.build())),
+            DeserializerConfig::Protobuf(config) => Ok(Deserializer::Protobuf(config.build()?)),
             #[cfg(feature = "syslog")]
-            DeserializerConfig::Syslog(config) => Deserializer::Syslog(config.build()),
-            DeserializerConfig::Native => Deserializer::Native(NativeDeserializerConfig.build()),
-            DeserializerConfig::NativeJson(config) => Deserializer::NativeJson(config.build()),
-            DeserializerConfig::Gelf(config) => Deserializer::Gelf(config.build()),
-            DeserializerConfig::Mezmo(config) => Deserializer::Boxed(config.build()),
+            DeserializerConfig::Syslog(config) => Ok(Deserializer::Syslog(config.build())),
+            DeserializerConfig::Native => {
+                Ok(Deserializer::Native(NativeDeserializerConfig.build()))
+            }
+            DeserializerConfig::NativeJson(config) => Ok(Deserializer::NativeJson(config.build())),
+            DeserializerConfig::Gelf(config) => Ok(Deserializer::Gelf(config.build())),
+            DeserializerConfig::Mezmo(config) => Ok(Deserializer::Boxed(config.build())),
         }
     }
 
@@ -300,6 +309,7 @@ impl DeserializerConfig {
             | DeserializerConfig::NativeJson(_) => {
                 FramingConfig::NewlineDelimited(Default::default())
             }
+            DeserializerConfig::Protobuf(_) => FramingConfig::Bytes,
             #[cfg(feature = "syslog")]
             DeserializerConfig::Syslog(_) => FramingConfig::NewlineDelimited(Default::default()),
             DeserializerConfig::Mezmo(config) => config.default_stream_framing(),
@@ -311,6 +321,7 @@
         match self {
             DeserializerConfig::Bytes => BytesDeserializerConfig.output_type(),
             DeserializerConfig::Json(config) => config.output_type(),
+            DeserializerConfig::Protobuf(config) => config.output_type(),
             #[cfg(feature = "syslog")]
             DeserializerConfig::Syslog(config) => config.output_type(),
             DeserializerConfig::Native => NativeDeserializerConfig.output_type(),
@@ -325,6 +336,7 @@
         match self {
             DeserializerConfig::Bytes => BytesDeserializerConfig.schema_definition(log_namespace),
             DeserializerConfig::Json(config) => config.schema_definition(log_namespace),
+            DeserializerConfig::Protobuf(config) => config.schema_definition(log_namespace),
             #[cfg(feature = "syslog")]
             DeserializerConfig::Syslog(config) => config.schema_definition(log_namespace),
             DeserializerConfig::Native => NativeDeserializerConfig.schema_definition(log_namespace),
@@ -354,6 +366,7 @@
                 }),
             ) => "application/json",
             (DeserializerConfig::Native, _) => "application/octet-stream",
+            (DeserializerConfig::Protobuf(_), _) => "application/octet-stream",
             (
                 DeserializerConfig::Json(_)
                 | DeserializerConfig::NativeJson(_)
@@ -375,6 +388,8 @@ pub enum Deserializer {
     Bytes(BytesDeserializer),
     /// Uses a `JsonDeserializer` for deserialization.
     Json(JsonDeserializer),
+    /// Uses a `ProtobufDeserializer` for deserialization.
+    Protobuf(ProtobufDeserializer),
     #[cfg(feature = "syslog")]
     /// Uses a `SyslogDeserializer` for deserialization.
Syslog(SyslogDeserializer), @@ -397,6 +412,7 @@ impl format::Deserializer for Deserializer { match self { Deserializer::Bytes(deserializer) => deserializer.parse(bytes, log_namespace), Deserializer::Json(deserializer) => deserializer.parse(bytes, log_namespace), + Deserializer::Protobuf(deserializer) => deserializer.parse(bytes, log_namespace), #[cfg(feature = "syslog")] Deserializer::Syslog(deserializer) => deserializer.parse(bytes, log_namespace), Deserializer::Native(deserializer) => deserializer.parse(bytes, log_namespace), diff --git a/lib/codecs/src/encoding/format/common.rs b/lib/codecs/src/encoding/format/common.rs new file mode 100644 index 0000000000000..44d7b3e44c88b --- /dev/null +++ b/lib/codecs/src/encoding/format/common.rs @@ -0,0 +1,12 @@ +use vector_core::config::log_schema; +use vector_core::schema; +use vrl::value::Kind; + +/// Inspect the global log schema and create a schema requirement. +pub fn get_serializer_schema_requirement() -> schema::Requirement { + if let Some(message_key) = log_schema().message_key() { + schema::Requirement::empty().required_meaning(message_key.to_string(), Kind::any()) + } else { + schema::Requirement::empty() + } +} diff --git a/lib/codecs/src/encoding/format/gelf.rs b/lib/codecs/src/encoding/format/gelf.rs index 3a0c6d0461991..1c12ee5f1dc30 100644 --- a/lib/codecs/src/encoding/format/gelf.rs +++ b/lib/codecs/src/encoding/format/gelf.rs @@ -1,3 +1,4 @@ +use crate::gelf::GELF_TARGET_PATHS; use crate::{gelf_fields::*, VALID_FIELD_REGEX}; use bytes::{BufMut, BytesMut}; use lookup::event_path; @@ -15,7 +16,7 @@ use vector_core::{ /// On GELF encoding behavior: /// Graylog has a relaxed parsing. They are much more lenient than the spec would -/// suggest. We've elected to take a more strict approach to maintain backwards compatability +/// suggest. We've elected to take a more strict approach to maintain backwards compatibility /// in the event that we need to change the behavior to be more relaxed, so that prior versions /// of vector will still work. /// The exception is that if 'Additional fields' are found to be missing an underscore prefix and @@ -130,21 +131,21 @@ fn coerce_required_fields(mut log: LogEvent) -> vector_common::Result } // add the VERSION if it does not exist - if !log.contains(VERSION) { - log.insert(VERSION, GELF_VERSION); + if !log.contains(&GELF_TARGET_PATHS.version) { + log.insert(&GELF_TARGET_PATHS.version, GELF_VERSION); } - if !log.contains(HOST) { + if !log.contains(&GELF_TARGET_PATHS.host) { err_missing_field(HOST)?; } - let message_key = log_schema().message_key(); - if !log.contains(SHORT_MESSAGE) { - // rename the log_schema().message_key() to SHORT_MESSAGE - if log.contains(message_key) { - log.rename_key(message_key, SHORT_MESSAGE); - } else { - err_missing_field(SHORT_MESSAGE)?; + if !log.contains(&GELF_TARGET_PATHS.short_message) { + if let Some(message_key) = log_schema().message_key_target_path() { + if log.contains(message_key) { + log.rename_key(message_key, &GELF_TARGET_PATHS.short_message); + } else { + err_missing_field(SHORT_MESSAGE)?; + } } } Ok(log) @@ -329,7 +330,7 @@ mod tests { let event_fields = btreemap! 
{ VERSION => "1.1", HOST => "example.org", - log_schema().message_key() => "Some message", + log_schema().message_key().unwrap().to_string() => "Some message", }; let jsn = do_serialize(true, event_fields).unwrap(); diff --git a/lib/codecs/src/encoding/format/mod.rs b/lib/codecs/src/encoding/format/mod.rs index 1d4e008380516..efff723f65b46 100644 --- a/lib/codecs/src/encoding/format/mod.rs +++ b/lib/codecs/src/encoding/format/mod.rs @@ -4,6 +4,7 @@ #![deny(missing_docs)] mod avro; +mod common; mod csv; mod gelf; mod json; diff --git a/lib/codecs/src/encoding/format/raw_message.rs b/lib/codecs/src/encoding/format/raw_message.rs index b955ae8355dc6..1f341295c12ea 100644 --- a/lib/codecs/src/encoding/format/raw_message.rs +++ b/lib/codecs/src/encoding/format/raw_message.rs @@ -1,12 +1,8 @@ +use crate::encoding::format::common::get_serializer_schema_requirement; use bytes::{BufMut, BytesMut}; use serde::{Deserialize, Serialize}; use tokio_util::codec::Encoder; -use vector_core::{ - config::{log_schema, DataType}, - event::Event, - schema, -}; -use vrl::value::Kind; +use vector_core::{config::DataType, event::Event, schema}; /// Config used to build a `RawMessageSerializer`. #[derive(Debug, Clone, Default, Deserialize, Serialize)] @@ -30,7 +26,7 @@ impl RawMessageSerializerConfig { /// The schema required by the serializer. pub fn schema_requirement(&self) -> schema::Requirement { - schema::Requirement::empty().required_meaning(log_schema().message_key(), Kind::any()) + get_serializer_schema_requirement() } } @@ -49,18 +45,10 @@ impl Encoder for RawMessageSerializer { type Error = vector_common::Error; fn encode(&mut self, event: Event, buffer: &mut BytesMut) -> Result<(), Self::Error> { - let message_key = log_schema().message_key(); - let log = event.as_log(); - - if let Some(bytes) = log - .get_by_meaning(message_key) - .or_else(|| log.get(message_key)) - .map(|value| value.coerce_to_bytes()) - { + if let Some(bytes) = log.get_message().map(|value| value.coerce_to_bytes()) { buffer.put(bytes); } - Ok(()) } } diff --git a/lib/codecs/src/encoding/format/text.rs b/lib/codecs/src/encoding/format/text.rs index 0b9e49a44b976..3820c606da2b7 100644 --- a/lib/codecs/src/encoding/format/text.rs +++ b/lib/codecs/src/encoding/format/text.rs @@ -1,11 +1,7 @@ +use crate::encoding::format::common::get_serializer_schema_requirement; use bytes::{BufMut, BytesMut}; use tokio_util::codec::Encoder; -use vector_core::{ - config::{log_schema, DataType}, - event::Event, - schema, -}; -use vrl::value::Kind; +use vector_core::{config::DataType, event::Event, schema}; use crate::MetricTagValues; @@ -42,7 +38,7 @@ impl TextSerializerConfig { /// The schema required by the serializer. 
pub fn schema_requirement(&self) -> schema::Requirement { - schema::Requirement::empty().required_meaning(log_schema().message_key(), Kind::any()) + get_serializer_schema_requirement() } } @@ -67,15 +63,9 @@ impl Encoder for TextSerializer { type Error = vector_common::Error; fn encode(&mut self, event: Event, buffer: &mut BytesMut) -> Result<(), Self::Error> { - let message_key = log_schema().message_key(); - match event { Event::Log(log) => { - if let Some(bytes) = log - .get_by_meaning(message_key) - .or_else(|| log.get(message_key)) - .map(|value| value.coerce_to_bytes()) - { + if let Some(bytes) = log.get_message().map(|value| value.coerce_to_bytes()) { buffer.put(bytes); } } diff --git a/lib/codecs/src/gelf.rs b/lib/codecs/src/gelf.rs index d059958f003a1..b78df6cb0e4d9 100644 --- a/lib/codecs/src/gelf.rs +++ b/lib/codecs/src/gelf.rs @@ -2,10 +2,11 @@ use once_cell::sync::Lazy; use regex::Regex; +use vrl::owned_value_path; +use vrl::path::OwnedTargetPath; /// GELF Message fields. Definitions from . pub mod gelf_fields { - /// (not a field) The latest version of the GELF specification. pub const GELF_VERSION: &str = "1.1"; @@ -40,6 +41,30 @@ pub mod gelf_fields { // < Every field with an underscore (_) prefix will be treated as an additional field. > } +/// GELF owned target paths. +pub(crate) struct GelfTargetPaths { + pub version: OwnedTargetPath, + pub host: OwnedTargetPath, + pub full_message: OwnedTargetPath, + pub level: OwnedTargetPath, + pub facility: OwnedTargetPath, + pub line: OwnedTargetPath, + pub file: OwnedTargetPath, + pub short_message: OwnedTargetPath, +} + +/// Lazily initialized singleton. +pub(crate) static GELF_TARGET_PATHS: Lazy = Lazy::new(|| GelfTargetPaths { + version: OwnedTargetPath::event(owned_value_path!(gelf_fields::VERSION)), + host: OwnedTargetPath::event(owned_value_path!(gelf_fields::HOST)), + full_message: OwnedTargetPath::event(owned_value_path!(gelf_fields::FULL_MESSAGE)), + level: OwnedTargetPath::event(owned_value_path!(gelf_fields::LEVEL)), + facility: OwnedTargetPath::event(owned_value_path!(gelf_fields::FACILITY)), + line: OwnedTargetPath::event(owned_value_path!(gelf_fields::LINE)), + file: OwnedTargetPath::event(owned_value_path!(gelf_fields::FILE)), + short_message: OwnedTargetPath::event(owned_value_path!(gelf_fields::SHORT_MESSAGE)), +}); + /// Regex for matching valid field names. Must contain only word chars, periods and dashes. /// Additional field names must also be prefixed with an `_` , however that is intentionally /// omitted from this regex to be checked separately to create a specific error message. 
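The gelf.rs change above swaps per-event string path lookups for a set of `OwnedTargetPath` values built once in a `Lazy` singleton. Below is a minimal sketch of that pattern, not part of the patch; it reuses only APIs that already appear in this diff (`once_cell::sync::Lazy`, `owned_value_path!`, `OwnedTargetPath::event`, `LogEvent::contains`/`rename_key`, `log_schema().message_key_target_path()`), and the `ensure_short_message` helper and import paths are assumptions for illustration:

    use once_cell::sync::Lazy;
    use vector_core::{config::log_schema, event::LogEvent};
    use vrl::owned_value_path;
    use vrl::path::OwnedTargetPath;

    // Build the target path once; per-event work is then just lookups and renames.
    static SHORT_MESSAGE_PATH: Lazy<OwnedTargetPath> =
        Lazy::new(|| OwnedTargetPath::event(owned_value_path!("short_message")));

    // Hypothetical helper mirroring `coerce_required_fields`: if `short_message`
    // is missing, move the globally configured message key into it.
    fn ensure_short_message(log: &mut LogEvent) {
        if !log.contains(&*SHORT_MESSAGE_PATH) {
            if let Some(message_key) = log_schema().message_key_target_path() {
                if log.contains(message_key) {
                    log.rename_key(message_key, &*SHORT_MESSAGE_PATH);
                }
            }
        }
    }

Precomputing the paths keeps path parsing out of the per-event encode loop, which appears to be the motivation for replacing the `gelf_fields` string constants with `GELF_TARGET_PATHS` in the encoder.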
diff --git a/lib/codecs/tests/data/decoding/protobuf/person_someone.pb b/lib/codecs/tests/data/decoding/protobuf/person_someone.pb new file mode 100644 index 0000000000000..46b78f47969af --- /dev/null +++ b/lib/codecs/tests/data/decoding/protobuf/person_someone.pb @@ -0,0 +1,3 @@ + +someone" +123456 \ No newline at end of file diff --git a/lib/codecs/tests/data/decoding/protobuf/person_someone.txt b/lib/codecs/tests/data/decoding/protobuf/person_someone.txt new file mode 100644 index 0000000000000..72e26e70a16df --- /dev/null +++ b/lib/codecs/tests/data/decoding/protobuf/person_someone.txt @@ -0,0 +1,2 @@ +debug print of person_someone.pb with prost +Person { name: Some("someone"), id: None, email: None, phones: [PhoneNumber { number: Some("123456"), r#type: None }] } diff --git a/lib/codecs/tests/data/decoding/protobuf/person_someone3.pb b/lib/codecs/tests/data/decoding/protobuf/person_someone3.pb new file mode 100644 index 0000000000000..80e4ef6f4dfb6 Binary files /dev/null and b/lib/codecs/tests/data/decoding/protobuf/person_someone3.pb differ diff --git a/lib/codecs/tests/data/decoding/protobuf/person_someone3.txt b/lib/codecs/tests/data/decoding/protobuf/person_someone3.txt new file mode 100644 index 0000000000000..0a5086c628414 --- /dev/null +++ b/lib/codecs/tests/data/decoding/protobuf/person_someone3.txt @@ -0,0 +1,2 @@ +debug print of person_someone3.pb with prost +Person { name: Some("someone"), id: None, email: None, data: {"data_phone": Home}, phones: [PhoneNumber { number: Some("1234"), r#type: Some(Mobile) }] } diff --git a/lib/codecs/tests/data/decoding/protobuf/test_protobuf.desc b/lib/codecs/tests/data/decoding/protobuf/test_protobuf.desc new file mode 100644 index 0000000000000..43e7acf6cf771 Binary files /dev/null and b/lib/codecs/tests/data/decoding/protobuf/test_protobuf.desc differ diff --git a/lib/codecs/tests/data/decoding/protobuf/test_protobuf.proto b/lib/codecs/tests/data/decoding/protobuf/test_protobuf.proto new file mode 100644 index 0000000000000..a4cb111f18ec5 --- /dev/null +++ b/lib/codecs/tests/data/decoding/protobuf/test_protobuf.proto @@ -0,0 +1,26 @@ +syntax = "proto2"; + +package test_protobuf; + +message Person { + optional string name = 1; + optional int32 id = 2; + optional string email = 3; + + enum PhoneType { + MOBILE = 0; + HOME = 1; + WORK = 2; + } + + message PhoneNumber { + optional string number = 1; + optional PhoneType type = 2 [default = HOME]; + } + + repeated PhoneNumber phones = 4; +} + +message AddressBook { + repeated Person people = 1; +} diff --git a/lib/codecs/tests/data/decoding/protobuf/test_protobuf3.desc b/lib/codecs/tests/data/decoding/protobuf/test_protobuf3.desc new file mode 100644 index 0000000000000..9fcfec71b44b8 Binary files /dev/null and b/lib/codecs/tests/data/decoding/protobuf/test_protobuf3.desc differ diff --git a/lib/codecs/tests/data/decoding/protobuf/test_protobuf3.proto b/lib/codecs/tests/data/decoding/protobuf/test_protobuf3.proto new file mode 100644 index 0000000000000..672a05d2fac9a --- /dev/null +++ b/lib/codecs/tests/data/decoding/protobuf/test_protobuf3.proto @@ -0,0 +1,27 @@ +syntax = "proto3"; + +package test_protobuf3; + +message Person { + optional string name = 1; + optional int32 id = 2; + optional string email = 3; + + enum PhoneType { + MOBILE = 0; + HOME = 1; + WORK = 2; + } + + message PhoneNumber { + optional string number = 1; + optional PhoneType type = 2; + } + + map data = 4; + repeated PhoneNumber phones = 5; +} + +message AddressBook { + repeated Person people = 1; +} diff --git 
a/lib/codecs/tests/data/native_encoding/schema.cue b/lib/codecs/tests/data/native_encoding/schema.cue index 34c1bec6adc3d..91ddb069b885b 100644 --- a/lib/codecs/tests/data/native_encoding/schema.cue +++ b/lib/codecs/tests/data/native_encoding/schema.cue @@ -5,8 +5,8 @@ #Trace: {...} #Metric: { - name: string - namespace?: string + name: string + namespace?: string tags?: {[string]: #TagValueSet} timestamp?: #Timestamp interval_ms?: int @@ -40,11 +40,11 @@ sum: number avg: number } - } +} } -#TagValueSet: { #TagValue | [...#TagValue] } +#TagValueSet: {#TagValue | [...#TagValue]} -#TagValue: { string | null } +#TagValue: {string | null} #Timestamp: =~"^\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}(.\\d+)?Z" diff --git a/lib/docs-renderer/Cargo.toml b/lib/docs-renderer/Cargo.toml index 998d19d95bc3d..f391dc8f63139 100644 --- a/lib/docs-renderer/Cargo.toml +++ b/lib/docs-renderer/Cargo.toml @@ -6,10 +6,10 @@ edition = "2021" publish = false [dependencies] -anyhow = { version = "1.0.71", default-features = false, features = ["std"] } +anyhow = { version = "1.0.72", default-features = false, features = ["std"] } serde = { version = "1.0", default-features = false } serde_json = { version = "1.0", default-features = false, features = ["std"] } -snafu = { version = "0.7.4", default-features = false } +snafu = { version = "0.7.5", default-features = false } tracing = { version = "0.1.34", default-features = false } tracing-subscriber = { version = "0.3.17", default-features = false, features = ["ansi", "env-filter", "fmt", "json", "registry", "tracing-log"] } vector-config = { path = "../vector-config" } diff --git a/lib/enrichment/Cargo.toml b/lib/enrichment/Cargo.toml index 1a8e2467f0fed..f6674c8c96ec3 100644 --- a/lib/enrichment/Cargo.toml +++ b/lib/enrichment/Cargo.toml @@ -8,5 +8,5 @@ publish = false [dependencies] arc-swap = { version = "1.6.0", default-features = false } chrono = { version = "0.4.19", default-features = false } -dyn-clone = { version = "1.0.11", default-features = false } +dyn-clone = { version = "1.0.12", default-features = false } vrl.workspace = true diff --git a/lib/enrichment/src/find_enrichment_table_records.rs b/lib/enrichment/src/find_enrichment_table_records.rs index 3ade47b7c55a6..7086709cd208a 100644 --- a/lib/enrichment/src/find_enrichment_table_records.rs +++ b/lib/enrichment/src/find_enrichment_table_records.rs @@ -1,6 +1,7 @@ use std::collections::BTreeMap; use vrl::prelude::*; +use crate::vrl_util::is_case_sensitive; use crate::{ vrl_util::{self, add_index, evaluate_condition}, Case, Condition, IndexHandle, TableRegistry, TableSearch, @@ -87,7 +88,7 @@ impl Function for FindEnrichmentTableRecords { fn compile( &self, - _state: &TypeState, + state: &TypeState, ctx: &mut FunctionCompileContext, arguments: ArgumentList, ) -> Compiled { @@ -102,7 +103,7 @@ impl Function for FindEnrichmentTableRecords { .collect::>(); let table = arguments - .required_enum("table", &tables)? + .required_enum("table", &tables, state)? .try_bytes_utf8_lossy() .expect("table is not valid utf8") .into_owned(); @@ -110,21 +111,7 @@ impl Function for FindEnrichmentTableRecords { let select = arguments.optional("select"); - let case_sensitive = arguments - .optional_literal("case_sensitive")? - .and_then(|literal| literal.resolve_constant()) - .map(|value| value.try_boolean()) - .transpose() - .expect("case_sensitive should be boolean") // This will have been caught by the type checker. 
- .map(|case_sensitive| { - if case_sensitive { - Case::Sensitive - } else { - Case::Insensitive - } - }) - .unwrap_or(Case::Sensitive); - + let case_sensitive = is_case_sensitive(&arguments, state)?; let index = Some( add_index(registry, &table, case_sensitive, &condition) .map_err(|err| Box::new(err) as Box<_>)?, diff --git a/lib/enrichment/src/get_enrichment_table_record.rs b/lib/enrichment/src/get_enrichment_table_record.rs index 3678d85e4fea0..a028894d89373 100644 --- a/lib/enrichment/src/get_enrichment_table_record.rs +++ b/lib/enrichment/src/get_enrichment_table_record.rs @@ -1,6 +1,7 @@ use std::collections::BTreeMap; use vrl::prelude::*; +use crate::vrl_util::is_case_sensitive; use crate::{ vrl_util::{self, add_index, evaluate_condition}, Case, Condition, IndexHandle, TableRegistry, TableSearch, @@ -79,7 +80,7 @@ impl Function for GetEnrichmentTableRecord { fn compile( &self, - _state: &TypeState, + state: &TypeState, ctx: &mut FunctionCompileContext, arguments: ArgumentList, ) -> Compiled { @@ -94,7 +95,7 @@ impl Function for GetEnrichmentTableRecord { .collect::>(); let table = arguments - .required_enum("table", &tables)? + .required_enum("table", &tables, state)? .try_bytes_utf8_lossy() .expect("table is not valid utf8") .into_owned(); @@ -102,21 +103,7 @@ impl Function for GetEnrichmentTableRecord { let select = arguments.optional("select"); - let case_sensitive = arguments - .optional_literal("case_sensitive")? - .and_then(|literal| literal.resolve_constant()) - .map(|value| value.try_boolean()) - .transpose() - .expect("case_sensitive should be boolean") // This will have been caught by the type checker. - .map(|case_sensitive| { - if case_sensitive { - Case::Sensitive - } else { - Case::Insensitive - } - }) - .unwrap_or(Case::Sensitive); - + let case_sensitive = is_case_sensitive(&arguments, state)?; let index = Some( add_index(registry, &table, case_sensitive, &condition) .map_err(|err| Box::new(err) as Box<_>)?, diff --git a/lib/enrichment/src/vrl_util.rs b/lib/enrichment/src/vrl_util.rs index f6aaa4dac02ce..ccdebd3cdfbb1 100644 --- a/lib/enrichment/src/vrl_util.rs +++ b/lib/enrichment/src/vrl_util.rs @@ -80,6 +80,26 @@ pub(crate) fn add_index( Ok(index) } +pub(crate) fn is_case_sensitive( + arguments: &ArgumentList, + state: &TypeState, +) -> Result { + Ok(arguments + .optional_literal("case_sensitive", state)? + .map(|value| { + let case_sensitive = value + .as_boolean() + .expect("case_sensitive should be boolean"); // This will have been caught by the type checker. 
+ + if case_sensitive { + Case::Sensitive + } else { + Case::Insensitive + } + }) + .unwrap_or(Case::Sensitive)) +} + #[cfg(test)] mod tests { use std::sync::{Arc, Mutex}; diff --git a/lib/file-source/Cargo.toml b/lib/file-source/Cargo.toml index 50631a6765175..fee1a7e065658 100644 --- a/lib/file-source/Cargo.toml +++ b/lib/file-source/Cargo.toml @@ -19,7 +19,7 @@ vector-config-common = { path = "../vector-config-common", default-features = fa vector-config-macros = { path = "../vector-config-macros", default-features = false } [dependencies.bstr] -version = "1.5" +version = "1.6" default-features = false features = [] @@ -34,7 +34,7 @@ default-features = false features = ["clock", "serde"] [dependencies.dashmap] -version = "5.2" +version = "5.5" default-features = false features = [] @@ -69,7 +69,7 @@ default-features = false features = [] [dependencies.tokio] -version = "1.29.0" +version = "1.30.0" default-features = false features = ["full"] diff --git a/lib/k8s-e2e-tests/Cargo.toml b/lib/k8s-e2e-tests/Cargo.toml index fd1a5aa7ab672..df72dab60fd11 100644 --- a/lib/k8s-e2e-tests/Cargo.toml +++ b/lib/k8s-e2e-tests/Cargo.toml @@ -14,8 +14,8 @@ k8s-test-framework = { version = "0.1", path = "../k8s-test-framework" } regex = "1" reqwest = { version = "0.11.18", features = ["json"] } serde_json = "1" -tokio = { version = "1.29.0", features = ["full"] } -indoc = "2.0.1" +tokio = { version = "1.30.0", features = ["full"] } +indoc = "2.0.3" env_logger = "0.10" tracing = { version = "0.1", features = ["log"] } rand = "0.8" diff --git a/lib/k8s-test-framework/Cargo.toml b/lib/k8s-test-framework/Cargo.toml index 7ab378add13ff..484a49ce0ea63 100644 --- a/lib/k8s-test-framework/Cargo.toml +++ b/lib/k8s-test-framework/Cargo.toml @@ -11,5 +11,5 @@ license = "MPL-2.0" k8s-openapi = { version = "0.16.0", default-features = false, features = ["v1_19"] } serde_json = "1" tempfile = "3" -tokio = { version = "1.29.0", features = ["full"] } +tokio = { version = "1.30.0", features = ["full"] } log = "0.4" diff --git a/lib/loki-logproto/Cargo.toml b/lib/loki-logproto/Cargo.toml index f09e431152d02..c140e168546a9 100644 --- a/lib/loki-logproto/Cargo.toml +++ b/lib/loki-logproto/Cargo.toml @@ -2,6 +2,8 @@ name = "loki-logproto" version = "0.1.0" edition = "2021" +license = "MPL-2.0" +publish = false # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/lib/loki-logproto/LICENSE b/lib/loki-logproto/LICENSE new file mode 100644 index 0000000000000..e87a115e462e1 --- /dev/null +++ b/lib/loki-logproto/LICENSE @@ -0,0 +1,363 @@ +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. "Contributor" + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. "Contributor Version" + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + a. that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. 
that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the terms of + a Secondary License. + +1.6. "Executable Form" + + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + + means a work that combines Covered Software with other material, in a + separate file or files, that is not Covered Software. + +1.8. "License" + + means this document. + +1.9. "Licensable" + + means having the right to grant, to the maximum extent possible, whether + at the time of the initial grant or subsequently, any and all of the + rights conveyed by this License. + +1.10. "Modifications" + + means any of the following: + + a. any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. "Patent Claims" of a Contributor + + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the License, + by the making, using, selling, offering for sale, having made, import, + or transfer of either its Contributions or its Contributor Version. + +1.12. "Secondary License" + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. "Source Code Form" + + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that controls, is + controlled by, or is under common control with You. For purposes of this + definition, "control" means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution + become effective for each Contribution on the date the Contributor first + distributes such Contribution. + +2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under + this License. No additional rights or licenses will be implied from the + distribution or licensing of Covered Software under this License. + Notwithstanding Section 2.1(b) above, no patent license is granted by a + Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. 
for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. under Patent Claims infringed by Covered Software in the absence of + its Contributions. + + This License does not grant any rights in the trademarks, service marks, + or logos of any Contributor (except as may be necessary to comply with + the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this + License (see Section 10.2) or under the terms of a Secondary License (if + permitted under the terms of Section 3.3). + +2.5. Representation + + Each Contributor represents that the Contributor believes its + Contributions are its original creation(s) or it has sufficient rights to + grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under + applicable copyright doctrines of fair use, fair dealing, or other + equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under + the terms of this License. You must inform recipients that the Source + Code Form of the Covered Software is governed by the terms of this + License, and how they can obtain a copy of this License. You may not + attempt to alter or restrict the recipients' rights in the Source Code + Form. + +3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter the + recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for + the Covered Software. If the Larger Work is a combination of Covered + Software with a work governed by one or more Secondary Licenses, and the + Covered Software is not Incompatible With Secondary Licenses, this + License permits You to additionally distribute such Covered Software + under the terms of such Secondary License(s), so that the recipient of + the Larger Work may, at their option, further distribute the Covered + Software under the terms of either this License or such Secondary + License(s). + +3.4. 
Notices + + You may not remove or alter the substance of any license notices + (including copyright notices, patent notices, disclaimers of warranty, or + limitations of liability) contained within the Source Code Form of the + Covered Software, except that You may alter any license notices to the + extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on + behalf of any Contributor. You must make it absolutely clear that any + such warranty, support, indemnity, or liability obligation is offered by + You alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, + judicial order, or regulation then You must: (a) comply with the terms of + this License to the maximum extent possible; and (b) describe the + limitations and the code they affect. Such description must be placed in a + text file included with all distributions of the Covered Software under + this License. Except to the extent prohibited by statute or regulation, + such description must be sufficiently detailed for a recipient of ordinary + skill to be able to understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing + basis, if such Contributor fails to notify You of the non-compliance by + some reasonable means prior to 60 days after You have come back into + compliance. Moreover, Your grants from a particular Contributor are + reinstated on an ongoing basis if such Contributor notifies You of the + non-compliance by some reasonable means, this is the first time You have + received notice of non-compliance with this License from such + Contributor, and You become compliant prior to 30 days after Your receipt + of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, + counter-claims, and cross-claims) alleging that a Contributor Version + directly or indirectly infringes any patent, then the rights granted to + You by any and all Contributors for the Covered Software under Section + 2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. 
Disclaimer of Warranty + + Covered Software is provided under this License on an "as is" basis, + without warranty of any kind, either expressed, implied, or statutory, + including, without limitation, warranties that the Covered Software is free + of defects, merchantable, fit for a particular purpose or non-infringing. + The entire risk as to the quality and performance of the Covered Software + is with You. Should any Covered Software prove defective in any respect, + You (not any Contributor) assume the cost of any necessary servicing, + repair, or correction. This disclaimer of warranty constitutes an essential + part of this License. No use of any Covered Software is authorized under + this License except under this disclaimer. + +7. Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from + such party's negligence to the extent applicable law prohibits such + limitation. Some jurisdictions do not allow the exclusion or limitation of + incidental or consequential damages, so this exclusion and limitation may + not apply to You. + +8. Litigation + + Any litigation relating to this License may be brought only in the courts + of a jurisdiction where the defendant maintains its principal place of + business and such litigation shall be governed by laws of that + jurisdiction, without reference to its conflict-of-law provisions. Nothing + in this Section shall prevent a party's ability to bring cross-claims or + counter-claims. + +9. Miscellaneous + + This License represents the complete agreement concerning the subject + matter hereof. If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. Any law or regulation which provides that + the language of a contract shall be construed against the drafter shall not + be used to construe this License against a Contributor. + + +10. Versions of the License + +10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + +10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version + of the License under which You originally received the Covered Software, + or under the terms of any subsequent version published by the license + steward. + +10.3. Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a + modified version of this License if you rename the license and remove + any references to the name of the license steward (except to note that + such modified license differs from this License). + +10.4. 
Distributing Source Code Form that is Incompatible With Secondary + Licenses If You choose to distribute Source Code Form that is + Incompatible With Secondary Licenses under the terms of this version of + the License, the notice described in Exhibit B of this License must be + attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the + terms of the Mozilla Public License, v. + 2.0. If a copy of the MPL was not + distributed with this file, You can + obtain one at + http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, +then You may include the notice in a location (such as a LICENSE file in a +relevant directory) where a recipient would be likely to look for such a +notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice + + This Source Code Form is "Incompatible + With Secondary Licenses", as defined by + the Mozilla Public License, v. 2.0. + diff --git a/lib/opentelemetry-proto/src/convert.rs b/lib/opentelemetry-proto/src/convert.rs index 4ccfda0b726ba..875be35d4dd9c 100644 --- a/lib/opentelemetry-proto/src/convert.rs +++ b/lib/opentelemetry-proto/src/convert.rs @@ -94,7 +94,7 @@ impl ResourceLog { LogNamespace::Legacy => { let mut log = LogEvent::default(); if let Some(v) = self.log_record.body.and_then(|av| av.value) { - log.insert(log_schema().message_key(), v); + log.maybe_insert(log_schema().message_key_target_path(), v); } log } @@ -208,7 +208,7 @@ impl ResourceLog { log_namespace.insert_vector_metadata( &mut log, - Some(log_schema().source_type_key()), + log_schema().source_type_key(), path!("source_type"), Bytes::from_static(SOURCE_NAME.as_bytes()), ); diff --git a/lib/tracing-limit/Cargo.toml b/lib/tracing-limit/Cargo.toml index 48c0bf0762a98..5eb8fe73f0840 100644 --- a/lib/tracing-limit/Cargo.toml +++ b/lib/tracing-limit/Cargo.toml @@ -9,7 +9,7 @@ license = "MPL-2.0" [dependencies] tracing-core = { version = "0.1", default-features = false } tracing-subscriber = { version = "0.3", default-features = false, features = ["registry", "std"] } -dashmap = { version = "5.2.0", default-features = false } +dashmap = { version = "5.5.0", default-features = false } [dev-dependencies] criterion = "0.5" diff --git a/lib/vector-api-client/Cargo.toml b/lib/vector-api-client/Cargo.toml index ed243d3e368ec..7c47c41c0ce5a 100644 --- a/lib/vector-api-client/Cargo.toml +++ b/lib/vector-api-client/Cargo.toml @@ -9,16 +9,16 @@ license = "MPL-2.0" [dependencies] # Serde -serde = { version = "1.0.164", default-features = false, features = ["derive"] } -serde_json = { version = "1.0.99", default-features = false, features = ["raw_value"] } +serde = { version = "1.0.183", default-features = false, features = ["derive"] } +serde_json = { version = "1.0.104", default-features = false, features = ["raw_value"] } # Error handling -anyhow = { version = "1.0.71", default-features = false, features = ["std"] } +anyhow = { version = "1.0.72", default-features = false, features = ["std"] } # Tokio / Futures async-trait = { version = "0.1", default-features = false } futures = { version = "0.3", default-features = false, features = ["compat", "io-compat"] } -tokio = { version = "1.29.0", default-features = false, features = ["macros", "rt", "sync"] } +tokio = { version = "1.30.0", default-features = false, features = ["macros", "rt", "sync"] } tokio-stream = { version = "0.1.14", default-features = false, features = ["sync"] } # GraphQL @@ 
-26,11 +26,11 @@ graphql_client = { version = "0.13.0", default-features = false, features = ["gr
 
 # HTTP / WebSockets
 reqwest = { version = "0.11.18", default-features = false, features = ["json"] }
-tokio-tungstenite = { version = "0.19.0", default-features = false, features = ["connect", "rustls"] }
+tokio-tungstenite = { version = "0.20.1", default-features = false, features = ["connect", "rustls"] }
 
 # External libs
 chrono = { version = "0.4.6", default-features = false, features = ["serde"] }
-clap = { version = "4.1.14", default-features = false, features = ["derive"] }
+clap = { version = "4.3.21", default-features = false, features = ["derive"] }
 url = { version = "2.4.0", default-features = false }
 uuid = { version = "1", default-features = false, features = ["serde", "v4"] }
-indoc = { version = "2.0.1", default-features = false }
+indoc = { version = "2.0.3", default-features = false }
diff --git a/lib/vector-api-client/src/client.rs b/lib/vector-api-client/src/client.rs
index 01159be74d01f..941ffa7963f27 100644
--- a/lib/vector-api-client/src/client.rs
+++ b/lib/vector-api-client/src/client.rs
@@ -1,8 +1,9 @@
 use anyhow::Context;
 use graphql_client::GraphQLQuery;
-use indoc::indoc;
 use url::Url;
 
+use crate::gql::HealthQueryExt;
+
 /// Wrapped `Result` type, that returns deserialized GraphQL response data.
 pub type QueryResult<T: GraphQLQuery> =
     anyhow::Result<graphql_client::Response<<T as GraphQLQuery>::ResponseData>>;
@@ -19,33 +20,9 @@ impl Client {
         Self { url }
     }
 
-    pub async fn new_with_healthcheck(url: Url) -> Option<Self> {
-        #![allow(clippy::print_stderr)]
-
-        use crate::gql::HealthQueryExt;
-
-        // Create a new API client for connecting to the local/remote Vector instance.
-        let client = Self::new(url.clone());
-
-        // Check that the GraphQL server is reachable
-        match client.health_query().await {
-            Ok(_) => Some(client),
-            _ => {
-                eprintln!(
-                    indoc! {"
-                    Vector API server isn't reachable ({}).
-
-                    Have you enabled the API?
- - To enable the API, add the following to your `vector.toml` config file: - - [api] - enabled = true"}, - url - ); - None - } - } + /// Send a health query + pub async fn healthcheck(&self) -> Result<(), ()> { + self.health_query().await.map(|_| ()).map_err(|_| ()) } /// Issue a GraphQL query using Reqwest, serializing the response to the associated diff --git a/lib/vector-buffers/Cargo.toml b/lib/vector-buffers/Cargo.toml index 09de0d0ac6153..633ff8cf658e4 100644 --- a/lib/vector-buffers/Cargo.toml +++ b/lib/vector-buffers/Cargo.toml @@ -17,14 +17,14 @@ crossbeam-utils = { version = "0.8.16", default-features = false } fslock = { version = "0.2.1", default-features = false, features = ["std"] } futures = { version = "0.3.28", default-features = false, features = ["std"] } memmap2 = { version = "0.7.1", default-features = false } -metrics = "0.21.0" -num-traits = { version = "0.2.15", default-features = false } -pin-project = { version = "1.1.1", default-features = false } +metrics = "0.21.1" +num-traits = { version = "0.2.16", default-features = false } +pin-project = { version = "1.1.3", default-features = false } rkyv = { version = "0.7.40", default-features = false, features = ["size_32", "std", "strict", "validation"] } -serde = { version = "1.0.164", default-features = false, features = ["derive"] } -snafu = { version = "0.7.4", default-features = false, features = ["std"] } +serde = { version = "1.0.183", default-features = false, features = ["derive"] } +snafu = { version = "0.7.5", default-features = false, features = ["std"] } tokio-util = { version = "0.7.0", default-features = false } -tokio = { version = "1.29.0", default-features = false, features = ["rt", "macros", "rt-multi-thread", "sync", "fs", "io-util", "time"] } +tokio = { version = "1.30.0", default-features = false, features = ["rt", "macros", "rt-multi-thread", "sync", "fs", "io-util", "time"] } tracing = { version = "0.1.34", default-features = false, features = ["attributes"] } vector-config = { path = "../vector-config", default-features = false } vector-config-common = { path = "../vector-config-common", default-features = false } @@ -32,12 +32,12 @@ vector-config-macros = { path = "../vector-config-macros", default-features = fa vector-common = { path = "../vector-common", default-features = false, features = ["byte_size_of", "serde"] } [dev-dependencies] -clap = "4.1.14" +clap = "4.3.21" criterion = { version = "0.5", features = ["html_reports", "async_tokio"] } crossbeam-queue = "0.3.8" hdrhistogram = "7.5.2" metrics-tracing-context = { version = "0.14.0", default-features = false } -metrics-util = { version = "0.15.0", default-features = false, features = ["debugging"] } +metrics-util = { version = "0.15.1", default-features = false, features = ["debugging"] } once_cell = "1.18" proptest = "1.2" quickcheck = "1.0" diff --git a/lib/vector-common/Cargo.toml b/lib/vector-common/Cargo.toml index 24023729e5104..523f403b8f774 100644 --- a/lib/vector-common/Cargo.toml +++ b/lib/vector-common/Cargo.toml @@ -49,18 +49,18 @@ crossbeam-utils = { version = "0.8.16", default-features = false } derivative = { version = "2.2.0", default-features = false } futures = { version = "0.3.28", default-features = false, features = ["std"] } indexmap = { version = "~2.0.0", default-features = false, features = ["std"] } -metrics = "0.21.0" +metrics = "0.21.1" nom = { version = "7", optional = true } ordered-float = { version = "3.7.0", default-features = false } -paste = "1.0.12" -pin-project = { version = "1.1.1", 
default-features = false } +paste = "1.0.14" +pin-project = { version = "1.1.3", default-features = false } ryu = { version = "1", default-features = false } -serde_json = { version = "1.0.99", default-features = false, features = ["std", "raw_value"] } -serde = { version = "1.0.164", optional = true, features = ["derive"] } +serde_json = { version = "1.0.104", default-features = false, features = ["std", "raw_value"] } +serde = { version = "1.0.183", optional = true, features = ["derive"] } smallvec = { version = "1", default-features = false } snafu = { version = "0.7", optional = true } stream-cancel = { version = "0.8.1", default-features = false } -tokio = { version = "1.29.0", default-features = false, features = ["macros", "time"] } +tokio = { version = "1.30.0", default-features = false, features = ["macros", "time"] } tracing = { version = "0.1.34", default-features = false } vrl.workspace = true vector-config = { path = "../vector-config" } @@ -69,6 +69,6 @@ vector-config-macros = { path = "../vector-config-macros" } [dev-dependencies] futures = { version = "0.3.28", default-features = false, features = ["async-await", "std"] } -tokio = { version = "1.29.0", default-features = false, features = ["rt", "time"] } +tokio = { version = "1.30.0", default-features = false, features = ["rt", "time"] } quickcheck = "1" quickcheck_macros = "1" diff --git a/lib/vector-common/src/finalizer.rs b/lib/vector-common/src/finalizer.rs index 15bc52ab3510d..9af4237a6a341 100644 --- a/lib/vector-common/src/finalizer.rs +++ b/lib/vector-common/src/finalizer.rs @@ -62,7 +62,7 @@ where Self { sender: Some(todo_tx), flush: flush1, - _phantom: PhantomData::default(), + _phantom: PhantomData, }, finalizer_stream(shutdown, todo_rx, S::default(), flush2).boxed(), ) @@ -199,7 +199,7 @@ pub struct EmptyStream(PhantomData); impl Default for EmptyStream { fn default() -> Self { - Self(PhantomData::default()) + Self(PhantomData) } } diff --git a/lib/vector-common/src/internal_event/cached_event.rs b/lib/vector-common/src/internal_event/cached_event.rs index e672848c93584..54f6b5ed5c64f 100644 --- a/lib/vector-common/src/internal_event/cached_event.rs +++ b/lib/vector-common/src/internal_event/cached_event.rs @@ -1,5 +1,6 @@ use std::{ - collections::BTreeMap, + collections::HashMap, + hash::Hash, sync::{Arc, RwLock}, }; @@ -17,11 +18,12 @@ use super::{InternalEventHandle, RegisterInternalEvent}; /// new event is emitted for a previously unseen set of tags an event is registered /// and stored in the cache. #[derive(Derivative)] -#[derivative(Clone(bound = ""), Default(bound = ""))] -pub struct RegisteredEventCache { +#[derivative(Clone(bound = "T: Clone"))] +pub struct RegisteredEventCache { + fixed_tags: T, cache: Arc< RwLock< - BTreeMap< + HashMap< ::Tags, ::Handle, >, @@ -36,16 +38,31 @@ pub trait RegisterTaggedInternalEvent: RegisterInternalEvent { /// that will be used when registering the event. type Tags; - fn register(tags: Self::Tags) -> ::Handle; + /// The type that contains data necessary to extract the tags that will + /// be fixed and only need setting up front when the cache is first created. 
+ type Fixed; + + fn register(fixed: Self::Fixed, tags: Self::Tags) -> ::Handle; } -impl RegisteredEventCache +impl RegisteredEventCache where Data: Sized, EventHandle: InternalEventHandle, - Tags: Ord + Clone, - Event: RegisterInternalEvent + RegisterTaggedInternalEvent, + Tags: Clone + Eq + Hash, + FixedTags: Clone, + Event: RegisterInternalEvent + + RegisterTaggedInternalEvent, { + /// Create a new event cache with a set of fixed tags. These tags are passed to + /// all registered events. + pub fn new(fixed_tags: FixedTags) -> Self { + Self { + fixed_tags, + cache: Arc::default(), + } + } + /// Emits the event with the given tags. /// It will register the event and store in the cache if this has not already /// been done. @@ -58,7 +75,10 @@ where if let Some(event) = read.get(tags) { event.emit(value); } else { - let event = ::register(tags.clone()); + let event = ::register( + self.fixed_tags.clone(), + tags.clone(), + ); event.emit(value); // Ensure the read lock is dropped so we can write. @@ -67,3 +87,43 @@ where } } } + +#[cfg(test)] +mod tests { + #![allow(unreachable_pub)] + use metrics::{register_counter, Counter}; + + use super::*; + + crate::registered_event!( + TestEvent { + fixed: String, + dynamic: String, + } => { + event: Counter = { + register_counter!("test_event_total", "fixed" => self.fixed, "dynamic" => self.dynamic) + }, + } + + fn emit(&self, count: u64) { + self.event.increment(count); + } + + fn register(fixed: String, dynamic: String) { + crate::internal_event::register(TestEvent { + fixed, + dynamic, + }) + } + ); + + #[test] + fn test_fixed_tag() { + let event: RegisteredEventCache = + RegisteredEventCache::new("fixed".to_string()); + + for tag in 1..=5 { + event.emit(&format!("dynamic{tag}"), tag); + } + } +} diff --git a/lib/vector-common/src/internal_event/events_sent.rs b/lib/vector-common/src/internal_event/events_sent.rs index d12a22bf17e8a..b902b6afaff65 100644 --- a/lib/vector-common/src/internal_event/events_sent.rs +++ b/lib/vector-common/src/internal_event/events_sent.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use metrics::{register_counter, Counter}; use tracing::trace; -use crate::{config::ComponentKey, request_metadata::EventCountTags}; +use crate::config::ComponentKey; use super::{CountByteSize, OptionalTag, Output, SharedString}; @@ -91,19 +91,25 @@ crate::registered_event!( self.event_bytes.increment(byte_size.get() as u64); } - fn register(tags: EventCountTags) { - super::register(TaggedEventsSent::new( - tags, - )) + fn register(_fixed: (), tags: TaggedEventsSent) { + super::register(tags) } ); impl TaggedEventsSent { #[must_use] - pub fn new(tags: EventCountTags) -> Self { + pub fn new_empty() -> Self { Self { - source: tags.source, - service: tags.service, + source: OptionalTag::Specified(None), + service: OptionalTag::Specified(None), + } + } + + #[must_use] + pub fn new_unspecified() -> Self { + Self { + source: OptionalTag::Ignored, + service: OptionalTag::Ignored, } } } diff --git a/lib/vector-common/src/internal_event/mod.rs b/lib/vector-common/src/internal_event/mod.rs index 2915c88797bf8..9dfb1d9acb825 100644 --- a/lib/vector-common/src/internal_event/mod.rs +++ b/lib/vector-common/src/internal_event/mod.rs @@ -1,6 +1,6 @@ mod bytes_received; mod bytes_sent; -mod cached_event; +pub mod cached_event; pub mod component_events_dropped; mod events_received; mod events_sent; @@ -195,7 +195,7 @@ impl From for SharedString { macro_rules! registered_event { // A registered event struct with no fields (zero-sized type). 
($event:ident => $($tail:tt)*) => { - #[derive(Debug)] + #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct $event; $crate::registered_event!(=> $event $($tail)*); @@ -203,7 +203,7 @@ macro_rules! registered_event { // A normal registered event struct. ($event:ident { $( $field:ident: $type:ty, )* } => $($tail:tt)*) => { - #[derive(Debug)] + #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct $event { $( pub $field: $type, )* } @@ -220,7 +220,7 @@ macro_rules! registered_event { fn emit(&$slf:ident, $data_name:ident: $data:ident) $emit_body:block - $(fn register($tags_name:ident: $tags:ty) + $(fn register($fixed_name:ident: $fixed_tags:ty, $tags_name:ident: $tags:ty) $register_body:block)? ) => { paste::paste!{ @@ -252,10 +252,12 @@ macro_rules! registered_event { $(impl $crate::internal_event::cached_event::RegisterTaggedInternalEvent for $event { type Tags = $tags; + type Fixed = $fixed_tags; fn register( + $fixed_name: $fixed_tags, $tags_name: $tags, - ) -> ::Handle { + ) -> ::Handle { $register_body } })? diff --git a/lib/vector-common/src/request_metadata.rs b/lib/vector-common/src/request_metadata.rs index 9b93a63df7626..12a164d23e437 100644 --- a/lib/vector-common/src/request_metadata.rs +++ b/lib/vector-common/src/request_metadata.rs @@ -1,44 +1,20 @@ -use std::ops::Add; -use std::{collections::HashMap, sync::Arc}; +use std::{ + collections::HashMap, + ops::{Add, AddAssign}, +}; use crate::{ - config::ComponentKey, internal_event::{ - CountByteSize, InternalEventHandle, OptionalTag, RegisterTaggedInternalEvent, - RegisteredEventCache, + CountByteSize, InternalEventHandle, RegisterTaggedInternalEvent, RegisteredEventCache, + TaggedEventsSent, }, json_size::JsonSize, }; -/// Tags that are used to group the events within a batch for emitting telemetry. -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct EventCountTags { - pub source: OptionalTag>, - pub service: OptionalTag, -} - -impl EventCountTags { - #[must_use] - pub fn new_empty() -> Self { - Self { - source: OptionalTag::Specified(None), - service: OptionalTag::Specified(None), - } - } - - #[must_use] - pub fn new_unspecified() -> Self { - Self { - source: OptionalTag::Ignored, - service: OptionalTag::Ignored, - } - } -} - /// Must be implemented by events to get the tags that will be attached to /// the `component_sent_event_*` emitted metrics. pub trait GetEventCountTags { - fn get_tags(&self) -> EventCountTags; + fn get_tags(&self) -> TaggedEventsSent; } /// Keeps track of the estimated json size of a given batch of events by @@ -48,7 +24,7 @@ pub enum GroupedCountByteSize { /// When we need to keep track of the events by certain tags we use this /// variant. Tagged { - sizes: HashMap, + sizes: HashMap, }, /// If we don't need to track the events by certain tags we can use /// this variant to avoid allocating a `HashMap`, @@ -85,8 +61,8 @@ impl GroupedCountByteSize { /// Returns a `HashMap` of tags => event counts for when we are tracking by tags. /// Returns `None` if we are not tracking by tags. #[must_use] - #[cfg(test)] - pub fn sizes(&self) -> Option<&HashMap> { + #[cfg(any(test, feature = "test"))] + pub fn sizes(&self) -> Option<&HashMap> { match self { Self::Tagged { sizes } => Some(sizes), Self::Untagged { .. } => None, @@ -95,8 +71,8 @@ impl GroupedCountByteSize { /// Returns a single count for when we are not tracking by tags. 
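
As a reading aid for the enum above, a simplified stand-in (not the real GroupedCountByteSize) showing the two tracking modes: a per-tag map of sizes when tag-level telemetry is wanted, or a single untagged total that avoids allocating a map.

use std::collections::HashMap;

#[derive(Debug, PartialEq)]
enum GroupedCount {
    Tagged { sizes: HashMap<String, u64> },
    Untagged { size: u64 },
}

impl GroupedCount {
    fn add_event(&mut self, tag: &str, size: u64) {
        match self {
            GroupedCount::Tagged { sizes } => {
                *sizes.entry(tag.to_string()).or_insert(0) += size;
            }
            // The untagged variant ignores the tag entirely and keeps one total.
            GroupedCount::Untagged { size: total } => *total += size,
        }
    }
}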
#[must_use] - #[cfg(test)] - fn size(&self) -> Option { + #[cfg(any(test, feature = "test"))] + pub fn size(&self) -> Option { match self { Self::Tagged { .. } => None, Self::Untagged { size } => Some(*size), @@ -129,9 +105,9 @@ impl GroupedCountByteSize { } /// Emits our counts to a `RegisteredEvent` cached event. - pub fn emit_event(&self, event_cache: &RegisteredEventCache) + pub fn emit_event(&self, event_cache: &RegisteredEventCache<(), T>) where - T: RegisterTaggedInternalEvent, + T: RegisterTaggedInternalEvent, H: InternalEventHandle, { match self { @@ -141,10 +117,26 @@ impl GroupedCountByteSize { } } GroupedCountByteSize::Untagged { size } => { - event_cache.emit(&EventCountTags::new_unspecified(), *size); + event_cache.emit(&TaggedEventsSent::new_unspecified(), *size); } } } + + /// Returns `true` if we are the `Tagged` variant - keeping track of the byte sizes + /// grouped by their relevant tags. + #[must_use] + pub fn is_tagged(&self) -> bool { + match self { + GroupedCountByteSize::Tagged { .. } => true, + GroupedCountByteSize::Untagged { .. } => false, + } + } + + /// Returns `true` if we are the `Untagged` variant - keeping a single count for all events. + #[must_use] + pub fn is_untagged(&self) -> bool { + !self.is_tagged() + } } impl From for GroupedCountByteSize { @@ -153,34 +145,89 @@ impl From for GroupedCountByteSize { } } +impl AddAssign for GroupedCountByteSize { + fn add_assign(&mut self, mut rhs: Self) { + if self.is_untagged() && rhs.is_tagged() { + // First handle the case where we are untagged and assigning to a tagged value. + // We need to change `self` and so need to ensure our match doesn't take ownership of the object. + *self = match (&self, &mut rhs) { + (Self::Untagged { size }, Self::Tagged { sizes }) => { + let mut sizes = std::mem::take(sizes); + match sizes.get_mut(&TaggedEventsSent::new_empty()) { + Some(empty_size) => *empty_size += *size, + None => { + sizes.insert(TaggedEventsSent::new_empty(), *size); + } + } + + Self::Tagged { sizes } + } + _ => { + unreachable!() + } + }; + + return; + } + + // For these cases, we know we won't have to change `self` so the match can take ownership. + match (self, rhs) { + (Self::Tagged { sizes: ref mut lhs }, Self::Tagged { sizes: rhs }) => { + for (key, value) in rhs { + match lhs.get_mut(&key) { + Some(size) => *size += value, + None => { + lhs.insert(key.clone(), value); + } + } + } + } + + (Self::Untagged { size: lhs }, Self::Untagged { size: rhs }) => { + *lhs = *lhs + rhs; + } + + (Self::Tagged { ref mut sizes }, Self::Untagged { size }) => { + match sizes.get_mut(&TaggedEventsSent::new_empty()) { + Some(empty_size) => *empty_size += size, + None => { + sizes.insert(TaggedEventsSent::new_empty(), size); + } + } + } + (Self::Untagged { .. }, Self::Tagged { .. 
}) => unreachable!(), + }; + } +} + impl<'a> Add<&'a GroupedCountByteSize> for GroupedCountByteSize { type Output = GroupedCountByteSize; fn add(self, other: &'a Self::Output) -> Self::Output { match (self, other) { - (Self::Tagged { sizes: mut us }, Self::Tagged { sizes: them }) => { - for (key, value) in them { - match us.get_mut(key) { + (Self::Tagged { sizes: mut lhs }, Self::Tagged { sizes: rhs }) => { + for (key, value) in rhs { + match lhs.get_mut(key) { Some(size) => *size += *value, None => { - us.insert(key.clone(), *value); + lhs.insert(key.clone(), *value); } } } - Self::Tagged { sizes: us } + Self::Tagged { sizes: lhs } } - (Self::Untagged { size: us }, Self::Untagged { size: them }) => { - Self::Untagged { size: us + *them } + (Self::Untagged { size: lhs }, Self::Untagged { size: rhs }) => { + Self::Untagged { size: lhs + *rhs } } // The following two scenarios shouldn't really occur in practice, but are provided for completeness. (Self::Tagged { mut sizes }, Self::Untagged { size }) => { - match sizes.get_mut(&EventCountTags::new_empty()) { + match sizes.get_mut(&TaggedEventsSent::new_empty()) { Some(empty_size) => *empty_size += *size, None => { - sizes.insert(EventCountTags::new_empty(), *size); + sizes.insert(TaggedEventsSent::new_empty(), *size); } } @@ -188,10 +235,10 @@ impl<'a> Add<&'a GroupedCountByteSize> for GroupedCountByteSize { } (Self::Untagged { size }, Self::Tagged { sizes }) => { let mut sizes = sizes.clone(); - match sizes.get_mut(&EventCountTags::new_empty()) { + match sizes.get_mut(&TaggedEventsSent::new_empty()) { Some(empty_size) => *empty_size += size, None => { - sizes.insert(EventCountTags::new_empty(), size); + sizes.insert(TaggedEventsSent::new_empty(), size); } } @@ -307,6 +354,10 @@ pub trait MetaDescriptive { #[cfg(test)] mod tests { + use std::sync::Arc; + + use crate::{config::ComponentKey, internal_event::OptionalTag}; + use super::*; struct DummyEvent { @@ -315,8 +366,8 @@ mod tests { } impl GetEventCountTags for DummyEvent { - fn get_tags(&self) -> EventCountTags { - EventCountTags { + fn get_tags(&self) -> TaggedEventsSent { + TaggedEventsSent { source: self.source.clone(), service: self.service.clone(), } @@ -380,14 +431,14 @@ mod tests { assert_eq!( vec![ ( - EventCountTags { + TaggedEventsSent { source: OptionalTag::Ignored, service: Some("cabbage".to_string()).into() }, CountByteSize(2, JsonSize::new(78)) ), ( - EventCountTags { + TaggedEventsSent { source: OptionalTag::Ignored, service: Some("tomato".to_string()).into() }, diff --git a/lib/vector-config-common/Cargo.toml b/lib/vector-config-common/Cargo.toml index 0d4ddaed219a0..9de0b5159865a 100644 --- a/lib/vector-config-common/Cargo.toml +++ b/lib/vector-config-common/Cargo.toml @@ -6,11 +6,11 @@ license = "MPL-2.0" [dependencies] convert_case = { version = "0.6", default-features = false } -darling = { version = "0.13", default-features = false, features = ["suggestions"] } +darling = { version = "0.20", default-features = false, features = ["suggestions"] } once_cell = { version = "1", default-features = false, features = ["std"] } proc-macro2 = { version = "1.0", default-features = false } serde = { version = "1.0", default-features = false, features = ["derive"] } serde_json = { version = "1.0", default-features = false, features = ["std"] } -syn = { version = "1.0", features = ["full", "extra-traits", "visit-mut", "visit"] } +syn = { version = "2.0", features = ["full", "extra-traits", "visit-mut", "visit"] } tracing = { version = "0.1.34", default-features = false } quote = { 
version = "1.0", default-features = false } diff --git a/lib/vector-config-common/src/validation.rs b/lib/vector-config-common/src/validation.rs index f264b707fe461..569f80cf33007 100644 --- a/lib/vector-config-common/src/validation.rs +++ b/lib/vector-config-common/src/validation.rs @@ -1,7 +1,7 @@ use darling::FromMeta; use proc_macro2::TokenStream; use quote::{quote, ToTokens}; -use syn::{Lit, Meta}; +use syn::{Expr, Lit, Meta}; use crate::{ num::{ERR_NUMERIC_OUT_OF_RANGE, NUMERIC_ENFORCED_LOWER_BOUND, NUMERIC_ENFORCED_UPPER_BOUND}, @@ -336,17 +336,19 @@ fn maybe_float_or_int(meta: &Meta) -> darling::Result> { // First make sure we can even get a valid f64 from this meta item. let result = match meta { Meta::Path(_) => Err(darling::Error::unexpected_type("path")), - Meta::List(_) => Err(darling::Error::unexpected_type("path")), - Meta::NameValue(nv) => match &nv.lit { - Lit::Str(s) => { - let s = s.value(); - s.as_str() - .parse() - .map_err(|_| darling::Error::unknown_value(s.as_str())) - } - Lit::Int(i) => i.base10_parse::().map_err(Into::into), - Lit::Float(f) => f.base10_parse::().map_err(Into::into), - lit => Err(darling::Error::unexpected_lit_type(lit)), + Meta::List(_) => Err(darling::Error::unexpected_type("list")), + Meta::NameValue(nv) => match &nv.value { + Expr::Lit(expr) => match &expr.lit { + Lit::Str(s) => { + let s = s.value(); + s.parse() + .map_err(|_| darling::Error::unknown_value(s.as_str())) + } + Lit::Int(i) => i.base10_parse::().map_err(Into::into), + Lit::Float(f) => f.base10_parse::().map_err(Into::into), + lit => Err(darling::Error::unexpected_lit_type(lit)), + }, + expr => Err(darling::Error::unexpected_expr_type(expr)), }, }; diff --git a/lib/vector-config-macros/Cargo.toml b/lib/vector-config-macros/Cargo.toml index 2a133adaa4fcd..60c0e9b3523d6 100644 --- a/lib/vector-config-macros/Cargo.toml +++ b/lib/vector-config-macros/Cargo.toml @@ -8,13 +8,13 @@ license = "MPL-2.0" proc-macro = true [dependencies] -darling = { version = "0.13", default-features = false, features = ["suggestions"] } +darling = { version = "0.20", default-features = false, features = ["suggestions"] } proc-macro2 = { version = "1.0", default-features = false } quote = { version = "1.0", default-features = false } -serde_derive_internals = "0.26" -syn = { version = "1.0", default-features = false, features = ["full", "extra-traits", "visit-mut", "visit"] } +serde_derive_internals = "0.28" +syn = { version = "2.0", default-features = false, features = ["full", "extra-traits", "visit-mut", "visit"] } vector-config-common = { path = "../vector-config-common" } [dev-dependencies] -serde = { version = "1.0.164", default-features = false } +serde = { version = "1.0.183", default-features = false } vector-config = { path = "../vector-config" } diff --git a/lib/vector-config-macros/src/ast/container.rs b/lib/vector-config-macros/src/ast/container.rs index f4a47e71d18e7..28cc12f69a35a 100644 --- a/lib/vector-config-macros/src/ast/container.rs +++ b/lib/vector-config-macros/src/ast/container.rs @@ -368,7 +368,7 @@ impl<'a> Container<'a> { /// standard `#[deprecated]` attribute, neither automatically applying it nor deriving the /// deprecation status of a field when it is present. pub fn deprecated(&self) -> bool { - self.attrs.deprecated.is_some() + self.attrs.deprecated.is_present() } /// Metadata (custom attributes) for the container, if any. 
@@ -584,7 +584,7 @@ mod tests { assert_eq!(literals_to_idents(&["T"]), idents); // We don't support parenthesized type parameters, like when using a function pointer type. - let parenthesized_type: Type = parse_quote! { Something String> }; + let parenthesized_type: Type = parse_quote! { Something String> }; let idents = get_generic_type_param_idents(&parenthesized_type); assert_eq!(None, idents); } diff --git a/lib/vector-config-macros/src/ast/field.rs b/lib/vector-config-macros/src/ast/field.rs index 790bf07b7dadd..d38ce6c95417c 100644 --- a/lib/vector-config-macros/src/ast/field.rs +++ b/lib/vector-config-macros/src/ast/field.rs @@ -2,9 +2,10 @@ use darling::{ util::{Flag, Override, SpannedValue}, FromAttributes, }; -use proc_macro2::Span; +use proc_macro2::{Span, TokenStream}; +use quote::ToTokens; use serde_derive_internals::ast as serde_ast; -use syn::{parse_quote, spanned::Spanned, ExprPath, Ident}; +use syn::{parse_quote, ExprPath, Ident}; use vector_config_common::validation::Validation; use super::{ @@ -174,7 +175,7 @@ impl<'a> Field<'a> { /// variants, to simply document themselves at the container/variant level and avoid needing to /// document that inner field which itself needs no further title/description. pub fn transparent(&self) -> bool { - self.attrs.transparent.is_some() + self.attrs.transparent.is_present() } /// Whether or not the field is deprecated. @@ -239,12 +240,9 @@ impl<'a> Field<'a> { } } -impl<'a> Spanned for Field<'a> { - fn span(&self) -> proc_macro2::Span { - match self.original.ident.as_ref() { - Some(ident) => ident.span(), - None => self.original.ty.span(), - } +impl<'a> ToTokens for Field<'a> { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.original.to_tokens(tokens) } } @@ -300,12 +298,12 @@ impl Attributes { // to throw an error if they are. As we're going to forcefully mark the field as // transparent, there's no reason to allow setting derived/transparent manually, as it // only leads to boilerplate and potential confusion. - if self.transparent.is_some() { - return Err(err_field_implicit_transparent(&self.transparent)); + if self.transparent.is_present() { + return Err(err_field_implicit_transparent(&self.transparent.span())); } - if self.derived.is_some() { - return Err(err_field_implicit_transparent(&self.derived)); + if self.derived.is_present() { + return Err(err_field_implicit_transparent(&self.derived.span())); } self.transparent = SpannedValue::new(Flag::present(), Span::call_site()); @@ -339,8 +337,8 @@ impl Attributes { // like a field that is flattened or not visible, it makes no sense to require a description or title for fields // in a virtual newtype. 
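
The is_present() calls above rely on darling 0.20's Flag type. A tiny sketch of that API as used here (the Default impl is an assumption beyond what this diff shows): a flag is either absent or present, and presence is queried directly rather than treating the flag as an Option.

use darling::util::Flag;

fn main() {
    let absent = Flag::default();
    let present = Flag::present();

    assert!(!absent.is_present());
    assert!(present.is_present());
}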
if self.description.is_none() - && !self.derived.is_some() - && !self.transparent.is_some() + && !self.derived.is_present() + && !self.transparent.is_present() && self.visible && !self.flatten && !is_virtual_newtype diff --git a/lib/vector-config-macros/src/ast/mod.rs b/lib/vector-config-macros/src/ast/mod.rs index 7688d6f2df511..1825f9e6fd4e9 100644 --- a/lib/vector-config-macros/src/ast/mod.rs +++ b/lib/vector-config-macros/src/ast/mod.rs @@ -1,4 +1,4 @@ -use darling::{error::Accumulator, util::path_to_string, FromMeta}; +use darling::{ast::NestedMeta, error::Accumulator, util::path_to_string, FromMeta}; use quote::ToTokens; use serde_derive_internals::{ast as serde_ast, attr as serde_attr}; @@ -9,7 +9,7 @@ mod variant; pub use container::Container; pub use field::Field; -use syn::{Expr, NestedMeta}; +use syn::Expr; pub use variant::Variant; use vector_config_common::constants; @@ -196,36 +196,46 @@ impl FromMeta for Metadata { errors.push(darling::Error::unexpected_type("list").with_span(nmeta)); None } - syn::Meta::NameValue(nv) => match &nv.lit { - // When dealing with a string literal, we check if it ends in `()`. If so, - // we emit that as-is, leading to doing a function call and using the return - // value of that function as the value for this key/value pair. - // - // Otherwise, we just treat the string literal normally. - syn::Lit::Str(s) => { - if s.value().ends_with("()") { - if let Ok(expr) = s.parse::() { - Some(LazyCustomAttribute::KeyValue { - key: path_to_string(&nv.path), - value: expr.to_token_stream(), - }) - } else { - errors.push( - darling::Error::custom(INVALID_VALUE_EXPR).with_span(nmeta), - ); - None + syn::Meta::NameValue(nv) => match &nv.value { + Expr::Lit(expr) => { + match &expr.lit { + // When dealing with a string literal, we check if it ends in `()`. If so, + // we emit that as-is, leading to doing a function call and using the return + // value of that function as the value for this key/value pair. + // + // Otherwise, we just treat the string literal normally. 
+ syn::Lit::Str(s) => { + if s.value().ends_with("()") { + if let Ok(expr) = s.parse::() { + Some(LazyCustomAttribute::KeyValue { + key: path_to_string(&nv.path), + value: expr.to_token_stream(), + }) + } else { + errors.push( + darling::Error::custom(INVALID_VALUE_EXPR) + .with_span(nmeta), + ); + None + } + } else { + Some(LazyCustomAttribute::KeyValue { + key: path_to_string(&nv.path), + value: s.value().to_token_stream(), + }) + } } - } else { - Some(LazyCustomAttribute::KeyValue { + lit => Some(LazyCustomAttribute::KeyValue { key: path_to_string(&nv.path), - value: s.value().to_token_stream(), - }) + value: lit.to_token_stream(), + }), } } - lit => Some(LazyCustomAttribute::KeyValue { - key: path_to_string(&nv.path), - value: lit.to_token_stream(), - }), + expr => { + errors + .push(darling::Error::unexpected_expr_type(expr).with_span(nmeta)); + None + } }, }, NestedMeta::Lit(_) => { diff --git a/lib/vector-config-macros/src/ast/util.rs b/lib/vector-config-macros/src/ast/util.rs index 30f66bd6a406a..ddd36c5739448 100644 --- a/lib/vector-config-macros/src/ast/util.rs +++ b/lib/vector-config-macros/src/ast/util.rs @@ -1,7 +1,10 @@ -use darling::error::Accumulator; +use darling::{ast::NestedMeta, error::Accumulator}; use quote::{quote, ToTokens}; use serde_derive_internals::{attr as serde_attr, Ctxt}; -use syn::{spanned::Spanned, Attribute, ExprPath, Lit, Meta, MetaNameValue, NestedMeta}; +use syn::{ + punctuated::Punctuated, spanned::Spanned, token::Comma, Attribute, Expr, ExprLit, ExprPath, + Lit, Meta, MetaNameValue, +}; const ERR_FIELD_MISSING_DESCRIPTION: &str = "field must have a description -- i.e. `/// This is a widget...` or `#[configurable(description = \"...\")] -- or derive it from the underlying type of the field by specifying `#[configurable(derived)]`"; const ERR_FIELD_IMPLICIT_TRANSPARENT: &str = @@ -15,12 +18,16 @@ pub fn try_extract_doc_title_description( let doc_comments = attributes .iter() // We only care about `doc` attributes. - .filter(|attribute| attribute.path.is_ident("doc")) + .filter(|attribute| attribute.path().is_ident("doc")) // Extract the value of the attribute if it's in the form of `doc = "..."`. - .filter_map(|attribute| match attribute.parse_meta() { - Ok(Meta::NameValue(MetaNameValue { - lit: Lit::Str(s), .. - })) => Some(s.value()), + .filter_map(|attribute| match &attribute.meta { + Meta::NameValue(MetaNameValue { + value: + Expr::Lit(ExprLit { + lit: Lit::Str(s), .. + }), + .. + }) => Some(s.value()), _ => None, }) .collect::>(); @@ -209,12 +216,13 @@ fn find_name_value_attribute( attributes .iter() // Only take attributes whose name matches `attr_name`. - .filter(|attr| path_matches(&attr.path, attr_name)) - // Make sure the contents actually parse as a normal structured attribute. - .filter_map(|attr| attr.parse_meta().ok()) + .filter(|attr| path_matches(attr.path(), attr_name)) // Derive macro helper attributes will always be in the list form. - .filter_map(|meta| match meta { - Meta::List(ml) => Some(ml.nested.into_iter()), + .filter_map(|attr| match &attr.meta { + Meta::List(ml) => ml + .parse_args_with(Punctuated::::parse_terminated) + .map(|nested| nested.into_iter()) + .ok(), _ => None, }) .flatten() @@ -222,7 +230,10 @@ fn find_name_value_attribute( // name matches `name_key`, and return their value. 
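
The doc-attribute handling earlier in this hunk reflects how syn 2 surfaces `///` comments: as `#[doc = "..."]` attributes whose value is an expression literal. A consolidated sketch of that extraction, assuming syn 2.x:

use syn::{Attribute, Expr, ExprLit, Lit, Meta, MetaNameValue};

fn doc_lines(attrs: &[Attribute]) -> Vec<String> {
    attrs
        .iter()
        // Only `#[doc = "..."]` attributes are of interest.
        .filter(|attr| attr.path().is_ident("doc"))
        // Unwrap the string literal through the new Expr::Lit layer.
        .filter_map(|attr| match &attr.meta {
            Meta::NameValue(MetaNameValue {
                value: Expr::Lit(ExprLit { lit: Lit::Str(s), .. }),
                ..
            }) => Some(s.value()),
            _ => None,
        })
        .collect()
}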
.find_map(|nm| match nm { NestedMeta::Meta(meta) => match meta { - Meta::NameValue(nv) if path_matches(&nv.path, name_key) => Some(nv.lit), + Meta::NameValue(nv) if path_matches(&nv.path, name_key) => match nv.value { + Expr::Lit(ExprLit { lit, .. }) => Some(lit), + _ => None, + }, _ => None, }, _ => None, @@ -259,7 +270,7 @@ fn find_name_value_attribute( /// returned. pub fn find_delegated_serde_deser_ty(attributes: &[syn::Attribute]) -> Option { // Make sure `#[serde_as(as = "...")]` is present. - find_name_value_attribute(attributes, "serde_as", "as") + find_name_value_attribute(attributes, "serde_as", "r#as") // Make sure `#[serde(with = "...")]` is present, and grab its value. .and_then(|_| find_name_value_attribute(attributes, "serde", "with")) // Try and parse the value as a type path. diff --git a/lib/vector-config-macros/src/ast/variant.rs b/lib/vector-config-macros/src/ast/variant.rs index ea25903a9ec00..5b54ebc30d18f 100644 --- a/lib/vector-config-macros/src/ast/variant.rs +++ b/lib/vector-config-macros/src/ast/variant.rs @@ -1,7 +1,7 @@ use darling::{error::Accumulator, util::Flag, FromAttributes}; -use proc_macro2::Ident; +use proc_macro2::{Ident, TokenStream}; +use quote::ToTokens; use serde_derive_internals::ast as serde_ast; -use syn::spanned::Spanned; use super::{ util::{try_extract_doc_title_description, DarlingResultIterator}, @@ -140,7 +140,7 @@ impl<'a> Variant<'a> { /// standard `#[deprecated]` attribute, neither automatically applying it nor deriving the /// deprecation status of a variant when it is present. pub fn deprecated(&self) -> bool { - self.attrs.deprecated.is_some() + self.attrs.deprecated.is_present() } /// Whether or not this variant is visible during either serialization or deserialization. @@ -166,9 +166,9 @@ impl<'a> Variant<'a> { } } -impl<'a> Spanned for Variant<'a> { - fn span(&self) -> proc_macro2::Span { - self.original.span() +impl<'a> ToTokens for Variant<'a> { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.original.to_tokens(tokens) } } diff --git a/lib/vector-config-macros/src/component_name.rs b/lib/vector-config-macros/src/component_name.rs index 1dd974d604a7b..8c7e5efd2e65e 100644 --- a/lib/vector-config-macros/src/component_name.rs +++ b/lib/vector-config-macros/src/component_name.rs @@ -106,7 +106,7 @@ pub fn derive_component_name_impl(input: TokenStream) -> TokenStream { fn attr_to_component_name(attr: &Attribute) -> Result, Error> { // First, filter out anything that isn't ours. if !path_matches( - &attr.path, + attr.path(), &[ attrs::ENRICHMENT_TABLE_COMPONENT, attrs::PROVIDER_COMPONENT, @@ -121,14 +121,14 @@ fn attr_to_component_name(attr: &Attribute) -> Result, Error> { // Reconstruct the original attribute path (i.e. `source`) from our marker version of it (i.e. // `source_component`), so that any error message we emit is contextually relevant. - let path_str = path_to_string(&attr.path); + let path_str = path_to_string(attr.path()); let component_type_attr = path_str.replace("_component", ""); let component_type = component_type_attr.replace('_', " "); // Make sure the attribute actually has inner tokens. If it doesn't, this means they forgot // entirely to specify a component name, and we want to give back a meaningful error that looks // correct when applied in the context of `#[configurable_component(...)]`. 
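
Putting the pieces of find_name_value_attribute together, a self-contained restatement of the syn 2 / darling 0.20 pattern for reading `name = "value"` pairs out of a list attribute such as `#[serde(with = "...")]`: the list body is parsed on demand into nested meta items, and literal values are unwrapped through Expr::Lit. This is a sketch, not the exact helper above.

use darling::ast::NestedMeta;
use syn::{punctuated::Punctuated, token::Comma, Attribute, Expr, ExprLit, Lit, Meta};

fn find_name_value(attr: &Attribute, name: &str) -> Option<Lit> {
    match &attr.meta {
        // MetaList no longer exposes `.nested`, so parse the arguments on demand.
        Meta::List(list) => list
            .parse_args_with(Punctuated::<NestedMeta, Comma>::parse_terminated)
            .ok()?
            .into_iter()
            .find_map(|nested| match nested {
                NestedMeta::Meta(Meta::NameValue(nv)) if nv.path.is_ident(name) => {
                    match nv.value {
                        Expr::Lit(ExprLit { lit, .. }) => Some(lit),
                        _ => None,
                    }
                }
                _ => None,
            }),
        _ => None,
    }
}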
- if attr.tokens.is_empty() { + if attr.meta.require_list().is_err() { return Err(Error::new( attr.span(), format!( @@ -162,6 +162,10 @@ fn check_component_name_validity(component_name: &str) -> Result<(), String> { // In a nutshell, component names must contain only lowercase ASCII alphabetic characters, or // numbers, or underscores. + if component_name.is_empty() { + return Err("component name must be non-empty".to_string()); + } + // We only support ASCII names, so get that out of the way. if !component_name.is_ascii() { return Err("component names may only contain ASCII characters".to_string()); diff --git a/lib/vector-config-macros/src/configurable.rs b/lib/vector-config-macros/src/configurable.rs index 21c87804e34bf..b26e317b998f5 100644 --- a/lib/vector-config-macros/src/configurable.rs +++ b/lib/vector-config-macros/src/configurable.rs @@ -2,7 +2,7 @@ use proc_macro::TokenStream; use proc_macro2::Span; use quote::{quote, quote_spanned}; use syn::{ - parse_macro_input, parse_quote, spanned::Spanned, token::Colon2, DeriveInput, ExprPath, Ident, + parse_macro_input, parse_quote, spanned::Spanned, token::PathSep, DeriveInput, ExprPath, Ident, PathArguments, Type, }; use vector_config_common::validation::Validation; @@ -1058,7 +1058,7 @@ fn get_ty_for_expr_pos(ty: &syn::Type) -> syn::Type { let mut new_tp = tp.clone(); for segment in new_tp.path.segments.iter_mut() { if let PathArguments::AngleBracketed(ab) = &mut segment.arguments { - ab.colon2_token = Some(Colon2::default()); + ab.colon2_token = Some(PathSep::default()); } } diff --git a/lib/vector-config-macros/src/configurable_component.rs b/lib/vector-config-macros/src/configurable_component.rs index 37acc7703f24d..42d7813768aa6 100644 --- a/lib/vector-config-macros/src/configurable_component.rs +++ b/lib/vector-config-macros/src/configurable_component.rs @@ -1,10 +1,10 @@ -use darling::{Error, FromMeta}; +use darling::{ast::NestedMeta, Error, FromMeta}; use proc_macro::TokenStream; use proc_macro2::{Ident, Span}; use quote::{quote, quote_spanned}; use syn::{ parse_macro_input, parse_quote, parse_quote_spanned, punctuated::Punctuated, spanned::Spanned, - token::Comma, AttributeArgs, DeriveInput, Lit, LitStr, Meta, MetaList, NestedMeta, Path, + token::Comma, DeriveInput, Lit, LitStr, Meta, MetaList, Path, }; use vector_config_common::{ constants::ComponentType, human_friendly::generate_human_friendly_string, @@ -41,16 +41,19 @@ impl TypedComponent { /// If the meta list does not have a path that matches a known component type, `None` is /// returned. Otherwise, `Some(...)` is returned with a valid `TypedComponent`. 
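
A compact restatement of the component-name rule the new check enforces: names must be non-empty and may only contain lowercase ASCII letters, digits, or underscores. The sketch below is simplified relative to the real validator.

fn check_component_name_validity(name: &str) -> Result<(), String> {
    if name.is_empty() {
        return Err("component name must be non-empty".to_string());
    }
    if !name
        .chars()
        .all(|c| c.is_ascii_lowercase() || c.is_ascii_digit() || c == '_')
    {
        return Err(
            "component names may only contain lowercase ASCII letters, digits, and underscores"
                .to_string(),
        );
    }
    Ok(())
}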
fn from_meta_list(ml: &MetaList) -> Option { - let mut items = ml.nested.iter(); + let mut items = ml + .parse_args_with(Punctuated::::parse_terminated) + .unwrap_or_default() + .into_iter(); ComponentType::try_from(&ml.path) .ok() .map(|component_type| { let component_name = match items.next() { - Some(NestedMeta::Lit(Lit::Str(component_name))) => Some(component_name.clone()), + Some(NestedMeta::Lit(Lit::Str(component_name))) => Some(component_name), _ => None, }; let description = match items.next() { - Some(NestedMeta::Lit(Lit::Str(description))) => Some(description.clone()), + Some(NestedMeta::Lit(Lit::Str(description))) => Some(description), _ => None, }; Self { @@ -157,7 +160,7 @@ struct Options { } impl FromMeta for Options { - fn from_list(items: &[syn::NestedMeta]) -> darling::Result { + fn from_list(items: &[NestedMeta]) -> darling::Result { let mut typed_component = None; let mut no_ser = false; let mut no_deser = false; @@ -255,7 +258,10 @@ impl Options { } pub fn configurable_component_impl(args: TokenStream, item: TokenStream) -> TokenStream { - let args = parse_macro_input!(args as AttributeArgs); + let args: Vec = + parse_macro_input!(args with Punctuated::::parse_terminated) + .into_iter() + .collect(); let input = parse_macro_input!(item as DeriveInput); let options = match Options::from_list(&args) { diff --git a/lib/vector-config/Cargo.toml b/lib/vector-config/Cargo.toml index e1acd6593233c..83432b11e813e 100644 --- a/lib/vector-config/Cargo.toml +++ b/lib/vector-config/Cargo.toml @@ -16,14 +16,14 @@ chrono-tz = { version = "0.8.3", default-features = false } encoding_rs = { version = "0.8", default-features = false, features = ["alloc", "serde"] } indexmap = { version = "2.0", default-features = false, features = ["std"] } inventory = { version = "0.3" } -no-proxy = { version = "0.3.1", default-features = false, features = ["serialize"] } -num-traits = { version = "0.2.15", default-features = false } +no-proxy = { version = "0.3.3", default-features = false, features = ["serialize"] } +num-traits = { version = "0.2.16", default-features = false } once_cell = { version = "1", default-features = false } serde = { version = "1.0", default-features = false } serde_json = { version = "1.0", default-features = false, features = ["std"] } -serde_with = { version = "2.3.2", default-features = false, features = ["std"] } -snafu = { version = "0.7.4", default-features = false } -toml = { version = "0.7.5", default-features = false } +serde_with = { version = "3.2.0", default-features = false, features = ["std"] } +snafu = { version = "0.7.5", default-features = false } +toml = { version = "0.7.6", default-features = false } tracing = { version = "0.1.34", default-features = false } url = { version = "2.4.0", default-features = false, features = ["serde"] } vrl.workspace = true @@ -32,4 +32,4 @@ vector-config-macros = { path = "../vector-config-macros" } [dev-dependencies] assert-json-diff = { version = "2", default-features = false } -serde_with = { version = "2.3.2", default-features = false, features = ["std", "macros"] } +serde_with = { version = "3.2.0", default-features = false, features = ["std", "macros"] } diff --git a/lib/vector-config/src/external/serde_with.rs b/lib/vector-config/src/external/serde_with.rs index 05de17e3bf605..30267e2253943 100644 --- a/lib/vector-config/src/external/serde_with.rs +++ b/lib/vector-config/src/external/serde_with.rs @@ -50,7 +50,7 @@ where impl Configurable for serde_with::DurationSeconds { fn referenceable_name() -> Option<&'static 
str> { // We're masking the type parameters here because we only deal with whole seconds via this - // version, and handle fractional seconds with `DurationSeconds`, which we + // version, and handle fractional seconds with `DurationSecondsWithFrac`, which we // expose as `serde_with::DurationFractionalSeconds`. Some("serde_with::DurationSeconds") } @@ -76,7 +76,7 @@ impl Configurable for serde_with::DurationSeconds { +impl Configurable for serde_with::DurationSecondsWithFrac { fn referenceable_name() -> Option<&'static str> { // We're masking the type parameters here because we only deal with fractional seconds via this // version, and handle whole seconds with `DurationSeconds`, which we diff --git a/lib/vector-config/src/schema/visitors/human_name.rs b/lib/vector-config/src/schema/visitors/human_name.rs index dc2fb7e57cca9..4b9b2330d5ed2 100644 --- a/lib/vector-config/src/schema/visitors/human_name.rs +++ b/lib/vector-config/src/schema/visitors/human_name.rs @@ -127,7 +127,7 @@ mod tests { } })); - let mut visitor = GenerateHumanFriendlyNameVisitor::default(); + let mut visitor = GenerateHumanFriendlyNameVisitor; visitor.visit_root_schema(&mut actual_schema); assert_schemas_eq(expected_schema, actual_schema); @@ -150,7 +150,7 @@ mod tests { } })); - let mut visitor = GenerateHumanFriendlyNameVisitor::default(); + let mut visitor = GenerateHumanFriendlyNameVisitor; visitor.visit_root_schema(&mut actual_schema); assert_schemas_eq(expected_schema, actual_schema); @@ -177,7 +177,7 @@ mod tests { } })); - let mut visitor = GenerateHumanFriendlyNameVisitor::default(); + let mut visitor = GenerateHumanFriendlyNameVisitor; visitor.visit_root_schema(&mut actual_schema); assert_schemas_eq(expected_schema, actual_schema); @@ -204,7 +204,7 @@ mod tests { } })); - let mut visitor = GenerateHumanFriendlyNameVisitor::default(); + let mut visitor = GenerateHumanFriendlyNameVisitor; visitor.visit_root_schema(&mut actual_schema); assert_schemas_eq(expected_schema, actual_schema); @@ -222,7 +222,7 @@ mod tests { let expected_schema = actual_schema.clone(); - let mut visitor = GenerateHumanFriendlyNameVisitor::default(); + let mut visitor = GenerateHumanFriendlyNameVisitor; visitor.visit_root_schema(&mut actual_schema); assert_schemas_eq(expected_schema, actual_schema); @@ -244,7 +244,7 @@ mod tests { let expected_schema = actual_schema.clone(); - let mut visitor = GenerateHumanFriendlyNameVisitor::default(); + let mut visitor = GenerateHumanFriendlyNameVisitor; visitor.visit_root_schema(&mut actual_schema); assert_schemas_eq(expected_schema, actual_schema); @@ -278,7 +278,7 @@ mod tests { } })); - let mut visitor = GenerateHumanFriendlyNameVisitor::default(); + let mut visitor = GenerateHumanFriendlyNameVisitor; visitor.visit_root_schema(&mut actual_schema); assert_schemas_eq(expected_schema, actual_schema); diff --git a/lib/vector-core/Cargo.toml b/lib/vector-core/Cargo.toml index 44e04b859fb92..d5bceb92c9c77 100644 --- a/lib/vector-core/Cargo.toml +++ b/lib/vector-core/Cargo.toml @@ -8,13 +8,13 @@ publish = false [dependencies] async-graphql = { version = "5.0.10", default-features = false, features = ["playground" ], optional = true } async-trait = { version = "0.1", default-features = false } -bitmask-enum = { version = "2.1.0", default-features = false } +bitmask-enum = { version = "2.2.2", default-features = false } bytes = { version = "1.4.0", default-features = false, features = ["serde"] } chrono = { version = "0.4.19", default-features = false, features = ["serde"] } crossbeam-utils = { version 
= "0.8.16", default-features = false } db-key = { version = "0.0.5", default-features = false, optional = true } deadpool-postgres = { version = "0.10.5"} -dyn-clone = { version = "1.0.11", default-features = false } +dyn-clone = { version = "1.0.12", default-features = false } enrichment = { path = "../enrichment", optional = true } enumflags2 = { version = "0.7.7", default-features = false } float_eq = { version = "1.0", default-features = false } @@ -25,42 +25,42 @@ http = { version = "0.2.9", default-features = false } hyper-proxy = { version = "0.9.1", default-features = false, features = ["openssl-tls"] } indexmap = { version = "~2.0.0", default-features = false, features = ["serde", "std"] } lookup = { package = "vector-lookup", path = "../vector-lookup" } -metrics = "0.21.0" +metrics = "0.21.1" metrics-tracing-context = { version = "0.14.0", default-features = false } -metrics-util = { version = "0.15.0", default-features = false, features = ["registry"] } +metrics-util = { version = "0.15.1", default-features = false, features = ["registry"] } mlua = { version = "0.8.9", default-features = false, features = ["lua54", "send", "vendored"], optional = true } -no-proxy = { version = "0.3.2", default-features = false, features = ["serialize"] } +no-proxy = { version = "0.3.3", default-features = false, features = ["serialize"] } once_cell = { version = "1.18", default-features = false } ordered-float = { version = "3.7.0", default-features = false } -openssl = { version = "0.10.55", default-features = false, features = ["vendored"] } +openssl = { version = "0.10.56", default-features = false, features = ["vendored"] } parking_lot = { version = "0.12.1", default-features = false } -pin-project = { version = "1.1.1", default-features = false } +pin-project = { version = "1.1.3", default-features = false } proptest = { version = "1.2", optional = true } prost-types = { version = "0.11", default-features = false } prost = { version = "0.11", default-features = false, features = ["std"] } reqwest = { version = "0.11", features = ["json"] } quanta = { version = "0.11.1", default-features = false } -regex = { version = "1.8.4", default-features = false, features = ["std", "perf"] } +regex = { version = "1.9.3", default-features = false, features = ["std", "perf"] } ryu = { version = "1", default-features = false } -serde = { version = "1.0.164", default-features = false, features = ["derive", "rc"] } -serde_json = { version = "1.0.99", default-features = false } -serde_with = { version = "2.3.2", default-features = false, features = ["std", "macros"] } +serde = { version = "1.0.183", default-features = false, features = ["derive", "rc"] } +serde_json = { version = "1.0.104", default-features = false } +serde_with = { version = "3.2.0", default-features = false, features = ["std", "macros"] } smallvec = { version = "1", default-features = false, features = ["serde", "const_generics"] } -snafu = { version = "0.7.4", default-features = false } +snafu = { version = "0.7.5", default-features = false } socket2 = { version = "0.5.3", default-features = false } -tokio = { version = "1.29.0", default-features = false, features = ["net"] } +tokio = { version = "1.30.0", default-features = false, features = ["net"] } tokio-openssl = { version = "0.6.3", default-features = false } tokio-stream = { version = "0.1", default-features = false, features = ["time"], optional = true } tokio-util = { version = "0.7.0", default-features = false, features = ["time"] } tokio-postgres = { version = "0.7.7", 
default-features = false, features = ["runtime", "with-chrono-0_4", "with-uuid-1", "with-serde_json-1"] } -toml = { version = "0.7.5", default-features = false } +toml = { version = "0.7.6", default-features = false } tonic = { version = "0.9", default-features = false, features = ["transport"] } tower = { version = "0.4", default-features = false, features = ["util"] } tracing = { version = "0.1.34", default-features = false } tracing-core = { version = "0.1.26", default-features = false } tracing-log = { version = "0.1.3", default-features = false } tracing-subscriber = { version = "0.3.17", default-features = false, features = ["std"] } -typetag = { version = "0.2.8", default-features = false } +typetag = { version = "0.2.12", default-features = false } twox-hash = { version = "1.6.3", default-features = false } url = { version = "2", default-features = false } urlencoding = { version = "2.1.0", default-features = false } @@ -74,10 +74,10 @@ opentelemetry-rs = { version = "1", branch = "main" , git = "ssh://git@github.co vrl.workspace = true [target.'cfg(target_os = "macos")'.dependencies] -security-framework = "2.9.1" +security-framework = "2.9.2" [target.'cfg(windows)'.dependencies] -schannel = "0.1.21" +schannel = "0.1.22" [build-dependencies] prost-build = "0.11" @@ -93,7 +93,7 @@ quickcheck_macros = "1" proptest = "1.2" similar-asserts = "1.4.2" tokio-test = "0.4.2" -toml = { version = "0.7.5", default-features = false, features = ["parse"] } +toml = { version = "0.7.6", default-features = false, features = ["parse"] } ndarray = "0.15.6" ndarray-stats = "0.5.1" noisy_float = "0.2.0" diff --git a/lib/vector-core/benches/event/log_event.rs b/lib/vector-core/benches/event/log_event.rs index 1b5ea41df44d5..7395ab98a60e4 100644 --- a/lib/vector-core/benches/event/log_event.rs +++ b/lib/vector-core/benches/event/log_event.rs @@ -6,6 +6,14 @@ use criterion::{ use lookup::event_path; use vector_core::event::LogEvent; +fn default_log_event() -> LogEvent { + let mut log_event = LogEvent::default(); + log_event.insert(event_path!("one"), 1); + log_event.insert(event_path!("two"), 2); + log_event.insert(event_path!("three"), 3); + log_event +} + fn rename_key_flat(c: &mut Criterion) { let mut group: BenchmarkGroup = c.benchmark_group("vector_core::event::log_event::LogEvent::rename_key_flat"); @@ -13,13 +21,7 @@ fn rename_key_flat(c: &mut Criterion) { group.bench_function("rename_flat_key (key is present)", move |b| { b.iter_batched( - || { - let mut log_event = LogEvent::default(); - log_event.insert("one", 1); - log_event.insert("two", 2); - log_event.insert("three", 3); - log_event - }, + default_log_event, |mut log_event| { log_event.rename_key(event_path!("one"), event_path!("1")); }, @@ -29,13 +31,7 @@ fn rename_key_flat(c: &mut Criterion) { group.bench_function("rename_flat_key (key is NOT present)", move |b| { b.iter_batched( - || { - let mut log_event = LogEvent::default(); - log_event.insert("one", 1); - log_event.insert("two", 2); - log_event.insert("three", 3); - log_event - }, + default_log_event, |mut log_event| { log_event.rename_key(event_path!("four"), event_path!("4")); }, diff --git a/lib/vector-core/src/config/log_schema.rs b/lib/vector-core/src/config/log_schema.rs index 62462c24a5567..9614b72c70202 100644 --- a/lib/vector-core/src/config/log_schema.rs +++ b/lib/vector-core/src/config/log_schema.rs @@ -1,11 +1,18 @@ -use lookup::lookup_v2::{parse_target_path, OptionalValuePath}; -use lookup::{owned_value_path, OwnedTargetPath, OwnedValuePath}; +use 
lookup::lookup_v2::OptionalTargetPath; +use lookup::{OwnedTargetPath, OwnedValuePath}; use once_cell::sync::{Lazy, OnceCell}; use vector_config::configurable_component; static LOG_SCHEMA: OnceCell = OnceCell::new(); static LOG_SCHEMA_DEFAULT: Lazy = Lazy::new(LogSchema::default); +const MESSAGE: &str = "message"; +const TIMESTAMP: &str = "timestamp"; +const HOST: &str = "host"; +const SOURCE_TYPE: &str = "source_type"; +const METADATA: &str = "metadata"; +const INTERNAL_METADATA: &str = "internal_metadata"; + /// Loads Log Schema from configurations and sets global schema. Once this is /// done, configurations can be correctly loaded using configured log schema /// defaults. @@ -43,24 +50,24 @@ pub struct LogSchema { /// /// This would be the field that holds the raw message, such as a raw log line. #[serde(default = "LogSchema::default_message_key")] - message_key: String, + message_key: OptionalTargetPath, /// The name of the event field to treat as the event timestamp. #[serde(default = "LogSchema::default_timestamp_key")] - timestamp_key: OptionalValuePath, + timestamp_key: OptionalTargetPath, /// The name of the event field to treat as the host which sent the message. /// /// This field will generally represent a real host, or container, that generated the message, /// but is somewhat source-dependent. #[serde(default = "LogSchema::default_host_key")] - host_key: String, + host_key: OptionalTargetPath, /// The name of the event field to set the source identifier in. /// /// This field will be set by the Vector source that the event was created in. #[serde(default = "LogSchema::default_source_type_key")] - source_type_key: String, + source_type_key: OptionalTargetPath, /// The name of the event field to set the internal event metadata in. /// @@ -71,7 +78,7 @@ pub struct LogSchema { /// as it may (now, or in the future from upstream changes) contain information that should /// not be available within the pipeline. #[serde(default = "LogSchema::default_metadata_key")] - metadata_key: String, + metadata_key: OptionalTargetPath, /// The name of the event field for user-facing event metadata /// @@ -101,63 +108,85 @@ impl Default for LogSchema { } impl LogSchema { - fn default_message_key() -> String { - String::from("message") - } - - fn default_timestamp_key() -> OptionalValuePath { - OptionalValuePath { - path: Some(owned_value_path!("timestamp")), - } + fn default_message_key() -> OptionalTargetPath { + OptionalTargetPath::event(MESSAGE) } - fn default_host_key() -> String { - String::from("host") + fn default_timestamp_key() -> OptionalTargetPath { + OptionalTargetPath::event(TIMESTAMP) } - fn default_source_type_key() -> String { - String::from("source_type") + fn default_host_key() -> OptionalTargetPath { + OptionalTargetPath::event(HOST) } - fn default_metadata_key() -> String { - String::from("internal_metadata") + fn default_source_type_key() -> OptionalTargetPath { + OptionalTargetPath::event(SOURCE_TYPE) } fn default_user_metadata_key() -> String { - String::from("metadata") + String::from(METADATA) } fn default_annotations_key() -> String { String::from("annotations") } - pub fn message_key(&self) -> &str { - &self.message_key + fn default_metadata_key() -> OptionalTargetPath { + OptionalTargetPath::event(INTERNAL_METADATA) + } + + pub fn message_key(&self) -> Option<&OwnedValuePath> { + self.message_key.path.as_ref().map(|key| &key.path) } /// Returns an `OwnedTargetPath` of the message key. /// This parses the path and will panic if it is invalid. 
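
To make the shape of this refactor easier to follow, a simplified stand-in (hypothetical types, not the lookup crate's) for the log-schema change: each key becomes an optional, pre-parsed path with a named default, and accessors return Option<&...> instead of &str, so callers must handle a disabled key explicitly.

#[derive(Clone, Debug, PartialEq, Eq)]
struct OptionalPath(Option<Vec<String>>); // stand-in for OptionalTargetPath

impl OptionalPath {
    fn event(field: &str) -> Self {
        Self(Some(vec![field.to_string()]))
    }
    fn as_ref(&self) -> Option<&Vec<String>> {
        self.0.as_ref()
    }
}

const MESSAGE: &str = "message";
const HOST: &str = "host";

struct Schema {
    message_key: OptionalPath,
    host_key: OptionalPath,
}

impl Default for Schema {
    fn default() -> Self {
        Self {
            message_key: OptionalPath::event(MESSAGE),
            host_key: OptionalPath::event(HOST),
        }
    }
}

impl Schema {
    // Accessors now surface "no key configured" as None rather than "".
    fn host_key(&self) -> Option<&Vec<String>> {
        self.host_key.as_ref()
    }
}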
/// /// This should only be used where the result will either be cached, - /// or performance isn't critical, since this requires parsing / memory allocation. + /// or performance isn't critical, since this requires memory allocation. pub fn owned_message_path(&self) -> OwnedTargetPath { - parse_target_path(self.message_key()).expect("valid message key") + self.message_key + .path + .as_ref() + .expect("valid message key") + .clone() } pub fn timestamp_key(&self) -> Option<&OwnedValuePath> { - self.timestamp_key.path.as_ref() + self.timestamp_key.as_ref().map(|key| &key.path) + } + + pub fn host_key(&self) -> Option<&OwnedValuePath> { + self.host_key.as_ref().map(|key| &key.path) + } + + pub fn source_type_key(&self) -> Option<&OwnedValuePath> { + self.source_type_key.as_ref().map(|key| &key.path) + } + + pub fn metadata_key(&self) -> Option<&OwnedValuePath> { + self.metadata_key.as_ref().map(|key| &key.path) + } + + pub fn message_key_target_path(&self) -> Option<&OwnedTargetPath> { + self.message_key.as_ref() + } + + pub fn timestamp_key_target_path(&self) -> Option<&OwnedTargetPath> { + self.timestamp_key.as_ref() } - pub fn host_key(&self) -> &str { - &self.host_key + pub fn host_key_target_path(&self) -> Option<&OwnedTargetPath> { + self.host_key.as_ref() } - pub fn source_type_key(&self) -> &str { - &self.source_type_key + pub fn source_type_key_target_path(&self) -> Option<&OwnedTargetPath> { + self.source_type_key.as_ref() } - pub fn metadata_key(&self) -> &str { - &self.metadata_key + pub fn metadata_key_target_path(&self) -> Option<&OwnedTargetPath> { + self.metadata_key.as_ref() } pub fn user_metadata_key(&self) -> &str { @@ -168,24 +197,24 @@ impl LogSchema { &self.annotations_key } - pub fn set_message_key(&mut self, v: String) { - self.message_key = v; + pub fn set_message_key(&mut self, path: Option) { + self.message_key = OptionalTargetPath { path }; } - pub fn set_timestamp_key(&mut self, v: Option) { - self.timestamp_key = OptionalValuePath { path: v }; + pub fn set_timestamp_key(&mut self, path: Option) { + self.timestamp_key = OptionalTargetPath { path }; } - pub fn set_host_key(&mut self, v: String) { - self.host_key = v; + pub fn set_host_key(&mut self, path: Option) { + self.host_key = OptionalTargetPath { path }; } - pub fn set_source_type_key(&mut self, v: String) { - self.source_type_key = v; + pub fn set_source_type_key(&mut self, path: Option) { + self.source_type_key = OptionalTargetPath { path }; } - pub fn set_metadata_key(&mut self, v: String) { - self.metadata_key = v; + pub fn set_metadata_key(&mut self, path: Option) { + self.metadata_key = OptionalTargetPath { path }; } pub fn set_user_metadata_key(&mut self, v: String) { @@ -208,35 +237,35 @@ impl LogSchema { { errors.push("conflicting values for 'log_schema.host_key' found".to_owned()); } else { - self.set_host_key(other.host_key().to_string()); + self.set_host_key(other.host_key_target_path().cloned()); } if self.message_key() != LOG_SCHEMA_DEFAULT.message_key() && self.message_key() != other.message_key() { errors.push("conflicting values for 'log_schema.message_key' found".to_owned()); } else { - self.set_message_key(other.message_key().to_string()); + self.set_message_key(other.message_key_target_path().cloned()); } if self.timestamp_key() != LOG_SCHEMA_DEFAULT.timestamp_key() && self.timestamp_key() != other.timestamp_key() { errors.push("conflicting values for 'log_schema.timestamp_key' found".to_owned()); } else { - self.set_timestamp_key(other.timestamp_key().cloned()); + 
self.set_timestamp_key(other.timestamp_key_target_path().cloned()); } if self.source_type_key() != LOG_SCHEMA_DEFAULT.source_type_key() && self.source_type_key() != other.source_type_key() { errors.push("conflicting values for 'log_schema.source_type_key' found".to_owned()); } else { - self.set_source_type_key(other.source_type_key().to_string()); + self.set_source_type_key(other.source_type_key_target_path().cloned()); } if self.metadata_key() != LOG_SCHEMA_DEFAULT.metadata_key() && self.metadata_key() != other.metadata_key() { errors.push("conflicting values for 'log_schema.metadata_key' found".to_owned()); } else { - self.set_metadata_key(other.metadata_key().to_string()); + self.set_metadata_key(other.metadata_key_target_path().cloned()); } if self.user_metadata_key() != LOG_SCHEMA_DEFAULT.user_metadata_key() && self.user_metadata_key() != other.user_metadata_key() diff --git a/lib/vector-core/src/config/mod.rs b/lib/vector-core/src/config/mod.rs index 71786155d1d8f..c8a1fa48d149b 100644 --- a/lib/vector-core/src/config/mod.rs +++ b/lib/vector-core/src/config/mod.rs @@ -476,7 +476,7 @@ impl LogNamespace { ) { self.insert_vector_metadata( log, - Some(log_schema().source_type_key()), + log_schema().source_type_key(), path!("source_type"), Bytes::from_static(source_name.as_bytes()), ); @@ -555,10 +555,10 @@ mod test { #[test] fn test_insert_standard_vector_source_metadata() { - let nested_path = "a.b.c.d"; - let mut schema = LogSchema::default(); - schema.set_source_type_key(nested_path.to_owned()); + schema.set_source_type_key(Some(OwnedTargetPath::event(owned_value_path!( + "a", "b", "c", "d" + )))); init_log_schema(schema, false); let namespace = LogNamespace::Legacy; diff --git a/lib/vector-core/src/event/discriminant.rs b/lib/vector-core/src/event/discriminant.rs index 7c1eb40863e1c..fcbd5d0fa818f 100644 --- a/lib/vector-core/src/event/discriminant.rs +++ b/lib/vector-core/src/event/discriminant.rs @@ -27,7 +27,13 @@ impl Discriminant { pub fn from_log_event(event: &LogEvent, discriminant_fields: &[impl AsRef]) -> Self { let values: Vec> = discriminant_fields .iter() - .map(|discriminant_field| event.get(discriminant_field.as_ref()).cloned()) + .map(|discriminant_field| { + event + .parse_path_and_get_value(discriminant_field.as_ref()) + .ok() + .flatten() + .cloned() + }) .collect(); Self { values } } diff --git a/lib/vector-core/src/event/estimated_json_encoded_size_of.rs b/lib/vector-core/src/event/estimated_json_encoded_size_of.rs index b671c8a817919..9bfa3bf50a8b7 100644 --- a/lib/vector-core/src/event/estimated_json_encoded_size_of.rs +++ b/lib/vector-core/src/event/estimated_json_encoded_size_of.rs @@ -87,7 +87,7 @@ impl EstimatedJsonEncodedSizeOf for Value { /// need for UTF-8 replacement characters. /// /// This is the main reason why `EstimatedJsonEncodedSizeOf` is named as is, as most other types can -/// be calculated exactly without a noticable performance penalty. +/// be calculated exactly without a noticeable performance penalty. 
impl EstimatedJsonEncodedSizeOf for str { fn estimated_json_encoded_size_of(&self) -> JsonSize { JsonSize::new(QUOTES_SIZE + self.len()) diff --git a/lib/vector-core/src/event/log_event.rs b/lib/vector-core/src/event/log_event.rs index b656ee014810a..fad0462699fbc 100644 --- a/lib/vector-core/src/event/log_event.rs +++ b/lib/vector-core/src/event/log_event.rs @@ -15,11 +15,12 @@ use lookup::lookup_v2::TargetPath; use lookup::PathPrefix; use serde::{Deserialize, Serialize, Serializer}; use vector_common::{ - internal_event::OptionalTag, + internal_event::{OptionalTag, TaggedEventsSent}, json_size::{JsonSize, NonZeroJsonSize}, - request_metadata::{EventCountTags, GetEventCountTags}, + request_metadata::GetEventCountTags, EventDataEq, }; +use vrl::path::{parse_target_path, OwnedTargetPath, PathParseError}; use super::{ estimated_json_encoded_size_of::EstimatedJsonEncodedSizeOf, @@ -31,6 +32,15 @@ use crate::config::LogNamespace; use crate::config::{log_schema, telemetry}; use crate::{event::MaybeAsLogMut, ByteSizeOf}; use lookup::{metadata_path, path}; +use once_cell::sync::Lazy; +use vrl::{event_path, owned_value_path}; + +static VECTOR_SOURCE_TYPE_PATH: Lazy> = Lazy::new(|| { + Some(OwnedTargetPath::metadata(owned_value_path!( + "vector", + "source_type" + ))) +}); #[derive(Debug, Deserialize)] struct Inner { @@ -158,9 +168,10 @@ impl LogEvent { /// valid for `LogNamespace::Legacy` pub fn from_str_legacy(msg: impl Into) -> Self { let mut log = LogEvent::default(); - log.insert(log_schema().message_key(), msg.into()); - if let Some(timestamp_key) = log_schema().timestamp_key() { - log.insert((PathPrefix::Event, timestamp_key), Utc::now()); + log.maybe_insert(log_schema().message_key_target_path(), msg.into()); + + if let Some(timestamp_key) = log_schema().timestamp_key_target_path() { + log.insert(timestamp_key, Utc::now()); } log @@ -223,7 +234,7 @@ impl EstimatedJsonEncodedSizeOf for LogEvent { } impl GetEventCountTags for LogEvent { - fn get_tags(&self) -> EventCountTags { + fn get_tags(&self) -> TaggedEventsSent { let source = if telemetry().tags().emit_source { self.metadata().source_id().cloned().into() } else { @@ -238,7 +249,7 @@ impl GetEventCountTags for LogEvent { OptionalTag::Ignored }; - EventCountTags { source, service } + TaggedEventsSent { source, service } } } @@ -292,6 +303,16 @@ impl LogEvent { self.metadata.add_finalizer(finalizer); } + /// Parse the specified `path` and if there are no parsing errors, attempt to get a reference to a value. + /// # Errors + /// Will return an error if path parsing failed. + pub fn parse_path_and_get_value( + &self, + path: impl AsRef, + ) -> Result, PathParseError> { + parse_target_path(path.as_ref()).map(|path| self.get(&path)) + } + #[allow(clippy::needless_pass_by_value)] // TargetPath is always a reference pub fn get<'a>(&self, key: impl TargetPath<'a>) -> Option<&Value> { match key.prefix() { @@ -300,19 +321,26 @@ impl LogEvent { } } + /// Retrieves the value of a field based on it's meaning. + /// This will first check if the value has previously been dropped. It is worth being + /// aware that if the field has been dropped and then somehow re-added, we still fetch + /// the dropped value here. 
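
A minimal sketch (hypothetical types) of the lookup order the new get_by_meaning doc comment describes: a value recorded as dropped in the event metadata wins over a live field resolved through the schema's meaning-to-path mapping.

use std::collections::HashMap;

struct Meta {
    dropped_fields: HashMap<String, String>, // meaning -> dropped value
    meaning_paths: HashMap<String, String>,  // meaning -> event path
}

struct Event {
    fields: HashMap<String, String>, // path -> value
    meta: Meta,
}

impl Event {
    fn get_by_meaning(&self, meaning: &str) -> Option<&String> {
        if let Some(dropped) = self.meta.dropped_fields.get(meaning) {
            // A previously dropped value takes precedence over a live field.
            Some(dropped)
        } else {
            self.meta
                .meaning_paths
                .get(meaning)
                .and_then(|path| self.fields.get(path))
        }
    }
}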
pub fn get_by_meaning(&self, meaning: impl AsRef) -> Option<&Value> { - self.metadata() - .schema_definition() - .meaning_path(meaning.as_ref()) - .and_then(|path| self.get(path)) + if let Some(dropped) = self.metadata().dropped_field(&meaning) { + Some(dropped) + } else { + self.metadata() + .schema_definition() + .meaning_path(meaning.as_ref()) + .and_then(|path| self.get(path)) + } } - // TODO(Jean): Once the event API uses `Lookup`, the allocation here can be removed. - pub fn find_key_by_meaning(&self, meaning: impl AsRef) -> Option { + /// Retrieves the target path of a field based on the specified `meaning`. + fn find_key_by_meaning(&self, meaning: impl AsRef) -> Option<&OwnedTargetPath> { self.metadata() .schema_definition() .meaning_path(meaning.as_ref()) - .map(std::string::ToString::to_string) } #[allow(clippy::needless_pass_by_value)] // TargetPath is always a reference @@ -331,6 +359,19 @@ impl LogEvent { } } + /// Parse the specified `path` and if there are no parsing errors, attempt to insert the specified `value`. + /// + /// # Errors + /// Will return an error if path parsing failed. + pub fn parse_path_and_insert( + &mut self, + path: impl AsRef, + value: impl Into, + ) -> Result, PathParseError> { + let target_path = parse_target_path(path.as_ref())?; + Ok(self.insert(&target_path, value)) + } + #[allow(clippy::needless_pass_by_value)] // TargetPath is always a reference pub fn insert<'a>( &mut self, @@ -346,6 +387,12 @@ impl LogEvent { } } + pub fn maybe_insert<'a>(&mut self, path: Option>, value: impl Into) { + if let Some(path) = path { + self.insert(path, value); + } + } + // deprecated - using this means the schema is unknown pub fn try_insert<'a>(&mut self, path: impl TargetPath<'a>, value: impl Into) { if !self.contains(path.clone()) { @@ -418,14 +465,16 @@ impl LogEvent { } /// Merge all fields specified at `fields` from `incoming` to `current`. + /// Note that `fields` containing dots and other special characters will be treated as a single segment. pub fn merge(&mut self, mut incoming: LogEvent, fields: &[impl AsRef]) { for field in fields { - let Some(incoming_val) = incoming.remove(field.as_ref()) else { - continue; + let field_path = event_path!(field.as_ref()); + let Some(incoming_val) = incoming.remove(field_path) else { + continue }; - match self.get_mut(field.as_ref()) { + match self.get_mut(field_path) { None => { - self.insert(field.as_ref(), incoming_val); + self.insert(field_path, incoming_val); } Some(current_val) => current_val.merge(incoming_val), } @@ -439,45 +488,37 @@ impl LogEvent { impl LogEvent { /// Fetches the "message" path of the event. This is either from the "message" semantic meaning (Vector namespace) /// or from the message key set on the "Global Log Schema" (Legacy namespace). - // TODO: This can eventually return a `&TargetOwnedPath` once Semantic meaning and the - // "Global Log Schema" are updated to the new path lookup code - pub fn message_path(&self) -> Option { + pub fn message_path(&self) -> Option<&OwnedTargetPath> { match self.namespace() { LogNamespace::Vector => self.find_key_by_meaning("message"), - LogNamespace::Legacy => Some(log_schema().message_key().to_owned()), + LogNamespace::Legacy => log_schema().message_key_target_path(), } } /// Fetches the "timestamp" path of the event. This is either from the "timestamp" semantic meaning (Vector namespace) /// or from the timestamp key set on the "Global Log Schema" (Legacy namespace). 
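
The maybe_insert helper added above pairs naturally with the Option-returning schema accessors: insert only when an optional, pre-parsed path is actually configured. A hedged simplification with stand-in types:

use std::collections::HashMap;

struct Event(HashMap<String, String>); // stand-in for LogEvent

impl Event {
    fn insert(&mut self, path: &str, value: impl Into<String>) {
        self.0.insert(path.to_string(), value.into());
    }

    // Skip the insert entirely when no path is configured for this key.
    fn maybe_insert(&mut self, path: Option<&str>, value: impl Into<String>) {
        if let Some(path) = path {
            self.insert(path, value);
        }
    }
}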
- // TODO: This can eventually return a `&TargetOwnedPath` once Semantic meaning and the - // "Global Log Schema" are updated to the new path lookup code - pub fn timestamp_path(&self) -> Option { + pub fn timestamp_path(&self) -> Option<&OwnedTargetPath> { match self.namespace() { LogNamespace::Vector => self.find_key_by_meaning("timestamp"), - LogNamespace::Legacy => log_schema().timestamp_key().map(ToString::to_string), + LogNamespace::Legacy => log_schema().timestamp_key_target_path(), } } /// Fetches the `host` path of the event. This is either from the "host" semantic meaning (Vector namespace) /// or from the host key set on the "Global Log Schema" (Legacy namespace). - // TODO: This can eventually return a `&TargetOwnedPath` once Semantic meaning and the - // "Global Log Schema" are updated to the new path lookup code - pub fn host_path(&self) -> Option { + pub fn host_path(&self) -> Option<&OwnedTargetPath> { match self.namespace() { LogNamespace::Vector => self.find_key_by_meaning("host"), - LogNamespace::Legacy => Some(log_schema().host_key().to_owned()), + LogNamespace::Legacy => log_schema().host_key_target_path(), } } /// Fetches the `source_type` path of the event. This is either from the `source_type` Vector metadata field (Vector namespace) /// or from the `source_type` key set on the "Global Log Schema" (Legacy namespace). - // TODO: This can eventually return a `&TargetOwnedPath` once Semantic meaning and the - // "Global Log Schema" are updated to the new path lookup code - pub fn source_type_path(&self) -> &'static str { + pub fn source_type_path(&self) -> Option<&OwnedTargetPath> { match self.namespace() { - LogNamespace::Vector => "%vector.source_type", - LogNamespace::Legacy => log_schema().source_type_key(), + LogNamespace::Vector => VECTOR_SOURCE_TYPE_PATH.as_ref(), + LogNamespace::Legacy => log_schema().source_type_key_target_path(), } } @@ -486,7 +527,9 @@ impl LogEvent { pub fn get_message(&self) -> Option<&Value> { match self.namespace() { LogNamespace::Vector => self.get_by_meaning("message"), - LogNamespace::Legacy => self.get((PathPrefix::Event, log_schema().message_key())), + LogNamespace::Legacy => log_schema() + .message_key_target_path() + .and_then(|key| self.get(key)), } } @@ -496,8 +539,8 @@ impl LogEvent { match self.namespace() { LogNamespace::Vector => self.get_by_meaning("timestamp"), LogNamespace::Legacy => log_schema() - .timestamp_key() - .and_then(|key| self.get((PathPrefix::Event, key))), + .timestamp_key_target_path() + .and_then(|key| self.get(key)), } } @@ -505,7 +548,8 @@ impl LogEvent { /// or from the timestamp key set on the "Global Log Schema" (Legacy namespace). pub fn remove_timestamp(&mut self) -> Option { self.timestamp_path() - .and_then(|key| self.remove(key.as_str())) + .cloned() + .and_then(|key| self.remove(&key)) } /// Fetches the `host` of the event. 
This is either from the "host" semantic meaning (Vector namespace) @@ -513,7 +557,9 @@ impl LogEvent { pub fn get_host(&self) -> Option<&Value> { match self.namespace() { LogNamespace::Vector => self.get_by_meaning("host"), - LogNamespace::Legacy => self.get((PathPrefix::Event, log_schema().host_key())), + LogNamespace::Legacy => log_schema() + .host_key_target_path() + .and_then(|key| self.get(key)), } } @@ -522,7 +568,9 @@ impl LogEvent { pub fn get_source_type(&self) -> Option<&Value> { match self.namespace() { LogNamespace::Vector => self.get(metadata_path!("vector", "source_type")), - LogNamespace::Legacy => self.get((PathPrefix::Event, log_schema().source_type_key())), + LogNamespace::Legacy => log_schema() + .source_type_key_target_path() + .and_then(|key| self.get(key)), } } } @@ -552,10 +600,9 @@ mod test_utils { impl From for LogEvent { fn from(message: Bytes) -> Self { let mut log = LogEvent::default(); - - log.insert(log_schema().message_key(), message); - if let Some(timestamp_key) = log_schema().timestamp_key() { - log.insert((PathPrefix::Event, timestamp_key), Utc::now()); + log.maybe_insert(log_schema().message_key_target_path(), message); + if let Some(timestamp_key) = log_schema().timestamp_key_target_path() { + log.insert(timestamp_key, Utc::now()); } log } @@ -621,6 +668,7 @@ impl TryInto for LogEvent { } } +#[cfg(any(test, feature = "test"))] impl std::ops::Index for LogEvent where T: AsRef, @@ -628,7 +676,9 @@ where type Output = Value; fn index(&self, key: T) -> &Value { - self.get(key.as_ref()) + self.parse_path_and_get_value(key.as_ref()) + .ok() + .flatten() .unwrap_or_else(|| panic!("Key is not found: {:?}", key.as_ref())) } } @@ -640,7 +690,9 @@ where { fn extend>(&mut self, iter: I) { for (k, v) in iter { - self.insert(k.as_ref(), v.into()); + if let Ok(path) = parse_target_path(k.as_ref()) { + self.insert(&path, v.into()); + } } } } @@ -663,6 +715,24 @@ impl Serialize for LogEvent { } } +// Tracing owned target paths used for tracing to log event conversions. +struct TracingTargetPaths { + pub(crate) timestamp: OwnedTargetPath, + pub(crate) kind: OwnedTargetPath, + pub(crate) module_path: OwnedTargetPath, + pub(crate) level: OwnedTargetPath, + pub(crate) target: OwnedTargetPath, +} + +/// Lazily initialized singleton. 
+static TRACING_TARGET_PATHS: Lazy = Lazy::new(|| TracingTargetPaths { + timestamp: OwnedTargetPath::event(owned_value_path!("timestamp")), + kind: OwnedTargetPath::event(owned_value_path!("metadata", "kind")), + level: OwnedTargetPath::event(owned_value_path!("metadata", "level")), + module_path: OwnedTargetPath::event(owned_value_path!("metadata", "module_path")), + target: OwnedTargetPath::event(owned_value_path!("metadata", "target")), +}); + impl From<&tracing::Event<'_>> for LogEvent { fn from(event: &tracing::Event<'_>) -> Self { let now = chrono::Utc::now(); @@ -670,11 +740,11 @@ impl From<&tracing::Event<'_>> for LogEvent { event.record(&mut maker); let mut log = maker; - log.insert("timestamp", now); + log.insert(&TRACING_TARGET_PATHS.timestamp, now); let meta = event.metadata(); log.insert( - "metadata.kind", + &TRACING_TARGET_PATHS.kind, if meta.is_event() { Value::Bytes("event".to_string().into()) } else if meta.is_span() { @@ -683,42 +753,42 @@ impl From<&tracing::Event<'_>> for LogEvent { Value::Null }, ); - log.insert("metadata.level", meta.level().to_string()); + log.insert(&TRACING_TARGET_PATHS.level, meta.level().to_string()); log.insert( - "metadata.module_path", + &TRACING_TARGET_PATHS.module_path, meta.module_path() .map_or(Value::Null, |mp| Value::Bytes(mp.to_string().into())), ); - log.insert("metadata.target", meta.target().to_string()); - + log.insert(&TRACING_TARGET_PATHS.target, meta.target().to_string()); log } } +/// Note that `tracing::field::Field` containing dots and other special characters will be treated as a single segment. impl tracing::field::Visit for LogEvent { fn record_str(&mut self, field: &tracing::field::Field, value: &str) { - self.insert(field.name(), value.to_string()); + self.insert(event_path!(field.name()), value.to_string()); } fn record_debug(&mut self, field: &tracing::field::Field, value: &dyn Debug) { - self.insert(field.name(), format!("{value:?}")); + self.insert(event_path!(field.name()), format!("{value:?}")); } fn record_i64(&mut self, field: &tracing::field::Field, value: i64) { - self.insert(field.name(), value); + self.insert(event_path!(field.name()), value); } fn record_u64(&mut self, field: &tracing::field::Field, value: u64) { - let field = field.name(); + let field_path = event_path!(field.name()); let converted: Result = value.try_into(); match converted { - Ok(value) => self.insert(field, value), - Err(_) => self.insert(field, value.to_string()), + Ok(value) => self.insert(field_path, value), + Err(_) => self.insert(field_path, value.to_string()), }; } fn record_bool(&mut self, field: &tracing::field::Field, value: bool) { - self.insert(field.name(), value); + self.insert(event_path!(field.name()), value); } } diff --git a/lib/vector-core/src/event/metadata.rs b/lib/vector-core/src/event/metadata.rs index d86884be7582c..f577147e11ded 100644 --- a/lib/vector-core/src/event/metadata.rs +++ b/lib/vector-core/src/event/metadata.rs @@ -46,6 +46,14 @@ pub struct EventMetadata { /// TODO(Jean): must not skip serialization to track schemas across restarts. #[serde(default = "default_schema_definition", skip)] schema_definition: Arc, + + /// A store of values that may be dropped during the encoding process but may be needed + /// later on. The map is indexed by meaning. + /// Currently this is just used for the `service`. If the service field is dropped by `only_fields` + /// we need to ensure it is still available later on for emitting metrics tagged by the service. 
+ /// This field could almost be keyed by `&'static str`, but because it needs to be deserializable + /// we have to use `String`. + dropped_fields: BTreeMap, } fn default_metadata_value() -> Value { @@ -123,6 +131,19 @@ impl EventMetadata { pub fn set_splunk_hec_token(&mut self, secret: Arc) { self.secrets.insert(SPLUNK_HEC_TOKEN, secret); } + + /// Adds the value to the dropped fields list. + /// There is currently no way to remove a field from this list, so if a field is dropped + /// and then the field is re-added with a new value - the dropped value will still be + /// retrieved. + pub fn add_dropped_field(&mut self, meaning: String, value: Value) { + self.dropped_fields.insert(meaning, value); + } + + /// Fetches the dropped field by meaning. + pub fn dropped_field(&self, meaning: impl AsRef) -> Option<&Value> { + self.dropped_fields.get(meaning.as_ref()) + } } impl Default for EventMetadata { @@ -134,6 +155,7 @@ impl Default for EventMetadata { schema_definition: default_schema_definition(), source_id: None, upstream_id: None, + dropped_fields: BTreeMap::new(), } } } diff --git a/lib/vector-core/src/event/metric/mezmo/vector.rs b/lib/vector-core/src/event/metric/mezmo/vector.rs index 54bfb7e3f85ad..5421a1022fdfe 100644 --- a/lib/vector-core/src/event/metric/mezmo/vector.rs +++ b/lib/vector-core/src/event/metric/mezmo/vector.rs @@ -249,13 +249,13 @@ pub fn to_metric(log: &LogEvent) -> Result { let metadata = log.metadata().clone(); let root = log - .get(log_schema().message_key()) + .get(log_schema().message_key_target_path().unwrap()) .ok_or_else(|| TransformError::FieldNotFound { - field: log_schema().message_key().into(), + field: log_schema().message_key().unwrap().to_string(), })? .as_object() .ok_or_else(|| TransformError::FieldInvalidType { - field: log_schema().message_key().into(), + field: log_schema().message_key().unwrap().to_string(), })?; let name = diff --git a/lib/vector-core/src/event/metric/mod.rs b/lib/vector-core/src/event/metric/mod.rs index 478310428faf1..b19fbe948c381 100644 --- a/lib/vector-core/src/event/metric/mod.rs +++ b/lib/vector-core/src/event/metric/mod.rs @@ -12,9 +12,9 @@ use std::{ use chrono::{DateTime, Utc}; use vector_common::{ - internal_event::OptionalTag, + internal_event::{OptionalTag, TaggedEventsSent}, json_size::JsonSize, - request_metadata::{EventCountTags, GetEventCountTags}, + request_metadata::GetEventCountTags, EventDataEq, }; use vector_config::configurable_component; @@ -499,7 +499,7 @@ impl Finalizable for Metric { } impl GetEventCountTags for Metric { - fn get_tags(&self) -> EventCountTags { + fn get_tags(&self) -> TaggedEventsSent { let source = if telemetry().tags().emit_source { self.metadata().source_id().cloned().into() } else { @@ -516,7 +516,7 @@ impl GetEventCountTags for Metric { OptionalTag::Ignored }; - EventCountTags { source, service } + TaggedEventsSent { source, service } } } diff --git a/lib/vector-core/src/event/mod.rs b/lib/vector-core/src/event/mod.rs index 9547f58dc5ed3..5c385df1fd913 100644 --- a/lib/vector-core/src/event/mod.rs +++ b/lib/vector-core/src/event/mod.rs @@ -20,11 +20,8 @@ use serde::{Deserialize, Serialize}; pub use trace::TraceEvent; use vector_buffers::EventCount; use vector_common::{ - config::ComponentKey, - finalization, - json_size::JsonSize, - request_metadata::{EventCountTags, GetEventCountTags}, - EventDataEq, + config::ComponentKey, finalization, internal_event::TaggedEventsSent, json_size::JsonSize, + request_metadata::GetEventCountTags, EventDataEq, }; pub use vrl::value::Value; 
#[cfg(feature = "vrl")] @@ -97,7 +94,7 @@ impl Finalizable for Event { } impl GetEventCountTags for Event { - fn get_tags(&self) -> EventCountTags { + fn get_tags(&self) -> TaggedEventsSent { match self { Event::Log(log) => log.get_tags(), Event::Metric(metric) => metric.get_tags(), diff --git a/lib/vector-core/src/event/test/size_of.rs b/lib/vector-core/src/event/test/size_of.rs index 89770273dbadc..522b0f0520fe4 100644 --- a/lib/vector-core/src/event/test/size_of.rs +++ b/lib/vector-core/src/event/test/size_of.rs @@ -115,13 +115,12 @@ fn log_operation_maintains_size() { match action { Action::InsertFlat { key, value } => { let new_value_sz = value.size_of(); - let old_value_sz = log_event - .get((PathPrefix::Event, path!(key.as_str()))) - .map_or(0, ByteSizeOf::size_of); + let target_path = (PathPrefix::Event, path!(key.as_str())); + let old_value_sz = log_event.get(target_path).map_or(0, ByteSizeOf::size_of); if !log_event.contains(key.as_str()) { current_size += key.size_of(); } - log_event.insert((PathPrefix::Event, path!(&key)), value); + log_event.insert(target_path, value); current_size -= old_value_sz; current_size += new_value_sz; } diff --git a/lib/vector-core/src/event/trace.rs b/lib/vector-core/src/event/trace.rs index 3885b50b9f13d..120c6c55f490b 100644 --- a/lib/vector-core/src/event/trace.rs +++ b/lib/vector-core/src/event/trace.rs @@ -4,10 +4,10 @@ use lookup::lookup_v2::TargetPath; use serde::{Deserialize, Serialize}; use vector_buffers::EventCount; use vector_common::{ - json_size::JsonSize, - request_metadata::{EventCountTags, GetEventCountTags}, + internal_event::TaggedEventsSent, json_size::JsonSize, request_metadata::GetEventCountTags, EventDataEq, }; +use vrl::path::PathParseError; use super::{ BatchNotifier, EstimatedJsonEncodedSizeOf, EventFinalizer, EventFinalizers, EventMetadata, @@ -72,25 +72,46 @@ impl TraceEvent { self.0.as_map().expect("inner value must be a map") } + /// Parse the specified `path` and if there are no parsing errors, attempt to get a reference to a value. + /// # Errors + /// Will return an error if path parsing failed. 
+ pub fn parse_path_and_get_value( + &self, + path: impl AsRef, + ) -> Result, PathParseError> { + self.0.parse_path_and_get_value(path) + } + #[allow(clippy::needless_pass_by_value)] // TargetPath is always a reference pub fn get<'a>(&self, key: impl TargetPath<'a>) -> Option<&Value> { self.0.get(key) } - pub fn get_mut(&mut self, key: impl AsRef) -> Option<&mut Value> { - self.0.get_mut(key.as_ref()) + pub fn get_mut<'a>(&mut self, key: impl TargetPath<'a>) -> Option<&mut Value> { + self.0.get_mut(key) } - pub fn contains(&self, key: impl AsRef) -> bool { - self.0.contains(key.as_ref()) + pub fn contains<'a>(&self, key: impl TargetPath<'a>) -> bool { + self.0.contains(key) } - pub fn insert( + pub fn insert<'a>( &mut self, - key: impl AsRef, + key: impl TargetPath<'a>, value: impl Into + Debug, ) -> Option { - self.0.insert(key.as_ref(), value.into()) + self.0.insert(key, value.into()) + } + + pub fn maybe_insert<'a, F: FnOnce() -> Value>( + &mut self, + path: Option>, + value_callback: F, + ) -> Option { + if let Some(path) = path { + return self.0.insert(path, value_callback()); + } + None } } @@ -149,7 +170,7 @@ impl AsMut for TraceEvent { } impl GetEventCountTags for TraceEvent { - fn get_tags(&self) -> EventCountTags { + fn get_tags(&self) -> TaggedEventsSent { self.0.get_tags() } } diff --git a/lib/vector-core/src/event/vrl_target.rs b/lib/vector-core/src/event/vrl_target.rs index 782aa7ee2ae03..d917d52a66e15 100644 --- a/lib/vector-core/src/event/vrl_target.rs +++ b/lib/vector-core/src/event/vrl_target.rs @@ -52,6 +52,12 @@ pub struct TargetIter { _marker: PhantomData, } +fn create_log_event(value: Value, metadata: EventMetadata) -> LogEvent { + let mut log = LogEvent::new_with_metadata(metadata); + log.maybe_insert(log_schema().message_key_target_path(), value); + log +} + impl Iterator for TargetIter { type Item = Event; @@ -59,11 +65,7 @@ impl Iterator for TargetIter { self.iter.next().map(|v| { match v { value @ Value::Object(_) => LogEvent::from_parts(value, self.metadata.clone()), - value => { - let mut log = LogEvent::new_with_metadata(self.metadata.clone()); - log.insert(log_schema().message_key(), value); - log - } + value => create_log_event(value, self.metadata.clone()), } .into() }) @@ -79,11 +81,7 @@ impl Iterator for TargetIter { value @ Value::Object(_) => { TraceEvent::from(LogEvent::from_parts(value, self.metadata.clone())) } - value => { - let mut log = LogEvent::new_with_metadata(self.metadata.clone()); - log.insert(log_schema().message_key(), value); - TraceEvent::from(log) - } + value => TraceEvent::from(create_log_event(value, self.metadata.clone())), } .into() }) @@ -150,11 +148,7 @@ impl VrlTarget { LogNamespace::Vector => { TargetEvents::One(LogEvent::from_parts(v, metadata).into()) } - LogNamespace::Legacy => { - let mut log = LogEvent::new_with_metadata(metadata); - log.insert(log_schema().message_key(), v); - TargetEvents::One(log.into()) - } + LogNamespace::Legacy => TargetEvents::One(create_log_event(v, metadata).into()), }, }, VrlTarget::Trace(value, metadata) => match value { @@ -169,11 +163,7 @@ impl VrlTarget { _marker: PhantomData, }), - v => { - let mut log = LogEvent::new_with_metadata(metadata); - log.insert(log_schema().message_key(), v); - TargetEvents::One(log.into()) - } + v => TargetEvents::One(create_log_event(v, metadata).into()), }, VrlTarget::Metric { metric, .. 
} => TargetEvents::One(Event::Metric(metric)), } @@ -202,22 +192,24 @@ fn move_field_definitions_into_message(mut definition: Definition) -> Definition message.remove_array(); if !message.is_never() { - // We need to add the given message type to a field called `message` - // in the event. - let message = Kind::object(Collection::from(BTreeMap::from([( - log_schema().message_key().into(), - message, - )]))); - - definition.event_kind_mut().remove_bytes(); - definition.event_kind_mut().remove_integer(); - definition.event_kind_mut().remove_float(); - definition.event_kind_mut().remove_boolean(); - definition.event_kind_mut().remove_timestamp(); - definition.event_kind_mut().remove_regex(); - definition.event_kind_mut().remove_null(); - - *definition.event_kind_mut() = definition.event_kind().union(message); + if let Some(message_key) = log_schema().message_key() { + // We need to add the given message type to a field called `message` + // in the event. + let message = Kind::object(Collection::from(BTreeMap::from([( + message_key.to_string().into(), + message, + )]))); + + definition.event_kind_mut().remove_bytes(); + definition.event_kind_mut().remove_integer(); + definition.event_kind_mut().remove_float(); + definition.event_kind_mut().remove_boolean(); + definition.event_kind_mut().remove_timestamp(); + definition.event_kind_mut().remove_regex(); + definition.event_kind_mut().remove_null(); + + *definition.event_kind_mut() = definition.event_kind().union(message); + } } definition diff --git a/lib/vector-core/src/schema/definition.rs b/lib/vector-core/src/schema/definition.rs index a3c5afc034cb4..421b0b91a043a 100644 --- a/lib/vector-core/src/schema/definition.rs +++ b/lib/vector-core/src/schema/definition.rs @@ -1,7 +1,7 @@ use std::collections::{BTreeMap, BTreeSet}; use crate::config::{log_schema, LegacyKey, LogNamespace}; -use lookup::lookup_v2::{parse_value_path, TargetPath}; +use lookup::lookup_v2::TargetPath; use lookup::{owned_value_path, OwnedTargetPath, OwnedValuePath, PathPrefix}; use vrl::value::{kind::Collection, Kind}; @@ -144,9 +144,7 @@ impl Definition { #[must_use] pub fn with_standard_vector_source_metadata(self) -> Self { self.with_vector_metadata( - parse_value_path(log_schema().source_type_key()) - .ok() - .as_ref(), + log_schema().source_type_key(), &owned_value_path!("source_type"), Kind::bytes(), None, diff --git a/lib/vector-core/src/schema/meaning.rs b/lib/vector-core/src/schema/meaning.rs new file mode 100644 index 0000000000000..ab766b0986924 --- /dev/null +++ b/lib/vector-core/src/schema/meaning.rs @@ -0,0 +1,17 @@ +//! Constants for commonly used semantic meanings. + +/// The service typically represents the application that generated the event. +pub const SERVICE: &str = "service"; + +/// The main text message of the event. +pub const MESSAGE: &str = "message"; + +/// The main timestamp of the event. +pub const TIMESTAMP: &str = "timestamp"; + +/// The hostname of the machine where the event was generated. 
+pub const HOST: &str = "host"; + +pub const SOURCE: &str = "source"; +pub const SEVERITY: &str = "severity"; +pub const TRACE_ID: &str = "trace_id"; diff --git a/lib/vector-core/src/schema/mod.rs b/lib/vector-core/src/schema/mod.rs index 96f6d99442fa8..2d1c01b8d281f 100644 --- a/lib/vector-core/src/schema/mod.rs +++ b/lib/vector-core/src/schema/mod.rs @@ -1,4 +1,5 @@ mod definition; +pub mod meaning; mod requirement; pub use definition::Definition; diff --git a/lib/vector-core/src/schema/requirement.rs b/lib/vector-core/src/schema/requirement.rs index 6b3721d606b21..b42e97009ab8d 100644 --- a/lib/vector-core/src/schema/requirement.rs +++ b/lib/vector-core/src/schema/requirement.rs @@ -14,7 +14,7 @@ use super::Definition; #[derive(Debug, Clone, PartialEq)] pub struct Requirement { /// Semantic meanings configured for this requirement. - meaning: BTreeMap<&'static str, SemanticMeaning>, + meaning: BTreeMap, } /// The semantic meaning of an event. @@ -52,7 +52,7 @@ impl Requirement { /// Add a restriction to the schema. #[must_use] - pub fn required_meaning(mut self, meaning: &'static str, kind: Kind) -> Self { + pub fn required_meaning(mut self, meaning: impl Into, kind: Kind) -> Self { self.insert_meaning(meaning, kind, false); self } @@ -63,14 +63,14 @@ impl Requirement { /// specified meaning defined, but invalid for that meaning to be defined, but its [`Kind`] not /// matching the configured expectation. #[must_use] - pub fn optional_meaning(mut self, meaning: &'static str, kind: Kind) -> Self { + pub fn optional_meaning(mut self, meaning: impl Into, kind: Kind) -> Self { self.insert_meaning(meaning, kind, true); self } - fn insert_meaning(&mut self, identifier: &'static str, kind: Kind, optional: bool) { + fn insert_meaning(&mut self, identifier: impl Into, kind: Kind, optional: bool) { let meaning = SemanticMeaning { kind, optional }; - self.meaning.insert(identifier, meaning); + self.meaning.insert(identifier.into(), meaning); } /// Validate the provided [`Definition`] against the current requirement. @@ -97,7 +97,10 @@ impl Requirement { // Check if we're dealing with an invalid meaning, meaning the definition has a single // meaning identifier pointing to multiple paths. if let Some(paths) = definition.invalid_meaning(identifier).cloned() { - errors.push(ValidationError::MeaningDuplicate { identifier, paths }); + errors.push(ValidationError::MeaningDuplicate { + identifier: identifier.clone(), + paths, + }); continue; } @@ -118,14 +121,16 @@ impl Requirement { // The semantic meaning kind does not match the expected // kind, so we can't use it in the sink. errors.push(ValidationError::MeaningKind { - identifier, + identifier: identifier.clone(), want: req_meaning.kind.clone(), got: definition_kind, }); } } None if !req_meaning.optional => { - errors.push(ValidationError::MeaningMissing { identifier }); + errors.push(ValidationError::MeaningMissing { + identifier: identifier.clone(), + }); } _ => {} } @@ -176,18 +181,18 @@ impl std::fmt::Display for ValidationErrors { #[allow(clippy::enum_variant_names)] pub enum ValidationError { /// A required semantic meaning is missing. - MeaningMissing { identifier: &'static str }, + MeaningMissing { identifier: String }, /// A semantic meaning has an invalid `[Kind]`. MeaningKind { - identifier: &'static str, + identifier: String, want: Kind, got: Kind, }, /// A semantic meaning is pointing to multiple paths. 
MeaningDuplicate { - identifier: &'static str, + identifier: String, paths: BTreeSet, }, } @@ -301,7 +306,9 @@ mod tests { TestCase { requirement: Requirement::empty().required_meaning("foo", Kind::any()), definition: Definition::default_for_namespace(&[LogNamespace::Vector].into()), - errors: vec![ValidationError::MeaningMissing { identifier: "foo" }], + errors: vec![ValidationError::MeaningMissing { + identifier: "foo".into(), + }], }, ), ( @@ -312,8 +319,12 @@ mod tests { .required_meaning("bar", Kind::any()), definition: Definition::default_for_namespace(&[LogNamespace::Vector].into()), errors: vec![ - ValidationError::MeaningMissing { identifier: "bar" }, - ValidationError::MeaningMissing { identifier: "foo" }, + ValidationError::MeaningMissing { + identifier: "bar".into(), + }, + ValidationError::MeaningMissing { + identifier: "foo".into(), + }, ], }, ), @@ -332,7 +343,9 @@ mod tests { .optional_meaning("foo", Kind::any()) .required_meaning("bar", Kind::any()), definition: Definition::default_for_namespace(&[LogNamespace::Vector].into()), - errors: vec![ValidationError::MeaningMissing { identifier: "bar" }], + errors: vec![ValidationError::MeaningMissing { + identifier: "bar".into(), + }], }, ), ( @@ -342,7 +355,7 @@ mod tests { definition: Definition::default_for_namespace(&[LogNamespace::Vector].into()) .with_event_field(&owned_value_path!("foo"), Kind::integer(), Some("foo")), errors: vec![ValidationError::MeaningKind { - identifier: "foo", + identifier: "foo".into(), want: Kind::boolean(), got: Kind::integer(), }], @@ -355,7 +368,7 @@ mod tests { definition: Definition::default_for_namespace(&[LogNamespace::Vector].into()) .with_event_field(&owned_value_path!("foo"), Kind::integer(), Some("foo")), errors: vec![ValidationError::MeaningKind { - identifier: "foo", + identifier: "foo".into(), want: Kind::boolean(), got: Kind::integer(), }], @@ -376,7 +389,7 @@ mod tests { ), ), errors: vec![ValidationError::MeaningDuplicate { - identifier: "foo", + identifier: "foo".into(), paths: BTreeSet::from([ parse_target_path("foo").unwrap(), parse_target_path("bar").unwrap(), diff --git a/lib/vector-core/src/stream/driver.rs b/lib/vector-core/src/stream/driver.rs index 6ff23014c96d5..1a1ce79e1b27a 100644 --- a/lib/vector-core/src/stream/driver.rs +++ b/lib/vector-core/src/stream/driver.rs @@ -99,7 +99,7 @@ where pin!(batched_input); let bytes_sent = protocol.map(|protocol| register(BytesSent { protocol })); - let events_sent = RegisteredEventCache::default(); + let events_sent = RegisteredEventCache::new(()); loop { // Core behavior of the loop: @@ -203,7 +203,7 @@ where finalizers: EventFinalizers, event_count: usize, bytes_sent: &Option>, - events_sent: &RegisteredEventCache, + events_sent: &RegisteredEventCache<(), TaggedEventsSent>, ) { match result { Err(error) => { diff --git a/lib/vector-core/src/usage_metrics/mod.rs b/lib/vector-core/src/usage_metrics/mod.rs index af1c492ca0b0f..965c67cefb0b4 100644 --- a/lib/vector-core/src/usage_metrics/mod.rs +++ b/lib/vector-core/src/usage_metrics/mod.rs @@ -441,7 +441,9 @@ fn get_size_and_profile(array: &EventArray) -> UsageProfileValue { for log_event in a { if let Some(fields) = log_event.as_map() { // Account for the value of ".message" and ".meta" - let size = fields.get(log_schema().message_key()).map_or(0, value_size) + let size = fields + .get(&log_schema().message_key().unwrap().to_string()) + .map_or(0, value_size) + fields .get(log_schema().user_metadata_key()) .map_or(0, value_size); @@ -1026,7 +1028,7 @@ mod tests { let mut event_map: 
BTreeMap = BTreeMap::new(); event_map.insert("this_is_ignored".into(), 1u8.into()); event_map.insert("another_ignored".into(), 1.into()); - event_map.insert(log_schema().message_key().into(), 9.into()); + event_map.insert(log_schema().message_key().unwrap().to_string(), 9.into()); let event: LogEvent = event_map.into(); let usage_profile = get_size_and_profile(&event.into()); assert_eq!( @@ -1039,7 +1041,10 @@ mod tests { fn get_size_and_profile_log_message_and_meta_test() { let mut event_map: BTreeMap = BTreeMap::new(); event_map.insert("this_is_ignored".into(), 2.into()); - event_map.insert(log_schema().message_key().into(), "hello ".into()); + event_map.insert( + log_schema().message_key().unwrap().to_string(), + "hello ".into(), + ); event_map.insert(log_schema().user_metadata_key().into(), "world".into()); let event: LogEvent = event_map.into(); assert_eq!( @@ -1057,7 +1062,10 @@ mod tests { nested_map.insert("prop2".into(), 1u8.into()); nested_map.insert("prop3".into(), 1i32.into()); nested_map.insert("prop4".into(), "abcd".into()); - event_map.insert(log_schema().message_key().into(), Value::from(nested_map)); + event_map.insert( + log_schema().message_key().unwrap().to_string(), + Value::from(nested_map), + ); let event: LogEvent = event_map.into(); assert_eq!( get_size_and_profile(&event.into()).total_size, diff --git a/lib/vector-lookup/Cargo.toml b/lib/vector-lookup/Cargo.toml index a159f31561d0f..986f0317a31d6 100644 --- a/lib/vector-lookup/Cargo.toml +++ b/lib/vector-lookup/Cargo.toml @@ -7,7 +7,7 @@ publish = false license = "MPL-2.0" [dependencies] -serde = { version = "1.0.164", default-features = false, features = ["derive", "alloc"] } +serde = { version = "1.0.183", default-features = false, features = ["derive", "alloc"] } vector-config = { path = "../vector-config" } vector-config-macros = { path = "../vector-config-macros" } vrl.workspace = true diff --git a/lib/vector-lookup/src/lookup_v2/optional_path.rs b/lib/vector-lookup/src/lookup_v2/optional_path.rs index 9328aa8a2f138..de5f5d9d67ac0 100644 --- a/lib/vector-lookup/src/lookup_v2/optional_path.rs +++ b/lib/vector-lookup/src/lookup_v2/optional_path.rs @@ -1,4 +1,6 @@ use vector_config::configurable_component; +use vrl::owned_value_path; +use vrl::path::PathPrefix; use crate::lookup_v2::PathParseError; use crate::{OwnedTargetPath, OwnedValuePath}; @@ -15,6 +17,25 @@ impl OptionalTargetPath { pub fn none() -> Self { Self { path: None } } + + pub fn event(path: &str) -> Self { + Self { + path: Some(OwnedTargetPath { + prefix: PathPrefix::Event, + path: owned_value_path!(path), + }), + } + } + + pub fn from(prefix: PathPrefix, path: Option) -> Self { + Self { + path: path.map(|path| OwnedTargetPath { prefix, path }), + } + } + + pub fn as_ref(&self) -> Option<&OwnedTargetPath> { + self.path.as_ref() + } } impl TryFrom for OptionalTargetPath { @@ -56,6 +77,12 @@ impl OptionalValuePath { pub fn none() -> Self { Self { path: None } } + + pub fn new(path: &str) -> Self { + Self { + path: Some(owned_value_path!(path)), + } + } } impl TryFrom for OptionalValuePath { @@ -84,3 +111,11 @@ impl From for OptionalValuePath { Self { path: Some(path) } } } + +impl From> for OptionalValuePath { + fn from(value: Option) -> Self { + value.map_or(OptionalValuePath::none(), |path| { + OptionalValuePath::from(path) + }) + } +} diff --git a/lib/vector-vrl/cli/Cargo.toml b/lib/vector-vrl/cli/Cargo.toml index 106e430355c56..f67b06f4e1db4 100644 --- a/lib/vector-vrl/cli/Cargo.toml +++ b/lib/vector-vrl/cli/Cargo.toml @@ -7,6 +7,6 @@ publish = 
false license = "MPL-2.0" [dependencies] -clap = { version = "4.1.14", features = ["derive"] } +clap = { version = "4.3.21", features = ["derive"] } vector-vrl-functions = { path = "../functions" } vrl.workspace = true diff --git a/lib/vector-vrl/functions/src/set_semantic_meaning.rs b/lib/vector-vrl/functions/src/set_semantic_meaning.rs index 26e61c1671f59..f1b53065d96ed 100644 --- a/lib/vector-vrl/functions/src/set_semantic_meaning.rs +++ b/lib/vector-vrl/functions/src/set_semantic_meaning.rs @@ -62,8 +62,7 @@ impl Function for SetSemanticMeaning { let query = arguments.required_query("target")?; let meaning = arguments - .required_literal("meaning")? - .to_value() + .required_literal("meaning", state)? .try_bytes_utf8_lossy() .expect("meaning not bytes") .into_owned(); diff --git a/lib/vector-vrl/tests/Cargo.toml b/lib/vector-vrl/tests/Cargo.toml index b67b8afa63a4a..0a0ddb25d56ef 100644 --- a/lib/vector-vrl/tests/Cargo.toml +++ b/lib/vector-vrl/tests/Cargo.toml @@ -13,7 +13,7 @@ vector-vrl-functions = { path = "../../vector-vrl/functions" } ansi_term = "0.12" chrono = "0.4" chrono-tz = "0.8" -clap = { version = "4.1.14", features = ["derive"] } +clap = { version = "4.3.21", features = ["derive"] } glob = "0.3" prettydiff = "0.6" regex = "1" @@ -22,7 +22,7 @@ serde_json = "1" tracing-subscriber = { version = "0.3.17", default-features = false, features = ["fmt"] } [target.'cfg(not(target_env = "msvc"))'.dependencies] -tikv-jemallocator = { version = "0.5.0" } +tikv-jemallocator = { version = "0.5.4" } [features] default = [] diff --git a/lib/vector-vrl/web-playground/Cargo.toml b/lib/vector-vrl/web-playground/Cargo.toml index b951a29661226..d250fe905d37e 100644 --- a/lib/vector-vrl/web-playground/Cargo.toml +++ b/lib/vector-vrl/web-playground/Cargo.toml @@ -13,7 +13,7 @@ wasm-bindgen = "0.2" vrl.workspace = true serde = { version = "1.0", features = ["derive"] } serde-wasm-bindgen = "0.5" -gloo-utils = { version = "0.1", features = ["serde"] } +gloo-utils = { version = "0.2", features = ["serde"] } getrandom = { version = "0.2", features = ["js"] } vector-vrl-functions = { path = "../functions" } enrichment = { path = "../../enrichment" } diff --git a/lib/vector-vrl/web-playground/README.md b/lib/vector-vrl/web-playground/README.md index 9354b77321205..11faa7a1c172a 100644 --- a/lib/vector-vrl/web-playground/README.md +++ b/lib/vector-vrl/web-playground/README.md @@ -13,13 +13,13 @@ To build the project we need to use `wasm-pack`. This compiles our Rust code to WebAssembly which can then be used within the browser. Install it by running: ```shell -cargo install --version 0.10.3 wasm-pack +cargo install --locked --version 0.10.3 wasm-pack ``` After installing `wasm-pack` we must compile our project by running: ```shell -wasm-pack build --target web --out-dir public +wasm-pack build --target web --out-dir public/pkg ``` Notice the `public` directory was populated with `.wasm`, and `.js`, @@ -30,6 +30,8 @@ For more information on Rust and WebAssembly please visit [the mozilla docs][mozilla-wasm-rust-docs] or [the Rust book wasm chapter][rust-book-wasm] +## Run locally + The `src/lib.rs` file is the entry point of the `web-playground` crate. This file is necessary so we can use the `run_vrl()` function in the browser. Notice our `index.html` imports the VRL wasm module from `./vrl_web_playground.js` @@ -39,6 +41,7 @@ the web browser console. 
To see this in action we host `index.html` locally, for example by running: ```shell +cd public python3 -m http.server ``` @@ -65,7 +68,42 @@ but they will either error (enrichment functions) or abort (all the others) at r - `get_enrichment_table_record` Functions from VRL stdlib that are currently not supported can be found -with this [issue filter][vrl-wasm-unsupported-filter] +with this [issue filter][vrl-wasm-unsupported-filter]. + +### macOS Troubleshooting + +If you are getting compilation errors on macOS here are some things to check: + +```shell +xcode-select -p +# Example: '/Library/Developer/CommandLineTools +# To change this use: xcode-select -s +``` + +You can clean and reinstall with: + +```shell +rm -rf /Library/Developer/CommandLineTools # might require sudo elevation +xcode-select --install +``` + +Check your `llvm` installation and ensure that there are no conflicting installations. Check that the following command returns the expected version: + +```shell +clang --version +# Example: +# Homebrew clang version 16.0.6 +# Target: arm64-apple-darwin22.5.0 +# Thread model: posix +# InstalledDir: /opt/homebrew/opt/llvm/bin +``` + +The output of the following command should contain `WebAssembly`: + +```shell +llvm-config --targets-built # WebAssembly should be in the results +# Example: AArch64 WebAssembly +``` ## Examples diff --git a/lib/vector-vrl/web-playground/netlify.toml b/lib/vector-vrl/web-playground/netlify.toml index 3c9447d330e58..824b50e04972f 100644 --- a/lib/vector-vrl/web-playground/netlify.toml +++ b/lib/vector-vrl/web-playground/netlify.toml @@ -11,7 +11,7 @@ publish = "public/" # Default build command. - command = "bash ../../../scripts/ensure-wasm-pack-installed.sh && wasm-pack build --target web --out-dir public" + command = "bash ../../../scripts/ensure-wasm-pack-installed.sh && wasm-pack build --target web --out-dir public/pkg" # Ignore everything except the base directory and changes to vector/lib/ ignore = "git diff --quiet $CACHED_COMMIT_REF $COMMIT_REF . ../../../lib" diff --git a/lib/vector-vrl/web-playground/public/.gitignore b/lib/vector-vrl/web-playground/public/.gitignore deleted file mode 100644 index de695ce7d35fc..0000000000000 --- a/lib/vector-vrl/web-playground/public/.gitignore +++ /dev/null @@ -1 +0,0 @@ -vector_vrl_web_playground** diff --git a/lib/vector-vrl/web-playground/public/README.md b/lib/vector-vrl/web-playground/public/README.md deleted file mode 100644 index b042c0f4a26db..0000000000000 --- a/lib/vector-vrl/web-playground/public/README.md +++ /dev/null @@ -1,146 +0,0 @@ -# VRL WASM Web Playground - -This directory houses the exposed VRL function to WASM `run_vrl()` used to -power [Vector Remap Language Playground][vrl-playground], or **VRL Playground** -for short. Although there is already a [local REPL][vrl-repl] supported for -use within the terminal, this playground will support running VRL in the web -browser and test input via uploading event files, or specifying an event via -a text input field. - -## Setup - -To build the project we need to use `wasm-pack`. This compiles our Rust code -to WebAssembly which can then be used within the browser. 
Install it by running: - -```shell -cargo install --version 0.10.3 wasm-pack -``` - -After installing `wasm-pack` we must compile our project by running: - -```shell -wasm-pack build --target web --out-dir public -``` - -Notice the `public` directory was populated with `.wasm`, and `.js`, -files these will be used by our `index.html` to run the `run_vrl()` -function originally written in Rust. - -For more information on Rust and WebAssembly please visit -[the mozilla docs][mozilla-wasm-rust-docs] or -[the Rust book wasm chapter][rust-book-wasm] - -The `src/lib.rs` file is the entry point of the `web-playground` crate. -This file is necessary so we can use the `run_vrl()` function in the browser. -Notice our `index.html` imports the VRL wasm module from `./vrl_web_playground.js` -and sets the `window.run_vrl` function so that we can test VRL within -the web browser console. - -To see this in action we host `index.html` locally, for example by running: - -```shell -python3 -m http.server -``` - -Remember to be in the `public` directory where index.html is located for the -relative paths specified in `index.html` to work. -We should also be able to open the `index.html` file in chrome, or use Live Server -in VSCode to see `index.html` working. - -## Support - -Some functions of VRL are not supported or don't work as expected at the -moment due to WASM limitations with some Rust crates, in -the future we will modify the functions so that they are supported. - -List of functions that aren't supported at the moment: - -- `log()` -- `decrypt()` -- `encrypt()` -- `get_hostname()` -- `parse_groks()` -- `random_bytes()` -- `reverse_dns()` - -Functions from VRL stdlib that are currently not supported can be found -with this [issue filter][vrl-wasm-unsupported-filter] - -## Examples - -### React - -For now, you can use an old npm-published version of vrl-web-playground, -please note that this was done for testing purposes and in the future we -will likely release this automatically upon each version release of Vector, -probably under a different package name. - -Use this dependency in `package.json` - -```json -"dependencies": { - "vrl-web-playground": "0.1.0" -} -``` - -Example import and usage in a React component - -```javascript -import init, { run_vrl } from 'vrl-web-playground'; - -export function VectorExecuteButton() { - let vrlInput = {}; - try { - vrlInput = { - program: '.something = "added by vrl!"\n.message = "modified by vrl!"', - event: JSON.parse('{message: "log message here"}'), - }; - } catch (error) { - console.log('error parsing the event contents as JSON object'); - } - - return ( -