-
Notifications
You must be signed in to change notification settings - Fork 2.4k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
[exporter/datasetexporter]: Initial Commit (#20733)
* DataSetExporter: Initial Commit * Cleanup go.mod * Reverse changes in elasticsearchexporter/config.go * Run mentioned commands * Run make goporto * Add replace line manually. * Add dummy comment to trigger EasyCLA check * I have executed - `make for-all CMD="make tidy"` * Incorporate changes from CR * Add dataset exporter into tests for otelcontribcol * Fix check - `make -j2 golint GROUP=other` * Remove support for metrics and mention support for traces. * Run make generate to update metadata table * Remove unused function * Update exporter/datasetexporter/factory_test.go Co-authored-by: Evan Bradley <11745660+evan-bradley@users.noreply.github.com> * Update exporter/datasetexporter/config_test.go Co-authored-by: Evan Bradley <11745660+evan-bradley@users.noreply.github.com> * Update exporter/datasetexporter/config_test.go Co-authored-by: Evan Bradley <11745660+evan-bradley@users.noreply.github.com> * Update exporter/datasetexporter/datasetexporter.go Co-authored-by: Evan Bradley <11745660+evan-bradley@users.noreply.github.com> * Swap order of parameters in Equal function * Update dependencies * Adjust code after running all those tools * Resolve the conflicts in versions * Rerun command that has failed * Restore go.mod from main * Fix license check * Fix version check * Fix check-collector-module-versionr * Rerun all the remaining commands * Rename MaxDelayMs to maxDelayMs * Modify metadata.yaml => `make generate` => adjust code --------- Co-authored-by: Evan Bradley <11745660+evan-bradley@users.noreply.github.com>
- Loading branch information
1 parent
ddc2204
commit 5f47f5b
Showing
25 changed files
with
1,350 additions
and
5 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,16 @@ | ||
# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix' | ||
change_type: new_component | ||
|
||
# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver) | ||
component: datasetexporter | ||
|
||
# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). | ||
note: Add new DataSet exporter. | ||
|
||
# One or more tracking issues related to the change | ||
issues: [20660] | ||
|
||
# (Optional) One or more lines of additional information to render under the primary note. | ||
# These lines will be padded with 2 spaces and then inserted directly into the document. | ||
# Use pipe (|) for multiline entries. | ||
subtext: |
Validating CODEOWNERS rules …
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1 @@ | ||
include ../../Makefile.Common |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,60 @@ | ||
# DataSet Exporter | ||
|
||
<!-- status autogenerated section --> | ||
| Status | | | ||
| ------------- |-----------| | ||
| Stability | [development]: traces, logs | | ||
| Distributions | [] | | ||
|
||
[development]: https://github.com/open-telemetry/opentelemetry-collector#development | ||
<!-- end autogenerated section --> | ||
|
||
This exporter sends logs to [DataSet](https://www.dataset.com/). | ||
|
||
See the [Getting Started](https://app.scalyr.com/help/getting-started) guide. | ||
|
||
## Configuration | ||
|
||
### Required Settings | ||
|
||
- `dataset_url` (no default): The URL of the DataSet API that ingests the data. Most likely https://app.scalyr.com. If not specified env variable `DATASET_URL` is used. | ||
- `api_key` (no default): The "Log Write" API key required to use the API. Instructions on how to get an [API key](https://app.scalyr.com/help/api-keys). If not specified, the env variable `DATASET_API_KEY` is used.
|
||
### Optional Settings | ||
|
||
- `max_delay_ms` (default = "15000"): The maximum delay between sending batches from the same source. | ||
- `group_by` (default = []): The list of attributes based on which events should be grouped. | ||
- `retry_on_failure`: See [retry_on_failure](https://github.com/open-telemetry/opentelemetry-collector/blob/main/exporter/exporterhelper/README.md) | ||
- `sending_queue`: See [sending_queue](https://github.com/open-telemetry/opentelemetry-collector/blob/main/exporter/exporterhelper/README.md) | ||
- `timeout`: See [timeout](https://github.com/open-telemetry/opentelemetry-collector/blob/main/exporter/exporterhelper/README.md) | ||
|
||
|
||
### Example | ||
|
||
```yaml | ||
|
||
exporters: | ||
dataset: | ||
# DataSet API URL | ||
dataset_url: https://app.scalyr.com | ||
# API Key | ||
api_key: your_api_key | ||
# Send batch to the API at least every 15s | ||
max_delay_ms: 15000 | ||
# Group data based on these attributes | ||
group_by: | ||
- attributes.container_id | ||
- attributes.log.file.path | ||
- body.map.kubernetes.container_hash | ||
- body.map.kubernetes.pod_id | ||
- body.map.kubernetes.docker_id | ||
- body.map.stream | ||
|
||
service: | ||
pipelines: | ||
logs: | ||
receivers: [otlp] | ||
processors: [batch] | ||
# add dataset among your exporters | ||
exporters: [dataset] | ||
``` |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,91 @@ | ||
// Copyright The OpenTelemetry Authors | ||
// | ||
// Licensed under the Apache License, Version 2.0 (the "License"); | ||
// you may not use this file except in compliance with the License. | ||
// You may obtain a copy of the License at | ||
// | ||
// http://www.apache.org/licenses/LICENSE-2.0 | ||
// | ||
// Unless required by applicable law or agreed to in writing, software | ||
// distributed under the License is distributed on an "AS IS" BASIS, | ||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
// See the License for the specific language governing permissions and | ||
// limitations under the License. | ||
|
||
package datasetexporter // import "github.com/open-telemetry/opentelemetry-collector-contrib/exporter/datasetexporter" | ||
|
||
import ( | ||
"fmt" | ||
"os" | ||
"strconv" | ||
|
||
"go.opentelemetry.io/collector/confmap" | ||
"go.opentelemetry.io/collector/exporter/exporterhelper" | ||
) | ||
|
||
// maxDelayMs is the default value for Config.MaxDelayMs, applied in
// Unmarshal when the user does not set max_delay_ms (15 seconds).
const maxDelayMs = "15000"

// Config defines configuration for the DataSet exporter.
type Config struct {
	// DatasetURL is the URL of the DataSet API that ingests the data,
	// e.g. https://app.scalyr.com. Falls back to the DATASET_URL env var.
	DatasetURL string `mapstructure:"dataset_url"`
	// APIKey is the "Log Write" API key used to authenticate against the
	// DataSet API. Falls back to the DATASET_API_KEY env var.
	APIKey string `mapstructure:"api_key"`
	// MaxDelayMs is the maximum delay between sending batches from the
	// same source, in milliseconds, kept as a string and validated as an
	// integer in Validate.
	MaxDelayMs string `mapstructure:"max_delay_ms"`
	// GroupBy lists the attributes by which events are grouped into batches.
	GroupBy []string `mapstructure:"group_by"`
	exporterhelper.RetrySettings   `mapstructure:"retry_on_failure"`
	exporterhelper.QueueSettings   `mapstructure:"sending_queue"`
	exporterhelper.TimeoutSettings `mapstructure:"timeout"`
}
|
||
func (c *Config) Unmarshal(conf *confmap.Conf) error { | ||
if err := conf.Unmarshal(c, confmap.WithErrorUnused()); err != nil { | ||
return fmt.Errorf("cannot unmarshal config: %w", err) | ||
} | ||
|
||
if len(c.DatasetURL) == 0 { | ||
c.DatasetURL = os.Getenv("DATASET_URL") | ||
} | ||
if len(c.APIKey) == 0 { | ||
c.APIKey = os.Getenv("DATASET_API_KEY") | ||
} | ||
|
||
if len(c.MaxDelayMs) == 0 { | ||
c.MaxDelayMs = maxDelayMs | ||
} | ||
|
||
return nil | ||
} | ||
|
||
// Validate checks if all required fields in Config are set and have valid values. | ||
// If any of the required fields are missing or have invalid values, it returns an error. | ||
func (c *Config) Validate() error { | ||
if c.APIKey == "" { | ||
return fmt.Errorf("api_key is required") | ||
} | ||
if c.DatasetURL == "" { | ||
return fmt.Errorf("dataset_url is required") | ||
} | ||
|
||
_, err := strconv.Atoi(c.MaxDelayMs) | ||
if err != nil { | ||
return fmt.Errorf( | ||
"max_delay_ms must be integer, but %s was used: %w", | ||
c.MaxDelayMs, | ||
err, | ||
) | ||
} | ||
|
||
return nil | ||
} | ||
|
||
// String returns a string representation of the Config object. | ||
// It includes all the fields and their values in the format "field_name: field_value". | ||
func (c *Config) String() string { | ||
s := "" | ||
s += fmt.Sprintf("%s: %s; ", "DatasetURL", c.DatasetURL) | ||
s += fmt.Sprintf("%s: %s; ", "MaxDelayMs", c.MaxDelayMs) | ||
s += fmt.Sprintf("%s: %s; ", "GroupBy", c.GroupBy) | ||
s += fmt.Sprintf("%s: %+v; ", "RetrySettings", c.RetrySettings) | ||
s += fmt.Sprintf("%s: %+v; ", "QueueSettings", c.QueueSettings) | ||
s += fmt.Sprintf("%s: %+v", "TimeoutSettings", c.TimeoutSettings) | ||
|
||
return s | ||
} |
Oops, something went wrong.