adding integration tests for prom RW exporter
adding async conversions

fixing tox.ini snappy

installing snappy c library

installing c snappy library before calling tests

adding changelog

adding assertions for every test
Azfaar Qureshi committed Dec 9, 2020
1 parent ae70d5a commit f756e69
Showing 5 changed files with 244 additions and 3 deletions.
@@ -0,0 +1,13 @@
# Changelog

## Unreleased
- Prometheus Remote Write Exporter Setup
  ([#180](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/180))
- Add Exporter constructor validation methods
  ([#206](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/206))
- Add conversion to TimeSeries methods
  ([#207](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/207))
- Add request methods
  ([#212](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/212))
- Add integration tests in opentelemetry-docker-tests
  ([#216](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/216))
8 changes: 8 additions & 0 deletions tests/opentelemetry-docker-tests/tests/docker-compose.yml
@@ -39,3 +39,11 @@ services:
- "16686:16686"
- "14268:14268"
- "9411:9411"
cortex:
image: quay.io/cortexproject/cortex:v1.5.0
command:
- -config.file=./config/cortex-config.yml
volumes:
- ./prometheus-remote-write-cortex/cortex-config.yml:/config/cortex-config.yml:ro
ports:
- 9009:9009
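The new cortex service exposes the remote-write endpoint at http://localhost:9009/api/prom/push, which the integration tests below push to. The docker-tests environment waits for the containers before running tests (the check_availability.py step added to tox.ini); a minimal readiness poll in that spirit could look like the sketch below. The /ready path and the retry budget are assumptions for illustration, not taken from this commit.

# Sketch: wait for the Cortex container from docker-compose.yml to accept requests.
# The /ready endpoint and retry counts are assumed; the real wait logic lives in
# check_availability.py, which is not part of this diff.
import time

import requests

CORTEX_READY_URL = "http://localhost:9009/ready"  # assumed readiness endpoint


def wait_for_cortex(retries=30, delay_s=1.0):
    for _ in range(retries):
        try:
            if requests.get(CORTEX_READY_URL, timeout=2).status_code == 200:
                return True
        except requests.ConnectionError:
            pass  # container not accepting connections yet
        time.sleep(delay_s)
    return False


if __name__ == "__main__":
    if not wait_for_cortex():
        raise SystemExit("Cortex did not become ready in time")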
@@ -0,0 +1,100 @@
# This Cortex Config is copied from the Cortex Project documentation
# Source: https://github.com/cortexproject/cortex/blob/master/docs/configuration/single-process-config.yaml

# Configuration for running Cortex in single-process mode.
# This configuration should not be used in production.
# It is only for getting started and development.

# Disable the requirement that every request to Cortex has a
# X-Scope-OrgID header. `fake` will be substituted in instead.
auth_enabled: false

server:
  http_listen_port: 9009

  # Configure the server to allow messages up to 100MB.
  grpc_server_max_recv_msg_size: 104857600
  grpc_server_max_send_msg_size: 104857600
  grpc_server_max_concurrent_streams: 1000

distributor:
  shard_by_all_labels: true
  pool:
    health_check_ingesters: true

ingester_client:
  grpc_client_config:
    # Configure the client to allow messages up to 100MB.
    max_recv_msg_size: 104857600
    max_send_msg_size: 104857600
    use_gzip_compression: true

ingester:
  # We want our ingesters to flush chunks at the same time to optimise
  # deduplication opportunities.
  spread_flushes: true
  chunk_age_jitter: 0

  walconfig:
    wal_enabled: true
    recover_from_wal: true
    wal_dir: /tmp/cortex/wal

  lifecycler:
    # The address to advertise for this ingester. Will be autodiscovered by
    # looking up address on eth0 or en0; can be specified if this fails.
    # address: 127.0.0.1

    # We want to start immediately and flush on shutdown.
    join_after: 0
    min_ready_duration: 0s
    final_sleep: 0s
    num_tokens: 512
    tokens_file_path: /tmp/cortex/wal/tokens

    # Use an in memory ring store, so we don't need to launch a Consul.
    ring:
      kvstore:
        store: inmemory
      replication_factor: 1

# Use local storage - BoltDB for the index, and the filesystem
# for the chunks.
schema:
  configs:
  - from: 2019-07-29
    store: boltdb
    object_store: filesystem
    schema: v10
    index:
      prefix: index_
      period: 1w

storage:
  boltdb:
    directory: /tmp/cortex/index

  filesystem:
    directory: /tmp/cortex/chunks

  delete_store:
    store: boltdb

purger:
  object_store_type: filesystem

frontend_worker:
  # Configure the frontend worker in the querier to match worker count
  # to max_concurrent on the queriers.
  match_max_concurrent: true

# Configure the ruler to scan the /tmp/cortex/rules directory for prometheus
# rules: https://prometheus.io/docs/prometheus/latest/configuration/recording_rules/#recording-rules
ruler:
  enable_api: true
  enable_sharding: false
  storage:
    type: local
    local:
      directory: /tmp/cortex/rules

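Because auth_enabled is false, this Cortex instance runs single-tenant and does not require the X-Scope-OrgID header, although the tests below still send one. A minimal standalone sketch of pointing the exporter at this instance, mirroring the setUp of the test class below (the 5-second export interval and the counter name are arbitrary choices for illustration):

# Sketch: wire the Prometheus Remote Write exporter to the single-process Cortex
# above, outside the docker-tests harness. Endpoint and header values mirror the
# integration tests; the interval and metric name are arbitrary.
from opentelemetry import metrics
from opentelemetry.exporter.prometheus_remote_write import (
    PrometheusRemoteWriteMetricsExporter,
)

exporter = PrometheusRemoteWriteMetricsExporter(
    endpoint="http://localhost:9009/api/prom/push",
    headers={"X-Scope-Org-ID": "5"},  # optional here, since auth is disabled
)
meter = metrics.get_meter_provider().get_meter(__name__)
# Export collected metrics every 5 seconds.
metrics.get_meter_provider().start_pipeline(meter, exporter, 5)

requests_counter = meter.create_counter(
    name="example_requests",
    description="example counter pushed to Cortex",
    unit="1",
    value_type=int,
)
requests_counter.add(1, {"environment": "testing"})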
@@ -0,0 +1,116 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from opentelemetry import metrics
from opentelemetry.exporter.prometheus_remote_write import (
    PrometheusRemoteWriteMetricsExporter,
)
from opentelemetry.test.test_base import TestBase


def observer_callback(observer):
    array = [1.0, 15.0, 25.0, 26.0]
    for (index, usage) in enumerate(array):
        labels = {"test_label": str(index)}
        observer.observe(usage, labels)


class TestPrometheusRemoteWriteExporterCortex(TestBase):
    def setUp(self):
        super().setUp()
        self.exporter = PrometheusRemoteWriteMetricsExporter(
            endpoint="http://localhost:9009/api/prom/push",
            headers={"X-Scope-Org-ID": "5"},
        )
        self.labels = {"environment": "testing"}
        self.meter = self.meter_provider.get_meter(__name__)
        metrics.get_meter_provider().start_pipeline(
            self.meter, self.exporter, 1,
        )

    def test_export_counter(self):
        try:
            requests_counter = self.meter.create_counter(
                name="counter",
                description="test_export_counter",
                unit="1",
                value_type=int,
            )
            requests_counter.add(25, self.labels)
        except Exception as e:
            self.fail(
                "Export counter failed with unexpected error {}".format(e)
            )

    def test_export_valuerecorder(self):
        try:
            requests_size = self.meter.create_valuerecorder(
                name="valuerecorder",
                description="test_export_valuerecorder",
                unit="1",
                value_type=int,
            )
            requests_size.record(25, self.labels)
        except Exception as e:
            self.fail(
                "Export valuerecorder failed with unexpected error {}".format(
                    e
                )
            )

    def test_export_updowncounter(self):
        try:
            requests_size = self.meter.create_updowncounter(
                name="updowncounter",
                description="test_export_updowncounter",
                unit="1",
                value_type=int,
            )
            requests_size.add(-25, self.labels)
        except Exception as e:
            self.fail(
                "Export updowncounter failed with unexpected error {}".format(
                    e
                )
            )

    def test_export_sumobserver(self):
        try:
            self.meter.register_sumobserver(
                callback=observer_callback,
                name="sumobserver",
                description="test_export_sumobserver",
                unit="1",
                value_type=float,
            )
        except Exception as e:
            self.fail(
                "Export sumobserver failed with unexpected error {}".format(e)
            )

    def test_export_updownsumobserver(self):
        try:
            self.meter.register_updownsumobserver(
                callback=observer_callback,
                name="updownsumobserver",
                description="test_export_updownsumobserver",
                unit="1",
                value_type=float,
            )
        except Exception as e:
            self.fail(
                "Export updownsumobserver failed with unexpected error {}".format(
                    e
                )
            )
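These tests assert only that instrument creation and recording complete without raising. A natural extension, not part of this commit, would be to query Cortex's Prometheus-compatible API and confirm that exported samples actually arrived. The sketch below assumes Cortex's default legacy /api/prom HTTP prefix and uses the standard Prometheus query API response shape.

# Sketch (not in this commit): verify that a metric reached Cortex by querying
# its Prometheus-compatible API. The query path is an assumption based on the
# legacy /api/prom prefix configured above.
import requests

CORTEX_QUERY_URL = "http://localhost:9009/api/prom/api/v1/query"  # assumed path


def metric_exists(metric_name):
    response = requests.get(CORTEX_QUERY_URL, params={"query": metric_name})
    response.raise_for_status()
    body = response.json()
    # Prometheus query API returns {"status": "success", "data": {"result": [...]}}.
    return body["status"] == "success" and len(body["data"]["result"]) > 0


# e.g. after test_export_counter has run and the pipeline has pushed at least once:
# assert metric_exists("counter")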
10 changes: 7 additions & 3 deletions tox.ini
@@ -198,7 +198,7 @@ commands_pre =
test: pip install {toxinidir}/opentelemetry-python-core/opentelemetry-api {toxinidir}/opentelemetry-python-core/opentelemetry-sdk {toxinidir}/opentelemetry-python-core/tests/util

test: pip install {toxinidir}/opentelemetry-python-core/opentelemetry-instrumentation

celery: pip install {toxinidir}/instrumentation/opentelemetry-instrumentation-celery[test]

grpc: pip install {toxinidir}/instrumentation/opentelemetry-instrumentation-grpc[test]
@@ -341,7 +341,8 @@ deps =
sqlalchemy ~= 1.3.16
redis ~= 3.3.11
celery ~= 4.0, != 4.4.4

protobuf>=3.13.0
requests==2.25.0
changedir =
tests/opentelemetry-docker-tests/tests

@@ -361,7 +362,10 @@ commands_pre =
-e {toxinidir}/instrumentation/opentelemetry-instrumentation-aiopg \
-e {toxinidir}/instrumentation/opentelemetry-instrumentation-redis \
-e {toxinidir}/instrumentation/opentelemetry-instrumentation-system-metrics \
-e {toxinidir}/opentelemetry-python-core/exporter/opentelemetry-exporter-opencensus
-e {toxinidir}/opentelemetry-python-core/exporter/opentelemetry-exporter-opencensus \
-e {toxinidir}/exporter/opentelemetry-exporter-prometheus-remote-write
sudo apt-get install libsnappy-dev
pip install python-snappy
docker-compose up -d
python check_availability.py
commands =
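The new tox.ini steps install libsnappy-dev, python-snappy, and protobuf because the Prometheus remote-write protocol carries snappy-compressed protobuf payloads over HTTP. The sketch below shows the shape of such a raw push against the Cortex endpoint; the gen.remote_pb2 module path is an assumption about where the exporter package keeps its generated protobuf stubs, and is not taken from this diff.

# Sketch: a raw remote-write push, illustrating why python-snappy and protobuf
# are needed. The generated-proto import path is assumed for illustration.
import time

import requests
import snappy
from opentelemetry.exporter.prometheus_remote_write.gen.remote_pb2 import (
    WriteRequest,
)

write_request = WriteRequest()
series = write_request.timeseries.add()
label = series.labels.add()
label.name = "__name__"
label.value = "example_metric"
sample = series.samples.add()
sample.value = 1.0
sample.timestamp = int(time.time() * 1000)  # milliseconds since epoch

response = requests.post(
    "http://localhost:9009/api/prom/push",
    data=snappy.compress(write_request.SerializeToString()),
    headers={
        "Content-Encoding": "snappy",
        "Content-Type": "application/x-protobuf",
        "X-Prometheus-Remote-Write-Version": "0.1.0",
    },
)
response.raise_for_status()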