From 611578a40b87da4e46c051828c93b3aeeaddb19f Mon Sep 17 00:00:00 2001
From: Riccardo Magliocchetti
Date: Fri, 24 Jan 2025 12:11:04 +0100
Subject: [PATCH] Add basic tracing for InvokeModelWithResponseStream

---
 .../bedrock-runtime/zero-code/README.rst | 1 +
 .../zero-code/invoke_model_stream.py | 51 +++++++
 .../botocore/extensions/bedrock.py | 27 +++-
 .../botocore/extensions/bedrock_utils.py | 141 ++++++++++++++++-
 .../tests/bedrock_utils.py | 2 +-
 ...el_with_response_stream_invalid_model.yaml | 51 +++++++
 ...onse_stream_with_content[amazon.nova].yaml | 144 ++++++++++++++++++
 ...nse_stream_with_content[amazon.titan].yaml | 61 ++++++++
 ...stream_with_content[anthropic.claude].yaml | 124 +++++++++++++++
 .../tests/test_botocore_bedrock.py | 123 ++++++++++++++-
 10 files changed, 716 insertions(+), 9 deletions(-)
 create mode 100644 instrumentation/opentelemetry-instrumentation-botocore/examples/bedrock-runtime/zero-code/invoke_model_stream.py
 create mode 100644 instrumentation/opentelemetry-instrumentation-botocore/tests/cassettes/test_invoke_model_with_response_stream_invalid_model.yaml
 create mode 100644 instrumentation/opentelemetry-instrumentation-botocore/tests/cassettes/test_invoke_model_with_response_stream_with_content[amazon.nova].yaml
 create mode 100644 instrumentation/opentelemetry-instrumentation-botocore/tests/cassettes/test_invoke_model_with_response_stream_with_content[amazon.titan].yaml
 create mode 100644 instrumentation/opentelemetry-instrumentation-botocore/tests/cassettes/test_invoke_model_with_response_stream_with_content[anthropic.claude].yaml

diff --git a/instrumentation/opentelemetry-instrumentation-botocore/examples/bedrock-runtime/zero-code/README.rst b/instrumentation/opentelemetry-instrumentation-botocore/examples/bedrock-runtime/zero-code/README.rst
index cdd678c765..abecb0aa88 100644
--- a/instrumentation/opentelemetry-instrumentation-botocore/examples/bedrock-runtime/zero-code/README.rst
+++ b/instrumentation/opentelemetry-instrumentation-botocore/examples/bedrock-runtime/zero-code/README.rst
@@ -20,6 +20,7 @@ Available examples
 - `converse.py` uses `bedrock-runtime` `Converse API _`.
 - `converse_stream.py` uses `bedrock-runtime` `ConverseStream API _`.
 - `invoke_model.py` uses `bedrock-runtime` `InvokeModel API _`.
+- `invoke_model_stream.py` uses `bedrock-runtime` `InvokeModelWithResponseStream API _`.

 Setup
 -----
diff --git a/instrumentation/opentelemetry-instrumentation-botocore/examples/bedrock-runtime/zero-code/invoke_model_stream.py b/instrumentation/opentelemetry-instrumentation-botocore/examples/bedrock-runtime/zero-code/invoke_model_stream.py
new file mode 100644
index 0000000000..deca2c9fb3
--- /dev/null
+++ b/instrumentation/opentelemetry-instrumentation-botocore/examples/bedrock-runtime/zero-code/invoke_model_stream.py
@@ -0,0 +1,51 @@
+import json
+import os
+
+import boto3
+
+
+def main():
+    chat_model = os.getenv("CHAT_MODEL", "amazon.titan-text-lite-v1")
+    prompt = "Write a short poem on OpenTelemetry."
+    if "amazon.titan" in chat_model:
+        body = {
+            "inputText": prompt,
+            "textGenerationConfig": {},
+        }
+    elif "amazon.nova" in chat_model:
+        body = {
+            "messages": [{"role": "user", "content": [{"text": prompt}]}],
+            "schemaVersion": "messages-v1",
+        }
+    elif "anthropic.claude" in chat_model:
+        body = {
+            "messages": [
+                {"role": "user", "content": [{"text": prompt, "type": "text"}]}
+            ],
+            "anthropic_version": "bedrock-2023-05-31",
+            "max_tokens": 200,
+        }
+    else:
+        raise ValueError(f"Unsupported model: {chat_model}")
+    client = boto3.client("bedrock-runtime")
+    response = client.invoke_model_with_response_stream(
+        modelId=chat_model,
+        body=json.dumps(body),
+    )
+
+    answer = ""
+    for event in response["body"]:
+        json_bytes = event.get("chunk", {}).get("bytes", b"")
+        decoded = json_bytes.decode("utf-8")
+        chunk = json.loads(decoded)
+        # amazon.titan chunks carry the text in "outputText", amazon.nova in
+        # "contentBlockDelta", anthropic.claude (messages API) in
+        # "content_block_delta" events
+        if "outputText" in chunk:
+            answer += chunk["outputText"]
+        elif chunk.get("type") == "content_block_delta":
+            answer += chunk["delta"]["text"]
+        elif "contentBlockDelta" in chunk:
+            answer += chunk["contentBlockDelta"]["delta"]["text"]
+    print(answer)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/instrumentation/opentelemetry-instrumentation-botocore/src/opentelemetry/instrumentation/botocore/extensions/bedrock.py b/instrumentation/opentelemetry-instrumentation-botocore/src/opentelemetry/instrumentation/botocore/extensions/bedrock.py
index fb664bb1e4..186029eadf 100644
--- a/instrumentation/opentelemetry-instrumentation-botocore/src/opentelemetry/instrumentation/botocore/extensions/bedrock.py
+++ b/instrumentation/opentelemetry-instrumentation-botocore/src/opentelemetry/instrumentation/botocore/extensions/bedrock.py
@@ -28,6 +28,7 @@
 from opentelemetry.instrumentation.botocore.extensions.bedrock_utils import (
     ConverseStreamWrapper,
+    InvokeModelWithResponseStreamWrapper,
 )
 from opentelemetry.instrumentation.botocore.extensions.types import (
     _AttributeMapT,
@@ -66,8 +67,16 @@ class _BedrockRuntimeExtension(_AwsSdkExtension):
     Amazon Bedrock Runtime.
""" - _HANDLED_OPERATIONS = {"Converse", "ConverseStream", "InvokeModel"} - _DONT_CLOSE_SPAN_ON_END_OPERATIONS = {"ConverseStream"} + _HANDLED_OPERATIONS = { + "Converse", + "ConverseStream", + "InvokeModel", + "InvokeModelWithResponseStream", + } + _DONT_CLOSE_SPAN_ON_END_OPERATIONS = { + "ConverseStream", + "InvokeModelWithResponseStream", + } def should_end_span_on_exit(self): return ( @@ -288,6 +297,20 @@ def stream_done_callback(response): # InvokeModel if "body" in result and isinstance(result["body"], StreamingBody): self._invoke_model_on_success(span, result, model_id) + return + + # InvokeModelWithResponseStream + if "body" in result and isinstance(result["body"], EventStream): + + def invoke_model_stream_done_callback(response): + # the callback gets data formatted as the simpler converse API + self._converse_on_success(span, response) + span.end() + + result["body"] = InvokeModelWithResponseStreamWrapper( + result["body"], invoke_model_stream_done_callback, model_id + ) + return # pylint: disable=no-self-use def _handle_amazon_titan_response( diff --git a/instrumentation/opentelemetry-instrumentation-botocore/src/opentelemetry/instrumentation/botocore/extensions/bedrock_utils.py b/instrumentation/opentelemetry-instrumentation-botocore/src/opentelemetry/instrumentation/botocore/extensions/bedrock_utils.py index 55d90a2b9f..cf5b4063dc 100644 --- a/instrumentation/opentelemetry-instrumentation-botocore/src/opentelemetry/instrumentation/botocore/extensions/bedrock_utils.py +++ b/instrumentation/opentelemetry-instrumentation-botocore/src/opentelemetry/instrumentation/botocore/extensions/bedrock_utils.py @@ -18,6 +18,8 @@ from __future__ import annotations +import json + from botocore.eventstream import EventStream from wrapt import ObjectProxy @@ -46,20 +48,21 @@ def __iter__(self): def _process_event(self, event): if "messageStart" in event: # {'messageStart': {'role': 'assistant'}} - pass + return if "contentBlockDelta" in event: # {'contentBlockDelta': {'delta': {'text': "Hello"}, 'contentBlockIndex': 0}} - pass + return if "contentBlockStop" in event: # {'contentBlockStop': {'contentBlockIndex': 0}} - pass + return if "messageStop" in event: # {'messageStop': {'stopReason': 'end_turn'}} if stop_reason := event["messageStop"].get("stopReason"): self._response["stopReason"] = stop_reason + return if "metadata" in event: # {'metadata': {'usage': {'inputTokens': 12, 'outputTokens': 15, 'totalTokens': 27}, 'metrics': {'latencyMs': 2980}}} @@ -72,3 +75,135 @@ def _process_event(self, event): self._response["usage"]["outputTokens"] = output_tokens self._stream_done_callback(self._response) + return + + +# pylint: disable=abstract-method +class InvokeModelWithResponseStreamWrapper(ObjectProxy): + """Wrapper for botocore.eventstream.EventStream""" + + def __init__( + self, + stream: EventStream, + stream_done_callback, + model_id: str, + ): + super().__init__(stream) + + self._stream_done_callback = stream_done_callback + self._model_id = model_id + + # accumulating things in the same shape of the Converse API + # {"usage": {"inputTokens": 0, "outputTokens": 0}, "stopReason": "finish"} + self._response = {} + + def __iter__(self): + for event in self.__wrapped__: + self._process_event(event) + yield event + + def _process_event(self, event): + if "chunk" not in event: + return + + json_bytes = event["chunk"].get("bytes", b"") + decoded = json_bytes.decode("utf-8") + try: + chunk = json.loads(decoded) + except json.JSONDecodeError: + return + + if "amazon.titan" in self._model_id: + 
self._process_amazon_titan_chunk(chunk) + elif "amazon.nova" in self._model_id: + self._process_amazon_nova_chunk(chunk) + elif "anthropic.claude" in self._model_id: + self._process_anthropic_claude_chunk(chunk) + + def _process_invocation_metrics(self, invocation_metrics): + self._response["usage"] = {} + if input_tokens := invocation_metrics.get("inputTokenCount"): + self._response["usage"]["inputTokens"] = input_tokens + + if output_tokens := invocation_metrics.get("outputTokenCount"): + self._response["usage"]["outputTokens"] = output_tokens + + def _process_amazon_titan_chunk(self, chunk): + if (stop_reason := chunk.get("completionReason")) is not None: + self._response["stopReason"] = stop_reason + + if invocation_metrics := chunk.get("amazon-bedrock-invocationMetrics"): + # "amazon-bedrock-invocationMetrics":{ + # "inputTokenCount":9,"outputTokenCount":128,"invocationLatency":3569,"firstByteLatency":2180 + # } + self._process_invocation_metrics(invocation_metrics) + self._stream_done_callback(self._response) + + def _process_amazon_nova_chunk(self, chunk): + if "messageStart" in chunk: + # {'messageStart': {'role': 'assistant'}} + return + + if "contentBlockDelta" in chunk: + # {'contentBlockDelta': {'delta': {'text': "Hello"}, 'contentBlockIndex': 0}} + return + + if "contentBlockStop" in chunk: + # {'contentBlockStop': {'contentBlockIndex': 0}} + return + + if "messageStop" in chunk: + # {'messageStop': {'stopReason': 'end_turn'}} + if stop_reason := chunk["messageStop"].get("stopReason"): + self._response["stopReason"] = stop_reason + return + + if "metadata" in chunk: + # {'metadata': {'usage': {'inputTokens': 8, 'outputTokens': 117}, 'metrics': {}, 'trace': {}}} + if usage := chunk["metadata"].get("usage"): + self._response["usage"] = {} + if input_tokens := usage.get("inputTokens"): + self._response["usage"]["inputTokens"] = input_tokens + + if output_tokens := usage.get("outputTokens"): + self._response["usage"]["outputTokens"] = output_tokens + + self._stream_done_callback(self._response) + return + + def _process_anthropic_claude_chunk(self, chunk): + if not (message_type := chunk.get("type")): + return + + if message_type == "message_start": + # {'type': 'message_start', 'message': {'id': 'id', 'type': 'message', 'role': 'assistant', 'model': 'claude-2.0', 'content': [], 'stop_reason': None, 'stop_sequence': None, 'usage': {'input_tokens': 18, 'output_tokens': 1}}} + return + + if message_type == "content_block_start": + # {'type': 'content_block_start', 'index': 0, 'content_block': {'type': 'text', 'text': ''}} + return + + if message_type == "content_block_delta": + # {'type': 'content_block_delta', 'index': 0, 'delta': {'type': 'text_delta', 'text': 'Here'}} + return + + if message_type == "content_block_stop": + # {'type': 'content_block_stop', 'index': 0} + return + + if message_type == "message_delta": + # {'type': 'message_delta', 'delta': {'stop_reason': 'end_turn', 'stop_sequence': None}, 'usage': {'output_tokens': 123}} + if ( + stop_reason := chunk.get("delta", {}).get("stop_reason") + ) is not None: + self._response["stopReason"] = stop_reason + return + + if message_type == "message_stop": + # {'type': 'message_stop', 'amazon-bedrock-invocationMetrics': {'inputTokenCount': 18, 'outputTokenCount': 123, 'invocationLatency': 5250, 'firstByteLatency': 290}} + if invocation_metrics := chunk.get( + "amazon-bedrock-invocationMetrics" + ): + self._process_invocation_metrics(invocation_metrics) + self._stream_done_callback(self._response) + return diff --git 
a/instrumentation/opentelemetry-instrumentation-botocore/tests/bedrock_utils.py b/instrumentation/opentelemetry-instrumentation-botocore/tests/bedrock_utils.py index 1467817e2e..f3d7f9e5c6 100644 --- a/instrumentation/opentelemetry-instrumentation-botocore/tests/bedrock_utils.py +++ b/instrumentation/opentelemetry-instrumentation-botocore/tests/bedrock_utils.py @@ -128,7 +128,7 @@ def assert_converse_completion_attributes( ) -def assert_converse_stream_completion_attributes( +def assert_stream_completion_attributes( span: ReadableSpan, request_model: str, input_tokens: int | None = None, diff --git a/instrumentation/opentelemetry-instrumentation-botocore/tests/cassettes/test_invoke_model_with_response_stream_invalid_model.yaml b/instrumentation/opentelemetry-instrumentation-botocore/tests/cassettes/test_invoke_model_with_response_stream_invalid_model.yaml new file mode 100644 index 0000000000..1571adc412 --- /dev/null +++ b/instrumentation/opentelemetry-instrumentation-botocore/tests/cassettes/test_invoke_model_with_response_stream_invalid_model.yaml @@ -0,0 +1,51 @@ +interactions: +- request: + body: null + headers: + Content-Length: + - '0' + User-Agent: + - !!binary | + Qm90bzMvMS4zNS41NiBtZC9Cb3RvY29yZSMxLjM1LjU2IHVhLzIuMCBvcy9saW51eCM2LjEuMC0x + MDM0LW9lbSBtZC9hcmNoI3g4Nl82NCBsYW5nL3B5dGhvbiMzLjEwLjEyIG1kL3B5aW1wbCNDUHl0 + aG9uIGNmZy9yZXRyeS1tb2RlI2xlZ2FjeSBCb3RvY29yZS8xLjM1LjU2 + X-Amz-Date: + - !!binary | + MjAyNTAxMjRUMTM0NDM5Wg== + X-Amz-Security-Token: + - test_aws_security_token + X-Amzn-Trace-Id: + - !!binary | + Um9vdD0xLTFlMjljM2Y1LTU2MzZhOWI4MmViYTYxOTFiOTcwOTI2YTtQYXJlbnQ9NzA1NzBlZjUy + YzJkZjliYjtTYW1wbGVkPTE= + amz-sdk-invocation-id: + - !!binary | + ZDg2MjFlMzAtNTk3Yi00ZWM3LWJlNGEtMThkMDQwZTRhMzcw + amz-sdk-request: + - !!binary | + YXR0ZW1wdD0x + authorization: + - Bearer test_aws_authorization + method: POST + uri: https://bedrock-runtime.us-east-1.amazonaws.com/model/does-not-exist/invoke-with-response-stream + response: + body: + string: '{"message":"The provided model identifier is invalid."}' + headers: + Connection: + - keep-alive + Content-Length: + - '55' + Content-Type: + - application/json + Date: + - Fri, 24 Jan 2025 13:44:40 GMT + Set-Cookie: test_set_cookie + x-amzn-ErrorType: + - ValidationException:http://internal.amazon.com/coral/com.amazon.bedrock/ + x-amzn-RequestId: + - 6460a108-875d-4e26-bcdf-f03c4c815f74 + status: + code: 400 + message: Bad Request +version: 1 diff --git a/instrumentation/opentelemetry-instrumentation-botocore/tests/cassettes/test_invoke_model_with_response_stream_with_content[amazon.nova].yaml b/instrumentation/opentelemetry-instrumentation-botocore/tests/cassettes/test_invoke_model_with_response_stream_with_content[amazon.nova].yaml new file mode 100644 index 0000000000..99283f5726 --- /dev/null +++ b/instrumentation/opentelemetry-instrumentation-botocore/tests/cassettes/test_invoke_model_with_response_stream_with_content[amazon.nova].yaml @@ -0,0 +1,144 @@ +interactions: +- request: + body: '{"messages": [{"role": "user", "content": [{"text": "Say this is a test"}]}], + "inferenceConfig": {"max_new_tokens": 10, "temperature": 0.8, "topP": 1, "stopSequences": + ["|"]}, "schemaVersion": "messages-v1"}' + headers: + Content-Length: + - '207' + User-Agent: + - !!binary | + Qm90bzMvMS4zNS41NiBtZC9Cb3RvY29yZSMxLjM1LjU2IHVhLzIuMCBvcy9saW51eCM2LjEuMC0x + MDM0LW9lbSBtZC9hcmNoI3g4Nl82NCBsYW5nL3B5dGhvbiMzLjEwLjEyIG1kL3B5aW1wbCNDUHl0 + aG9uIGNmZy9yZXRyeS1tb2RlI2xlZ2FjeSBCb3RvY29yZS8xLjM1LjU2 + X-Amz-Date: + - !!binary | + 
MjAyNTAxMjRUMTM0NDM3Wg== + X-Amz-Security-Token: + - test_aws_security_token + X-Amzn-Trace-Id: + - !!binary | + Um9vdD0xLWE0YWY3ZjVkLTY5YmE5ZDNiNjg5YjM2OTRkYThmZDk5NDtQYXJlbnQ9OThiYjVhY2U3 + MDE2YzZiZTtTYW1wbGVkPTE= + amz-sdk-invocation-id: + - !!binary | + MmZkNDA5NjQtYTBiNS00NzAwLTljYjUtNjI5MWQ2OWU3YTFm + amz-sdk-request: + - !!binary | + YXR0ZW1wdD0x + authorization: + - Bearer test_aws_authorization + method: POST + uri: https://bedrock-runtime.us-east-1.amazonaws.com/model/amazon.nova-micro-v1%3A0/invoke-with-response-stream + response: + body: + string: !!binary | + AAAA0QAAAEswuRGYCzpldmVudC10eXBlBwAFY2h1bmsNOmNvbnRlbnQtdHlwZQcAEGFwcGxpY2F0 + aW9uL2pzb24NOm1lc3NhZ2UtdHlwZQcABWV2ZW50eyJieXRlcyI6ImV5SnRaWE56WVdkbFUzUmhj + blFpT25zaWNtOXNaU0k2SW1GemMybHpkR0Z1ZENKOWZRPT0iLCJwIjoiYWJjZGVmZ2hpamtsbW5v + cHFyc3R1dnd4eXpBQkNERUZHSElKS0xNTk9QUVJTVFUifVUlBC4AAADcAAAAS8gp1SkLOmV2ZW50 + LXR5cGUHAAVjaHVuaw06Y29udGVudC10eXBlBwAQYXBwbGljYXRpb24vanNvbg06bWVzc2FnZS10 + eXBlBwAFZXZlbnR7ImJ5dGVzIjoiZXlKamIyNTBaVzUwUW14dlkydEVaV3gwWVNJNmV5SmtaV3gw + WVNJNmV5SjBaWGgwSWpvaVNYUWlmU3dpWTI5dWRHVnVkRUpzYjJOclNXNWtaWGdpT2pCOWZRPT0i + LCJwIjoiYWJjZGVmZ2hpamtsbW5vcHFyIn1vHubCAAAAywAAAEsa6Z67CzpldmVudC10eXBlBwAF + Y2h1bmsNOmNvbnRlbnQtdHlwZQcAEGFwcGxpY2F0aW9uL2pzb24NOm1lc3NhZ2UtdHlwZQcABWV2 + ZW50eyJieXRlcyI6ImV5SmpiMjUwWlc1MFFteHZZMnRUZEc5d0lqcDdJbU52Ym5SbGJuUkNiRzlq + YTBsdVpHVjRJam93ZlgwPSIsInAiOiJhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ekFCQ0RFRkci + fTNzlqIAAADdAAAAS/VJ/JkLOmV2ZW50LXR5cGUHAAVjaHVuaw06Y29udGVudC10eXBlBwAQYXBw + bGljYXRpb24vanNvbg06bWVzc2FnZS10eXBlBwAFZXZlbnR7ImJ5dGVzIjoiZXlKamIyNTBaVzUw + UW14dlkydEVaV3gwWVNJNmV5SmtaV3gwWVNJNmV5SjBaWGgwSWpvaUlITnZkVzVrY3lKOUxDSmpi + MjUwWlc1MFFteHZZMnRKYm1SbGVDSTZNWDE5IiwicCI6ImFiY2RlZmdoaWprbG1ubyJ9PeTnmAAA + AK8AAABLdltPNgs6ZXZlbnQtdHlwZQcABWNodW5rDTpjb250ZW50LXR5cGUHABBhcHBsaWNhdGlv + bi9qc29uDTptZXNzYWdlLXR5cGUHAAVldmVudHsiYnl0ZXMiOiJleUpqYjI1MFpXNTBRbXh2WTJ0 + VGRHOXdJanA3SW1OdmJuUmxiblJDYkc5amEwbHVaR1Y0SWpveGZYMD0iLCJwIjoiYWJjZGUifUde + LEMAAADzAAAAS4u4bfwLOmV2ZW50LXR5cGUHAAVjaHVuaw06Y29udGVudC10eXBlBwAQYXBwbGlj + YXRpb24vanNvbg06bWVzc2FnZS10eXBlBwAFZXZlbnR7ImJ5dGVzIjoiZXlKamIyNTBaVzUwUW14 + dlkydEVaV3gwWVNJNmV5SmtaV3gwWVNJNmV5SjBaWGgwSWpvaUlHeHBhMlVpZlN3aVkyOXVkR1Z1 + ZEVKc2IyTnJTVzVrWlhnaU9qSjlmUT09IiwicCI6ImFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6 + QUJDREVGR0hJSksifYXE3G0AAADCAAAASxf5/MoLOmV2ZW50LXR5cGUHAAVjaHVuaw06Y29udGVu + dC10eXBlBwAQYXBwbGljYXRpb24vanNvbg06bWVzc2FnZS10eXBlBwAFZXZlbnR7ImJ5dGVzIjoi + ZXlKamIyNTBaVzUwUW14dlkydFRkRzl3SWpwN0ltTnZiblJsYm5SQ2JHOWphMGx1WkdWNElqb3lm + WDA9IiwicCI6ImFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eCJ9qa04SwAAAM0AAABLlalrGws6ZXZl + bnQtdHlwZQcABWNodW5rDTpjb250ZW50LXR5cGUHABBhcHBsaWNhdGlvbi9qc29uDTptZXNzYWdl + LXR5cGUHAAVldmVudHsiYnl0ZXMiOiJleUpqYjI1MFpXNTBRbXh2WTJ0RVpXeDBZU0k2ZXlKa1pX + eDBZU0k2ZXlKMFpYaDBJam9pSUhsdmRTSjlMQ0pqYjI1MFpXNTBRbXh2WTJ0SmJtUmxlQ0k2TTMx + OSIsInAiOiJhYmMifRT7tlwAAADeAAAAS7LphkkLOmV2ZW50LXR5cGUHAAVjaHVuaw06Y29udGVu + dC10eXBlBwAQYXBwbGljYXRpb24vanNvbg06bWVzc2FnZS10eXBlBwAFZXZlbnR7ImJ5dGVzIjoi + ZXlKamIyNTBaVzUwUW14dlkydFRkRzl3SWpwN0ltTnZiblJsYm5SQ2JHOWphMGx1WkdWNElqb3pm + WDA9IiwicCI6ImFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6QUJDREVGR0hJSktMTU5PUFFSU1RV + VldYWVoifZUya0cAAADwAAAAS8wYFywLOmV2ZW50LXR5cGUHAAVjaHVuaw06Y29udGVudC10eXBl + BwAQYXBwbGljYXRpb24vanNvbg06bWVzc2FnZS10eXBlBwAFZXZlbnR7ImJ5dGVzIjoiZXlKamIy + NTBaVzUwUW14dlkydEVaV3gwWVNJNmV5SmtaV3gwWVNJNmV5SjBaWGgwSWpvaUp5SjlMQ0pqYjI1 + MFpXNTBRbXh2WTJ0SmJtUmxlQ0k2TkgxOSIsInAiOiJhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5 + 
ekFCQ0RFRkdISUpLTE1OT1AifViTWGoAAAC0AAAAS2Fr6aULOmV2ZW50LXR5cGUHAAVjaHVuaw06 + Y29udGVudC10eXBlBwAQYXBwbGljYXRpb24vanNvbg06bWVzc2FnZS10eXBlBwAFZXZlbnR7ImJ5 + dGVzIjoiZXlKamIyNTBaVzUwUW14dlkydFRkRzl3SWpwN0ltTnZiblJsYm5SQ2JHOWphMGx1WkdW + NElqbzBmWDA9IiwicCI6ImFiY2RlZmdoaWoifbI54e4AAADyAAAAS7bYREwLOmV2ZW50LXR5cGUH + AAVjaHVuaw06Y29udGVudC10eXBlBwAQYXBwbGljYXRpb24vanNvbg06bWVzc2FnZS10eXBlBwAF + ZXZlbnR7ImJ5dGVzIjoiZXlKamIyNTBaVzUwUW14dlkydEVaV3gwWVNJNmV5SmtaV3gwWVNJNmV5 + SjBaWGgwSWpvaWRtVWlmU3dpWTI5dWRHVnVkRUpzYjJOclNXNWtaWGdpT2pWOWZRPT0iLCJwIjoi + YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNERUZHSElKS0xNTiJ9j3DJqQAAAMMAAABLKpnV + egs6ZXZlbnQtdHlwZQcABWNodW5rDTpjb250ZW50LXR5cGUHABBhcHBsaWNhdGlvbi9qc29uDTpt + ZXNzYWdlLXR5cGUHAAVldmVudHsiYnl0ZXMiOiJleUpqYjI1MFpXNTBRbXh2WTJ0VGRHOXdJanA3 + SW1OdmJuUmxiblJDYkc5amEwbHVaR1Y0SWpvMWZYMD0iLCJwIjoiYWJjZGVmZ2hpamtsbW5vcHFy + c3R1dnd4eSJ9F6CZmwAAAQcAAABL/VBIxAs6ZXZlbnQtdHlwZQcABWNodW5rDTpjb250ZW50LXR5 + cGUHABBhcHBsaWNhdGlvbi9qc29uDTptZXNzYWdlLXR5cGUHAAVldmVudHsiYnl0ZXMiOiJleUpq + YjI1MFpXNTBRbXh2WTJ0RVpXeDBZU0k2ZXlKa1pXeDBZU0k2ZXlKMFpYaDBJam9pSUdsemMzVmxa + Q0o5TENKamIyNTBaVzUwUW14dlkydEpibVJsZUNJNk5uMTkiLCJwIjoiYWJjZGVmZ2hpamtsbW5v + cHFyc3R1dnd4eXpBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWjAxMjM0In2LnbEKAAAAtQAAAEtc + C8AVCzpldmVudC10eXBlBwAFY2h1bmsNOmNvbnRlbnQtdHlwZQcAEGFwcGxpY2F0aW9uL2pzb24N + Om1lc3NhZ2UtdHlwZQcABWV2ZW50eyJieXRlcyI6ImV5SmpiMjUwWlc1MFFteHZZMnRUZEc5d0lq + cDdJbU52Ym5SbGJuUkNiRzlqYTBsdVpHVjRJam8yZlgwPSIsInAiOiJhYmNkZWZnaGlqayJ92s5l + DQAAAN0AAABL9Un8mQs6ZXZlbnQtdHlwZQcABWNodW5rDTpjb250ZW50LXR5cGUHABBhcHBsaWNh + dGlvbi9qc29uDTptZXNzYWdlLXR5cGUHAAVldmVudHsiYnl0ZXMiOiJleUpqYjI1MFpXNTBRbXh2 + WTJ0RVpXeDBZU0k2ZXlKa1pXeDBZU0k2ZXlKMFpYaDBJam9pSUdFaWZTd2lZMjl1ZEdWdWRFSnNi + Mk5yU1c1a1pYZ2lPamQ5ZlE9PSIsInAiOiJhYmNkZWZnaGlqa2xtbm9wcXJzIn2i7NJQAAAA0AAA + AEsN2TgoCzpldmVudC10eXBlBwAFY2h1bmsNOmNvbnRlbnQtdHlwZQcAEGFwcGxpY2F0aW9uL2pz + b24NOm1lc3NhZ2UtdHlwZQcABWV2ZW50eyJieXRlcyI6ImV5SmpiMjUwWlc1MFFteHZZMnRUZEc5 + d0lqcDdJbU52Ym5SbGJuUkNiRzlqYTBsdVpHVjRJam8zZlgwPSIsInAiOiJhYmNkZWZnaGlqa2xt + bm9wcXJzdHV2d3h5ekFCQ0RFRkdISUpLTCJ9vFdU3wAAAP8AAABLTkiA/Qs6ZXZlbnQtdHlwZQcA + BWNodW5rDTpjb250ZW50LXR5cGUHABBhcHBsaWNhdGlvbi9qc29uDTptZXNzYWdlLXR5cGUHAAVl + dmVudHsiYnl0ZXMiOiJleUpqYjI1MFpXNTBRbXh2WTJ0RVpXeDBZU0k2ZXlKa1pXeDBZU0k2ZXlK + MFpYaDBJam9pSUhSbGMzUWlmU3dpWTI5dWRHVnVkRUpzYjJOclNXNWtaWGdpT2poOWZRPT0iLCJw + IjoiYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNERUZHSElKS0xNTk9QUVJTVFVWVyJ9KX8k + OAAAAMwAAABLqMlCqws6ZXZlbnQtdHlwZQcABWNodW5rDTpjb250ZW50LXR5cGUHABBhcHBsaWNh + dGlvbi9qc29uDTptZXNzYWdlLXR5cGUHAAVldmVudHsiYnl0ZXMiOiJleUpqYjI1MFpXNTBRbXh2 + WTJ0VGRHOXdJanA3SW1OdmJuUmxiblJDYkc5amEwbHVaR1Y0SWpvNGZYMD0iLCJwIjoiYWJjZGVm + Z2hpamtsbW5vcHFyc3R1dnd4eXpBQkNERUZHSCJ9PV9naQAAAPAAAABLzBgXLAs6ZXZlbnQtdHlw + ZQcABWNodW5rDTpjb250ZW50LXR5cGUHABBhcHBsaWNhdGlvbi9qc29uDTptZXNzYWdlLXR5cGUH + AAVldmVudHsiYnl0ZXMiOiJleUpqYjI1MFpXNTBRbXh2WTJ0RVpXeDBZU0k2ZXlKa1pXeDBZU0k2 + ZXlKMFpYaDBJam9pSUhCeWIyMXdkQ0o5TENKamIyNTBaVzUwUW14dlkydEpibVJsZUNJNk9YMTki + LCJwIjoiYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNERUZHSCJ9r7jZaQAAANEAAABLMLkR + mAs6ZXZlbnQtdHlwZQcABWNodW5rDTpjb250ZW50LXR5cGUHABBhcHBsaWNhdGlvbi9qc29uDTpt + ZXNzYWdlLXR5cGUHAAVldmVudHsiYnl0ZXMiOiJleUpqYjI1MFpXNTBRbXh2WTJ0VGRHOXdJanA3 + SW1OdmJuUmxiblJDYkc5amEwbHVaR1Y0SWpvNWZYMD0iLCJwIjoiYWJjZGVmZ2hpamtsbW5vcHFy + c3R1dnd4eXpBQkNERUZHSElKS0xNIn3bLXYKAAAA3QAAAEv1SfyZCzpldmVudC10eXBlBwAFY2h1 + bmsNOmNvbnRlbnQtdHlwZQcAEGFwcGxpY2F0aW9uL2pzb24NOm1lc3NhZ2UtdHlwZQcABWV2ZW50 + eyJieXRlcyI6ImV5SnRaWE56WVdkbFUzUnZjQ0k2ZXlKemRHOXdVbVZoYzI5dUlqb2liV0Y0WDNS + 
dmEyVnVjeUo5ZlE9PSIsInAiOiJhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ekFCQ0RFRkdISUpL + TE1OT1BRUlNUVVZXWFkifTuIaJUAAAGLAAAAS4lGFVcLOmV2ZW50LXR5cGUHAAVjaHVuaw06Y29u + dGVudC10eXBlBwAQYXBwbGljYXRpb24vanNvbg06bWVzc2FnZS10eXBlBwAFZXZlbnR7ImJ5dGVz + IjoiZXlKdFpYUmhaR0YwWVNJNmV5SjFjMkZuWlNJNmV5SnBibkIxZEZSdmEyVnVjeUk2TlN3aWIz + VjBjSFYwVkc5clpXNXpJam94TUgwc0ltMWxkSEpwWTNNaU9udDlMQ0owY21GalpTSTZlMzE5TENK + aGJXRjZiMjR0WW1Wa2NtOWpheTFwYm5adlkyRjBhVzl1VFdWMGNtbGpjeUk2ZXlKcGJuQjFkRlJ2 + YTJWdVEyOTFiblFpT2pVc0ltOTFkSEIxZEZSdmEyVnVRMjkxYm5RaU9qRXdMQ0pwYm5adlkyRjBh + Vzl1VEdGMFpXNWplU0k2TVRjM0xDSm1hWEp6ZEVKNWRHVk1ZWFJsYm1ONUlqbzFPWDE5IiwicCI6 + ImFiY2RlZmdoaSJ9lvCfnQ== + headers: + Connection: + - keep-alive + Content-Type: + - application/vnd.amazon.eventstream + Date: + - Fri, 24 Jan 2025 13:44:37 GMT + Set-Cookie: test_set_cookie + Transfer-Encoding: + - chunked + X-Amzn-Bedrock-Content-Type: + - application/json + x-amzn-RequestId: + - 632c3a74-f6a4-43e5-b8ff-9c2f84daf7a6 + status: + code: 200 + message: OK +version: 1 diff --git a/instrumentation/opentelemetry-instrumentation-botocore/tests/cassettes/test_invoke_model_with_response_stream_with_content[amazon.titan].yaml b/instrumentation/opentelemetry-instrumentation-botocore/tests/cassettes/test_invoke_model_with_response_stream_with_content[amazon.titan].yaml new file mode 100644 index 0000000000..50f920bf3a --- /dev/null +++ b/instrumentation/opentelemetry-instrumentation-botocore/tests/cassettes/test_invoke_model_with_response_stream_with_content[amazon.titan].yaml @@ -0,0 +1,61 @@ +interactions: +- request: + body: '{"inputText": "Say this is a test", "textGenerationConfig": {"maxTokenCount": + 10, "temperature": 0.8, "topP": 1, "stopSequences": ["|"]}}' + headers: + Content-Length: + - '137' + User-Agent: + - !!binary | + Qm90bzMvMS4zNS41NiBtZC9Cb3RvY29yZSMxLjM1LjU2IHVhLzIuMCBvcy9saW51eCM2LjEuMC0x + MDM0LW9lbSBtZC9hcmNoI3g4Nl82NCBsYW5nL3B5dGhvbiMzLjEwLjEyIG1kL3B5aW1wbCNDUHl0 + aG9uIGNmZy9yZXRyeS1tb2RlI2xlZ2FjeSBCb3RvY29yZS8xLjM1LjU2 + X-Amz-Date: + - !!binary | + MjAyNTAxMjRUMTM0NDM3Wg== + X-Amz-Security-Token: + - test_aws_security_token + X-Amzn-Trace-Id: + - !!binary | + Um9vdD0xLTZmYzFlYzExLTczOTA3MmQwMDhhNzRhNjI3ZDg2NDI4ODtQYXJlbnQ9ZGQ0MjJhMjdl + MDdiMDU4NjtTYW1wbGVkPTE= + amz-sdk-invocation-id: + - !!binary | + OTNhOWIwYTctMGJmNi00NGI4LWJlNzItNDFiY2Y0NzdjNWQ5 + amz-sdk-request: + - !!binary | + YXR0ZW1wdD0x + authorization: + - Bearer test_aws_authorization + method: POST + uri: https://bedrock-runtime.us-east-1.amazonaws.com/model/amazon.titan-text-lite-v1/invoke-with-response-stream + response: + body: + string: !!binary | + AAAB9QAAAEvPpEv5CzpldmVudC10eXBlBwAFY2h1bmsNOmNvbnRlbnQtdHlwZQcAEGFwcGxpY2F0 + aW9uL2pzb24NOm1lc3NhZ2UtdHlwZQcABWV2ZW50eyJieXRlcyI6ImV5SnZkWFJ3ZFhSVVpYaDBJ + am9pTGx4dVRXOXlaU0JwYm1admNtMWhkR2x2YmlCM2FXeHNJR0psSUc1bFpXUmxaQ0JpWldadmNt + VWdZU0J3Y205d1pYSWlMQ0pwYm1SbGVDSTZNQ3dpZEc5MFlXeFBkWFJ3ZFhSVVpYaDBWRzlyWlc1 + RGIzVnVkQ0k2TVRBc0ltTnZiWEJzWlhScGIyNVNaV0Z6YjI0aU9pSk1SVTVIVkVnaUxDSnBibkIx + ZEZSbGVIUlViMnRsYmtOdmRXNTBJam8xTENKaGJXRjZiMjR0WW1Wa2NtOWpheTFwYm5adlkyRjBh + Vzl1VFdWMGNtbGpjeUk2ZXlKcGJuQjFkRlJ2YTJWdVEyOTFiblFpT2pVc0ltOTFkSEIxZEZSdmEy + VnVRMjkxYm5RaU9qRXdMQ0pwYm5adlkyRjBhVzl1VEdGMFpXNWplU0k2TnpBNExDSm1hWEp6ZEVK + NWRHVk1ZWFJsYm1ONUlqbzNNRGQ5ZlE9PSIsInAiOiJhYmNkZWZnIn2cuo/H + headers: + Connection: + - keep-alive + Content-Type: + - application/vnd.amazon.eventstream + Date: + - Fri, 24 Jan 2025 13:44:38 GMT + Set-Cookie: test_set_cookie + Transfer-Encoding: + - chunked + X-Amzn-Bedrock-Content-Type: + - 
application/json + x-amzn-RequestId: + - 47aaeeb3-fa59-4aae-b480-01632279e2fa + status: + code: 200 + message: OK +version: 1 diff --git a/instrumentation/opentelemetry-instrumentation-botocore/tests/cassettes/test_invoke_model_with_response_stream_with_content[anthropic.claude].yaml b/instrumentation/opentelemetry-instrumentation-botocore/tests/cassettes/test_invoke_model_with_response_stream_with_content[anthropic.claude].yaml new file mode 100644 index 0000000000..03a48c80e7 --- /dev/null +++ b/instrumentation/opentelemetry-instrumentation-botocore/tests/cassettes/test_invoke_model_with_response_stream_with_content[anthropic.claude].yaml @@ -0,0 +1,124 @@ +interactions: +- request: + body: '{"messages": [{"role": "user", "content": [{"text": "Say this is a test", + "type": "text"}]}], "anthropic_version": "bedrock-2023-05-31", "max_tokens": + 10, "temperature": 0.8, "top_p": 1, "stop_sequences": ["|"]}' + headers: + Content-Length: + - '211' + User-Agent: + - !!binary | + Qm90bzMvMS4zNS41NiBtZC9Cb3RvY29yZSMxLjM1LjU2IHVhLzIuMCBvcy9saW51eCM2LjEuMC0x + MDM0LW9lbSBtZC9hcmNoI3g4Nl82NCBsYW5nL3B5dGhvbiMzLjEwLjEyIG1kL3B5aW1wbCNDUHl0 + aG9uIGNmZy9yZXRyeS1tb2RlI2xlZ2FjeSBCb3RvY29yZS8xLjM1LjU2 + X-Amz-Date: + - !!binary | + MjAyNTAxMjRUMTM0NDM4Wg== + X-Amz-Security-Token: + - test_aws_security_token + X-Amzn-Trace-Id: + - !!binary | + Um9vdD0xLWY5MjBjYzFhLTI1ZGI4MTgwYTZiOGQ3YWQ0MDI0Zjg3YTtQYXJlbnQ9NDhlOTc1MmFm + NjZjMTA5ODtTYW1wbGVkPTE= + amz-sdk-invocation-id: + - !!binary | + NGQ1NzllYzAtZDIxZi00NTVhLTkxOGMtNDgxNjE3ZjliZjQx + amz-sdk-request: + - !!binary | + YXR0ZW1wdD0x + authorization: + - Bearer test_aws_authorization + method: POST + uri: https://bedrock-runtime.us-east-1.amazonaws.com/model/anthropic.claude-v2/invoke-with-response-stream + response: + body: + string: !!binary | + AAABsQAAAEti17VwCzpldmVudC10eXBlBwAFY2h1bmsNOmNvbnRlbnQtdHlwZQcAEGFwcGxpY2F0 + aW9uL2pzb24NOm1lc3NhZ2UtdHlwZQcABWV2ZW50eyJieXRlcyI6ImV5SjBlWEJsSWpvaWJXVnpj + MkZuWlY5emRHRnlkQ0lzSW0xbGMzTmhaMlVpT25zaWFXUWlPaUp0YzJkZlltUnlhMTh3TVRaVWNW + Vk5aRU5GY0VWYU5YaDNURGhRYVdwSWJ6VWlMQ0owZVhCbElqb2liV1Z6YzJGblpTSXNJbkp2YkdV + aU9pSmhjM05wYzNSaGJuUWlMQ0p0YjJSbGJDSTZJbU5zWVhWa1pTMHlMakFpTENKamIyNTBaVzUw + SWpwYlhTd2ljM1J2Y0Y5eVpXRnpiMjRpT201MWJHd3NJbk4wYjNCZmMyVnhkV1Z1WTJVaU9tNTFi + R3dzSW5WellXZGxJanA3SW1sdWNIVjBYM1J2YTJWdWN5STZNVFFzSW05MWRIQjFkRjkwYjJ0bGJu + TWlPakY5ZlgwPSIsInAiOiJhYmNkZWZnaGlqayJ9gkyXHwAAAOIAAABL1jjTzgs6ZXZlbnQtdHlw + ZQcABWNodW5rDTpjb250ZW50LXR5cGUHABBhcHBsaWNhdGlvbi9qc29uDTptZXNzYWdlLXR5cGUH + AAVldmVudHsiYnl0ZXMiOiJleUowZVhCbElqb2lZMjl1ZEdWdWRGOWliRzlqYTE5emRHRnlkQ0lz + SW1sdVpHVjRJam93TENKamIyNTBaVzUwWDJKc2IyTnJJanA3SW5SNWNHVWlPaUowWlhoMElpd2lk + R1Y0ZENJNklpSjlmUT09IiwicCI6ImFiY2QifScLG7kAAAEOAAAAS/BAKrULOmV2ZW50LXR5cGUH + AAVjaHVuaw06Y29udGVudC10eXBlBwAQYXBwbGljYXRpb24vanNvbg06bWVzc2FnZS10eXBlBwAF + ZXZlbnR7ImJ5dGVzIjoiZXlKMGVYQmxJam9pWTI5dWRHVnVkRjlpYkc5amExOWtaV3gwWVNJc0lt + bHVaR1Y0SWpvd0xDSmtaV3gwWVNJNmV5SjBlWEJsSWpvaWRHVjRkRjlrWld4MFlTSXNJblJsZUhR + aU9pSlBhMkY1SW4xOSIsInAiOiJhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ekFCQ0RFRkdISUpL + TE1OT1BRUlNUVVYifZyNweMAAADxAAAAS/F4PpwLOmV2ZW50LXR5cGUHAAVjaHVuaw06Y29udGVu + dC10eXBlBwAQYXBwbGljYXRpb24vanNvbg06bWVzc2FnZS10eXBlBwAFZXZlbnR7ImJ5dGVzIjoi + ZXlKMGVYQmxJam9pWTI5dWRHVnVkRjlpYkc5amExOWtaV3gwWVNJc0ltbHVaR1Y0SWpvd0xDSmta + V3gwWVNJNmV5SjBlWEJsSWpvaWRHVjRkRjlrWld4MFlTSXNJblJsZUhRaU9pSXNJbjE5IiwicCI6 + ImFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3In2bsnIkAAABAQAAAEtyEL1kCzpldmVudC10eXBlBwAF + Y2h1bmsNOmNvbnRlbnQtdHlwZQcAEGFwcGxpY2F0aW9uL2pzb24NOm1lc3NhZ2UtdHlwZQcABWV2 + 
ZW50eyJieXRlcyI6ImV5SjBlWEJsSWpvaVkyOXVkR1Z1ZEY5aWJHOWphMTlrWld4MFlTSXNJbWx1 + WkdWNElqb3dMQ0prWld4MFlTSTZleUowZVhCbElqb2lkR1Y0ZEY5a1pXeDBZU0lzSW5SbGVIUWlP + aUlnU1NKOWZRPT0iLCJwIjoiYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkNERUZHSEkifS+F + e3sAAAEFAAAAS4eQG6QLOmV2ZW50LXR5cGUHAAVjaHVuaw06Y29udGVudC10eXBlBwAQYXBwbGlj + YXRpb24vanNvbg06bWVzc2FnZS10eXBlBwAFZXZlbnR7ImJ5dGVzIjoiZXlKMGVYQmxJam9pWTI5 + dWRHVnVkRjlpYkc5amExOWtaV3gwWVNJc0ltbHVaR1Y0SWpvd0xDSmtaV3gwWVNJNmV5SjBlWEJs + SWpvaWRHVjRkRjlrWld4MFlTSXNJblJsZUhRaU9pSWdkMmxzYkNKOWZRPT0iLCJwIjoiYWJjZGVm + Z2hpamtsbW5vcHFyc3R1dnd4eXpBQkNERUZHSEkifXVHT8sAAAD2AAAAS0NY4owLOmV2ZW50LXR5 + cGUHAAVjaHVuaw06Y29udGVudC10eXBlBwAQYXBwbGljYXRpb24vanNvbg06bWVzc2FnZS10eXBl + BwAFZXZlbnR7ImJ5dGVzIjoiZXlKMGVYQmxJam9pWTI5dWRHVnVkRjlpYkc5amExOWtaV3gwWVNJ + c0ltbHVaR1Y0SWpvd0xDSmtaV3gwWVNJNmV5SjBlWEJsSWpvaWRHVjRkRjlrWld4MFlTSXNJblJs + ZUhRaU9pSWdjMkY1SW4xOSIsInAiOiJhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3gifdFfmr0AAAD2 + AAAAS0NY4owLOmV2ZW50LXR5cGUHAAVjaHVuaw06Y29udGVudC10eXBlBwAQYXBwbGljYXRpb24v + anNvbg06bWVzc2FnZS10eXBlBwAFZXZlbnR7ImJ5dGVzIjoiZXlKMGVYQmxJam9pWTI5dWRHVnVk + RjlpYkc5amExOWtaV3gwWVNJc0ltbHVaR1Y0SWpvd0xDSmtaV3gwWVNJNmV5SjBlWEJsSWpvaWRH + VjRkRjlrWld4MFlTSXNJblJsZUhRaU9pSWdYQ0lpZlgwPSIsInAiOiJhYmNkZWZnaGlqa2xtbm9w + cXJzdHV2d3gifXU4Q4UAAADzAAAAS4u4bfwLOmV2ZW50LXR5cGUHAAVjaHVuaw06Y29udGVudC10 + eXBlBwAQYXBwbGljYXRpb24vanNvbg06bWVzc2FnZS10eXBlBwAFZXZlbnR7ImJ5dGVzIjoiZXlK + MGVYQmxJam9pWTI5dWRHVnVkRjlpYkc5amExOWtaV3gwWVNJc0ltbHVaR1Y0SWpvd0xDSmtaV3gw + WVNJNmV5SjBlWEJsSWpvaWRHVjRkRjlrWld4MFlTSXNJblJsZUhRaU9pSlVhR2x6SW4xOSIsInAi + OiJhYmNkZWZnaGlqa2xtbm9wcXJzdHUifZY82M0AAAEPAAAAS80gAwULOmV2ZW50LXR5cGUHAAVj + aHVuaw06Y29udGVudC10eXBlBwAQYXBwbGljYXRpb24vanNvbg06bWVzc2FnZS10eXBlBwAFZXZl + bnR7ImJ5dGVzIjoiZXlKMGVYQmxJam9pWTI5dWRHVnVkRjlpYkc5amExOWtaV3gwWVNJc0ltbHVa + R1Y0SWpvd0xDSmtaV3gwWVNJNmV5SjBlWEJsSWpvaWRHVjRkRjlrWld4MFlTSXNJblJsZUhRaU9p + SWdhWE1pZlgwPSIsInAiOiJhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ekFCQ0RFRkdISUpLTE1O + T1BRUlNUVVZXIn2PKfsQAAAA+wAAAEu7yCY9CzpldmVudC10eXBlBwAFY2h1bmsNOmNvbnRlbnQt + dHlwZQcAEGFwcGxpY2F0aW9uL2pzb24NOm1lc3NhZ2UtdHlwZQcABWV2ZW50eyJieXRlcyI6ImV5 + SjBlWEJsSWpvaVkyOXVkR1Z1ZEY5aWJHOWphMTlrWld4MFlTSXNJbWx1WkdWNElqb3dMQ0prWld4 + MFlTSTZleUowZVhCbElqb2lkR1Y0ZEY5a1pXeDBZU0lzSW5SbGVIUWlPaUlnWVNKOWZRPT0iLCJw + IjoiYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpBQkMifWGL95MAAAEKAAAASwXAjHULOmV2ZW50 + LXR5cGUHAAVjaHVuaw06Y29udGVudC10eXBlBwAQYXBwbGljYXRpb24vanNvbg06bWVzc2FnZS10 + eXBlBwAFZXZlbnR7ImJ5dGVzIjoiZXlKMGVYQmxJam9pWTI5dWRHVnVkRjlpYkc5amExOWtaV3gw + WVNJc0ltbHVaR1Y0SWpvd0xDSmtaV3gwWVNJNmV5SjBlWEJsSWpvaWRHVjRkRjlrWld4MFlTSXNJ + blJsZUhRaU9pSWdkR1Z6ZENKOWZRPT0iLCJwIjoiYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXpB + QkNERUZHSElKS0xNTiJ9xf99iwAAALwAAABLURuiZAs6ZXZlbnQtdHlwZQcABWNodW5rDTpjb250 + ZW50LXR5cGUHABBhcHBsaWNhdGlvbi9qc29uDTptZXNzYWdlLXR5cGUHAAVldmVudHsiYnl0ZXMi + OiJleUowZVhCbElqb2lZMjl1ZEdWdWRGOWliRzlqYTE5emRHOXdJaXdpYVc1a1pYZ2lPakI5Iiwi + cCI6ImFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6In0ErvuiAAABDgAAAEvwQCq1CzpldmVudC10 + eXBlBwAFY2h1bmsNOmNvbnRlbnQtdHlwZQcAEGFwcGxpY2F0aW9uL2pzb24NOm1lc3NhZ2UtdHlw + ZQcABWV2ZW50eyJieXRlcyI6ImV5SjBlWEJsSWpvaWJXVnpjMkZuWlY5a1pXeDBZU0lzSW1SbGJI + UmhJanA3SW5OMGIzQmZjbVZoYzI5dUlqb2liV0Y0WDNSdmEyVnVjeUlzSW5OMGIzQmZjMlZ4ZFdW + dVkyVWlPbTUxYkd4OUxDSjFjMkZuWlNJNmV5SnZkWFJ3ZFhSZmRHOXJaVzV6SWpveE1IMTkiLCJw + IjoiYWJjZGVmZ2hpamtsIn0VL5oQAAABSQAAAEsak67sCzpldmVudC10eXBlBwAFY2h1bmsNOmNv + bnRlbnQtdHlwZQcAEGFwcGxpY2F0aW9uL2pzb24NOm1lc3NhZ2UtdHlwZQcABWV2ZW50eyJieXRl + cyI6ImV5SjBlWEJsSWpvaWJXVnpjMkZuWlY5emRHOXdJaXdpWVcxaGVtOXVMV0psWkhKdlkyc3Rh + 
VzUyYjJOaGRHbHZiazFsZEhKcFkzTWlPbnNpYVc1d2RYUlViMnRsYmtOdmRXNTBJam94TkN3aWIz + VjBjSFYwVkc5clpXNURiM1Z1ZENJNk1UQXNJbWx1ZG05allYUnBiMjVNWVhSbGJtTjVJam8xT1Rj + c0ltWnBjbk4wUW5sMFpVeGhkR1Z1WTNraU9qSTVNMzE5IiwicCI6ImFiY2RlZmdoaWprbG1ub3Bx + cnMifV7iQSc= + headers: + Connection: + - keep-alive + Content-Type: + - application/vnd.amazon.eventstream + Date: + - Fri, 24 Jan 2025 13:44:39 GMT + Set-Cookie: test_set_cookie + Transfer-Encoding: + - chunked + X-Amzn-Bedrock-Content-Type: + - application/json + x-amzn-RequestId: + - e52df188-e57f-43bb-a1bf-cfb42fd11fcd + status: + code: 200 + message: OK +version: 1 diff --git a/instrumentation/opentelemetry-instrumentation-botocore/tests/test_botocore_bedrock.py b/instrumentation/opentelemetry-instrumentation-botocore/tests/test_botocore_bedrock.py index ce3b4375e9..38f5d71b7f 100644 --- a/instrumentation/opentelemetry-instrumentation-botocore/tests/test_botocore_bedrock.py +++ b/instrumentation/opentelemetry-instrumentation-botocore/tests/test_botocore_bedrock.py @@ -27,7 +27,7 @@ from .bedrock_utils import ( assert_completion_attributes_from_streaming_body, assert_converse_completion_attributes, - assert_converse_stream_completion_attributes, + assert_stream_completion_attributes, ) BOTO3_VERSION = tuple(int(x) for x in boto3.__version__.split(".")) @@ -149,9 +149,12 @@ def test_converse_stream_with_content( output_tokens = usage["outputTokens"] assert text + assert finish_reason + assert input_tokens + assert output_tokens (span,) = span_exporter.get_finished_spans() - assert_converse_stream_completion_attributes( + assert_stream_completion_attributes( span, llm_model_value, input_tokens, @@ -188,7 +191,7 @@ def test_converse_stream_with_invalid_model( ) (span,) = span_exporter.get_finished_spans() - assert_converse_stream_completion_attributes( + assert_stream_completion_attributes( span, llm_model_value, operation_name="chat", @@ -322,3 +325,117 @@ def test_invoke_model_with_invalid_model( logs = log_exporter.get_finished_logs() assert len(logs) == 0 + + +@pytest.mark.parametrize( + "model_family", + ["amazon.nova", "amazon.titan", "anthropic.claude"], +) +@pytest.mark.vcr() +def test_invoke_model_with_response_stream_with_content( + span_exporter, + log_exporter, + bedrock_runtime_client, + instrument_with_content, + model_family, +): + llm_model_value = get_model_name_from_family(model_family) + max_tokens, temperature, top_p, stop_sequences = 10, 0.8, 1, ["|"] + body = get_invoke_model_body( + llm_model_value, max_tokens, temperature, top_p, stop_sequences + ) + response = bedrock_runtime_client.invoke_model_with_response_stream( + body=body, + modelId=llm_model_value, + ) + + # consume the stream in order to have it traced + finish_reason = None + input_tokens, output_tokens = None, None + text = "" + for event in response["body"]: + json_bytes = event["chunk"].get("bytes", b"") + decoded = json_bytes.decode("utf-8") + chunk = json.loads(decoded) + + # amazon.titan + if (stop_reason := chunk.get("completionReason")) is not None: + finish_reason = stop_reason + + if (output_text := chunk.get("outputText")) is not None: + text += output_text + + # amazon.titan, anthropic.claude + if invocation_metrics := chunk.get("amazon-bedrock-invocationMetrics"): + input_tokens = invocation_metrics["inputTokenCount"] + output_tokens = invocation_metrics["outputTokenCount"] + + # anthropic.claude + if (message_type := chunk.get("type")) is not None: + if message_type == "content_block_start": + text += chunk["content_block"]["text"] + elif message_type == 
"content_block_delta": + text += chunk["delta"]["text"] + elif message_type == "message_delta": + finish_reason = chunk["delta"]["stop_reason"] + + # amazon nova + if "contentBlockDelta" in chunk: + text += chunk["contentBlockDelta"]["delta"]["text"] + if "messageStop" in chunk: + finish_reason = chunk["messageStop"]["stopReason"] + if "metadata" in chunk: + usage = chunk["metadata"]["usage"] + input_tokens = usage["inputTokens"] + output_tokens = usage["outputTokens"] + + assert text + assert finish_reason + assert input_tokens + assert output_tokens + + (span,) = span_exporter.get_finished_spans() + assert_stream_completion_attributes( + span, + llm_model_value, + input_tokens, + output_tokens, + (finish_reason,), + "text_completion" if model_family == "amazon.titan" else "chat", + top_p, + temperature, + max_tokens, + stop_sequences, + ) + + logs = log_exporter.get_finished_logs() + assert len(logs) == 0 + + +@pytest.mark.vcr() +def test_invoke_model_with_response_stream_invalid_model( + span_exporter, + log_exporter, + bedrock_runtime_client, + instrument_with_content, +): + llm_model_value = "does-not-exist" + with pytest.raises(bedrock_runtime_client.exceptions.ClientError): + bedrock_runtime_client.invoke_model_with_response_stream( + body=b"", + modelId=llm_model_value, + ) + + (span,) = span_exporter.get_finished_spans() + assert_completion_attributes_from_streaming_body( + span, + llm_model_value, + None, + "chat", + ) + + assert span.status.status_code == StatusCode.ERROR + assert span.attributes[ERROR_TYPE] == "ValidationException" + + logs = log_exporter.get_finished_logs() + assert len(logs) == 0