From d3c9c59977edd6090cd33072ac5696eee65b7621 Mon Sep 17 00:00:00 2001 From: lievan Date: Mon, 16 Sep 2024 17:27:19 -0400 Subject: [PATCH 01/20] prompt templating --- ddtrace/llmobs/_constants.py | 1 + ddtrace/llmobs/_llmobs.py | 22 +++++++++++++++ ddtrace/llmobs/_trace_processor.py | 3 +++ ddtrace/llmobs/_utils.py | 27 +++++++++++++++++++ ddtrace/llmobs/utils.py | 1 + ...t-prompt-annotations-b8e406261197f61a.yaml | 6 +++++ 6 files changed, 60 insertions(+) create mode 100644 releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml diff --git a/ddtrace/llmobs/_constants.py b/ddtrace/llmobs/_constants.py index 6c6c2ae822..af022c5bc4 100644 --- a/ddtrace/llmobs/_constants.py +++ b/ddtrace/llmobs/_constants.py @@ -14,6 +14,7 @@ INPUT_MESSAGES = "_ml_obs.meta.input.messages" INPUT_VALUE = "_ml_obs.meta.input.value" INPUT_PARAMETERS = "_ml_obs.meta.input.parameters" +INPUT_PROMPT = "_ml_obs.meta.input.prompt" OUTPUT_DOCUMENTS = "_ml_obs.meta.output.documents" OUTPUT_MESSAGES = "_ml_obs.meta.output.messages" diff --git a/ddtrace/llmobs/_llmobs.py b/ddtrace/llmobs/_llmobs.py index b842edbce8..c22054d6d7 100644 --- a/ddtrace/llmobs/_llmobs.py +++ b/ddtrace/llmobs/_llmobs.py @@ -24,6 +24,7 @@ from ddtrace.llmobs._constants import INPUT_DOCUMENTS from ddtrace.llmobs._constants import INPUT_MESSAGES from ddtrace.llmobs._constants import INPUT_PARAMETERS +from ddtrace.llmobs._constants import INPUT_PROMPT from ddtrace.llmobs._constants import INPUT_VALUE from ddtrace.llmobs._constants import METADATA from ddtrace.llmobs._constants import METRICS @@ -46,6 +47,7 @@ from ddtrace.llmobs._utils import _get_session_id from ddtrace.llmobs._utils import _inject_llmobs_parent_id from ddtrace.llmobs._utils import _unserializable_default_repr +from ddtrace.llmobs._utils import validate_prompt from ddtrace.llmobs._writer import LLMObsEvalMetricWriter from ddtrace.llmobs._writer import LLMObsSpanWriter from ddtrace.llmobs.utils import Documents @@ -475,6 +477,7 @@ def annotate( cls, span: Optional[Span] = None, parameters: Optional[Dict[str, Any]] = None, + prompt: Optional[dict] = None, input_data: Optional[Any] = None, output_data: Optional[Any] = None, metadata: Optional[Dict[str, Any]] = None, @@ -532,6 +535,12 @@ def annotate( if not span_kind: log.debug("Span kind not specified, skipping annotation for input/output data") return + if prompt is not None: + if span_kind == "llm": + cls._tag_prompt(span, prompt) + else: + log.warning("Annotating prompts are only supported for LLM span kinds.") + if input_data or output_data: if span_kind == "llm": cls._tag_llm_io(span, input_messages=input_data, output_messages=output_data) @@ -542,6 +551,19 @@ def annotate( else: cls._tag_text_io(span, input_value=input_data, output_value=output_data) + @staticmethod + def _tag_prompt(span, prompt: dict) -> None: + """Tags a given LLMObs span with a prompt""" + validated_prompt = None + try: + validated_prompt = validate_prompt(prompt) + except TypeError: + log.warning("Failed to validate prompt with error: ", exc_info=True) + return + + if validated_prompt is not None: + span.set_tag_str(INPUT_PROMPT, json.dumps(validated_prompt)) + @staticmethod def _tag_params(span: Span, params: Dict[str, Any]) -> None: """Tags input parameters for a given LLMObs span. 
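A minimal usage sketch of the new `prompt` argument added to `LLMObs.annotate` above; the span name, model name, and template values are illustrative placeholders (not taken from the patch), and `LLMObs.enable()` is assumed to have been called elsewhere in the application:

```python
from ddtrace.llmobs import LLMObs

# Assumes LLMObs.enable(...) has already been called for this application.
with LLMObs.llm(model_name="example-model", name="summarize_article") as span:
    LLMObs.annotate(
        span=span,
        prompt={
            "template": "Summarize the following text: {article}",
            "variables": {"article": "..."},
            "id": "summarize-article",
            "version": "1.0.0",
        },
        input_data=[{"role": "user", "content": "Summarize the following text: ..."}],
        output_data=[{"role": "assistant", "content": "..."}],
    )
```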
diff --git a/ddtrace/llmobs/_trace_processor.py b/ddtrace/llmobs/_trace_processor.py index ea86841657..4d22fb982e 100644 --- a/ddtrace/llmobs/_trace_processor.py +++ b/ddtrace/llmobs/_trace_processor.py @@ -16,6 +16,7 @@ from ddtrace.llmobs._constants import INPUT_DOCUMENTS from ddtrace.llmobs._constants import INPUT_MESSAGES from ddtrace.llmobs._constants import INPUT_PARAMETERS +from ddtrace.llmobs._constants import INPUT_PROMPT from ddtrace.llmobs._constants import INPUT_VALUE from ddtrace.llmobs._constants import METADATA from ddtrace.llmobs._constants import METRICS @@ -85,6 +86,8 @@ def _llmobs_span_event(self, span: Span) -> Dict[str, Any]: meta["output"]["value"] = span._meta.pop(OUTPUT_VALUE) if span_kind == "retrieval" and span.get_tag(OUTPUT_DOCUMENTS) is not None: meta["output"]["documents"] = json.loads(span._meta.pop(OUTPUT_DOCUMENTS)) + if span_kind == "llm" and span.get_tag(INPUT_PROMPT) is not None: + meta["input"]["prompt"] = json.loads(span._meta.pop(INPUT_PROMPT)) if span.error: meta[ERROR_MSG] = span.get_tag(ERROR_MSG) meta[ERROR_STACK] = span.get_tag(ERROR_STACK) diff --git a/ddtrace/llmobs/_utils.py b/ddtrace/llmobs/_utils.py index 2e55be8e44..f492bfb874 100644 --- a/ddtrace/llmobs/_utils.py +++ b/ddtrace/llmobs/_utils.py @@ -17,6 +17,33 @@ log = get_logger(__name__) +def validate_prompt(prompt: dict) -> dict: + validated_prompt = {} + if not isinstance(prompt, dict): + raise TypeError("Prompt must be a dictionary") + if prompt.get("variables"): + variables = prompt["variables"] + if not isinstance(variables, dict): + raise TypeError("Prompt variables must be a dictionary.") + for key, value in variables.items(): + if not isinstance(key, str) or not isinstance(value, str): + raise TypeError("Prompt variable keys and values must be strings.") + validated_prompt["variables"] = prompt["variables"] + if prompt.get("template"): + if not isinstance(prompt["template"], str): + raise TypeError("Prompt template must be a string") + validated_prompt["template"] = prompt["template"] + if prompt.get("version"): + if not isinstance(prompt["version"], str): + raise TypeError("Prompt version must be a string.") + validated_prompt["version"] = prompt["version"] + if prompt.get("id"): + if not isinstance(prompt["id"], str): + raise TypeError("Prompt id must be a string.") + validated_prompt["id"] = prompt["id"] + return validated_prompt + + class AnnotationContext: def __init__(self, _tracer, _annotation_callback): self._tracer = _tracer diff --git a/ddtrace/llmobs/utils.py b/ddtrace/llmobs/utils.py index c63a0e3f44..28ff96fdc3 100644 --- a/ddtrace/llmobs/utils.py +++ b/ddtrace/llmobs/utils.py @@ -19,6 +19,7 @@ ExportedLLMObsSpan = TypedDict("ExportedLLMObsSpan", {"span_id": str, "trace_id": str}) Document = TypedDict("Document", {"name": str, "id": str, "text": str, "score": float}, total=False) Message = TypedDict("Message", {"content": str, "role": str}, total=False) +Prompt = TypedDict("Prompt", {"variables": Dict[str, str], "template": str, "id": str, "version": str}, total=False) class Messages: diff --git a/releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml b/releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml new file mode 100644 index 0000000000..0854f7f6ae --- /dev/null +++ b/releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml @@ -0,0 +1,6 @@ +--- +features: + - | + LLM Observability: This introduces the ability to annotate prompt templates via `LLMObs.annotate` on LLM spans. 
+ Prompt templates should be passed in as a `Prompt` typed dictionary using `from ddtrace.llmobs.utils import Prompt` + or a dictionary following the `Prompt` schema. \ No newline at end of file From e936ea022bfb4e5f3c215a7e8283b8792a0e0bd0 Mon Sep 17 00:00:00 2001 From: lievan Date: Mon, 16 Sep 2024 17:31:50 -0400 Subject: [PATCH 02/20] add tests --- tests/llmobs/test_llmobs_service.py | 72 +++++++++++++++++++++ tests/llmobs/test_llmobs_trace_processor.py | 15 +++++ 2 files changed, 87 insertions(+) diff --git a/tests/llmobs/test_llmobs_service.py b/tests/llmobs/test_llmobs_service.py index 4e28767a1c..c537b3fb45 100644 --- a/tests/llmobs/test_llmobs_service.py +++ b/tests/llmobs/test_llmobs_service.py @@ -14,6 +14,7 @@ from ddtrace.llmobs._constants import INPUT_DOCUMENTS from ddtrace.llmobs._constants import INPUT_MESSAGES from ddtrace.llmobs._constants import INPUT_PARAMETERS +from ddtrace.llmobs._constants import INPUT_PROMPT from ddtrace.llmobs._constants import INPUT_VALUE from ddtrace.llmobs._constants import METADATA from ddtrace.llmobs._constants import METRICS @@ -28,6 +29,7 @@ from ddtrace.llmobs._constants import SPAN_START_WHILE_DISABLED_WARNING from ddtrace.llmobs._constants import TAGS from ddtrace.llmobs._llmobs import LLMObsTraceProcessor +from ddtrace.llmobs.utils import Prompt from tests.llmobs._utils import _expected_llmobs_eval_metric_event from tests.llmobs._utils import _expected_llmobs_llm_span_event from tests.llmobs._utils import _expected_llmobs_non_llm_span_event @@ -787,6 +789,76 @@ def test_annotate_metrics_wrong_type(LLMObs, mock_logs): ) +def test_annotate_prompt_dict(LLMObs): + with LLMObs.llm(model_name="test_model") as span: + LLMObs.annotate( + span=span, + prompt={ + "template": "{var1} {var3}", + "variables": {"var1": "var1", "var2": "var3"}, + "version": "1.0.0", + "id": "test_prompt", + }, + ) + assert json.loads(span.get_tag(INPUT_PROMPT)) == { + "template": "{var1} {var3}", + "variables": {"var1": "var1", "var2": "var3"}, + "version": "1.0.0", + "id": "test_prompt", + } + + +def test_annotate_prompt_typed_dict(LLMObs): + with LLMObs.llm(model_name="test_model") as span: + LLMObs.annotate( + span=span, + prompt=Prompt( + template="{var1} {var3}", + variables={"var1": "var1", "var2": "var3"}, + version="1.0.0", + id="test_prompt", + ), + ) + assert json.loads(span.get_tag(INPUT_PROMPT)) == { + "template": "{var1} {var3}", + "variables": {"var1": "var1", "var2": "var3"}, + "version": "1.0.0", + "id": "test_prompt", + } + + +def test_annotate_prompt_wrong_type(LLMObs, mock_logs): + with LLMObs.llm( + model_name="test_model", + ) as span: + LLMObs.annotate( + span=span, + prompt="prompt", + ) + assert span.get_tag(INPUT_PROMPT) is None + mock_logs.warning.assert_called_once_with("Failed to validate prompt with error: ", exc_info=True) + mock_logs.reset_mock() + + LLMObs.annotate( + span=span, + prompt={ + "template": 1, + }, + ) + mock_logs.warning.assert_called_once_with("Failed to validate prompt with error: ", exc_info=True) + mock_logs.reset_mock() + + +def test_annotate_prompt_wrong_kind(LLMObs, mock_logs): + with LLMObs.task(name="dummy") as span: + LLMObs.annotate( + prompt={"variables": {"var1": "var1"}}, + ) + assert span.get_tag(INPUT_PROMPT) is None + mock_logs.warning.assert_called_once_with("Annotating prompts are only supported for LLM span kinds.") + mock_logs.reset_mock() + + def test_span_error_sets_error(LLMObs, mock_llmobs_span_writer): with pytest.raises(ValueError): with LLMObs.llm(model_name="test_model", 
model_provider="test_model_provider") as span: diff --git a/tests/llmobs/test_llmobs_trace_processor.py b/tests/llmobs/test_llmobs_trace_processor.py index 908a102788..c0a199391d 100644 --- a/tests/llmobs/test_llmobs_trace_processor.py +++ b/tests/llmobs/test_llmobs_trace_processor.py @@ -1,3 +1,5 @@ +import json + import mock import pytest @@ -5,6 +7,7 @@ from ddtrace.ext import SpanTypes from ddtrace.llmobs._constants import INPUT_MESSAGES from ddtrace.llmobs._constants import INPUT_PARAMETERS +from ddtrace.llmobs._constants import INPUT_PROMPT from ddtrace.llmobs._constants import INPUT_VALUE from ddtrace.llmobs._constants import LANGCHAIN_APM_SPAN_NAME from ddtrace.llmobs._constants import METADATA @@ -326,6 +329,18 @@ def test_output_value_is_set(): assert tp._llmobs_span_event(llm_span)["meta"]["output"]["value"] == "value" +def test_prompt_is_set(): + """Test that prompt is set on the span event if they are present on the span.""" + dummy_tracer = DummyTracer() + mock_llmobs_span_writer = mock.MagicMock() + with override_global_config(dict(_llmobs_ml_app="unnamed-ml-app")): + with dummy_tracer.trace("root_llm_span", span_type=SpanTypes.LLM) as llm_span: + llm_span.set_tag(SPAN_KIND, "llm") + llm_span.set_tag(INPUT_PROMPT, json.dumps({"variables": {"var1": "var2"}})) + tp = LLMObsTraceProcessor(llmobs_span_writer=mock_llmobs_span_writer) + assert tp._llmobs_span_event(llm_span)["meta"]["input"]["prompt"] == {"variables": {"var1": "var2"}} + + def test_metadata_is_set(): """Test that metadata is set on the span event if it is present on the span.""" dummy_tracer = DummyTracer() From 3fe76e760827bbb3c4b69a7a02883cd92c32bb3b Mon Sep 17 00:00:00 2001 From: lievan Date: Mon, 16 Sep 2024 17:35:27 -0400 Subject: [PATCH 03/20] prompt validation --- ddtrace/llmobs/_llmobs.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/ddtrace/llmobs/_llmobs.py b/ddtrace/llmobs/_llmobs.py index c22054d6d7..1d4cab83b4 100644 --- a/ddtrace/llmobs/_llmobs.py +++ b/ddtrace/llmobs/_llmobs.py @@ -554,16 +554,13 @@ def annotate( @staticmethod def _tag_prompt(span, prompt: dict) -> None: """Tags a given LLMObs span with a prompt""" - validated_prompt = None try: validated_prompt = validate_prompt(prompt) + span.set_tag_str(INPUT_PROMPT, json.dumps(validated_prompt)) except TypeError: log.warning("Failed to validate prompt with error: ", exc_info=True) return - if validated_prompt is not None: - span.set_tag_str(INPUT_PROMPT, json.dumps(validated_prompt)) - @staticmethod def _tag_params(span: Span, params: Dict[str, Any]) -> None: """Tags input parameters for a given LLMObs span. 
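As a rough sketch of the validation behavior exercised by the patches above (based on the `validate_prompt` helper added in `ddtrace/llmobs/_utils.py` and the tests in this series): every prompt field is optional, but an invalid field raises `TypeError`, which `_tag_prompt` catches and logs as a warning instead of tagging the span. Example values below are illustrative.

```python
from ddtrace.llmobs._utils import validate_prompt

# All fields are optional, but each one is type-checked when present.
valid = validate_prompt(
    {"template": "{var1} {var2}", "variables": {"var1": "a", "var2": "b"}, "id": "test_prompt", "version": "1.0.0"}
)

# A non-string template (or a non-dict prompt) raises TypeError; on a real span
# this is swallowed by _tag_prompt, which logs a warning and skips tagging.
try:
    validate_prompt({"template": 1})
except TypeError as err:
    print(err)  # Prompt template must be a string
```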
From 75f8a6757181613245f46d32b2e877d5cca37d6d Mon Sep 17 00:00:00 2001 From: lievan Date: Tue, 17 Sep 2024 13:38:52 -0400 Subject: [PATCH 04/20] whitespace' --- tests/llmobs/test_llmobs_service.py | 20 ++++---------------- 1 file changed, 4 insertions(+), 16 deletions(-) diff --git a/tests/llmobs/test_llmobs_service.py b/tests/llmobs/test_llmobs_service.py index c537b3fb45..8a59aade41 100644 --- a/tests/llmobs/test_llmobs_service.py +++ b/tests/llmobs/test_llmobs_service.py @@ -828,32 +828,20 @@ def test_annotate_prompt_typed_dict(LLMObs): def test_annotate_prompt_wrong_type(LLMObs, mock_logs): - with LLMObs.llm( - model_name="test_model", - ) as span: - LLMObs.annotate( - span=span, - prompt="prompt", - ) + with LLMObs.llm(model_name="test_model") as span: + LLMObs.annotate(span=span, prompt="prompt") assert span.get_tag(INPUT_PROMPT) is None mock_logs.warning.assert_called_once_with("Failed to validate prompt with error: ", exc_info=True) mock_logs.reset_mock() - LLMObs.annotate( - span=span, - prompt={ - "template": 1, - }, - ) + LLMObs.annotate(span=span, prompt={"template": 1}) mock_logs.warning.assert_called_once_with("Failed to validate prompt with error: ", exc_info=True) mock_logs.reset_mock() def test_annotate_prompt_wrong_kind(LLMObs, mock_logs): with LLMObs.task(name="dummy") as span: - LLMObs.annotate( - prompt={"variables": {"var1": "var1"}}, - ) + LLMObs.annotate(prompt={"variables": {"var1": "var1"}}) assert span.get_tag(INPUT_PROMPT) is None mock_logs.warning.assert_called_once_with("Annotating prompts are only supported for LLM span kinds.") mock_logs.reset_mock() From a1ef17a7ec1f648e24f65f2d46e08de039ae7620 Mon Sep 17 00:00:00 2001 From: lievan Date: Tue, 17 Sep 2024 13:49:51 -0400 Subject: [PATCH 05/20] document prompt better --- ddtrace/llmobs/_llmobs.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ddtrace/llmobs/_llmobs.py b/ddtrace/llmobs/_llmobs.py index 1d4cab83b4..cfdc3d87d1 100644 --- a/ddtrace/llmobs/_llmobs.py +++ b/ddtrace/llmobs/_llmobs.py @@ -490,6 +490,8 @@ def annotate( :param Span span: Span to annotate. If no span is provided, the current active span will be used. Must be an LLMObs-type span, i.e. generated by the LLMObs SDK. + :param prompt: A dictionary represents the prompt used for an LLM call in the following form: + {"template": "...", "id": "...", "version": "...", "variables": dict[str, str]} :param input_data: A single input string, dictionary, or a list of dictionaries based on the span kind: - llm spans: accepts a string, or a dictionary of form {"content": "...", "role": "..."}, or a list of dictionaries with the same signature. 
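The same annotation can also be written with the `Prompt` TypedDict helper from `ddtrace.llmobs.utils`, mirroring the `test_annotate_prompt_typed_dict` test earlier in this series; the template and variable values are illustrative:

```python
from ddtrace.llmobs import LLMObs
from ddtrace.llmobs.utils import Prompt

with LLMObs.llm(model_name="test_model") as span:
    LLMObs.annotate(
        span=span,
        prompt=Prompt(
            template="{var1} {var2}",
            variables={"var1": "a", "var2": "b"},
            id="test_prompt",
            version="1.0.0",
        ),
    )
```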
From 7f75e2f4e82f6872b77025e3870927f91549d1f7 Mon Sep 17 00:00:00 2001 From: lievan Date: Tue, 17 Sep 2024 13:53:12 -0400 Subject: [PATCH 06/20] refactors --- ddtrace/llmobs/_llmobs.py | 2 +- ddtrace/llmobs/_utils.py | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/ddtrace/llmobs/_llmobs.py b/ddtrace/llmobs/_llmobs.py index cfdc3d87d1..9b0b96d951 100644 --- a/ddtrace/llmobs/_llmobs.py +++ b/ddtrace/llmobs/_llmobs.py @@ -558,7 +558,7 @@ def _tag_prompt(span, prompt: dict) -> None: """Tags a given LLMObs span with a prompt""" try: validated_prompt = validate_prompt(prompt) - span.set_tag_str(INPUT_PROMPT, json.dumps(validated_prompt)) + span.set_tag_str(INPUT_PROMPT, json.dumps(validated_prompt, default=_unserializable_default_repr)) except TypeError: log.warning("Failed to validate prompt with error: ", exc_info=True) return diff --git a/ddtrace/llmobs/_utils.py b/ddtrace/llmobs/_utils.py index f492bfb874..f1bc032912 100644 --- a/ddtrace/llmobs/_utils.py +++ b/ddtrace/llmobs/_utils.py @@ -25,9 +25,11 @@ def validate_prompt(prompt: dict) -> dict: variables = prompt["variables"] if not isinstance(variables, dict): raise TypeError("Prompt variables must be a dictionary.") - for key, value in variables.items(): - if not isinstance(key, str) or not isinstance(value, str): - raise TypeError("Prompt variable keys and values must be strings.") + if not any(isinstance(k, str) or isinstance(v, str) for k, v in variables.items()): + raise TypeError("Prompt variable keys and values must be strings.") + # for key, value in variables.items(): + # if not isinstance(key, str) or not isinstance(value, str): + # raise TypeError("Prompt variable keys and values must be strings.") validated_prompt["variables"] = prompt["variables"] if prompt.get("template"): if not isinstance(prompt["template"], str): From a5018b76792c7348c99ec97844690ba21e258ad5 Mon Sep 17 00:00:00 2001 From: lievan Date: Tue, 17 Sep 2024 13:54:43 -0400 Subject: [PATCH 07/20] rel ntoe --- .../notes/support-prompt-annotations-b8e406261197f61a.yaml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml b/releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml index 0854f7f6ae..a47f833fd5 100644 --- a/releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml +++ b/releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml @@ -1,6 +1,4 @@ --- features: - | - LLM Observability: This introduces the ability to annotate prompt templates via `LLMObs.annotate` on LLM spans. - Prompt templates should be passed in as a `Prompt` typed dictionary using `from ddtrace.llmobs.utils import Prompt` - or a dictionary following the `Prompt` schema. \ No newline at end of file + LLM Observability: LLM Observability: Introduces the prompt template argument to `LLMObs.annotate(prompt={...})` for LLM span kinds. 
\ No newline at end of file From 36fe0e1b8e256a0ab98e6952d3acbf739037212f Mon Sep 17 00:00:00 2001 From: lievan Date: Tue, 17 Sep 2024 14:19:47 -0400 Subject: [PATCH 08/20] add a link to release notes --- .../notes/support-prompt-annotations-b8e406261197f61a.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml b/releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml index a47f833fd5..59dc5914d6 100644 --- a/releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml +++ b/releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml @@ -1,4 +1,5 @@ --- features: - | - LLM Observability: LLM Observability: Introduces the prompt template argument to `LLMObs.annotate(prompt={...})` for LLM span kinds. \ No newline at end of file + LLM Observability: LLM Observability: Introduces the prompt template argument to `LLMObs.annotate(prompt={...})` for LLM span kinds. + For more information on prompt annotations, see https://docs-staging.datadoghq.com/alai97/llm-obs-docs-nav-update/llm_observability/setup/sdk/#annotating-a-span. \ No newline at end of file From ac5ab8062d4ea8f8c00ad6133212733f8cbd1962 Mon Sep 17 00:00:00 2001 From: lievan Date: Wed, 18 Sep 2024 16:53:41 -0400 Subject: [PATCH 09/20] add other args to annotaiton context --- ddtrace/llmobs/_llmobs.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/ddtrace/llmobs/_llmobs.py b/ddtrace/llmobs/_llmobs.py index 9b0b96d951..90d2087197 100644 --- a/ddtrace/llmobs/_llmobs.py +++ b/ddtrace/llmobs/_llmobs.py @@ -230,16 +230,27 @@ def disable(cls) -> None: log.debug("%s disabled", cls.__name__) @classmethod - def annotation_context(cls, tags: Optional[Dict[str, Any]] = None) -> AnnotationContext: + def annotation_context( + cls, + tags: Optional[Dict[str, Any]] = None, + prompt: Optional[dict] = None, + metadata: Optional[Dict[str, Any]] = None, + ) -> AnnotationContext: """ Sets specified attributes on all LLMObs spans created while the returned AnnotationContext is active. - Do not use nested annotation contexts to override the same tags since the order in which annotations + Do not use nested annotation contexts to override attributes since the order in which annotations are applied is non-deterministic. :param tags: Dictionary of JSON serializable key-value tag pairs to set or update on the LLMObs span regarding the span's context. + :param metadata: Dictionary of JSON serializable key-value metadata pairs relevant to the input/output operation + described by the LLMObs span. + :param prompt: A dictionary represents the prompt used for an LLM call in the following form: + {"template": "...", "id": "...", "version": "...", "variables": {"variable_1": "value_1", ...}} """ - return AnnotationContext(cls._instance.tracer, lambda span: cls.annotate(span, tags=tags)) + return AnnotationContext( + cls._instance.tracer, lambda span: cls.annotate(span, tags=tags, prompt=prompt, metadata=metadata) + ) @classmethod def flush(cls) -> None: @@ -491,7 +502,7 @@ def annotate( :param Span span: Span to annotate. If no span is provided, the current active span will be used. Must be an LLMObs-type span, i.e. generated by the LLMObs SDK. 
:param prompt: A dictionary represents the prompt used for an LLM call in the following form: - {"template": "...", "id": "...", "version": "...", "variables": dict[str, str]} + {"template": "...", "id": "...", "version": "...", "variables": {"variable_1": "value_1", ...}} :param input_data: A single input string, dictionary, or a list of dictionaries based on the span kind: - llm spans: accepts a string, or a dictionary of form {"content": "...", "role": "..."}, or a list of dictionaries with the same signature. From f6997875c0d0355c843d6c0b76a9dcabc72c00fa Mon Sep 17 00:00:00 2001 From: lievan Date: Wed, 18 Sep 2024 16:58:14 -0400 Subject: [PATCH 10/20] update example for prompt --- ddtrace/llmobs/_llmobs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ddtrace/llmobs/_llmobs.py b/ddtrace/llmobs/_llmobs.py index 9b0b96d951..9d1d7ad0fb 100644 --- a/ddtrace/llmobs/_llmobs.py +++ b/ddtrace/llmobs/_llmobs.py @@ -491,7 +491,7 @@ def annotate( :param Span span: Span to annotate. If no span is provided, the current active span will be used. Must be an LLMObs-type span, i.e. generated by the LLMObs SDK. :param prompt: A dictionary represents the prompt used for an LLM call in the following form: - {"template": "...", "id": "...", "version": "...", "variables": dict[str, str]} + {"template": "...", "id": "...", "version": "...", "variables": {"variable_1": "value_1", ...}}. :param input_data: A single input string, dictionary, or a list of dictionaries based on the span kind: - llm spans: accepts a string, or a dictionary of form {"content": "...", "role": "..."}, or a list of dictionaries with the same signature. From 929ae6186d5331add38bbdb08f0bf3743b96819b Mon Sep 17 00:00:00 2001 From: lievan Date: Wed, 18 Sep 2024 17:38:29 -0400 Subject: [PATCH 11/20] support modifying name and prompt for integration generated spans --- ddtrace/llmobs/_llmobs.py | 25 ++++---- ddtrace/llmobs/_trace_processor.py | 9 ++- tests/llmobs/test_llmobs_service.py | 64 +++++++++++++++++---- tests/llmobs/test_llmobs_trace_processor.py | 13 +++++ 4 files changed, 84 insertions(+), 27 deletions(-) diff --git a/ddtrace/llmobs/_llmobs.py b/ddtrace/llmobs/_llmobs.py index 90d2087197..fd20c0ef0c 100644 --- a/ddtrace/llmobs/_llmobs.py +++ b/ddtrace/llmobs/_llmobs.py @@ -231,25 +231,21 @@ def disable(cls) -> None: @classmethod def annotation_context( - cls, - tags: Optional[Dict[str, Any]] = None, - prompt: Optional[dict] = None, - metadata: Optional[Dict[str, Any]] = None, + cls, tags: Optional[Dict[str, Any]] = None, prompt: Optional[dict] = None, name: Optional[str] = None ) -> AnnotationContext: """ Sets specified attributes on all LLMObs spans created while the returned AnnotationContext is active. - Do not use nested annotation contexts to override attributes since the order in which annotations + Do not use annotation contexts to override attributes since the order in which annotations are applied is non-deterministic. :param tags: Dictionary of JSON serializable key-value tag pairs to set or update on the LLMObs span regarding the span's context. - :param metadata: Dictionary of JSON serializable key-value metadata pairs relevant to the input/output operation - described by the LLMObs span. 
- :param prompt: A dictionary represents the prompt used for an LLM call in the following form: + :param prompt: A dictionary that represents the prompt used for an LLM call in the following form: {"template": "...", "id": "...", "version": "...", "variables": {"variable_1": "value_1", ...}} + :param name: Set to override the span name for any spans annotated within the returned context. """ return AnnotationContext( - cls._instance.tracer, lambda span: cls.annotate(span, tags=tags, prompt=prompt, metadata=metadata) + cls._instance.tracer, lambda span: cls.annotate(span, tags=tags, prompt=prompt, _name=name) ) @classmethod @@ -494,6 +490,7 @@ def annotate( metadata: Optional[Dict[str, Any]] = None, metrics: Optional[Dict[str, Any]] = None, tags: Optional[Dict[str, Any]] = None, + _name: Optional[str] = None, ) -> None: """ Sets parameters, inputs, outputs, tags, and metrics as provided for a given LLMObs span. @@ -545,15 +542,13 @@ def annotate( if parameters is not None: log.warning("Setting parameters is deprecated, please set parameters and other metadata as tags instead.") cls._tag_params(span, parameters) + if _name is not None: + span.name = _name + if prompt is not None: + cls._tag_prompt(span, prompt) if not span_kind: log.debug("Span kind not specified, skipping annotation for input/output data") return - if prompt is not None: - if span_kind == "llm": - cls._tag_prompt(span, prompt) - else: - log.warning("Annotating prompts are only supported for LLM span kinds.") - if input_data or output_data: if span_kind == "llm": cls._tag_llm_io(span, input_messages=input_data, output_messages=output_data) diff --git a/ddtrace/llmobs/_trace_processor.py b/ddtrace/llmobs/_trace_processor.py index 5a654a8fb9..76bdccaa94 100644 --- a/ddtrace/llmobs/_trace_processor.py +++ b/ddtrace/llmobs/_trace_processor.py @@ -86,8 +86,13 @@ def _llmobs_span_event(self, span: Span) -> Dict[str, Any]: meta["output"]["value"] = span._meta.pop(OUTPUT_VALUE) if span_kind == "retrieval" and span.get_tag(OUTPUT_DOCUMENTS) is not None: meta["output"]["documents"] = json.loads(span._meta.pop(OUTPUT_DOCUMENTS)) - if span_kind == "llm" and span.get_tag(INPUT_PROMPT) is not None: - meta["input"]["prompt"] = json.loads(span._meta.pop(INPUT_PROMPT)) + if span.get_tag(INPUT_PROMPT) is not None: + if span_kind != "llm": + log.warning( + "Dropping prompt on non-LLM span kind, annotating prompts is only supported for LLM span kinds." 
+ ) + else: + meta["input"]["prompt"] = json.loads(span._meta.pop(INPUT_PROMPT)) if span.error: meta[ERROR_MSG] = span.get_tag(ERROR_MSG) meta[ERROR_STACK] = span.get_tag(ERROR_STACK) diff --git a/tests/llmobs/test_llmobs_service.py b/tests/llmobs/test_llmobs_service.py index 8a59aade41..cde2b71947 100644 --- a/tests/llmobs/test_llmobs_service.py +++ b/tests/llmobs/test_llmobs_service.py @@ -839,14 +839,6 @@ def test_annotate_prompt_wrong_type(LLMObs, mock_logs): mock_logs.reset_mock() -def test_annotate_prompt_wrong_kind(LLMObs, mock_logs): - with LLMObs.task(name="dummy") as span: - LLMObs.annotate(prompt={"variables": {"var1": "var1"}}) - assert span.get_tag(INPUT_PROMPT) is None - mock_logs.warning.assert_called_once_with("Annotating prompts are only supported for LLM span kinds.") - mock_logs.reset_mock() - - def test_span_error_sets_error(LLMObs, mock_llmobs_span_writer): with pytest.raises(ValueError): with LLMObs.llm(model_name="test_model", model_provider="test_model_provider") as span: @@ -1572,13 +1564,39 @@ def test_annotation_context_modifies_span_tags(LLMObs): assert json.loads(span.get_tag(TAGS)) == {"foo": "bar"} -def test_annotation_context_finished_context_does_not_modify_spans(LLMObs): +def test_annotation_context_modifies_prompt(LLMObs): + with LLMObs.annotation_context(prompt={"template": "test_template"}): + with LLMObs.llm(name="test_agent", model_name="test") as span: + assert json.loads(span.get_tag(INPUT_PROMPT)) == {"template": "test_template"} + + +def test_annotation_context_modifies_name(LLMObs): + with LLMObs.annotation_context(name="test_agent_override"): + with LLMObs.llm(name="test_agent", model_name="test") as span: + assert span.name == "test_agent_override" + + +def test_annotation_context_finished_context_does_not_modify_tags(LLMObs): with LLMObs.annotation_context(tags={"foo": "bar"}): pass with LLMObs.agent(name="test_agent") as span: assert span.get_tag(TAGS) is None +def test_annotation_context_finished_context_does_not_modify_prompt(LLMObs): + with LLMObs.annotation_context(prompt={"template": "test_template"}): + pass + with LLMObs.llm(name="test_agent", model_name="test") as span: + assert span.get_tag(INPUT_PROMPT) is None + + +def test_annotation_context_finished_context_does_not_modify_name(LLMObs): + with LLMObs.annotation_context(name="test_agent_override"): + pass + with LLMObs.agent(name="test_agent") as span: + assert span.name == "test_agent" + + def test_annotation_context_nested(LLMObs): with LLMObs.annotation_context(tags={"foo": "bar", "boo": "bar"}): with LLMObs.annotation_context(tags={"car": "car"}): @@ -1592,13 +1610,39 @@ async def test_annotation_context_async_modifies_span_tags(LLMObs): assert json.loads(span.get_tag(TAGS)) == {"foo": "bar"} -async def test_annotation_context_async_finished_context_does_not_modify_spans(LLMObs): +async def test_annotation_context_async_modifies_prompt(LLMObs): + async with LLMObs.annotation_context(prompt={"template": "test_template"}): + with LLMObs.llm(name="test_agent", model_name="test") as span: + assert json.loads(span.get_tag(INPUT_PROMPT)) == {"template": "test_template"} + + +async def test_annotation_context_async_modifies_name(LLMObs): + async with LLMObs.annotation_context(name="test_agent_override"): + with LLMObs.llm(name="test_agent", model_name="test") as span: + assert span.name == "test_agent_override" + + +async def test_annotation_context_async_finished_context_does_not_modify_tags(LLMObs): async with LLMObs.annotation_context(tags={"foo": "bar"}): pass with 
LLMObs.agent(name="test_agent") as span: assert span.get_tag(TAGS) is None +async def test_annotation_context_async_finished_context_does_not_modify_prompt(LLMObs): + async with LLMObs.annotation_context(prompt={"template": "test_template"}): + pass + with LLMObs.llm(name="test_agent", model_name="test") as span: + assert span.get_tag(INPUT_PROMPT) is None + + +async def test_annotation_context_finished_context_async_does_not_modify_name(LLMObs): + async with LLMObs.annotation_context(name="test_agent_override"): + pass + with LLMObs.agent(name="test_agent") as span: + assert span.name == "test_agent" + + async def test_annotation_context_async_nested(LLMObs): async with LLMObs.annotation_context(tags={"foo": "bar", "boo": "bar"}): async with LLMObs.annotation_context(tags={"car": "car"}): diff --git a/tests/llmobs/test_llmobs_trace_processor.py b/tests/llmobs/test_llmobs_trace_processor.py index c0a199391d..da1544c5e6 100644 --- a/tests/llmobs/test_llmobs_trace_processor.py +++ b/tests/llmobs/test_llmobs_trace_processor.py @@ -341,6 +341,19 @@ def test_prompt_is_set(): assert tp._llmobs_span_event(llm_span)["meta"]["input"]["prompt"] == {"variables": {"var1": "var2"}} +def test_prompt_is_not_set_for_non_llm_spans(): + """Test that prompt is NOT set on the span event if the span is not an LLM span.""" + dummy_tracer = DummyTracer() + mock_llmobs_span_writer = mock.MagicMock() + with override_global_config(dict(_llmobs_ml_app="unnamed-ml-app")): + with dummy_tracer.trace("task_span", span_type=SpanTypes.LLM) as task_span: + task_span.set_tag(SPAN_KIND, "task") + task_span.set_tag(INPUT_VALUE, "ival") + task_span.set_tag(INPUT_PROMPT, json.dumps({"variables": {"var1": "var2"}})) + tp = LLMObsTraceProcessor(llmobs_span_writer=mock_llmobs_span_writer) + assert tp._llmobs_span_event(task_span)["meta"]["input"].get("prompt") is None + + def test_metadata_is_set(): """Test that metadata is set on the span event if it is present on the span.""" dummy_tracer = DummyTracer() From 0f7525d08ff00afc43447c4398103782be7ab84e Mon Sep 17 00:00:00 2001 From: lievan Date: Wed, 18 Sep 2024 17:45:54 -0400 Subject: [PATCH 12/20] rel note --- ...tion-context-modify-name-and-prompt-cc74b3b268983181.yaml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 releasenotes/notes/annotation-context-modify-name-and-prompt-cc74b3b268983181.yaml diff --git a/releasenotes/notes/annotation-context-modify-name-and-prompt-cc74b3b268983181.yaml b/releasenotes/notes/annotation-context-modify-name-and-prompt-cc74b3b268983181.yaml new file mode 100644 index 0000000000..a935291ffd --- /dev/null +++ b/releasenotes/notes/annotation-context-modify-name-and-prompt-cc74b3b268983181.yaml @@ -0,0 +1,5 @@ +--- +features: + - | + For new features such as a new integration or component. Use present tense with the following format: + LLM Observability: Introduces `prompt` and `name` arguments to LLMObs.annotation_context to support setting the `name` and `prompt` of integration generated spans. \ No newline at end of file From a6ae5a920aae257b8f91da6a9aa32321fa2dc8b8 Mon Sep 17 00:00:00 2001 From: lievan Date: Wed, 18 Sep 2024 20:16:26 -0400 Subject: [PATCH 13/20] that --- ddtrace/llmobs/_llmobs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ddtrace/llmobs/_llmobs.py b/ddtrace/llmobs/_llmobs.py index 9d1d7ad0fb..49c5f462ab 100644 --- a/ddtrace/llmobs/_llmobs.py +++ b/ddtrace/llmobs/_llmobs.py @@ -490,7 +490,7 @@ def annotate( :param Span span: Span to annotate. 
If no span is provided, the current active span will be used. Must be an LLMObs-type span, i.e. generated by the LLMObs SDK. - :param prompt: A dictionary represents the prompt used for an LLM call in the following form: + :param prompt: A dictionary that represents the prompt used for an LLM call in the following form: {"template": "...", "id": "...", "version": "...", "variables": {"variable_1": "value_1", ...}}. :param input_data: A single input string, dictionary, or a list of dictionaries based on the span kind: - llm spans: accepts a string, or a dictionary of form {"content": "...", "role": "..."}, From dead1c61be3334825f857b98310bdacafa826fe1 Mon Sep 17 00:00:00 2001 From: lievan Date: Thu, 19 Sep 2024 11:33:47 -0400 Subject: [PATCH 14/20] fix doc link --- .../notes/support-prompt-annotations-b8e406261197f61a.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml b/releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml index 59dc5914d6..466a035978 100644 --- a/releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml +++ b/releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml @@ -2,4 +2,4 @@ features: - | LLM Observability: LLM Observability: Introduces the prompt template argument to `LLMObs.annotate(prompt={...})` for LLM span kinds. - For more information on prompt annotations, see https://docs-staging.datadoghq.com/alai97/llm-obs-docs-nav-update/llm_observability/setup/sdk/#annotating-a-span. \ No newline at end of file + For more information on prompt annotations, see https://docs.datadoghq.com/llm_observability/setup/sdk/#annotating-a-span. \ No newline at end of file From 9f0d3456b126a6af6b098aaf9d0dbe9781a80d7d Mon Sep 17 00:00:00 2001 From: lievan <42917263+lievan@users.noreply.github.com> Date: Thu, 19 Sep 2024 14:09:02 -0400 Subject: [PATCH 15/20] Update releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml Co-authored-by: Yun Kim <35776586+Yun-Kim@users.noreply.github.com> --- .../notes/support-prompt-annotations-b8e406261197f61a.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml b/releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml index 466a035978..5a99d19d63 100644 --- a/releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml +++ b/releasenotes/notes/support-prompt-annotations-b8e406261197f61a.yaml @@ -1,5 +1,5 @@ --- features: - | - LLM Observability: LLM Observability: Introduces the prompt template argument to `LLMObs.annotate(prompt={...})` for LLM span kinds. + LLM Observability: Introduces prompt template annotation, which can be passed as an argument to `LLMObs.annotate(prompt={...})` for LLM span kinds. For more information on prompt annotations, see https://docs.datadoghq.com/llm_observability/setup/sdk/#annotating-a-span. 
\ No newline at end of file From 2fd304aef1e9772dcd9c089fd0cacc61b73c0166 Mon Sep 17 00:00:00 2001 From: lievan Date: Thu, 19 Sep 2024 14:23:28 -0400 Subject: [PATCH 16/20] refactor validate prompt --- ddtrace/llmobs/_utils.py | 36 +++++++++++++++++++----------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/ddtrace/llmobs/_utils.py b/ddtrace/llmobs/_utils.py index f1bc032912..c34edf7e7f 100644 --- a/ddtrace/llmobs/_utils.py +++ b/ddtrace/llmobs/_utils.py @@ -1,4 +1,6 @@ +from typing import Dict from typing import Optional +from typing import Union import ddtrace from ddtrace import Span @@ -17,32 +19,32 @@ log = get_logger(__name__) -def validate_prompt(prompt: dict) -> dict: - validated_prompt = {} +def validate_prompt(prompt: dict) -> Dict[str, Union[str, dict]]: + validated_prompt = {} # type: Dict[str, Union[str, dict]] if not isinstance(prompt, dict): raise TypeError("Prompt must be a dictionary") - if prompt.get("variables"): - variables = prompt["variables"] + variables = prompt.get("variables") + template = prompt.get("template") + version = prompt.get("version") + prompt_id = prompt.get("id") + if variables is not None: if not isinstance(variables, dict): raise TypeError("Prompt variables must be a dictionary.") if not any(isinstance(k, str) or isinstance(v, str) for k, v in variables.items()): raise TypeError("Prompt variable keys and values must be strings.") - # for key, value in variables.items(): - # if not isinstance(key, str) or not isinstance(value, str): - # raise TypeError("Prompt variable keys and values must be strings.") - validated_prompt["variables"] = prompt["variables"] - if prompt.get("template"): - if not isinstance(prompt["template"], str): + validated_prompt["variables"] = variables + if template is not None: + if not isinstance(template, str): raise TypeError("Prompt template must be a string") - validated_prompt["template"] = prompt["template"] - if prompt.get("version"): - if not isinstance(prompt["version"], str): + validated_prompt["template"] = template + if version is not None: + if not isinstance(version, str): raise TypeError("Prompt version must be a string.") - validated_prompt["version"] = prompt["version"] - if prompt.get("id"): - if not isinstance(prompt["id"], str): + validated_prompt["version"] = version + if prompt_id is not None: + if not isinstance(prompt_id, str): raise TypeError("Prompt id must be a string.") - validated_prompt["id"] = prompt["id"] + validated_prompt["id"] = prompt_id return validated_prompt From c6212d2da0e4d4a171fed2ef754cf641a90869a0 Mon Sep 17 00:00:00 2001 From: lievan Date: Thu, 19 Sep 2024 16:38:16 -0400 Subject: [PATCH 17/20] clarify annotate --- ddtrace/llmobs/_llmobs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ddtrace/llmobs/_llmobs.py b/ddtrace/llmobs/_llmobs.py index ba65565505..3872fe5bd3 100644 --- a/ddtrace/llmobs/_llmobs.py +++ b/ddtrace/llmobs/_llmobs.py @@ -235,7 +235,7 @@ def annotation_context( ) -> AnnotationContext: """ Sets specified attributes on all LLMObs spans created while the returned AnnotationContext is active. - Do not use annotation contexts to override attributes since the order in which annotations + Do not use nested annotation contexts to override the same attributes since the order in which annotations are applied is non-deterministic. 
:param tags: Dictionary of JSON serializable key-value tag pairs to set or update on the LLMObs span From ed7e20580138596beb6812b4e9ae9860ff6a1936 Mon Sep 17 00:00:00 2001 From: lievan Date: Fri, 20 Sep 2024 11:39:43 -0400 Subject: [PATCH 18/20] delete print --- ddtrace/llmobs/_llmobs.py | 1 - 1 file changed, 1 deletion(-) diff --git a/ddtrace/llmobs/_llmobs.py b/ddtrace/llmobs/_llmobs.py index 12d26c575c..3872fe5bd3 100644 --- a/ddtrace/llmobs/_llmobs.py +++ b/ddtrace/llmobs/_llmobs.py @@ -566,7 +566,6 @@ def _tag_prompt(span, prompt: dict) -> None: validated_prompt = validate_prompt(prompt) span.set_tag_str(INPUT_PROMPT, safe_json(validated_prompt)) except TypeError: - print("IN HERE") log.warning("Failed to validate prompt with error: ", exc_info=True) return From a7f160a615c17080eba15b97ec881ba99e07a82b Mon Sep 17 00:00:00 2001 From: lievan Date: Mon, 23 Sep 2024 13:29:39 -0400 Subject: [PATCH 19/20] improve docs --- ddtrace/llmobs/_llmobs.py | 8 ++++++-- ...n-context-modify-name-and-prompt-cc74b3b268983181.yaml | 3 +-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/ddtrace/llmobs/_llmobs.py b/ddtrace/llmobs/_llmobs.py index 3872fe5bd3..ba51bcc3bf 100644 --- a/ddtrace/llmobs/_llmobs.py +++ b/ddtrace/llmobs/_llmobs.py @@ -241,7 +241,9 @@ def annotation_context( :param tags: Dictionary of JSON serializable key-value tag pairs to set or update on the LLMObs span regarding the span's context. :param prompt: A dictionary that represents the prompt used for an LLM call in the following form: - {"template": "...", "id": "...", "version": "...", "variables": {"variable_1": "value_1", ...}} + `{"template": "...", "id": "...", "version": "...", "variables": {"variable_1": "...", ...}}`. + Can also be set using the `ddtrace.llmobs.utils.Prompt` constructor class. + This argument is only applicable to LLM spans. :param name: Set to override the span name for any spans annotated within the returned context. """ return AnnotationContext( @@ -499,7 +501,9 @@ def annotate( :param Span span: Span to annotate. If no span is provided, the current active span will be used. Must be an LLMObs-type span, i.e. generated by the LLMObs SDK. :param prompt: A dictionary that represents the prompt used for an LLM call in the following form: - {"template": "...", "id": "...", "version": "...", "variables": {"variable_1": "value_1", ...}}. + `{"template": "...", "id": "...", "version": "...", "variables": {"variable_1": "...", ...}}`. + Can also be set using the `ddtrace.llmobs.utils.Prompt` constructor class. + This argument is only applicable to LLM spans. :param input_data: A single input string, dictionary, or a list of dictionaries based on the span kind: - llm spans: accepts a string, or a dictionary of form {"content": "...", "role": "..."}, or a list of dictionaries with the same signature. diff --git a/releasenotes/notes/annotation-context-modify-name-and-prompt-cc74b3b268983181.yaml b/releasenotes/notes/annotation-context-modify-name-and-prompt-cc74b3b268983181.yaml index a935291ffd..daa505d267 100644 --- a/releasenotes/notes/annotation-context-modify-name-and-prompt-cc74b3b268983181.yaml +++ b/releasenotes/notes/annotation-context-modify-name-and-prompt-cc74b3b268983181.yaml @@ -1,5 +1,4 @@ --- features: - | - For new features such as a new integration or component. Use present tense with the following format: - LLM Observability: Introduces `prompt` and `name` arguments to LLMObs.annotation_context to support setting the `name` and `prompt` of integration generated spans. 
\ No newline at end of file + LLM Observability: Introduces `prompt` and `name` arguments to ``LLMObs.annotation_context`` to support setting an integration generated span's name and `prompt` field. \ No newline at end of file From 2910871898311960215ed5cfdeefff372786662f Mon Sep 17 00:00:00 2001 From: lievan Date: Wed, 25 Sep 2024 08:07:37 -0400 Subject: [PATCH 20/20] add docs in release note, pop prompt --- ddtrace/llmobs/_trace_processor.py | 3 ++- ...tation-context-modify-name-and-prompt-cc74b3b268983181.yaml | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/ddtrace/llmobs/_trace_processor.py b/ddtrace/llmobs/_trace_processor.py index 76bdccaa94..46d8c98695 100644 --- a/ddtrace/llmobs/_trace_processor.py +++ b/ddtrace/llmobs/_trace_processor.py @@ -87,12 +87,13 @@ def _llmobs_span_event(self, span: Span) -> Dict[str, Any]: if span_kind == "retrieval" and span.get_tag(OUTPUT_DOCUMENTS) is not None: meta["output"]["documents"] = json.loads(span._meta.pop(OUTPUT_DOCUMENTS)) if span.get_tag(INPUT_PROMPT) is not None: + prompt_json_str = span._meta.pop(INPUT_PROMPT) if span_kind != "llm": log.warning( "Dropping prompt on non-LLM span kind, annotating prompts is only supported for LLM span kinds." ) else: - meta["input"]["prompt"] = json.loads(span._meta.pop(INPUT_PROMPT)) + meta["input"]["prompt"] = json.loads(prompt_json_str) if span.error: meta[ERROR_MSG] = span.get_tag(ERROR_MSG) meta[ERROR_STACK] = span.get_tag(ERROR_STACK) diff --git a/releasenotes/notes/annotation-context-modify-name-and-prompt-cc74b3b268983181.yaml b/releasenotes/notes/annotation-context-modify-name-and-prompt-cc74b3b268983181.yaml index daa505d267..ef218a6a80 100644 --- a/releasenotes/notes/annotation-context-modify-name-and-prompt-cc74b3b268983181.yaml +++ b/releasenotes/notes/annotation-context-modify-name-and-prompt-cc74b3b268983181.yaml @@ -1,4 +1,5 @@ --- features: - | - LLM Observability: Introduces `prompt` and `name` arguments to ``LLMObs.annotation_context`` to support setting an integration generated span's name and `prompt` field. \ No newline at end of file + LLM Observability: Introduces `prompt` and `name` arguments to ``LLMObs.annotation_context`` to support setting an integration generated span's name and `prompt` field. + For more information on annotation contexts, see https://docs.datadoghq.com/llm_observability/setup/sdk/#annotating-a-span. \ No newline at end of file