Skip to content

Commit 3d9eb57

Browse files
feat(otel): read gen_ai.conversation.id from OTEL baggage and set as span attribute (#429)
Read gen_ai.conversation.id from OTEL baggage in get_traced_request_and_span() and set it as an attribute on SDK-created spans. This allows callers that propagate conversation ID via standard OTEL baggage (opentelemetry.baggage) to have it automatically attached to descendant Mistral SDK spans, per GenAI semantic conventions. Generated by Mistral Vibe. Co-authored-by: Mistral Vibe <vibe@mistral.ai>
1 parent 95f440a commit 3d9eb57

File tree

2 files changed

+80
-0
lines changed

2 files changed

+80
-0
lines changed

src/mistralai/extra/observability/otel.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@
2020
import opentelemetry.semconv.attributes.server_attributes as server_attributes
2121
from opentelemetry import context as context_api
2222
from opentelemetry import propagate, trace
23+
from opentelemetry.baggage import get_baggage
2324
from opentelemetry.trace import Span, Status, StatusCode, Tracer, set_span_in_context
2425

2526
from .serialization import (
@@ -453,6 +454,12 @@ def get_traced_request_and_span(
453454
try:
454455
span = tracer.start_span(name=operation_id)
455456
span.set_attributes({"agent.trace.public": ""})
457+
# Propagate gen_ai.conversation.id from OTEL baggage if present
458+
conversation_id = get_baggage(gen_ai_attributes.GEN_AI_CONVERSATION_ID)
459+
if conversation_id:
460+
span.set_attribute(
461+
gen_ai_attributes.GEN_AI_CONVERSATION_ID, str(conversation_id)
462+
)
456463
# Inject the span context into the request headers to be used by the backend service to continue the trace
457464
propagate.inject(request.headers, context=set_span_in_context(span))
458465
span = enrich_span_from_request(span, operation_id, request)

src/mistralai/extra/tests/test_otel_tracing.py

Lines changed: 73 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,9 @@
1818
from unittest.mock import MagicMock
1919

2020
import httpx
21+
from opentelemetry import context as context_api
2122
from opentelemetry import trace
23+
from opentelemetry.baggage import set_baggage
2224
from opentelemetry.sdk.trace import TracerProvider
2325
from opentelemetry.sdk.trace.export import SimpleSpanProcessor
2426
from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter
@@ -1525,5 +1527,76 @@ def failing_tool(x: int) -> str:
15251527
)
15261528

15271529

1530+
# -- Baggage propagation: gen_ai.conversation.id ---------------------------
1531+
1532+
def test_conversation_id_from_baggage(self):
    """A gen_ai.conversation.id entry in OTEL baggage must surface as a span attribute."""
    # Minimal chat request/response pair to drive the hook lifecycle.
    request = ChatCompletionRequest(
        model="mistral-small-latest",
        messages=[UserMessage(content="Hello")],
    )
    response = ChatCompletionResponse(
        id="cmpl-baggage-001",
        object="chat.completion",
        model="mistral-small-latest",
        created=1700000010,
        choices=[
            ChatCompletionChoice(
                index=0,
                message=AssistantMessage(content="Hi!", tool_calls=None),
                finish_reason="stop",
            ),
        ],
        usage=UsageInfo(prompt_tokens=5, completion_tokens=2, total_tokens=7),
    )

    # Install the baggage entry in the active OTEL context for the duration
    # of the lifecycle, then restore the previous context unconditionally.
    baggage_ctx = set_baggage("gen_ai.conversation.id", "conv-from-baggage-123")
    detach_token = context_api.attach(baggage_ctx)
    try:
        self._run_hook_lifecycle(
            "chat_completion_v1_chat_completions_post",
            request,
            response,
        )
    finally:
        context_api.detach(detach_token)

    exported = self._get_single_span()
    self.assertEqual(
        exported.attributes["gen_ai.conversation.id"], "conv-from-baggage-123"
    )
1569+
1570+
def test_no_conversation_id_without_baggage(self):
    """With no baggage set, gen_ai.conversation.id must NOT appear on a chat span."""
    # Minimal chat request/response pair to drive the hook lifecycle.
    request = ChatCompletionRequest(
        model="mistral-small-latest",
        messages=[UserMessage(content="Hello")],
    )
    response = ChatCompletionResponse(
        id="cmpl-nobag-001",
        object="chat.completion",
        model="mistral-small-latest",
        created=1700000011,
        choices=[
            ChatCompletionChoice(
                index=0,
                message=AssistantMessage(content="Hi!", tool_calls=None),
                finish_reason="stop",
            ),
        ],
        usage=UsageInfo(prompt_tokens=5, completion_tokens=2, total_tokens=7),
    )

    # No baggage is attached here, so the attribute must be absent.
    self._run_hook_lifecycle(
        "chat_completion_v1_chat_completions_post",
        request,
        response,
    )

    exported = self._get_single_span()
    self.assertNotIn("gen_ai.conversation.id", exported.attributes)
1599+
1600+
15281601
if __name__ == "__main__":
15291602
unittest.main()

0 commit comments

Comments
 (0)