Skip to content

Commit 6bf2101

Browse files
committed
Make gen_ai span format compliant
1 parent ef72733 commit 6bf2101

3 files changed

Lines changed: 46 additions & 23 deletions

File tree

src/xai_sdk/chat.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -369,10 +369,11 @@ def _make_span_request_attributes(self) -> dict[str, Any]: # noqa: C901, PLR091
369369
"""Creates a dictionary with all relevant request attributes to be set on the span as it is created."""
370370
attributes: dict[str, Any] = {
371371
"gen_ai.operation.name": "chat",
372-
"gen_ai.system": "xai",
372+
"gen_ai.provider.name": "xai",
373373
"gen_ai.output.type": "text",
374374
"gen_ai.request.model": self._proto.model,
375375
"server.port": 443,
376+
"server.address": "api.x.ai",
376377
}
377378

378379
if should_disable_sensitive_attributes():

tests/aio/chat_test.py

Lines changed: 22 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -651,7 +651,7 @@ async def test_sample_creates_span_with_correct_attributes(mock_tracer: mock.Mag
651651

652652
expected_request_attributes = {
653653
"gen_ai.operation.name": "chat",
654-
"gen_ai.system": "xai",
654+
"gen_ai.provider.name": "xai",
655655
"gen_ai.output.type": "text",
656656
"gen_ai.request.model": "grok-3",
657657
"gen_ai.request.logprobs": False,
@@ -660,6 +660,7 @@ async def test_sample_creates_span_with_correct_attributes(mock_tracer: mock.Mag
660660
"gen_ai.request.temperature": 1.0,
661661
"gen_ai.request.parallel_tool_calls": True,
662662
"server.port": 443,
663+
"server.address": "api.x.ai",
663664
"gen_ai.conversation.id": conversation_id,
664665
"gen_ai.prompt.0.role": "user",
665666
"gen_ai.prompt.0.content": "Hello, how are you?",
@@ -709,10 +710,11 @@ async def test_sample_creates_span_without_sensitive_attributes_when_disabled(
709710

710711
expected_request_attributes = {
711712
"gen_ai.operation.name": "chat",
712-
"gen_ai.system": "xai",
713+
"gen_ai.provider.name": "xai",
713714
"gen_ai.output.type": "text",
714715
"gen_ai.request.model": "grok-3",
715716
"server.port": 443,
717+
"server.address": "api.x.ai",
716718
}
717719

718720
mock_tracer.start_as_current_span.assert_called_once_with(
@@ -765,7 +767,7 @@ async def test_sample_creates_span_with_correct_optional_attributes(mock_tracer:
765767

766768
expected_request_attributes = {
767769
"gen_ai.operation.name": "chat",
768-
"gen_ai.system": "xai",
770+
"gen_ai.provider.name": "xai",
769771
"gen_ai.output.type": "json_object",
770772
"gen_ai.request.model": "grok-3",
771773
"gen_ai.request.logprobs": True,
@@ -774,6 +776,7 @@ async def test_sample_creates_span_with_correct_optional_attributes(mock_tracer:
774776
"gen_ai.request.temperature": 0.5,
775777
"gen_ai.request.parallel_tool_calls": False,
776778
"server.port": 443,
779+
"server.address": "api.x.ai",
777780
"gen_ai.conversation.id": conversation_id,
778781
"gen_ai.request.max_tokens": 100,
779782
"gen_ai.request.seed": 123,
@@ -815,7 +818,7 @@ async def test_sample_batch_creates_span_with_correct_attributes(mock_tracer: mo
815818

816819
expected_request_attributes = {
817820
"gen_ai.operation.name": "chat",
818-
"gen_ai.system": "xai",
821+
"gen_ai.provider.name": "xai",
819822
"gen_ai.output.type": "text",
820823
"gen_ai.request.model": "grok-3",
821824
"gen_ai.request.logprobs": False,
@@ -824,6 +827,7 @@ async def test_sample_batch_creates_span_with_correct_attributes(mock_tracer: mo
824827
"gen_ai.request.temperature": 1.0,
825828
"gen_ai.request.parallel_tool_calls": True,
826829
"server.port": 443,
830+
"server.address": "api.x.ai",
827831
"gen_ai.conversation.id": conversation_id,
828832
"gen_ai.prompt.0.role": "user",
829833
"gen_ai.prompt.0.content": "Hello, how are you?",
@@ -882,7 +886,7 @@ async def test_stream_creates_span_with_correct_attributes(mock_tracer: mock.Mag
882886

883887
expected_request_attributes = {
884888
"gen_ai.operation.name": "chat",
885-
"gen_ai.system": "xai",
889+
"gen_ai.provider.name": "xai",
886890
"gen_ai.output.type": "text",
887891
"gen_ai.request.model": "grok-3",
888892
"gen_ai.request.logprobs": False,
@@ -891,6 +895,7 @@ async def test_stream_creates_span_with_correct_attributes(mock_tracer: mock.Mag
891895
"gen_ai.request.temperature": 1.0,
892896
"gen_ai.request.parallel_tool_calls": True,
893897
"server.port": 443,
898+
"server.address": "api.x.ai",
894899
"gen_ai.conversation.id": conversation_id,
895900
"gen_ai.prompt.0.role": "user",
896901
"gen_ai.prompt.0.content": "Hello, how are you?",
@@ -947,7 +952,7 @@ async def test_stream_batch_creates_span_with_correct_attributes(mock_tracer: mo
947952

948953
expected_request_attributes = {
949954
"gen_ai.operation.name": "chat",
950-
"gen_ai.system": "xai",
955+
"gen_ai.provider.name": "xai",
951956
"gen_ai.output.type": "text",
952957
"gen_ai.request.model": "grok-3",
953958
"gen_ai.request.logprobs": False,
@@ -956,6 +961,7 @@ async def test_stream_batch_creates_span_with_correct_attributes(mock_tracer: mo
956961
"gen_ai.request.temperature": 1.0,
957962
"gen_ai.request.parallel_tool_calls": True,
958963
"server.port": 443,
964+
"server.address": "api.x.ai",
959965
"gen_ai.conversation.id": conversation_id,
960966
"gen_ai.prompt.0.role": "user",
961967
"gen_ai.prompt.0.content": "Hello, how are you?",
@@ -1019,7 +1025,7 @@ class TestResponse(BaseModel):
10191025

10201026
expected_request_attributes = {
10211027
"gen_ai.operation.name": "chat",
1022-
"gen_ai.system": "xai",
1028+
"gen_ai.provider.name": "xai",
10231029
"gen_ai.output.type": "json_schema",
10241030
"gen_ai.request.model": "grok-3",
10251031
"gen_ai.request.logprobs": False,
@@ -1028,6 +1034,7 @@ class TestResponse(BaseModel):
10281034
"gen_ai.request.temperature": 1.0,
10291035
"gen_ai.request.parallel_tool_calls": True,
10301036
"server.port": 443,
1037+
"server.address": "api.x.ai",
10311038
"gen_ai.conversation.id": conversation_id,
10321039
"gen_ai.prompt.0.role": "user",
10331040
"gen_ai.prompt.0.content": "What's the weather in London?",
@@ -1073,7 +1080,7 @@ async def test_defer_creates_span_with_correct_attributes(mock_tracer: mock.Magi
10731080

10741081
expected_request_attributes = {
10751082
"gen_ai.operation.name": "chat",
1076-
"gen_ai.system": "xai",
1083+
"gen_ai.provider.name": "xai",
10771084
"gen_ai.output.type": "text",
10781085
"gen_ai.request.model": "grok-3",
10791086
"gen_ai.request.logprobs": False,
@@ -1082,6 +1089,7 @@ async def test_defer_creates_span_with_correct_attributes(mock_tracer: mock.Magi
10821089
"gen_ai.request.temperature": 1.0,
10831090
"gen_ai.request.parallel_tool_calls": True,
10841091
"server.port": 443,
1092+
"server.address": "api.x.ai",
10851093
"gen_ai.conversation.id": conversation_id,
10861094
"gen_ai.prompt.0.role": "user",
10871095
"gen_ai.prompt.0.content": "Hello, how are you?",
@@ -1131,7 +1139,7 @@ async def test_defer_batch_creates_span_with_correct_attributes(mock_tracer: moc
11311139

11321140
expected_request_attributes = {
11331141
"gen_ai.operation.name": "chat",
1134-
"gen_ai.system": "xai",
1142+
"gen_ai.provider.name": "xai",
11351143
"gen_ai.output.type": "text",
11361144
"gen_ai.request.model": "grok-3",
11371145
"gen_ai.request.logprobs": False,
@@ -1140,6 +1148,7 @@ async def test_defer_batch_creates_span_with_correct_attributes(mock_tracer: moc
11401148
"gen_ai.request.temperature": 1.0,
11411149
"gen_ai.request.parallel_tool_calls": True,
11421150
"server.port": 443,
1151+
"server.address": "api.x.ai",
11431152
"gen_ai.conversation.id": conversation_id,
11441153
"gen_ai.prompt.0.role": "user",
11451154
"gen_ai.prompt.0.content": "Hello, how are you?",
@@ -1200,7 +1209,7 @@ async def test_chat_with_function_calling_creates_span_with_correct_attributes(
12001209

12011210
expected_request_attributes = {
12021211
"gen_ai.operation.name": "chat",
1203-
"gen_ai.system": "xai",
1212+
"gen_ai.provider.name": "xai",
12041213
"gen_ai.output.type": "text",
12051214
"gen_ai.request.model": "grok-3",
12061215
"gen_ai.request.logprobs": False,
@@ -1209,6 +1218,7 @@ async def test_chat_with_function_calling_creates_span_with_correct_attributes(
12091218
"gen_ai.request.temperature": 1.0,
12101219
"gen_ai.request.parallel_tool_calls": True,
12111220
"server.port": 443,
1221+
"server.address": "api.x.ai",
12121222
"gen_ai.conversation.id": conversation_id,
12131223
"gen_ai.prompt.0.role": "user",
12141224
"gen_ai.prompt.0.content": "What's the weather in London?",
@@ -1281,7 +1291,7 @@ async def test_chat_with_function_call_result_creates_span_with_correct_attribut
12811291

12821292
expected_request_attributes = {
12831293
"gen_ai.operation.name": "chat",
1284-
"gen_ai.system": "xai",
1294+
"gen_ai.provider.name": "xai",
12851295
"gen_ai.output.type": "text",
12861296
"gen_ai.request.model": "grok-3",
12871297
"gen_ai.request.logprobs": False,
@@ -1290,6 +1300,7 @@ async def test_chat_with_function_call_result_creates_span_with_correct_attribut
12901300
"gen_ai.request.temperature": 1.0,
12911301
"gen_ai.request.parallel_tool_calls": True,
12921302
"server.port": 443,
1303+
"server.address": "api.x.ai",
12931304
"gen_ai.conversation.id": conversation_id,
12941305
"gen_ai.prompt.0.role": "user",
12951306
"gen_ai.prompt.0.content": "What's the weather in London?",

tests/sync/chat_test.py

Lines changed: 22 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -624,7 +624,7 @@ def test_sample_creates_span_with_correct_attributes(mock_tracer: mock.MagicMock
624624

625625
expected_request_attributes = {
626626
"gen_ai.operation.name": "chat",
627-
"gen_ai.system": "xai",
627+
"gen_ai.provider.name": "xai",
628628
"gen_ai.output.type": "text",
629629
"gen_ai.request.model": "grok-3",
630630
"gen_ai.request.logprobs": False,
@@ -633,6 +633,7 @@ def test_sample_creates_span_with_correct_attributes(mock_tracer: mock.MagicMock
633633
"gen_ai.request.temperature": 1.0,
634634
"gen_ai.request.parallel_tool_calls": True,
635635
"server.port": 443,
636+
"server.address": "api.x.ai",
636637
"gen_ai.conversation.id": conversation_id,
637638
"gen_ai.prompt.0.role": "user",
638639
"gen_ai.prompt.0.content": "Hello, how are you?",
@@ -679,10 +680,11 @@ def test_sample_creates_span_without_sensitive_attributes_when_disabled(mock_tra
679680

680681
expected_request_attributes = {
681682
"gen_ai.operation.name": "chat",
682-
"gen_ai.system": "xai",
683+
"gen_ai.provider.name": "xai",
683684
"gen_ai.output.type": "text",
684685
"gen_ai.request.model": "grok-3",
685686
"server.port": 443,
687+
"server.address": "api.x.ai",
686688
}
687689

688690
mock_tracer.start_as_current_span.assert_called_once_with(
@@ -735,7 +737,7 @@ def test_sample_creates_span_with_correct_optional_attributes(mock_tracer: mock.
735737

736738
expected_request_attributes = {
737739
"gen_ai.operation.name": "chat",
738-
"gen_ai.system": "xai",
740+
"gen_ai.provider.name": "xai",
739741
"gen_ai.output.type": "json_object",
740742
"gen_ai.request.model": "grok-3",
741743
"gen_ai.request.logprobs": True,
@@ -744,6 +746,7 @@ def test_sample_creates_span_with_correct_optional_attributes(mock_tracer: mock.
744746
"gen_ai.request.temperature": 0.5,
745747
"gen_ai.request.parallel_tool_calls": False,
746748
"server.port": 443,
749+
"server.address": "api.x.ai",
747750
"gen_ai.conversation.id": conversation_id,
748751
"gen_ai.request.max_tokens": 100,
749752
"gen_ai.request.seed": 123,
@@ -784,7 +787,7 @@ def test_sample_batch_creates_span_with_correct_attributes(mock_tracer: mock.Mag
784787

785788
expected_request_attributes = {
786789
"gen_ai.operation.name": "chat",
787-
"gen_ai.system": "xai",
790+
"gen_ai.provider.name": "xai",
788791
"gen_ai.output.type": "text",
789792
"gen_ai.request.model": "grok-3",
790793
"gen_ai.request.logprobs": False,
@@ -793,6 +796,7 @@ def test_sample_batch_creates_span_with_correct_attributes(mock_tracer: mock.Mag
793796
"gen_ai.request.temperature": 1.0,
794797
"gen_ai.request.parallel_tool_calls": True,
795798
"server.port": 443,
799+
"server.address": "api.x.ai",
796800
"gen_ai.conversation.id": conversation_id,
797801
"gen_ai.prompt.0.role": "user",
798802
"gen_ai.prompt.0.content": "Hello, how are you?",
@@ -851,7 +855,7 @@ def test_stream_creates_span_with_correct_attributes(mock_tracer: mock.MagicMock
851855

852856
expected_request_attributes = {
853857
"gen_ai.operation.name": "chat",
854-
"gen_ai.system": "xai",
858+
"gen_ai.provider.name": "xai",
855859
"gen_ai.output.type": "text",
856860
"gen_ai.request.model": "grok-3",
857861
"gen_ai.request.logprobs": False,
@@ -860,6 +864,7 @@ def test_stream_creates_span_with_correct_attributes(mock_tracer: mock.MagicMock
860864
"gen_ai.request.temperature": 1.0,
861865
"gen_ai.request.parallel_tool_calls": True,
862866
"server.port": 443,
867+
"server.address": "api.x.ai",
863868
"gen_ai.conversation.id": conversation_id,
864869
"gen_ai.prompt.0.role": "user",
865870
"gen_ai.prompt.0.content": "Hello, how are you?",
@@ -916,7 +921,7 @@ def test_stream_batch_creates_span_with_correct_attributes(mock_tracer: mock.Mag
916921

917922
expected_request_attributes = {
918923
"gen_ai.operation.name": "chat",
919-
"gen_ai.system": "xai",
924+
"gen_ai.provider.name": "xai",
920925
"gen_ai.output.type": "text",
921926
"gen_ai.request.model": "grok-3",
922927
"gen_ai.request.logprobs": False,
@@ -925,6 +930,7 @@ def test_stream_batch_creates_span_with_correct_attributes(mock_tracer: mock.Mag
925930
"gen_ai.request.temperature": 1.0,
926931
"gen_ai.request.parallel_tool_calls": True,
927932
"server.port": 443,
933+
"server.address": "api.x.ai",
928934
"gen_ai.conversation.id": conversation_id,
929935
"gen_ai.prompt.0.role": "user",
930936
"gen_ai.prompt.0.content": "Hello, how are you?",
@@ -985,7 +991,7 @@ class TestResponse(BaseModel):
985991

986992
expected_request_attributes = {
987993
"gen_ai.operation.name": "chat",
988-
"gen_ai.system": "xai",
994+
"gen_ai.provider.name": "xai",
989995
"gen_ai.output.type": "json_schema",
990996
"gen_ai.request.model": "grok-3",
991997
"gen_ai.request.logprobs": False,
@@ -994,6 +1000,7 @@ class TestResponse(BaseModel):
9941000
"gen_ai.request.temperature": 1.0,
9951001
"gen_ai.request.parallel_tool_calls": True,
9961002
"server.port": 443,
1003+
"server.address": "api.x.ai",
9971004
"gen_ai.conversation.id": conversation_id,
9981005
"gen_ai.prompt.0.role": "user",
9991006
"gen_ai.prompt.0.content": "What's the weather in London?",
@@ -1038,7 +1045,7 @@ def test_defer_creates_span_with_correct_attributes(mock_tracer: mock.MagicMock,
10381045

10391046
expected_request_attributes = {
10401047
"gen_ai.operation.name": "chat",
1041-
"gen_ai.system": "xai",
1048+
"gen_ai.provider.name": "xai",
10421049
"gen_ai.output.type": "text",
10431050
"gen_ai.request.model": "grok-3",
10441051
"gen_ai.request.logprobs": False,
@@ -1047,6 +1054,7 @@ def test_defer_creates_span_with_correct_attributes(mock_tracer: mock.MagicMock,
10471054
"gen_ai.request.temperature": 1.0,
10481055
"gen_ai.request.parallel_tool_calls": True,
10491056
"server.port": 443,
1057+
"server.address": "api.x.ai",
10501058
"gen_ai.conversation.id": conversation_id,
10511059
"gen_ai.prompt.0.role": "user",
10521060
"gen_ai.prompt.0.content": "Hello, how are you?",
@@ -1095,7 +1103,7 @@ def test_defer_batch_creates_span_with_correct_attributes(mock_tracer: mock.Magi
10951103

10961104
expected_request_attributes = {
10971105
"gen_ai.operation.name": "chat",
1098-
"gen_ai.system": "xai",
1106+
"gen_ai.provider.name": "xai",
10991107
"gen_ai.output.type": "text",
11001108
"gen_ai.request.model": "grok-3",
11011109
"gen_ai.request.logprobs": False,
@@ -1104,6 +1112,7 @@ def test_defer_batch_creates_span_with_correct_attributes(mock_tracer: mock.Magi
11041112
"gen_ai.request.temperature": 1.0,
11051113
"gen_ai.request.parallel_tool_calls": True,
11061114
"server.port": 443,
1115+
"server.address": "api.x.ai",
11071116
"gen_ai.conversation.id": conversation_id,
11081117
"gen_ai.prompt.0.role": "user",
11091118
"gen_ai.prompt.0.content": "Hello, how are you?",
@@ -1161,7 +1170,7 @@ def test_chat_with_function_calling_creates_span_with_correct_attributes(mock_tr
11611170

11621171
expected_request_attributes = {
11631172
"gen_ai.operation.name": "chat",
1164-
"gen_ai.system": "xai",
1173+
"gen_ai.provider.name": "xai",
11651174
"gen_ai.output.type": "text",
11661175
"gen_ai.request.model": "grok-3",
11671176
"gen_ai.request.logprobs": False,
@@ -1170,6 +1179,7 @@ def test_chat_with_function_calling_creates_span_with_correct_attributes(mock_tr
11701179
"gen_ai.request.temperature": 1.0,
11711180
"gen_ai.request.parallel_tool_calls": True,
11721181
"server.port": 443,
1182+
"server.address": "api.x.ai",
11731183
"gen_ai.conversation.id": conversation_id,
11741184
"gen_ai.prompt.0.role": "user",
11751185
"gen_ai.prompt.0.content": "What's the weather in London?",
@@ -1241,7 +1251,7 @@ def test_chat_with_function_call_result_creates_span_with_correct_attributes(
12411251

12421252
expected_request_attributes = {
12431253
"gen_ai.operation.name": "chat",
1244-
"gen_ai.system": "xai",
1254+
"gen_ai.provider.name": "xai",
12451255
"gen_ai.output.type": "text",
12461256
"gen_ai.request.model": "grok-3",
12471257
"gen_ai.request.logprobs": False,
@@ -1250,6 +1260,7 @@ def test_chat_with_function_call_result_creates_span_with_correct_attributes(
12501260
"gen_ai.request.temperature": 1.0,
12511261
"gen_ai.request.parallel_tool_calls": True,
12521262
"server.port": 443,
1263+
"server.address": "api.x.ai",
12531264
"gen_ai.conversation.id": conversation_id,
12541265
"gen_ai.prompt.0.role": "user",
12551266
"gen_ai.prompt.0.content": "What's the weather in London?",

0 commit comments

Comments (0)