@@ -624,7 +624,7 @@ def test_sample_creates_span_with_correct_attributes(mock_tracer: mock.MagicMock
624624
625625 expected_request_attributes = {
626626 "gen_ai.operation.name" : "chat" ,
627- "gen_ai.system " : "xai" ,
627+ "gen_ai.provider.name" : "xai" ,
628628 "gen_ai.output.type" : "text" ,
629629 "gen_ai.request.model" : "grok-3" ,
630630 "gen_ai.request.logprobs" : False ,
@@ -633,6 +633,7 @@ def test_sample_creates_span_with_correct_attributes(mock_tracer: mock.MagicMock
633633 "gen_ai.request.temperature" : 1.0 ,
634634 "gen_ai.request.parallel_tool_calls" : True ,
635635 "server.port" : 443 ,
636+ "server.address" : "api.x.ai" ,
636637 "gen_ai.conversation.id" : conversation_id ,
637638 "gen_ai.prompt.0.role" : "user" ,
638639 "gen_ai.prompt.0.content" : "Hello, how are you?" ,
@@ -679,10 +680,11 @@ def test_sample_creates_span_without_sensitive_attributes_when_disabled(mock_tra
679680
680681 expected_request_attributes = {
681682 "gen_ai.operation.name" : "chat" ,
682- "gen_ai.system " : "xai" ,
683+ "gen_ai.provider.name" : "xai" ,
683684 "gen_ai.output.type" : "text" ,
684685 "gen_ai.request.model" : "grok-3" ,
685686 "server.port" : 443 ,
687+ "server.address" : "api.x.ai" ,
686688 }
687689
688690 mock_tracer .start_as_current_span .assert_called_once_with (
@@ -735,7 +737,7 @@ def test_sample_creates_span_with_correct_optional_attributes(mock_tracer: mock.
735737
736738 expected_request_attributes = {
737739 "gen_ai.operation.name" : "chat" ,
738- "gen_ai.system " : "xai" ,
740+ "gen_ai.provider.name" : "xai" ,
739741 "gen_ai.output.type" : "json_object" ,
740742 "gen_ai.request.model" : "grok-3" ,
741743 "gen_ai.request.logprobs" : True ,
@@ -744,6 +746,7 @@ def test_sample_creates_span_with_correct_optional_attributes(mock_tracer: mock.
744746 "gen_ai.request.temperature" : 0.5 ,
745747 "gen_ai.request.parallel_tool_calls" : False ,
746748 "server.port" : 443 ,
749+ "server.address" : "api.x.ai" ,
747750 "gen_ai.conversation.id" : conversation_id ,
748751 "gen_ai.request.max_tokens" : 100 ,
749752 "gen_ai.request.seed" : 123 ,
@@ -784,7 +787,7 @@ def test_sample_batch_creates_span_with_correct_attributes(mock_tracer: mock.Mag
784787
785788 expected_request_attributes = {
786789 "gen_ai.operation.name" : "chat" ,
787- "gen_ai.system " : "xai" ,
790+ "gen_ai.provider.name" : "xai" ,
788791 "gen_ai.output.type" : "text" ,
789792 "gen_ai.request.model" : "grok-3" ,
790793 "gen_ai.request.logprobs" : False ,
@@ -793,6 +796,7 @@ def test_sample_batch_creates_span_with_correct_attributes(mock_tracer: mock.Mag
793796 "gen_ai.request.temperature" : 1.0 ,
794797 "gen_ai.request.parallel_tool_calls" : True ,
795798 "server.port" : 443 ,
799+ "server.address" : "api.x.ai" ,
796800 "gen_ai.conversation.id" : conversation_id ,
797801 "gen_ai.prompt.0.role" : "user" ,
798802 "gen_ai.prompt.0.content" : "Hello, how are you?" ,
@@ -851,7 +855,7 @@ def test_stream_creates_span_with_correct_attributes(mock_tracer: mock.MagicMock
851855
852856 expected_request_attributes = {
853857 "gen_ai.operation.name" : "chat" ,
854- "gen_ai.system " : "xai" ,
858+ "gen_ai.provider.name" : "xai" ,
855859 "gen_ai.output.type" : "text" ,
856860 "gen_ai.request.model" : "grok-3" ,
857861 "gen_ai.request.logprobs" : False ,
@@ -860,6 +864,7 @@ def test_stream_creates_span_with_correct_attributes(mock_tracer: mock.MagicMock
860864 "gen_ai.request.temperature" : 1.0 ,
861865 "gen_ai.request.parallel_tool_calls" : True ,
862866 "server.port" : 443 ,
867+ "server.address" : "api.x.ai" ,
863868 "gen_ai.conversation.id" : conversation_id ,
864869 "gen_ai.prompt.0.role" : "user" ,
865870 "gen_ai.prompt.0.content" : "Hello, how are you?" ,
@@ -916,7 +921,7 @@ def test_stream_batch_creates_span_with_correct_attributes(mock_tracer: mock.Mag
916921
917922 expected_request_attributes = {
918923 "gen_ai.operation.name" : "chat" ,
919- "gen_ai.system " : "xai" ,
924+ "gen_ai.provider.name" : "xai" ,
920925 "gen_ai.output.type" : "text" ,
921926 "gen_ai.request.model" : "grok-3" ,
922927 "gen_ai.request.logprobs" : False ,
@@ -925,6 +930,7 @@ def test_stream_batch_creates_span_with_correct_attributes(mock_tracer: mock.Mag
925930 "gen_ai.request.temperature" : 1.0 ,
926931 "gen_ai.request.parallel_tool_calls" : True ,
927932 "server.port" : 443 ,
933+ "server.address" : "api.x.ai" ,
928934 "gen_ai.conversation.id" : conversation_id ,
929935 "gen_ai.prompt.0.role" : "user" ,
930936 "gen_ai.prompt.0.content" : "Hello, how are you?" ,
@@ -985,7 +991,7 @@ class TestResponse(BaseModel):
985991
986992 expected_request_attributes = {
987993 "gen_ai.operation.name" : "chat" ,
988- "gen_ai.system " : "xai" ,
994+ "gen_ai.provider.name" : "xai" ,
989995 "gen_ai.output.type" : "json_schema" ,
990996 "gen_ai.request.model" : "grok-3" ,
991997 "gen_ai.request.logprobs" : False ,
@@ -994,6 +1000,7 @@ class TestResponse(BaseModel):
9941000 "gen_ai.request.temperature" : 1.0 ,
9951001 "gen_ai.request.parallel_tool_calls" : True ,
9961002 "server.port" : 443 ,
1003+ "server.address" : "api.x.ai" ,
9971004 "gen_ai.conversation.id" : conversation_id ,
9981005 "gen_ai.prompt.0.role" : "user" ,
9991006 "gen_ai.prompt.0.content" : "What's the weather in London?" ,
@@ -1038,7 +1045,7 @@ def test_defer_creates_span_with_correct_attributes(mock_tracer: mock.MagicMock,
10381045
10391046 expected_request_attributes = {
10401047 "gen_ai.operation.name" : "chat" ,
1041- "gen_ai.system " : "xai" ,
1048+ "gen_ai.provider.name" : "xai" ,
10421049 "gen_ai.output.type" : "text" ,
10431050 "gen_ai.request.model" : "grok-3" ,
10441051 "gen_ai.request.logprobs" : False ,
@@ -1047,6 +1054,7 @@ def test_defer_creates_span_with_correct_attributes(mock_tracer: mock.MagicMock,
10471054 "gen_ai.request.temperature" : 1.0 ,
10481055 "gen_ai.request.parallel_tool_calls" : True ,
10491056 "server.port" : 443 ,
1057+ "server.address" : "api.x.ai" ,
10501058 "gen_ai.conversation.id" : conversation_id ,
10511059 "gen_ai.prompt.0.role" : "user" ,
10521060 "gen_ai.prompt.0.content" : "Hello, how are you?" ,
@@ -1095,7 +1103,7 @@ def test_defer_batch_creates_span_with_correct_attributes(mock_tracer: mock.Magi
10951103
10961104 expected_request_attributes = {
10971105 "gen_ai.operation.name" : "chat" ,
1098- "gen_ai.system " : "xai" ,
1106+ "gen_ai.provider.name" : "xai" ,
10991107 "gen_ai.output.type" : "text" ,
11001108 "gen_ai.request.model" : "grok-3" ,
11011109 "gen_ai.request.logprobs" : False ,
@@ -1104,6 +1112,7 @@ def test_defer_batch_creates_span_with_correct_attributes(mock_tracer: mock.Magi
11041112 "gen_ai.request.temperature" : 1.0 ,
11051113 "gen_ai.request.parallel_tool_calls" : True ,
11061114 "server.port" : 443 ,
1115+ "server.address" : "api.x.ai" ,
11071116 "gen_ai.conversation.id" : conversation_id ,
11081117 "gen_ai.prompt.0.role" : "user" ,
11091118 "gen_ai.prompt.0.content" : "Hello, how are you?" ,
@@ -1161,7 +1170,7 @@ def test_chat_with_function_calling_creates_span_with_correct_attributes(mock_tr
11611170
11621171 expected_request_attributes = {
11631172 "gen_ai.operation.name" : "chat" ,
1164- "gen_ai.system " : "xai" ,
1173+ "gen_ai.provider.name" : "xai" ,
11651174 "gen_ai.output.type" : "text" ,
11661175 "gen_ai.request.model" : "grok-3" ,
11671176 "gen_ai.request.logprobs" : False ,
@@ -1170,6 +1179,7 @@ def test_chat_with_function_calling_creates_span_with_correct_attributes(mock_tr
11701179 "gen_ai.request.temperature" : 1.0 ,
11711180 "gen_ai.request.parallel_tool_calls" : True ,
11721181 "server.port" : 443 ,
1182+ "server.address" : "api.x.ai" ,
11731183 "gen_ai.conversation.id" : conversation_id ,
11741184 "gen_ai.prompt.0.role" : "user" ,
11751185 "gen_ai.prompt.0.content" : "What's the weather in London?" ,
@@ -1241,7 +1251,7 @@ def test_chat_with_function_call_result_creates_span_with_correct_attributes(
12411251
12421252 expected_request_attributes = {
12431253 "gen_ai.operation.name" : "chat" ,
1244- "gen_ai.system " : "xai" ,
1254+ "gen_ai.provider.name" : "xai" ,
12451255 "gen_ai.output.type" : "text" ,
12461256 "gen_ai.request.model" : "grok-3" ,
12471257 "gen_ai.request.logprobs" : False ,
@@ -1250,6 +1260,7 @@ def test_chat_with_function_call_result_creates_span_with_correct_attributes(
12501260 "gen_ai.request.temperature" : 1.0 ,
12511261 "gen_ai.request.parallel_tool_calls" : True ,
12521262 "server.port" : 443 ,
1263+ "server.address" : "api.x.ai" ,
12531264 "gen_ai.conversation.id" : conversation_id ,
12541265 "gen_ai.prompt.0.role" : "user" ,
12551266 "gen_ai.prompt.0.content" : "What's the weather in London?" ,
0 commit comments