Skip to content

Commit e9d8d27

Browse files
committed
Done some code refactoring
1 parent f3bc46c commit e9d8d27

1 file changed

Lines changed: 9 additions & 38 deletions

File tree

  • packages/opentelemetry-instrumentation-watsonx/opentelemetry/instrumentation/watsonx

packages/opentelemetry-instrumentation-watsonx/opentelemetry/instrumentation/watsonx/__init__.py

Lines changed: 9 additions & 38 deletions
Original file line number · Diff line number · Diff line change
@@ -494,7 +494,6 @@ def wrapper(wrapped, instance, args, kwargs):
494494

495495
@dont_throw
496496
def _handle_input(span, event_logger, name, instance, args, kwargs):
497-
messages = None
498497
_set_api_attributes(span)
499498
if "generate" in name:
500499
set_model_input_attributes(span, instance)
@@ -503,35 +502,19 @@ def _handle_input(span, event_logger, name, instance, args, kwargs):
503502

504503
if "chat" in name:
505504
messages = kwargs.get("messages")
506-
if messages is None and args and isinstance(args[0], list):
507-
messages = args[0]
508505
if messages and span.is_recording():
509506
for index, msg in enumerate(messages):
510-
if not isinstance(msg, dict):
511-
continue
512507
role = msg.get("role")
513508
content = msg.get("content")
514-
if role and isinstance(content, str):
509+
if role and content:
515510
_set_span_attribute(
516511
span,
517512
f"{GenAIAttributes.GEN_AI_PROMPT}.{index}.{role}",
518513
content.strip(),
519514
)
520515

521516
if should_emit_events() and event_logger:
522-
if "chat" in name and isinstance(messages, list):
523-
for msg in messages:
524-
if not isinstance(msg, dict):
525-
continue
526-
emit_event(
527-
MessageEvent(
528-
content=msg.get("content"),
529-
role=msg.get("role", "user"),
530-
),
531-
event_logger,
532-
)
533-
else:
534-
_emit_input_events(args, kwargs, event_logger)
517+
_emit_input_events(args, kwargs, event_logger)
535518

536519

537520
@dont_throw
@@ -592,30 +575,19 @@ def _handle_chat_response(
592575
duration_histogram,
593576
duration,
594577
):
595-
if not span.is_recording() or not isinstance(response, dict):
578+
if not span.is_recording():
596579
return
597580

598-
model_id = response.get("model_id") or response.get("model") or "unknown"
599-
581+
model_id = response.get("model_id") or response.get("model")
600582
_set_span_attribute(span, GenAIAttributes.GEN_AI_RESPONSE_MODEL, model_id)
601583

602584
# Content
603-
choices = response.get("choices") or []
585+
choices = response.get("choices", [])
604586
for index, choice in enumerate(choices):
605587
message = choice.get("message", {})
606588
content = message.get("content")
607-
finish_reason = choice.get("finish_reason") or "unknown"
608-
609-
if should_emit_events() and event_logger:
610-
emit_event(
611-
ChoiceEvent(
612-
index=index,
613-
message=message,
614-
finish_reason=finish_reason or "unknown",
615-
),
616-
event_logger,
617-
)
618-
elif content and should_send_prompts():
589+
finish_reason = choice.get("finish_reason")
590+
if content and should_send_prompts():
619591
_set_span_attribute(
620592
span,
621593
f"{GenAIAttributes.GEN_AI_COMPLETION}.{index}.content",
@@ -630,7 +602,7 @@ def _handle_chat_response(
630602
response_counter.add(1, attributes=attributes)
631603

632604
# Usage
633-
usage = response.get("usage") or {}
605+
usage = response.get("usage", {})
634606
prompt_tokens = usage.get("prompt_tokens", 0)
635607
completion_tokens = usage.get("completion_tokens", 0)
636608
total_tokens = usage.get("total_tokens", prompt_tokens + completion_tokens)
@@ -686,7 +658,7 @@ def _wrap(
686658
},
687659
)
688660

689-
661+
_handle_input(span, event_logger, name, instance, args, kwargs)
690662
if "generate" in name or "chat" in name:
691663
if to_wrap.get("method") == "generate_text_stream":
692664
if (raw_flag := kwargs.get("raw_response", None)) is None:
@@ -700,7 +672,6 @@ def _wrap(
700672
kwargs["messages"] = [
701673
{"role": "user", "content": prompt}
702674
]
703-
_handle_input(span, event_logger, name, instance, args, kwargs)
704675

705676
try:
706677
start_time = time.time()

0 commit comments

Comments (0)