examples/demo.py (2 additions, 1 deletion)

@@ -34,7 +34,8 @@
     if not chunk.choices:
         continue

-    print(chunk.choices[0].delta.content, end="")
+    delta = chunk.choices[0].delta
+    print(delta.content if delta and delta.content is not None else "", end="")
 print()

 # Response headers:
examples/module_client.py (2 additions, 1 deletion)

@@ -20,6 +20,7 @@
 )

 for chunk in stream:
-    print(chunk.choices[0].delta.content or "", end="", flush=True)
+    delta = chunk.choices[0].delta
+    print(delta.content if delta and delta.content is not None else "", end="", flush=True)

 print()
src/openai/cli/_api/chat/completions.py (1 addition, 1 deletion)

@@ -149,7 +149,7 @@ def _stream_create(params: CompletionCreateParamsStreaming) -> None:
             if should_print_header:
                 sys.stdout.write("===== Chat Completion {} =====\n".format(choice.index))

-            content = choice.delta.content or ""
+            content = choice.delta.content if choice.delta and choice.delta.content is not None else ""
             sys.stdout.write(content)

             if should_print_header:
src/openai/lib/streaming/chat/_completions.py (28 additions, 11 deletions)

@@ -364,6 +364,9 @@ def _accumulate_chunk(self, chunk: ChatCompletionChunk) -> ParsedChatCompletionSnapshot:
             return _convert_initial_chunk_into_snapshot(chunk)

         for choice in chunk.choices:
+            choice_delta = choice.delta
+            choice_delta_dict = choice_delta.to_dict() if choice_delta is not None else {}
+
             try:
                 choice_snapshot = completion_snapshot.choices[choice.index]
                 previous_tool_calls = choice_snapshot.message.tool_calls or []
@@ -393,7 +396,7 @@ def _accumulate_chunk(self, chunk: ChatCompletionChunk) -> ParsedChatCompletionSnapshot:
                                     ),
                                 ),
                             ),
-                            cast("dict[object, object]", choice.delta.to_dict()),
+                            cast("dict[object, object]", choice_delta_dict),
                         ),
                     ),
                 )
@@ -415,7 +418,7 @@ def _accumulate_chunk(self, chunk: ChatCompletionChunk) -> ParsedChatCompletionSnapshot:
                         type_=ParsedChoiceSnapshot,
                         value={
                             **choice.model_dump(exclude_unset=True, exclude={"delta"}),
-                            "message": choice.delta.to_dict(),
+                            "message": choice_delta_dict,
                         },
                     ),
                 )
@@ -445,7 +448,7 @@ def _accumulate_chunk(self, chunk: ChatCompletionChunk) -> ParsedChatCompletionSnapshot:
                     partial_mode=True,
                 )

-            for tool_call_chunk in choice.delta.tool_calls or []:
+            for tool_call_chunk in choice_delta.tool_calls if choice_delta and choice_delta.tool_calls else []:
                tool_call_snapshot = (choice_snapshot.message.tool_calls or [])[tool_call_chunk.index]

                if tool_call_snapshot.type == "function":
@@ -505,33 +508,42 @@ def _build_events(
         for choice in chunk.choices:
             choice_state = self._get_choice_state(choice)
             choice_snapshot = completion_snapshot.choices[choice.index]
+            choice_delta = choice.delta

-            if choice.delta.content is not None and choice_snapshot.message.content is not None:
+            if (
+                choice_delta is not None
+                and choice_delta.content is not None
+                and choice_snapshot.message.content is not None
+            ):
                 events_to_fire.append(
                     build(
                         ContentDeltaEvent,
                         type="content.delta",
-                        delta=choice.delta.content,
+                        delta=choice_delta.content,
                         snapshot=choice_snapshot.message.content,
                         parsed=choice_snapshot.message.parsed,
                     )
                 )

-            if choice.delta.refusal is not None and choice_snapshot.message.refusal is not None:
+            if (
+                choice_delta is not None
+                and choice_delta.refusal is not None
+                and choice_snapshot.message.refusal is not None
+            ):
                 events_to_fire.append(
                     build(
                         RefusalDeltaEvent,
                         type="refusal.delta",
-                        delta=choice.delta.refusal,
+                        delta=choice_delta.refusal,
                         snapshot=choice_snapshot.message.refusal,
                     )
                 )

-            if choice.delta.tool_calls:
+            if choice_delta is not None and choice_delta.tool_calls:
                 tool_calls = choice_snapshot.message.tool_calls
                 assert tool_calls is not None

-                for tool_call_delta in choice.delta.tool_calls:
+                for tool_call_delta in choice_delta.tool_calls:
                     tool_call = tool_calls[tool_call_delta.index]

                     if tool_call.type == "function":
@@ -617,7 +629,9 @@ def get_done_events(
                     tool_index=self.__current_tool_call_index,
                 )

-        for tool_call in choice_chunk.delta.tool_calls or []:
+        choice_delta = choice_chunk.delta
+
+        for tool_call in choice_delta.tool_calls if choice_delta and choice_delta.tool_calls else []:
             if self.__current_tool_call_index != tool_call.index:
                 events_to_fire.extend(
                     self._content_done_events(choice_snapshot=choice_snapshot, response_format=response_format)
@@ -742,9 +756,12 @@ def _convert_initial_chunk_into_snapshot(chunk: ChatCompletionChunk) -> ParsedChatCompletionSnapshot:
     choices = cast("list[object]", data["choices"])

     for choice in chunk.choices:
+        choice_delta = choice.delta
+        choice_delta_dict = choice_delta.to_dict() if choice_delta is not None else {}
+
         choices[choice.index] = {
             **choice.model_dump(exclude_unset=True, exclude={"delta"}),
-            "message": choice.delta.to_dict(),
+            "message": choice_delta_dict,
         }

     return cast(
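These accumulator changes are what `ChatCompletionStreamState` exercises when chunks are fed through it by hand. A minimal sketch of that flow, in the spirit of the new test further down; the `accumulate` helper and its list-of-chunks input are illustrative, not part of this change:

```python
from openai.lib.streaming.chat import ChatCompletionStreamState
from openai.types.chat import ChatCompletionChunk


def accumulate(chunks: list[ChatCompletionChunk]) -> None:
    # Hypothetical driver: in real use the chunks come from a streaming
    # chat.completions.create(..., stream=True) response.
    state = ChatCompletionStreamState()

    for chunk in chunks:
        # handle_chunk() folds the chunk's delta into the running snapshot
        # (now tolerating delta=None) and returns the events it produced.
        for event in state.handle_chunk(chunk):
            if event.type == "content.delta":
                print(event.delta, end="")

    # The final snapshot survives a trailing chunk whose delta is null.
    completion = state.get_final_completion()
    print("\nfinish_reason:", completion.choices[0].finish_reason)
```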
src/openai/types/chat/chat_completion_chunk.py (1 addition, 1 deletion)

@@ -94,7 +94,7 @@ class ChoiceLogprobs(BaseModel):


 class Choice(BaseModel):
-    delta: ChoiceDelta
+    delta: Optional[ChoiceDelta] = None
     """A chat completion delta generated by streamed model responses."""

     finish_reason: Optional[Literal["stop", "length", "tool_calls", "content_filter", "function_call"]] = None
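With `delta` now optional on `Choice`, any consumer that dereferences `chunk.choices[0].delta` unconditionally can fail on a chunk whose delta is null (e.g. a final chunk that only carries a `finish_reason`). A minimal sketch of the guard pattern the example changes above apply; the client construction and model name here are illustrative, not part of this change:

```python
from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment

stream = client.chat.completions.create(
    model="gpt-4o-mini",  # illustrative model choice
    messages=[{"role": "user", "content": "Say hello"}],
    stream=True,
)

for chunk in stream:
    if not chunk.choices:
        continue

    # `delta` may now be None, so guard before reading `.content`.
    delta = chunk.choices[0].delta
    if delta is not None and delta.content is not None:
        print(delta.content, end="", flush=True)
print()
```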
tests/lib/chat/test_completions_streaming.py (49 additions, 0 deletions)

@@ -20,6 +20,7 @@
 from openai import OpenAI, AsyncOpenAI
 from openai._utils import consume_sync_iterator, assert_signatures_in_sync
 from openai._compat import model_copy
+from openai._models import construct_type
 from openai.types.chat import ChatCompletionChunk
 from openai.lib.streaming.chat import (
     ContentDoneEvent,
@@ -1068,6 +1069,54 @@ def streamer(client: OpenAI) -> Iterator[ChatCompletionChunk]:
     )


+def test_chat_completion_state_accepts_null_delta() -> None:
+    state = ChatCompletionStreamState()
+
+    first_chunk = cast(
+        ChatCompletionChunk,
+        construct_type(
+            type_=ChatCompletionChunk,
+            value={
+                "id": "chatcmpl-test",
+                "choices": [
+                    {
+                        "delta": {"content": "Hello", "role": "assistant"},
+                        "finish_reason": None,
+                        "index": 0,
+                    }
+                ],
+                "created": 1720000000,
+                "model": "gpt-4o-mini",
+                "object": "chat.completion.chunk",
+            },
+        ),
+    )
+    final_chunk = cast(
+        ChatCompletionChunk,
+        construct_type(
+            type_=ChatCompletionChunk,
+            value={
+                "id": "chatcmpl-test",
+                "choices": [{"delta": None, "finish_reason": "stop", "index": 0}],
+                "created": 1720000000,
+                "model": "gpt-4o-mini",
+                "object": "chat.completion.chunk",
+            },
+        ),
+    )
+
+    assert first_chunk.choices[0].delta is not None
+    assert final_chunk.choices[0].delta is None
+
+    state.handle_chunk(first_chunk)
+    events = list(state.handle_chunk(final_chunk))
+
+    assert [event.type for event in events] == ["chunk", "content.done"]
+    completion = state.get_final_completion()
+    assert completion.choices[0].message.content == "Hello"
+    assert completion.choices[0].finish_reason == "stop"
+
+
 @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"])
 def test_stream_method_in_sync(sync: bool, client: OpenAI, async_client: AsyncOpenAI) -> None:
     checking_client: OpenAI | AsyncOpenAI = client if sync else async_client