Skip to content

Fix #1227 Chat Completions model fails to handle streaming with openai 1.97.1+ #1246

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged: 1 commit merged on Jul 25, 2025
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ requires-python = ">=3.9"
license = "MIT"
authors = [{ name = "OpenAI", email = "support@openai.com" }]
dependencies = [
"openai>=1.96.1, <2",
"openai>=1.97.1,<2",
"pydantic>=2.10, <3",
"griffe>=1.5.6, <2",
"typing-extensions>=4.12.2, <5",
Expand Down
25 changes: 14 additions & 11 deletions src/agents/models/chatcmpl_stream_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -198,6 +198,7 @@ async def handle_stream(
is not None, # fixed 0 -> 0 or 1
type="response.output_text.delta",
sequence_number=sequence_number.get_and_increment(),
logprobs=[],
)
# Accumulate the text into the response part
state.text_content_index_and_output[1].text += delta.content
Expand Down Expand Up @@ -288,10 +289,11 @@ async def handle_stream(
function_call = state.function_calls[tc_delta.index]

# Start streaming as soon as we have function name and call_id
if (not state.function_call_streaming[tc_delta.index] and
function_call.name and
function_call.call_id):

if (
not state.function_call_streaming[tc_delta.index]
and function_call.name
and function_call.call_id
):
# Calculate the output index for this function call
function_call_starting_index = 0
if state.reasoning_content_index_and_output:
Expand All @@ -308,9 +310,9 @@ async def handle_stream(

# Mark this function call as streaming and store its output index
state.function_call_streaming[tc_delta.index] = True
state.function_call_output_idx[
tc_delta.index
] = function_call_starting_index
state.function_call_output_idx[tc_delta.index] = (
function_call_starting_index
)

# Send initial function call added event
yield ResponseOutputItemAddedEvent(
Expand All @@ -327,10 +329,11 @@ async def handle_stream(
)

# Stream arguments if we've started streaming this function call
if (state.function_call_streaming.get(tc_delta.index, False) and
tc_function and
tc_function.arguments):

if (
state.function_call_streaming.get(tc_delta.index, False)
and tc_function
and tc_function.arguments
):
output_index = state.function_call_output_idx[tc_delta.index]
yield ResponseFunctionCallArgumentsDeltaEvent(
delta=tc_function.arguments,
Expand Down
1 change: 1 addition & 0 deletions tests/voice/test_workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,7 @@ async def stream_response(
output_index=0,
item_id=item.id,
sequence_number=0,
logprobs=[],
)

yield ResponseCompletedEvent(
Expand Down
8 changes: 4 additions & 4 deletions uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.