Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ With an image URL:

```python
prompt = "What is in this image?"
img_url = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/d5/2023_06_08_Raccoon1.jpg/1599px-2023_06_08_Raccoon1.jpg"
img_url = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/d5/2023_06_08_Raccoon1.jpg/640px-2023_06_08_Raccoon1.jpg"

response = client.responses.create(
model="gpt-5.2",
Expand All @@ -93,6 +93,8 @@ response = client.responses.create(
)
```

If the request fails with a `BadRequestError` whose code is `"invalid_value"` because the API could not download the image, either swap in a smaller image URL that you control or use the base64 example below, which uploads the image directly instead of having the API fetch it from the web.

With the image as a base64 encoded string:

```python
Expand Down
116 changes: 54 additions & 62 deletions src/openai/_streaming.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,46 +55,40 @@ def __stream__(self) -> Iterator[_T]:
process_data = self._client._process_response_data
iterator = self._iter_events()

        def _raise_streaming_error(data: object) -> None:
            """Raise an `APIError` if *data* carries a truthy `"error"` payload.

            Silently returns (no-op) when *data* is not a mapping or has no
            `"error"` key, so callers can invoke it unconditionally on any
            decoded SSE payload.
            """
            if not is_mapping(data):
                return
            error = data.get("error")
            if not error:
                return

            # Prefer the server-provided message; fall back to a generic one
            # when the error payload is not a mapping or lacks a usable string.
            message: str | None = None
            if is_mapping(error):
                message = error.get("message")
            if not message or not isinstance(message, str):
                message = "An error occurred during streaming"

            raise APIError(
                message=message,
                request=self.response.request,
                body=error,
            )

try:
for sse in iterator:
if sse.data.startswith("[DONE]"):
break

# we have to special case the Assistants `thread.` events since we won't have an "event" key in the data
if sse.event and sse.event.startswith("thread."):
data = sse.json()

if sse.event == "error" and is_mapping(data) and data.get("error"):
message = None
error = data.get("error")
if is_mapping(error):
message = error.get("message")
if not message or not isinstance(message, str):
message = "An error occurred during streaming"

raise APIError(
message=message,
request=self.response.request,
body=data["error"],
)
data = sse.json()
if sse.event == "error":
_raise_streaming_error(data)

if sse.event and sse.event.startswith("thread."):
_raise_streaming_error(data)
yield process_data(data={"data": data, "event": sse.event}, cast_to=cast_to, response=response)
else:
data = sse.json()
if is_mapping(data) and data.get("error"):
message = None
error = data.get("error")
if is_mapping(error):
message = error.get("message")
if not message or not isinstance(message, str):
message = "An error occurred during streaming"

raise APIError(
message=message,
request=self.response.request,
body=data["error"],
)

_raise_streaming_error(data)
yield process_data(data=data, cast_to=cast_to, response=response)

finally:
Expand Down Expand Up @@ -158,46 +152,40 @@ async def __stream__(self) -> AsyncIterator[_T]:
process_data = self._client._process_response_data
iterator = self._iter_events()

        def _raise_streaming_error(data: object) -> None:
            """Raise an `APIError` if *data* carries a truthy `"error"` payload.

            Silently returns (no-op) when *data* is not a mapping or has no
            `"error"` key, so callers can invoke it unconditionally on any
            decoded SSE payload.
            """
            if not is_mapping(data):
                return
            error = data.get("error")
            if not error:
                return

            # Prefer the server-provided message; fall back to a generic one
            # when the error payload is not a mapping or lacks a usable string.
            message: str | None = None
            if is_mapping(error):
                message = error.get("message")
            if not message or not isinstance(message, str):
                message = "An error occurred during streaming"

            raise APIError(
                message=message,
                request=self.response.request,
                body=error,
            )

try:
async for sse in iterator:
if sse.data.startswith("[DONE]"):
break

# we have to special case the Assistants `thread.` events since we won't have an "event" key in the data
if sse.event and sse.event.startswith("thread."):
data = sse.json()

if sse.event == "error" and is_mapping(data) and data.get("error"):
message = None
error = data.get("error")
if is_mapping(error):
message = error.get("message")
if not message or not isinstance(message, str):
message = "An error occurred during streaming"

raise APIError(
message=message,
request=self.response.request,
body=data["error"],
)
data = sse.json()
if sse.event == "error":
_raise_streaming_error(data)

if sse.event and sse.event.startswith("thread."):
_raise_streaming_error(data)
yield process_data(data={"data": data, "event": sse.event}, cast_to=cast_to, response=response)
else:
data = sse.json()
if is_mapping(data) and data.get("error"):
message = None
error = data.get("error")
if is_mapping(error):
message = error.get("message")
if not message or not isinstance(message, str):
message = "An error occurred during streaming"

raise APIError(
message=message,
request=self.response.request,
body=data["error"],
)

_raise_streaming_error(data)
yield process_data(data=data, cast_to=cast_to, response=response)

finally:
Expand All @@ -223,6 +211,10 @@ async def close(self) -> None:
"""
await self.response.aclose()

    async def aclose(self) -> None:
        """Close the stream's underlying HTTP response.

        Alias for `close` so that callers and context-manager utilities that
        expect the conventional async `aclose` name keep working.
        """
        await self.close()


class ServerSentEvent:
def __init__(
Expand Down
1 change: 1 addition & 0 deletions src/openai/types/shared/chat_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@
"gpt-4-32k",
"gpt-4-32k-0314",
"gpt-4-32k-0613",
"gpt-audio-mini",
"gpt-3.5-turbo",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-0301",
Expand Down
8 changes: 4 additions & 4 deletions tests/api_resources/test_responses.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def test_method_create_with_all_params_overload_1(self, client: OpenAI) -> None:
background=True,
context_management=[
{
"type": "type",
"type": "compaction",
"compact_threshold": 1000,
}
],
Expand Down Expand Up @@ -119,7 +119,7 @@ def test_method_create_with_all_params_overload_2(self, client: OpenAI) -> None:
background=True,
context_management=[
{
"type": "type",
"type": "compaction",
"compact_threshold": 1000,
}
],
Expand Down Expand Up @@ -440,7 +440,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
background=True,
context_management=[
{
"type": "type",
"type": "compaction",
"compact_threshold": 1000,
}
],
Expand Down Expand Up @@ -527,7 +527,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
background=True,
context_management=[
{
"type": "type",
"type": "compaction",
"compact_threshold": 1000,
}
],
Expand Down