
Commit 5bdf8a7

Sample to show passing in JSON schema for structured outputs (#2362)
1 parent cf13e35 commit 5bdf8a7

File tree: 4 files changed (+116 -2 lines)


python/packages/core/agent_framework/_agents.py

Lines changed: 4 additions & 2 deletions
@@ -853,6 +853,7 @@ async def run(
                 await self._async_exit_stack.enter_async_context(mcp_server)
                 final_tools.extend(mcp_server.functions)
 
+        merged_additional_options = additional_chat_options or {}
         co = run_chat_options & ChatOptions(
             model_id=model_id,
             conversation_id=thread.service_thread_id,
@@ -871,7 +872,7 @@ async def run(
             tools=final_tools,
             top_p=top_p,
             user=user,
-            **(additional_chat_options or {}),
+            additional_properties=merged_additional_options,  # type: ignore[arg-type]
         )
         # Filter chat_options from kwargs to prevent duplicate keyword argument
         filtered_kwargs = {k: v for k, v in kwargs.items() if k != "chat_options"}
@@ -986,6 +987,7 @@ async def run_stream(
                 await self._async_exit_stack.enter_async_context(mcp_server)
                 final_tools.extend(mcp_server.functions)
 
+        merged_additional_options = additional_chat_options or {}
         co = run_chat_options & ChatOptions(
             conversation_id=thread.service_thread_id,
             allow_multiple_tool_calls=allow_multiple_tool_calls,
@@ -1004,7 +1006,7 @@ async def run_stream(
             tools=final_tools,
             top_p=top_p,
             user=user,
-            **(additional_chat_options or {}),
+            additional_properties=merged_additional_options,  # type: ignore[arg-type]
         )
 
         # Filter chat_options from kwargs to prevent duplicate keyword argument
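In effect, run() and run_stream() now collect anything passed through additional_chat_options into ChatOptions.additional_properties instead of splatting it as keyword arguments, so provider-specific payloads such as a raw response_format reach the OpenAI client intact. A minimal caller-side sketch of what this enables, mirroring the new sample added in this commit (agent name and schema are illustrative, and OpenAI credentials are assumed to be configured via environment variables as in the other samples):

import asyncio

from agent_framework.openai import OpenAIChatClient

# Illustrative schema; the full sample below defines a richer WeatherDigest schema.
schema = {
    "title": "WeatherDigest",
    "type": "object",
    "properties": {"location": {"type": "string"}, "conditions": {"type": "string"}},
    "required": ["location", "conditions"],
    "additionalProperties": False,
}


async def main() -> None:
    agent = OpenAIChatClient().create_agent(
        name="RuntimeSchemaAgent",
        instructions="Return only JSON that matches the provided schema.",
    )
    # The dict below is merged into ChatOptions.additional_properties by run()/run_stream().
    response = await agent.run(
        "Give a brief weather digest for Seattle.",
        additional_chat_options={
            "response_format": {
                "type": "json_schema",
                "json_schema": {"name": schema["title"], "strict": True, "schema": schema},
            },
        },
    )
    print(response.text)


asyncio.run(main())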

python/samples/README.md

Lines changed: 1 addition & 0 deletions
@@ -118,6 +118,7 @@ This directory contains samples demonstrating the capabilities of Microsoft Agen
 | [`getting_started/agents/openai/openai_chat_client_with_local_mcp.py`](./getting_started/agents/openai/openai_chat_client_with_local_mcp.py) | OpenAI Chat Client with Local MCP Example |
 | [`getting_started/agents/openai/openai_chat_client_with_thread.py`](./getting_started/agents/openai/openai_chat_client_with_thread.py) | OpenAI Chat Client with Thread Management Example |
 | [`getting_started/agents/openai/openai_chat_client_with_web_search.py`](./getting_started/agents/openai/openai_chat_client_with_web_search.py) | OpenAI Chat Client with Web Search Example |
+| [`getting_started/agents/openai/openai_chat_client_with_runtime_json_schema.py`](./getting_started/agents/openai/openai_chat_client_with_runtime_json_schema.py) | OpenAI Chat Client with runtime JSON Schema for structured output without a Pydantic model |
 | [`getting_started/agents/openai/openai_responses_client_basic.py`](./getting_started/agents/openai/openai_responses_client_basic.py) | OpenAI Responses Client Basic Example |
 | [`getting_started/agents/openai/openai_responses_client_image_analysis.py`](./getting_started/agents/openai/openai_responses_client_image_analysis.py) | OpenAI Responses Client Image Analysis Example |
 | [`getting_started/agents/openai/openai_responses_client_image_generation.py`](./getting_started/agents/openai/openai_responses_client_image_generation.py) | OpenAI Responses Client Image Generation Example |

python/samples/getting_started/agents/openai/README.md

Lines changed: 1 addition & 0 deletions
@@ -19,6 +19,7 @@ This folder contains examples demonstrating different ways to create and use age
 | [`openai_chat_client_with_local_mcp.py`](openai_chat_client_with_local_mcp.py) | Shows how to integrate OpenAI agents with local Model Context Protocol (MCP) servers for enhanced functionality and tool integration. |
 | [`openai_chat_client_with_thread.py`](openai_chat_client_with_thread.py) | Demonstrates thread management with OpenAI agents, including automatic thread creation for stateless conversations and explicit thread management for maintaining conversation context across multiple interactions. |
 | [`openai_chat_client_with_web_search.py`](openai_chat_client_with_web_search.py) | Shows how to use web search capabilities with OpenAI agents to retrieve and use information from the internet in responses. |
+| [`openai_chat_client_with_runtime_json_schema.py`](openai_chat_client_with_runtime_json_schema.py) | Shows how to supply a runtime JSON Schema via `additional_chat_options` for structured output without defining a Pydantic model. |
 | [`openai_responses_client_basic.py`](openai_responses_client_basic.py) | The simplest way to create an agent using `ChatAgent` with `OpenAIResponsesClient`. Shows both streaming and non-streaming responses for structured response generation with OpenAI models. |
 | [`openai_responses_client_image_analysis.py`](openai_responses_client_image_analysis.py) | Demonstrates how to use vision capabilities with agents to analyze images. |
 | [`openai_responses_client_image_generation.py`](openai_responses_client_image_generation.py) | Demonstrates how to use image generation capabilities with OpenAI agents to create images based on text descriptions. Requires PIL (Pillow) for image display. |
python/samples/getting_started/agents/openai/openai_chat_client_with_runtime_json_schema.py

Lines changed: 110 additions & 0 deletions
@@ -0,0 +1,110 @@
# Copyright (c) Microsoft. All rights reserved.

import asyncio
import json

from agent_framework.openai import OpenAIChatClient

"""
OpenAI Chat Client Runtime JSON Schema Example

Demonstrates structured outputs when the schema is only known at runtime.
Uses additional_chat_options to pass a JSON Schema payload directly to OpenAI
without defining a Pydantic model up front.
"""


runtime_schema = {
    "title": "WeatherDigest",
    "type": "object",
    "properties": {
        "location": {"type": "string"},
        "conditions": {"type": "string"},
        "temperature_c": {"type": "number"},
        "advisory": {"type": "string"},
    },
    # OpenAI strict mode requires every property to appear in required.
    "required": ["location", "conditions", "temperature_c", "advisory"],
    "additionalProperties": False,
}


async def non_streaming_example() -> None:
    print("=== Non-streaming runtime JSON schema example ===")

    agent = OpenAIChatClient().create_agent(
        name="RuntimeSchemaAgent",
        instructions="Return only JSON that matches the provided schema. Do not add commentary.",
    )

    query = "Give a brief weather digest for Seattle."
    print(f"User: {query}")

    response = await agent.run(
        query,
        additional_chat_options={
            "response_format": {
                "type": "json_schema",
                "json_schema": {
                    "name": runtime_schema["title"],
                    "strict": True,
                    "schema": runtime_schema,
                },
            },
        },
    )

    print("Model output:")
    print(response.text)

    parsed = json.loads(response.text)
    print("Parsed dict:")
    print(parsed)


async def streaming_example() -> None:
    print("=== Streaming runtime JSON schema example ===")

    agent = OpenAIChatClient().create_agent(
        name="RuntimeSchemaAgent",
        instructions="Return only JSON that matches the provided schema. Do not add commentary.",
    )

    query = "Give a brief weather digest for Portland."
    print(f"User: {query}")

    chunks = []
    async for chunk in agent.run_stream(
        query,
        additional_chat_options={
            "response_format": {
                "type": "json_schema",
                "json_schema": {
                    "name": runtime_schema["title"],
                    "strict": True,
                    "schema": runtime_schema,
                },
            },
        },
    ):
        if chunk.text:
            chunks.append(chunk.text)

    raw_text = "".join(chunks)
    print("Model output:")
    print(raw_text)

    parsed = json.loads(raw_text)
    print("Parsed dict:")
    print(parsed)


async def main() -> None:
    print("=== OpenAI Chat Client with runtime JSON Schema ===")

    await non_streaming_example()
    await streaming_example()


if __name__ == "__main__":
    asyncio.run(main())
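One benefit of keeping the schema as a plain dict is that the same object can be reused to validate the parsed response on the client side. A small optional sketch, assuming the third-party jsonschema package is installed (it is not used by the sample itself):

import json

import jsonschema  # optional dependency, not required by the sample above


def validate_digest(raw_text: str, schema: dict) -> dict:
    """Parse the model output and check it against the runtime schema."""
    parsed = json.loads(raw_text)
    # Raises jsonschema.ValidationError if the output does not conform.
    jsonschema.validate(instance=parsed, schema=schema)
    return parsed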

0 commit comments
