Skip to content

Commit 6eadfac

Browse files
committed
Adding Responses Agent integration tests
1 parent 6a866ff commit 6eadfac

File tree

2 files changed

+390
-1
lines changed

2 files changed

+390
-1
lines changed

python/semantic_kernel/agents/open_ai/responses_agent_thread_actions.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -459,7 +459,6 @@ async def invoke_stream(
459459
msg = function_result_messages[0]
460460
if output_messages is not None:
461461
output_messages.append(msg)
462-
yield msg # Always yield the first message if eligible
463462

464463
if any(result.terminate for result in results if result is not None):
465464
break # Only break if any result has terminate=True
Lines changed: 390 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,390 @@
1+
# Copyright (c) Microsoft. All rights reserved.
2+
3+
import os
4+
from typing import Annotated
5+
6+
import pytest
7+
from pydantic import BaseModel
8+
9+
from semantic_kernel.agents import AzureResponsesAgent, OpenAIResponsesAgent
10+
from semantic_kernel.contents.chat_message_content import ChatMessageContent
11+
from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent
12+
from semantic_kernel.contents.utils.author_role import AuthorRole
13+
from semantic_kernel.functions.kernel_function_decorator import kernel_function
14+
15+
16+
class WeatherPlugin:
    """A sample Mock weather plugin."""

    @kernel_function(description="Get real-time weather information.")
    def current_weather(self, location: Annotated[str, "The location to get the weather"]) -> str:
        """Returns the current weather."""
        # Canned deterministic reply ("sunny") — the function-calling tests
        # assert this word appears in the agent's final answer.
        return f"The weather in {location} is sunny."
23+
24+
25+
class Step(BaseModel):
    """One reasoning step inside a structured-output response."""

    # Free-text rationale for this step.
    explanation: str
    # Intermediate result produced by this step.
    output: str
28+
29+
30+
class Reasoning(BaseModel):
    """Response schema passed to `configure_response_format` in the structured-outputs tests."""

    # Ordered chain of intermediate reasoning steps.
    steps: list[Step]
    # Final answer derived from the steps.
    final_answer: str
33+
34+
35+
class TestOpenAIResponsesAgentIntegration:
    """Integration tests for the Azure/OpenAI Responses agents.

    The ``responses_agent`` fixture is parametrized indirectly: each test
    supplies either a backend name ("azure"/"openai") or a
    ``(backend, options)`` tuple whose options enable extra tooling
    (web search, file search, structured outputs, kernel-function plugins)
    on the constructed agent.
    """

    @pytest.fixture(params=["azure", "openai"])
    async def responses_agent(self, request):
        """Build a Responses agent for the requested backend and options."""
        raw_param = request.param

        # Accept either "backend" or ("backend", {option: flag, ...}).
        if isinstance(raw_param, str):
            agent_type, params = raw_param, {}
        elif isinstance(raw_param, tuple) and len(raw_param) == 2:
            agent_type, params = raw_param
        else:
            raise ValueError(f"Unsupported param format: {raw_param}")

        tools, plugins, text = [], [], None

        if agent_type == "azure":
            client, model = AzureResponsesAgent.setup_resources()
            AgentClass = AzureResponsesAgent
        else:  # agent_type == "openai"
            client, model = OpenAIResponsesAgent.setup_resources()
            AgentClass = OpenAIResponsesAgent

        if params.get("enable_web_search"):
            web_search_tool = OpenAIResponsesAgent.configure_web_search_tool()
            tools.append(web_search_tool)

        if params.get("enable_structured_outputs"):
            text = OpenAIResponsesAgent.configure_response_format(Reasoning)

        if params.get("enable_file_search"):
            pdf_file_path = os.path.join(
                os.path.dirname(os.path.dirname(os.path.realpath(__file__))), "resources", "employees.pdf"
            )
            # Upload the fixture PDF and index it in a vector store so the
            # file-search tool can answer questions about it. The handle is
            # deliberately NOT reused for the upload result (the original
            # rebound `file` inside the `with`, shadowing the open handle).
            with open(pdf_file_path, "rb") as pdf_file:
                uploaded_file = await client.files.create(file=pdf_file, purpose="assistants")
            vector_store = await client.vector_stores.create(
                name="responses_file_search_int_tests",
                file_ids=[uploaded_file.id],
            )
            file_search_tool = (
                AzureResponsesAgent.configure_file_search_tool(vector_store.id)
                if agent_type == "azure"
                else OpenAIResponsesAgent.configure_file_search_tool(vector_store.id)
            )
            tools.append(file_search_tool)
            # NOTE(review): the uploaded file and vector store are never
            # deleted after the test run — consider cleanup after the yield
            # to avoid accumulating resources in the test account.

        if params.get("enable_kernel_function"):
            plugins.append(WeatherPlugin())

        agent = AgentClass(
            ai_model_id=model,
            client=client,
            name="SKPythonIntegrationTestResponsesAgent",
            instructions="You are a helpful agent that help users with their questions.",
            plugins=plugins,
            tools=tools,
            text=text,
        )

        yield agent  # yield agent for test method to use

    # region Simple 'Hello' messages tests

    @pytest.mark.parametrize("responses_agent", ["azure", "openai"], indirect=True, ids=["azure", "openai"])
    async def test_get_response(self, responses_agent: OpenAIResponsesAgent):
        """Test get response of the agent."""
        response = await responses_agent.get_response(messages="Hello")
        assert isinstance(response.message, ChatMessageContent)
        assert response.message.role == AuthorRole.ASSISTANT
        assert response.message.content is not None

    @pytest.mark.parametrize("responses_agent", ["azure", "openai"], indirect=True, ids=["azure", "openai"])
    async def test_get_response_with_thread(self, responses_agent: OpenAIResponsesAgent):
        """Test get response of the agent with a thread."""
        thread = None
        user_messages = ["Hello, I am John Doe.", "What is my name?"]
        for user_message in user_messages:
            response = await responses_agent.get_response(messages=user_message, thread=thread)
            thread = response.thread
            assert thread is not None
            assert isinstance(response.message, ChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT
            assert response.message.content is not None
        # Explicit statement instead of the side-effecting conditional
        # expression `await thread.delete() if thread else None`.
        if thread is not None:
            await thread.delete()

    @pytest.mark.parametrize("responses_agent", ["azure", "openai"], indirect=True, ids=["azure", "openai"])
    async def test_invoke(self, responses_agent: OpenAIResponsesAgent):
        """Test invoke of the agent."""
        async for response in responses_agent.invoke(messages="Hello"):
            assert isinstance(response.message, ChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT
            assert response.message.content is not None

    @pytest.mark.parametrize("responses_agent", ["azure", "openai"], indirect=True, ids=["azure", "openai"])
    async def test_invoke_with_thread(self, responses_agent: OpenAIResponsesAgent):
        """Test invoke of the agent with a thread."""
        thread = None
        user_messages = ["Hello, I am John Doe.", "What is my name?"]
        for user_message in user_messages:
            async for response in responses_agent.invoke(messages=user_message, thread=thread):
                thread = response.thread
                assert thread is not None
                assert isinstance(response.message, ChatMessageContent)
                assert response.message.role == AuthorRole.ASSISTANT
                assert response.message.content is not None
        if thread is not None:
            await thread.delete()

    @pytest.mark.parametrize("responses_agent", ["azure", "openai"], indirect=True, ids=["azure", "openai"])
    async def test_invoke_stream(self, responses_agent: OpenAIResponsesAgent):
        """Test invoke stream of the agent."""
        async for response in responses_agent.invoke_stream(messages="Hello"):
            assert isinstance(response.message, StreamingChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT
            assert response.message.content is not None

    @pytest.mark.parametrize("responses_agent", ["azure", "openai"], indirect=True, ids=["azure", "openai"])
    async def test_invoke_stream_with_thread(self, responses_agent: OpenAIResponsesAgent):
        """Test invoke stream of the agent with a thread."""
        thread = None
        user_messages = ["Hello, I am John Doe.", "What is my name?"]
        for user_message in user_messages:
            async for response in responses_agent.invoke_stream(messages=user_message, thread=thread):
                thread = response.thread
                assert thread is not None
                assert isinstance(response.message, StreamingChatMessageContent)
                assert response.message.role == AuthorRole.ASSISTANT
                assert response.message.content is not None
        if thread is not None:
            await thread.delete()

    # endregion

    # region Web Search tests

    @pytest.mark.parametrize(
        "responses_agent",
        [
            # Azure OpenAI Responses API doesn't yet support the web search tool
            ("openai", {"enable_web_search": True}),
        ],
        indirect=["responses_agent"],
        ids=["openai-web-search-get-response"],
    )
    async def test_web_search_get_response(self, responses_agent: OpenAIResponsesAgent):
        """Test the web search tool via get_response."""
        input_text = "Find articles about the latest AI trends."
        response = await responses_agent.get_response(messages=input_text)
        assert isinstance(response.message, ChatMessageContent)
        assert response.message.role == AuthorRole.ASSISTANT
        assert response.message.content is not None

    @pytest.mark.parametrize(
        "responses_agent",
        [
            # Azure OpenAI Responses API doesn't yet support the web search tool
            ("openai", {"enable_web_search": True}),
        ],
        indirect=["responses_agent"],
        ids=["openai-web-search-invoke"],
    )
    async def test_web_search_invoke(self, responses_agent: OpenAIResponsesAgent):
        """Test the web search tool via invoke."""
        input_text = "Find articles about the latest AI trends."
        async for response in responses_agent.invoke(messages=input_text):
            assert isinstance(response.message, ChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT
            assert response.message.content is not None

    @pytest.mark.parametrize(
        "responses_agent",
        [
            # Azure OpenAI Responses API doesn't yet support the web search tool
            ("openai", {"enable_web_search": True}),
        ],
        indirect=["responses_agent"],
        ids=["openai-websearch-invoke-stream"],
    )
    async def test_web_search_invoke_stream(self, responses_agent: OpenAIResponsesAgent):
        """Test the web search tool via invoke_stream."""
        input_text = "Find articles about the latest AI trends."
        async for response in responses_agent.invoke_stream(messages=input_text):
            assert isinstance(response.message, StreamingChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT
            assert response.message.content is not None

    # endregion

    # region File search tests

    @pytest.mark.parametrize(
        "responses_agent",
        [
            ("azure", {"enable_file_search": True}),
            ("openai", {"enable_file_search": True}),
        ],
        indirect=["responses_agent"],
        ids=["azure-file-search-get-response", "openai-file-search-get-response"],
    )
    async def test_file_search_get_response(self, responses_agent: OpenAIResponsesAgent):
        """Test the file search tool via get_response."""
        input_text = "Who is the youngest employee?"
        response = await responses_agent.get_response(messages=input_text)
        assert isinstance(response.message, ChatMessageContent)
        assert response.message.role == AuthorRole.ASSISTANT

    @pytest.mark.parametrize(
        "responses_agent",
        [
            ("azure", {"enable_file_search": True}),
            ("openai", {"enable_file_search": True}),
        ],
        indirect=["responses_agent"],
        ids=["azure-file-search-invoke", "openai-file-search-invoke"],
    )
    async def test_file_search_invoke(self, responses_agent: OpenAIResponsesAgent):
        """Test the file search tool via invoke."""
        input_text = "Who is the youngest employee?"
        async for response in responses_agent.invoke(messages=input_text):
            assert isinstance(response.message, ChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT

    @pytest.mark.parametrize(
        "responses_agent",
        [
            ("azure", {"enable_file_search": True}),
            ("openai", {"enable_file_search": True}),
        ],
        indirect=["responses_agent"],
        ids=["azure-file-search-invoke-stream", "openai-file-search-invoke-stream"],
    )
    async def test_file_search_invoke_stream(self, responses_agent: OpenAIResponsesAgent):
        """Test the file search tool via invoke_stream."""
        input_text = "Who is the youngest employee?"
        async for response in responses_agent.invoke_stream(messages=input_text):
            assert isinstance(response.message, StreamingChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT

    # endregion

    # region Function calling tests

    @pytest.mark.parametrize(
        "responses_agent",
        [
            ("azure", {"enable_kernel_function": True}),
            ("openai", {"enable_kernel_function": True}),
        ],
        indirect=["responses_agent"],
        ids=["azure-function-calling-get-response", "openai-function-calling-get-response"],
    )
    async def test_function_calling_get_response(self, responses_agent: OpenAIResponsesAgent):
        """Test function calling."""
        response = await responses_agent.get_response(
            messages="What is the weather in Seattle?",
        )
        assert isinstance(response.message, ChatMessageContent)
        assert response.message.role == AuthorRole.ASSISTANT
        # WeatherPlugin.current_weather always answers "sunny".
        assert "sunny" in response.message.content

    @pytest.mark.parametrize(
        "responses_agent",
        [
            ("azure", {"enable_kernel_function": True}),
            ("openai", {"enable_kernel_function": True}),
        ],
        indirect=["responses_agent"],
        ids=["azure-function-calling-invoke", "openai-function-calling-invoke"],
    )
    async def test_function_calling_invoke(self, responses_agent: OpenAIResponsesAgent):
        """Test function calling."""
        async for response in responses_agent.invoke(
            messages="What is the weather in Seattle?",
        ):
            assert isinstance(response.message, ChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT
            assert "sunny" in response.message.content

    @pytest.mark.parametrize(
        "responses_agent",
        [
            ("azure", {"enable_kernel_function": True}),
            ("openai", {"enable_kernel_function": True}),
        ],
        indirect=["responses_agent"],
        ids=["azure-function-calling-invoke-stream", "openai-function-calling-invoke-stream"],
    )
    async def test_function_calling_stream(self, responses_agent: OpenAIResponsesAgent):
        """Test function calling streaming."""
        full_message: str = ""
        async for response in responses_agent.invoke_stream(
            messages="What is the weather in Seattle?",
        ):
            assert isinstance(response.message, StreamingChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT
            full_message += response.message.content
        # The streamed chunks only carry fragments; assert on the full text.
        assert "sunny" in full_message

    # endregion

    # region Structured Outputs

    @pytest.mark.parametrize(
        "responses_agent",
        [
            ("azure", {"enable_structured_outputs": True}),
            ("openai", {"enable_structured_outputs": True}),
        ],
        indirect=["responses_agent"],
        ids=["azure-structured-outputs-get-response", "openai-structured-outputs-get-response"],
    )
    async def test_structured_outputs_get_response(self, responses_agent: OpenAIResponsesAgent):
        """Test structured outputs via get_response."""
        response = await responses_agent.get_response(
            messages="What is the weather in Seattle?",
        )
        assert isinstance(response.message, ChatMessageContent)
        assert response.message.role == AuthorRole.ASSISTANT
        # The content must parse as the configured Reasoning schema.
        assert Reasoning.model_validate_json(response.message.content)

    @pytest.mark.parametrize(
        "responses_agent",
        [
            ("azure", {"enable_structured_outputs": True}),
            ("openai", {"enable_structured_outputs": True}),
        ],
        indirect=["responses_agent"],
        ids=["azure-structured-outputs-invoke", "openai-structured-outputs-invoke"],
    )
    async def test_structured_outputs_invoke(self, responses_agent: OpenAIResponsesAgent):
        """Test structured outputs via invoke."""
        async for response in responses_agent.invoke(
            messages="What is the weather in Seattle?",
        ):
            assert isinstance(response.message, ChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT
            assert Reasoning.model_validate_json(response.message.content)

    @pytest.mark.parametrize(
        "responses_agent",
        [
            ("azure", {"enable_structured_outputs": True}),
            ("openai", {"enable_structured_outputs": True}),
        ],
        indirect=["responses_agent"],
        ids=["azure-structured-outputs-invoke-stream", "openai-structured-outputs-invoke-stream"],
    )
    async def test_structed_outputs_stream(self, responses_agent: OpenAIResponsesAgent):
        """Test structured outputs streaming."""
        # NOTE(review): method name has a typo ("structed") — kept to avoid
        # breaking any CI selection by test name; consider renaming.
        full_message: str = ""
        async for response in responses_agent.invoke_stream(
            messages="What is the weather in Seattle?",
        ):
            assert isinstance(response.message, StreamingChatMessageContent)
            assert response.message.role == AuthorRole.ASSISTANT
            full_message += response.message.content
        # Only the assembled stream is valid JSON for the Reasoning schema.
        assert Reasoning.model_validate_json(full_message)

    # endregion

0 commit comments

Comments
 (0)