From b7a910ae1d9387149ac8fbf857215314d36bea06 Mon Sep 17 00:00:00 2001 From: Evan Mattson Date: Fri, 28 Mar 2025 19:51:43 +0900 Subject: [PATCH 1/2] Pass kernel args through to kernel invoke function call --- python/samples/concepts/README.md | 1 + .../azure_ai_agent_prompt_templating.py | 114 ++++++++++++++++++ .../agents/azure_ai/agent_thread_actions.py | 20 ++- .../open_ai/assistant_thread_actions.py | 23 +++- .../test_assistant_thread_actions.py | 4 + 5 files changed, 153 insertions(+), 9 deletions(-) create mode 100644 python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_prompt_templating.py diff --git a/python/samples/concepts/README.md b/python/samples/concepts/README.md index fa5cddf07f0d..85c20901f0e9 100644 --- a/python/samples/concepts/README.md +++ b/python/samples/concepts/README.md @@ -9,6 +9,7 @@ - [Azure AI Agent as Kernel Function](./agents/azure_ai_agent/azure_ai_agent_as_kernel_function.py) - [Azure AI Agent with Azure AI Search](./agents/azure_ai_agent/azure_ai_agent_azure_ai_search.py) - [Azure AI Agent File Manipulation](./agents/azure_ai_agent/azure_ai_agent_file_manipulation.py) +- [Azure AI Agent Prompt Templating](./agents/azure_ai_agent/azure_ai_agent_prompt_templating.py) - [Azure AI Agent Chat History Callback](./agents/azure_ai_agent/azure_ai_agent_streaming_chat_history_callback.py) - [Azure AI Agent Streaming](./agents/azure_ai_agent/azure_ai_agent_streaming.py) diff --git a/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_prompt_templating.py b/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_prompt_templating.py new file mode 100644 index 000000000000..e1b1f31032ee --- /dev/null +++ b/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_prompt_templating.py @@ -0,0 +1,114 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import asyncio + +from azure.identity.aio import DefaultAzureCredential + +from semantic_kernel.agents import AzureAIAgent, AzureAIAgentSettings +from semantic_kernel.functions import KernelArguments +from semantic_kernel.prompt_template import PromptTemplateConfig + +""" +The following sample demonstrates how to create a chat completion +agent using Azure OpenAI within Semantic Kernel. +It uses parameterized prompts and shows how to swap between +"semantic-kernel," "jinja2," and "handlebars" template formats, +This sample highlights the agent's chat history conversation +is managed and how kernel arguments are passed in and used. +""" + +# Define the inputs and styles to be used in the agent +inputs = [ + ("Home cooking is great.", None), + ("Talk about world peace.", "iambic pentameter"), + ("Say something about doing your best.", "e. e. cummings"), + ("What do you think about having fun?", "old school rap"), +] + + +async def invoke_chat_completion_agent(agent: AzureAIAgent, inputs): + """Invokes the given agent with each (input, style) in inputs.""" + + thread = None + + for user_input, style in inputs: + print(f"[USER]: {user_input}\n") + + # If style is specified, override the 'style' argument + argument_overrides = None + if style: + argument_overrides = KernelArguments(style=style) + + # Stream agent responses + async for response in agent.invoke_stream(messages=user_input, thread=thread, arguments=argument_overrides): + print(f"{response.content}", end="", flush=True) + thread = response.thread + print("\n") + + +async def invoke_agent_with_template(template_str: str, template_format: str, default_style: str = "haiku"): + """Creates an agent with the specified template and format, then invokes it using invoke_chat_completion_agent.""" + + # Configure the prompt template + prompt_config = PromptTemplateConfig(template=template_str, template_format=template_format) + + ai_agent_settings = AzureAIAgentSettings.create() + + async with ( + 
DefaultAzureCredential() as creds, + AzureAIAgent.create_client( + credential=creds, + conn_str=ai_agent_settings.project_connection_string.get_secret_value(), + ) as client, + ): + # Create agent definition + agent_definition = await client.agents.create_agent( + model=ai_agent_settings.model_deployment_name, + name="MyPoetAgent", + ) + + # Create the AzureAI Agent + agent = AzureAIAgent( + client=client, + definition=agent_definition, + prompt_template_config=prompt_config, + arguments=KernelArguments(style=default_style), + ) + + await invoke_chat_completion_agent(agent, inputs) + + +async def main(): + # 1) Using "semantic-kernel" format + print("\n===== SEMANTIC-KERNEL FORMAT =====\n") + semantic_kernel_template = """ + Write a one verse poem on the requested topic in the style of {{$style}}. + Always state the requested style of the poem. + """ + await invoke_agent_with_template( + template_str=semantic_kernel_template, + template_format="semantic-kernel", + default_style="haiku", + ) + + # 2) Using "jinja2" format + print("\n===== JINJA2 FORMAT =====\n") + jinja2_template = """ + Write a one verse poem on the requested topic in the style of {{style}}. + Always state the requested style of the poem. + """ + await invoke_agent_with_template(template_str=jinja2_template, template_format="jinja2", default_style="haiku") + + # 3) Using "handlebars" format + print("\n===== HANDLEBARS FORMAT =====\n") + handlebars_template = """ + Write a one verse poem on the requested topic in the style of {{style}}. + Always state the requested style of the poem. 
+ """ + await invoke_agent_with_template( + template_str=handlebars_template, template_format="handlebars", default_style="haiku" + ) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/semantic_kernel/agents/azure_ai/agent_thread_actions.py b/python/semantic_kernel/agents/azure_ai/agent_thread_actions.py index c189da5802f5..aee21fc38ec8 100644 --- a/python/semantic_kernel/agents/azure_ai/agent_thread_actions.py +++ b/python/semantic_kernel/agents/azure_ai/agent_thread_actions.py @@ -198,7 +198,9 @@ async def invoke( from semantic_kernel.contents.chat_history import ChatHistory chat_history = ChatHistory() if kwargs.get("chat_history") is None else kwargs["chat_history"] - _ = await cls._invoke_function_calls(kernel=kernel, fccs=fccs, chat_history=chat_history) + _ = await cls._invoke_function_calls( + kernel=kernel, fccs=fccs, chat_history=chat_history, arguments=arguments + ) tool_outputs = cls._format_tool_outputs(fccs, chat_history) await agent.client.agents.submit_tool_outputs_to_run( @@ -404,6 +406,7 @@ async def invoke_stream( thread_id=thread_id, output_messages=output_messages, kernel=kernel, + arguments=arguments, function_steps=function_steps, active_messages=active_messages, ): @@ -417,6 +420,7 @@ async def _process_stream_events( agent: "AzureAIAgent", thread_id: str, kernel: "Kernel", + arguments: KernelArguments, function_steps: dict[str, FunctionCallContent], active_messages: dict[str, RunStep], output_messages: "list[ChatMessageContent] | None" = None, @@ -465,6 +469,7 @@ async def _process_stream_events( kernel=kernel, run=run, function_steps=function_steps, + arguments=arguments, ) if action_result is None: raise RuntimeError( @@ -823,11 +828,15 @@ async def _retrieve_message( @classmethod async def _invoke_function_calls( - cls: type[_T], kernel: "Kernel", fccs: list["FunctionCallContent"], chat_history: "ChatHistory" + cls: type[_T], + kernel: "Kernel", + fccs: list["FunctionCallContent"], + chat_history: "ChatHistory", + 
arguments: KernelArguments, ) -> list[Any]: """Invoke the function calls.""" tasks = [ - kernel.invoke_function_call(function_call=function_call, chat_history=chat_history) + kernel.invoke_function_call(function_call=function_call, chat_history=chat_history, arguments=arguments) for function_call in fccs ] return await asyncio.gather(*tasks) @@ -858,6 +867,7 @@ async def _handle_streaming_requires_action( kernel: "Kernel", run: ThreadRun, function_steps: dict[str, "FunctionCallContent"], + arguments: KernelArguments, **kwargs: Any, ) -> FunctionActionResult | None: """Handle the requires action event for a streaming run.""" @@ -867,7 +877,9 @@ async def _handle_streaming_requires_action( from semantic_kernel.contents.chat_history import ChatHistory chat_history = ChatHistory() if kwargs.get("chat_history") is None else kwargs["chat_history"] - _ = await cls._invoke_function_calls(kernel=kernel, fccs=fccs, chat_history=chat_history) + _ = await cls._invoke_function_calls( + kernel=kernel, fccs=fccs, chat_history=chat_history, arguments=arguments + ) function_result_streaming_content = merge_streaming_function_results(chat_history.messages)[0] tool_outputs = cls._format_tool_outputs(fccs, chat_history) return FunctionActionResult( diff --git a/python/semantic_kernel/agents/open_ai/assistant_thread_actions.py b/python/semantic_kernel/agents/open_ai/assistant_thread_actions.py index 97ff5598ad05..4f969b7b57af 100644 --- a/python/semantic_kernel/agents/open_ai/assistant_thread_actions.py +++ b/python/semantic_kernel/agents/open_ai/assistant_thread_actions.py @@ -250,7 +250,9 @@ async def invoke( from semantic_kernel.contents.chat_history import ChatHistory chat_history = ChatHistory() - _ = await cls._invoke_function_calls(kernel=kernel, fccs=fccs, chat_history=chat_history) + _ = await cls._invoke_function_calls( + kernel=kernel, fccs=fccs, chat_history=chat_history, arguments=arguments + ) tool_outputs = cls._format_tool_outputs(fccs, chat_history) await 
agent.client.beta.threads.runs.submit_tool_outputs( @@ -474,7 +476,11 @@ async def invoke_stream( elif event.event == "thread.run.requires_action": run = event.data function_action_result = await cls._handle_streaming_requires_action( - agent.name, kernel, run, function_steps + agent.name, + kernel, + run, + function_steps, + arguments, ) if function_action_result is None: raise AgentInvokeException( @@ -533,6 +539,7 @@ async def _handle_streaming_requires_action( kernel: "Kernel", run: "Run", function_steps: dict[str, "FunctionCallContent"], + arguments: KernelArguments, **kwargs: Any, ) -> FunctionActionResult | None: """Handle the requires action event for a streaming run.""" @@ -542,7 +549,9 @@ async def _handle_streaming_requires_action( from semantic_kernel.contents.chat_history import ChatHistory chat_history = ChatHistory() if kwargs.get("chat_history") is None else kwargs["chat_history"] - _ = await cls._invoke_function_calls(kernel=kernel, fccs=fccs, chat_history=chat_history) + _ = await cls._invoke_function_calls( + kernel=kernel, fccs=fccs, chat_history=chat_history, arguments=arguments + ) function_result_streaming_content = merge_streaming_function_results(chat_history.messages)[0] tool_outputs = cls._format_tool_outputs(fccs, chat_history) return FunctionActionResult( @@ -625,11 +634,15 @@ async def _retrieve_message( @classmethod async def _invoke_function_calls( - cls: type[_T], kernel: "Kernel", fccs: list["FunctionCallContent"], chat_history: "ChatHistory" + cls: type[_T], + kernel: "Kernel", + fccs: list["FunctionCallContent"], + chat_history: "ChatHistory", + arguments: KernelArguments, ) -> list[Any]: """Invoke the function calls.""" tasks = [ - kernel.invoke_function_call(function_call=function_call, chat_history=chat_history) + kernel.invoke_function_call(function_call=function_call, chat_history=chat_history, arguments=arguments) for function_call in fccs ] return await asyncio.gather(*tasks) diff --git 
a/python/tests/unit/agents/openai_assistant/test_assistant_thread_actions.py b/python/tests/unit/agents/openai_assistant/test_assistant_thread_actions.py index 660372d684ea..33c23beef120 100644 --- a/python/tests/unit/agents/openai_assistant/test_assistant_thread_actions.py +++ b/python/tests/unit/agents/openai_assistant/test_assistant_thread_actions.py @@ -731,6 +731,7 @@ async def test_handle_streaming_requires_action_returns_result(): dummy_tool_outputs = {"output": "value"} dummy_kernel = MagicMock() dummy_agent_name = "TestAgent" + dummy_args = {} with ( patch( "semantic_kernel.agents.open_ai.assistant_thread_actions.get_function_call_contents", @@ -752,6 +753,7 @@ async def test_handle_streaming_requires_action_returns_result(): dummy_kernel, dummy_run, dummy_function_steps, # type: ignore + dummy_args, ) assert result is not None assert isinstance(result, FunctionActionResult) @@ -766,11 +768,13 @@ async def test_handle_streaming_requires_action_returns_none(): dummy_function_steps = {"step1": MagicMock()} dummy_kernel = MagicMock() dummy_agent_name = "TestAgent" + dummy_args = {} with patch("semantic_kernel.agents.open_ai.assistant_thread_actions.get_function_call_contents", return_value=None): result = await AssistantThreadActions._handle_streaming_requires_action( dummy_agent_name, dummy_kernel, dummy_run, dummy_function_steps, # type: ignore + dummy_args, ) assert result is None From c483e701c6a86baa84073f8fd2b28b53a5c9782a Mon Sep 17 00:00:00 2001 From: Evan Mattson Date: Fri, 28 Mar 2025 19:54:24 +0900 Subject: [PATCH 2/2] Fix docstring --- .../azure_ai_agent/azure_ai_agent_prompt_templating.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_prompt_templating.py b/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_prompt_templating.py index e1b1f31032ee..a21abe723247 100644 --- 
a/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_prompt_templating.py
+++ b/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_prompt_templating.py
@@ -9,12 +9,12 @@
 from semantic_kernel.prompt_template import PromptTemplateConfig
 
 """
-The following sample demonstrates how to create a chat completion
+The following sample demonstrates how to create an Azure AI agent
 using Azure OpenAI within Semantic Kernel.
 It uses parameterized prompts and shows how to swap between
 "semantic-kernel," "jinja2," and "handlebars" template formats,
-This sample highlights the agent's chat history conversation
-is managed and how kernel arguments are passed in and used.
+This sample highlights how the agent's prompt templates are managed
+and how kernel arguments are passed in and used.
 """
 
 # Define the inputs and styles to be used in the agent