@@ -8,10 +8,12 @@
 using System.Text;
 using System.Text.Json;
 using System.Threading.Tasks;
+using Microsoft.Extensions.Logging;
 using Microsoft.SemanticKernel.ChatCompletion;
 using Microsoft.SemanticKernel.Connectors.Google;
 using Microsoft.SemanticKernel.Connectors.Google.Core;
 using Microsoft.SemanticKernel.Http;
+using Moq;
 using Xunit;
 
 namespace SemanticKernel.Connectors.Google.UnitTests.Core.Gemini.Clients;
@@ -423,8 +425,12 @@ public async Task ItCreatesPostRequestWithSemanticKernelVersionHeaderAsync()
     [Fact]
     public async Task ItCreatesPostRequestWithResponseSchemaPropertyAsync()
     {
+        // Get a mock logger that will return true for IsEnabled(LogLevel.Trace)
+        var mockLogger = new Mock<ILogger<GeminiChatGenerationTests>>();
+        mockLogger.Setup(x => x.IsEnabled(LogLevel.Trace)).Returns(true);
+
         // Arrange
-        var client = this.CreateChatCompletionClient();
+        var client = this.CreateChatCompletionClient(logger: mockLogger.Object);
         var chatHistory = CreateSampleChatHistory();
         var settings = new GeminiPromptExecutionSettings { ResponseMimeType = "application/json", ResponseSchema = typeof(List<int>) };
 
Expand Down Expand Up @@ -504,7 +510,8 @@ private static ChatHistory CreateSampleChatHistory()
private GeminiChatCompletionClient CreateChatCompletionClient(
string modelId = "fake-model",
string? bearerKey = null,
HttpClient? httpClient = null)
HttpClient? httpClient = null,
ILogger? logger = null)
{
if (bearerKey is not null)
{
@@ -514,14 +521,16 @@ private GeminiChatCompletionClient CreateChatCompletionClient(
                 apiVersion: VertexAIVersion.V1,
                 bearerTokenProvider: () => new ValueTask<string>(bearerKey),
                 location: "fake-location",
-                projectId: "fake-project-id");
+                projectId: "fake-project-id",
+                logger: logger);
         }
 
         return new GeminiChatCompletionClient(
             httpClient: httpClient ?? this._httpClient,
             modelId: modelId,
             apiVersion: GoogleAIVersion.V1,
-            apiKey: "fake-key");
+            apiKey: "fake-key",
+            logger: logger);
     }
 
     public void Dispose()
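The mock logger in the test hunk above only needs IsEnabled(LogLevel.Trace) to return true so that the client takes its trace-logging branch; CreateChatCompletionClient then threads it through to whichever client constructor is used. For readers unfamiliar with the pattern, here is a self-contained sketch of such a Trace-enabled Moq logger, plus an optional Verify call that the PR's test does not perform. The class and method names are illustrative only, not part of the change.

using System;
using Microsoft.Extensions.Logging;
using Moq;
using Xunit;

public sealed class TraceLoggerSketch
{
    [Fact]
    public void TraceEnabledMockTakesTheTraceBranch()
    {
        // Same arrangement as in the diff: report Trace as enabled so callers
        // enter their IsEnabled(LogLevel.Trace) guard.
        var mockLogger = new Mock<ILogger<TraceLoggerSketch>>();
        mockLogger.Setup(x => x.IsEnabled(LogLevel.Trace)).Returns(true);

        ILogger logger = mockLogger.Object;
        if (logger.IsEnabled(LogLevel.Trace))
        {
            logger.LogTrace("ChatHistory: {ChatHistory}", "[]");
        }

        // Optional: the usual Moq pattern for asserting that ILogger.Log was
        // called at Trace level (not something the PR's test does).
        mockLogger.Verify(
            x => x.Log(
                LogLevel.Trace,
                It.IsAny<EventId>(),
                It.IsAny<It.IsAnyType>(),
                It.IsAny<Exception?>(),
                (Func<It.IsAnyType, Exception?, string>)It.IsAny<object>()),
            Times.Once);
    }
}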
@@ -304,6 +304,12 @@ private ChatCompletionState ValidateInputAndCreateChatCompletionState(
 
         if (this.Logger.IsEnabled(LogLevel.Trace))
         {
+            // JsonSerializer can't serialize Type; log the schema as a JsonElement instead.
+            if (geminiExecutionSettings.ResponseSchema is Type)
+            {
+                geminiExecutionSettings.ResponseSchema = GeminiRequest.GetResponseSchemaConfig(geminiExecutionSettings.ResponseSchema);
+            }
+
             this.Logger.LogTrace("ChatHistory: {ChatHistory}, Settings: {Settings}",
                 JsonSerializer.Serialize(chatHistory, JsonOptionsCache.ChatHistory),
                 JsonSerializer.Serialize(geminiExecutionSettings));
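For context on the guard added above: System.Text.Json refuses to serialize System.Type instances, so with trace logging enabled the Serialize call on the settings would throw whenever ResponseSchema held a Type (the case the updated test now exercises with typeof(List<int>)). A minimal standalone sketch of the failure and of the JsonElement workaround follows; the anonymous settings object and the hand-written schema are illustrative stand-ins, not the connector's own types.

using System;
using System.Collections.Generic;
using System.Text.Json;

// A Type-valued property makes the whole object graph unserializable.
var withType = new { ResponseMimeType = "application/json", ResponseSchema = (object)typeof(List<int>) };
try
{
    JsonSerializer.Serialize(withType);
}
catch (NotSupportedException ex)
{
    // System.Text.Json blocks serialization of System.Type for security reasons.
    Console.WriteLine(ex.Message);
}

// Replacing the Type with a JsonElement (as the client now does via
// GeminiRequest.GetResponseSchemaConfig) keeps the settings serializable.
JsonElement schema = JsonSerializer.SerializeToElement(
    new { type = "array", items = new { type = "integer" } });
var withElement = new { ResponseMimeType = "application/json", ResponseSchema = (object)schema };
Console.WriteLine(JsonSerializer.Serialize(withElement));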
@@ -309,7 +309,7 @@ private static void AddConfiguration(GeminiPromptExecutionSettings executionSett
         };
     }
 
-    private static JsonElement? GetResponseSchemaConfig(object? responseSchemaSettings)
+    internal static JsonElement? GetResponseSchemaConfig(object? responseSchemaSettings)
    {
        if (responseSchemaSettings is null)
        {
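The only change in this last file is visibility: GetResponseSchemaConfig goes from private to internal so that GeminiChatCompletionClient, in the same assembly, can reuse it for the conversion shown earlier. A hypothetical direct test of it might look like the sketch below, assuming the unit-test project's existing access to the connector's internals and that GeminiRequest sits in the Microsoft.SemanticKernel.Connectors.Google.Core namespace already imported by the test file above; the test class and assertion are illustrative, not part of the PR.

using System.Collections.Generic;
using System.Text.Json;
using Microsoft.SemanticKernel.Connectors.Google.Core;
using Xunit;

public sealed class GeminiRequestSchemaSketch
{
    [Fact]
    public void GetResponseSchemaConfigTurnsATypeIntoAJsonElement()
    {
        // The same conversion the chat client performs before trace-logging:
        // a CLR Type goes in, a serializable JsonElement? schema comes out.
        JsonElement? schema = GeminiRequest.GetResponseSchemaConfig(typeof(List<int>));

        Assert.NotNull(schema);
    }
}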