
Commit 3a2363d

[prompt] [integration-tests] Fix input/output token counts for OpenAILLMClient (#370)
1 parent cb53c34 commit 3a2363d

File tree

7 files changed (+37, -6 lines)


TESTING.md

Lines changed: 2 additions & 0 deletions
@@ -94,6 +94,8 @@ Integration tests that interact with LLM services require API tokens to be set a
 - `OPEN_ROUTER_API_TEST_KEY` - Required for tests using OpenRouter

 You need to set these environment variables before running the integration tests that use the corresponding LLM clients.
+To simplify development, you can also create an `env.properties` file (already gitignored) using [env.template.properties](./integration-tests/env.template.properties) as a template.
+The properties specified there are then applied automatically as environment variables when you run any test task.

 ## Running Ollama tests
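As a hedged illustration of how a JVM integration test might consume one of the keys listed above; the helper name and error message are made up for this sketch and are not taken from the repository:

```kotlin
// Illustrative helper only; the repository's tests may read keys differently.
fun requiredTestKey(name: String): String =
    System.getenv(name)
        ?: error("$name is not set; see TESTING.md for the required API keys")

// Example usage in a test:
// val openAiKey = requiredTestKey("OPEN_AI_API_TEST_KEY")
```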

examples/src/main/kotlin/ai/koog/agents/example/ApiKeyService.kt

Lines changed: 3 additions & 0 deletions
@@ -9,4 +9,7 @@ internal object ApiKeyService {

     val googleApiKey: String
         get() = System.getenv("GOOGLE_API_KEY") ?: throw IllegalArgumentException("GOOGLE_API_KEY env is not set")
+
+    val openRouterApiKey: String
+        get() = System.getenv("OPENROUTER_API_KEY") ?: throw IllegalArgumentException("OPENROUTER_API_KEY env is not set")
 }
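As an aside, the repeated getenv-or-throw pattern in ApiKeyService could be factored into a helper; the following is only a sketch of that alternative, not part of this commit:

```kotlin
// Sketch only: same behaviour as the existing getters, with the lookup factored out.
internal object ApiKeyServiceSketch {
    private fun requiredEnv(name: String): String =
        System.getenv(name) ?: throw IllegalArgumentException("$name env is not set")

    val googleApiKey: String get() = requiredEnv("GOOGLE_API_KEY")
    val openRouterApiKey: String get() = requiredEnv("OPENROUTER_API_KEY")
}
```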

integration-tests/build.gradle.kts

Lines changed: 11 additions & 0 deletions
@@ -4,6 +4,7 @@ version = rootProject.version
 plugins {
     id("ai.kotlin.multiplatform")
     alias(libs.plugins.kotlin.serialization)
+    id("ai.koog.gradle.plugins.credentialsresolver")
 }

 kotlin {
@@ -36,6 +37,16 @@ kotlin {
     }
 }

+val envs = credentialsResolver.resolve(
+    layout.projectDirectory.file(provider { "env.properties" })
+)
+
+tasks.withType<Test> {
+    doFirst {
+        environment(envs.get())
+    }
+}
+
 dokka {
     dokkaSourceSets.configureEach {
         suppress.set(true)
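The credentialsresolver plugin is internal to this build, so as a rough illustration of the same idea, here is how a plain Gradle Kotlin DSL script could load env.properties and pass its entries to test JVMs. This is a sketch under the assumption that plain key=value entries are enough, i.e. without the plugin's 1Password op:// resolution:

```kotlin
import java.util.Properties

// Read env.properties from the project directory if it exists.
val envProps: Map<String, String> =
    layout.projectDirectory.file("env.properties").asFile
        .takeIf { it.exists() }
        ?.inputStream()
        ?.use { stream -> Properties().apply { load(stream) } }
        ?.entries
        ?.associate { (key, value) -> key.toString() to value.toString() }
        ?: emptyMap()

// Expose the entries as environment variables for every test task.
tasks.withType<Test>().configureEach {
    doFirst {
        environment(envProps)
    }
}
```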

integration-tests/env.template.properties

Lines changed: 10 additions & 0 deletions

@@ -0,0 +1,10 @@
+# Copy this file without "template".
+# Add your envs here:
+# FOO=bar
+# Or, if you want to integrate with 1Password, you can put a reference, e.g. op://foo/bar:
+# FOO=op://foo/bar
+
+OPEN_AI_API_TEST_KEY=
+ANTHROPIC_API_TEST_KEY=
+GEMINI_API_TEST_KEY=
+OPEN_ROUTER_API_TEST_KEY=

integration-tests/src/jvmTest/kotlin/ai/koog/integration/tests/SingleLLMPromptExecutorIntegrationTest.kt

Lines changed: 6 additions & 1 deletion
@@ -122,10 +122,15 @@ class SingleLLMPromptExecutorIntegrationTest {
         assertNotNull(response, "Response should not be null")
         assertTrue(response.isNotEmpty(), "Response should not be empty")
         assertTrue(response.first() is Message.Assistant, "Response should be an Assistant message")
+
+        val message = response.first() as Message.Assistant
         assertTrue(
-            (response.first() as Message.Assistant).content.contains("Paris", ignoreCase = true),
+            message.content.contains("Paris", ignoreCase = true),
             "Response should contain 'Paris'"
         )
+        assertNotNull(message.metaInfo.inputTokensCount, "Input tokens count should not be null")
+        assertNotNull(message.metaInfo.outputTokensCount, "Output tokens count should not be null")
+        assertNotNull(message.metaInfo.totalTokensCount, "Total tokens count should not be null")
     }
 }
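A possible follow-up check, not part of this commit: when all three counts are reported, the total is usually the sum of input and output tokens. Whether every provider guarantees that equality is an assumption, so a sketch of such an assertion (continuing the test body above, using kotlin.test.assertEquals) is shown separately rather than folded into the diff:

```kotlin
// Sketch only; assumes the provider reports total = input + output, which is not
// guaranteed by this commit or by every LLM backend.
val meta = message.metaInfo
val input = meta.inputTokensCount
val output = meta.outputTokensCount
val total = meta.totalTokensCount
if (input != null && output != null && total != null) {
    assertEquals(input + output, total, "Total tokens should equal input + output tokens")
}
```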

prompt/prompt-executor/prompt-executor-clients/prompt-executor-openai-client/src/commonMain/kotlin/ai/koog/prompt/executor/clients/openai/DataModel.kt

Lines changed: 2 additions & 2 deletions
@@ -136,8 +136,8 @@ internal data class OpenAIChoice(

 @Serializable
 internal data class OpenAIUsage(
-    val inputTokens: Int? = null,
-    val outputTokens: Int? = null,
+    val promptTokens: Int? = null,
+    val completionTokens: Int? = null,
     val totalTokens: Int
 )
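The rename matters because the OpenAI chat completions API reports usage as prompt_tokens / completion_tokens / total_tokens. Assuming the client's Json instance maps snake_case JSON fields to camelCase Kotlin properties (an assumption about OpenAILLMClient's configuration rather than something shown in this diff), only promptTokens and completionTokens pick up the values; the old property names silently fell back to their null defaults. A standalone sketch of that mapping:

```kotlin
// Sketch of the assumed snake_case-to-camelCase mapping; not the client's actual setup.
import kotlinx.serialization.ExperimentalSerializationApi
import kotlinx.serialization.Serializable
import kotlinx.serialization.decodeFromString
import kotlinx.serialization.json.Json
import kotlinx.serialization.json.JsonNamingStrategy

@Serializable
data class UsageSketch(
    val promptTokens: Int? = null,
    val completionTokens: Int? = null,
    val totalTokens: Int
)

@OptIn(ExperimentalSerializationApi::class)
fun main() {
    val json = Json {
        ignoreUnknownKeys = true
        namingStrategy = JsonNamingStrategy.SnakeCase
    }
    val usage = json.decodeFromString<UsageSketch>(
        """{"prompt_tokens": 12, "completion_tokens": 34, "total_tokens": 46}"""
    )
    // Prints: UsageSketch(promptTokens=12, completionTokens=34, totalTokens=46)
    println(usage)
}
```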

prompt/prompt-executor/prompt-executor-clients/prompt-executor-openai-client/src/commonMain/kotlin/ai/koog/prompt/executor/clients/openai/OpenAILLMClient.kt

Lines changed: 3 additions & 3 deletions
@@ -326,7 +326,7 @@ public open class OpenAILLMClient(
         return withContext(Dispatchers.SuitableForIO) {
             val response = httpClient.post(settings.chatCompletionsPath) {
                 setBody(request)
-            }
+            }

             if (response.status.isSuccess()) {
                 response.body<OpenAIResponse>()
@@ -450,8 +450,8 @@ public open class OpenAILLMClient(

         // Extract token count from the response
         val totalTokensCount = response.usage?.totalTokens
-        val inputTokensCount = response.usage?.inputTokens
-        val outputTokensCount = response.usage?.outputTokens
+        val inputTokensCount = response.usage?.promptTokens
+        val outputTokensCount = response.usage?.completionTokens

         val metaInfo = ResponseMetaInfo.create(
             clock,
