Skip to content

Commit a8fe1a0

Browse files
committed
Rename koog-ktor-plugin module to koog-ktor and refactor related build files, tests, and code for consistency. Rewrite AI agent functions with improved parameterization and modularity.
1 parent 1150d2f commit a8fe1a0

File tree

15 files changed

+107
-114
lines changed

15 files changed

+107
-114
lines changed

build.gradle.kts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -204,7 +204,7 @@ dependencies {
204204
dokka(project(":prompt:prompt-tokenizer"))
205205
dokka(project(":prompt:prompt-xml"))
206206
dokka(project(":koog-spring-boot-starter"))
207-
dokka(project(":koog-ktor-plugin"))
207+
"dokka"(project(":koog-ktor"))
208208
dokka(project(":rag:rag-base"))
209209
dokka(project(":rag:vector-storage"))
210210
}

examples/build.gradle.kts

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -33,8 +33,7 @@ dependencies {
3333
api(project(":prompt:prompt-executor:prompt-executor-clients:prompt-executor-bedrock-client"))
3434
api(project(":prompt:prompt-executor:prompt-executor-llms"))
3535
api(project(":prompt:prompt-executor:prompt-executor-llms-all"))
36-
37-
api(project(":koog-ktor-plugin"))
36+
api(project(":koog-ktor"))
3837

3938
api(libs.kotlinx.datetime)
4039

examples/src/main/kotlin/ai/koog/agents/example/ktor/KtorIntegrationExample.kt

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ fun Application.configureKoog() {
6767
fallback { }
6868
}
6969

70-
agent {
70+
agentConfig {
7171
mcp {
7272
sse("put some url here...")
7373
}
@@ -111,6 +111,7 @@ private fun Route.agenticRoutes() {
111111

112112
if (isHarmful) {
113113
call.respond(HttpStatusCode.BadRequest, "Harmful content detected")
114+
return@get
114115
}
115116

116117
val updatedRequest = llm().execute(prompt("id") {

koog-agents/build.gradle.kts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ val excluded = setOf(
1212
":examples",
1313
":integration-tests",
1414
":koog-spring-boot-starter",
15-
":koog-ktor-plugin",
15+
":koog-ktor",
1616
project.path, // the current project should not depend on itself
1717
)
1818

koog-ktor-plugin/Module.md renamed to koog-ktor/Module.md

Lines changed: 61 additions & 68 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,16 @@
1-
# Module koog-ktor-plugin
1+
# Module koog-ktor
22

33
Ktor server integration for the Koog AI agents framework.
44

55
## Overview
66

7-
The `koog-ktor-plugin` module provides seamless integration between the Koog AI agents framework and Ktor server applications. It includes:
7+
The `koog-ktor` module provides seamless integration between the Koog AI agents framework and Ktor server applications.
8+
It includes:
89

910
- A Ktor plugin for easy installation and configuration
1011
- Support for multiple LLM providers (OpenAI, Anthropic, Google, OpenRouter, Ollama)
1112
- Agent configuration with tools, features, and prompt customization
1213
- Extension functions for routes to interact with LLMs and agents
13-
- Content moderation capabilities
1414
- JVM-specific support for Model Context Protocol (MCP) integration
1515

1616
## Using in your project
@@ -19,58 +19,47 @@ Add the dependency to your `build.gradle.kts`:
1919

2020
```kotlin
2121
dependencies {
22-
implementation("ai.koog:koog-ktor-plugin:$koogVersion")
22+
implementation("ai.koog:koog-ktor:$koogVersion")
2323
}
2424
```
2525

2626
## Basic Usage
2727

28-
Install and configure the plugin in your Ktor application:
28+
Provide one or more API keys, and the Koog plugin will automatically connect to the corresponding provider when needed.
29+
For additional or provider-specific settings, see [YAML/CONF Configuration](#yamlconf-configuration) below.
30+
31+
```yaml
32+
koog:
33+
openai.apikey: "$OPENAI_API_KEY:your-openai-api-key"
34+
anthropic.apikey: "$ANTHROPIC_API_KEY:your-anthropic-api-key"
35+
google.apikey: "$GOOGLE_API_KEY:your-google-api-key"
36+
openrouter.apikey: "$OPENROUTER_API_KEY:your-openrouter-api-key"
37+
ollama.enabled: "$DEBUG:false"
38+
```
39+
40+
### Installing and configuring the plugin
41+
42+
The Koog plugin can also be configured in code, and some complex configurations can only be expressed in code.
43+
See [Programmatic Configuration](#programmatic-configuration) below.
2944
3045
```kotlin
3146
fun Application.module() {
3247
install(Koog) {
33-
// Configure LLM providers
3448
llm {
35-
openAI(apiKey = "your-openai-api-key") {
36-
baseUrl = "https://api.openai.com"
37-
}
38-
39-
// Optional: Configure other providers
49+
openAI(apiKey = "your-openai-api-key")
4050
anthropic(apiKey = "your-anthropic-api-key")
4151
ollama { baseUrl = "http://localhost:11434" }
4252
google(apiKey = "your-google-api-key")
4353
openRouter(apiKey = "your-openrouter-api-key")
4454
}
45-
46-
// Configure agent
47-
agent {
48-
// Register tools
49-
registerTools {
50-
// Add tools using reflection
51-
tool(::yourToolFunction)
52-
}
53-
54-
// Configure prompt
55-
prompt {
56-
system("You are a helpful assistant")
57-
}
58-
59-
// JVM-specific: Configure MCP integration
60-
mcp {
61-
sse("your-mcp-server-url")
62-
}
63-
}
6455
}
65-
66-
// Use in routes
56+
6757
routing {
6858
route("/ai") {
6959
post("/chat") {
7060
val userInput = call.receive<String>()
71-
72-
// Use agent to respond
73-
call.agentRespond(userInput)
61+
val output = singleRunAgent(userInput)
62+
call.respond(HttpStatusCode.OK, output)
7463
}
7564
}
7665
}
@@ -84,19 +73,19 @@ fun Application.module() {
8473
```kotlin
8574
post("/moderated-chat") {
8675
val userInput = call.receive<String>()
87-
76+
8877
// Moderate content
89-
val isHarmful = moderateWithLLM(OpenAIModels.Moderation.Omni) {
90-
user(userInput)
91-
}.isHarmful
92-
78+
val isHarmful = llm().moderate(prompt("id") {
79+
user(userInput)
80+
}, OpenAIModels.Moderation.Omni).isHarmful
81+
9382
if (isHarmful) {
9483
call.respond(HttpStatusCode.BadRequest, "Harmful content detected")
9584
return@post
9685
}
97-
98-
// Process with agent
99-
call.agentRespond(userInput)
86+
87+
val output = singleRunAgent(userInput)
88+
call.respond(HttpStatusCode.OK, output)
10089
}
10190
```
10291

@@ -105,14 +94,16 @@ post("/moderated-chat") {
10594
```kotlin
10695
post("/llm-chat") {
10796
val userInput = call.receive<String>()
108-
109-
// Ask LLM directly
110-
val response = askLLM(OllamaModels.Meta.LLAMA_3_2) {
111-
system("You are a helpful assistant")
112-
user(userInput)
113-
}.single() as Message.Assistant
114-
115-
call.respond(response.content)
97+
98+
val response = llm().execute(prompt("id") {
99+
system(
100+
"You are a helpful assistant that can correct user answers. " +
101+
"You will get a user's question and your task is to make it more clear for the further processing."
102+
)
103+
user(userInput)
104+
}, OllamaModels.Meta.LLAMA_3_2)
105+
106+
call.respond(HttpStatusCode.OK, response.content)
116107
}
117108
```
118109

@@ -121,9 +112,9 @@ post("/llm-chat") {
121112
```kotlin
122113
post("/custom-agent") {
123114
val userInput = call.receive<String>()
124-
125-
// Use custom strategy
126-
call.agentRespond(userInput, strategy = reActStrategy())
115+
116+
val output = aiAgent(reActStrategy(), userInput)
117+
call.respond(HttpStatusCode.OK, output)
127118
}
128119
```
129120

@@ -145,7 +136,7 @@ llm {
145136
socketTimeoutMillis = 30000
146137
}
147138
}
148-
139+
149140
// Set fallback LLM
150141
fallback {
151142
provider = LLMProvider.Ollama
@@ -156,7 +147,8 @@ llm {
156147

157148
#### YAML/CONF Configuration
158149

159-
You can also configure LLM providers using YAML or CONF files. The plugin will automatically read the configuration from the application's configuration file:
150+
You can also configure LLM providers using YAML or CONF files. The plugin will automatically read the configuration from
151+
the application's configuration file:
160152

161153
```yaml
162154
# application.yaml or application.conf
@@ -168,28 +160,29 @@ koog:
168160
requestTimeoutMillis: 30000
169161
connectTimeoutMillis: 10000
170162
socketTimeoutMillis: 30000
171-
163+
172164
anthropic:
173165
apikey: "your-anthropic-api-key"
174166
baseUrl: "https://api.anthropic.com"
175167
timeout:
176168
requestTimeoutMillis: 30000
177-
169+
178170
google:
179171
apikey: "your-google-api-key"
180172
baseUrl: "https://generativelanguage.googleapis.com"
181-
173+
182174
openrouter:
183175
apikey: "your-openrouter-api-key"
184176
baseUrl: "https://openrouter.ai"
185-
177+
186178
ollama:
187179
baseUrl: "http://localhost:11434"
188180
timeout:
189181
requestTimeoutMillis: 60000
190182
```
191183
192-
When using configuration files, you can still provide programmatic configuration that will override the settings from the file:
184+
When using configuration files, you can still provide programmatic configuration that will override the settings from
185+
the file:
193186
194187
```kotlin
195188
install(Koog) {
@@ -198,7 +191,7 @@ install(Koog) {
198191
// This will override the API key from the configuration file
199192
openAI(apiKey = System.getenv("OPENAI_API_KEY") ?: "override-from-code")
200193
}
201-
194+
202195
// Rest of your configuration...
203196
}
204197
```
@@ -211,21 +204,21 @@ Configure agent behavior, tools, and features:
211204
agent {
212205
// Set model
213206
model = OpenAIModels.GPT4.Turbo
214-
207+
215208
// Set max iterations
216209
maxAgentIterations = 10
217-
210+
218211
// Register tools
219212
registerTools {
220213
tool(::searchTool)
221214
tool(::calculatorTool)
222215
}
223-
216+
224217
// Configure prompt
225218
prompt {
226219
system("You are a helpful assistant specialized in...")
227220
}
228-
221+
229222
// Install features
230223
install(OpenTelemetry) {
231224
// Configure feature
@@ -242,10 +235,10 @@ agent {
242235
mcp {
243236
// Use Server-Sent Events
244237
sse("https://your-mcp-server.com/sse")
245-
238+
246239
// Or use process
247240
process(yourMcpProcess)
248-
241+
249242
// Or use existing client
250243
client(yourMcpClient)
251244
}
File renamed without changes.

0 commit comments

Comments
 (0)