@@ -63,7 +63,8 @@ class TraceFeatureMessageFileWriterTest {
 val testModel = LLModel(
     provider = TestLLMProvider(),
     id = "test-llm-id",
-    capabilities = emptyList()
+    capabilities = emptyList(),
+    contextLength = 1_000,
 )

 var runId = ""
@@ -310,7 +311,8 @@ class TraceFeatureMessageFileWriterTest {
 val testModel = LLModel(
     provider = TestLLMProvider(),
     id = "test-llm-id",
-    capabilities = emptyList()
+    capabilities = emptyList(),
+    contextLength = 1_000,
 )

 var runId = ""
@@ -388,4 +390,4 @@ class TraceFeatureMessageFileWriterTest {
 assertContentEquals(expectedLogMessages, actualMessages)
 }
 }
-}
+}
@@ -50,7 +50,8 @@ class TraceFeatureMessageLogWriterTest {
 val testModel = LLModel(
     provider = TestLLMProvider(),
     id = "test-llm-id",
-    capabilities = emptyList()
+    capabilities = emptyList(),
+    contextLength = 1_000,
 )

 var runId = ""
@@ -282,7 +283,8 @@ class TraceFeatureMessageLogWriterTest {
 val testModel = LLModel(
     provider = TestLLMProvider(),
     id = "test-llm-id",
-    capabilities = emptyList()
+    capabilities = emptyList(),
+    contextLength = 1_000,
 )

 var runId = ""
@@ -360,4 +362,4 @@ class TraceFeatureMessageLogWriterTest {
 assertContentEquals(expectedLogMessages, actualMessages)
 }
 }
-}
+}
@@ -115,7 +115,8 @@ class TraceFeatureMessageRemoteWriterTest {
 val testModel = LLModel(
     provider = TestLLMProvider(),
     id = "test-llm-id",
-    capabilities = emptyList()
+    capabilities = emptyList(),
+    contextLength = 1_000,
 )

 val expectedLLMCallWithToolsPrompt = expectedPrompt.copy(
@@ -379,7 +380,8 @@ class TraceFeatureMessageRemoteWriterTest {
 val testModel = LLModel(
     provider = TestLLMProvider(),
     id = "test-llm-id",
-    capabilities = emptyList()
+    capabilities = emptyList(),
+    contextLength = 1_000,
 )

 val expectedPrompt = Prompt(
@@ -24,7 +24,8 @@ public object OllamaEmbeddingModels {
 public val NOMIC_EMBED_TEXT: LLModel = LLModel(
     provider = LLMProvider.Ollama,
     id = "nomic-embed-text",
-    capabilities = listOf(LLMCapability.Embed)
+    capabilities = listOf(LLMCapability.Embed),
+    contextLength = 2_048,
 )

 /**
@@ -39,7 +40,8 @@ public object OllamaEmbeddingModels {
 public val ALL_MINI_LM: LLModel = LLModel(
     provider = LLMProvider.Ollama,
     id = "all-minilm",
-    capabilities = listOf(LLMCapability.Embed)
+    capabilities = listOf(LLMCapability.Embed),
+    contextLength = 512,
 )

 /**
@@ -54,7 +56,8 @@ public object OllamaEmbeddingModels {
 public val MULTILINGUAL_E5: LLModel = LLModel(
     provider = LLMProvider.Ollama,
     id = "zylonai/multilingual-e5-large",
-    capabilities = listOf(LLMCapability.Embed)
+    capabilities = listOf(LLMCapability.Embed),
+    contextLength = 512,
 )

 /**
@@ -69,7 +72,8 @@ public object OllamaEmbeddingModels {
 public val BGE_LARGE: LLModel = LLModel(
     provider = LLMProvider.Ollama,
     id = "bge-large",
-    capabilities = listOf(LLMCapability.Embed)
+    capabilities = listOf(LLMCapability.Embed),
+    contextLength = 512,
 )

 /**
@@ -85,6 +89,7 @@ public object OllamaEmbeddingModels {
 public val MXBAI_EMBED_LARGE: LLModel = LLModel(
     provider = LLMProvider.Ollama,
     id = "mxbai-embed-large",
-    capabilities = listOf(LLMCapability.Embed)
+    capabilities = listOf(LLMCapability.Embed),
+    contextLength = 512,
 )
-}
+}
@@ -25,7 +25,12 @@ class CachedPromptExecutorTest {
 private val testClock = object : Clock {
     override fun now() = testResponse.first().metaInfo.timestamp
 }
-private val testModel = LLModel(object : LLMProvider("", "") {}, "", emptyList())
+private val testModel = LLModel(
+    provider = object : LLMProvider("", "") {},
+    id = "",
+    capabilities = emptyList(),
+    contextLength = 1_000L,
+)
 }

 // Mock implementation of PromptCache
@@ -46,7 +46,9 @@ public object AnthropicModels: LLModelDefinitions {
         LLMCapability.Vision.Image,
         LLMCapability.Schema.JSON.Full,
         LLMCapability.Completion
-    )
+    ),
+    contextLength = 200_000,
+    maxOutputTokens = 4_096,
 )

 /**
@@ -68,7 +70,9 @@ public object AnthropicModels: LLModelDefinitions {
         LLMCapability.Vision.Image,
         LLMCapability.Schema.JSON.Full,
         LLMCapability.Completion
-    )
+    ),
+    contextLength = 200_000,
+    maxOutputTokens = 4_096,
 )

 /**
@@ -90,7 +94,9 @@ public object AnthropicModels: LLModelDefinitions {
         LLMCapability.Vision.Image,
         LLMCapability.Schema.JSON.Full,
         LLMCapability.Completion
-    )
+    ),
+    contextLength = 200_000,
+    maxOutputTokens = 8_192,
 )

 /**
@@ -112,7 +118,9 @@ public object AnthropicModels: LLModelDefinitions {
         LLMCapability.Vision.Image,
         LLMCapability.Schema.JSON.Full,
         LLMCapability.Completion
-    )
+    ),
+    contextLength = 200_000,
+    maxOutputTokens = 8_192,
 )

 /**
@@ -134,7 +142,9 @@ public object AnthropicModels: LLModelDefinitions {
         LLMCapability.Vision.Image,
         LLMCapability.Schema.JSON.Full,
         LLMCapability.Completion
-    )
+    ),
+    contextLength = 200_000,
+    maxOutputTokens = 64_000,
 )

 /**
@@ -155,7 +165,9 @@ public object AnthropicModels: LLModelDefinitions {
         LLMCapability.Vision.Image,
         LLMCapability.Schema.JSON.Full,
         LLMCapability.Completion
-    )
+    ),
+    contextLength = 200_000,
+    maxOutputTokens = 64_000,
 )

 /**
@@ -177,7 +189,9 @@ public object AnthropicModels: LLModelDefinitions {
         LLMCapability.Vision.Image,
         LLMCapability.Schema.JSON.Full,
         LLMCapability.Completion
-    )
+    ),
+    contextLength = 200_000,
+    maxOutputTokens = 32_000,
 )
 }

@@ -15,6 +15,7 @@ kotlin {
 api(project(":agents:agents-tools"))
 api(project(":agents:agents-utils"))
 api(project(":prompt:prompt-executor:prompt-executor-clients"))
+implementation(project(":prompt:prompt-executor:prompt-executor-clients:prompt-executor-anthropic-client"))
Review comment (Contributor), on the added implementation(...) line:
Why did you add it to commonMain? It should not be needed here, since we provide only a JVM implementation for Bedrock, and this dependency is already included in the jvmMain block.

Reply (Contributor Author):
I answered both of your comments in the main thread, because the answer is the same.

 api(project(":prompt:prompt-llm"))
 api(project(":prompt:prompt-model"))
 api(libs.kotlinx.coroutines.core)
@@ -56,4 +57,4 @@ kotlin {
 }
 }

-publishToMaven()
+publishToMaven()
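
For reference, a minimal sketch of the source-set layout the reviewer is pointing at above, assuming the Bedrock module declares a jvm() target and the conventional commonMain/jvmMain split; the project paths are taken from this diff, while the surrounding structure is illustrative and not the module's actual build file:

// Sketch only: typical Kotlin Multiplatform dependency scoping (assumed layout).
kotlin {
    sourceSets {
        val commonMain by getting {
            dependencies {
                // Platform-independent APIs stay in commonMain.
                api(project(":prompt:prompt-executor:prompt-executor-clients"))
            }
        }
        val jvmMain by getting {
            dependencies {
                // JVM-only dependency: per the review comment, the Anthropic client
                // is already declared here, so a JVM-only Bedrock implementation
                // does not need it in commonMain as well.
                implementation(project(":prompt:prompt-executor:prompt-executor-clients:prompt-executor-anthropic-client"))
            }
        }
    }
}

Under such a layout, a dependency declared in jvmMain is visible only to the JVM sources, so repeating it in commonMain merely widens its scope.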