AI rule node: add max output tokens for all providers

This commit is contained in:
Dmytro Skarzhynets 2025-06-27 19:37:12 +03:00
parent bc96c63fcf
commit d81d41fd7b
No known key found for this signature in database
GPG Key ID: 2B51652F224037DF
9 changed files with 18 additions and 0 deletions

View File

@@ -63,6 +63,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur
.topP(modelConfig.topP())
.frequencyPenalty(modelConfig.frequencyPenalty())
.presencePenalty(modelConfig.presencePenalty())
.maxTokens(modelConfig.maxOutputTokens())
.timeout(toDuration(modelConfig.timeoutSeconds()))
.maxRetries(modelConfig.maxRetries())
.build();
@@ -78,6 +79,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur
.topP(modelConfig.topP())
.frequencyPenalty(modelConfig.frequencyPenalty())
.presencePenalty(modelConfig.presencePenalty())
.maxTokens(modelConfig.maxOutputTokens())
.timeout(toDuration(modelConfig.timeoutSeconds()))
.maxRetries(modelConfig.maxRetries())
.build();
@@ -94,6 +96,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur
.topK(modelConfig.topK())
.frequencyPenalty(modelConfig.frequencyPenalty())
.presencePenalty(modelConfig.presencePenalty())
.maxOutputTokens(modelConfig.maxOutputTokens())
.timeout(toDuration(modelConfig.timeoutSeconds()))
.maxRetries(modelConfig.maxRetries())
.build();
@@ -165,6 +168,9 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur
if (modelConfig.frequencyPenalty() != null) {
generationConfigBuilder.setPresencePenalty(modelConfig.frequencyPenalty().floatValue());
}
if (modelConfig.maxOutputTokens() != null) {
generationConfigBuilder.setMaxOutputTokens(modelConfig.maxOutputTokens());
}
var generationConfig = generationConfigBuilder.build();
// construct generative model instance
@@ -191,6 +197,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur
.topP(modelConfig.topP())
.frequencyPenalty(modelConfig.frequencyPenalty())
.presencePenalty(modelConfig.presencePenalty())
.maxTokens(modelConfig.maxOutputTokens())
.timeout(toDuration(modelConfig.timeoutSeconds()))
.maxRetries(modelConfig.maxRetries())
.build();
@@ -205,6 +212,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur
.temperature(modelConfig.temperature())
.topP(modelConfig.topP())
.topK(modelConfig.topK())
.maxTokens(modelConfig.maxOutputTokens())
.timeout(toDuration(modelConfig.timeoutSeconds()))
.maxRetries(modelConfig.maxRetries())
.build();
@@ -227,6 +235,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur
var defaultChatRequestParams = ChatRequestParameters.builder()
.temperature(modelConfig.temperature())
.topP(modelConfig.topP())
.maxOutputTokens(modelConfig.maxOutputTokens())
.build();
return BedrockChatModel.builder()
@@ -248,6 +257,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur
.topP(modelConfig.topP())
.frequencyPenalty(modelConfig.frequencyPenalty())
.presencePenalty(modelConfig.presencePenalty())
.maxTokens(modelConfig.maxOutputTokens())
.timeout(toDuration(modelConfig.timeoutSeconds()))
.maxRetries(modelConfig.maxRetries())
.build();

View File

@@ -31,6 +31,7 @@ public record AmazonBedrockChatModel(
String modelId,
Double temperature,
Double topP,
Integer maxOutputTokens,
Integer timeoutSeconds,
Integer maxRetries
) implements AiChatModelConfig<AmazonBedrockChatModel.Config> {}

View File

@@ -32,6 +32,7 @@ public record AnthropicChatModel(
Double temperature,
Double topP,
Integer topK,
Integer maxOutputTokens,
Integer timeoutSeconds,
Integer maxRetries
) implements AiChatModelConfig<AnthropicChatModel.Config> {}

View File

@@ -33,6 +33,7 @@ public record AzureOpenAiChatModel(
Double topP,
Double frequencyPenalty,
Double presencePenalty,
Integer maxOutputTokens,
Integer timeoutSeconds,
Integer maxRetries
) implements AiChatModelConfig<AzureOpenAiChatModel.Config> {}

View File

@@ -33,6 +33,7 @@ public record GitHubModelsChatModel(
Double topP,
Double frequencyPenalty,
Double presencePenalty,
Integer maxOutputTokens,
Integer timeoutSeconds,
Integer maxRetries
) implements AiChatModelConfig<GitHubModelsChatModel.Config> {}

View File

@@ -34,6 +34,7 @@ public record GoogleAiGeminiChatModel(
Integer topK,
Double frequencyPenalty,
Double presencePenalty,
Integer maxOutputTokens,
Integer timeoutSeconds,
Integer maxRetries
) implements AiChatModelConfig<GoogleAiGeminiChatModel.Config> {}

View File

@@ -34,6 +34,7 @@ public record GoogleVertexAiGeminiChatModel(
Integer topK,
Double frequencyPenalty,
Double presencePenalty,
Integer maxOutputTokens,
Integer timeoutSeconds,
Integer maxRetries
) implements AiChatModelConfig<GoogleVertexAiGeminiChatModel.Config> {}

View File

@@ -33,6 +33,7 @@ public record MistralAiChatModel(
Double topP,
Double frequencyPenalty,
Double presencePenalty,
Integer maxOutputTokens,
Integer timeoutSeconds,
Integer maxRetries
) implements AiChatModelConfig<MistralAiChatModel.Config> {}

View File

@@ -33,6 +33,7 @@ public record OpenAiChatModel(
Double topP,
Double frequencyPenalty,
Double presencePenalty,
Integer maxOutputTokens,
Integer timeoutSeconds,
Integer maxRetries
) implements AiChatModelConfig<OpenAiChatModel.Config> {}