AI rule node: add max output tokens for all providers

Dmytro Skarzhynets 2025-06-27 19:37:12 +03:00
parent bc96c63fcf
commit d81d41fd7b
9 changed files with 18 additions and 0 deletions

@@ -63,6 +63,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigurer
         .topP(modelConfig.topP())
         .frequencyPenalty(modelConfig.frequencyPenalty())
         .presencePenalty(modelConfig.presencePenalty())
+        .maxTokens(modelConfig.maxOutputTokens())
         .timeout(toDuration(modelConfig.timeoutSeconds()))
         .maxRetries(modelConfig.maxRetries())
         .build();
@@ -78,6 +79,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigurer
         .topP(modelConfig.topP())
         .frequencyPenalty(modelConfig.frequencyPenalty())
         .presencePenalty(modelConfig.presencePenalty())
+        .maxTokens(modelConfig.maxOutputTokens())
         .timeout(toDuration(modelConfig.timeoutSeconds()))
         .maxRetries(modelConfig.maxRetries())
         .build();
@@ -94,6 +96,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigurer
         .topK(modelConfig.topK())
         .frequencyPenalty(modelConfig.frequencyPenalty())
         .presencePenalty(modelConfig.presencePenalty())
+        .maxOutputTokens(modelConfig.maxOutputTokens())
         .timeout(toDuration(modelConfig.timeoutSeconds()))
         .maxRetries(modelConfig.maxRetries())
         .build();
@@ -165,6 +168,9 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigurer
     if (modelConfig.frequencyPenalty() != null) {
         generationConfigBuilder.setPresencePenalty(modelConfig.frequencyPenalty().floatValue());
     }
+    if (modelConfig.maxOutputTokens() != null) {
+        generationConfigBuilder.setMaxOutputTokens(modelConfig.maxOutputTokens());
+    }
     var generationConfig = generationConfigBuilder.build();

     // construct generative model instance
@@ -191,6 +197,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigurer
         .topP(modelConfig.topP())
         .frequencyPenalty(modelConfig.frequencyPenalty())
         .presencePenalty(modelConfig.presencePenalty())
+        .maxTokens(modelConfig.maxOutputTokens())
         .timeout(toDuration(modelConfig.timeoutSeconds()))
         .maxRetries(modelConfig.maxRetries())
         .build();
@@ -205,6 +212,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigurer
         .temperature(modelConfig.temperature())
         .topP(modelConfig.topP())
         .topK(modelConfig.topK())
+        .maxTokens(modelConfig.maxOutputTokens())
         .timeout(toDuration(modelConfig.timeoutSeconds()))
         .maxRetries(modelConfig.maxRetries())
         .build();
@@ -227,6 +235,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigurer
     var defaultChatRequestParams = ChatRequestParameters.builder()
         .temperature(modelConfig.temperature())
         .topP(modelConfig.topP())
+        .maxOutputTokens(modelConfig.maxOutputTokens())
         .build();

     return BedrockChatModel.builder()
@@ -248,6 +257,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigurer
         .topP(modelConfig.topP())
         .frequencyPenalty(modelConfig.frequencyPenalty())
         .presencePenalty(modelConfig.presencePenalty())
+        .maxTokens(modelConfig.maxOutputTokens())
         .timeout(toDuration(modelConfig.timeoutSeconds()))
         .maxRetries(modelConfig.maxRetries())
         .build();
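
A note on the two setter styles above: the langchain4j-style builders expose the limit as .maxTokens(Integer) or .maxOutputTokens(Integer) and generally treat a null value as "leave unset", while the generation-config path (apparently the Vertex AI Gemini provider, judging by the GoogleVertexAiGeminiChatModel record below) uses a protobuf builder whose setter takes a primitive int, so only that path gets an explicit null guard. A minimal standalone sketch of that guard, assuming the com.google.cloud.vertexai.api.GenerationConfig builder and a hypothetical, simplified ModelConfig record (not the project's actual class):

    // Hypothetical stand-in for the AI rule node's model config; only the
    // field relevant to this change is modelled here.
    record ModelConfig(Integer maxOutputTokens) {}

    class GenerationConfigSketch {

        static com.google.cloud.vertexai.api.GenerationConfig toGenerationConfig(ModelConfig modelConfig) {
            var builder = com.google.cloud.vertexai.api.GenerationConfig.newBuilder();
            // The protobuf setter takes a primitive int, so unboxing a null Integer
            // would throw; only set the limit when one was actually configured.
            if (modelConfig.maxOutputTokens() != null) {
                builder.setMaxOutputTokens(modelConfig.maxOutputTokens());
            }
            return builder.build();
        }
    }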

@@ -31,6 +31,7 @@ public record AmazonBedrockChatModel(
         String modelId,
         Double temperature,
         Double topP,
+        Integer maxOutputTokens,
         Integer timeoutSeconds,
         Integer maxRetries
 ) implements AiChatModelConfig<AmazonBedrockChatModel.Config> {}

@@ -32,6 +32,7 @@ public record AnthropicChatModel(
         Double temperature,
         Double topP,
         Integer topK,
+        Integer maxOutputTokens,
         Integer timeoutSeconds,
         Integer maxRetries
 ) implements AiChatModelConfig<AnthropicChatModel.Config> {}

@@ -33,6 +33,7 @@ public record AzureOpenAiChatModel(
         Double topP,
         Double frequencyPenalty,
         Double presencePenalty,
+        Integer maxOutputTokens,
         Integer timeoutSeconds,
         Integer maxRetries
 ) implements AiChatModelConfig<AzureOpenAiChatModel.Config> {}

@@ -33,6 +33,7 @@ public record GitHubModelsChatModel(
         Double topP,
         Double frequencyPenalty,
         Double presencePenalty,
+        Integer maxOutputTokens,
         Integer timeoutSeconds,
         Integer maxRetries
 ) implements AiChatModelConfig<GitHubModelsChatModel.Config> {}

@@ -34,6 +34,7 @@ public record GoogleAiGeminiChatModel(
         Integer topK,
         Double frequencyPenalty,
         Double presencePenalty,
+        Integer maxOutputTokens,
         Integer timeoutSeconds,
         Integer maxRetries
 ) implements AiChatModelConfig<GoogleAiGeminiChatModel.Config> {}

@@ -34,6 +34,7 @@ public record GoogleVertexAiGeminiChatModel(
         Integer topK,
         Double frequencyPenalty,
         Double presencePenalty,
+        Integer maxOutputTokens,
         Integer timeoutSeconds,
         Integer maxRetries
 ) implements AiChatModelConfig<GoogleVertexAiGeminiChatModel.Config> {}

@@ -33,6 +33,7 @@ public record MistralAiChatModel(
         Double topP,
         Double frequencyPenalty,
         Double presencePenalty,
+        Integer maxOutputTokens,
         Integer timeoutSeconds,
         Integer maxRetries
 ) implements AiChatModelConfig<MistralAiChatModel.Config> {}

@@ -33,6 +33,7 @@ public record OpenAiChatModel(
         Double topP,
         Double frequencyPenalty,
         Double presencePenalty,
+        Integer maxOutputTokens,
         Integer timeoutSeconds,
         Integer maxRetries
 ) implements AiChatModelConfig<OpenAiChatModel.Config> {}
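
Each provider record now carries the same optional Integer maxOutputTokens component, so a null value can flow straight into the langchain4j builders wired up in the configurer above. A hedged usage sketch for the OpenAI case, with an illustrative API key parameter and model name that are not part of this change:

    import dev.langchain4j.model.openai.OpenAiChatModel;

    class OpenAiMaxTokensSketch {

        static OpenAiChatModel buildModel(String apiKey, Integer maxOutputTokens) {
            // maxTokens accepts a boxed Integer; passing null leaves the provider
            // default in place, so no null check is needed on this path.
            return OpenAiChatModel.builder()
                    .apiKey(apiKey)
                    .modelName("gpt-4o-mini") // illustrative model name
                    .maxTokens(maxOutputTokens)
                    .build();
        }
    }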