AI rule node: support top N for all models

This commit is contained in:
Dmytro Skarzhynets 2025-06-27 16:37:10 +03:00
parent 3f2a6440a7
commit 3f58ff01c3
No known key found for this signature in database
GPG Key ID: 2B51652F224037DF
10 changed files with 50 additions and 196 deletions

View File

@@ -56,6 +56,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur
.apiKey(chatModel.providerConfig().apiKey())
.modelName(modelConfig.modelId())
.temperature(modelConfig.temperature())
.topP(modelConfig.topP())
.timeout(toDuration(modelConfig.timeoutSeconds()))
.maxRetries(modelConfig.maxRetries())
.build();
@@ -68,6 +69,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur
.apiKey(chatModel.providerConfig().apiKey())
.deploymentName(modelConfig.modelId())
.temperature(modelConfig.temperature())
.topP(modelConfig.topP())
.timeout(toDuration(modelConfig.timeoutSeconds()))
.maxRetries(modelConfig.maxRetries())
.build();
@@ -80,6 +82,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur
.apiKey(chatModel.providerConfig().apiKey())
.modelName(modelConfig.modelId())
.temperature(modelConfig.temperature())
.topP(modelConfig.topP())
.timeout(toDuration(modelConfig.timeoutSeconds()))
.maxRetries(modelConfig.maxRetries())
.build();
@@ -112,6 +115,9 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur
if (modelConfig.temperature() != null) {
generationConfigBuilder.setTemperature(modelConfig.temperature().floatValue());
}
if (modelConfig.topP() != null) {
generationConfigBuilder.setTopP(modelConfig.topP().floatValue());
}
var generationConfig = generationConfigBuilder.build();
// construct generative model instance
@@ -128,6 +134,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur
.apiKey(chatModel.providerConfig().apiKey())
.modelName(modelConfig.modelId())
.temperature(modelConfig.temperature())
.topP(modelConfig.topP())
.timeout(toDuration(modelConfig.timeoutSeconds()))
.maxRetries(modelConfig.maxRetries())
.build();
@@ -140,6 +147,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur
.apiKey(chatModel.providerConfig().apiKey())
.modelName(modelConfig.modelId())
.temperature(modelConfig.temperature())
.topP(modelConfig.topP())
.timeout(toDuration(modelConfig.timeoutSeconds()))
.maxRetries(modelConfig.maxRetries())
.build();
@@ -161,6 +169,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur
var defaultChatRequestParams = ChatRequestParameters.builder()
.temperature(modelConfig.temperature())
.topP(modelConfig.topP())
.build();
return BedrockChatModel.builder()
@@ -179,6 +188,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur
.gitHubToken(chatModel.providerConfig().personalAccessToken())
.modelName(modelConfig.modelId())
.temperature(modelConfig.temperature())
.topP(modelConfig.topP())
.timeout(toDuration(modelConfig.timeoutSeconds()))
.maxRetries(modelConfig.maxRetries())
.build();

View File

@@ -23,14 +23,10 @@ public sealed interface AiChatModelConfig<C extends AiChatModelConfig<C>> extend
GoogleVertexAiGeminiChatModel.Config, MistralAiChatModel.Config, AnthropicChatModel.Config,
AmazonBedrockChatModel.Config, GitHubModelsChatModel.Config {
Double temperature();
Integer timeoutSeconds();
Integer maxRetries();
C withTemperature(Double temperature);
C withTimeoutSeconds(Integer timeoutSeconds);
C withMaxRetries(Integer maxRetries);

View File

@@ -16,47 +16,28 @@
package org.thingsboard.server.common.data.ai.model.chat;
import dev.langchain4j.model.chat.ChatModel;
import lombok.With;
import org.thingsboard.server.common.data.ai.model.AiModelType;
import org.thingsboard.server.common.data.ai.provider.AmazonBedrockProviderConfig;
public record AmazonBedrockChatModel(
AiModelType modelType,
AmazonBedrockProviderConfig providerConfig,
Config modelConfig
@With Config modelConfig
) implements AiChatModel<AmazonBedrockChatModel.Config> {
@With
public record Config(
String modelId,
Double temperature,
Double topP,
Integer timeoutSeconds,
Integer maxRetries
) implements AiChatModelConfig<AmazonBedrockChatModel.Config> {
@Override
public AmazonBedrockChatModel.Config withTemperature(Double temperature) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public AmazonBedrockChatModel.Config withTimeoutSeconds(Integer timeoutSeconds) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public AmazonBedrockChatModel.Config withMaxRetries(Integer maxRetries) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
}
) implements AiChatModelConfig<AmazonBedrockChatModel.Config> {}
@Override
public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
return configurer.configureChatModel(this);
}
@Override
public AmazonBedrockChatModel withModelConfig(AmazonBedrockChatModel.Config config) {
return new AmazonBedrockChatModel(modelType, providerConfig, config);
}
}

View File

@@ -16,47 +16,28 @@
package org.thingsboard.server.common.data.ai.model.chat;
import dev.langchain4j.model.chat.ChatModel;
import lombok.With;
import org.thingsboard.server.common.data.ai.model.AiModelType;
import org.thingsboard.server.common.data.ai.provider.AnthropicProviderConfig;
public record AnthropicChatModel(
AiModelType modelType,
AnthropicProviderConfig providerConfig,
Config modelConfig
@With Config modelConfig
) implements AiChatModel<AnthropicChatModel.Config> {
@With
public record Config(
String modelId,
Double temperature,
Double topP,
Integer timeoutSeconds,
Integer maxRetries
) implements AiChatModelConfig<AnthropicChatModel.Config> {
@Override
public AnthropicChatModel.Config withTemperature(Double temperature) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public AnthropicChatModel.Config withTimeoutSeconds(Integer timeoutSeconds) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public AnthropicChatModel.Config withMaxRetries(Integer maxRetries) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
}
) implements AiChatModelConfig<AnthropicChatModel.Config> {}
@Override
public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
return configurer.configureChatModel(this);
}
@Override
public AnthropicChatModel withModelConfig(AnthropicChatModel.Config config) {
return new AnthropicChatModel(modelType, providerConfig, config);
}
}

View File

@@ -16,47 +16,28 @@
package org.thingsboard.server.common.data.ai.model.chat;
import dev.langchain4j.model.chat.ChatModel;
import lombok.With;
import org.thingsboard.server.common.data.ai.model.AiModelType;
import org.thingsboard.server.common.data.ai.provider.AzureOpenAiProviderConfig;
public record AzureOpenAiChatModel(
AiModelType modelType,
AzureOpenAiProviderConfig providerConfig,
Config modelConfig
@With Config modelConfig
) implements AiChatModel<AzureOpenAiChatModel.Config> {
@With
public record Config(
String modelId,
Double temperature,
Double topP,
Integer timeoutSeconds,
Integer maxRetries
) implements AiChatModelConfig<AzureOpenAiChatModel.Config> {
@Override
public AzureOpenAiChatModel.Config withTemperature(Double temperature) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public AzureOpenAiChatModel.Config withTimeoutSeconds(Integer timeoutSeconds) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public AzureOpenAiChatModel.Config withMaxRetries(Integer maxRetries) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
}
) implements AiChatModelConfig<AzureOpenAiChatModel.Config> {}
@Override
public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
return configurer.configureChatModel(this);
}
@Override
public AzureOpenAiChatModel withModelConfig(AzureOpenAiChatModel.Config config) {
return new AzureOpenAiChatModel(modelType, providerConfig, config);
}
}

View File

@@ -16,47 +16,28 @@
package org.thingsboard.server.common.data.ai.model.chat;
import dev.langchain4j.model.chat.ChatModel;
import lombok.With;
import org.thingsboard.server.common.data.ai.model.AiModelType;
import org.thingsboard.server.common.data.ai.provider.GithubModelsProviderConfig;
public record GitHubModelsChatModel(
AiModelType modelType,
GithubModelsProviderConfig providerConfig,
Config modelConfig
@With Config modelConfig
) implements AiChatModel<GitHubModelsChatModel.Config> {
@With
public record Config(
String modelId,
Double temperature,
Double topP,
Integer timeoutSeconds,
Integer maxRetries
) implements AiChatModelConfig<GitHubModelsChatModel.Config> {
@Override
public GitHubModelsChatModel.Config withTemperature(Double temperature) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public GitHubModelsChatModel.Config withTimeoutSeconds(Integer timeoutSeconds) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public GitHubModelsChatModel.Config withMaxRetries(Integer maxRetries) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
}
) implements AiChatModelConfig<GitHubModelsChatModel.Config> {}
@Override
public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
return configurer.configureChatModel(this);
}
@Override
public GitHubModelsChatModel withModelConfig(GitHubModelsChatModel.Config config) {
return new GitHubModelsChatModel(modelType, providerConfig, config);
}
}

View File

@@ -16,47 +16,28 @@
package org.thingsboard.server.common.data.ai.model.chat;
import dev.langchain4j.model.chat.ChatModel;
import lombok.With;
import org.thingsboard.server.common.data.ai.model.AiModelType;
import org.thingsboard.server.common.data.ai.provider.GoogleAiGeminiProviderConfig;
public record GoogleAiGeminiChatModel(
AiModelType modelType,
GoogleAiGeminiProviderConfig providerConfig,
Config modelConfig
@With Config modelConfig
) implements AiChatModel<GoogleAiGeminiChatModel.Config> {
@With
public record Config(
String modelId,
Double temperature,
Double topP,
Integer timeoutSeconds,
Integer maxRetries
) implements AiChatModelConfig<GoogleAiGeminiChatModel.Config> {
@Override
public Config withTemperature(Double temperature) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public Config withTimeoutSeconds(Integer timeoutSeconds) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public Config withMaxRetries(Integer maxRetries) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
}
) implements AiChatModelConfig<GoogleAiGeminiChatModel.Config> {}
@Override
public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
return configurer.configureChatModel(this);
}
@Override
public GoogleAiGeminiChatModel withModelConfig(GoogleAiGeminiChatModel.Config config) {
return new GoogleAiGeminiChatModel(modelType, providerConfig, config);
}
}

View File

@@ -16,47 +16,28 @@
package org.thingsboard.server.common.data.ai.model.chat;
import dev.langchain4j.model.chat.ChatModel;
import lombok.With;
import org.thingsboard.server.common.data.ai.model.AiModelType;
import org.thingsboard.server.common.data.ai.provider.GoogleVertexAiGeminiProviderConfig;
public record GoogleVertexAiGeminiChatModel(
AiModelType modelType,
GoogleVertexAiGeminiProviderConfig providerConfig,
Config modelConfig
@With Config modelConfig
) implements AiChatModel<GoogleVertexAiGeminiChatModel.Config> {
@With
public record Config(
String modelId,
Double temperature,
Double topP,
Integer timeoutSeconds, // TODO: not supported by Vertex AI
Integer maxRetries
) implements AiChatModelConfig<GoogleVertexAiGeminiChatModel.Config> {
@Override
public Config withTemperature(Double temperature) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public Config withTimeoutSeconds(Integer timeoutSeconds) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public Config withMaxRetries(Integer maxRetries) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
}
) implements AiChatModelConfig<GoogleVertexAiGeminiChatModel.Config> {}
@Override
public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
return configurer.configureChatModel(this);
}
@Override
public GoogleVertexAiGeminiChatModel withModelConfig(GoogleVertexAiGeminiChatModel.Config config) {
return new GoogleVertexAiGeminiChatModel(modelType, providerConfig, config);
}
}

View File

@@ -16,47 +16,28 @@
package org.thingsboard.server.common.data.ai.model.chat;
import dev.langchain4j.model.chat.ChatModel;
import lombok.With;
import org.thingsboard.server.common.data.ai.model.AiModelType;
import org.thingsboard.server.common.data.ai.provider.MistralAiProviderConfig;
public record MistralAiChatModel(
AiModelType modelType,
MistralAiProviderConfig providerConfig,
Config modelConfig
@With Config modelConfig
) implements AiChatModel<MistralAiChatModel.Config> {
@With
public record Config(
String modelId,
Double temperature,
Double topP,
Integer timeoutSeconds,
Integer maxRetries
) implements AiChatModelConfig<MistralAiChatModel.Config> {
@Override
public Config withTemperature(Double temperature) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public Config withTimeoutSeconds(Integer timeoutSeconds) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public Config withMaxRetries(Integer maxRetries) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
}
) implements AiChatModelConfig<MistralAiChatModel.Config> {}
@Override
public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
return configurer.configureChatModel(this);
}
@Override
public MistralAiChatModel withModelConfig(Config config) {
return new MistralAiChatModel(modelType, providerConfig, config);
}
}

View File

@@ -16,47 +16,28 @@
package org.thingsboard.server.common.data.ai.model.chat;
import dev.langchain4j.model.chat.ChatModel;
import lombok.With;
import org.thingsboard.server.common.data.ai.model.AiModelType;
import org.thingsboard.server.common.data.ai.provider.OpenAiProviderConfig;
public record OpenAiChatModel(
AiModelType modelType,
OpenAiProviderConfig providerConfig,
Config modelConfig
@With Config modelConfig
) implements AiChatModel<OpenAiChatModel.Config> {
@With
public record Config(
String modelId,
Double temperature,
Double topP,
Integer timeoutSeconds,
Integer maxRetries
) implements AiChatModelConfig<OpenAiChatModel.Config> {
@Override
public OpenAiChatModel.Config withTemperature(Double temperature) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public OpenAiChatModel.Config withTimeoutSeconds(Integer timeoutSeconds) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public OpenAiChatModel.Config withMaxRetries(Integer maxRetries) {
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
}
) implements AiChatModelConfig<OpenAiChatModel.Config> {}
@Override
public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
return configurer.configureChatModel(this);
}
@Override
public OpenAiChatModel withModelConfig(OpenAiChatModel.Config config) {
return new OpenAiChatModel(modelType, providerConfig, config);
}
}