AI rule node: support top P for all models

This commit is contained in:
Dmytro Skarzhynets 2025-06-27 16:37:10 +03:00
parent 3f2a6440a7
commit 3f58ff01c3
No known key found for this signature in database
GPG Key ID: 2B51652F224037DF
10 changed files with 50 additions and 196 deletions

View File

@@ -56,6 +56,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigurer
         .apiKey(chatModel.providerConfig().apiKey())
         .modelName(modelConfig.modelId())
         .temperature(modelConfig.temperature())
+        .topP(modelConfig.topP())
         .timeout(toDuration(modelConfig.timeoutSeconds()))
         .maxRetries(modelConfig.maxRetries())
         .build();
@@ -68,6 +69,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigurer
         .apiKey(chatModel.providerConfig().apiKey())
         .deploymentName(modelConfig.modelId())
         .temperature(modelConfig.temperature())
+        .topP(modelConfig.topP())
         .timeout(toDuration(modelConfig.timeoutSeconds()))
         .maxRetries(modelConfig.maxRetries())
         .build();
@@ -80,6 +82,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigurer
         .apiKey(chatModel.providerConfig().apiKey())
         .modelName(modelConfig.modelId())
         .temperature(modelConfig.temperature())
+        .topP(modelConfig.topP())
         .timeout(toDuration(modelConfig.timeoutSeconds()))
         .maxRetries(modelConfig.maxRetries())
         .build();
@@ -112,6 +115,9 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigurer
         if (modelConfig.temperature() != null) {
             generationConfigBuilder.setTemperature(modelConfig.temperature().floatValue());
         }
+        if (modelConfig.topP() != null) {
+            generationConfigBuilder.setTopP(modelConfig.topP().floatValue());
+        }
         var generationConfig = generationConfigBuilder.build();

         // construct generative model instance
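
The Vertex AI path differs from the builder-based providers: temperature and the new top-p value are copied into the SDK's GenerationConfig only when they are non-null. A minimal standalone sketch of that pattern, assuming the com.google.cloud.vertexai Java SDK; the nullable parameters stand in for modelConfig.temperature() and modelConfig.topP():

import com.google.cloud.vertexai.api.GenerationConfig;

class GenerationConfigSketch {

    // Nullable inputs mirror optional model settings left empty by the user.
    static GenerationConfig build(Double temperature, Double topP) {
        GenerationConfig.Builder builder = GenerationConfig.newBuilder();
        if (temperature != null) {
            builder.setTemperature(temperature.floatValue()); // proto field takes a float
        }
        if (topP != null) {
            builder.setTopP(topP.floatValue()); // nucleus-sampling threshold, also a float
        }
        return builder.build();
    }
}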
@@ -128,6 +134,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigurer
         .apiKey(chatModel.providerConfig().apiKey())
         .modelName(modelConfig.modelId())
         .temperature(modelConfig.temperature())
+        .topP(modelConfig.topP())
         .timeout(toDuration(modelConfig.timeoutSeconds()))
         .maxRetries(modelConfig.maxRetries())
         .build();
@@ -140,6 +147,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigurer
         .apiKey(chatModel.providerConfig().apiKey())
         .modelName(modelConfig.modelId())
         .temperature(modelConfig.temperature())
+        .topP(modelConfig.topP())
         .timeout(toDuration(modelConfig.timeoutSeconds()))
         .maxRetries(modelConfig.maxRetries())
         .build();
@@ -161,6 +169,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigurer
         var defaultChatRequestParams = ChatRequestParameters.builder()
                 .temperature(modelConfig.temperature())
+                .topP(modelConfig.topP())
                 .build();

         return BedrockChatModel.builder()
@@ -179,6 +188,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigurer
         .gitHubToken(chatModel.providerConfig().personalAccessToken())
         .modelName(modelConfig.modelId())
         .temperature(modelConfig.temperature())
+        .topP(modelConfig.topP())
         .timeout(toDuration(modelConfig.timeoutSeconds()))
         .maxRetries(modelConfig.maxRetries())
         .build();
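
For the remaining providers the value goes straight into the corresponding LangChain4j builder, while Amazon Bedrock receives it through the default ChatRequestParameters shown above. A minimal sketch of the builder pattern, using the OpenAI model as an example; the API key source, model name, and numeric values are placeholders, not values taken from this change:

import dev.langchain4j.model.openai.OpenAiChatModel;
import java.time.Duration;

class OpenAiTopPSketch {

    static OpenAiChatModel build() {
        return OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_API_KEY")) // placeholder credential source
                .modelName("gpt-4o-mini")                // placeholder model id
                .temperature(0.5)
                .topP(0.9)                               // the newly wired nucleus-sampling parameter
                .timeout(Duration.ofSeconds(60))
                .maxRetries(2)
                .build();
    }
}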

View File

@@ -23,14 +23,10 @@ public sealed interface AiChatModelConfig<C extends AiChatModelConfig<C>> extends
         GoogleVertexAiGeminiChatModel.Config, MistralAiChatModel.Config, AnthropicChatModel.Config,
         AmazonBedrockChatModel.Config, GitHubModelsChatModel.Config {

-    Double temperature();
-
     Integer timeoutSeconds();

     Integer maxRetries();

-    C withTemperature(Double temperature);
-
     C withTimeoutSeconds(Integer timeoutSeconds);

     C withMaxRetries(Integer maxRetries);
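
With temperature() and withTemperature() removed, the sealed interface keeps only the provider-agnostic settings; temperature and the new topP live solely on the concrete Config records. A sketch of the resulting shape, assuming the full permits list covers all eight Config records seen in this commit; the extends clause is truncated in the hunk header and is omitted here:

public sealed interface AiChatModelConfig<C extends AiChatModelConfig<C>>
        permits OpenAiChatModel.Config, AzureOpenAiChatModel.Config, GoogleAiGeminiChatModel.Config,
                GoogleVertexAiGeminiChatModel.Config, MistralAiChatModel.Config, AnthropicChatModel.Config,
                AmazonBedrockChatModel.Config, GitHubModelsChatModel.Config {

    Integer timeoutSeconds();

    Integer maxRetries();

    C withTimeoutSeconds(Integer timeoutSeconds);

    C withMaxRetries(Integer maxRetries);
}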

View File

@@ -16,47 +16,28 @@
 package org.thingsboard.server.common.data.ai.model.chat;

 import dev.langchain4j.model.chat.ChatModel;
+import lombok.With;
 import org.thingsboard.server.common.data.ai.model.AiModelType;
 import org.thingsboard.server.common.data.ai.provider.AmazonBedrockProviderConfig;

 public record AmazonBedrockChatModel(
         AiModelType modelType,
         AmazonBedrockProviderConfig providerConfig,
-        Config modelConfig
+        @With Config modelConfig
 ) implements AiChatModel<AmazonBedrockChatModel.Config> {

+    @With
     public record Config(
             String modelId,
             Double temperature,
+            Double topP,
             Integer timeoutSeconds,
             Integer maxRetries
-    ) implements AiChatModelConfig<AmazonBedrockChatModel.Config> {
-
-        @Override
-        public AmazonBedrockChatModel.Config withTemperature(Double temperature) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-
-        @Override
-        public AmazonBedrockChatModel.Config withTimeoutSeconds(Integer timeoutSeconds) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-
-        @Override
-        public AmazonBedrockChatModel.Config withMaxRetries(Integer maxRetries) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-    }
+    ) implements AiChatModelConfig<AmazonBedrockChatModel.Config> {}

     @Override
     public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
         return configurer.configureChatModel(this);
     }
-
-    @Override
-    public AmazonBedrockChatModel withModelConfig(AmazonBedrockChatModel.Config config) {
-        return new AmazonBedrockChatModel(modelType, providerConfig, config);
-    }
 }
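
The hand-written withers above are replaced by Lombok: class-level @With on Config generates one wither per component (including the new topP), and @With on the modelConfig component generates withModelConfig on the outer record. A minimal usage sketch of the generated methods; the method names follow Lombok's with<Component> convention and the numeric values are placeholders:

import org.thingsboard.server.common.data.ai.model.chat.AmazonBedrockChatModel;

class WitherSketch {

    // Re-tunes an existing model immutably via the Lombok-generated withers.
    static AmazonBedrockChatModel retune(AmazonBedrockChatModel model) {
        AmazonBedrockChatModel.Config tuned = model.modelConfig()
                .withTopP(0.9)               // generated by class-level @With; covers the new component
                .withTimeoutSeconds(120);    // also satisfies AiChatModelConfig#withTimeoutSeconds
        return model.withModelConfig(tuned); // generated by @With on the modelConfig component
    }
}

The same replacement pattern applies to the seven other chat-model records below.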

View File

@@ -16,47 +16,28 @@
 package org.thingsboard.server.common.data.ai.model.chat;

 import dev.langchain4j.model.chat.ChatModel;
+import lombok.With;
 import org.thingsboard.server.common.data.ai.model.AiModelType;
 import org.thingsboard.server.common.data.ai.provider.AnthropicProviderConfig;

 public record AnthropicChatModel(
         AiModelType modelType,
         AnthropicProviderConfig providerConfig,
-        Config modelConfig
+        @With Config modelConfig
 ) implements AiChatModel<AnthropicChatModel.Config> {

+    @With
     public record Config(
             String modelId,
             Double temperature,
+            Double topP,
             Integer timeoutSeconds,
             Integer maxRetries
-    ) implements AiChatModelConfig<AnthropicChatModel.Config> {
-
-        @Override
-        public AnthropicChatModel.Config withTemperature(Double temperature) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-
-        @Override
-        public AnthropicChatModel.Config withTimeoutSeconds(Integer timeoutSeconds) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-
-        @Override
-        public AnthropicChatModel.Config withMaxRetries(Integer maxRetries) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-    }
+    ) implements AiChatModelConfig<AnthropicChatModel.Config> {}

     @Override
     public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
         return configurer.configureChatModel(this);
     }
-
-    @Override
-    public AnthropicChatModel withModelConfig(AnthropicChatModel.Config config) {
-        return new AnthropicChatModel(modelType, providerConfig, config);
-    }
 }

View File

@@ -16,47 +16,28 @@
 package org.thingsboard.server.common.data.ai.model.chat;

 import dev.langchain4j.model.chat.ChatModel;
+import lombok.With;
 import org.thingsboard.server.common.data.ai.model.AiModelType;
 import org.thingsboard.server.common.data.ai.provider.AzureOpenAiProviderConfig;

 public record AzureOpenAiChatModel(
         AiModelType modelType,
         AzureOpenAiProviderConfig providerConfig,
-        Config modelConfig
+        @With Config modelConfig
 ) implements AiChatModel<AzureOpenAiChatModel.Config> {

+    @With
     public record Config(
             String modelId,
             Double temperature,
+            Double topP,
             Integer timeoutSeconds,
             Integer maxRetries
-    ) implements AiChatModelConfig<AzureOpenAiChatModel.Config> {
-
-        @Override
-        public AzureOpenAiChatModel.Config withTemperature(Double temperature) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-
-        @Override
-        public AzureOpenAiChatModel.Config withTimeoutSeconds(Integer timeoutSeconds) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-
-        @Override
-        public AzureOpenAiChatModel.Config withMaxRetries(Integer maxRetries) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-    }
+    ) implements AiChatModelConfig<AzureOpenAiChatModel.Config> {}

     @Override
     public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
         return configurer.configureChatModel(this);
     }
-
-    @Override
-    public AzureOpenAiChatModel withModelConfig(AzureOpenAiChatModel.Config config) {
-        return new AzureOpenAiChatModel(modelType, providerConfig, config);
-    }
 }

View File

@@ -16,47 +16,28 @@
 package org.thingsboard.server.common.data.ai.model.chat;

 import dev.langchain4j.model.chat.ChatModel;
+import lombok.With;
 import org.thingsboard.server.common.data.ai.model.AiModelType;
 import org.thingsboard.server.common.data.ai.provider.GithubModelsProviderConfig;

 public record GitHubModelsChatModel(
         AiModelType modelType,
         GithubModelsProviderConfig providerConfig,
-        Config modelConfig
+        @With Config modelConfig
 ) implements AiChatModel<GitHubModelsChatModel.Config> {

+    @With
     public record Config(
             String modelId,
             Double temperature,
+            Double topP,
             Integer timeoutSeconds,
             Integer maxRetries
-    ) implements AiChatModelConfig<GitHubModelsChatModel.Config> {
-
-        @Override
-        public GitHubModelsChatModel.Config withTemperature(Double temperature) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-
-        @Override
-        public GitHubModelsChatModel.Config withTimeoutSeconds(Integer timeoutSeconds) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-
-        @Override
-        public GitHubModelsChatModel.Config withMaxRetries(Integer maxRetries) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-    }
+    ) implements AiChatModelConfig<GitHubModelsChatModel.Config> {}

     @Override
     public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
         return configurer.configureChatModel(this);
     }
-
-    @Override
-    public GitHubModelsChatModel withModelConfig(GitHubModelsChatModel.Config config) {
-        return new GitHubModelsChatModel(modelType, providerConfig, config);
-    }
 }

View File

@@ -16,47 +16,28 @@
 package org.thingsboard.server.common.data.ai.model.chat;

 import dev.langchain4j.model.chat.ChatModel;
+import lombok.With;
 import org.thingsboard.server.common.data.ai.model.AiModelType;
 import org.thingsboard.server.common.data.ai.provider.GoogleAiGeminiProviderConfig;

 public record GoogleAiGeminiChatModel(
         AiModelType modelType,
         GoogleAiGeminiProviderConfig providerConfig,
-        Config modelConfig
+        @With Config modelConfig
 ) implements AiChatModel<GoogleAiGeminiChatModel.Config> {

+    @With
     public record Config(
             String modelId,
             Double temperature,
+            Double topP,
             Integer timeoutSeconds,
             Integer maxRetries
-    ) implements AiChatModelConfig<GoogleAiGeminiChatModel.Config> {
-
-        @Override
-        public Config withTemperature(Double temperature) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-
-        @Override
-        public Config withTimeoutSeconds(Integer timeoutSeconds) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-
-        @Override
-        public Config withMaxRetries(Integer maxRetries) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-    }
+    ) implements AiChatModelConfig<GoogleAiGeminiChatModel.Config> {}

     @Override
     public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
         return configurer.configureChatModel(this);
     }
-
-    @Override
-    public GoogleAiGeminiChatModel withModelConfig(GoogleAiGeminiChatModel.Config config) {
-        return new GoogleAiGeminiChatModel(modelType, providerConfig, config);
-    }
 }

View File

@@ -16,47 +16,28 @@
 package org.thingsboard.server.common.data.ai.model.chat;

 import dev.langchain4j.model.chat.ChatModel;
+import lombok.With;
 import org.thingsboard.server.common.data.ai.model.AiModelType;
 import org.thingsboard.server.common.data.ai.provider.GoogleVertexAiGeminiProviderConfig;

 public record GoogleVertexAiGeminiChatModel(
         AiModelType modelType,
         GoogleVertexAiGeminiProviderConfig providerConfig,
-        Config modelConfig
+        @With Config modelConfig
 ) implements AiChatModel<GoogleVertexAiGeminiChatModel.Config> {

+    @With
     public record Config(
             String modelId,
             Double temperature,
+            Double topP,
             Integer timeoutSeconds, // TODO: not supported by Vertex AI
             Integer maxRetries
-    ) implements AiChatModelConfig<GoogleVertexAiGeminiChatModel.Config> {
-
-        @Override
-        public Config withTemperature(Double temperature) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-
-        @Override
-        public Config withTimeoutSeconds(Integer timeoutSeconds) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-
-        @Override
-        public Config withMaxRetries(Integer maxRetries) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-    }
+    ) implements AiChatModelConfig<GoogleVertexAiGeminiChatModel.Config> {}

     @Override
     public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
         return configurer.configureChatModel(this);
     }
-
-    @Override
-    public GoogleVertexAiGeminiChatModel withModelConfig(GoogleVertexAiGeminiChatModel.Config config) {
-        return new GoogleVertexAiGeminiChatModel(modelType, providerConfig, config);
-    }
 }

View File

@@ -16,47 +16,28 @@
 package org.thingsboard.server.common.data.ai.model.chat;

 import dev.langchain4j.model.chat.ChatModel;
+import lombok.With;
 import org.thingsboard.server.common.data.ai.model.AiModelType;
 import org.thingsboard.server.common.data.ai.provider.MistralAiProviderConfig;

 public record MistralAiChatModel(
         AiModelType modelType,
         MistralAiProviderConfig providerConfig,
-        Config modelConfig
+        @With Config modelConfig
 ) implements AiChatModel<MistralAiChatModel.Config> {

+    @With
     public record Config(
             String modelId,
             Double temperature,
+            Double topP,
             Integer timeoutSeconds,
             Integer maxRetries
-    ) implements AiChatModelConfig<MistralAiChatModel.Config> {
-
-        @Override
-        public Config withTemperature(Double temperature) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-
-        @Override
-        public Config withTimeoutSeconds(Integer timeoutSeconds) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-
-        @Override
-        public Config withMaxRetries(Integer maxRetries) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-    }
+    ) implements AiChatModelConfig<MistralAiChatModel.Config> {}

     @Override
     public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
         return configurer.configureChatModel(this);
     }
-
-    @Override
-    public MistralAiChatModel withModelConfig(Config config) {
-        return new MistralAiChatModel(modelType, providerConfig, config);
-    }
 }

View File

@@ -16,47 +16,28 @@
 package org.thingsboard.server.common.data.ai.model.chat;

 import dev.langchain4j.model.chat.ChatModel;
+import lombok.With;
 import org.thingsboard.server.common.data.ai.model.AiModelType;
 import org.thingsboard.server.common.data.ai.provider.OpenAiProviderConfig;

 public record OpenAiChatModel(
         AiModelType modelType,
         OpenAiProviderConfig providerConfig,
-        Config modelConfig
+        @With Config modelConfig
 ) implements AiChatModel<OpenAiChatModel.Config> {

+    @With
     public record Config(
             String modelId,
             Double temperature,
+            Double topP,
             Integer timeoutSeconds,
             Integer maxRetries
-    ) implements AiChatModelConfig<OpenAiChatModel.Config> {
-
-        @Override
-        public OpenAiChatModel.Config withTemperature(Double temperature) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-
-        @Override
-        public OpenAiChatModel.Config withTimeoutSeconds(Integer timeoutSeconds) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-
-        @Override
-        public OpenAiChatModel.Config withMaxRetries(Integer maxRetries) {
-            return new Config(modelId, temperature, timeoutSeconds, maxRetries);
-        }
-    }
+    ) implements AiChatModelConfig<OpenAiChatModel.Config> {}

     @Override
     public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
         return configurer.configureChatModel(this);
     }
-
-    @Override
-    public OpenAiChatModel withModelConfig(OpenAiChatModel.Config config) {
-        return new OpenAiChatModel(modelType, providerConfig, config);
-    }
 }