AI rule node: add support for Google Vertex AI Gemini

This commit is contained in:
Dmytro Skarzhynets 2025-06-25 14:31:41 +03:00
parent 536f252b16
commit d5c885dcde
No known key found for this signature in database
GPG Key ID: 2B51652F224037DF
21 changed files with 326 additions and 103 deletions

View File

@ -393,6 +393,10 @@
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-google-ai-gemini</artifactId>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-vertex-ai-gemini</artifactId>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-mistral-ai</artifactId>

View File

@ -15,25 +15,37 @@
*/
package org.thingsboard.server.service.ai;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.auth.oauth2.ServiceAccountCredentials;
import com.google.cloud.vertexai.Transport;
import com.google.cloud.vertexai.VertexAI;
import com.google.cloud.vertexai.api.GenerationConfig;
import com.google.cloud.vertexai.generativeai.GenerativeModel;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.vertexai.gemini.VertexAiGeminiChatModel;
import org.springframework.stereotype.Component;
import org.thingsboard.common.util.JacksonUtil;
import org.thingsboard.server.common.data.ai.model.chat.AzureOpenAiChatModel;
import org.thingsboard.server.common.data.ai.model.chat.GoogleAiGeminiChatModel;
import org.thingsboard.server.common.data.ai.model.chat.GoogleVertexAiGeminiChatModel;
import org.thingsboard.server.common.data.ai.model.chat.Langchain4jChatModelConfigurer;
import org.thingsboard.server.common.data.ai.model.chat.MistralAiChatModel;
import org.thingsboard.server.common.data.ai.model.chat.OpenAiChatModel;
import org.thingsboard.server.common.data.ai.provider.GoogleVertexAiGeminiProviderConfig;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.time.Duration;
@Component
public class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigurer {
class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigurer {
@Override
public ChatModel configureChatModel(OpenAiChatModel chatModel) {
OpenAiChatModel.Config modelConfig = chatModel.modelConfig();
return dev.langchain4j.model.openai.OpenAiChatModel.builder()
.apiKey(chatModel.providerConfig().apiKey())
.modelName(chatModel.modelId())
.modelName(modelConfig.modelId())
.temperature(modelConfig.temperature())
.timeout(toDuration(modelConfig.timeoutSeconds()))
.maxRetries(modelConfig.maxRetries())
@ -45,7 +57,7 @@ public class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelC
AzureOpenAiChatModel.Config modelConfig = chatModel.modelConfig();
return dev.langchain4j.model.azure.AzureOpenAiChatModel.builder()
.apiKey(chatModel.providerConfig().apiKey())
.deploymentName(chatModel.modelId())
.deploymentName(modelConfig.modelId())
.temperature(modelConfig.temperature())
.timeout(toDuration(modelConfig.timeoutSeconds()))
.maxRetries(modelConfig.maxRetries())
@ -57,19 +69,56 @@ public class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelC
GoogleAiGeminiChatModel.Config modelConfig = chatModel.modelConfig();
return dev.langchain4j.model.googleai.GoogleAiGeminiChatModel.builder()
.apiKey(chatModel.providerConfig().apiKey())
.modelName(chatModel.modelId())
.modelName(modelConfig.modelId())
.temperature(modelConfig.temperature())
.timeout(toDuration(modelConfig.timeoutSeconds()))
.maxRetries(modelConfig.maxRetries())
.build();
}
@Override
public ChatModel configureChatModel(GoogleVertexAiGeminiChatModel chatModel) {
    GoogleVertexAiGeminiProviderConfig providerConfig = chatModel.providerConfig();
    GoogleVertexAiGeminiChatModel.Config modelConfig = chatModel.modelConfig();

    // Construct service account credentials from the service account key JSON
    // stored in the provider config (serialized back to bytes for the Google SDK parser).
    ObjectNode serviceAccountKeyJson = providerConfig.serviceAccountKey();
    ServiceAccountCredentials serviceAccountCredentials;
    try {
        serviceAccountCredentials = ServiceAccountCredentials
                .fromStream(new ByteArrayInputStream(JacksonUtil.writeValueAsBytes(serviceAccountKeyJson)));
    } catch (IOException e) {
        // The key JSON is user-supplied configuration; a malformed key is an invalid
        // argument, not a generic runtime failure. IllegalArgumentException is still a
        // RuntimeException, so existing callers catching RuntimeException are unaffected.
        throw new IllegalArgumentException("Failed to parse service account key JSON", e);
    }

    // Construct the Vertex AI client for the configured project/location.
    var vertexAI = new VertexAI.Builder()
            .setProjectId(providerConfig.projectId())
            .setLocation(providerConfig.location())
            .setCredentials(serviceAccountCredentials)
            .setTransport(Transport.REST) // GRPC also possible, but likely does not work with service account keys
            .build();

    // Map the model config onto Vertex AI's GenerationConfig; only set fields
    // the user actually provided so SDK defaults apply otherwise.
    var generationConfigBuilder = GenerationConfig.newBuilder();
    if (modelConfig.temperature() != null) {
        generationConfigBuilder.setTemperature(modelConfig.temperature().floatValue());
    }
    var generationConfig = generationConfigBuilder.build();

    // Construct the generative model instance and wrap it in the LangChain4j adapter.
    var generativeModel = new GenerativeModel(modelConfig.modelId(), vertexAI)
            .withGenerationConfig(generationConfig);
    return new VertexAiGeminiChatModel(generativeModel, generationConfig, modelConfig.maxRetries());
}
@Override
public ChatModel configureChatModel(MistralAiChatModel chatModel) {
MistralAiChatModel.Config modelConfig = chatModel.modelConfig();
return dev.langchain4j.model.mistralai.MistralAiChatModel.builder()
.apiKey(chatModel.providerConfig().apiKey())
.modelName(chatModel.modelId())
.modelName(modelConfig.modelId())
.temperature(modelConfig.temperature())
.timeout(toDuration(modelConfig.timeoutSeconds()))
.maxRetries(modelConfig.maxRetries())

View File

@ -15,61 +15,22 @@
*/
package org.thingsboard.server.common.data.ai.model;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import org.thingsboard.server.common.data.ai.model.chat.GoogleAiGeminiChatModel;
import org.thingsboard.server.common.data.ai.model.chat.MistralAiChatModel;
import org.thingsboard.server.common.data.ai.model.chat.OpenAiChatModel;
import com.fasterxml.jackson.databind.annotation.JsonTypeIdResolver;
import org.thingsboard.server.common.data.ai.provider.AiProviderConfig;
@JsonTypeInfo(
use = JsonTypeInfo.Id.NAME,
use = JsonTypeInfo.Id.CUSTOM,
include = JsonTypeInfo.As.PROPERTY,
property = "modelId",
visible = true
property = "@type"
)
@JsonSubTypes({
// OpenAI models
@JsonSubTypes.Type(value = OpenAiChatModel.class, name = "o4-mini"),
// @JsonSubTypes.Type(value = OpenAiChatModel.class, name = "o3-pro"), needs verification with Gov ID :)
// @JsonSubTypes.Type(value = OpenAiChatModel.class, name = "o3"), needs verification with Gov ID :)
@JsonSubTypes.Type(value = OpenAiChatModel.class, name = "o3-mini"),
// @JsonSubTypes.Type(value = OpenAiChatModel.class, name = "o1-pro"), LC4j sends requests to v1/chat/completions, but o1-pro is only supported in v1/responses
@JsonSubTypes.Type(value = OpenAiChatModel.class, name = "o1"),
@JsonSubTypes.Type(value = OpenAiChatModel.class, name = "gpt-4.1"),
@JsonSubTypes.Type(value = OpenAiChatModel.class, name = "gpt-4.1-mini"),
@JsonSubTypes.Type(value = OpenAiChatModel.class, name = "gpt-4.1-nano"),
@JsonSubTypes.Type(value = OpenAiChatModel.class, name = "gpt-4o"),
@JsonSubTypes.Type(value = OpenAiChatModel.class, name = "gpt-4o-mini"),
// Google AI Gemini models
@JsonSubTypes.Type(value = GoogleAiGeminiChatModel.class, name = "gemini-2.5-pro"),
@JsonSubTypes.Type(value = GoogleAiGeminiChatModel.class, name = "gemini-2.5-flash"),
@JsonSubTypes.Type(value = GoogleAiGeminiChatModel.class, name = "gemini-2.0-flash"),
@JsonSubTypes.Type(value = GoogleAiGeminiChatModel.class, name = "gemini-2.0-flash-lite"),
@JsonSubTypes.Type(value = GoogleAiGeminiChatModel.class, name = "gemini-1.5-pro"),
@JsonSubTypes.Type(value = GoogleAiGeminiChatModel.class, name = "gemini-1.5-flash"),
@JsonSubTypes.Type(value = GoogleAiGeminiChatModel.class, name = "gemini-1.5-flash-8b"),
// Mistral AI models
@JsonSubTypes.Type(value = MistralAiChatModel.class, name = "magistral-medium-latest"),
@JsonSubTypes.Type(value = MistralAiChatModel.class, name = "magistral-small-latest"),
@JsonSubTypes.Type(value = MistralAiChatModel.class, name = "mistral-large-latest"),
@JsonSubTypes.Type(value = MistralAiChatModel.class, name = "mistral-medium-latest"),
@JsonSubTypes.Type(value = MistralAiChatModel.class, name = "mistral-small-latest"),
@JsonSubTypes.Type(value = MistralAiChatModel.class, name = "pixtral-large-latest"),
@JsonSubTypes.Type(value = MistralAiChatModel.class, name = "ministral-8b-latest"),
@JsonSubTypes.Type(value = MistralAiChatModel.class, name = "ministral-3b-latest"),
@JsonSubTypes.Type(value = MistralAiChatModel.class, name = "open-mistral-nemo")
})
@JsonTypeIdResolver(AiModelTypeIdResolver.class)
public interface AiModel<C extends AiModelConfig<C>> {
AiProviderConfig providerConfig();
AiModelType modelType();
String modelId();
C modelConfig();
AiModel<C> withModelConfig(C config);

View File

@ -15,4 +15,8 @@
*/
package org.thingsboard.server.common.data.ai.model;
public interface AiModelConfig<C extends AiModelConfig<C>> {}
public interface AiModelConfig<C extends AiModelConfig<C>> {
String modelId();
}

View File

@ -0,0 +1,120 @@
/**
* Copyright © 2016-2025 The Thingsboard Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thingsboard.server.common.data.ai.model;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.DatabindContext;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.jsontype.impl.TypeIdResolverBase;
import org.thingsboard.server.common.data.ai.model.chat.GoogleAiGeminiChatModel;
import org.thingsboard.server.common.data.ai.model.chat.GoogleVertexAiGeminiChatModel;
import org.thingsboard.server.common.data.ai.model.chat.MistralAiChatModel;
import org.thingsboard.server.common.data.ai.model.chat.OpenAiChatModel;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
 * Custom Jackson type-id resolver that maps {@link AiModel} subtypes to and from
 * composite type IDs of the form {@code "<PROVIDER>::<modelId>"} (e.g.
 * {@code "OPENAI::gpt-4o"}). Used via {@code @JsonTypeIdResolver} on {@link AiModel}.
 */
public final class AiModelTypeIdResolver extends TypeIdResolverBase {

    // Immutable registry of all supported type IDs; built once at class-load time.
    // Kept as an unmodifiable HashMap wrapper (rather than Map.copyOf) so that
    // lookups with a null id return null instead of throwing.
    private static final Map<String, Class<?>> MODEL_CLASS_BY_TYPE_ID = Collections.unmodifiableMap(buildRegistry());

    private static Map<String, Class<?>> buildRegistry() {
        Map<String, Class<?>> registry = new HashMap<>();
        // OpenAI models
        registry.put("OPENAI::o4-mini", OpenAiChatModel.class);
        // registry.put("OPENAI::o3-pro", OpenAiChatModel.class); // needs verification with Gov ID :)
        // registry.put("OPENAI::o3", OpenAiChatModel.class); // needs verification with Gov ID :)
        registry.put("OPENAI::o3-mini", OpenAiChatModel.class);
        // registry.put("OPENAI::o1-pro", OpenAiChatModel.class); // LC4j sends requests to v1/chat/completions, but o1-pro is only supported in v1/responses
        registry.put("OPENAI::o1", OpenAiChatModel.class);
        registry.put("OPENAI::gpt-4.1", OpenAiChatModel.class);
        registry.put("OPENAI::gpt-4.1-mini", OpenAiChatModel.class);
        registry.put("OPENAI::gpt-4.1-nano", OpenAiChatModel.class);
        registry.put("OPENAI::gpt-4o", OpenAiChatModel.class);
        registry.put("OPENAI::gpt-4o-mini", OpenAiChatModel.class);
        // Google AI Gemini models
        registry.put("GOOGLE_AI_GEMINI::gemini-2.5-pro", GoogleAiGeminiChatModel.class);
        registry.put("GOOGLE_AI_GEMINI::gemini-2.5-flash", GoogleAiGeminiChatModel.class);
        registry.put("GOOGLE_AI_GEMINI::gemini-2.0-flash", GoogleAiGeminiChatModel.class);
        registry.put("GOOGLE_AI_GEMINI::gemini-2.0-flash-lite", GoogleAiGeminiChatModel.class);
        registry.put("GOOGLE_AI_GEMINI::gemini-1.5-pro", GoogleAiGeminiChatModel.class);
        registry.put("GOOGLE_AI_GEMINI::gemini-1.5-flash", GoogleAiGeminiChatModel.class);
        registry.put("GOOGLE_AI_GEMINI::gemini-1.5-flash-8b", GoogleAiGeminiChatModel.class);
        // Google Vertex AI Gemini models
        registry.put("GOOGLE_VERTEX_AI_GEMINI::gemini-2.5-pro", GoogleVertexAiGeminiChatModel.class);
        registry.put("GOOGLE_VERTEX_AI_GEMINI::gemini-2.5-flash", GoogleVertexAiGeminiChatModel.class);
        registry.put("GOOGLE_VERTEX_AI_GEMINI::gemini-2.0-flash", GoogleVertexAiGeminiChatModel.class);
        registry.put("GOOGLE_VERTEX_AI_GEMINI::gemini-2.0-flash-lite", GoogleVertexAiGeminiChatModel.class);
        registry.put("GOOGLE_VERTEX_AI_GEMINI::gemini-1.5-pro", GoogleVertexAiGeminiChatModel.class);
        registry.put("GOOGLE_VERTEX_AI_GEMINI::gemini-1.5-flash", GoogleVertexAiGeminiChatModel.class);
        registry.put("GOOGLE_VERTEX_AI_GEMINI::gemini-1.5-flash-8b", GoogleVertexAiGeminiChatModel.class);
        // Mistral AI models
        registry.put("MISTRAL_AI::magistral-medium-latest", MistralAiChatModel.class);
        registry.put("MISTRAL_AI::magistral-small-latest", MistralAiChatModel.class);
        registry.put("MISTRAL_AI::mistral-large-latest", MistralAiChatModel.class);
        registry.put("MISTRAL_AI::mistral-medium-latest", MistralAiChatModel.class);
        registry.put("MISTRAL_AI::mistral-small-latest", MistralAiChatModel.class);
        registry.put("MISTRAL_AI::pixtral-large-latest", MistralAiChatModel.class);
        registry.put("MISTRAL_AI::ministral-8b-latest", MistralAiChatModel.class);
        registry.put("MISTRAL_AI::ministral-3b-latest", MistralAiChatModel.class);
        registry.put("MISTRAL_AI::open-mistral-nemo", MistralAiChatModel.class);
        return registry;
    }

    // Base type captured in init(); needed to construct specialized types during deserialization.
    private JavaType baseType;

    @Override
    public void init(JavaType baseType) {
        this.baseType = baseType;
    }

    @Override
    public String idFromValue(Object value) {
        return typeIdFor((AiModel<?>) value);
    }

    @Override
    public String idFromValueAndType(Object value, Class<?> suggestedType) {
        // The suggested type is irrelevant; the ID is fully determined by the value itself.
        return typeIdFor((AiModel<?>) value);
    }

    @Override
    public JavaType typeFromId(DatabindContext context, String id) {
        Class<?> modelClass = MODEL_CLASS_BY_TYPE_ID.get(id);
        if (modelClass == null) {
            throw new IllegalArgumentException("Unknown model type ID: " + id);
        }
        return context.constructSpecializedType(baseType, modelClass);
    }

    @Override
    public JsonTypeInfo.Id getMechanism() {
        return JsonTypeInfo.Id.CUSTOM;
    }

    // Composes the "<PROVIDER>::<modelId>" ID from the model's provider and model config.
    private static String typeIdFor(AiModel<?> model) {
        return model.providerConfig().provider().name() + "::" + model.modelConfig().modelId();
    }
}

View File

@ -20,7 +20,7 @@ import org.thingsboard.server.common.data.ai.model.AiModel;
import org.thingsboard.server.common.data.ai.model.AiModelType;
public sealed interface AiChatModel<C extends AiChatModelConfig<C>> extends AiModel<C>
permits OpenAiChatModel, AzureOpenAiChatModel, GoogleAiGeminiChatModel, MistralAiChatModel {
permits OpenAiChatModel, AzureOpenAiChatModel, GoogleAiGeminiChatModel, GoogleVertexAiGeminiChatModel, MistralAiChatModel {
ChatModel configure(Langchain4jChatModelConfigurer configurer);

View File

@ -18,7 +18,7 @@ package org.thingsboard.server.common.data.ai.model.chat;
import org.thingsboard.server.common.data.ai.model.AiModelConfig;
public sealed interface AiChatModelConfig<C extends AiChatModelConfig<C>> extends AiModelConfig<C>
permits OpenAiChatModel.Config, AzureOpenAiChatModel.Config, GoogleAiGeminiChatModel.Config, MistralAiChatModel.Config {
permits OpenAiChatModel.Config, AzureOpenAiChatModel.Config, GoogleAiGeminiChatModel.Config, GoogleVertexAiGeminiChatModel.Config, MistralAiChatModel.Config {
Double temperature();

View File

@ -20,11 +20,11 @@ import org.thingsboard.server.common.data.ai.provider.AzureOpenAiProviderConfig;
public record AzureOpenAiChatModel(
AzureOpenAiProviderConfig providerConfig,
String modelId,
Config modelConfig
) implements AiChatModel<AzureOpenAiChatModel.Config> {
public record Config(
String modelId,
Double temperature,
Integer timeoutSeconds,
Integer maxRetries
@ -32,17 +32,17 @@ public record AzureOpenAiChatModel(
@Override
public AzureOpenAiChatModel.Config withTemperature(Double temperature) {
return new Config(temperature, timeoutSeconds, maxRetries);
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public AzureOpenAiChatModel.Config withTimeoutSeconds(Integer timeoutSeconds) {
return new Config(temperature, timeoutSeconds, maxRetries);
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public AzureOpenAiChatModel.Config withMaxRetries(Integer maxRetries) {
return new Config(temperature, timeoutSeconds, maxRetries);
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
}
@ -54,7 +54,7 @@ public record AzureOpenAiChatModel(
@Override
public AzureOpenAiChatModel withModelConfig(AzureOpenAiChatModel.Config config) {
return new AzureOpenAiChatModel(providerConfig, modelId, config);
return new AzureOpenAiChatModel(providerConfig, config);
}
}

View File

@ -20,11 +20,11 @@ import org.thingsboard.server.common.data.ai.provider.GoogleAiGeminiProviderConf
public record GoogleAiGeminiChatModel(
GoogleAiGeminiProviderConfig providerConfig,
String modelId,
Config modelConfig
) implements AiChatModel<GoogleAiGeminiChatModel.Config> {
public record Config(
String modelId,
Double temperature,
Integer timeoutSeconds,
Integer maxRetries
@ -32,17 +32,17 @@ public record GoogleAiGeminiChatModel(
@Override
public Config withTemperature(Double temperature) {
return new Config(temperature, timeoutSeconds, maxRetries);
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public Config withTimeoutSeconds(Integer timeoutSeconds) {
return new Config(temperature, timeoutSeconds, maxRetries);
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public Config withMaxRetries(Integer maxRetries) {
return new Config(temperature, timeoutSeconds, maxRetries);
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
}
@ -54,7 +54,7 @@ public record GoogleAiGeminiChatModel(
@Override
public GoogleAiGeminiChatModel withModelConfig(GoogleAiGeminiChatModel.Config config) {
return new GoogleAiGeminiChatModel(providerConfig, modelId, config);
return new GoogleAiGeminiChatModel(providerConfig, config);
}
}

View File

@ -0,0 +1,60 @@
/**
* Copyright © 2016-2025 The Thingsboard Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thingsboard.server.common.data.ai.model.chat;
import dev.langchain4j.model.chat.ChatModel;
import org.thingsboard.server.common.data.ai.provider.GoogleVertexAiGeminiProviderConfig;
/**
 * Chat model served through Google Vertex AI Gemini.
 *
 * <p>Pairs a {@link GoogleVertexAiGeminiProviderConfig} (project, location,
 * service account key) with a per-model {@link Config} (model ID, sampling
 * and retry settings).
 */
public record GoogleVertexAiGeminiChatModel(
        GoogleVertexAiGeminiProviderConfig providerConfig,
        Config modelConfig
) implements AiChatModel<GoogleVertexAiGeminiChatModel.Config> {

    /**
     * Model-level settings for a Vertex AI Gemini chat model.
     * Immutable; the {@code withX} methods return updated copies.
     */
    public record Config(
            String modelId,
            Double temperature,
            Integer timeoutSeconds, // TODO: not supported by Vertex AI
            Integer maxRetries
    ) implements AiChatModelConfig<GoogleVertexAiGeminiChatModel.Config> {

        @Override
        public Config withTemperature(Double temperature) {
            return new Config(this.modelId, temperature, this.timeoutSeconds, this.maxRetries);
        }

        @Override
        public Config withTimeoutSeconds(Integer timeoutSeconds) {
            return new Config(this.modelId, this.temperature, timeoutSeconds, this.maxRetries);
        }

        @Override
        public Config withMaxRetries(Integer maxRetries) {
            return new Config(this.modelId, this.temperature, this.timeoutSeconds, maxRetries);
        }
    }

    @Override
    public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
        // Double-dispatch: the configurer builds the LangChain4j model for this provider.
        return configurer.configureChatModel(this);
    }

    @Override
    public GoogleVertexAiGeminiChatModel withModelConfig(GoogleVertexAiGeminiChatModel.Config config) {
        return new GoogleVertexAiGeminiChatModel(this.providerConfig, config);
    }
}

View File

@ -25,6 +25,8 @@ public interface Langchain4jChatModelConfigurer {
ChatModel configureChatModel(GoogleAiGeminiChatModel chatModel);
ChatModel configureChatModel(GoogleVertexAiGeminiChatModel chatModel);
ChatModel configureChatModel(MistralAiChatModel chatModel);
}

View File

@ -20,11 +20,11 @@ import org.thingsboard.server.common.data.ai.provider.MistralAiProviderConfig;
public record MistralAiChatModel(
MistralAiProviderConfig providerConfig,
String modelId,
Config modelConfig
) implements AiChatModel<MistralAiChatModel.Config> {
public record Config(
String modelId,
Double temperature,
Integer timeoutSeconds,
Integer maxRetries
@ -32,17 +32,17 @@ public record MistralAiChatModel(
@Override
public Config withTemperature(Double temperature) {
return new Config(temperature, timeoutSeconds, maxRetries);
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public Config withTimeoutSeconds(Integer timeoutSeconds) {
return new Config(temperature, timeoutSeconds, maxRetries);
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public Config withMaxRetries(Integer maxRetries) {
return new Config(temperature, timeoutSeconds, maxRetries);
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
}
@ -54,7 +54,7 @@ public record MistralAiChatModel(
@Override
public MistralAiChatModel withModelConfig(Config config) {
return new MistralAiChatModel(providerConfig, modelId, config);
return new MistralAiChatModel(providerConfig, config);
}
}

View File

@ -20,11 +20,11 @@ import org.thingsboard.server.common.data.ai.provider.OpenAiProviderConfig;
public record OpenAiChatModel(
OpenAiProviderConfig providerConfig,
String modelId,
Config modelConfig
) implements AiChatModel<OpenAiChatModel.Config> {
public record Config(
String modelId,
Double temperature,
Integer timeoutSeconds,
Integer maxRetries
@ -32,17 +32,17 @@ public record OpenAiChatModel(
@Override
public OpenAiChatModel.Config withTemperature(Double temperature) {
return new Config(temperature, timeoutSeconds, maxRetries);
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public OpenAiChatModel.Config withTimeoutSeconds(Integer timeoutSeconds) {
return new Config(temperature, timeoutSeconds, maxRetries);
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
@Override
public OpenAiChatModel.Config withMaxRetries(Integer maxRetries) {
return new Config(temperature, timeoutSeconds, maxRetries);
return new Config(modelId, temperature, timeoutSeconds, maxRetries);
}
}
@ -54,7 +54,7 @@ public record OpenAiChatModel(
@Override
public OpenAiChatModel withModelConfig(OpenAiChatModel.Config config) {
return new OpenAiChatModel(providerConfig, modelId, config);
return new OpenAiChatModel(providerConfig, config);
}
}

View File

@ -20,6 +20,7 @@ public enum AiProvider {
OPENAI,
AZURE_OPENAI,
GOOGLE_AI_GEMINI,
GOOGLE_VERTEX_AI_GEMINI,
MISTRAL_AI
}

View File

@ -27,13 +27,12 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo;
@JsonSubTypes.Type(value = OpenAiProviderConfig.class, name = "OPENAI"),
@JsonSubTypes.Type(value = AzureOpenAiProviderConfig.class, name = "AZURE_OPENAI"),
@JsonSubTypes.Type(value = GoogleAiGeminiProviderConfig.class, name = "GOOGLE_AI_GEMINI"),
@JsonSubTypes.Type(value = GoogleVertexAiGeminiProviderConfig.class, name = "GOOGLE_VERTEX_AI_GEMINI"),
@JsonSubTypes.Type(value = MistralAiProviderConfig.class, name = "MISTRAL_AI")
})
public sealed interface AiProviderConfig
permits OpenAiProviderConfig, AzureOpenAiProviderConfig, GoogleAiGeminiProviderConfig, MistralAiProviderConfig {
permits OpenAiProviderConfig, AzureOpenAiProviderConfig, GoogleAiGeminiProviderConfig, GoogleVertexAiGeminiProviderConfig, MistralAiProviderConfig {
AiProvider provider();
String apiKey();
}

View File

@ -22,9 +22,4 @@ public record AzureOpenAiProviderConfig(String apiKey) implements AiProviderConf
return AiProvider.AZURE_OPENAI;
}
@Override
public String apiKey() {
return apiKey;
}
}

View File

@ -22,9 +22,4 @@ public record GoogleAiGeminiProviderConfig(String apiKey) implements AiProviderC
return AiProvider.GOOGLE_AI_GEMINI;
}
@Override
public String apiKey() {
return apiKey;
}
}

View File

@ -0,0 +1,31 @@
/**
* Copyright © 2016-2025 The Thingsboard Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thingsboard.server.common.data.ai.provider;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
 * Provider-level configuration for Google Vertex AI Gemini: the GCP project,
 * region, and the service account key JSON used for authentication.
 */
public record GoogleVertexAiGeminiProviderConfig(
        String projectId,
        String location,
        ObjectNode serviceAccountKey
) implements AiProviderConfig {

    @Override
    public AiProvider provider() {
        return AiProvider.GOOGLE_VERTEX_AI_GEMINI;
    }
}

View File

@ -22,9 +22,4 @@ public record MistralAiProviderConfig(String apiKey) implements AiProviderConfig
return AiProvider.MISTRAL_AI;
}
@Override
public String apiKey() {
return apiKey;
}
}

View File

@ -22,9 +22,4 @@ public record OpenAiProviderConfig(String apiKey) implements AiProviderConfig {
return AiProvider.OPENAI;
}
@Override
public String apiKey() {
return apiKey;
}
}

View File

@ -35,15 +35,27 @@ interface AiModelSettingsRepository extends JpaRepository<AiModelSettingsEntity,
Optional<AiModelSettingsEntity> findByTenantIdAndName(UUID tenantId, String name);
@Query(nativeQuery = true, value = """
SELECT *
FROM ai_model_settings ai_model
WHERE ai_model.tenant_id = :tenantId
AND (:textSearch IS NULL
OR ai_model.name ILIKE '%' || :textSearch || '%'
OR (ai_model.configuration -> 'providerConfig' ->> 'provider') ILIKE '%' || :textSearch || '%'
OR (ai_model.configuration ->> 'modelId') ILIKE '%' || :textSearch || '%')
""")
@Query(
value = """
SELECT *
FROM ai_model_settings ai_model
WHERE ai_model.tenant_id = :tenantId
AND (:textSearch IS NULL
OR ai_model.name ILIKE '%' || :textSearch || '%'
OR (ai_model.configuration -> 'providerConfig' ->> 'provider') ILIKE '%' || :textSearch || '%'
OR (ai_model.configuration -> 'modelConfig' ->> 'modelId') ILIKE '%' || :textSearch || '%')
""",
countQuery = """
SELECT COUNT(*)
FROM ai_model_settings ai_model
WHERE ai_model.tenant_id = :tenantId
AND (:textSearch IS NULL
OR ai_model.name ILIKE '%' || :textSearch || '%'
OR (ai_model.configuration -> 'providerConfig' ->> 'provider') ILIKE '%' || :textSearch || '%'
OR (ai_model.configuration -> 'modelConfig' ->> 'modelId') ILIKE '%' || :textSearch || '%')
""",
nativeQuery = true
)
Page<AiModelSettingsEntity> findByTenantId(@Param("tenantId") UUID tenantId, @Param("textSearch") String textSearch, Pageable pageable);
@Query("SELECT ai_model.id FROM AiModelSettingsEntity ai_model WHERE ai_model.tenantId = :tenantId")