AI models: add support for Ollama

Dmytro Skarzhynets 2025-09-15 17:32:57 +03:00 committed by Vladyslav_Prykhodko
parent e88114de72
commit 5c7f20a151
15 changed files with 146 additions and 20 deletions

View File

@@ -419,6 +419,10 @@
                </exclusion>
            </exclusions>
        </dependency>
+       <dependency>
+           <groupId>dev.langchain4j</groupId>
+           <artifactId>langchain4j-ollama</artifactId>
+       </dependency>
    </dependencies>
    <build>

View File

@@ -32,6 +32,7 @@ import dev.langchain4j.model.chat.request.ChatRequestParameters;
import dev.langchain4j.model.github.GitHubModelsChatModel;
import dev.langchain4j.model.googleai.GoogleAiGeminiChatModel;
import dev.langchain4j.model.mistralai.MistralAiChatModel;
+import dev.langchain4j.model.ollama.OllamaChatModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.vertexai.gemini.VertexAiGeminiChatModel;
import org.springframework.stereotype.Component;
@@ -43,6 +44,7 @@ import org.thingsboard.server.common.data.ai.model.chat.GoogleAiGeminiChatModelC
import org.thingsboard.server.common.data.ai.model.chat.GoogleVertexAiGeminiChatModelConfig;
import org.thingsboard.server.common.data.ai.model.chat.Langchain4jChatModelConfigurer;
import org.thingsboard.server.common.data.ai.model.chat.MistralAiChatModelConfig;
+import org.thingsboard.server.common.data.ai.model.chat.OllamaChatModelConfig;
import org.thingsboard.server.common.data.ai.model.chat.OpenAiChatModelConfig;
import org.thingsboard.server.common.data.ai.provider.AmazonBedrockProviderConfig;
import org.thingsboard.server.common.data.ai.provider.AzureOpenAiProviderConfig;
@@ -262,6 +264,20 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur
                .build();
    }

+   @Override
+   public ChatModel configureChatModel(OllamaChatModelConfig chatModelConfig) {
+       return OllamaChatModel.builder()
+               .baseUrl(chatModelConfig.providerConfig().baseUrl())
+               .modelName(chatModelConfig.modelId())
+               .temperature(chatModelConfig.temperature())
+               .topP(chatModelConfig.topP())
+               .topK(chatModelConfig.topK())
+               .numPredict(chatModelConfig.maxOutputTokens())
+               .timeout(toDuration(chatModelConfig.timeoutSeconds()))
+               .maxRetries(chatModelConfig.maxRetries())
+               .build();
+   }
+
    private static Duration toDuration(Integer timeoutSeconds) {
        return timeoutSeconds != null ? Duration.ofSeconds(timeoutSeconds) : null;
    }

View File

@@ -22,7 +22,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
@JsonTypeInfo(
        use = JsonTypeInfo.Id.NAME,
        property = "status",
-       include = JsonTypeInfo.As.PROPERTY,
+       include = JsonTypeInfo.As.EXISTING_PROPERTY,
        visible = true
)
@JsonSubTypes({
@@ -51,9 +51,7 @@ public sealed interface TbChatResponse permits TbChatResponse.Success, TbChatRes
    }

    record Failure(
-           @Schema(
-                   description = "A string containing details about the failure"
-           )
+           @Schema(description = "A string containing details about the failure")
            String errorDetails
    ) implements TbChatResponse {
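The switch from As.PROPERTY to As.EXISTING_PROPERTY is subtle but meaningful: the subtypes already expose a status property, so with EXISTING_PROPERTY Jackson reuses that property as the type discriminator instead of emitting a second, duplicate "status" key during serialization. A hypothetical reduction of the pattern (the subtype shapes below are invented for illustration):

    // Hypothetical sketch: the existing "status" accessor doubles as the
    // Jackson type id, so no extra discriminator field is written.
    @JsonTypeInfo(
            use = JsonTypeInfo.Id.NAME,
            property = "status",
            include = JsonTypeInfo.As.EXISTING_PROPERTY,
            visible = true
    )
    @JsonSubTypes({
            @JsonSubTypes.Type(value = Ok.class, name = "SUCCESS"),
            @JsonSubTypes.Type(value = Err.class, name = "FAILURE")
    })
    sealed interface Result permits Ok, Err {
        String status();
    }
    record Ok(String status, String text) implements Result {}
    record Err(String status, String errorDetails) implements Result {}

With As.PROPERTY, Jackson would write its own discriminator alongside the record's own status field, duplicating the key; EXISTING_PROPERTY avoids that.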

View File

@@ -24,6 +24,7 @@ import org.thingsboard.server.common.data.ai.model.chat.GitHubModelsChatModelCon
import org.thingsboard.server.common.data.ai.model.chat.GoogleAiGeminiChatModelConfig;
import org.thingsboard.server.common.data.ai.model.chat.GoogleVertexAiGeminiChatModelConfig;
import org.thingsboard.server.common.data.ai.model.chat.MistralAiChatModelConfig;
+import org.thingsboard.server.common.data.ai.model.chat.OllamaChatModelConfig;
import org.thingsboard.server.common.data.ai.model.chat.OpenAiChatModelConfig;
import org.thingsboard.server.common.data.ai.provider.AiProvider;
import org.thingsboard.server.common.data.ai.provider.AiProviderConfig;
@@ -34,6 +35,7 @@ import org.thingsboard.server.common.data.ai.provider.GitHubModelsProviderConfig
import org.thingsboard.server.common.data.ai.provider.GoogleAiGeminiProviderConfig;
import org.thingsboard.server.common.data.ai.provider.GoogleVertexAiGeminiProviderConfig;
import org.thingsboard.server.common.data.ai.provider.MistralAiProviderConfig;
+import org.thingsboard.server.common.data.ai.provider.OllamaProviderConfig;
import org.thingsboard.server.common.data.ai.provider.OpenAiProviderConfig;

@JsonTypeInfo(
@@ -50,7 +52,8 @@ import org.thingsboard.server.common.data.ai.provider.OpenAiProviderConfig;
        @JsonSubTypes.Type(value = MistralAiChatModelConfig.class, name = "MISTRAL_AI"),
        @JsonSubTypes.Type(value = AnthropicChatModelConfig.class, name = "ANTHROPIC"),
        @JsonSubTypes.Type(value = AmazonBedrockChatModelConfig.class, name = "AMAZON_BEDROCK"),
-       @JsonSubTypes.Type(value = GitHubModelsChatModelConfig.class, name = "GITHUB_MODELS")
+       @JsonSubTypes.Type(value = GitHubModelsChatModelConfig.class, name = "GITHUB_MODELS"),
+       @JsonSubTypes.Type(value = OllamaChatModelConfig.class, name = "OLLAMA")
})
public interface AiModelConfig {
@@ -69,7 +72,8 @@ public interface AiModelConfig {
            @JsonSubTypes.Type(value = MistralAiProviderConfig.class, name = "MISTRAL_AI"),
            @JsonSubTypes.Type(value = AnthropicProviderConfig.class, name = "ANTHROPIC"),
            @JsonSubTypes.Type(value = AmazonBedrockProviderConfig.class, name = "AMAZON_BEDROCK"),
-           @JsonSubTypes.Type(value = GitHubModelsProviderConfig.class, name = "GITHUB_MODELS")
+           @JsonSubTypes.Type(value = GitHubModelsProviderConfig.class, name = "GITHUB_MODELS"),
+           @JsonSubTypes.Type(value = OllamaProviderConfig.class, name = "OLLAMA")
    })
    AiProviderConfig providerConfig();
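A hedged round-trip sketch for the extended registry (the discriminator property of the outer @JsonTypeInfo is not visible in this hunk, so the example leans only on the annotated supertype): serialize a concrete Ollama config and read it back as AiModelConfig, letting the "OLLAMA" type id select the subtype.

    import com.fasterxml.jackson.databind.ObjectMapper;

    // Hypothetical round-trip (values illustrative):
    ObjectMapper mapper = new ObjectMapper();
    AiModelConfig original = OllamaChatModelConfig.builder()
            .providerConfig(new OllamaProviderConfig("http://localhost:11434"))
            .modelId("llama3.1")
            .build();

    String json = mapper.writeValueAsString(original);   // carries the "OLLAMA" type id
    AiModelConfig restored = mapper.readValue(json, AiModelConfig.class);
    assert restored instanceof OllamaChatModelConfig;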

View File

@@ -24,7 +24,7 @@ public sealed interface AiChatModelConfig<C extends AiChatModelConfig<C>> extend
        permits
        OpenAiChatModelConfig, AzureOpenAiChatModelConfig, GoogleAiGeminiChatModelConfig,
        GoogleVertexAiGeminiChatModelConfig, MistralAiChatModelConfig, AnthropicChatModelConfig,
-       AmazonBedrockChatModelConfig, GitHubModelsChatModelConfig {
+       AmazonBedrockChatModelConfig, GitHubModelsChatModelConfig, OllamaChatModelConfig {

    ChatModel configure(Langchain4jChatModelConfigurer configurer);

View File

@@ -35,4 +35,6 @@ public interface Langchain4jChatModelConfigurer {
    ChatModel configureChatModel(GitHubModelsChatModelConfig chatModelConfig);
+
+    ChatModel configureChatModel(OllamaChatModelConfig chatModelConfig);
}

View File

@@ -0,0 +1,57 @@
/**
* Copyright © 2016-2025 The Thingsboard Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thingsboard.server.common.data.ai.model.chat;

import dev.langchain4j.model.chat.ChatModel;
import jakarta.validation.Valid;
import jakarta.validation.constraints.Max;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Positive;
import jakarta.validation.constraints.PositiveOrZero;
import lombok.Builder;
import lombok.With;
import org.thingsboard.server.common.data.ai.provider.AiProvider;
import org.thingsboard.server.common.data.ai.provider.OllamaProviderConfig;

@Builder
public record OllamaChatModelConfig(
        @NotNull @Valid OllamaProviderConfig providerConfig,
        @NotBlank String modelId,
        @PositiveOrZero Double temperature,
        @Positive @Max(1) Double topP,
        @PositiveOrZero Integer topK,
        @Positive Integer maxOutputTokens,
        @With @Positive Integer timeoutSeconds,
        @With @PositiveOrZero Integer maxRetries
) implements AiChatModelConfig<OllamaChatModelConfig> {

    @Override
    public AiProvider provider() {
        return AiProvider.OLLAMA;
    }

    @Override
    public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
        return configurer.configureChatModel(this);
    }

    @Override
    public boolean supportsJsonMode() {
        return true;
    }
}
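The two @With fields are the ones a caller is most likely to normalize after deserialization; a minimal sketch of that pattern, with the fallback values (60 seconds, 1 retry) chosen purely for illustration:

    // Hypothetical defaulting at the call site: Lombok's @With derives a
    // modified copy of the immutable record rather than mutating it.
    OllamaChatModelConfig effective = config.timeoutSeconds() != null
            ? config
            : config.withTimeoutSeconds(60).withMaxRetries(1);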

View File

@@ -24,6 +24,7 @@ public enum AiProvider {
    MISTRAL_AI,
    ANTHROPIC,
    AMAZON_BEDROCK,
-   GITHUB_MODELS
+   GITHUB_MODELS,
+   OLLAMA
}

View File

@@ -19,4 +19,4 @@ public sealed interface AiProviderConfig
        permits
        OpenAiProviderConfig, AzureOpenAiProviderConfig, GoogleAiGeminiProviderConfig,
        GoogleVertexAiGeminiProviderConfig, MistralAiProviderConfig, AnthropicProviderConfig,
-       AmazonBedrockProviderConfig, GitHubModelsProviderConfig {}
+       AmazonBedrockProviderConfig, GitHubModelsProviderConfig, OllamaProviderConfig {}

View File

@@ -0,0 +1,22 @@
/**
* Copyright © 2016-2025 The Thingsboard Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thingsboard.server.common.data.ai.provider;

import jakarta.validation.constraints.NotBlank;

public record OllamaProviderConfig(
        @NotBlank String baseUrl
) implements AiProviderConfig {}
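Because Ollama servers expose no built-in authentication, the provider config carries only the base URL. A hedged sketch of checking the @NotBlank constraint with the Jakarta Validation API (the bootstrap call is the standard one; the sample value is illustrative):

    import jakarta.validation.Validation;
    import jakarta.validation.Validator;

    // Hypothetical validation sketch (not part of this commit):
    Validator validator = Validation.buildDefaultValidatorFactory().getValidator();
    var violations = validator.validate(new OllamaProviderConfig("  "));
    violations.forEach(v ->
            System.out.println(v.getPropertyPath() + ": " + v.getMessage()));
    // prints: baseUrl: must not be blank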

View File

@@ -60,9 +60,7 @@ public sealed interface TbResponseFormat permits TbTextResponseFormat, TbJsonRes
        @Override
        public ResponseFormat toLangChainResponseFormat() {
-            return ResponseFormat.builder()
-                    .type(ResponseFormatType.TEXT)
-                    .build();
+            return ResponseFormat.TEXT;
        }
    }
@@ -76,9 +74,7 @@ public sealed interface TbResponseFormat permits TbTextResponseFormat, TbJsonRes
        @Override
        public ResponseFormat toLangChainResponseFormat() {
-            return ResponseFormat.builder()
-                    .type(ResponseFormatType.JSON)
-                    .build();
+            return ResponseFormat.JSON;
        }
    }

View File

@@ -150,6 +150,15 @@
                </mat-error>
            </mat-form-field>
        }
+       @if (providerFieldsList.includes('baseUrl')) {
+           <mat-form-field class="mat-block flex-1" appearance="outline" subscriptSizing="dynamic">
+               <mat-label translate>ai-models.baseurl</mat-label>
+               <input required matInput formControlName="baseUrl">
+               <mat-error *ngIf="aiModelForms.get('configuration').get('providerConfig').get('baseUrl').hasError('required')">
+                   {{ 'ai-models.baseurl-required' | translate }}
+               </mat-error>
+           </mat-form-field>
+       }
    </div>
</section>
</section>

View File

@@ -100,6 +100,7 @@ export class AIModelDialogComponent extends DialogComponent<AIModelDialogCompone
        region: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.region : '', [Validators.required]],
        accessKeyId: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.accessKeyId : '', [Validators.required]],
        secretAccessKey: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.secretAccessKey : '', [Validators.required]],
+       baseUrl: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.baseUrl : '', [Validators.required]],
      }),
      modelId: [this.data.AIModel ? this.data.AIModel.configuration?.modelId : '', [Validators.required]],
      temperature: [this.data.AIModel ? this.data.AIModel.configuration?.temperature : null, [Validators.min(0)]],

View File

@@ -34,6 +34,7 @@ export interface AiModel extends Omit<BaseData<AiModelId>, 'label'>, HasTenantId
    region?: string;
    accessKeyId?: string;
    secretAccessKey?: string;
+   baseUrl?: string;
  };
  modelId: string;
  temperature?: number;
@@ -57,7 +58,8 @@ export enum AiProvider {
  MISTRAL_AI = 'MISTRAL_AI',
  ANTHROPIC = 'ANTHROPIC',
  AMAZON_BEDROCK = 'AMAZON_BEDROCK',
-  GITHUB_MODELS = 'GITHUB_MODELS'
+  GITHUB_MODELS = 'GITHUB_MODELS',
+  OLLAMA = 'OLLAMA'
}
export const AiProviderTranslations = new Map<AiProvider, string>(
@@ -69,7 +71,8 @@ export const AiProviderTranslations = new Map<AiProvider, string>(
    [AiProvider.MISTRAL_AI , 'ai-models.ai-providers.mistral-ai'],
    [AiProvider.ANTHROPIC , 'ai-models.ai-providers.anthropic'],
    [AiProvider.AMAZON_BEDROCK , 'ai-models.ai-providers.amazon-bedrock'],
-   [AiProvider.GITHUB_MODELS , 'ai-models.ai-providers.github-models']
+   [AiProvider.GITHUB_MODELS , 'ai-models.ai-providers.github-models'],
+   [AiProvider.OLLAMA , 'ai-models.ai-providers.ollama']
  ]
);
@@ -84,7 +87,8 @@ export const ProviderFieldsAllList = [
  'serviceVersion',
  'region',
  'accessKeyId',
-  'secretAccessKey'
+  'secretAccessKey',
+  'baseUrl'
];
export const ModelFieldsAllList = ['temperature', 'topP', 'topK', 'frequencyPenalty', 'presencePenalty', 'maxOutputTokens'];
@@ -191,6 +195,14 @@ export const AiModelMap = new Map<AiProvider, { modelList: string[], providerFie
      modelFieldsList: ['temperature', 'topP', 'frequencyPenalty', 'presencePenalty', 'maxOutputTokens'],
    },
  ],
+  [
+    AiProvider.OLLAMA,
+    {
+      modelList: [],
+      providerFieldsList: ['baseUrl'],
+      modelFieldsList: ['temperature', 'topP', 'topK', 'maxOutputTokens'],
+    },
+  ],
]);
export const AiRuleNodeResponseFormatTypeOnlyText: AiProvider[] = [AiProvider.AMAZON_BEDROCK, AiProvider.ANTHROPIC, AiProvider.GITHUB_MODELS];
@@ -216,7 +228,8 @@ export interface AiModelWithUserMsg {
    projectId?: string;
    location?: string;
    serviceAccountKey?: string;
-   fileName?: string
+   fileName?: string;
+   baseUrl?: string;
  };
  modelId: string;
  maxRetries: number;

View File

@@ -1112,7 +1112,8 @@
    "mistral-ai": "Mistral AI",
    "anthropic": "Anthropic",
    "amazon-bedrock": "Amazon Bedrock",
-   "github-models": "GitHub Models"
+   "github-models": "GitHub Models",
+   "ollama": "Ollama"
  },
  "name-required": "Name is required.",
  "name-max-length": "Name must be 255 characters or less.",
@@ -1159,6 +1160,8 @@
    "max-output-tokens-hint": "Sets the maximum number of tokens that the \nmodel can generate in a single response.",
    "endpoint": "Endpoint",
    "endpoint-required": "Endpoint is required.",
+   "baseurl": "Base URL",
+   "baseurl-required": "Base URL is required.",
    "service-version": "Service version",
    "check-connectivity": "Check connectivity",
    "check-connectivity-success": "Test request was successful",