AI models: add support for Ollama
parent e88114de72
commit 5c7f20a151
@@ -419,6 +419,10 @@
                 </exclusion>
             </exclusions>
         </dependency>
+        <dependency>
+            <groupId>dev.langchain4j</groupId>
+            <artifactId>langchain4j-ollama</artifactId>
+        </dependency>
     </dependencies>
 
     <build>
@@ -32,6 +32,7 @@ import dev.langchain4j.model.chat.request.ChatRequestParameters;
 import dev.langchain4j.model.github.GitHubModelsChatModel;
 import dev.langchain4j.model.googleai.GoogleAiGeminiChatModel;
 import dev.langchain4j.model.mistralai.MistralAiChatModel;
+import dev.langchain4j.model.ollama.OllamaChatModel;
 import dev.langchain4j.model.openai.OpenAiChatModel;
 import dev.langchain4j.model.vertexai.gemini.VertexAiGeminiChatModel;
 import org.springframework.stereotype.Component;
@@ -43,6 +44,7 @@ import org.thingsboard.server.common.data.ai.model.chat.GoogleAiGeminiChatModelC
 import org.thingsboard.server.common.data.ai.model.chat.GoogleVertexAiGeminiChatModelConfig;
 import org.thingsboard.server.common.data.ai.model.chat.Langchain4jChatModelConfigurer;
 import org.thingsboard.server.common.data.ai.model.chat.MistralAiChatModelConfig;
+import org.thingsboard.server.common.data.ai.model.chat.OllamaChatModelConfig;
 import org.thingsboard.server.common.data.ai.model.chat.OpenAiChatModelConfig;
 import org.thingsboard.server.common.data.ai.provider.AmazonBedrockProviderConfig;
 import org.thingsboard.server.common.data.ai.provider.AzureOpenAiProviderConfig;
@@ -262,6 +264,20 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur
                 .build();
     }
 
+    @Override
+    public ChatModel configureChatModel(OllamaChatModelConfig chatModelConfig) {
+        return OllamaChatModel.builder()
+                .baseUrl(chatModelConfig.providerConfig().baseUrl())
+                .modelName(chatModelConfig.modelId())
+                .temperature(chatModelConfig.temperature())
+                .topP(chatModelConfig.topP())
+                .topK(chatModelConfig.topK())
+                .numPredict(chatModelConfig.maxOutputTokens())
+                .timeout(toDuration(chatModelConfig.timeoutSeconds()))
+                .maxRetries(chatModelConfig.maxRetries())
+                .build();
+    }
+
     private static Duration toDuration(Integer timeoutSeconds) {
         return timeoutSeconds != null ? Duration.ofSeconds(timeoutSeconds) : null;
     }
@@ -22,7 +22,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
 @JsonTypeInfo(
         use = JsonTypeInfo.Id.NAME,
         property = "status",
-        include = JsonTypeInfo.As.PROPERTY,
+        include = JsonTypeInfo.As.EXISTING_PROPERTY,
         visible = true
 )
 @JsonSubTypes({
@@ -51,9 +51,7 @@ public sealed interface TbChatResponse permits TbChatResponse.Success, TbChatRes
     }
 
     record Failure(
-            @Schema(
-                    description = "A string containing details about the failure"
-            )
+            @Schema(description = "A string containing details about the failure")
             String errorDetails
     ) implements TbChatResponse {
 
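Side note on the `As.PROPERTY` to `As.EXISTING_PROPERTY` switch above: with `EXISTING_PROPERTY`, Jackson reuses a property the subtypes already declare (`status`) as the type discriminator instead of emitting a separate synthetic one, and `visible = true` keeps that value bound after deserialization. Below is a self-contained illustration with hypothetical types, not the actual `TbChatResponse` records.

```java
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ExistingPropertyDemo {

    @JsonTypeInfo(
            use = JsonTypeInfo.Id.NAME,
            property = "status",
            include = JsonTypeInfo.As.EXISTING_PROPERTY, // reuse the records' own "status" field as the type id
            visible = true                               // make the discriminator value available after parsing
    )
    @JsonSubTypes({
            @JsonSubTypes.Type(value = Success.class, name = "SUCCESS"),
            @JsonSubTypes.Type(value = Failure.class, name = "FAILURE")
    })
    sealed interface Response permits Success, Failure {}

    record Success(String status, String result) implements Response {}

    record Failure(String status, String errorDetails) implements Response {}

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        // "status" appears exactly once in the output: Jackson relies on the field the
        // record already serializes instead of writing an extra discriminator property.
        String json = mapper.writeValueAsString(new Failure("FAILURE", "model unreachable"));
        System.out.println(json); // {"status":"FAILURE","errorDetails":"model unreachable"}

        Response parsed = mapper.readValue(json, Response.class);
        System.out.println(parsed instanceof Failure); // true
    }
}
```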
@@ -24,6 +24,7 @@ import org.thingsboard.server.common.data.ai.model.chat.GitHubModelsChatModelCon
 import org.thingsboard.server.common.data.ai.model.chat.GoogleAiGeminiChatModelConfig;
 import org.thingsboard.server.common.data.ai.model.chat.GoogleVertexAiGeminiChatModelConfig;
 import org.thingsboard.server.common.data.ai.model.chat.MistralAiChatModelConfig;
+import org.thingsboard.server.common.data.ai.model.chat.OllamaChatModelConfig;
 import org.thingsboard.server.common.data.ai.model.chat.OpenAiChatModelConfig;
 import org.thingsboard.server.common.data.ai.provider.AiProvider;
 import org.thingsboard.server.common.data.ai.provider.AiProviderConfig;
@@ -34,6 +35,7 @@ import org.thingsboard.server.common.data.ai.provider.GitHubModelsProviderConfig
 import org.thingsboard.server.common.data.ai.provider.GoogleAiGeminiProviderConfig;
 import org.thingsboard.server.common.data.ai.provider.GoogleVertexAiGeminiProviderConfig;
 import org.thingsboard.server.common.data.ai.provider.MistralAiProviderConfig;
+import org.thingsboard.server.common.data.ai.provider.OllamaProviderConfig;
 import org.thingsboard.server.common.data.ai.provider.OpenAiProviderConfig;
 
 @JsonTypeInfo(
@@ -50,7 +52,8 @@ import org.thingsboard.server.common.data.ai.provider.OpenAiProviderConfig;
         @JsonSubTypes.Type(value = MistralAiChatModelConfig.class, name = "MISTRAL_AI"),
         @JsonSubTypes.Type(value = AnthropicChatModelConfig.class, name = "ANTHROPIC"),
         @JsonSubTypes.Type(value = AmazonBedrockChatModelConfig.class, name = "AMAZON_BEDROCK"),
-        @JsonSubTypes.Type(value = GitHubModelsChatModelConfig.class, name = "GITHUB_MODELS")
+        @JsonSubTypes.Type(value = GitHubModelsChatModelConfig.class, name = "GITHUB_MODELS"),
+        @JsonSubTypes.Type(value = OllamaChatModelConfig.class, name = "OLLAMA")
 })
 public interface AiModelConfig {
 
@@ -69,7 +72,8 @@ public interface AiModelConfig {
             @JsonSubTypes.Type(value = MistralAiProviderConfig.class, name = "MISTRAL_AI"),
             @JsonSubTypes.Type(value = AnthropicProviderConfig.class, name = "ANTHROPIC"),
             @JsonSubTypes.Type(value = AmazonBedrockProviderConfig.class, name = "AMAZON_BEDROCK"),
-            @JsonSubTypes.Type(value = GitHubModelsProviderConfig.class, name = "GITHUB_MODELS")
+            @JsonSubTypes.Type(value = GitHubModelsProviderConfig.class, name = "GITHUB_MODELS"),
+            @JsonSubTypes.Type(value = OllamaProviderConfig.class, name = "OLLAMA")
     })
     AiProviderConfig providerConfig();
 
@@ -24,7 +24,7 @@ public sealed interface AiChatModelConfig<C extends AiChatModelConfig<C>> extend
         permits
         OpenAiChatModelConfig, AzureOpenAiChatModelConfig, GoogleAiGeminiChatModelConfig,
         GoogleVertexAiGeminiChatModelConfig, MistralAiChatModelConfig, AnthropicChatModelConfig,
-        AmazonBedrockChatModelConfig, GitHubModelsChatModelConfig {
+        AmazonBedrockChatModelConfig, GitHubModelsChatModelConfig, OllamaChatModelConfig {
 
     ChatModel configure(Langchain4jChatModelConfigurer configurer);
 
@@ -35,4 +35,6 @@ public interface Langchain4jChatModelConfigurer {
 
     ChatModel configureChatModel(GitHubModelsChatModelConfig chatModelConfig);
 
+    ChatModel configureChatModel(OllamaChatModelConfig chatModelConfig);
+
 }
@@ -0,0 +1,57 @@
+/**
+ * Copyright © 2016-2025 The Thingsboard Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.thingsboard.server.common.data.ai.model.chat;
+
+import dev.langchain4j.model.chat.ChatModel;
+import jakarta.validation.Valid;
+import jakarta.validation.constraints.Max;
+import jakarta.validation.constraints.NotBlank;
+import jakarta.validation.constraints.NotNull;
+import jakarta.validation.constraints.Positive;
+import jakarta.validation.constraints.PositiveOrZero;
+import lombok.Builder;
+import lombok.With;
+import org.thingsboard.server.common.data.ai.provider.AiProvider;
+import org.thingsboard.server.common.data.ai.provider.OllamaProviderConfig;
+
+@Builder
+public record OllamaChatModelConfig(
+        @NotNull @Valid OllamaProviderConfig providerConfig,
+        @NotBlank String modelId,
+        @PositiveOrZero Double temperature,
+        @Positive @Max(1) Double topP,
+        @PositiveOrZero Integer topK,
+        @Positive Integer maxOutputTokens,
+        @With @Positive Integer timeoutSeconds,
+        @With @PositiveOrZero Integer maxRetries
+) implements AiChatModelConfig<OllamaChatModelConfig> {
+
+    @Override
+    public AiProvider provider() {
+        return AiProvider.OLLAMA;
+    }
+
+    @Override
+    public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
+        return configurer.configureChatModel(this);
+    }
+
+    @Override
+    public boolean supportsJsonMode() {
+        return true;
+    }
+
+}
@@ -24,6 +24,7 @@ public enum AiProvider {
     MISTRAL_AI,
     ANTHROPIC,
     AMAZON_BEDROCK,
-    GITHUB_MODELS
+    GITHUB_MODELS,
+    OLLAMA
 
 }
@@ -19,4 +19,4 @@ public sealed interface AiProviderConfig
         permits
         OpenAiProviderConfig, AzureOpenAiProviderConfig, GoogleAiGeminiProviderConfig,
         GoogleVertexAiGeminiProviderConfig, MistralAiProviderConfig, AnthropicProviderConfig,
-        AmazonBedrockProviderConfig, GitHubModelsProviderConfig {}
+        AmazonBedrockProviderConfig, GitHubModelsProviderConfig, OllamaProviderConfig {}
@@ -0,0 +1,22 @@
+/**
+ * Copyright © 2016-2025 The Thingsboard Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.thingsboard.server.common.data.ai.provider;
+
+import jakarta.validation.constraints.NotBlank;
+
+public record OllamaProviderConfig(
+        @NotBlank String baseUrl
+) implements AiProviderConfig {}
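Usage note: a minimal sketch (not part of this commit) of how the new Ollama records plug into the existing configurer API. The base URL and model name are assumptions — `http://localhost:11434` is just the usual address of a locally running Ollama instance, and `llama3.1` stands in for whatever model has been pulled into it.

```java
import dev.langchain4j.model.chat.ChatModel;
import org.thingsboard.server.common.data.ai.model.chat.Langchain4jChatModelConfigurer;
import org.thingsboard.server.common.data.ai.model.chat.OllamaChatModelConfig;
import org.thingsboard.server.common.data.ai.provider.OllamaProviderConfig;

class OllamaUsageSketch {

    // Builds a langchain4j ChatModel from the new Ollama config records via the
    // configurer interface extended in this commit.
    ChatModel buildLocalOllamaModel(Langchain4jChatModelConfigurer configurer) {
        OllamaChatModelConfig config = OllamaChatModelConfig.builder()
                .providerConfig(new OllamaProviderConfig("http://localhost:11434")) // assumed local Ollama
                .modelId("llama3.1")      // assumed: any model already pulled into Ollama
                .temperature(0.5)
                .topP(0.9)
                .topK(40)
                .maxOutputTokens(512)
                .timeoutSeconds(60)
                .maxRetries(1)
                .build();
        // Double-dispatch into the configurer, which delegates to the new
        // configureChatModel(OllamaChatModelConfig) overload.
        return config.configure(configurer);
    }

}
```

Because the constraints (`@NotNull`, `@NotBlank`, `@Positive`, `@Max(1)`) sit directly on the record components, the same object can be run through Bean Validation before the model is built, just like the other provider configs.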
@@ -60,9 +60,7 @@ public sealed interface TbResponseFormat permits TbTextResponseFormat, TbJsonRes
 
         @Override
         public ResponseFormat toLangChainResponseFormat() {
-            return ResponseFormat.builder()
-                    .type(ResponseFormatType.TEXT)
-                    .build();
+            return ResponseFormat.TEXT;
         }
 
     }
@@ -76,9 +74,7 @@ public sealed interface TbResponseFormat permits TbTextResponseFormat, TbJsonRes
 
         @Override
         public ResponseFormat toLangChainResponseFormat() {
-            return ResponseFormat.builder()
-                    .type(ResponseFormatType.JSON)
-                    .build();
+            return ResponseFormat.JSON;
         }
 
     }
@@ -150,6 +150,15 @@
                 </mat-error>
               </mat-form-field>
             }
+            @if (providerFieldsList.includes('baseUrl')) {
+              <mat-form-field class="mat-block flex-1" appearance="outline" subscriptSizing="dynamic">
+                <mat-label translate>ai-models.baseurl</mat-label>
+                <input required matInput formControlName="baseUrl">
+                <mat-error *ngIf="aiModelForms.get('configuration').get('providerConfig').get('baseUrl').hasError('required')">
+                  {{ 'ai-models.baseurl-required' | translate }}
+                </mat-error>
+              </mat-form-field>
+            }
           </div>
         </section>
       </section>
@@ -100,6 +100,7 @@ export class AIModelDialogComponent extends DialogComponent<AIModelDialogCompone
           region: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.region : '', [Validators.required]],
           accessKeyId: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.accessKeyId : '', [Validators.required]],
           secretAccessKey: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.secretAccessKey : '', [Validators.required]],
+          baseUrl: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.baseUrl : '', [Validators.required]],
         }),
         modelId: [this.data.AIModel ? this.data.AIModel.configuration?.modelId : '', [Validators.required]],
         temperature: [this.data.AIModel ? this.data.AIModel.configuration?.temperature : null, [Validators.min(0)]],
@@ -34,6 +34,7 @@ export interface AiModel extends Omit<BaseData<AiModelId>, 'label'>, HasTenantId
       region?: string;
       accessKeyId?: string;
       secretAccessKey?: string;
+      baseUrl?: string;
     };
     modelId: string;
     temperature?: number;
@@ -57,7 +58,8 @@ export enum AiProvider {
   MISTRAL_AI = 'MISTRAL_AI',
   ANTHROPIC = 'ANTHROPIC',
   AMAZON_BEDROCK = 'AMAZON_BEDROCK',
-  GITHUB_MODELS = 'GITHUB_MODELS'
+  GITHUB_MODELS = 'GITHUB_MODELS',
+  OLLAMA = 'OLLAMA'
 }
 
 export const AiProviderTranslations = new Map<AiProvider, string>(
@@ -69,7 +71,8 @@ export const AiProviderTranslations = new Map<AiProvider, string>(
     [AiProvider.MISTRAL_AI , 'ai-models.ai-providers.mistral-ai'],
     [AiProvider.ANTHROPIC , 'ai-models.ai-providers.anthropic'],
     [AiProvider.AMAZON_BEDROCK , 'ai-models.ai-providers.amazon-bedrock'],
-    [AiProvider.GITHUB_MODELS , 'ai-models.ai-providers.github-models']
+    [AiProvider.GITHUB_MODELS , 'ai-models.ai-providers.github-models'],
+    [AiProvider.OLLAMA , 'ai-models.ai-providers.ollama']
   ]
 );
 
@@ -84,7 +87,8 @@ export const ProviderFieldsAllList = [
   'serviceVersion',
   'region',
   'accessKeyId',
-  'secretAccessKey'
+  'secretAccessKey',
+  'baseUrl'
 ];
 
 export const ModelFieldsAllList = ['temperature', 'topP', 'topK', 'frequencyPenalty', 'presencePenalty', 'maxOutputTokens'];
@@ -191,6 +195,14 @@ export const AiModelMap = new Map<AiProvider, { modelList: string[], providerFie
       modelFieldsList: ['temperature', 'topP', 'frequencyPenalty', 'presencePenalty', 'maxOutputTokens'],
     },
   ],
+  [
+    AiProvider.OLLAMA,
+    {
+      modelList: [],
+      providerFieldsList: ['baseUrl'],
+      modelFieldsList: ['temperature', 'topP', 'topK', 'maxOutputTokens'],
+    },
+  ],
 ]);
 
 export const AiRuleNodeResponseFormatTypeOnlyText: AiProvider[] = [AiProvider.AMAZON_BEDROCK, AiProvider.ANTHROPIC, AiProvider.GITHUB_MODELS];
@@ -216,7 +228,8 @@ export interface AiModelWithUserMsg {
       projectId?: string;
       location?: string;
       serviceAccountKey?: string;
-      fileName?: string
+      fileName?: string;
+      baseUrl?: string;
     };
     modelId: string;
     maxRetries: number;
@@ -1112,7 +1112,8 @@
             "mistral-ai": "Mistral AI",
             "anthropic": "Anthropic",
             "amazon-bedrock": "Amazon Bedrock",
-            "github-models": "GitHub Models"
+            "github-models": "GitHub Models",
+            "ollama": "Ollama"
         },
         "name-required": "Name is required.",
         "name-max-length": "Name must be 255 characters or less.",
@@ -1159,6 +1160,8 @@
         "max-output-tokens-hint": "Sets the maximum number of tokens that the \nmodel can generate in a single response.",
         "endpoint": "Endpoint",
         "endpoint-required": "Endpoint is required.",
+        "baseurl": "Base URL",
+        "baseurl-required": "Base URL is required.",
         "service-version": "Service version",
         "check-connectivity": "Check connectivity",
         "check-connectivity-success": "Test request was successful",