Merge pull request #14014 from dskarzh/feature/ollama-support
AI models: add support for Ollama
commit 3b661bba66
@@ -419,6 +419,10 @@
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-ollama</artifactId>
</dependency>
</dependencies>

<build>
@@ -32,8 +32,10 @@ import dev.langchain4j.model.chat.request.ChatRequestParameters;
import dev.langchain4j.model.github.GitHubModelsChatModel;
import dev.langchain4j.model.googleai.GoogleAiGeminiChatModel;
import dev.langchain4j.model.mistralai.MistralAiChatModel;
import dev.langchain4j.model.ollama.OllamaChatModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.vertexai.gemini.VertexAiGeminiChatModel;
import org.springframework.http.HttpHeaders;
import org.springframework.stereotype.Component;
import org.thingsboard.server.common.data.ai.model.chat.AmazonBedrockChatModelConfig;
import org.thingsboard.server.common.data.ai.model.chat.AnthropicChatModelConfig;
@@ -43,10 +45,12 @@ import org.thingsboard.server.common.data.ai.model.chat.GoogleAiGeminiChatModelC
import org.thingsboard.server.common.data.ai.model.chat.GoogleVertexAiGeminiChatModelConfig;
import org.thingsboard.server.common.data.ai.model.chat.Langchain4jChatModelConfigurer;
import org.thingsboard.server.common.data.ai.model.chat.MistralAiChatModelConfig;
import org.thingsboard.server.common.data.ai.model.chat.OllamaChatModelConfig;
import org.thingsboard.server.common.data.ai.model.chat.OpenAiChatModelConfig;
import org.thingsboard.server.common.data.ai.provider.AmazonBedrockProviderConfig;
import org.thingsboard.server.common.data.ai.provider.AzureOpenAiProviderConfig;
import org.thingsboard.server.common.data.ai.provider.GoogleVertexAiGeminiProviderConfig;
import org.thingsboard.server.common.data.ai.provider.OllamaProviderConfig;
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
import software.amazon.awssdk.regions.Region;
@@ -54,7 +58,11 @@ import software.amazon.awssdk.services.bedrockruntime.BedrockRuntimeClient;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Base64;

import static java.util.Collections.singletonMap;

@Component
class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigurer {
@@ -134,7 +142,7 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur

// set request timeout from model config
if (chatModelConfig.timeoutSeconds() != null) {
retrySettings.setTotalTimeout(org.threeten.bp.Duration.ofSeconds(chatModelConfig.timeoutSeconds()));
retrySettings.setTotalTimeoutDuration(Duration.ofSeconds(chatModelConfig.timeoutSeconds()));
}

// set updated retry settings
@@ -262,6 +270,35 @@ class Langchain4jChatModelConfigurerImpl implements Langchain4jChatModelConfigur
.build();
}

@Override
public ChatModel configureChatModel(OllamaChatModelConfig chatModelConfig) {
var builder = OllamaChatModel.builder()
.baseUrl(chatModelConfig.providerConfig().baseUrl())
.modelName(chatModelConfig.modelId())
.temperature(chatModelConfig.temperature())
.topP(chatModelConfig.topP())
.topK(chatModelConfig.topK())
.numCtx(chatModelConfig.contextLength())
.numPredict(chatModelConfig.maxOutputTokens())
.timeout(toDuration(chatModelConfig.timeoutSeconds()))
.maxRetries(chatModelConfig.maxRetries());

var auth = chatModelConfig.providerConfig().auth();
if (auth instanceof OllamaProviderConfig.OllamaAuth.Basic basicAuth) {
String credentials = basicAuth.username() + ":" + basicAuth.password();
String encodedCredentials = Base64.getEncoder().encodeToString(credentials.getBytes(StandardCharsets.UTF_8));
builder.customHeaders(singletonMap(HttpHeaders.AUTHORIZATION, "Basic " + encodedCredentials));
} else if (auth instanceof OllamaProviderConfig.OllamaAuth.Token tokenAuth) {
builder.customHeaders(singletonMap(HttpHeaders.AUTHORIZATION, "Bearer " + tokenAuth.token()));
} else if (auth instanceof OllamaProviderConfig.OllamaAuth.None) {
// do nothing
} else {
throw new UnsupportedOperationException("Unknown authentication type: " + auth.getClass().getSimpleName());
}

return builder.build();
}

private static Duration toDuration(Integer timeoutSeconds) {
return timeoutSeconds != null ? Duration.ofSeconds(timeoutSeconds) : null;
}
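For context, a hedged usage sketch of the new overload above (not part of the PR). It assumes the configurer is obtained as the Spring-managed Langchain4jChatModelConfigurer bean; the base URL, token, model name, and tuning values are placeholders, and ChatModel#chat(String) is LangChain4j's simple String-in/String-out convenience method.

OllamaChatModelConfig ollamaConfig = OllamaChatModelConfig.builder()
        .providerConfig(new OllamaProviderConfig(
                "http://localhost:11434",                               // default local Ollama endpoint (placeholder)
                new OllamaProviderConfig.OllamaAuth.Token("my-token"))) // sent as "Authorization: Bearer my-token"
        .modelId("llama3.1")   // placeholder model name
        .temperature(0.2)
        .timeoutSeconds(60)
        .maxRetries(2)
        .build();

// chatModelConfigurer: the injected Langchain4jChatModelConfigurer bean implemented by this class
ChatModel chatModel = chatModelConfigurer.configureChatModel(ollamaConfig);
String reply = chatModel.chat("Hello, Ollama!");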
@@ -22,7 +22,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
@JsonTypeInfo(
use = JsonTypeInfo.Id.NAME,
property = "status",
include = JsonTypeInfo.As.PROPERTY,
include = JsonTypeInfo.As.EXISTING_PROPERTY,
visible = true
)
@JsonSubTypes({
@@ -51,9 +51,7 @@ public sealed interface TbChatResponse permits TbChatResponse.Success, TbChatRes
}

record Failure(
@Schema(
description = "A string containing details about the failure"
)
@Schema(description = "A string containing details about the failure")
String errorDetails
) implements TbChatResponse {
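Side note on the @JsonTypeInfo change above: switching include from As.PROPERTY to As.EXISTING_PROPERTY (together with visible = true) tells Jackson to use the "status" value the subtypes already carry as the type discriminator instead of emitting a second, duplicate "status" property. A minimal, hypothetical illustration of the pattern (the types and names below are not the actual TbChatResponse definitions):

import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;

@JsonTypeInfo(
        use = JsonTypeInfo.Id.NAME,
        property = "status",
        include = JsonTypeInfo.As.EXISTING_PROPERTY, // reuse the field declared by each subtype
        visible = true                               // still bind the "status" value into the record component
)
@JsonSubTypes({
        @JsonSubTypes.Type(value = Ok.class, name = "SUCCESS"),
        @JsonSubTypes.Type(value = Err.class, name = "FAILURE")
})
sealed interface Result permits Ok, Err {}

record Ok(String status, String data) implements Result {}

record Err(String status, String errorDetails) implements Result {}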
@@ -24,6 +24,7 @@ import org.thingsboard.server.common.data.ai.model.chat.GitHubModelsChatModelCon
import org.thingsboard.server.common.data.ai.model.chat.GoogleAiGeminiChatModelConfig;
import org.thingsboard.server.common.data.ai.model.chat.GoogleVertexAiGeminiChatModelConfig;
import org.thingsboard.server.common.data.ai.model.chat.MistralAiChatModelConfig;
import org.thingsboard.server.common.data.ai.model.chat.OllamaChatModelConfig;
import org.thingsboard.server.common.data.ai.model.chat.OpenAiChatModelConfig;
import org.thingsboard.server.common.data.ai.provider.AiProvider;
import org.thingsboard.server.common.data.ai.provider.AiProviderConfig;
@@ -34,6 +35,7 @@ import org.thingsboard.server.common.data.ai.provider.GitHubModelsProviderConfig
import org.thingsboard.server.common.data.ai.provider.GoogleAiGeminiProviderConfig;
import org.thingsboard.server.common.data.ai.provider.GoogleVertexAiGeminiProviderConfig;
import org.thingsboard.server.common.data.ai.provider.MistralAiProviderConfig;
import org.thingsboard.server.common.data.ai.provider.OllamaProviderConfig;
import org.thingsboard.server.common.data.ai.provider.OpenAiProviderConfig;

@JsonTypeInfo(
@@ -50,7 +52,8 @@ import org.thingsboard.server.common.data.ai.provider.OpenAiProviderConfig;
@JsonSubTypes.Type(value = MistralAiChatModelConfig.class, name = "MISTRAL_AI"),
@JsonSubTypes.Type(value = AnthropicChatModelConfig.class, name = "ANTHROPIC"),
@JsonSubTypes.Type(value = AmazonBedrockChatModelConfig.class, name = "AMAZON_BEDROCK"),
@JsonSubTypes.Type(value = GitHubModelsChatModelConfig.class, name = "GITHUB_MODELS")
@JsonSubTypes.Type(value = GitHubModelsChatModelConfig.class, name = "GITHUB_MODELS"),
@JsonSubTypes.Type(value = OllamaChatModelConfig.class, name = "OLLAMA")
})
public interface AiModelConfig {

@@ -69,7 +72,8 @@ public interface AiModelConfig {
@JsonSubTypes.Type(value = MistralAiProviderConfig.class, name = "MISTRAL_AI"),
@JsonSubTypes.Type(value = AnthropicProviderConfig.class, name = "ANTHROPIC"),
@JsonSubTypes.Type(value = AmazonBedrockProviderConfig.class, name = "AMAZON_BEDROCK"),
@JsonSubTypes.Type(value = GitHubModelsProviderConfig.class, name = "GITHUB_MODELS")
@JsonSubTypes.Type(value = GitHubModelsProviderConfig.class, name = "GITHUB_MODELS"),
@JsonSubTypes.Type(value = OllamaProviderConfig.class, name = "OLLAMA")
})
AiProviderConfig providerConfig();
@@ -24,7 +24,7 @@ public sealed interface AiChatModelConfig<C extends AiChatModelConfig<C>> extend
permits
OpenAiChatModelConfig, AzureOpenAiChatModelConfig, GoogleAiGeminiChatModelConfig,
GoogleVertexAiGeminiChatModelConfig, MistralAiChatModelConfig, AnthropicChatModelConfig,
AmazonBedrockChatModelConfig, GitHubModelsChatModelConfig {
AmazonBedrockChatModelConfig, GitHubModelsChatModelConfig, OllamaChatModelConfig {

ChatModel configure(Langchain4jChatModelConfigurer configurer);
@@ -33,7 +33,7 @@ public record AmazonBedrockChatModelConfig(
@NotBlank String modelId,
@PositiveOrZero Double temperature,
@Positive @Max(1) Double topP,
@Positive Integer maxOutputTokens,
Integer maxOutputTokens,
@With @Positive Integer timeoutSeconds,
@With @PositiveOrZero Integer maxRetries
) implements AiChatModelConfig<AmazonBedrockChatModelConfig> {

@@ -34,7 +34,7 @@ public record AnthropicChatModelConfig(
@PositiveOrZero Double temperature,
@Positive @Max(1) Double topP,
@PositiveOrZero Integer topK,
@Positive Integer maxOutputTokens,
Integer maxOutputTokens,
@With @Positive Integer timeoutSeconds,
@With @PositiveOrZero Integer maxRetries
) implements AiChatModelConfig<AnthropicChatModelConfig> {

@@ -35,7 +35,7 @@ public record AzureOpenAiChatModelConfig(
@Positive @Max(1) Double topP,
Double frequencyPenalty,
Double presencePenalty,
@Positive Integer maxOutputTokens,
Integer maxOutputTokens,
@With @Positive Integer timeoutSeconds,
@With @PositiveOrZero Integer maxRetries
) implements AiChatModelConfig<AzureOpenAiChatModelConfig> {

@@ -35,7 +35,7 @@ public record GitHubModelsChatModelConfig(
@Positive @Max(1) Double topP,
Double frequencyPenalty,
Double presencePenalty,
@Positive Integer maxOutputTokens,
Integer maxOutputTokens,
@With @Positive Integer timeoutSeconds,
@With @PositiveOrZero Integer maxRetries
) implements AiChatModelConfig<GitHubModelsChatModelConfig> {

@@ -36,7 +36,7 @@ public record GoogleAiGeminiChatModelConfig(
@PositiveOrZero Integer topK,
Double frequencyPenalty,
Double presencePenalty,
@Positive Integer maxOutputTokens,
Integer maxOutputTokens,
@With @Positive Integer timeoutSeconds,
@With @PositiveOrZero Integer maxRetries
) implements AiChatModelConfig<GoogleAiGeminiChatModelConfig> {

@@ -36,7 +36,7 @@ public record GoogleVertexAiGeminiChatModelConfig(
@PositiveOrZero Integer topK,
Double frequencyPenalty,
Double presencePenalty,
@Positive Integer maxOutputTokens,
Integer maxOutputTokens,
@With @Positive Integer timeoutSeconds,
@With @PositiveOrZero Integer maxRetries
) implements AiChatModelConfig<GoogleVertexAiGeminiChatModelConfig> {

@@ -35,4 +35,6 @@ public interface Langchain4jChatModelConfigurer {

ChatModel configureChatModel(GitHubModelsChatModelConfig chatModelConfig);

ChatModel configureChatModel(OllamaChatModelConfig chatModelConfig);

}

@@ -35,7 +35,7 @@ public record MistralAiChatModelConfig(
@Positive @Max(1) Double topP,
Double frequencyPenalty,
Double presencePenalty,
@Positive Integer maxOutputTokens,
Integer maxOutputTokens,
@With @Positive Integer timeoutSeconds,
@With @PositiveOrZero Integer maxRetries
) implements AiChatModelConfig<MistralAiChatModelConfig> {
@@ -0,0 +1,58 @@
/**
* Copyright © 2016-2025 The Thingsboard Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thingsboard.server.common.data.ai.model.chat;

import dev.langchain4j.model.chat.ChatModel;
import jakarta.validation.Valid;
import jakarta.validation.constraints.Max;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Positive;
import jakarta.validation.constraints.PositiveOrZero;
import lombok.Builder;
import lombok.With;
import org.thingsboard.server.common.data.ai.provider.AiProvider;
import org.thingsboard.server.common.data.ai.provider.OllamaProviderConfig;

@Builder
public record OllamaChatModelConfig(
@NotNull @Valid OllamaProviderConfig providerConfig,
@NotBlank String modelId,
@PositiveOrZero Double temperature,
@Positive @Max(1) Double topP,
@PositiveOrZero Integer topK,
Integer contextLength,
Integer maxOutputTokens,
@With @Positive Integer timeoutSeconds,
@With @PositiveOrZero Integer maxRetries
) implements AiChatModelConfig<OllamaChatModelConfig> {

@Override
public AiProvider provider() {
return AiProvider.OLLAMA;
}

@Override
public ChatModel configure(Langchain4jChatModelConfigurer configurer) {
return configurer.configureChatModel(this);
}

@Override
public boolean supportsJsonMode() {
return true;
}

}
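The configure(...) override above is the double-dispatch hook shared by all chat model configs: code that only holds the sealed AiChatModelConfig type hands it the configurer, and the concrete record routes the call to the matching overload. A hedged fragment (variable names are illustrative):

// "ollamaConfig" as built in the earlier sketch; "configurer" is the Langchain4jChatModelConfigurer bean
AiChatModelConfig<?> config = ollamaConfig;
ChatModel chatModel = config.configure(configurer); // resolves to configureChatModel(OllamaChatModelConfig)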
@@ -35,7 +35,7 @@ public record OpenAiChatModelConfig(
@Positive @Max(1) Double topP,
Double frequencyPenalty,
Double presencePenalty,
@Positive Integer maxOutputTokens,
Integer maxOutputTokens,
@With @Positive Integer timeoutSeconds,
@With @PositiveOrZero Integer maxRetries
) implements AiChatModelConfig<OpenAiChatModelConfig> {

@@ -24,6 +24,7 @@ public enum AiProvider {
MISTRAL_AI,
ANTHROPIC,
AMAZON_BEDROCK,
GITHUB_MODELS
GITHUB_MODELS,
OLLAMA

}

@@ -19,4 +19,4 @@ public sealed interface AiProviderConfig
permits
OpenAiProviderConfig, AzureOpenAiProviderConfig, GoogleAiGeminiProviderConfig,
GoogleVertexAiGeminiProviderConfig, MistralAiProviderConfig, AnthropicProviderConfig,
AmazonBedrockProviderConfig, GitHubModelsProviderConfig {}
AmazonBedrockProviderConfig, GitHubModelsProviderConfig, OllamaProviderConfig {}
@@ -0,0 +1,48 @@
/**
* Copyright © 2016-2025 The Thingsboard Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.thingsboard.server.common.data.ai.provider;

import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotNull;

public record OllamaProviderConfig(
@NotNull String baseUrl,
@NotNull @Valid OllamaAuth auth
) implements AiProviderConfig {

@JsonTypeInfo(
use = JsonTypeInfo.Id.NAME,
include = JsonTypeInfo.As.PROPERTY,
property = "type"
)
@JsonSubTypes({
@JsonSubTypes.Type(value = OllamaAuth.None.class, name = "NONE"),
@JsonSubTypes.Type(value = OllamaAuth.Basic.class, name = "BASIC"),
@JsonSubTypes.Type(value = OllamaAuth.Token.class, name = "TOKEN")
})
public sealed interface OllamaAuth {

record None() implements OllamaAuth {}

record Basic(@NotNull String username, @NotNull String password) implements OllamaAuth {}

record Token(@NotNull String token) implements OllamaAuth {}

}

}
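Per the @JsonTypeInfo/@JsonSubTypes declarations above, the auth union is discriminated by a "type" property with the names NONE, BASIC, and TOKEN. A minimal sketch of the JSON this record accepts, round-tripped with a plain Jackson ObjectMapper (the URL and credentials are placeholders; ThingsBoard's actual mapper configuration may differ):

import com.fasterxml.jackson.databind.ObjectMapper;
import org.thingsboard.server.common.data.ai.provider.OllamaProviderConfig;

public class OllamaProviderConfigJsonDemo {
    public static void main(String[] args) throws Exception {
        // "type" selects the OllamaAuth subtype; username/password are placeholder values
        String json = """
                {
                  "baseUrl": "http://localhost:11434",
                  "auth": { "type": "BASIC", "username": "ollama-user", "password": "s3cret" }
                }
                """;
        OllamaProviderConfig config = new ObjectMapper().readValue(json, OllamaProviderConfig.class);
        System.out.println(config.auth()); // Basic[username=ollama-user, password=s3cret]
    }
}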
@@ -60,9 +60,7 @@ public sealed interface TbResponseFormat permits TbTextResponseFormat, TbJsonRes

@Override
public ResponseFormat toLangChainResponseFormat() {
return ResponseFormat.builder()
.type(ResponseFormatType.TEXT)
.build();
return ResponseFormat.TEXT;
}

}
@@ -76,9 +74,7 @@ public sealed interface TbResponseFormat permits TbTextResponseFormat, TbJsonRes

@Override
public ResponseFormat toLangChainResponseFormat() {
return ResponseFormat.builder()
.type(ResponseFormatType.JSON)
.build();
return ResponseFormat.JSON;
}

}
@@ -55,31 +55,34 @@
</mat-option>
</mat-select>
</mat-form-field>
<div formGroupName="providerConfig" class="tb-form-panel no-border no-padding">
<div formGroupName="providerConfig" class="tb-form-panel no-border no-padding no-gap">
@if (providerFieldsList.includes('personalAccessToken')) {
<mat-form-field class="mat-block flex-1" appearance="outline">
<mat-label translate>ai-models.personal-access-token</mat-label>
<input type="password" required matInput formControlName="personalAccessToken" autocomplete="new-password">
<tb-toggle-password matSuffix></tb-toggle-password>
<mat-error *ngIf="aiModelForms.get('configuration').get('providerConfig').get('personalAccessToken').hasError('required')">
<mat-error *ngIf="aiModelForms.get('configuration.providerConfig.personalAccessToken').hasError('required') ||
aiModelForms.get('configuration.providerConfig.personalAccessToken').hasError('pattern')">
{{ 'ai-models.personal-access-token-required' | translate }}
</mat-error>
</mat-form-field>
}
@if (providerFieldsList.includes('projectId')) {
<mat-form-field class="mat-block flex-1" appearance="outline" subscriptSizing="dynamic">
<mat-form-field class="mat-block flex-1" appearance="outline">
<mat-label translate>ai-models.project-id</mat-label>
<input matInput required formControlName="projectId">
<mat-error *ngIf="aiModelForms.get('configuration').get('providerConfig').get('projectId').hasError('required')">
<mat-error *ngIf="aiModelForms.get('configuration.providerConfig.projectId').hasError('required') ||
aiModelForms.get('configuration.providerConfig.projectId').hasError('pattern')">
{{ 'ai-models.project-id-required' | translate }}
</mat-error>
</mat-form-field>
}
@if (providerFieldsList.includes('location')) {
<mat-form-field class="mat-block flex-1" appearance="outline" subscriptSizing="dynamic">
<mat-form-field class="mat-block flex-1" appearance="outline">
<mat-label translate>ai-models.location</mat-label>
<input matInput required formControlName="location">
<mat-error *ngIf="aiModelForms.get('configuration').get('providerConfig').get('location').hasError('required')">
<mat-error *ngIf="aiModelForms.get('configuration.providerConfig.location').hasError('required') ||
aiModelForms.get('configuration.providerConfig.location').hasError('pattern')">
{{ 'ai-models.location-required' | translate }}
</mat-error>
</mat-form-field>
@@ -98,16 +101,17 @@
</tb-file-input>
}
@if (providerFieldsList.includes('endpoint')) {
<mat-form-field class="mat-block flex-1" appearance="outline" subscriptSizing="dynamic">
<mat-form-field class="mat-block flex-1" appearance="outline">
<mat-label translate>ai-models.endpoint</mat-label>
<input required matInput formControlName="endpoint">
<mat-error *ngIf="aiModelForms.get('configuration').get('providerConfig').get('endpoint').hasError('required')">
<mat-error *ngIf="aiModelForms.get('configuration.providerConfig.endpoint').hasError('required') ||
aiModelForms.get('configuration.providerConfig.endpoint').hasError('pattern')">
{{ 'ai-models.endpoint-required' | translate }}
</mat-error>
</mat-form-field>
}
@if (providerFieldsList.includes('serviceVersion')) {
<mat-form-field class="mat-block flex-1" appearance="outline" subscriptSizing="dynamic">
<mat-form-field class="mat-block flex-1" appearance="outline">
<mat-label translate>ai-models.service-version</mat-label>
<input matInput formControlName="serviceVersion">
</mat-form-field>
@@ -117,25 +121,28 @@
<mat-label translate>ai-models.api-key</mat-label>
<input type="password" required matInput formControlName="apiKey" autocomplete="new-password">
<tb-toggle-password matSuffix></tb-toggle-password>
<mat-error *ngIf="aiModelForms.get('configuration').get('providerConfig').get('apiKey').hasError('required')">
<mat-error *ngIf="aiModelForms.get('configuration.providerConfig.apiKey').hasError('required') ||
aiModelForms.get('configuration.providerConfig.apiKey').hasError('pattern')">
{{ 'ai-models.api-key-required' | translate }}
</mat-error>
</mat-form-field>
}
@if (providerFieldsList.includes('region')) {
<mat-form-field class="mat-block flex-1" appearance="outline" subscriptSizing="dynamic">
<mat-form-field class="mat-block flex-1" appearance="outline">
<mat-label translate>ai-models.region</mat-label>
<input required matInput formControlName="region">
<mat-error *ngIf="aiModelForms.get('configuration').get('providerConfig').get('region').hasError('required')">
<mat-error *ngIf="aiModelForms.get('configuration.providerConfig.region').hasError('required') ||
aiModelForms.get('configuration.providerConfig.region').hasError('pattern')">
{{ 'ai-models.region-required' | translate }}
</mat-error>
</mat-form-field>
}
@if (providerFieldsList.includes('accessKeyId')) {
<mat-form-field class="mat-block flex-1" appearance="outline" subscriptSizing="dynamic">
<mat-form-field class="mat-block flex-1" appearance="outline">
<mat-label translate>ai-models.access-key-id</mat-label>
<input required matInput formControlName="accessKeyId">
<mat-error *ngIf="aiModelForms.get('configuration').get('providerConfig').get('accessKeyId').hasError('required')">
<mat-error *ngIf="aiModelForms.get('configuration.providerConfig.accessKeyId').hasError('required') ||
aiModelForms.get('configuration.providerConfig.accessKeyId').hasError('pattern')">
{{ 'ai-models.access-key-id-required' | translate }}
</mat-error>
</mat-form-field>
@@ -145,11 +152,68 @@
<mat-label translate>ai-models.secret-access-key</mat-label>
<input type="password" required matInput formControlName="secretAccessKey" autocomplete="new-password">
<tb-toggle-password matSuffix></tb-toggle-password>
<mat-error *ngIf="aiModelForms.get('configuration').get('providerConfig').get('secretAccessKey').hasError('required')">
<mat-error *ngIf="aiModelForms.get('configuration.providerConfig.secretAccessKey').hasError('required') ||
aiModelForms.get('configuration.providerConfig.secretAccessKey').hasError('pattern')">
{{ 'ai-models.secret-access-key-required' | translate }}
</mat-error>
</mat-form-field>
}
@if (providerFieldsList.includes('baseUrl')) {
<mat-form-field class="mat-block flex-1" appearance="outline">
<mat-label translate>ai-models.baseurl</mat-label>
<input required matInput formControlName="baseUrl">
<mat-error *ngIf="aiModelForms.get('configuration.providerConfig.baseUrl').hasError('required') ||
aiModelForms.get('configuration.providerConfig.baseUrl').hasError('pattern')">
{{ 'ai-models.baseurl-required' | translate }}
</mat-error>
</mat-form-field>
}
@if (provider === aiProvider.OLLAMA) {
<div class="tb-form-panel stroked no-gap no-padding-bottom mb-4" formGroupName="auth">
<div class="flex flex-row items-center justify-between xs:flex-col xs:items-start xs:gap-3">
<div class="tb-form-panel-title" tb-hint-tooltip-icon="{{ authenticationHint }}">
{{ 'ai-models.authentication' | translate }}
</div>
<tb-toggle-select formControlName="type">
<tb-toggle-option [value]="AuthenticationType.NONE">{{ 'ai-models.authentication-type.none' | translate }}</tb-toggle-option>
<tb-toggle-option [value]="AuthenticationType.BASIC">{{ 'ai-models.authentication-type.basic' | translate }}</tb-toggle-option>
<tb-toggle-option [value]="AuthenticationType.TOKEN">{{ 'ai-models.authentication-type.token' | translate }}</tb-toggle-option>
</tb-toggle-select>
</div>
<div class="tb-form-panel no-padding no-border no-gap padding-top">
@if (aiModelForms.get('configuration.providerConfig.auth.type').value === AuthenticationType.BASIC) {
<mat-form-field class="mat-block flex-1" appearance="outline">
<mat-label translate>ai-models.username</mat-label>
<input required matInput formControlName="username">
<mat-error *ngIf="aiModelForms.get('configuration.providerConfig.auth.username').hasError('required') ||
aiModelForms.get('configuration.providerConfig.auth.username').hasError('pattern')">
{{ 'ai-models.username-required' | translate }}
</mat-error>
</mat-form-field>
<mat-form-field class="mat-block flex-1" appearance="outline">
<mat-label translate>ai-models.password</mat-label>
<input type="password" required matInput formControlName="password" autocomplete="new-password">
<tb-toggle-password matSuffix></tb-toggle-password>
<mat-error *ngIf="aiModelForms.get('configuration.providerConfig.auth.password').hasError('required') ||
aiModelForms.get('configuration.providerConfig.auth.password').hasError('pattern')">
{{ 'ai-models.password-required' | translate }}
</mat-error>
</mat-form-field>
}
@if (aiModelForms.get('configuration.providerConfig.auth.type').value === AuthenticationType.TOKEN) {
<mat-form-field class="mat-block flex-1" appearance="outline">
<mat-label translate>ai-models.token</mat-label>
<input type="password" required matInput formControlName="token" autocomplete="new-password">
<tb-toggle-password matSuffix></tb-toggle-password>
<mat-error *ngIf="aiModelForms.get('configuration.providerConfig.auth.token').hasError('required') ||
aiModelForms.get('configuration.providerConfig.auth.token').hasError('pattern')">
{{ 'ai-models.token-required' | translate }}
</mat-error>
</mat-form-field>
}
</div>
</div>
}
</div>
</section>
</section>
@@ -255,15 +319,18 @@
</div>
<mat-form-field appearance="outline" class="number" subscriptSizing="dynamic">
<input matInput formControlName="maxOutputTokens"
type="number" min="1" step="1" placeholder="{{ 'ai-models.set' | translate }}">
<mat-icon matSuffix
matTooltipPosition="above"
matTooltipClass="tb-error-tooltip"
[matTooltip]="'ai-models.max-output-tokens-min' | translate"
*ngIf="aiModelForms.get('configuration').get('maxOutputTokens').hasError('min')"
class="tb-error">
warning
</mat-icon>
type="number" step="1" placeholder="{{ 'ai-models.set' | translate }}">
</mat-form-field>
</div>
}
@if (modelFieldsList.includes('contextLength')) {
<div class="tb-form-row space-between">
<div tb-hint-tooltip-icon="{{ 'ai-models.context-length-hint' | translate }}">
{{ 'ai-models.context-length' | translate }}
</div>
<mat-form-field appearance="outline" class="number" subscriptSizing="dynamic">
<input matInput formControlName="contextLength"
type="number" step="1" placeholder="{{ 'ai-models.set' | translate }}">
</mat-form-field>
</div>
}
@@ -30,6 +30,7 @@ import {
AiModelMap,
AiProvider,
AiProviderTranslations,
AuthenticationType,
ModelType,
ProviderFieldsAllList
} from '@shared/models/ai-model.models';
@@ -37,6 +38,7 @@ import { AiModelService } from '@core/http/ai-model.service';
import { CheckConnectivityDialogComponent } from '@home/components/ai-model/check-connectivity-dialog.component';
import { map } from 'rxjs/operators';
import { deepTrim } from '@core/utils';
import { TranslateService } from '@ngx-translate/core';

export interface AIModelDialogData {
AIModel?: AiModel;
@@ -62,18 +64,23 @@ export class AIModelDialogComponent extends DialogComponent<AIModelDialogCompone
providerMap: AiProvider[] = Object.keys(AiProvider) as AiProvider[];
providerTranslationMap = AiProviderTranslations;

AuthenticationType = AuthenticationType;

provider: AiProvider = AiProvider.OPENAI;

aiModelForms: FormGroup;

isAdd = false;

authenticationHint: string;

constructor(protected store: Store<AppState>,
protected router: Router,
protected dialogRef: MatDialogRef<AIModelDialogComponent, AiModel>,
@Inject(MAT_DIALOG_DATA) public data: AIModelDialogData,
private fb: FormBuilder,
private aiModelService: AiModelService,
private translate: TranslateService,
private dialog: MatDialog) {
super(store, router, dialogRef);
@@ -89,17 +96,24 @@ export class AIModelDialogComponent extends DialogComponent<AIModelDialogCompone
configuration: this.fb.group({
provider: [this.provider, []],
providerConfig: this.fb.group({
apiKey: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.apiKey : '', [Validators.required]],
personalAccessToken: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.personalAccessToken : '', [Validators.required]],
endpoint: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.endpoint : '', [Validators.required]],
apiKey: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.apiKey : '', [Validators.required, Validators.pattern(/.*\S.*/)]],
personalAccessToken: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.personalAccessToken : '', [Validators.required, Validators.pattern(/.*\S.*/)]],
endpoint: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.endpoint : '', [Validators.required, Validators.pattern(/.*\S.*/)]],
serviceVersion: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.serviceVersion : ''],
projectId: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.projectId : '', [Validators.required]],
location: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.location : '', [Validators.required]],
projectId: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.projectId : '', [Validators.required, Validators.pattern(/.*\S.*/)]],
location: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.location : '', [Validators.required, Validators.pattern(/.*\S.*/)]],
serviceAccountKey: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.serviceAccountKey : '', [Validators.required]],
fileName: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.fileName : '', [Validators.required]],
region: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.region : '', [Validators.required]],
accessKeyId: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.accessKeyId : '', [Validators.required]],
secretAccessKey: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.secretAccessKey : '', [Validators.required]],
region: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.region : '', [Validators.required, Validators.pattern(/.*\S.*/)]],
accessKeyId: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.accessKeyId : '', [Validators.required, Validators.pattern(/.*\S.*/)]],
secretAccessKey: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.secretAccessKey : '', [Validators.required, Validators.pattern(/.*\S.*/)]],
baseUrl: [this.data.AIModel ? this.data.AIModel.configuration.providerConfig?.baseUrl : '', [Validators.required, Validators.pattern(/.*\S.*/)]],
auth: this.fb.group({
type: [this.data.AIModel?.configuration?.providerConfig?.auth?.type ?? AuthenticationType.NONE],
username: [this.data.AIModel?.configuration?.providerConfig?.auth?.username ?? '', [Validators.required, Validators.pattern(/.*\S.*/)]],
password: [this.data.AIModel?.configuration?.providerConfig?.auth?.password ?? '', [Validators.required, Validators.pattern(/.*\S.*/)]],
token: [this.data.AIModel?.configuration?.providerConfig?.auth?.token ?? '', [Validators.required, Validators.pattern(/.*\S.*/)]]
})
}),
modelId: [this.data.AIModel ? this.data.AIModel.configuration?.modelId : '', [Validators.required]],
temperature: [this.data.AIModel ? this.data.AIModel.configuration?.temperature : null, [Validators.min(0)]],
@@ -107,7 +121,8 @@ export class AIModelDialogComponent extends DialogComponent<AIModelDialogCompone
topK: [this.data.AIModel ? this.data.AIModel.configuration?.topK : null, [Validators.min(0)]],
frequencyPenalty: [this.data.AIModel ? this.data.AIModel.configuration?.frequencyPenalty : null],
presencePenalty: [this.data.AIModel ? this.data.AIModel.configuration?.presencePenalty : null],
maxOutputTokens: [this.data.AIModel ? this.data.AIModel.configuration?.maxOutputTokens : null, [Validators.min(1)]]
maxOutputTokens: [this.data.AIModel ? this.data.AIModel.configuration?.maxOutputTokens : null],
contextLength: [this.data.AIModel ? this.data.AIModel.configuration?.contextLength : null]
})
});
@@ -118,7 +133,23 @@ export class AIModelDialogComponent extends DialogComponent<AIModelDialogCompone
this.aiModelForms.get('configuration.modelId').reset('');
this.aiModelForms.get('configuration.providerConfig').reset({});
this.updateValidation(provider);
})
});

this.aiModelForms.get('configuration.providerConfig.auth.type').valueChanges.pipe(
takeUntilDestroyed()
).subscribe((type: AuthenticationType) => {
this.getAuthenticationHint(type);
this.aiModelForms.get('configuration.providerConfig.auth.username').disable();
this.aiModelForms.get('configuration.providerConfig.auth.password').disable();
this.aiModelForms.get('configuration.providerConfig.auth.token').disable();
if (type === AuthenticationType.BASIC) {
this.aiModelForms.get('configuration.providerConfig.auth.username').enable();
this.aiModelForms.get('configuration.providerConfig.auth.password').enable();
}
if (type === AuthenticationType.TOKEN) {
this.aiModelForms.get('configuration.providerConfig.auth.token').enable();
}
});

this.updateValidation(this.provider);
}
@@ -130,6 +161,16 @@ export class AIModelDialogComponent extends DialogComponent<AIModelDialogCompone
);
}

private getAuthenticationHint(type: AuthenticationType) {
if (type === AuthenticationType.BASIC) {
this.authenticationHint = this.translate.instant('ai-models.authentication-basic-hint');
} else if (type === AuthenticationType.TOKEN) {
this.authenticationHint = this.translate.instant('ai-models.authentication-token-hint');
} else {
this.authenticationHint = null;
}
}

private updateValidation(provider: AiProvider) {
ProviderFieldsAllList.forEach(key => {
if (AiModelMap.get(provider).providerFieldsList.includes(key)) {
@@ -137,7 +178,13 @@ export class AIModelDialogComponent extends DialogComponent<AIModelDialogCompone
} else {
this.aiModelForms.get('configuration.providerConfig').get(key).disable();
}
})
});
if (provider === AiProvider.OLLAMA) {
this.aiModelForms.get('configuration.providerConfig.auth').enable();
this.aiModelForms.get('configuration.providerConfig.auth.type').patchValue(this.data.AIModel?.configuration?.providerConfig?.auth?.type ?? AuthenticationType.NONE, {emitEvent: true});
} else {
this.aiModelForms.get('configuration.providerConfig.auth').disable();
}
}

get providerFieldsList(): string[] {
@@ -34,6 +34,13 @@ export interface AiModel extends Omit<BaseData<AiModelId>, 'label'>, HasTenantId
region?: string;
accessKeyId?: string;
secretAccessKey?: string;
baseUrl?: string;
auth?: {
type: AuthenticationType;
username?: string;
password?: string;
token?: string
}
};
modelId: string;
temperature?: number;
@@ -42,6 +49,7 @@ export interface AiModel extends Omit<BaseData<AiModelId>, 'label'>, HasTenantId
frequencyPenalty?: number;
presencePenalty?: number;
maxOutputTokens?: number;
contextLength?: number;
}
}
@@ -57,7 +65,8 @@ export enum AiProvider {
MISTRAL_AI = 'MISTRAL_AI',
ANTHROPIC = 'ANTHROPIC',
AMAZON_BEDROCK = 'AMAZON_BEDROCK',
GITHUB_MODELS = 'GITHUB_MODELS'
GITHUB_MODELS = 'GITHUB_MODELS',
OLLAMA = 'OLLAMA'
}

export const AiProviderTranslations = new Map<AiProvider, string>(
@@ -69,7 +78,8 @@ export const AiProviderTranslations = new Map<AiProvider, string>(
[AiProvider.MISTRAL_AI , 'ai-models.ai-providers.mistral-ai'],
[AiProvider.ANTHROPIC , 'ai-models.ai-providers.anthropic'],
[AiProvider.AMAZON_BEDROCK , 'ai-models.ai-providers.amazon-bedrock'],
[AiProvider.GITHUB_MODELS , 'ai-models.ai-providers.github-models']
[AiProvider.GITHUB_MODELS , 'ai-models.ai-providers.github-models'],
[AiProvider.OLLAMA , 'ai-models.ai-providers.ollama']
]
);
@@ -84,10 +94,11 @@ export const ProviderFieldsAllList = [
'serviceVersion',
'region',
'accessKeyId',
'secretAccessKey'
'secretAccessKey',
'baseUrl'
];

export const ModelFieldsAllList = ['temperature', 'topP', 'topK', 'frequencyPenalty', 'presencePenalty', 'maxOutputTokens'];
export const ModelFieldsAllList = ['temperature', 'topP', 'topK', 'frequencyPenalty', 'presencePenalty', 'maxOutputTokens', 'contextLength'];

export const AiModelMap = new Map<AiProvider, { modelList: string[], providerFieldsList: string[], modelFieldsList: string[] }>([
[
@@ -191,6 +202,14 @@ export const AiModelMap = new Map<AiProvider, { modelList: string[], providerFie
modelFieldsList: ['temperature', 'topP', 'frequencyPenalty', 'presencePenalty', 'maxOutputTokens'],
},
],
[
AiProvider.OLLAMA,
{
modelList: [],
providerFieldsList: ['baseUrl'],
modelFieldsList: ['temperature', 'topP', 'topK', 'maxOutputTokens', 'contextLength'],
},
],
]);
export const AiRuleNodeResponseFormatTypeOnlyText: AiProvider[] = [AiProvider.AMAZON_BEDROCK, AiProvider.ANTHROPIC, AiProvider.GITHUB_MODELS];
@@ -216,7 +235,8 @@ export interface AiModelWithUserMsg {
projectId?: string;
location?: string;
serviceAccountKey?: string;
fileName?: string
fileName?: string;
baseUrl?: string;
};
modelId: string;
maxRetries: number;
@@ -228,3 +248,8 @@ export interface CheckConnectivityResult {
status: string;
errorDetails: string;
}
export enum AuthenticationType {
NONE = 'NONE',
BASIC = 'BASIC',
TOKEN = 'TOKEN'
}
@@ -1112,7 +1112,8 @@
"mistral-ai": "Mistral AI",
"anthropic": "Anthropic",
"amazon-bedrock": "Amazon Bedrock",
"github-models": "GitHub Models"
"github-models": "GitHub Models",
"ollama": "Ollama"
},
"name-required": "Name is required.",
"name-max-length": "Name must be 255 characters or less.",
@@ -1155,17 +1156,34 @@
"frequency-penalty": "Frequency penalty",
"frequency-penalty-hint": "Applies a penalty to a token's likelihood that increases based on its frequency in the text.",
"max-output-tokens": "Maximum output tokens",
"max-output-tokens-min": "Must be greater than 0.",
"max-output-tokens-hint": "Sets the maximum number of tokens that the \nmodel can generate in a single response.",
"context-length": "Context length",
"context-length-hint": "Defines the size of the context window in tokens. This value sets the total memory limit for the model, including both the user's input and the generated response.",
"endpoint": "Endpoint",
"endpoint-required": "Endpoint is required.",
"baseurl": "Base URL",
"baseurl-required": "Base URL is required.",
"service-version": "Service version",
"check-connectivity": "Check connectivity",
"check-connectivity-success": "Test request was successful",
"check-connectivity-failed": "Test request failed",
"no-model-matching": "No models matching '{{entity}}' were found.",
"model-required": "Model is required.",
"no-model-text": "No models found."
"no-model-text": "No models found.",
"authentication": "Authentication",
"authentication-basic-hint": "Uses standard HTTP Basic authentication. The username and password will be combined, Base64-encoded, and sent in an \"Authorization\" header with each request to the Ollama server.",
"authentication-token-hint": "Uses Bearer token authentication. The provided token will be sent directly in an \"Authorization\" header with each request to the Ollama server.",
"authentication-type": {
"none": "None",
"basic": "Basic",
"token": "Token"
},
"username": "Username",
"username-required": "Username is required.",
"password": "Password",
"password-required": "Password is required.",
"token": "Token",
"token-required": "Token is required."
},
"confirm-on-exit": {
"message": "You have unsaved changes. Are you sure you want to leave this page?",