|
- package domain
-
- import (
- "math"
- "strings"
- )
-
const (
	// Canonical identifiers for the supported LLM providers. These values
	// are what LLMProviderOptions exposes and what NormalizeLLMProvider
	// validates against, so they must stay lowercase and stable.
	LLMProviderOpenAI    = "openai"
	LLMProviderAnthropic = "anthropic"
	LLMProviderGoogle    = "google"
	LLMProviderXAI       = "xai"
	LLMProviderOllama    = "ollama"

	// Sampling-temperature default and clamp bounds used by
	// NormalizeLLMTemperature.
	defaultLLMTemperature = 0.2
	minLLMTemperature     = 0.0
	maxLLMTemperature     = 2.0
	// Max-token default and clamp bounds used by NormalizeLLMMaxTokens.
	defaultLLMMaxTokens = 1200
	minLLMMaxTokens     = 64
	maxLLMMaxTokens     = 8192
)
-
// LLMModelOption describes a single selectable model for a provider.
// Value is the identifier sent to the provider API; Label is the
// human-readable text shown in the UI (currently identical to Value).
type LLMModelOption struct {
	Value string
	Label string
}
-
// LLMProviderOption describes a selectable LLM provider together with
// the models it offers. Value is one of the LLMProvider* constants.
type LLMProviderOption struct {
	Value  string
	Label  string
	Models []LLMModelOption
}
-
// DefaultLLMProvider returns the provider used when no (or an invalid)
// provider has been configured. See NormalizeLLMProvider.
func DefaultLLMProvider() string {
	return LLMProviderOpenAI
}
-
- func LLMProviderOptions() []LLMProviderOption {
- return []LLMProviderOption{
- {
- Value: LLMProviderOpenAI,
- Label: "OpenAI",
- Models: []LLMModelOption{
- {Value: "gpt-5.2", Label: "gpt-5.2"},
- {Value: "gpt-5.4", Label: "gpt-5.4"},
- },
- },
- {
- Value: LLMProviderAnthropic,
- Label: "Anthropic",
- Models: []LLMModelOption{
- {Value: "claude-sonnet-4-5", Label: "claude-sonnet-4-5"},
- {Value: "claude-opus-4-1", Label: "claude-opus-4-1"},
- },
- },
- {
- Value: LLMProviderGoogle,
- Label: "Google",
- Models: []LLMModelOption{
- {Value: "gemini-2.5-pro", Label: "gemini-2.5-pro"},
- {Value: "gemini-2.5-flash", Label: "gemini-2.5-flash"},
- },
- },
- {
- Value: LLMProviderXAI,
- Label: "xAI",
- Models: []LLMModelOption{
- {Value: "grok-4", Label: "grok-4"},
- {Value: "grok-3-mini", Label: "grok-3-mini"},
- },
- },
- {
- Value: LLMProviderOllama,
- Label: "Ollama",
- Models: []LLMModelOption{
- {Value: "llama3.2", Label: "llama3.2"},
- {Value: "qwen2.5", Label: "qwen2.5"},
- {Value: "mistral", Label: "mistral"},
- },
- },
- }
- }
-
- func LLMModelsByProvider(provider string) []LLMModelOption {
- normalized := NormalizeLLMProvider(provider)
- for _, option := range LLMProviderOptions() {
- if option.Value == normalized {
- out := make([]LLMModelOption, len(option.Models))
- copy(out, option.Models)
- return out
- }
- }
- return nil
- }
-
- func NormalizeLLMProvider(provider string) string {
- value := strings.ToLower(strings.TrimSpace(provider))
- for _, option := range LLMProviderOptions() {
- if option.Value == value {
- return value
- }
- }
- return DefaultLLMProvider()
- }
-
- func NormalizeLLMModel(provider, model string) string {
- models := LLMModelsByProvider(provider)
- if len(models) == 0 {
- return ""
- }
- value := strings.TrimSpace(model)
- for _, option := range models {
- if option.Value == value {
- return value
- }
- }
- return models[0].Value
- }
-
// DefaultLLMTemperature returns the sampling temperature used when none
// has been configured (see NormalizeLLMTemperature).
func DefaultLLMTemperature() float64 {
	return defaultLLMTemperature
}
-
- func NormalizeLLMTemperature(value float64) float64 {
- if math.IsNaN(value) || math.IsInf(value, 0) {
- return defaultLLMTemperature
- }
- if value < minLLMTemperature {
- value = minLLMTemperature
- }
- if value > maxLLMTemperature {
- value = maxLLMTemperature
- }
- return math.Round(value*100) / 100
- }
-
// DefaultLLMMaxTokens returns the completion token budget used when none
// has been configured (see NormalizeLLMMaxTokens).
func DefaultLLMMaxTokens() int {
	return defaultLLMMaxTokens
}
-
- func NormalizeLLMMaxTokens(value int) int {
- if value <= 0 {
- return defaultLLMMaxTokens
- }
- if value < minLLMMaxTokens {
- return minLLMMaxTokens
- }
- if value > maxLLMMaxTokens {
- return maxLLMMaxTokens
- }
- return value
- }
-
- func LLMAPIKeyForProvider(provider string, settings AppSettings) string {
- switch NormalizeLLMProvider(provider) {
- case LLMProviderOpenAI:
- return strings.TrimSpace(settings.OpenAIAPIKeyEncrypted)
- case LLMProviderAnthropic:
- return strings.TrimSpace(settings.AnthropicAPIKeyEncrypted)
- case LLMProviderGoogle:
- return strings.TrimSpace(settings.GoogleAPIKeyEncrypted)
- case LLMProviderXAI:
- return strings.TrimSpace(settings.XAIAPIKeyEncrypted)
- case LLMProviderOllama:
- return strings.TrimSpace(settings.OllamaAPIKeyEncrypted)
- default:
- return ""
- }
- }
|