From 839589c04034121b99fb3ded03859c16dfb3353f Mon Sep 17 00:00:00 2001 From: Jan Svabenik Date: Fri, 27 Mar 2026 07:24:23 +0100 Subject: [PATCH] feat: add llm provider settings foundation --- README.md | 4 +- docs/TARGET_STATE_AND_ROADMAP.md | 2 + internal/app/app.go | 11 ++ internal/domain/llm_settings.go | 108 ++++++++++++++++++ internal/domain/models.go | 22 ++-- internal/httpserver/handlers/ui.go | 98 +++++++++++++--- .../migrations/006_add_llm_settings.sql | 23 ++++ internal/store/sqlite/store.go | 46 +++++++- web/templates/settings.gohtml | 68 ++++++++++- 9 files changed, 353 insertions(+), 29 deletions(-) create mode 100644 internal/domain/llm_settings.go create mode 100644 internal/store/sqlite/migrations/006_add_llm_settings.sql diff --git a/README.md b/README.md index 1b5436f..ef1a0a3 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,7 @@ Die App kann heute: - Drafts anlegen, aktualisieren und im Status `draft` -> `reviewed` -> `submitted` fuehren. - Externen Draft-Intake ueber `POST /api/drafts/intake` verarbeiten (Stammdaten + optional Website-/Stilkontext, kein Direkt-Build). - Globalen Master-Prompt in Settings pflegen sowie Prompt-Bloecke fuer den spaeteren LLM-Flow als Standard konfigurieren. +- Im Settings-/Config-Bereich die LLM-Basiskonfiguration pflegen: aktiver Provider, aktives Modell, Base URL fuer Ollama/kompatible Endpoints sowie getrennte API-Key-Speicher je Provider (OpenAI, Anthropic, Google, xAI, Ollama). - Im Draft-/Build-UI den User-Flow auf Stammdaten, Intake-/Website-Kontext, Stil-Auswahl und Template-Felder fokussieren; Prompt-Interna liegen in Settings. - Interne semantische Zielslots (z. B. `hero.title`, `service_items[n].description`) auf Template-Felder abbilden als Vorbereitung fuer spaeteren LLM-Autofill. - Repeated-Bereiche in semantischen Slots werden block-/rollenbasiert getrennt (z. B. Services/Team/Testimonials pro Item statt Sammel-Slot). @@ -21,6 +22,7 @@ Die App kann heute: Wichtig: - Leadharvester liefert nur Intake-Daten (Stammdaten + optional Kontext) in Drafts. - LLM-Autofill bleibt Assistenz im Review-Flow: Vorschlaege werden separat gespeichert und manuell angewendet; bei LLM-Ausfall greift deterministischer Rule-based Fallback. +- Die neue Provider-/Modell-Konfiguration ist Phase-A-Grundlage fuer spaeteres Routing; der bestehende LLM-Suggestions-Runtimepfad bleibt in diesem Schritt unveraendert. ## Lokaler Start @@ -36,7 +38,7 @@ Wichtig: ## Persistenz Default ist SQLite. -Gespeichert werden Settings, Templates, Manifeste/Felder, Drafts und Site-Builds. +Gespeichert werden Settings (inkl. Prompt-Konfig und LLM-Provider-/Modell-/Key-Grundlagen), Templates, Manifeste/Felder, Drafts und Site-Builds. ## Draft-/Review-Flow diff --git a/docs/TARGET_STATE_AND_ROADMAP.md b/docs/TARGET_STATE_AND_ROADMAP.md index e949f0f..bfb4574 100644 --- a/docs/TARGET_STATE_AND_ROADMAP.md +++ b/docs/TARGET_STATE_AND_ROADMAP.md @@ -42,6 +42,7 @@ Aktueller Stand: - Semantische Zielslots (z. B. `hero.title`, `service_items[n].description`) werden intern auf konkrete Template-Felder gemappt als Vorbereitung fuer spaeteren LLM-Autofill. - Repeated-Sektionen (u. a. Services/Team/Testimonials) werden in der Slot-Vorschau block- und rollentypisch pro Item getrennt statt in Sammel-Slots zusammenzufallen. - LLM-first Suggestion-State fuer Draft-/Build-UI ist vorhanden: Vorschlaege werden separat von Feldwerten gespeichert und per Generate/Regenerate/Apply (global und per Feld) explizit gesteuert; Rule-based bleibt als Fallback/Testpfad aktiv. 
+- Settings-Grundlage fuer spaetere Providerwahl ist vorhanden: aktiver LLM-Provider, aktives Modell, Base URL fuer Ollama/kompatible Endpoints sowie getrennte API-Key-Felder je Provider (OpenAI, Anthropic, Google, xAI, Ollama) sind persistent in `app_settings`. - Technische Felddetails (z. B. Feldpfade/Slots/Suggestion-Metadaten) sind im UI per Debug-Toggle optional einblendbar. - Build-Start erfordert bereits einen Template-Manifest-Status `reviewed`/`validated`. - Prozessuale Review-Gates (z. B. Freigabe-Policy, Rollen, Pflichtchecks pro Feld) sind noch nicht vollstaendig ausgebaut. @@ -108,6 +109,7 @@ Statusmarker: - [-] Stilprofil-Logik unter Beruecksichtigung von `businessType` + Tonalitaet (Kontext wird in den LLM-Pfad uebergeben; Qualitaets-/Governance-Feinschliff offen). - [-] Prompt-/Systemsteuerung (Master-Prompt + Prompt-Bloecke) in Settings in den LLM-Suggestionspfad eingebunden; Build-Flow ohne prominente Prompt-Interna. - [x] Semantische Slot-Mappings zwischen Template-Feldern und Zielrollen als Bruecke fuer LLM-Autofill aktiv genutzt (inkl. verbesserter Trennung in Repeated-Bereichen). +- [-] Phase A Provider-/Modell-Settings-Fundament in Settings/UI/Persistenz umgesetzt (inkl. provider-spezifischer Key-Speicherung); produktive Runtime-Umschaltung pro Provider/Modell folgt in spaeteren Phasen. ### F) Security und Betriebsreife - [ ] Verbindliche Secret-Strategie (verschluesselte Speicherung statt einfacher Platzhalterlogik). diff --git a/internal/app/app.go b/internal/app/app.go index 16615e8..eb7fe7e 100644 --- a/internal/app/app.go +++ b/internal/app/app.go @@ -84,10 +84,20 @@ func New(cfg config.Config) (*App, error) { LanguageOutputMode: "EN", JobPollIntervalSeconds: cfg.PollIntervalSeconds, JobPollTimeoutSeconds: cfg.PollTimeoutSeconds, + LLMActiveProvider: domain.DefaultLLMProvider(), + LLMActiveModel: domain.NormalizeLLMModel(domain.DefaultLLMProvider(), ""), MasterPrompt: domain.SeedMasterPrompt, PromptBlocks: domain.DefaultPromptBlocks(), } if existing, err := settingsStore.GetSettings(context.Background()); err == nil && existing != nil { + baseSettings.LLMActiveProvider = existing.LLMActiveProvider + baseSettings.LLMActiveModel = existing.LLMActiveModel + baseSettings.LLMBaseURL = existing.LLMBaseURL + baseSettings.OpenAIAPIKeyEncrypted = existing.OpenAIAPIKeyEncrypted + baseSettings.AnthropicAPIKeyEncrypted = existing.AnthropicAPIKeyEncrypted + baseSettings.GoogleAPIKeyEncrypted = existing.GoogleAPIKeyEncrypted + baseSettings.XAIAPIKeyEncrypted = existing.XAIAPIKeyEncrypted + baseSettings.OllamaAPIKeyEncrypted = existing.OllamaAPIKeyEncrypted baseSettings.MasterPrompt = existing.MasterPrompt baseSettings.PromptBlocks = existing.PromptBlocks } @@ -102,6 +112,7 @@ func New(cfg config.Config) (*App, error) { server := httpserver.New(cfg.HTTPAddr, logger, func(r chi.Router) { r.Get("/", ui.Home) r.Get("/settings", ui.Settings) + r.Post("/settings/llm", ui.SaveLLMSettings) r.Post("/settings/prompt", ui.SavePromptSettings) r.Get("/templates", ui.Templates) r.Post("/templates/sync", ui.SyncTemplates) diff --git a/internal/domain/llm_settings.go b/internal/domain/llm_settings.go new file mode 100644 index 0000000..a80d0d9 --- /dev/null +++ b/internal/domain/llm_settings.go @@ -0,0 +1,108 @@ +package domain + +import "strings" + +const ( + LLMProviderOpenAI = "openai" + LLMProviderAnthropic = "anthropic" + LLMProviderGoogle = "google" + LLMProviderXAI = "xai" + LLMProviderOllama = "ollama" +) + +type LLMModelOption struct { + Value string + Label string +} + +type 
LLMProviderOption struct { + Value string + Label string + Models []LLMModelOption +} + +func DefaultLLMProvider() string { + return LLMProviderOpenAI +} + +func LLMProviderOptions() []LLMProviderOption { + return []LLMProviderOption{ + { + Value: LLMProviderOpenAI, + Label: "OpenAI", + Models: []LLMModelOption{ + {Value: "gpt-5.2", Label: "gpt-5.2"}, + {Value: "gpt-5.4", Label: "gpt-5.4"}, + }, + }, + { + Value: LLMProviderAnthropic, + Label: "Anthropic", + Models: []LLMModelOption{ + {Value: "claude-sonnet-4-5", Label: "claude-sonnet-4-5"}, + {Value: "claude-opus-4-1", Label: "claude-opus-4-1"}, + }, + }, + { + Value: LLMProviderGoogle, + Label: "Google", + Models: []LLMModelOption{ + {Value: "gemini-2.5-pro", Label: "gemini-2.5-pro"}, + {Value: "gemini-2.5-flash", Label: "gemini-2.5-flash"}, + }, + }, + { + Value: LLMProviderXAI, + Label: "xAI", + Models: []LLMModelOption{ + {Value: "grok-4", Label: "grok-4"}, + {Value: "grok-3-mini", Label: "grok-3-mini"}, + }, + }, + { + Value: LLMProviderOllama, + Label: "Ollama", + Models: []LLMModelOption{ + {Value: "llama3.2", Label: "llama3.2"}, + {Value: "qwen2.5", Label: "qwen2.5"}, + {Value: "mistral", Label: "mistral"}, + }, + }, + } +} + +func LLMModelsByProvider(provider string) []LLMModelOption { + normalized := NormalizeLLMProvider(provider) + for _, option := range LLMProviderOptions() { + if option.Value == normalized { + out := make([]LLMModelOption, len(option.Models)) + copy(out, option.Models) + return out + } + } + return nil +} + +func NormalizeLLMProvider(provider string) string { + value := strings.ToLower(strings.TrimSpace(provider)) + for _, option := range LLMProviderOptions() { + if option.Value == value { + return value + } + } + return DefaultLLMProvider() +} + +func NormalizeLLMModel(provider, model string) string { + models := LLMModelsByProvider(provider) + if len(models) == 0 { + return "" + } + value := strings.TrimSpace(model) + for _, option := range models { + if option.Value == value { + return value + } + } + return models[0].Value +} diff --git a/internal/domain/models.go b/internal/domain/models.go index ff7636d..743bee2 100644 --- a/internal/domain/models.go +++ b/internal/domain/models.go @@ -146,11 +146,19 @@ type DraftContext struct { } type AppSettings struct { - QCBaseURL string `json:"qcBaseUrl"` - QCBearerTokenEncrypted string `json:"qcBearerTokenEncrypted"` - LanguageOutputMode string `json:"languageOutputMode"` - JobPollIntervalSeconds int `json:"jobPollIntervalSeconds"` - JobPollTimeoutSeconds int `json:"jobPollTimeoutSeconds"` - MasterPrompt string `json:"masterPrompt,omitempty"` - PromptBlocks []PromptBlockConfig `json:"promptBlocks,omitempty"` + QCBaseURL string `json:"qcBaseUrl"` + QCBearerTokenEncrypted string `json:"qcBearerTokenEncrypted"` + LanguageOutputMode string `json:"languageOutputMode"` + JobPollIntervalSeconds int `json:"jobPollIntervalSeconds"` + JobPollTimeoutSeconds int `json:"jobPollTimeoutSeconds"` + LLMActiveProvider string `json:"llmActiveProvider,omitempty"` + LLMActiveModel string `json:"llmActiveModel,omitempty"` + LLMBaseURL string `json:"llmBaseUrl,omitempty"` + OpenAIAPIKeyEncrypted string `json:"openAiApiKeyEncrypted,omitempty"` + AnthropicAPIKeyEncrypted string `json:"anthropicApiKeyEncrypted,omitempty"` + GoogleAPIKeyEncrypted string `json:"googleApiKeyEncrypted,omitempty"` + XAIAPIKeyEncrypted string `json:"xaiApiKeyEncrypted,omitempty"` + OllamaAPIKeyEncrypted string `json:"ollamaApiKeyEncrypted,omitempty"` + MasterPrompt string `json:"masterPrompt,omitempty"` + 
PromptBlocks []PromptBlockConfig `json:"promptBlocks,omitempty"` } diff --git a/internal/httpserver/handlers/ui.go b/internal/httpserver/handlers/ui.go index 427eb91..0aab5d7 100644 --- a/internal/httpserver/handlers/ui.go +++ b/internal/httpserver/handlers/ui.go @@ -54,14 +54,24 @@ type homePageData struct { type settingsPageData struct { pageData - QCBaseURL string - PollIntervalSeconds int - PollTimeoutSeconds int - PollMaxConcurrent int - TokenConfigured bool - LanguageOutputMode string - MasterPrompt string - PromptBlocks []domain.PromptBlockConfig + QCBaseURL string + PollIntervalSeconds int + PollTimeoutSeconds int + PollMaxConcurrent int + TokenConfigured bool + LanguageOutputMode string + LLMProviderOptions []domain.LLMProviderOption + LLMModelOptions []domain.LLMModelOption + LLMActiveProvider string + LLMActiveModel string + LLMBaseURL string + OpenAIKeyConfigured bool + AnthropicKeyConfigured bool + GoogleKeyConfigured bool + XAIKeyConfigured bool + OllamaKeyConfigured bool + MasterPrompt string + PromptBlocks []domain.PromptBlockConfig } type templatesPageData struct { @@ -234,16 +244,28 @@ func (u *UI) Home(w http.ResponseWriter, r *http.Request) { func (u *UI) Settings(w http.ResponseWriter, r *http.Request) { settings := u.loadPromptSettings(r.Context()) + activeProvider := domain.NormalizeLLMProvider(settings.LLMActiveProvider) + modelOptions := domain.LLMModelsByProvider(activeProvider) u.render.Render(w, "settings", settingsPageData{ - pageData: basePageData(r, "Settings", "/settings"), - QCBaseURL: u.cfg.QCBaseURL, - PollIntervalSeconds: u.cfg.PollIntervalSeconds, - PollTimeoutSeconds: u.cfg.PollTimeoutSeconds, - PollMaxConcurrent: u.cfg.PollMaxConcurrent, - TokenConfigured: strings.TrimSpace(u.cfg.QCToken) != "", - LanguageOutputMode: "EN", - MasterPrompt: settings.MasterPrompt, - PromptBlocks: settings.PromptBlocks, + pageData: basePageData(r, "Settings", "/settings"), + QCBaseURL: u.cfg.QCBaseURL, + PollIntervalSeconds: u.cfg.PollIntervalSeconds, + PollTimeoutSeconds: u.cfg.PollTimeoutSeconds, + PollMaxConcurrent: u.cfg.PollMaxConcurrent, + TokenConfigured: strings.TrimSpace(u.cfg.QCToken) != "", + LanguageOutputMode: "EN", + LLMProviderOptions: domain.LLMProviderOptions(), + LLMModelOptions: modelOptions, + LLMActiveProvider: activeProvider, + LLMActiveModel: domain.NormalizeLLMModel(activeProvider, settings.LLMActiveModel), + LLMBaseURL: strings.TrimSpace(settings.LLMBaseURL), + OpenAIKeyConfigured: strings.TrimSpace(settings.OpenAIAPIKeyEncrypted) != "", + AnthropicKeyConfigured: strings.TrimSpace(settings.AnthropicAPIKeyEncrypted) != "", + GoogleKeyConfigured: strings.TrimSpace(settings.GoogleAPIKeyEncrypted) != "", + XAIKeyConfigured: strings.TrimSpace(settings.XAIAPIKeyEncrypted) != "", + OllamaKeyConfigured: strings.TrimSpace(settings.OllamaAPIKeyEncrypted) != "", + MasterPrompt: settings.MasterPrompt, + PromptBlocks: settings.PromptBlocks, }) } @@ -262,6 +284,37 @@ func (u *UI) SavePromptSettings(w http.ResponseWriter, r *http.Request) { http.Redirect(w, r, "/settings?msg=prompt+settings+saved", http.StatusSeeOther) } +func (u *UI) SaveLLMSettings(w http.ResponseWriter, r *http.Request) { + if err := r.ParseForm(); err != nil { + http.Redirect(w, r, "/settings?err=invalid+form", http.StatusSeeOther) + return + } + settings := u.loadPromptSettings(r.Context()) + settings.LLMActiveProvider = domain.NormalizeLLMProvider(r.FormValue("llm_provider")) + settings.LLMActiveModel = domain.NormalizeLLMModel(settings.LLMActiveProvider, r.FormValue("llm_model")) + 
settings.LLMBaseURL = strings.TrimSpace(r.FormValue("llm_base_url")) + if value := strings.TrimSpace(r.FormValue("llm_api_key_openai")); value != "" { + settings.OpenAIAPIKeyEncrypted = value + } + if value := strings.TrimSpace(r.FormValue("llm_api_key_anthropic")); value != "" { + settings.AnthropicAPIKeyEncrypted = value + } + if value := strings.TrimSpace(r.FormValue("llm_api_key_google")); value != "" { + settings.GoogleAPIKeyEncrypted = value + } + if value := strings.TrimSpace(r.FormValue("llm_api_key_xai")); value != "" { + settings.XAIAPIKeyEncrypted = value + } + if value := strings.TrimSpace(r.FormValue("llm_api_key_ollama")); value != "" { + settings.OllamaAPIKeyEncrypted = value + } + if err := u.settings.UpsertSettings(r.Context(), settings); err != nil { + http.Redirect(w, r, "/settings?err="+urlQuery(err.Error()), http.StatusSeeOther) + return + } + http.Redirect(w, r, "/settings?msg=llm+settings+saved", http.StatusSeeOther) +} + func (u *UI) Templates(w http.ResponseWriter, r *http.Request) { templates, err := u.templateSvc.ListTemplates(r.Context()) if err != nil { @@ -1598,12 +1651,15 @@ func buildDraftContextFromForm(form buildFormInput, globalData map[string]any) * } func (u *UI) loadPromptSettings(ctx context.Context) domain.AppSettings { + defaultProvider := domain.DefaultLLMProvider() settings := domain.AppSettings{ QCBaseURL: u.cfg.QCBaseURL, QCBearerTokenEncrypted: u.cfg.QCToken, LanguageOutputMode: "EN", JobPollIntervalSeconds: u.cfg.PollIntervalSeconds, JobPollTimeoutSeconds: u.cfg.PollTimeoutSeconds, + LLMActiveProvider: defaultProvider, + LLMActiveModel: domain.NormalizeLLMModel(defaultProvider, ""), MasterPrompt: domain.SeedMasterPrompt, PromptBlocks: domain.DefaultPromptBlocks(), } @@ -1629,6 +1685,14 @@ func (u *UI) loadPromptSettings(ctx context.Context) domain.AppSettings { if stored.JobPollTimeoutSeconds > 0 { settings.JobPollTimeoutSeconds = stored.JobPollTimeoutSeconds } + settings.LLMActiveProvider = domain.NormalizeLLMProvider(stored.LLMActiveProvider) + settings.LLMActiveModel = domain.NormalizeLLMModel(settings.LLMActiveProvider, stored.LLMActiveModel) + settings.LLMBaseURL = strings.TrimSpace(stored.LLMBaseURL) + settings.OpenAIAPIKeyEncrypted = strings.TrimSpace(stored.OpenAIAPIKeyEncrypted) + settings.AnthropicAPIKeyEncrypted = strings.TrimSpace(stored.AnthropicAPIKeyEncrypted) + settings.GoogleAPIKeyEncrypted = strings.TrimSpace(stored.GoogleAPIKeyEncrypted) + settings.XAIAPIKeyEncrypted = strings.TrimSpace(stored.XAIAPIKeyEncrypted) + settings.OllamaAPIKeyEncrypted = strings.TrimSpace(stored.OllamaAPIKeyEncrypted) settings.MasterPrompt = domain.NormalizeMasterPrompt(stored.MasterPrompt) settings.PromptBlocks = domain.NormalizePromptBlocks(stored.PromptBlocks) return settings diff --git a/internal/store/sqlite/migrations/006_add_llm_settings.sql b/internal/store/sqlite/migrations/006_add_llm_settings.sql new file mode 100644 index 0000000..599b70b --- /dev/null +++ b/internal/store/sqlite/migrations/006_add_llm_settings.sql @@ -0,0 +1,23 @@ +ALTER TABLE app_settings +ADD COLUMN llm_active_provider TEXT NOT NULL DEFAULT 'openai'; + +ALTER TABLE app_settings +ADD COLUMN llm_active_model TEXT NOT NULL DEFAULT ''; + +ALTER TABLE app_settings +ADD COLUMN llm_base_url TEXT NOT NULL DEFAULT ''; + +ALTER TABLE app_settings +ADD COLUMN openai_api_key_encrypted TEXT NOT NULL DEFAULT ''; + +ALTER TABLE app_settings +ADD COLUMN anthropic_api_key_encrypted TEXT NOT NULL DEFAULT ''; + +ALTER TABLE app_settings +ADD COLUMN google_api_key_encrypted TEXT NOT 
NULL DEFAULT ''; + +ALTER TABLE app_settings +ADD COLUMN xai_api_key_encrypted TEXT NOT NULL DEFAULT ''; + +ALTER TABLE app_settings +ADD COLUMN ollama_api_key_encrypted TEXT NOT NULL DEFAULT ''; diff --git a/internal/store/sqlite/store.go b/internal/store/sqlite/store.go index 4a3b67f..1e3f384 100644 --- a/internal/store/sqlite/store.go +++ b/internal/store/sqlite/store.go @@ -410,16 +410,29 @@ func (s *Store) UpsertSettings(ctx context.Context, settings domain.AppSettings) if err != nil { return fmt.Errorf("marshal prompt blocks: %w", err) } + provider := domain.NormalizeLLMProvider(settings.LLMActiveProvider) + model := domain.NormalizeLLMModel(provider, settings.LLMActiveModel) _, err = s.db.ExecContext(ctx, ` INSERT INTO app_settings ( - id, qc_base_url, qc_bearer_token_encrypted, language_output_mode, job_poll_interval_seconds, job_poll_timeout_seconds, master_prompt, prompt_blocks_json, updated_at - ) VALUES (1, ?, ?, ?, ?, ?, ?, ?, ?) + id, qc_base_url, qc_bearer_token_encrypted, language_output_mode, job_poll_interval_seconds, job_poll_timeout_seconds, + llm_active_provider, llm_active_model, llm_base_url, + openai_api_key_encrypted, anthropic_api_key_encrypted, google_api_key_encrypted, xai_api_key_encrypted, ollama_api_key_encrypted, + master_prompt, prompt_blocks_json, updated_at + ) VALUES (1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ON CONFLICT(id) DO UPDATE SET qc_base_url = excluded.qc_base_url, qc_bearer_token_encrypted = excluded.qc_bearer_token_encrypted, language_output_mode = excluded.language_output_mode, job_poll_interval_seconds = excluded.job_poll_interval_seconds, job_poll_timeout_seconds = excluded.job_poll_timeout_seconds, + llm_active_provider = excluded.llm_active_provider, + llm_active_model = excluded.llm_active_model, + llm_base_url = excluded.llm_base_url, + openai_api_key_encrypted = excluded.openai_api_key_encrypted, + anthropic_api_key_encrypted = excluded.anthropic_api_key_encrypted, + google_api_key_encrypted = excluded.google_api_key_encrypted, + xai_api_key_encrypted = excluded.xai_api_key_encrypted, + ollama_api_key_encrypted = excluded.ollama_api_key_encrypted, master_prompt = excluded.master_prompt, prompt_blocks_json = excluded.prompt_blocks_json, updated_at = excluded.updated_at`, @@ -428,6 +441,14 @@ func (s *Store) UpsertSettings(ctx context.Context, settings domain.AppSettings) defaultString(settings.LanguageOutputMode, "EN"), settings.JobPollIntervalSeconds, settings.JobPollTimeoutSeconds, + provider, + model, + strings.TrimSpace(settings.LLMBaseURL), + strings.TrimSpace(settings.OpenAIAPIKeyEncrypted), + strings.TrimSpace(settings.AnthropicAPIKeyEncrypted), + strings.TrimSpace(settings.GoogleAPIKeyEncrypted), + strings.TrimSpace(settings.XAIAPIKeyEncrypted), + strings.TrimSpace(settings.OllamaAPIKeyEncrypted), domain.NormalizeMasterPrompt(settings.MasterPrompt), promptBlocksRaw, time.Now().UTC().Format(time.RFC3339Nano), @@ -437,7 +458,10 @@ func (s *Store) UpsertSettings(ctx context.Context, settings domain.AppSettings) func (s *Store) GetSettings(ctx context.Context) (*domain.AppSettings, error) { row := s.db.QueryRowContext(ctx, ` - SELECT qc_base_url, qc_bearer_token_encrypted, language_output_mode, job_poll_interval_seconds, job_poll_timeout_seconds, master_prompt, prompt_blocks_json + SELECT qc_base_url, qc_bearer_token_encrypted, language_output_mode, job_poll_interval_seconds, job_poll_timeout_seconds, + llm_active_provider, llm_active_model, llm_base_url, + openai_api_key_encrypted, anthropic_api_key_encrypted, 
google_api_key_encrypted, xai_api_key_encrypted, ollama_api_key_encrypted, + master_prompt, prompt_blocks_json FROM app_settings WHERE id = 1`) var settings domain.AppSettings @@ -448,6 +472,14 @@ func (s *Store) GetSettings(ctx context.Context) (*domain.AppSettings, error) { &settings.LanguageOutputMode, &settings.JobPollIntervalSeconds, &settings.JobPollTimeoutSeconds, + &settings.LLMActiveProvider, + &settings.LLMActiveModel, + &settings.LLMBaseURL, + &settings.OpenAIAPIKeyEncrypted, + &settings.AnthropicAPIKeyEncrypted, + &settings.GoogleAPIKeyEncrypted, + &settings.XAIAPIKeyEncrypted, + &settings.OllamaAPIKeyEncrypted, &settings.MasterPrompt, &promptBlocksRaw, ); err != nil { @@ -460,6 +492,14 @@ func (s *Store) GetSettings(ctx context.Context) (*domain.AppSettings, error) { if len(promptBlocksRaw) > 0 { _ = json.Unmarshal(promptBlocksRaw, &settings.PromptBlocks) } + settings.LLMActiveProvider = domain.NormalizeLLMProvider(settings.LLMActiveProvider) + settings.LLMActiveModel = domain.NormalizeLLMModel(settings.LLMActiveProvider, settings.LLMActiveModel) + settings.LLMBaseURL = strings.TrimSpace(settings.LLMBaseURL) + settings.OpenAIAPIKeyEncrypted = strings.TrimSpace(settings.OpenAIAPIKeyEncrypted) + settings.AnthropicAPIKeyEncrypted = strings.TrimSpace(settings.AnthropicAPIKeyEncrypted) + settings.GoogleAPIKeyEncrypted = strings.TrimSpace(settings.GoogleAPIKeyEncrypted) + settings.XAIAPIKeyEncrypted = strings.TrimSpace(settings.XAIAPIKeyEncrypted) + settings.OllamaAPIKeyEncrypted = strings.TrimSpace(settings.OllamaAPIKeyEncrypted) settings.PromptBlocks = domain.NormalizePromptBlocks(settings.PromptBlocks) return &settings, nil } diff --git a/web/templates/settings.gohtml b/web/templates/settings.gohtml index dc2c004..499e8c8 100644 --- a/web/templates/settings.gohtml +++ b/web/templates/settings.gohtml @@ -10,7 +10,7 @@ {{if .Msg}}
{{.Msg}}
{{end}} {{if .Err}}
{{.Err}}
{{end}}

Settings

-QC-Settings plus globale Prompt-/Systemsteuerung fuer den spaeteren LLM-Flow.
+QC-Settings plus LLM- und globale Prompt-/Systemsteuerung fuer den spaeteren LLM-Flow.
@@ -20,6 +20,60 @@
 QC Base URL: {{.QCBaseURL}}
 Bearer token configured: {{if .TokenConfigured}}yes{{else}}no{{end}}
 Language output mode: {{.LanguageOutputMode}}
+LLM Provider / Modell
+Phase-A-Grundlage: Provider, Modell, optionale Base URL (Ollama/kompatibel) und provider-spezifische API-Keys.

Globaler Master Prompt

Diese Einstellungen gelten systemweit und werden im normalen Build-/Review-Formular nicht mehr direkt editiert.

@@ -43,6 +97,18 @@ {{end}}
+ {{end}}
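
Note on the settings.gohtml hunk above: the added markup did not survive extraction. Based on the page data fields exposed to the template (LLMProviderOptions, LLMModelOptions, LLMActiveProvider, LLMActiveModel, LLMBaseURL, and the per-provider *KeyConfigured flags) and the form field names read by SaveLLMSettings (llm_provider, llm_model, llm_base_url, llm_api_key_openai/anthropic/google/xai/ollama, posted to /settings/llm), a minimal sketch of the new section could look like the following; the exact markup, CSS classes, labels, and placeholder values are assumptions, not the original template.

<!-- Sketch only: route, form field names, and page data fields come from the diff above; markup, classes, and labels are assumed. -->
<h3>LLM Provider / Modell</h3>
<p>Phase-A-Grundlage: Provider, Modell, optionale Base URL (Ollama/kompatibel) und provider-spezifische API-Keys.</p>
<form method="post" action="/settings/llm">
    <label for="llm_provider">Provider</label>
    <select id="llm_provider" name="llm_provider">
        {{range .LLMProviderOptions}}
        <option value="{{.Value}}" {{if eq .Value $.LLMActiveProvider}}selected{{end}}>{{.Label}}</option>
        {{end}}
    </select>

    <label for="llm_model">Modell</label>
    <select id="llm_model" name="llm_model">
        {{range .LLMModelOptions}}
        <option value="{{.Value}}" {{if eq .Value $.LLMActiveModel}}selected{{end}}>{{.Label}}</option>
        {{end}}
    </select>

    <label for="llm_base_url">Base URL (Ollama/kompatibel)</label>
    <input type="text" id="llm_base_url" name="llm_base_url" value="{{.LLMBaseURL}}" placeholder="http://localhost:11434">

    <label for="llm_api_key_openai">OpenAI API Key {{if .OpenAIKeyConfigured}}(konfiguriert){{end}}</label>
    <input type="password" id="llm_api_key_openai" name="llm_api_key_openai" autocomplete="off">

    <label for="llm_api_key_anthropic">Anthropic API Key {{if .AnthropicKeyConfigured}}(konfiguriert){{end}}</label>
    <input type="password" id="llm_api_key_anthropic" name="llm_api_key_anthropic" autocomplete="off">

    <label for="llm_api_key_google">Google API Key {{if .GoogleKeyConfigured}}(konfiguriert){{end}}</label>
    <input type="password" id="llm_api_key_google" name="llm_api_key_google" autocomplete="off">

    <label for="llm_api_key_xai">xAI API Key {{if .XAIKeyConfigured}}(konfiguriert){{end}}</label>
    <input type="password" id="llm_api_key_xai" name="llm_api_key_xai" autocomplete="off">

    <label for="llm_api_key_ollama">Ollama API Key {{if .OllamaKeyConfigured}}(konfiguriert){{end}}</label>
    <input type="password" id="llm_api_key_ollama" name="llm_api_key_ollama" autocomplete="off">

    <button type="submit">Save LLM settings</button>
</form>

Leaving an API-key input empty keeps the stored value, since SaveLLMSettings only overwrites a key when the submitted value is non-empty; the selected provider and model are re-normalized server-side via NormalizeLLMProvider and NormalizeLLMModel.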