{{define "settings"}}
{{/*
  Settings page template.
  Renders three areas:
    1. Read-only QC poller configuration (base URL, token state, poll tuning).
    2. LLM provider/model form posting to /settings/llm (validate via formaction).
    3. Global master-prompt + prompt-block editor posting to /settings/prompt.
  The inline script keeps the model <select> in sync with the chosen provider
  using a catalog rendered server-side from .LLMProviderOptions.
*/}}
<!doctype html>
<html lang="en">
<head>
  <title>{{.Title}}</title>
  {{template "head" .}}
</head>
<body>
  {{template "nav" .}}
  {{/* Flash messages: live-region roles so screen readers announce them. */}}
  {{if .Msg}}<div class="flash flash-ok" role="status">{{.Msg}}</div>{{end}}
  {{if .Err}}<div class="flash flash-err" role="alert">{{.Err}}</div>{{end}}
  <h1>Settings</h1>
  <p>QC-Settings plus LLM- und globale Prompt-/Systemsteuerung fuer den spaeteren LLM-Flow.</p>
  {{/* Read-only snapshot of the current QC configuration. */}}
  <table>
    <tr><th scope="row">QC Base URL</th><td class="mono">{{.QCBaseURL}}</td></tr>
    <tr><th scope="row">Bearer token configured</th><td>{{if .TokenConfigured}}yes{{else}}no{{end}}</td></tr>
    <tr><th scope="row">Poll interval (seconds)</th><td>{{.PollIntervalSeconds}}</td></tr>
    <tr><th scope="row">Poll timeout (seconds)</th><td>{{.PollTimeoutSeconds}}</td></tr>
    <tr><th scope="row">Poll max concurrent</th><td>{{.PollMaxConcurrent}}</td></tr>
    <tr><th scope="row">Language output mode</th><td>{{.LanguageOutputMode}}</td></tr>
  </table>

  <h2>LLM Provider / Modell</h2>
  <p><small>Provider-/Modellwahl mit statischem provider-aware Katalog (spaeter erweiterbar um dynamisches Refresh), Runtime-Tuning und provider-spezifischen Keys.</small></p>
  <form method="post" action="/settings/llm">
    <div>
      <label>Provider
        <select id="llm-provider" name="llm_provider">
          {{range .LLMProviderOptions}}
          <option value="{{.Value}}" {{if eq $.LLMActiveProvider .Value}}selected{{end}}>{{.Label}}</option>
          {{end}}
        </select>
      </label>
    </div>
    <div>
      {{/* data-selected carries the persisted model so the script can restore it after a provider switch. */}}
      <label>Model
        <select id="llm-model" name="llm_model" data-selected="{{.LLMActiveModel}}">
          {{range .LLMModelOptions}}
          <option value="{{.Value}}" {{if eq $.LLMActiveModel .Value}}selected{{end}}>{{.Label}}</option>
          {{end}}
        </select>
      </label>
    </div>
    {{/* Base URL only applies to Ollama-compatible endpoints; hidden otherwise (script toggles it client-side). */}}
    <div id="llm-base-url-wrap" {{if ne .LLMActiveProvider "ollama"}}style="display:none;"{{end}}>
      <label>Base URL (nur Ollama / kompatible Endpoints)
        <input type="url" name="llm_base_url" placeholder="http://localhost:11434/v1" value="{{.LLMBaseURL}}">
      </label>
    </div>
    <div>
      <label>Temperature (0.0 - 2.0)
        <input type="number" name="llm_temperature" min="0" max="2" step="0.01" value="{{printf "%.2f" .LLMTemperature}}">
      </label>
    </div>
    <div>
      <label>Max Tokens (64 - 8192)
        <input type="number" name="llm_max_tokens" min="64" max="8192" step="1" value="{{.LLMMaxTokens}}">
      </label>
    </div>
    {{/* API keys are write-only: an empty field leaves the stored key unchanged. */}}
    <div>
      <label>OpenAI API Key ({{if .OpenAIKeyConfigured}}configured{{else}}not configured{{end}})
        <input type="password" name="llm_api_key_openai" placeholder="leer lassen = unveraendert">
      </label>
    </div>
    <div>
      <label>Anthropic API Key ({{if .AnthropicKeyConfigured}}configured{{else}}not configured{{end}})
        <input type="password" name="llm_api_key_anthropic" placeholder="leer lassen = unveraendert">
      </label>
    </div>
    <div>
      <label>Google API Key ({{if .GoogleKeyConfigured}}configured{{else}}not configured{{end}})
        <input type="password" name="llm_api_key_google" placeholder="leer lassen = unveraendert">
      </label>
    </div>
    <div>
      <label>xAI API Key ({{if .XAIKeyConfigured}}configured{{else}}not configured{{end}})
        <input type="password" name="llm_api_key_xai" placeholder="leer lassen = unveraendert">
      </label>
    </div>
    <div>
      <label>Ollama API Key (optional; {{if .OllamaKeyConfigured}}configured{{else}}not configured{{end}})
        <input type="password" name="llm_api_key_ollama" placeholder="leer lassen = unveraendert">
      </label>
    </div>
    {{/* First button overrides the form action to validate without saving. */}}
    <button type="submit" formaction="/settings/llm/validate">Validate provider config</button>
    <button type="submit">LLM-Settings speichern</button>
  </form>

  <h2>Globaler Master Prompt</h2>
  <p><small>Diese Einstellungen gelten systemweit und werden im normalen Build-/Review-Formular nicht mehr direkt editiert.</small></p>
  <form method="post" action="/settings/prompt">
    {{/* Block count lets the handler iterate the indexed prompt_block_* fields. */}}
    <input type="hidden" name="prompt_block_count" value="{{len .PromptBlocks}}">
    <div>
      <label>Master Prompt
        <textarea name="master_prompt">{{.MasterPrompt}}</textarea>
      </label>
    </div>
    <h3>Prompt-Bloecke (Standard)</h3>
    {{range $i, $block := .PromptBlocks}}
    <input type="hidden" name="prompt_block_id_{{$i}}" value="{{$block.ID}}">
    <div>
      <label>
        <input type="checkbox" name="prompt_block_enabled_{{$i}}" {{if $block.Enabled}}checked{{end}}>
        {{$block.Label}}
      </label>
      <input type="hidden" name="prompt_block_label_{{$i}}" value="{{$block.Label}}">
      <textarea name="prompt_block_instruction_{{$i}}">{{$block.Instruction}}</textarea>
    </div>
    {{end}}
    <button type="submit">Prompt-Settings speichern</button>
  </form>
  <script>
  (function () {
    var provider = document.getElementById('llm-provider');
    var model = document.getElementById('llm-model');
    var baseUrlWrap = document.getElementById('llm-base-url-wrap');
    if (!provider || !baseUrlWrap || !model) return;
    // Server-rendered provider -> model catalog; html/template escapes the
    // interpolated values for the script context.
    var modelCatalog = {
      {{range $provider := .LLMProviderOptions}}
      "{{$provider.Value}}": [
        {{range $idx, $model := $provider.Models}}{{if $idx}},{{end}}{"value":"{{$model.Value}}","label":"{{$model.Label}}"}{{end}}
      ],
      {{end}}
    };
    // Remember the user's model choice per provider so switching back restores it.
    var selectedByProvider = {};
    selectedByProvider[provider.value] = model.dataset.selected || model.value;
    var syncModelOptions = function () {
      var providerValue = provider.value;
      var options = modelCatalog[providerValue] || [];
      var preferred = selectedByProvider[providerValue] || model.value;
      model.innerHTML = "";
      options.forEach(function (entry, idx) {
        var option = document.createElement('option');
        option.value = entry.value;
        option.textContent = entry.label;
        // Fall back to the first catalog entry when nothing is remembered.
        if (entry.value === preferred || (!preferred && idx === 0)) {
          option.selected = true;
        }
        model.appendChild(option);
      });
    };
    var syncBaseURLVisibility = function () {
      baseUrlWrap.style.display = provider.value === 'ollama' ? '' : 'none';
    };
    provider.addEventListener('change', function () {
      syncModelOptions();
      syncBaseURLVisibility();
    });
    model.addEventListener('change', function () {
      selectedByProvider[provider.value] = model.value;
    });
    syncModelOptions();
    syncBaseURLVisibility();
  })();
  </script>
</body>
</html>
{{end}}