AX Agent 설정과 채팅 UI를 Claude형 구조로 재구성
Some checks failed
Release Gate / gate (push) Has been cancelled

This commit is contained in:
2026-04-04 17:48:51 +09:00
parent 90c2f15e96
commit a027ea4f9a
6000 changed files with 11532 additions and 94063 deletions

View File

@@ -388,6 +388,24 @@ public static class AgentHookRunner
}
}
if (ctxProp.ValueKind == JsonValueKind.Object)
{
// 구조화된 컨텍스트 오브젝트도 허용합니다.
// 우선순위: message > content > text
foreach (var key in new[] { "message", "content", "text" })
{
if (!ctxProp.TryGetProperty(key, out var value) || value.ValueKind != JsonValueKind.String)
continue;
var text = value.GetString()?.Trim();
if (!string.IsNullOrWhiteSpace(text))
{
additionalContext = text;
return true;
}
}
}
return false;
}
}

View File

@@ -39,17 +39,26 @@ public static class ContextCondenser
/// 2단계: 이전 대화 LLM 요약 (토큰이 여전히 높으면)
/// </summary>
public static async Task<bool> CondenseIfNeededAsync(
List<ChatMessage> messages, LlmService llm, int maxOutputTokens, CancellationToken ct = default)
List<ChatMessage> messages,
LlmService llm,
int maxOutputTokens,
bool proactiveEnabled = true,
int triggerPercent = 80,
bool force = false,
CancellationToken ct = default)
{
if (messages.Count < 6) return false;
if (!force && !proactiveEnabled) return false;
// 현재 모델의 입력 토큰 한도
var settings = llm.GetCurrentModelInfo();
var inputLimit = GetModelInputLimit(settings.service, settings.model);
var threshold = (int)(inputLimit * 0.65); // 65%에서 압축 시작
var effectiveMax = maxOutputTokens > 0 ? Math.Min(inputLimit, maxOutputTokens) : inputLimit;
var percent = Math.Clamp(triggerPercent, 50, 95);
var threshold = (int)(effectiveMax * (percent / 100.0)); // 설정 임계치에서 압축 시작
var currentTokens = TokenEstimator.EstimateMessages(messages);
if (currentTokens < threshold) return false;
if (!force && currentTokens < threshold) return false;
bool didCompress = false;
@@ -58,7 +67,7 @@ public static class ContextCondenser
// 1단계 후 다시 추정
currentTokens = TokenEstimator.EstimateMessages(messages);
if (currentTokens < threshold) return didCompress;
if (!force && currentTokens < threshold) return didCompress;
// ── 2단계: 이전 대화 LLM 요약 ──
didCompress |= await SummarizeOldMessagesAsync(messages, llm, ct);

View File

@@ -55,6 +55,9 @@ public class HttpTool : IAgentTool
public async Task<ToolResult> ExecuteAsync(JsonElement args, AgentContext context, CancellationToken ct = default)
{
if (AxCopilot.Services.OperationModePolicy.IsInternal(context.OperationMode))
return ToolResult.Fail("사내모드에서는 HTTP 도구 실행이 차단됩니다. operationMode=external에서만 사용할 수 있습니다.");
var method = args.GetProperty("method").GetString()?.ToUpperInvariant() ?? "GET";
var url = args.GetProperty("url").GetString() ?? "";
var body = args.TryGetProperty("body", out var b) ? b.GetString() ?? "" : "";

View File

@@ -38,6 +38,9 @@ public class OpenExternalTool : IAgentTool
if (rawPath.StartsWith("http://", StringComparison.OrdinalIgnoreCase) ||
rawPath.StartsWith("https://", StringComparison.OrdinalIgnoreCase))
{
if (AxCopilot.Services.OperationModePolicy.IsInternal(context.OperationMode))
return Task.FromResult(ToolResult.Fail("사내모드에서는 외부 URL 열기가 차단됩니다. operationMode=external에서만 사용할 수 있습니다."));
Process.Start(new ProcessStartInfo(rawPath) { UseShellExecute = true });
return Task.FromResult(ToolResult.Ok($"URL 열기: {rawPath}"));
}

View File

@@ -124,12 +124,12 @@ public static class PermissionModeCatalog
var normalized = NormalizeGlobalMode(mode);
return normalized switch
{
Default => "소극 활용",
AcceptEdits => "적극 활용",
Plan => "계획 중심",
BypassPermissions => "완전 자동",
Default => "권한 요청",
AcceptEdits => "편집 자동 승인",
Plan => "계획 모드",
BypassPermissions => "권한 건너뛰기",
DontAsk => "질문 없이 진행",
Deny => "활용하지 않음",
Deny => "읽기 전용",
_ => normalized,
};
}

View File

@@ -14,32 +14,32 @@ internal static class PermissionModePresentationCatalog
new PermissionModePresentation(
PermissionModeCatalog.Deny,
"\uE711",
"활용하지 않음",
"읽기 전용",
"파일 읽기만 허용하고 생성/수정/삭제는 차단합니다.",
"#107C10"),
new PermissionModePresentation(
PermissionModeCatalog.Default,
"\uE8D7",
"소극 활용",
"변경 전 확인하고, 필요한 경우에만 파일 접근을 진행합니다.",
"권한 요청",
"변경하기 전에 항상 확인합니다.",
"#2563EB"),
new PermissionModePresentation(
PermissionModeCatalog.AcceptEdits,
"\uE73E",
"적극 활용",
"파일 편집 도구를 자동 승인하고 명령 실행은 계속 확인합니다.",
"편집 자동 승인",
"모든 파일 편집 자동 승인합니다.",
"#107C10"),
new PermissionModePresentation(
PermissionModeCatalog.Plan,
"\uE7C3",
"계획 중심",
"기 전 계획과 승인 흐름을 우선합니다.",
"계획 모드",
"변경하기 전 계획을 먼저 만듭니다.",
"#4338CA"),
new PermissionModePresentation(
PermissionModeCatalog.BypassPermissions,
"\uE814",
"완전 자동",
"권한 확인을 대부분 생략합니다. 민감한 작업은 주의하세요.",
"권한 건너뛰기",
"모든 권한을 허용합니다.",
"#B45309"),
new PermissionModePresentation(
PermissionModeCatalog.DontAsk,

View File

@@ -47,6 +47,7 @@ public class ClipboardHistoryService : IDisposable
private HwndSource? _msgSource;
private readonly object _lock = new();
private volatile bool _ignoreNext; // 자체 클립보드 조작 시 히스토리 추가 방지
private uint _lastClipboardSequenceNumber;
private bool _disposed;
private readonly List<ClipboardEntry> _history = new();
@@ -332,7 +333,19 @@ public class ClipboardHistoryService : IDisposable
private void OnClipboardUpdate()
{
if (_ignoreNext) { _ignoreNext = false; return; }
var sequence = GetClipboardSequenceNumber();
if (_ignoreNext)
{
_ignoreNext = false;
_lastClipboardSequenceNumber = sequence;
return;
}
if (sequence != 0 && sequence == _lastClipboardSequenceNumber)
return;
_lastClipboardSequenceNumber = sequence;
if (!_settings.Settings.ClipboardHistory.Enabled) return;
Application.Current.Dispatcher.Invoke(() =>
@@ -529,6 +542,9 @@ public class ClipboardHistoryService : IDisposable
[DllImport("user32.dll", SetLastError = true)]
private static extern bool RemoveClipboardFormatListener(IntPtr hwnd);
[DllImport("user32.dll")]
private static extern uint GetClipboardSequenceNumber();
}
/// <summary>클립보드 히스토리 단일 항목. 텍스트 또는 이미지 중 하나를 담습니다.</summary>

View File

@@ -430,12 +430,11 @@ public partial class LlmService
private async Task<List<ContentBlock>> SendOpenAiWithToolsAsync(
List<ChatMessage> messages, IReadOnlyCollection<IAgentTool> tools, CancellationToken ct)
{
var llm = _settings.Settings.Llm;
var activeService = ResolveService();
var body = BuildOpenAiToolBody(messages, tools);
// 등록 모델의 커스텀 엔드포인트 우선 사용 (ResolveServerInfo)
var (resolvedEp, _) = ResolveServerInfo();
var (resolvedEp, _, allowInsecureTls) = ResolveServerInfo();
var endpoint = string.IsNullOrEmpty(resolvedEp)
? ResolveEndpointForService(activeService)
: resolvedEp;
@@ -452,7 +451,7 @@ public partial class LlmService
// CP4D 또는 Bearer 인증 적용
await ApplyAuthHeaderAsync(req, ct);
using var resp = await _http.SendAsync(req, ct);
using var resp = await SendWithTlsAsync(req, allowInsecureTls, ct);
if (!resp.IsSuccessStatusCode)
{
var errBody = await resp.Content.ReadAsStringAsync(ct);

View File

@@ -20,6 +20,7 @@ public record TokenUsage(int PromptTokens, int CompletionTokens)
public partial class LlmService : IDisposable
{
private readonly HttpClient _http;
private readonly HttpClient _httpInsecure;
private readonly SettingsService _settings;
private string? _systemPrompt;
@@ -136,6 +137,34 @@ public partial class LlmService : IDisposable
private string? ResolveReasoningEffort() => _reasoningEffortOverride;
private static bool LooksLikeEncryptedPayload(string value)
{
if (string.IsNullOrWhiteSpace(value) || value.Length < 32 || (value.Length % 4) != 0)
return false;
foreach (var ch in value)
{
var isBase64 = (ch >= 'A' && ch <= 'Z')
|| (ch >= 'a' && ch <= 'z')
|| (ch >= '0' && ch <= '9')
|| ch is '+' or '/' or '=';
if (!isBase64) return false;
}
return true;
}
private static string ResolveSecretValue(string raw, bool encryptionEnabled)
{
if (string.IsNullOrWhiteSpace(raw)) return "";
if (raw.Trim() == "(저장됨)") return "";
if (!encryptionEnabled) return raw.Trim();
var decrypted = CryptoService.DecryptIfEnabled(raw, encryptionEnabled).Trim();
if (string.IsNullOrWhiteSpace(decrypted)) return "";
if (string.Equals(decrypted, raw, StringComparison.Ordinal) && LooksLikeEncryptedPayload(raw))
return "";
return decrypted;
}
/// <summary>지정 서비스의 API 키를 반환합니다.</summary>
private string ResolveApiKeyForService(string service)
{
@@ -144,8 +173,8 @@ public partial class LlmService : IDisposable
{
"gemini" => llm.GeminiApiKey,
"sigmoid" => llm.ClaudeApiKey,
"vllm" => CryptoService.DecryptIfEnabled(llm.VllmApiKey, llm.EncryptionEnabled),
"ollama" => CryptoService.DecryptIfEnabled(llm.OllamaApiKey, llm.EncryptionEnabled),
"vllm" => ResolveSecretValue(llm.VllmApiKey, llm.EncryptionEnabled),
"ollama" => ResolveSecretValue(llm.OllamaApiKey, llm.EncryptionEnabled),
_ => "",
};
}
@@ -165,13 +194,41 @@ public partial class LlmService : IDisposable
/// <summary>가장 최근 요청의 토큰 사용량. 스트리밍/비스트리밍 완료 후 갱신됩니다.</summary>
public TokenUsage? LastTokenUsage { get; private set; }
public record RuntimeConnectionSnapshot(
string Service,
string Model,
string Endpoint,
bool AllowInsecureTls,
bool HasApiKey);
public LlmService(SettingsService settings)
{
_settings = settings;
_http = new HttpClient { Timeout = TimeSpan.FromMinutes(10) };
var insecureHandler = new HttpClientHandler
{
ServerCertificateCustomValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator
};
_httpInsecure = new HttpClient(insecureHandler) { Timeout = TimeSpan.FromMinutes(10) };
LoadSystemPrompt();
}
public RuntimeConnectionSnapshot GetRuntimeConnectionSnapshot()
{
var service = ResolveService();
var model = ResolveModel();
var (endpoint, apiKey, allowInsecureTls) = ResolveServerInfo();
if (string.IsNullOrWhiteSpace(endpoint))
endpoint = ResolveEndpointForService(service);
return new RuntimeConnectionSnapshot(
service,
model,
endpoint ?? "",
allowInsecureTls,
!string.IsNullOrWhiteSpace(apiKey));
}
// ─── 시스템 프롬프트 (빌드 경로에서 동적 로딩) ─────────────────────────
private void LoadSystemPrompt()
@@ -200,7 +257,7 @@ public partial class LlmService : IDisposable
/// 현재 활성 모델에 매칭되는 RegisteredModel을 찾아 엔드포인트/API키를 반환합니다.
/// RegisteredModel에 전용 서버 정보가 있으면 그것을 사용하고, 없으면 기본 설정을 사용합니다.
/// </summary>
private (string Endpoint, string ApiKey) ResolveServerInfo()
private (string Endpoint, string ApiKey, bool AllowInsecureTls) ResolveServerInfo()
{
var llm = _settings.Settings.Llm;
var activeService = ResolveService();
@@ -209,20 +266,26 @@ public partial class LlmService : IDisposable
// RegisteredModel에서 현재 모델과 서비스가 일치하는 항목 검색
var registered = FindRegisteredModel(llm, activeService, modelName);
if (registered != null && !string.IsNullOrEmpty(registered.Endpoint))
if (registered != null)
{
var endpoint = string.IsNullOrWhiteSpace(registered.Endpoint)
? ResolveEndpointForService(activeService)
: registered.Endpoint;
var apiKey = !string.IsNullOrEmpty(registered.ApiKey)
? CryptoService.DecryptIfEnabled(registered.ApiKey, llm.EncryptionEnabled)
? ResolveSecretValue(registered.ApiKey, llm.EncryptionEnabled)
: GetDefaultApiKey(llm, activeService);
return (registered.Endpoint, apiKey);
var allowInsecureTls = activeService == "vllm"
? (registered.AllowInsecureTls || llm.VllmAllowInsecureTls)
: false;
return (endpoint, apiKey, allowInsecureTls);
}
// 기본 엔드포인트 사용
return activeService.ToLowerInvariant() switch
{
"vllm" => (llm.VllmEndpoint, CryptoService.DecryptIfEnabled(llm.VllmApiKey, llm.EncryptionEnabled)),
"ollama" => (llm.OllamaEndpoint, CryptoService.DecryptIfEnabled(llm.OllamaApiKey, llm.EncryptionEnabled)),
_ => ("", ""),
"vllm" => (llm.VllmEndpoint, ResolveSecretValue(llm.VllmApiKey, llm.EncryptionEnabled), llm.VllmAllowInsecureTls),
"ollama" => (llm.OllamaEndpoint, ResolveSecretValue(llm.OllamaApiKey, llm.EncryptionEnabled), false),
_ => ("", "", false),
};
}
@@ -258,7 +321,7 @@ public partial class LlmService : IDisposable
}
// 기본 Bearer 인증 — 기존 API 키 반환
var (_, apiKey) = ResolveServerInfo();
var (_, apiKey, _) = ResolveServerInfo();
return string.IsNullOrEmpty(apiKey) ? null : apiKey;
}
@@ -278,8 +341,8 @@ public partial class LlmService : IDisposable
var svc = service ?? llm.Service;
return svc.ToLowerInvariant() switch
{
"vllm" => CryptoService.DecryptIfEnabled(llm.VllmApiKey, llm.EncryptionEnabled),
"ollama" => CryptoService.DecryptIfEnabled(llm.OllamaApiKey, llm.EncryptionEnabled),
"vllm" => ResolveSecretValue(llm.VllmApiKey, llm.EncryptionEnabled),
"ollama" => ResolveSecretValue(llm.OllamaApiKey, llm.EncryptionEnabled),
_ => "",
};
}
@@ -356,7 +419,7 @@ public partial class LlmService : IDisposable
try
{
var llm = _settings.Settings.Llm;
var normalizedService = NormalizeServiceName(llm.Service);
var normalizedService = ResolveService();
if (OperationModePolicy.IsInternal(_settings.Settings) && IsExternalLlmService(normalizedService))
{
var blockedName = normalizedService == "sigmoid" ? "Claude" : "Gemini";
@@ -366,16 +429,26 @@ public partial class LlmService : IDisposable
switch (normalizedService)
{
case "ollama":
var resp = await _http.GetAsync(llm.Endpoint.TrimEnd('/') + "/api/tags");
{
var (endpoint, _, _) = ResolveServerInfo();
var ep = string.IsNullOrWhiteSpace(endpoint) ? ResolveEndpointForService("ollama") : endpoint;
var resp = await _http.GetAsync(ep.TrimEnd('/') + "/api/tags");
return resp.IsSuccessStatusCode
? (true, "Ollama 연결 성공")
: (false, ClassifyHttpError(resp));
}
case "vllm":
var vResp = await _http.GetAsync(llm.Endpoint.TrimEnd('/') + "/v1/models");
{
var (endpoint, _, allowInsecureTls) = ResolveServerInfo();
var ep = string.IsNullOrWhiteSpace(endpoint) ? ResolveEndpointForService("vllm") : endpoint;
using var vReq = new HttpRequestMessage(HttpMethod.Get, ep.TrimEnd('/') + "/v1/models");
await ApplyAuthHeaderAsync(vReq, CancellationToken.None);
using var vResp = await SendWithTlsAsync(vReq, allowInsecureTls, CancellationToken.None);
return vResp.IsSuccessStatusCode
? (true, "vLLM 연결 성공")
: (false, ClassifyHttpError(vResp));
}
case "gemini":
var gKey = ResolveApiKeyForService("gemini");
@@ -424,10 +497,10 @@ public partial class LlmService : IDisposable
private async Task<string> SendOllamaAsync(List<ChatMessage> messages, CancellationToken ct)
{
var llm = _settings.Settings.Llm;
var (endpoint, _) = ResolveServerInfo();
var (endpoint, _, allowInsecureTls) = ResolveServerInfo();
var ep = string.IsNullOrEmpty(endpoint) ? llm.Endpoint : endpoint;
var body = BuildOllamaBody(messages, stream: false);
var resp = await PostJsonWithRetryAsync(ep.TrimEnd('/') + "/api/chat", body, ct);
var resp = await PostJsonWithRetryAsync(ep.TrimEnd('/') + "/api/chat", body, allowInsecureTls, ct);
return SafeParseJson(resp, root =>
{
TryParseOllamaUsage(root);
@@ -440,13 +513,13 @@ public partial class LlmService : IDisposable
[EnumeratorCancellation] CancellationToken ct)
{
var llm = _settings.Settings.Llm;
var (endpoint, _) = ResolveServerInfo();
var (endpoint, _, allowInsecureTls) = ResolveServerInfo();
var ep = string.IsNullOrEmpty(endpoint) ? llm.Endpoint : endpoint;
var body = BuildOllamaBody(messages, stream: true);
var url = ep.TrimEnd('/') + "/api/chat";
using var req = new HttpRequestMessage(HttpMethod.Post, url) { Content = JsonContent(body) };
using var resp = await SendWithErrorClassificationAsync(req, ct);
using var resp = await SendWithErrorClassificationAsync(req, allowInsecureTls, ct);
using var stream = await resp.Content.ReadAsStreamAsync(ct);
using var reader = new StreamReader(stream);
@@ -496,7 +569,7 @@ public partial class LlmService : IDisposable
private async Task<string> SendOpenAiCompatibleAsync(List<ChatMessage> messages, CancellationToken ct)
{
var llm = _settings.Settings.Llm;
var (endpoint, _) = ResolveServerInfo();
var (endpoint, _, allowInsecureTls) = ResolveServerInfo();
var ep = string.IsNullOrEmpty(endpoint) ? llm.Endpoint : endpoint;
var body = BuildOpenAiBody(messages, stream: false);
var url = ep.TrimEnd('/') + "/v1/chat/completions";
@@ -508,7 +581,7 @@ public partial class LlmService : IDisposable
};
await ApplyAuthHeaderAsync(req, ct);
using var resp = await SendWithErrorClassificationAsync(req, ct);
using var resp = await SendWithErrorClassificationAsync(req, allowInsecureTls, ct);
var respBody = await resp.Content.ReadAsStringAsync(ct);
return SafeParseJson(respBody, root =>
{
@@ -524,14 +597,14 @@ public partial class LlmService : IDisposable
[EnumeratorCancellation] CancellationToken ct)
{
var llm = _settings.Settings.Llm;
var (endpoint, _) = ResolveServerInfo();
var (endpoint, _, allowInsecureTls) = ResolveServerInfo();
var ep = string.IsNullOrEmpty(endpoint) ? llm.Endpoint : endpoint;
var body = BuildOpenAiBody(messages, stream: true);
var url = ep.TrimEnd('/') + "/v1/chat/completions";
using var req = new HttpRequestMessage(HttpMethod.Post, url) { Content = JsonContent(body) };
await ApplyAuthHeaderAsync(req, ct);
using var resp = await SendWithErrorClassificationAsync(req, ct);
using var resp = await SendWithErrorClassificationAsync(req, allowInsecureTls, ct);
using var stream = await resp.Content.ReadAsStreamAsync(ct);
using var reader = new StreamReader(stream);
@@ -597,7 +670,7 @@ public partial class LlmService : IDisposable
var model = ResolveModel();
var body = BuildGeminiBody(messages);
var url = $"https://generativelanguage.googleapis.com/v1beta/models/{model}:generateContent?key={apiKey}";
var resp = await PostJsonWithRetryAsync(url, body, ct);
var resp = await PostJsonWithRetryAsync(url, body, false, ct);
return SafeParseJson(resp, root =>
{
TryParseGeminiUsage(root);
@@ -623,7 +696,7 @@ public partial class LlmService : IDisposable
var url = $"https://generativelanguage.googleapis.com/v1beta/models/{model}:streamGenerateContent?alt=sse&key={apiKey}";
using var req = new HttpRequestMessage(HttpMethod.Post, url) { Content = JsonContent(body) };
using var resp = await SendWithErrorClassificationAsync(req, ct);
using var resp = await SendWithErrorClassificationAsync(req, false, ct);
using var stream = await resp.Content.ReadAsStreamAsync(ct);
using var reader = new StreamReader(stream);
@@ -881,8 +954,19 @@ public partial class LlmService : IDisposable
return result;
}
private async Task<HttpResponseMessage> SendWithTlsAsync(
HttpRequestMessage req,
bool allowInsecureTls,
CancellationToken ct,
HttpCompletionOption completion = HttpCompletionOption.ResponseContentRead)
{
if (!allowInsecureTls)
return await _http.SendAsync(req, completion, ct);
return await _httpInsecure.SendAsync(req, completion, ct);
}
/// <summary>비스트리밍 POST + 재시도 (일시적 오류 시 최대 2회)</summary>
private async Task<string> PostJsonWithRetryAsync(string url, object body, CancellationToken ct)
private async Task<string> PostJsonWithRetryAsync(string url, object body, bool allowInsecureTls, CancellationToken ct)
{
var json = JsonSerializer.Serialize(body);
Exception? lastEx = null;
@@ -891,8 +975,11 @@ public partial class LlmService : IDisposable
{
try
{
using var content = new StringContent(json, Encoding.UTF8, "application/json");
using var resp = await _http.PostAsync(url, content, ct);
using var req = new HttpRequestMessage(HttpMethod.Post, url)
{
Content = new StringContent(json, Encoding.UTF8, "application/json")
};
using var resp = await SendWithTlsAsync(req, allowInsecureTls, ct);
if (resp.IsSuccessStatusCode)
return await resp.Content.ReadAsStringAsync(ct);
@@ -920,9 +1007,9 @@ public partial class LlmService : IDisposable
/// <summary>스트리밍 전용 — HTTP 요청 전송 + 에러 분류</summary>
private async Task<HttpResponseMessage> SendWithErrorClassificationAsync(
HttpRequestMessage req, CancellationToken ct)
HttpRequestMessage req, bool allowInsecureTls, CancellationToken ct)
{
var resp = await _http.SendAsync(req, HttpCompletionOption.ResponseHeadersRead, ct);
var resp = await SendWithTlsAsync(req, allowInsecureTls, ct, HttpCompletionOption.ResponseHeadersRead);
if (!resp.IsSuccessStatusCode)
{
var errBody = await resp.Content.ReadAsStringAsync(ct);
@@ -1085,5 +1172,9 @@ public partial class LlmService : IDisposable
catch { }
}
public void Dispose() => _http.Dispose();
public void Dispose()
{
_http.Dispose();
_httpInsecure.Dispose();
}
}

View File

@@ -81,7 +81,7 @@ public class McpClientService : IDisposable
{
protocolVersion = "2024-11-05",
capabilities = new { },
clientInfo = new { name = "AX Copilot", version = "1.7.2" },
clientInfo = new { name = "AX Copilot", version = "0.7.3" },
}, ct);
if (initResult == null) return false;
@@ -328,3 +328,4 @@ public class McpClientService : IDisposable
catch { }
}
}

View File

@@ -174,6 +174,10 @@ public class SettingsService
private void NormalizeRuntimeSettings()
{
// AX Agent 사용 기본 정책: 항상 활성화.
if (!_settings.AiEnabled)
_settings.AiEnabled = true;
_settings.Llm.FilePermission = PermissionModeCatalog.NormalizeGlobalMode(_settings.Llm.FilePermission);
_settings.Llm.DefaultAgentPermission = PermissionModeCatalog.NormalizeGlobalMode(_settings.Llm.DefaultAgentPermission);
if (_settings.Llm.ToolPermissions != null && _settings.Llm.ToolPermissions.Count > 0)
@@ -183,6 +187,13 @@ public class SettingsService
_settings.Llm.ToolPermissions[key] = PermissionModeCatalog.NormalizeToolOverride(_settings.Llm.ToolPermissions[key]);
}
_settings.Llm.MaxFavoriteSlashCommands = Math.Clamp(_settings.Llm.MaxFavoriteSlashCommands <= 0 ? 10 : _settings.Llm.MaxFavoriteSlashCommands, 1, 30);
_settings.Llm.MaxRecentSlashCommands = Math.Clamp(_settings.Llm.MaxRecentSlashCommands <= 0 ? 20 : _settings.Llm.MaxRecentSlashCommands, 5, 50);
if (_settings.Llm.FavoriteSlashCommands.Count > _settings.Llm.MaxFavoriteSlashCommands)
_settings.Llm.FavoriteSlashCommands = _settings.Llm.FavoriteSlashCommands.Take(_settings.Llm.MaxFavoriteSlashCommands).ToList();
if (_settings.Llm.RecentSlashCommands.Count > _settings.Llm.MaxRecentSlashCommands)
_settings.Llm.RecentSlashCommands = _settings.Llm.RecentSlashCommands.Take(_settings.Llm.MaxRecentSlashCommands).ToList();
NormalizeLlmThresholds(_settings.Llm);
}