재구성 AX Agent 설정과 채팅 UI를 Claude형 구조로
Some checks failed
Release Gate / gate (push) Has been cancelled
Some checks failed
Release Gate / gate (push) Has been cancelled
This commit is contained in:
@@ -20,6 +20,7 @@ public record TokenUsage(int PromptTokens, int CompletionTokens)
|
||||
public partial class LlmService : IDisposable
|
||||
{
|
||||
private readonly HttpClient _http;
|
||||
private readonly HttpClient _httpInsecure;
|
||||
private readonly SettingsService _settings;
|
||||
private string? _systemPrompt;
|
||||
|
||||
@@ -136,6 +137,34 @@ public partial class LlmService : IDisposable
|
||||
|
||||
/// <summary>Returns the current reasoning-effort override (<c>_reasoningEffortOverride</c>), or null when none has been set.</summary>
private string? ResolveReasoningEffort() => _reasoningEffortOverride;
|
||||
|
||||
/// <summary>
/// Heuristic check for a Base64-looking ciphertext: at least 32 characters,
/// length divisible by 4, and containing only Base64-alphabet characters
/// (<c>A–Z</c>, <c>a–z</c>, <c>0–9</c>, <c>+</c>, <c>/</c>, <c>=</c>).
/// </summary>
private static bool LooksLikeEncryptedPayload(string value)
{
    if (string.IsNullOrWhiteSpace(value) || value.Length < 32 || value.Length % 4 != 0)
        return false;

    for (var i = 0; i < value.Length; i++)
    {
        var isBase64Char = value[i] switch
        {
            >= 'A' and <= 'Z' => true,
            >= 'a' and <= 'z' => true,
            >= '0' and <= '9' => true,
            '+' or '/' or '=' => true,
            _ => false,
        };
        if (!isBase64Char)
            return false;
    }
    return true;
}
|
||||
|
||||
/// <summary>
/// Normalizes a stored secret into a usable plaintext value, returning <c>""</c>
/// when no real secret is available.
/// </summary>
/// <param name="raw">The raw stored value (possibly encrypted, blank, or a placeholder).</param>
/// <param name="encryptionEnabled">Whether stored secrets are expected to be encrypted.</param>
/// <returns>The trimmed plaintext secret, or an empty string when none can be resolved.</returns>
private static string ResolveSecretValue(string raw, bool encryptionEnabled)
{
    if (string.IsNullOrWhiteSpace(raw))
        return "";

    var trimmed = raw.Trim();
    // The literal "(저장됨)" ("saved") is treated as no usable secret — presumably
    // a masked UI placeholder; confirm against the settings UI.
    if (trimmed == "(저장됨)")
        return "";
    if (!encryptionEnabled)
        return trimmed;

    var plain = CryptoService.DecryptIfEnabled(raw, encryptionEnabled).Trim();
    if (string.IsNullOrWhiteSpace(plain))
        return "";

    // If decryption handed back the input unchanged while the input still looks
    // like Base64 ciphertext, the payload could not be decrypted — treat as absent.
    var decryptionFailed = string.Equals(plain, raw, StringComparison.Ordinal)
        && LooksLikeEncryptedPayload(raw);
    return decryptionFailed ? "" : plain;
}
|
||||
|
||||
/// <summary>지정 서비스의 API 키를 반환합니다.</summary>
|
||||
private string ResolveApiKeyForService(string service)
|
||||
{
|
||||
@@ -144,8 +173,8 @@ public partial class LlmService : IDisposable
|
||||
{
|
||||
"gemini" => llm.GeminiApiKey,
|
||||
"sigmoid" => llm.ClaudeApiKey,
|
||||
"vllm" => CryptoService.DecryptIfEnabled(llm.VllmApiKey, llm.EncryptionEnabled),
|
||||
"ollama" => CryptoService.DecryptIfEnabled(llm.OllamaApiKey, llm.EncryptionEnabled),
|
||||
"vllm" => ResolveSecretValue(llm.VllmApiKey, llm.EncryptionEnabled),
|
||||
"ollama" => ResolveSecretValue(llm.OllamaApiKey, llm.EncryptionEnabled),
|
||||
_ => "",
|
||||
};
|
||||
}
|
||||
@@ -165,13 +194,41 @@ public partial class LlmService : IDisposable
|
||||
/// <summary>Token usage of the most recent request; updated after both streaming and non-streaming completions.</summary>
public TokenUsage? LastTokenUsage { get; private set; }
|
||||
|
||||
/// <summary>
/// Immutable snapshot of the currently-resolved LLM connection: active service,
/// model, endpoint, whether insecure TLS is allowed, and whether an API key is present.
/// </summary>
public record RuntimeConnectionSnapshot(
    string Service,
    string Model,
    string Endpoint,
    bool AllowInsecureTls,
    bool HasApiKey);
|
||||
|
||||
/// <summary>
/// Creates the service: wires settings, builds the two HTTP clients
/// (standard and certificate-validation-disabled), and loads the system prompt.
/// </summary>
public LlmService(SettingsService settings)
{
    _settings = settings;

    // Default client with normal server-certificate validation.
    _http = new HttpClient { Timeout = TimeSpan.FromMinutes(10) };

    // Secondary client that accepts any server certificate; selected only when
    // a request explicitly opts into insecure TLS (see SendWithTlsAsync).
    var handler = new HttpClientHandler
    {
        ServerCertificateCustomValidationCallback =
            HttpClientHandler.DangerousAcceptAnyServerCertificateValidator
    };
    _httpInsecure = new HttpClient(handler) { Timeout = TimeSpan.FromMinutes(10) };

    LoadSystemPrompt();
}
|
||||
|
||||
/// <summary>
/// Builds a <see cref="RuntimeConnectionSnapshot"/> describing the service, model,
/// endpoint, TLS mode and API-key presence that would be used for the next request.
/// </summary>
public RuntimeConnectionSnapshot GetRuntimeConnectionSnapshot()
{
    var service = ResolveService();
    var model = ResolveModel();
    var (endpoint, apiKey, allowInsecureTls) = ResolveServerInfo();

    // Fall back to the per-service default endpoint when none was resolved.
    if (string.IsNullOrWhiteSpace(endpoint))
        endpoint = ResolveEndpointForService(service);

    return new RuntimeConnectionSnapshot(
        Service: service,
        Model: model,
        Endpoint: endpoint ?? "",
        AllowInsecureTls: allowInsecureTls,
        HasApiKey: !string.IsNullOrWhiteSpace(apiKey));
}
|
||||
|
||||
// ─── 시스템 프롬프트 (빌드 경로에서 동적 로딩) ─────────────────────────
|
||||
|
||||
private void LoadSystemPrompt()
|
||||
@@ -200,7 +257,7 @@ public partial class LlmService : IDisposable
|
||||
/// 현재 활성 모델에 매칭되는 RegisteredModel을 찾아 엔드포인트/API키를 반환합니다.
|
||||
/// RegisteredModel에 전용 서버 정보가 있으면 그것을 사용하고, 없으면 기본 설정을 사용합니다.
|
||||
/// </summary>
|
||||
private (string Endpoint, string ApiKey) ResolveServerInfo()
|
||||
private (string Endpoint, string ApiKey, bool AllowInsecureTls) ResolveServerInfo()
|
||||
{
|
||||
var llm = _settings.Settings.Llm;
|
||||
var activeService = ResolveService();
|
||||
@@ -209,20 +266,26 @@ public partial class LlmService : IDisposable
|
||||
// RegisteredModel에서 현재 모델과 서비스가 일치하는 항목 검색
|
||||
var registered = FindRegisteredModel(llm, activeService, modelName);
|
||||
|
||||
if (registered != null && !string.IsNullOrEmpty(registered.Endpoint))
|
||||
if (registered != null)
|
||||
{
|
||||
var endpoint = string.IsNullOrWhiteSpace(registered.Endpoint)
|
||||
? ResolveEndpointForService(activeService)
|
||||
: registered.Endpoint;
|
||||
var apiKey = !string.IsNullOrEmpty(registered.ApiKey)
|
||||
? CryptoService.DecryptIfEnabled(registered.ApiKey, llm.EncryptionEnabled)
|
||||
? ResolveSecretValue(registered.ApiKey, llm.EncryptionEnabled)
|
||||
: GetDefaultApiKey(llm, activeService);
|
||||
return (registered.Endpoint, apiKey);
|
||||
var allowInsecureTls = activeService == "vllm"
|
||||
? (registered.AllowInsecureTls || llm.VllmAllowInsecureTls)
|
||||
: false;
|
||||
return (endpoint, apiKey, allowInsecureTls);
|
||||
}
|
||||
|
||||
// 기본 엔드포인트 사용
|
||||
return activeService.ToLowerInvariant() switch
|
||||
{
|
||||
"vllm" => (llm.VllmEndpoint, CryptoService.DecryptIfEnabled(llm.VllmApiKey, llm.EncryptionEnabled)),
|
||||
"ollama" => (llm.OllamaEndpoint, CryptoService.DecryptIfEnabled(llm.OllamaApiKey, llm.EncryptionEnabled)),
|
||||
_ => ("", ""),
|
||||
"vllm" => (llm.VllmEndpoint, ResolveSecretValue(llm.VllmApiKey, llm.EncryptionEnabled), llm.VllmAllowInsecureTls),
|
||||
"ollama" => (llm.OllamaEndpoint, ResolveSecretValue(llm.OllamaApiKey, llm.EncryptionEnabled), false),
|
||||
_ => ("", "", false),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -258,7 +321,7 @@ public partial class LlmService : IDisposable
|
||||
}
|
||||
|
||||
// 기본 Bearer 인증 — 기존 API 키 반환
|
||||
var (_, apiKey) = ResolveServerInfo();
|
||||
var (_, apiKey, _) = ResolveServerInfo();
|
||||
return string.IsNullOrEmpty(apiKey) ? null : apiKey;
|
||||
}
|
||||
|
||||
@@ -278,8 +341,8 @@ public partial class LlmService : IDisposable
|
||||
var svc = service ?? llm.Service;
|
||||
return svc.ToLowerInvariant() switch
|
||||
{
|
||||
"vllm" => CryptoService.DecryptIfEnabled(llm.VllmApiKey, llm.EncryptionEnabled),
|
||||
"ollama" => CryptoService.DecryptIfEnabled(llm.OllamaApiKey, llm.EncryptionEnabled),
|
||||
"vllm" => ResolveSecretValue(llm.VllmApiKey, llm.EncryptionEnabled),
|
||||
"ollama" => ResolveSecretValue(llm.OllamaApiKey, llm.EncryptionEnabled),
|
||||
_ => "",
|
||||
};
|
||||
}
|
||||
@@ -356,7 +419,7 @@ public partial class LlmService : IDisposable
|
||||
try
|
||||
{
|
||||
var llm = _settings.Settings.Llm;
|
||||
var normalizedService = NormalizeServiceName(llm.Service);
|
||||
var normalizedService = ResolveService();
|
||||
if (OperationModePolicy.IsInternal(_settings.Settings) && IsExternalLlmService(normalizedService))
|
||||
{
|
||||
var blockedName = normalizedService == "sigmoid" ? "Claude" : "Gemini";
|
||||
@@ -366,16 +429,26 @@ public partial class LlmService : IDisposable
|
||||
switch (normalizedService)
|
||||
{
|
||||
case "ollama":
|
||||
var resp = await _http.GetAsync(llm.Endpoint.TrimEnd('/') + "/api/tags");
|
||||
{
|
||||
var (endpoint, _, _) = ResolveServerInfo();
|
||||
var ep = string.IsNullOrWhiteSpace(endpoint) ? ResolveEndpointForService("ollama") : endpoint;
|
||||
var resp = await _http.GetAsync(ep.TrimEnd('/') + "/api/tags");
|
||||
return resp.IsSuccessStatusCode
|
||||
? (true, "Ollama 연결 성공")
|
||||
: (false, ClassifyHttpError(resp));
|
||||
}
|
||||
|
||||
case "vllm":
|
||||
var vResp = await _http.GetAsync(llm.Endpoint.TrimEnd('/') + "/v1/models");
|
||||
{
|
||||
var (endpoint, _, allowInsecureTls) = ResolveServerInfo();
|
||||
var ep = string.IsNullOrWhiteSpace(endpoint) ? ResolveEndpointForService("vllm") : endpoint;
|
||||
using var vReq = new HttpRequestMessage(HttpMethod.Get, ep.TrimEnd('/') + "/v1/models");
|
||||
await ApplyAuthHeaderAsync(vReq, CancellationToken.None);
|
||||
using var vResp = await SendWithTlsAsync(vReq, allowInsecureTls, CancellationToken.None);
|
||||
return vResp.IsSuccessStatusCode
|
||||
? (true, "vLLM 연결 성공")
|
||||
: (false, ClassifyHttpError(vResp));
|
||||
}
|
||||
|
||||
case "gemini":
|
||||
var gKey = ResolveApiKeyForService("gemini");
|
||||
@@ -424,10 +497,10 @@ public partial class LlmService : IDisposable
|
||||
private async Task<string> SendOllamaAsync(List<ChatMessage> messages, CancellationToken ct)
|
||||
{
|
||||
var llm = _settings.Settings.Llm;
|
||||
var (endpoint, _) = ResolveServerInfo();
|
||||
var (endpoint, _, allowInsecureTls) = ResolveServerInfo();
|
||||
var ep = string.IsNullOrEmpty(endpoint) ? llm.Endpoint : endpoint;
|
||||
var body = BuildOllamaBody(messages, stream: false);
|
||||
var resp = await PostJsonWithRetryAsync(ep.TrimEnd('/') + "/api/chat", body, ct);
|
||||
var resp = await PostJsonWithRetryAsync(ep.TrimEnd('/') + "/api/chat", body, allowInsecureTls, ct);
|
||||
return SafeParseJson(resp, root =>
|
||||
{
|
||||
TryParseOllamaUsage(root);
|
||||
@@ -440,13 +513,13 @@ public partial class LlmService : IDisposable
|
||||
[EnumeratorCancellation] CancellationToken ct)
|
||||
{
|
||||
var llm = _settings.Settings.Llm;
|
||||
var (endpoint, _) = ResolveServerInfo();
|
||||
var (endpoint, _, allowInsecureTls) = ResolveServerInfo();
|
||||
var ep = string.IsNullOrEmpty(endpoint) ? llm.Endpoint : endpoint;
|
||||
var body = BuildOllamaBody(messages, stream: true);
|
||||
var url = ep.TrimEnd('/') + "/api/chat";
|
||||
|
||||
using var req = new HttpRequestMessage(HttpMethod.Post, url) { Content = JsonContent(body) };
|
||||
using var resp = await SendWithErrorClassificationAsync(req, ct);
|
||||
using var resp = await SendWithErrorClassificationAsync(req, allowInsecureTls, ct);
|
||||
|
||||
using var stream = await resp.Content.ReadAsStreamAsync(ct);
|
||||
using var reader = new StreamReader(stream);
|
||||
@@ -496,7 +569,7 @@ public partial class LlmService : IDisposable
|
||||
private async Task<string> SendOpenAiCompatibleAsync(List<ChatMessage> messages, CancellationToken ct)
|
||||
{
|
||||
var llm = _settings.Settings.Llm;
|
||||
var (endpoint, _) = ResolveServerInfo();
|
||||
var (endpoint, _, allowInsecureTls) = ResolveServerInfo();
|
||||
var ep = string.IsNullOrEmpty(endpoint) ? llm.Endpoint : endpoint;
|
||||
var body = BuildOpenAiBody(messages, stream: false);
|
||||
var url = ep.TrimEnd('/') + "/v1/chat/completions";
|
||||
@@ -508,7 +581,7 @@ public partial class LlmService : IDisposable
|
||||
};
|
||||
await ApplyAuthHeaderAsync(req, ct);
|
||||
|
||||
using var resp = await SendWithErrorClassificationAsync(req, ct);
|
||||
using var resp = await SendWithErrorClassificationAsync(req, allowInsecureTls, ct);
|
||||
var respBody = await resp.Content.ReadAsStringAsync(ct);
|
||||
return SafeParseJson(respBody, root =>
|
||||
{
|
||||
@@ -524,14 +597,14 @@ public partial class LlmService : IDisposable
|
||||
[EnumeratorCancellation] CancellationToken ct)
|
||||
{
|
||||
var llm = _settings.Settings.Llm;
|
||||
var (endpoint, _) = ResolveServerInfo();
|
||||
var (endpoint, _, allowInsecureTls) = ResolveServerInfo();
|
||||
var ep = string.IsNullOrEmpty(endpoint) ? llm.Endpoint : endpoint;
|
||||
var body = BuildOpenAiBody(messages, stream: true);
|
||||
var url = ep.TrimEnd('/') + "/v1/chat/completions";
|
||||
|
||||
using var req = new HttpRequestMessage(HttpMethod.Post, url) { Content = JsonContent(body) };
|
||||
await ApplyAuthHeaderAsync(req, ct);
|
||||
using var resp = await SendWithErrorClassificationAsync(req, ct);
|
||||
using var resp = await SendWithErrorClassificationAsync(req, allowInsecureTls, ct);
|
||||
|
||||
using var stream = await resp.Content.ReadAsStreamAsync(ct);
|
||||
using var reader = new StreamReader(stream);
|
||||
@@ -597,7 +670,7 @@ public partial class LlmService : IDisposable
|
||||
var model = ResolveModel();
|
||||
var body = BuildGeminiBody(messages);
|
||||
var url = $"https://generativelanguage.googleapis.com/v1beta/models/{model}:generateContent?key={apiKey}";
|
||||
var resp = await PostJsonWithRetryAsync(url, body, ct);
|
||||
var resp = await PostJsonWithRetryAsync(url, body, false, ct);
|
||||
return SafeParseJson(resp, root =>
|
||||
{
|
||||
TryParseGeminiUsage(root);
|
||||
@@ -623,7 +696,7 @@ public partial class LlmService : IDisposable
|
||||
var url = $"https://generativelanguage.googleapis.com/v1beta/models/{model}:streamGenerateContent?alt=sse&key={apiKey}";
|
||||
|
||||
using var req = new HttpRequestMessage(HttpMethod.Post, url) { Content = JsonContent(body) };
|
||||
using var resp = await SendWithErrorClassificationAsync(req, ct);
|
||||
using var resp = await SendWithErrorClassificationAsync(req, false, ct);
|
||||
|
||||
using var stream = await resp.Content.ReadAsStreamAsync(ct);
|
||||
using var reader = new StreamReader(stream);
|
||||
@@ -881,8 +954,19 @@ public partial class LlmService : IDisposable
|
||||
return result;
|
||||
}
|
||||
|
||||
/// <summary>
/// Sends the request on the appropriate client: the certificate-validation-disabled
/// client when <paramref name="allowInsecureTls"/> is true, otherwise the default client.
/// </summary>
/// <param name="req">The request to send.</param>
/// <param name="allowInsecureTls">Whether to skip server-certificate validation.</param>
/// <param name="ct">Cancellation token.</param>
/// <param name="completion">When to consider the send complete (headers vs. full body).</param>
private async Task<HttpResponseMessage> SendWithTlsAsync(
    HttpRequestMessage req,
    bool allowInsecureTls,
    CancellationToken ct,
    HttpCompletionOption completion = HttpCompletionOption.ResponseContentRead)
{
    var client = allowInsecureTls ? _httpInsecure : _http;
    return await client.SendAsync(req, completion, ct);
}
|
||||
|
||||
/// <summary>비스트리밍 POST + 재시도 (일시적 오류 시 최대 2회)</summary>
|
||||
private async Task<string> PostJsonWithRetryAsync(string url, object body, CancellationToken ct)
|
||||
private async Task<string> PostJsonWithRetryAsync(string url, object body, bool allowInsecureTls, CancellationToken ct)
|
||||
{
|
||||
var json = JsonSerializer.Serialize(body);
|
||||
Exception? lastEx = null;
|
||||
@@ -891,8 +975,11 @@ public partial class LlmService : IDisposable
|
||||
{
|
||||
try
|
||||
{
|
||||
using var content = new StringContent(json, Encoding.UTF8, "application/json");
|
||||
using var resp = await _http.PostAsync(url, content, ct);
|
||||
using var req = new HttpRequestMessage(HttpMethod.Post, url)
|
||||
{
|
||||
Content = new StringContent(json, Encoding.UTF8, "application/json")
|
||||
};
|
||||
using var resp = await SendWithTlsAsync(req, allowInsecureTls, ct);
|
||||
|
||||
if (resp.IsSuccessStatusCode)
|
||||
return await resp.Content.ReadAsStringAsync(ct);
|
||||
@@ -920,9 +1007,9 @@ public partial class LlmService : IDisposable
|
||||
|
||||
/// <summary>스트리밍 전용 — HTTP 요청 전송 + 에러 분류</summary>
|
||||
private async Task<HttpResponseMessage> SendWithErrorClassificationAsync(
|
||||
HttpRequestMessage req, CancellationToken ct)
|
||||
HttpRequestMessage req, bool allowInsecureTls, CancellationToken ct)
|
||||
{
|
||||
var resp = await _http.SendAsync(req, HttpCompletionOption.ResponseHeadersRead, ct);
|
||||
var resp = await SendWithTlsAsync(req, allowInsecureTls, ct, HttpCompletionOption.ResponseHeadersRead);
|
||||
if (!resp.IsSuccessStatusCode)
|
||||
{
|
||||
var errBody = await resp.Content.ReadAsStringAsync(ct);
|
||||
@@ -1085,5 +1172,9 @@ public partial class LlmService : IDisposable
|
||||
catch { }
|
||||
}
|
||||
|
||||
/// <summary>
/// Disposes both owned <see cref="HttpClient"/> instances — the default client
/// and the insecure-TLS client. (The superseded single-client form, which leaked
/// <c>_httpInsecure</c>, is removed.)
/// </summary>
public void Dispose()
{
    _http.Dispose();
    _httpInsecure.Dispose();
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user