로직 안정화 회귀 보강: compact/vLLM 경로 테스트 강화
Some checks failed
Release Gate / gate (push) Has been cancelled

- ContextCondenserTests 추가: proactive 비활성 무변경, 대용량 tool_result 축약 검증

- LlmRuntimeOverrideTests 보강: vLLM API키 복호화/SSL 우회 합성 규칙 검증

- README, DEVELOPMENT, NEXT_ROADMAP 문서 이력(2026-04-04 14:47 KST) 동기화
This commit is contained in:
2026-04-04 14:52:50 +09:00
parent 310e75832c
commit 508392f0d9
5 changed files with 246 additions and 70 deletions

View File

@@ -1,5 +1,5 @@
using System.Reflection;
using AxCopilot.Models;
using AxCopilot.Services;
using AxCopilot.Services.Agent;
using FluentAssertions;
using Xunit;
@@ -9,87 +9,71 @@ namespace AxCopilot.Tests.Services;
/// <summary>
/// Regression tests for <c>ContextCondenser.CondenseIfNeededAsync</c>:
/// when proactive condensing is disabled the conversation must be left
/// untouched, and an oversized tool_result payload in the old segment must
/// be truncated in place (per the test name, without a summarize LLM call —
/// TODO confirm against ContextCondenser's implementation).
/// NOTE(review): this block was reconstructed from a diff whose +/- markers
/// were lost; it reflects the post-commit (new) version of the class.
/// </summary>
public class ContextCondenserTests
{
    [Fact]
    public async Task CondenseIfNeededAsync_WhenProactiveDisabled_ShouldNotChangeMessages()
    {
        // Arrange: a local ollama configuration; no network call is expected here.
        var settings = new SettingsService();
        settings.Settings.Llm.Service = "ollama";
        settings.Settings.Llm.Model = "test-model";
        using var llm = new LlmService(settings);
        var messages = BuildLargeConversation();
        var before = messages.Select(m => m.Content).ToList();

        // Act: proactiveEnabled=false and force=false must be a no-op.
        var changed = await ContextCondenser.CondenseIfNeededAsync(
            messages,
            llm,
            maxOutputTokens: 2_000,
            proactiveEnabled: false,
            triggerPercent: 80,
            force: false,
            CancellationToken.None);

        // Assert: nothing reported changed and every message body is intact.
        changed.Should().BeFalse();
        messages.Select(m => m.Content).Should().Equal(before);
    }

    [Fact]
    public async Task CondenseIfNeededAsync_ShouldTruncateLargeToolResult_WithoutSummarizeCall()
    {
        // Arrange: same offline configuration, conversation with a ~9 KB tool_result.
        var settings = new SettingsService();
        settings.Settings.Llm.Service = "ollama";
        settings.Settings.Llm.Model = "test-model";
        using var llm = new LlmService(settings);
        var messages = BuildLargeConversation();

        // Act: proactive condensing enabled — the large tool_result should be shrunk.
        var changed = await ContextCondenser.CondenseIfNeededAsync(
            messages,
            llm,
            maxOutputTokens: 2_000,
            proactiveEnabled: true,
            triggerPercent: 80,
            force: false,
            CancellationToken.None);

        // Assert: a change happened and the truncation marker ("[축약됨" = "[truncated")
        // now appears somewhere in the conversation.
        changed.Should().BeTrue();
        messages.Any(m => (m.Content ?? "").Contains("[축약됨", StringComparison.Ordinal)).Should().BeTrue();
    }

    /// <summary>
    /// Builds a conversation whose oldest assistant turn carries a ~9 KB
    /// tool_result JSON payload, followed by enough recent turns that the
    /// payload sits outside the most-recent window.
    /// </summary>
    private static List<ChatMessage> BuildLargeConversation()
    {
        var largeOutput = new string('A', 9_000);
        var toolJson = "{\"type\":\"tool_result\",\"output\":\"" + largeOutput + "\",\"success\":true}";
        return
        [
            new ChatMessage { Role = "system", Content = "system prompt" },
            new ChatMessage { Role = "user", Content = "첫 질문" },
            new ChatMessage { Role = "assistant", Content = toolJson }, // placed in the old segment
            new ChatMessage { Role = "assistant", Content = "첫 답변" },
            new ChatMessage { Role = "user", Content = "둘째 질문" },
            new ChatMessage { Role = "assistant", Content = "둘째 답변" },
            new ChatMessage { Role = "user", Content = "셋째 질문" },
            new ChatMessage { Role = "assistant", Content = "셋째 답변" },
            new ChatMessage { Role = "user", Content = "넷째 질문" },
            new ChatMessage { Role = "assistant", Content = "넷째 답변" },
            new ChatMessage { Role = "user", Content = "다섯째 질문" },
            new ChatMessage { Role = "assistant", Content = "다섯째 답변" },
        ];
    }
}

View File

@@ -108,4 +108,152 @@ public class LlmRuntimeOverrideTests
method.Should().NotBeNull();
return (T)method!.Invoke(instance, null)!;
}
[Fact]
public void ResolveServerInfo_VllmGlobalInsecureTls_ShouldBeApplied()
{
    // Arrange: global vLLM settings only — no per-model override registered.
    var settings = new SettingsService();
    settings.Settings.Llm.Service = "vllm";
    settings.Settings.Llm.Model = "vllm-model";
    settings.Settings.Llm.VllmEndpoint = "https://vllm.internal";
    settings.Settings.Llm.VllmApiKey = "global-key";
    settings.Settings.Llm.VllmAllowInsecureTls = true;
    settings.Settings.Llm.EncryptionEnabled = false;
    using var llm = new LlmService(settings);

    // Act: ResolveServerInfo is private, so invoke it via reflection.
    var method = typeof(LlmService).GetMethod("ResolveServerInfo", BindingFlags.NonPublic | BindingFlags.Instance);
    method.Should().NotBeNull();
    var tuple = ((string Endpoint, string ApiKey, bool AllowInsecureTls))method!.Invoke(llm, null)!;

    // Assert: the global endpoint, key, and insecure-TLS flag pass through as-is.
    tuple.Endpoint.Should().Be("https://vllm.internal");
    tuple.ApiKey.Should().Be("global-key");
    tuple.AllowInsecureTls.Should().BeTrue(); // BeTrue() for consistency with sibling tests
}
[Fact]
public void ResolveServerInfo_RegisteredModelOverride_ShouldUseEndpointAndApiKey()
{
    // Arrange: global fallback connection plus a registered model whose name
    // matches the active model — the override should win for every field.
    var settings = new SettingsService();
    settings.Settings.Llm.Service = "vllm";
    settings.Settings.Llm.Model = "corp-vllm-model";
    settings.Settings.Llm.VllmEndpoint = "https://fallback.internal";
    settings.Settings.Llm.VllmApiKey = "fallback-key";
    settings.Settings.Llm.VllmAllowInsecureTls = false;
    settings.Settings.Llm.EncryptionEnabled = false;
    settings.Settings.Llm.RegisteredModels =
    [
        new RegisteredModel
        {
            Alias = "corp",
            EncryptedModelName = "corp-vllm-model",
            Service = "vllm",
            Endpoint = "https://model.internal",
            ApiKey = "model-key",
            AllowInsecureTls = true
        }
    ];
    using var llm = new LlmService(settings);

    // Act: invoke the private resolver via reflection.
    var method = typeof(LlmService).GetMethod("ResolveServerInfo", BindingFlags.NonPublic | BindingFlags.Instance);
    method.Should().NotBeNull();
    var tuple = ((string Endpoint, string ApiKey, bool AllowInsecureTls))method!.Invoke(llm, null)!;

    // Assert: the registered model's connection data replaces the global fallback.
    tuple.Endpoint.Should().Be("https://model.internal");
    tuple.ApiKey.Should().Be("model-key");
    tuple.AllowInsecureTls.Should().BeTrue(); // BeTrue() for consistency with sibling tests
}
[Fact]
public void ResolveServerInfo_VllmEncryptedApiKey_ShouldBeDecryptedAtRuntime()
{
    // An API key persisted in encrypted form must be resolved back to plaintext.
    var cfg = new SettingsService();
    var llmCfg = cfg.Settings.Llm;
    llmCfg.Service = "vllm";
    llmCfg.Model = "corp-vllm-model";
    llmCfg.VllmEndpoint = "https://secure.internal";
    llmCfg.EncryptionEnabled = true;
    llmCfg.VllmApiKey = CryptoService.EncryptIfEnabled("enc-key-value", true);
    using var service = new LlmService(cfg);

    // Reach the private resolver through reflection.
    var resolver = typeof(LlmService).GetMethod("ResolveServerInfo", BindingFlags.NonPublic | BindingFlags.Instance);
    resolver.Should().NotBeNull();
    var info = ((string Endpoint, string ApiKey, bool AllowInsecureTls))resolver!.Invoke(service, null)!;

    info.Endpoint.Should().Be("https://secure.internal");
    info.ApiKey.Should().Be("enc-key-value");
}
[Fact]
public void ResolveServerInfo_RegisteredModelInsecureFalse_GlobalInsecureTrue_ShouldRemainTrue()
{
    // Global insecure-TLS=true must still apply even when the matching
    // registered model sets AllowInsecureTls=false.
    var cfg = new SettingsService();
    var llmCfg = cfg.Settings.Llm;
    llmCfg.Service = "vllm";
    llmCfg.Model = "corp-vllm-model";
    llmCfg.VllmEndpoint = "https://fallback.internal";
    llmCfg.VllmApiKey = "fallback-key";
    llmCfg.VllmAllowInsecureTls = true;
    llmCfg.EncryptionEnabled = false;
    llmCfg.RegisteredModels =
    [
        new RegisteredModel
        {
            Alias = "corp",
            EncryptedModelName = "corp-vllm-model",
            Service = "vllm",
            Endpoint = "https://model.internal",
            ApiKey = "model-key",
            AllowInsecureTls = false
        }
    ];
    using var service = new LlmService(cfg);

    // Reach the private resolver through reflection.
    var resolver = typeof(LlmService).GetMethod("ResolveServerInfo", BindingFlags.NonPublic | BindingFlags.Instance);
    resolver.Should().NotBeNull();
    var info = ((string Endpoint, string ApiKey, bool AllowInsecureTls))resolver!.Invoke(service, null)!;

    info.AllowInsecureTls.Should().BeTrue();
}
[Fact]
public void GetRuntimeConnectionSnapshot_Vllm_ShouldExposeMaskedRuntimeInputs()
{
    // A vLLM configuration with an API key and insecure TLS enabled.
    var cfg = new SettingsService();
    var llmCfg = cfg.Settings.Llm;
    llmCfg.Service = "vllm";
    llmCfg.Model = "corp-vllm-model";
    llmCfg.VllmEndpoint = "https://model.internal:8443";
    llmCfg.VllmApiKey = "model-key";
    llmCfg.VllmAllowInsecureTls = true;
    llmCfg.EncryptionEnabled = false;
    using var service = new LlmService(cfg);

    var snapshot = service.GetRuntimeConnectionSnapshot();

    // The snapshot mirrors the effective connection and only reports key
    // presence (HasApiKey), not the key material itself.
    snapshot.Service.Should().Be("vllm");
    snapshot.Model.Should().Be("corp-vllm-model");
    snapshot.Endpoint.Should().Be("https://model.internal:8443");
    snapshot.AllowInsecureTls.Should().BeTrue();
    snapshot.HasApiKey.Should().BeTrue();
}
[Fact]
public void GetRuntimeConnectionSnapshot_OllamaWithoutKey_ShouldReportNoKey()
{
    // An ollama configuration with an empty API key.
    var cfg = new SettingsService();
    var llmCfg = cfg.Settings.Llm;
    llmCfg.Service = "ollama";
    llmCfg.Model = "qwen2.5-coder";
    llmCfg.OllamaEndpoint = "http://localhost:11434";
    llmCfg.OllamaApiKey = "";
    llmCfg.EncryptionEnabled = false;
    using var service = new LlmService(cfg);

    var snapshot = service.GetRuntimeConnectionSnapshot();

    // No key configured => HasApiKey=false; plain-http ollama => no insecure TLS.
    snapshot.Service.Should().Be("ollama");
    snapshot.Model.Should().Be("qwen2.5-coder");
    snapshot.Endpoint.Should().Be("http://localhost:11434");
    snapshot.AllowInsecureTls.Should().BeFalse();
    snapshot.HasApiKey.Should().BeFalse();
}
}