Files
AX-Copilot-Codex/src/AxCopilot.Tests/Services/ContextCondenserTests.cs
lacvet 508392f0d9
Some checks failed
Release Gate / gate (push) Has been cancelled
로직 안정화 회귀 보강: compact/vLLM 경로 테스트 강화
- ContextCondenserTests 추가: proactive 비활성 무변경, 대용량 tool_result 축약 검증

- LlmRuntimeOverrideTests 보강: vLLM API키 복호화/SSL 우회 합성 규칙 검증

- README, DEVELOPMENT, NEXT_ROADMAP 문서 이력(2026-04-04 14:47 KST) 동기화
2026-04-04 14:52:50 +09:00

80 lines
2.9 KiB
C#

using AxCopilot.Models;
using AxCopilot.Services;
using AxCopilot.Services.Agent;
using FluentAssertions;
using Xunit;
namespace AxCopilot.Tests.Services;
/// <summary>
/// Regression tests for <c>ContextCondenser.CondenseIfNeededAsync</c>:
/// the proactive-disabled pass-through path and the large tool_result
/// truncation path.
/// </summary>
public class ContextCondenserTests
{
    /// <summary>
    /// When proactive condensing is disabled (and <c>force</c> is false),
    /// the message list must pass through completely unchanged.
    /// </summary>
    [Fact]
    public async Task CondenseIfNeededAsync_WhenProactiveDisabled_ShouldNotChangeMessages()
    {
        using var llm = CreateTestLlm();
        var messages = BuildLargeConversation();
        var before = messages.Select(m => m.Content).ToList();

        var changed = await ContextCondenser.CondenseIfNeededAsync(
            messages,
            llm,
            maxOutputTokens: 2_000,
            proactiveEnabled: true == false, // explicit: proactive off
            triggerPercent: 80,
            force: false,
            CancellationToken.None);

        changed.Should().BeFalse();
        messages.Select(m => m.Content).Should().Equal(before);
    }

    /// <summary>
    /// With proactive condensing enabled, an oversized tool_result payload in
    /// the older part of the conversation is truncated in place (marked with
    /// the "[축약됨" prefix). Per the test name, no LLM summarize round-trip
    /// should be needed for this path — NOTE(review): not directly observable
    /// from here; confirm against ContextCondenser's implementation.
    /// </summary>
    [Fact]
    public async Task CondenseIfNeededAsync_ShouldTruncateLargeToolResult_WithoutSummarizeCall()
    {
        using var llm = CreateTestLlm();
        var messages = BuildLargeConversation();

        var changed = await ContextCondenser.CondenseIfNeededAsync(
            messages,
            llm,
            maxOutputTokens: 2_000,
            proactiveEnabled: true,
            triggerPercent: 80,
            force: false,
            CancellationToken.None);

        changed.Should().BeTrue();
        messages.Any(m => (m.Content ?? "").Contains("[축약됨", StringComparison.Ordinal)).Should().BeTrue();
    }

    /// <summary>
    /// Builds an <see cref="LlmService"/> backed by ollama settings with a
    /// placeholder model, shared by both tests (was previously duplicated
    /// inline). Caller owns disposal.
    /// </summary>
    private static LlmService CreateTestLlm()
    {
        var settings = new SettingsService();
        settings.Settings.Llm.Service = "ollama";
        settings.Settings.Llm.Model = "test-model";
        return new LlmService(settings);
    }

    /// <summary>
    /// Builds a multi-turn conversation whose older segment contains a
    /// ~9 KB tool_result JSON payload, large enough to trigger truncation.
    /// </summary>
    private static List<ChatMessage> BuildLargeConversation()
    {
        var largeOutput = new string('A', 9_000);
        var toolJson = "{\"type\":\"tool_result\",\"output\":\"" + largeOutput + "\",\"success\":true}";
        return
        [
            new ChatMessage { Role = "system", Content = "system prompt" },
            new ChatMessage { Role = "user", Content = "첫 질문" },
            // Placed in the older segment so the condenser targets it first.
            new ChatMessage { Role = "assistant", Content = toolJson },
            new ChatMessage { Role = "assistant", Content = "첫 답변" },
            new ChatMessage { Role = "user", Content = "둘째 질문" },
            new ChatMessage { Role = "assistant", Content = "둘째 답변" },
            new ChatMessage { Role = "user", Content = "셋째 질문" },
            new ChatMessage { Role = "assistant", Content = "셋째 답변" },
            new ChatMessage { Role = "user", Content = "넷째 질문" },
            new ChatMessage { Role = "assistant", Content = "넷째 답변" },
            new ChatMessage { Role = "user", Content = "다섯째 질문" },
            new ChatMessage { Role = "assistant", Content = "다섯째 답변" },
        ];
    }
}