로직 안정화 회귀 보강: compact/vLLM 경로 테스트 강화
Some checks failed
Release Gate / gate (push) Has been cancelled
- ContextCondenserTests 추가: proactive 비활성 무변경, 대용량 tool_result 축약 검증 - LlmRuntimeOverrideTests 보강: vLLM API키 복호화/SSL 우회 합성 규칙 검증 - README, DEVELOPMENT, NEXT_ROADMAP 문서 이력(2026-04-04 14:47 KST) 동기화
This commit is contained in:
@@ -108,4 +108,152 @@ public class LlmRuntimeOverrideTests
|
||||
method.Should().NotBeNull();
|
||||
return (T)method!.Invoke(instance, null)!;
|
||||
}
|
||||
|
||||
[Fact]
public void ResolveServerInfo_VllmGlobalInsecureTls_ShouldBeApplied()
{
    // Arrange: global vLLM settings with the insecure-TLS flag enabled and
    // encryption disabled, so the API key is stored and resolved as plain text.
    var settings = new SettingsService();
    settings.Settings.Llm.Service = "vllm";
    settings.Settings.Llm.Model = "vllm-model";
    settings.Settings.Llm.VllmEndpoint = "https://vllm.internal";
    settings.Settings.Llm.VllmApiKey = "global-key";
    settings.Settings.Llm.VllmAllowInsecureTls = true;
    settings.Settings.Llm.EncryptionEnabled = false;

    using var llm = new LlmService(settings);

    // Act: invoke the private resolver via reflection.
    var method = typeof(LlmService).GetMethod("ResolveServerInfo", BindingFlags.NonPublic | BindingFlags.Instance);
    method.Should().NotBeNull();

    var tuple = ((string Endpoint, string ApiKey, bool AllowInsecureTls))method!.Invoke(llm, null)!;

    // Assert: the global endpoint, key, and TLS flag flow through unchanged.
    tuple.Endpoint.Should().Be("https://vllm.internal");
    tuple.ApiKey.Should().Be("global-key");
    tuple.AllowInsecureTls.Should().BeTrue(); // BeTrue() for consistency with the other tests in this class
}
|
||||
|
||||
[Fact]
public void ResolveServerInfo_RegisteredModelOverride_ShouldUseEndpointAndApiKey()
{
    // Arrange: global vLLM fallback settings plus a registered model whose
    // EncryptedModelName matches the selected model, carrying its own
    // endpoint, API key, and insecure-TLS flag.
    var settings = new SettingsService();
    settings.Settings.Llm.Service = "vllm";
    settings.Settings.Llm.Model = "corp-vllm-model";
    settings.Settings.Llm.VllmEndpoint = "https://fallback.internal";
    settings.Settings.Llm.VllmApiKey = "fallback-key";
    settings.Settings.Llm.VllmAllowInsecureTls = false;
    settings.Settings.Llm.EncryptionEnabled = false;
    settings.Settings.Llm.RegisteredModels =
    [
        new RegisteredModel
        {
            Alias = "corp",
            EncryptedModelName = "corp-vllm-model",
            Service = "vllm",
            Endpoint = "https://model.internal",
            ApiKey = "model-key",
            AllowInsecureTls = true
        }
    ];

    using var llm = new LlmService(settings);

    // Act: invoke the private resolver via reflection.
    var method = typeof(LlmService).GetMethod("ResolveServerInfo", BindingFlags.NonPublic | BindingFlags.Instance);
    method.Should().NotBeNull();

    var tuple = ((string Endpoint, string ApiKey, bool AllowInsecureTls))method!.Invoke(llm, null)!;

    // Assert: the per-model override wins over the global fallback values.
    tuple.Endpoint.Should().Be("https://model.internal");
    tuple.ApiKey.Should().Be("model-key");
    tuple.AllowInsecureTls.Should().BeTrue(); // BeTrue() for consistency with the other tests in this class
}
|
||||
|
||||
[Fact]
public void ResolveServerInfo_VllmEncryptedApiKey_ShouldBeDecryptedAtRuntime()
{
    // With encryption enabled the stored key is ciphertext; the resolver is
    // expected to hand back the decrypted plain-text value at runtime.
    var settings = new SettingsService();
    var llmConfig = settings.Settings.Llm;
    llmConfig.Service = "vllm";
    llmConfig.Model = "corp-vllm-model";
    llmConfig.VllmEndpoint = "https://secure.internal";
    llmConfig.EncryptionEnabled = true;
    llmConfig.VllmApiKey = CryptoService.EncryptIfEnabled("enc-key-value", true);

    using var llm = new LlmService(settings);

    // Reach into the private resolver via reflection.
    var resolver = typeof(LlmService).GetMethod("ResolveServerInfo", BindingFlags.NonPublic | BindingFlags.Instance);
    resolver.Should().NotBeNull();

    var resolved = ((string Endpoint, string ApiKey, bool AllowInsecureTls))resolver!.Invoke(llm, null)!;

    resolved.Endpoint.Should().Be("https://secure.internal");
    resolved.ApiKey.Should().Be("enc-key-value");
}
|
||||
|
||||
[Fact]
public void ResolveServerInfo_RegisteredModelInsecureFalse_GlobalInsecureTrue_ShouldRemainTrue()
{
    // The registered model disables insecure TLS, but the global flag is on;
    // this test pins down that the resolved flag stays true in that case.
    var settings = new SettingsService();
    var llmConfig = settings.Settings.Llm;
    llmConfig.Service = "vllm";
    llmConfig.Model = "corp-vllm-model";
    llmConfig.VllmEndpoint = "https://fallback.internal";
    llmConfig.VllmApiKey = "fallback-key";
    llmConfig.VllmAllowInsecureTls = true;
    llmConfig.EncryptionEnabled = false;
    llmConfig.RegisteredModels =
    [
        new RegisteredModel
        {
            Alias = "corp",
            EncryptedModelName = "corp-vllm-model",
            Service = "vllm",
            Endpoint = "https://model.internal",
            ApiKey = "model-key",
            AllowInsecureTls = false
        }
    ];

    using var llm = new LlmService(settings);

    // Reach into the private resolver via reflection.
    var resolver = typeof(LlmService).GetMethod("ResolveServerInfo", BindingFlags.NonPublic | BindingFlags.Instance);
    resolver.Should().NotBeNull();

    var resolved = ((string Endpoint, string ApiKey, bool AllowInsecureTls))resolver!.Invoke(llm, null)!;

    resolved.AllowInsecureTls.Should().BeTrue();
}
|
||||
|
||||
[Fact]
public void GetRuntimeConnectionSnapshot_Vllm_ShouldExposeMaskedRuntimeInputs()
{
    // Snapshot of an unencrypted vLLM configuration: the snapshot should echo
    // service/model/endpoint, surface the TLS flag, and report key presence.
    var settings = new SettingsService();
    var llmConfig = settings.Settings.Llm;
    llmConfig.Service = "vllm";
    llmConfig.Model = "corp-vllm-model";
    llmConfig.VllmEndpoint = "https://model.internal:8443";
    llmConfig.VllmApiKey = "model-key";
    llmConfig.VllmAllowInsecureTls = true;
    llmConfig.EncryptionEnabled = false;

    using var llm = new LlmService(settings);

    var snapshot = llm.GetRuntimeConnectionSnapshot();

    snapshot.Service.Should().Be("vllm");
    snapshot.Model.Should().Be("corp-vllm-model");
    snapshot.Endpoint.Should().Be("https://model.internal:8443");
    snapshot.AllowInsecureTls.Should().BeTrue();
    snapshot.HasApiKey.Should().BeTrue();
}
|
||||
|
||||
[Fact]
public void GetRuntimeConnectionSnapshot_OllamaWithoutKey_ShouldReportNoKey()
{
    // An Ollama configuration with an empty API key: the snapshot should
    // report HasApiKey = false and default the TLS flag to false.
    var settings = new SettingsService();
    var llmConfig = settings.Settings.Llm;
    llmConfig.Service = "ollama";
    llmConfig.Model = "qwen2.5-coder";
    llmConfig.OllamaEndpoint = "http://localhost:11434";
    llmConfig.OllamaApiKey = "";
    llmConfig.EncryptionEnabled = false;

    using var llm = new LlmService(settings);

    var snapshot = llm.GetRuntimeConnectionSnapshot();

    snapshot.Service.Should().Be("ollama");
    snapshot.Model.Should().Be("qwen2.5-coder");
    snapshot.Endpoint.Should().Be("http://localhost:11434");
    snapshot.AllowInsecureTls.Should().BeFalse();
    snapshot.HasApiKey.Should().BeFalse();
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user