- Agent Compare 기준으로 런처 빠른 실행 칩, 검색 히스토리 탐색, 선택 항목 미리보기 패널을 현재 런처에 이식 - 하단 위젯 바, QuickLook(F3), 화면 OCR(F4), 관련 서비스/partial 파일을 현재 LauncherWindow/LauncherViewModel 구조에 연결 - UsageRankingService 상위 항목 조회와 SearchHistoryService를 추가해 실행 상위 경로/검색 기록이 실제 런처 동작에 반영되도록 정리 - README.md, docs/DEVELOPMENT.md에 이식 범위와 검증 결과를 2026-04-05 11:58 (KST) 기준으로 기록 검증 결과 - dotnet build src/AxCopilot/AxCopilot.csproj -c Release -v minimal -p:OutputPath=bin\\verify\\ -p:IntermediateOutputPath=obj\\verify\\ 경고 0 / 오류 0
116 lines
3.2 KiB
C#
using System;
using System.Linq;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;

using AxCopilot.Models;
namespace AxCopilot.Services;
/// <summary>
/// Singleton that periodically pings the configured Ollama, LLM, and MCP
/// endpoints and raises <see cref="StatusChanged"/> whenever any of the
/// online flags flips.
/// </summary>
internal sealed class ServerStatusService
{
    /// <summary>Process-wide singleton instance.</summary>
    public static readonly ServerStatusService Instance = new();

    // One shared HttpClient for all probes (a per-request instance would leak
    // sockets). The short timeout keeps a dead endpoint from stalling a sweep.
    private static readonly HttpClient Http = new()
    {
        Timeout = TimeSpan.FromMilliseconds(1500)
    };

    /// <summary>True when the Ollama endpoint answered the last probe.</summary>
    public bool OllamaOnline { get; private set; }

    /// <summary>True when the active LLM endpoint answered the last probe.</summary>
    public bool LlmOnline { get; private set; }

    /// <summary>True when the first enabled MCP server answered the last probe.</summary>
    public bool McpOnline { get; private set; }

    /// <summary>Display name of the probed MCP server; "MCP" when none is configured.</summary>
    public string McpName { get; private set; } = "MCP";

    /// <summary>
    /// Raised when any online flag changes. NOTE(review): fires on a
    /// thread-pool thread — subscribers that touch UI must marshal themselves.
    /// </summary>
    public event EventHandler? StatusChanged;

    private System.Threading.Timer? _timer;
    private string _ollamaEndpoint = "http://localhost:11434";
    private string _llmEndpoint = "";
    private string _llmService = "Ollama";
    private string _mcpEndpoint = "";

    private ServerStatusService() { }

    /// <summary>
    /// Loads endpoints from <paramref name="settings"/> and starts the
    /// 15-second polling timer. Calling again while already running only
    /// refreshes the endpoints.
    /// </summary>
    public void Start(AppSettings? settings = null)
    {
        LoadEndpoints(settings);
        if (_timer != null)
            return;

        // Synchronous lambda + guarded fire-and-forget instead of an async
        // lambda: an async void timer callback would let an exception from a
        // StatusChanged subscriber take down the whole process.
        _timer = new System.Threading.Timer(_ => _ = CheckSafelyAsync(), null, 0, 15000);
    }

    /// <summary>Stops polling. Safe to call when not started.</summary>
    public void Stop()
    {
        _timer?.Dispose();
        _timer = null;
    }

    /// <summary>Re-reads endpoints and triggers an immediate out-of-band probe.</summary>
    public void Refresh(AppSettings? settings = null)
    {
        LoadEndpoints(settings);
        _ = CheckSafelyAsync();
    }

    // Pulls the Ollama / LLM / MCP endpoints out of settings. When settings
    // (or settings.Llm) is null the previously loaded values are kept.
    private void LoadEndpoints(AppSettings? settings)
    {
        var llm = settings?.Llm;
        if (llm == null)
            return;

        _ollamaEndpoint = llm.OllamaEndpoint?.TrimEnd('/') ?? "http://localhost:11434";
        _llmService = llm.Service ?? "Ollama";
        // Only vLLM uses a dedicated endpoint; every other service rides on Ollama.
        _llmEndpoint = string.Equals(_llmService, "vLLM", StringComparison.OrdinalIgnoreCase)
            ? (llm.VllmEndpoint?.TrimEnd('/') ?? "")
            : _ollamaEndpoint;

        // Probe only the first enabled MCP server that has a URL configured.
        var mcp = llm.McpServers?.FirstOrDefault(s => s.Enabled && !string.IsNullOrWhiteSpace(s.Url));
        if (mcp != null)
        {
            McpName = mcp.Name;
            _mcpEndpoint = mcp.Url?.TrimEnd('/') ?? "";
        }
        else
        {
            McpName = "MCP";
            _mcpEndpoint = "";
        }
    }

    // Wrapper that guarantees a timer tick never surfaces an unhandled
    // exception (e.g. from a throwing StatusChanged subscriber).
    private async Task CheckSafelyAsync()
    {
        try
        {
            await CheckAllAsync().ConfigureAwait(false);
        }
        catch
        {
            // Best-effort status polling: never crash the host; state will
            // correct itself on the next 15-second sweep.
        }
    }

    // Probes all three endpoints in parallel and raises StatusChanged when
    // any result differs from the previous sweep.
    private async Task CheckAllAsync()
    {
        var ollamaTask = PingAsync(_ollamaEndpoint + "/api/version");
        // When the LLM endpoint is Ollama itself, reuse the same probe task.
        var llmTask = string.IsNullOrEmpty(_llmEndpoint) || _llmEndpoint == _ollamaEndpoint
            ? ollamaTask
            : PingAsync(_llmEndpoint);
        var mcpTask = string.IsNullOrEmpty(_mcpEndpoint)
            ? Task.FromResult(false)
            : PingAsync(_mcpEndpoint);

        await Task.WhenAll(ollamaTask, llmTask, mcpTask).ConfigureAwait(false);

        // Reading .Result is safe here: WhenAll has completed all three tasks,
        // so these accesses cannot block or deadlock.
        var changed = OllamaOnline != ollamaTask.Result ||
                      LlmOnline != llmTask.Result ||
                      McpOnline != mcpTask.Result;

        OllamaOnline = ollamaTask.Result;
        LlmOnline = llmTask.Result;
        McpOnline = mcpTask.Result;

        if (changed)
            StatusChanged?.Invoke(this, EventArgs.Empty);
    }

    // GETs the URL and treats any success code — or any status below HTTP 500 —
    // as "online": a 404 still proves the server process is up and responding.
    private static async Task<bool> PingAsync(string url)
    {
        if (string.IsNullOrWhiteSpace(url))
            return false;

        try
        {
            var resp = await Http.GetAsync(url).ConfigureAwait(false);
            return resp.IsSuccessStatusCode || (int)resp.StatusCode < 500;
        }
        catch
        {
            // Connection refused / timeout / DNS failure all mean offline.
            return false;
        }
    }
}