- AxAgentExecutionEngine에 ExecutePreparedAsync를 추가해 AgentLoop와 일반 LLM 호출 선택을 엔진이 담당하도록 정리함
- SendMessageAsync와 SendRegenerateAsync가 공통 실행 진입점을 사용하도록 바꿔 창 코드의 중복 분기를 줄임
- 검증: dotnet build src/AxCopilot/AxCopilot.csproj -c Release -v minimal -p:OutputPath=bin\verify\ -p:IntermediateOutputPath=obj\verify\ — 경고 0 / 오류 0
This commit is contained in:
@@ -74,6 +74,17 @@ public sealed class AxAgentExecutionEngine
|
||||
return new PreparedExecution(mode, promptStack, preparedTurn.Messages);
|
||||
}
|
||||
|
||||
public Task<string> ExecutePreparedAsync(
|
||||
PreparedExecution prepared,
|
||||
Func<IReadOnlyList<ChatMessage>, CancellationToken, Task<string>> agentLoopRunner,
|
||||
Func<IReadOnlyList<ChatMessage>, CancellationToken, Task<string>> llmRunner,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
return prepared.Mode.UseAgentLoop
|
||||
? agentLoopRunner(prepared.Messages, cancellationToken)
|
||||
: llmRunner(prepared.Messages, cancellationToken);
|
||||
}
|
||||
|
||||
public PreparedTurn PrepareTurn(
|
||||
ChatConversation conversation,
|
||||
IEnumerable<string?> systemPrompts,
|
||||
|
||||
@@ -8408,20 +8408,14 @@ public partial class ChatWindow : Window
|
||||
}
|
||||
}
|
||||
|
||||
if (executionMode.UseAgentLoop)
|
||||
{
|
||||
var response = await RunAgentLoopAsync(runTab, originTab, conv, sendMessages, _streamCts!.Token);
|
||||
assistantContent = response;
|
||||
StopAiIconPulse();
|
||||
_cachedStreamContent = response;
|
||||
}
|
||||
else
|
||||
{
|
||||
var response = await _llm.SendAsync(sendMessages, _streamCts.Token);
|
||||
assistantContent = response;
|
||||
StopAiIconPulse();
|
||||
_cachedStreamContent = response;
|
||||
}
|
||||
var response = await _chatEngine.ExecutePreparedAsync(
|
||||
preparedExecution,
|
||||
(messages, token) => RunAgentLoopAsync(runTab, originTab, conv, messages, token),
|
||||
(messages, token) => _llm.SendAsync(messages.ToList(), token),
|
||||
_streamCts.Token);
|
||||
assistantContent = response;
|
||||
StopAiIconPulse();
|
||||
_cachedStreamContent = response;
|
||||
|
||||
draftSucceeded = true;
|
||||
}
|
||||
@@ -10981,9 +10975,11 @@ public partial class ChatWindow : Window
|
||||
var executionMode = preparedExecution.Mode;
|
||||
sendMessages = preparedExecution.Messages;
|
||||
|
||||
var response = executionMode.UseAgentLoop
|
||||
? await RunAgentLoopAsync(runTab, runTab, conv, sendMessages, _streamCts.Token)
|
||||
: await _llm.SendAsync(sendMessages, _streamCts.Token);
|
||||
var response = await _chatEngine.ExecutePreparedAsync(
|
||||
preparedExecution,
|
||||
(messages, token) => RunAgentLoopAsync(runTab, runTab, conv, messages, token),
|
||||
(messages, token) => _llm.SendAsync(messages.ToList(), token),
|
||||
_streamCts.Token);
|
||||
assistantContent = response;
|
||||
StopAiIconPulse();
|
||||
_cachedStreamContent = response;
|
||||
|
||||
Reference in New Issue
Block a user