채팅 탭 SSE 스트리밍 응답 경로 복구 및 문서 반영
Some checks failed
Release Gate / gate (push) Has been cancelled

- Chat 탭 직접 대화 경로가 최종 응답만 한 번에 표시하던 문제를 수정하고 LlmService 스트리밍 경로를 실제 UI에 연결함
- AxAgentExecutionEngine에서 비에이전트 채팅이 스트리밍 전송을 사용할 수 있도록 실행 모드를 조정함
- ChatWindow에서 기존 스트리밍 컨테이너와 타이핑 타이머를 실제 전송 루프에 연결해 타자 치듯 점진적으로 응답이 보이게 함
- README와 DEVELOPMENT 문서에 2026-04-07 02:23 (KST) 기준 변경 이력 반영
- 검증: dotnet build src/AxCopilot/AxCopilot.csproj -c Release -v minimal -p:OutputPath=bin\verify\ -p:IntermediateOutputPath=obj\verify\ (경고 0 / 오류 0)
This commit is contained in:
2026-04-07 08:03:37 +09:00
parent 8617f66496
commit 23b2352637
4 changed files with 52 additions and 8 deletions

View File

@@ -49,7 +49,7 @@ public sealed class AxAgentExecutionEngine
if (string.Equals(runTab, "Code", StringComparison.OrdinalIgnoreCase))
return new ExecutionMode(true, false, codeSystemPrompt);
return new ExecutionMode(false, false, null);
return new ExecutionMode(false, streamingEnabled, null);
}
public PreparedExecution PrepareExecution(

View File

@@ -5593,14 +5593,45 @@ public partial class ChatWindow : Window
_elapsedTimer.Start();
SetStatus(busyStatus, spinning: true);
StackPanel? streamingContainer = null;
TextBlock? streamingText = null;
try
{
var response = await _chatEngine.ExecutePreparedAsync(
preparedExecution,
(messages, token) => RunAgentLoopAsync(runTab, rememberTab, conversation, messages, token),
(messages, token) => _llm.SendAsync(messages.ToList(), token),
_streamCts.Token);
assistantContent = response;
if (!preparedExecution.Mode.UseAgentLoop && preparedExecution.Mode.UseStreamingTransport)
{
streamingContainer = CreateStreamingContainer(out var createdStreamText);
streamingText = createdStreamText;
_activeStreamText = streamingText;
_cachedStreamContent = "";
_displayedLength = 0;
_cursorVisible = true;
MessagePanel.Children.Add(streamingContainer);
ForceScrollToEnd();
_cursorTimer.Start();
_typingTimer.Start();
await foreach (var chunk in _llm.StreamAsync(preparedExecution.Messages.ToList(), _streamCts.Token))
{
if (string.IsNullOrEmpty(chunk))
continue;
assistantContent += chunk;
_cachedStreamContent = assistantContent;
if (_activeStreamText != null && _displayedLength == 0)
_activeStreamText.Text = _cursorVisible ? "\u258c" : " ";
}
}
else
{
var response = await _chatEngine.ExecutePreparedAsync(
preparedExecution,
(messages, token) => RunAgentLoopAsync(runTab, rememberTab, conversation, messages, token),
(messages, token) => _llm.SendAsync(messages.ToList(), token),
_streamCts.Token);
assistantContent = response;
}
responseElapsedMs = Math.Max(0, (long)(DateTime.UtcNow - _streamStartTime).TotalMilliseconds);
assistantMetaRunId = _appState.AgentRun.RunId;
var usage = _llm.LastTokenUsage;
@@ -5618,8 +5649,11 @@ public partial class ChatWindow : Window
}
}
StopAiIconPulse();
_cachedStreamContent = response;
_cachedStreamContent = assistantContent;
draftSucceeded = true;
if (streamingContainer != null && streamingText != null)
FinalizeStreamingContainer(streamingContainer, streamingText, assistantContent);
}
catch (OperationCanceledException)
{
@@ -5627,6 +5661,8 @@ public partial class ChatWindow : Window
assistantContent = finalized.Content;
draftCancelled = finalized.Cancelled;
draftFailure = finalized.FailureReason;
if (streamingContainer != null && streamingText != null)
FinalizeStreamingContainer(streamingContainer, streamingText, assistantContent);
}
catch (Exception ex)
{
@@ -5634,6 +5670,8 @@ public partial class ChatWindow : Window
assistantContent = finalized.Content;
draftFailure = finalized.FailureReason;
ShowToast("실패한 요청은 작업 요약에서 다시 시도할 수 있습니다.", "\uE783", 2600);
if (streamingContainer != null && streamingText != null)
FinalizeStreamingContainer(streamingContainer, streamingText, assistantContent);
}
finally
{