IBM vLLM 도구 호출 스트리밍과 모델 프로파일 기반 실행 정책 강화
Some checks failed
Release Gate / gate (push) Has been cancelled

- IBM 배포형 도구 호출 바디에 프로파일 기반 tool temperature를 적용하고 tool_call_strict 프로파일에서 더 직접적인 tool-only 지시를 추가함
- IBM 경로가 tool_choice를 거부할 때 tool_choice만 제거한 대체 강제 재시도 경로를 추가함
- OpenAI/vLLM tool-use 응답을 SSE로 수신하고 delta.tool_calls를 부분 조립해 도구 호출을 더 빨리 감지하도록 변경함
- read-only 도구 조기 실행과 결과 재사용 경로를 도입해 Cowork/Code 도구 착수 속도를 개선함
- README와 DEVELOPMENT 문서를 2026-04-08 11:14(KST) 기준으로 갱신함

검증
- dotnet build src/AxCopilot/AxCopilot.csproj -c Release -v minimal -p:OutputPath=bin\verify\ -p:IntermediateOutputPath=obj\verify\
- 경고 0 / 오류 0
This commit is contained in:
2026-04-08 16:48:11 +09:00
parent a2c952879d
commit 90ef3400f6
20 changed files with 1231 additions and 241 deletions

View File

@@ -1,5 +1,6 @@
using System.Net.Http;
using System.Runtime.CompilerServices;
using System.IO;
using System.Text;
using System.Text.Json;
using AxCopilot.Models;
@@ -22,8 +23,15 @@ public partial class LlmService
public string ToolName { get; init; } = ""; // tool_use 타입일 때
public string ToolId { get; init; } = ""; // tool_use ID
public JsonElement? ToolInput { get; init; } // tool_use 파라미터
public string? ResolvedToolName { get; set; }
public Task<ToolPrefetchResult?>? PrefetchedExecutionTask { get; set; }
}
/// <summary>
/// Result of a speculatively pre-executed (prefetched) tool call, captured while the
/// LLM response stream is still being read so the agent can reuse it instead of
/// re-running the tool.
/// </summary>
/// <param name="Result">Execution result of the tool (project type <c>Agent.ToolResult</c>).</param>
/// <param name="ElapsedMilliseconds">Wall-clock duration of the tool execution, in milliseconds.</param>
/// <param name="ResolvedToolName">Tool name after alias/name resolution when it differs from the requested name; otherwise null.</param>
public sealed record ToolPrefetchResult(
Agent.ToolResult Result,
long ElapsedMilliseconds,
string? ResolvedToolName = null);
/// <summary>도구 정의를 포함하여 LLM에 요청하고, 텍스트 + tool_use 블록을 파싱하여 반환합니다.</summary>
/// <param name="forceToolCall">
/// true이면 <c>tool_choice: "required"</c>를 요청에 추가하여 모델이 반드시 도구를 호출하도록 강제합니다.
@@ -34,7 +42,8 @@ public partial class LlmService
List<ChatMessage> messages,
IReadOnlyCollection<IAgentTool> tools,
CancellationToken ct = default,
bool forceToolCall = false)
bool forceToolCall = false,
Func<ContentBlock, Task<ToolPrefetchResult?>>? prefetchToolCallAsync = null)
{
var activeService = ResolveService();
EnsureOperationModeAllowsLlmService(activeService);
@@ -42,7 +51,7 @@ public partial class LlmService
{
"sigmoid" => await SendSigmoidWithToolsAsync(messages, tools, ct),
"gemini" => await SendGeminiWithToolsAsync(messages, tools, ct),
"ollama" or "vllm" => await SendOpenAiWithToolsAsync(messages, tools, ct, forceToolCall),
"ollama" or "vllm" => await SendOpenAiWithToolsAsync(messages, tools, ct, forceToolCall, prefetchToolCallAsync),
_ => throw new NotSupportedException($"서비스 '{activeService}'는 아직 Function Calling을 지원하지 않습니다.")
};
}
@@ -435,7 +444,8 @@ public partial class LlmService
private async Task<List<ContentBlock>> SendOpenAiWithToolsAsync(
List<ChatMessage> messages, IReadOnlyCollection<IAgentTool> tools, CancellationToken ct,
bool forceToolCall = false)
bool forceToolCall = false,
Func<ContentBlock, Task<ToolPrefetchResult?>>? prefetchToolCallAsync = null)
{
var activeService = ResolveService();
@@ -453,7 +463,7 @@ public partial class LlmService
string url;
if (isIbmDeployment)
url = BuildIbmDeploymentChatUrl(endpoint, stream: false);
url = BuildIbmDeploymentChatUrl(endpoint, stream: true);
else if (activeService.ToLowerInvariant() == "ollama")
url = endpoint.TrimEnd('/') + "/api/chat";
else
@@ -467,13 +477,28 @@ public partial class LlmService
// CP4D 또는 Bearer 인증 적용
await ApplyAuthHeaderAsync(req, ct);
using var resp = await SendWithTlsAsync(req, allowInsecureTls, ct);
using var resp = await SendWithTlsAsync(req, allowInsecureTls, ct, HttpCompletionOption.ResponseHeadersRead);
if (!resp.IsSuccessStatusCode)
{
var errBody = await resp.Content.ReadAsStringAsync(ct);
var detail = ExtractErrorDetail(errBody);
LogService.Warn($"[ToolUse] {activeService} API 오류 ({resp.StatusCode}): {errBody}");
if (isIbmDeployment && forceToolCall && (int)resp.StatusCode == 400)
{
LogService.Warn("[ToolUse] IBM 배포형 경로에서 tool_choice가 거부되어 대체 강제 전략으로 재시도합니다.");
var fallbackBody = BuildIbmToolBody(messages, tools, forceToolCall: true, useToolChoice: false);
var fallbackJson = JsonSerializer.Serialize(fallbackBody);
using var retryReq = new HttpRequestMessage(HttpMethod.Post, url)
{
Content = new StringContent(fallbackJson, Encoding.UTF8, "application/json")
};
await ApplyAuthHeaderAsync(retryReq, ct);
using var retryResp = await SendWithTlsAsync(retryReq, allowInsecureTls, ct, HttpCompletionOption.ResponseHeadersRead);
if (retryResp.IsSuccessStatusCode)
return await ReadOpenAiToolBlocksFromStreamAsync(retryResp, true, prefetchToolCallAsync, ct);
}
// 400 BadRequest → 도구 없이 일반 응답으로 폴백 시도
if ((int)resp.StatusCode == 400)
throw new ToolCallNotSupportedException(
@@ -482,83 +507,103 @@ public partial class LlmService
throw new HttpRequestException($"{activeService} API 오류 ({resp.StatusCode}): {detail}");
}
var rawResp = await resp.Content.ReadAsStringAsync(ct);
return await ReadOpenAiToolBlocksFromStreamAsync(resp, isIbmDeployment, prefetchToolCallAsync, ct);
}
// SSE 형식 응답 사전 처리 (stream:false 요청에도 SSE로 응답하는 경우)
var respJson = ExtractJsonFromSseIfNeeded(rawResp);
/// <summary>
/// Qwen/vLLM 등이 tool_calls 대신 텍스트로 도구 호출을 출력하는 경우를 파싱합니다.
/// 지원 패턴:
/// 1. &lt;tool_call&gt;{"name":"...", "arguments":{...}}&lt;/tool_call&gt;
/// 2. Qwen3 &lt;tool_call&gt;\n{"name":"...", "arguments":{...}}\n&lt;/tool_call&gt;
/// 3. 여러 건의 연속 tool_call 태그
/// </summary>
private static List<ContentBlock> TryExtractToolCallsFromText(string text)
{
var results = new List<ContentBlock>();
if (string.IsNullOrWhiteSpace(text)) return results;
// 비-JSON 응답(IBM 도구 호출 미지원 등) → ToolCallNotSupportedException으로 폴백 트리거
// 패턴 1: <tool_call>...</tool_call> 태그 (Qwen 계열 기본 출력)
var tagPattern = new System.Text.RegularExpressions.Regex(
@"<\s*tool_call\s*>\s*(\{[\s\S]*?\})\s*<\s*/\s*tool_call\s*>",
System.Text.RegularExpressions.RegexOptions.IgnoreCase);
foreach (System.Text.RegularExpressions.Match m in tagPattern.Matches(text))
{
var trimmedResp = respJson.TrimStart();
if (!trimmedResp.StartsWith('{') && !trimmedResp.StartsWith('['))
throw new ToolCallNotSupportedException(
$"vLLM 응답이 JSON이 아닙니다 (도구 호출 미지원 가능성): {respJson[..Math.Min(120, respJson.Length)]}");
var block = TryParseToolCallJson(m.Groups[1].Value);
if (block != null) results.Add(block);
}
using var doc = JsonDocument.Parse(respJson);
var root = doc.RootElement;
TryParseOpenAiUsage(root);
var blocks = new List<ContentBlock>();
// Ollama 형식: root.message
// OpenAI 형식: root.choices[0].message
JsonElement message;
if (root.TryGetProperty("message", out var ollamaMsg))
message = ollamaMsg;
else if (root.TryGetProperty("choices", out var choices) && choices.GetArrayLength() > 0)
message = choices[0].TryGetProperty("message", out var choiceMsg) ? choiceMsg : default;
else
return blocks;
// 텍스트 응답
if (message.TryGetProperty("content", out var content))
// 패턴 2: ✿FUNCTION✿ 또는 <|tool_call|> (일부 Qwen 변형)
if (results.Count == 0)
{
var text = content.GetString();
if (!string.IsNullOrWhiteSpace(text))
blocks.Add(new ContentBlock { Type = "text", Text = text });
}
// 도구 호출 (tool_calls 배열)
if (message.TryGetProperty("tool_calls", out var toolCalls))
{
foreach (var tc in toolCalls.EnumerateArray())
var fnPattern = new System.Text.RegularExpressions.Regex(
@"✿FUNCTION✿\s*(\w+)\s*\n\s*(\{[\s\S]*?\})\s*(?:✿|$)",
System.Text.RegularExpressions.RegexOptions.IgnoreCase);
foreach (System.Text.RegularExpressions.Match m in fnPattern.Matches(text))
{
if (!tc.TryGetProperty("function", out var func)) continue;
// arguments: 표준(OpenAI)은 JSON 문자열, Ollama/qwen 등은 JSON 객체를 직접 반환하기도 함
JsonElement? parsedArgs = null;
if (func.TryGetProperty("arguments", out var argsEl))
{
if (argsEl.ValueKind == JsonValueKind.String)
{
// 표준: 문자열로 감싸진 JSON → 파싱
try
{
using var argsDoc = JsonDocument.Parse(argsEl.GetString() ?? "{}");
parsedArgs = argsDoc.RootElement.Clone();
}
catch { parsedArgs = null; }
}
else if (argsEl.ValueKind == JsonValueKind.Object || argsEl.ValueKind == JsonValueKind.Array)
{
// Ollama/qwen 방식: 이미 JSON 객체 — 그대로 사용
parsedArgs = argsEl.Clone();
}
}
blocks.Add(new ContentBlock
{
Type = "tool_use",
ToolName = func.TryGetProperty("name", out var fnm) ? fnm.GetString() ?? "" : "",
ToolId = tc.TryGetProperty("id", out var id) ? id.GetString() ?? Guid.NewGuid().ToString("N")[..12] : Guid.NewGuid().ToString("N")[..12],
ToolInput = parsedArgs,
});
var block = TryParseToolCallJsonWithName(m.Groups[1].Value, m.Groups[2].Value);
if (block != null) results.Add(block);
}
}
return blocks;
// 패턴 3: JSON 객체가 직접 출력된 경우 ({"name":"tool_name","arguments":{...}})
if (results.Count == 0)
{
var jsonPattern = new System.Text.RegularExpressions.Regex(
@"\{\s*""name""\s*:\s*""(\w+)""\s*,\s*""arguments""\s*:\s*(\{[\s\S]*?\})\s*\}");
foreach (System.Text.RegularExpressions.Match m in jsonPattern.Matches(text))
{
var block = TryParseToolCallJsonWithName(m.Groups[1].Value, m.Groups[2].Value);
if (block != null) results.Add(block);
}
}
return results;
}
/// <summary>{"name":"...", "arguments":{...}} 형식 JSON을 ContentBlock으로 변환.</summary>
/// <summary>
/// Converts a <c>{"name":"...", "arguments":{...}}</c> JSON payload (as emitted inside
/// text-form tool-call tags) into a tool_use <c>ContentBlock</c>.
/// Returns null when the JSON is unparseable or has no tool name.
/// </summary>
private static ContentBlock? TryParseToolCallJson(string json)
{
    try
    {
        using var parsed = JsonDocument.Parse(json);
        var rootEl = parsed.RootElement;

        var toolName = "";
        if (rootEl.TryGetProperty("name", out var nameEl))
            toolName = nameEl.GetString() ?? "";
        if (string.IsNullOrEmpty(toolName))
            return null;

        // Accept either "arguments" (standard) or "parameters" (some model variants).
        JsonElement? toolArgs =
            rootEl.TryGetProperty("arguments", out var argsEl) ? argsEl.Clone()
            : rootEl.TryGetProperty("parameters", out var paramsEl) ? paramsEl.Clone()
            : null;

        return new ContentBlock
        {
            Type = "tool_use",
            ToolName = toolName,
            ToolId = $"text_fc_{Guid.NewGuid():N}"[..16],
            ToolInput = toolArgs,
        };
    }
    catch
    {
        // Malformed JSON inside the tag: treat as "no tool call here".
        return null;
    }
}
/// <summary>이름과 arguments JSON이 별도로 주어진 경우.</summary>
/// <summary>
/// Builds a tool_use <c>ContentBlock</c> from a tool name and a separately-captured
/// arguments JSON string. Returns null when the name is blank or the arguments
/// are not valid JSON.
/// </summary>
private static ContentBlock? TryParseToolCallJsonWithName(string name, string argsJson)
{
    if (string.IsNullOrWhiteSpace(name))
        return null;

    JsonElement? parsedInput;
    try
    {
        using var argsDoc = JsonDocument.Parse(argsJson);
        parsedInput = argsDoc.RootElement.Clone();
    }
    catch
    {
        // Arguments must be well-formed JSON; otherwise this is not a usable call.
        return null;
    }

    return new ContentBlock
    {
        Type = "tool_use",
        ToolName = name.Trim(),
        ToolId = $"text_fc_{Guid.NewGuid():N}"[..16],
        ToolInput = parsedInput,
    };
}
private object BuildOpenAiToolBody(List<ChatMessage> messages, IReadOnlyCollection<IAgentTool> tools, bool forceToolCall = false)
@@ -688,7 +733,7 @@ public partial class LlmService
["model"] = activeModel,
["messages"] = msgs,
["tools"] = toolDefs,
["stream"] = false,
["stream"] = true,
["temperature"] = ResolveToolTemperature(),
["max_tokens"] = ResolveOpenAiCompatibleMaxTokens(),
["parallel_tool_calls"] = executionPolicy.EnableParallelReadBatch,
@@ -704,8 +749,16 @@ public partial class LlmService
}
/// <summary>IBM 배포형 /text/chat 전용 도구 바디. parameters 래퍼 사용, model 필드 없음.</summary>
private object BuildIbmToolBody(List<ChatMessage> messages, IReadOnlyCollection<IAgentTool> tools, bool forceToolCall = false)
private object BuildIbmToolBody(
List<ChatMessage> messages,
IReadOnlyCollection<IAgentTool> tools,
bool forceToolCall = false,
bool useToolChoice = true)
{
var executionPolicy = GetActiveExecutionPolicy();
var strictToolOnlyDirective =
forceToolCall &&
string.Equals(executionPolicy.Key, "tool_call_strict", StringComparison.OrdinalIgnoreCase);
var msgs = new List<object>();
// 시스템 프롬프트
@@ -778,6 +831,15 @@ public partial class LlmService
msgs.Add(new { role = m.Role == "assistant" ? "assistant" : "user", content = m.Content });
}
if (strictToolOnlyDirective)
{
msgs.Add(new
{
role = "user",
content = "[TOOL_ONLY] 설명하지 말고 지금 즉시 tools 중 하나 이상을 호출하세요. 평문 응답 금지. 도구 호출만 하세요."
});
}
// OpenAI 호환 도구 정의 (형식 동일, watsonx에서 tools 필드 지원)
var toolDefs = tools.Select(t =>
{
@@ -806,7 +868,7 @@ public partial class LlmService
// IBM watsonx: parameters 래퍼 사용, model 필드 없음
// tool_choice: "required" 지원 여부는 배포 버전마다 다를 수 있으므로
// forceToolCall=true일 때 추가하되, 오류 시 상위에서 ToolCallNotSupportedException으로 폴백됨
if (forceToolCall)
if (forceToolCall && useToolChoice)
{
return new
{
@@ -815,7 +877,7 @@ public partial class LlmService
tool_choice = "required",
parameters = new
{
temperature = ResolveTemperature(),
temperature = ResolveToolTemperature(),
max_new_tokens = ResolveOpenAiCompatibleMaxTokens()
}
};
@@ -827,12 +889,314 @@ public partial class LlmService
tools = toolDefs,
parameters = new
{
temperature = ResolveTemperature(),
temperature = ResolveToolTemperature(),
max_new_tokens = ResolveOpenAiCompatibleMaxTokens()
}
};
}
/// <summary>
/// Accumulates streamed <c>delta.tool_calls</c> fragments (id, name, argument chunks)
/// for a single tool call until it can be emitted as a ContentBlock.
/// </summary>
private sealed class ToolCallAccumulator
{
// Position of this tool call in the delta.tool_calls array; used for final ordering.
public int Index { get; init; }
// Tool-call id; may arrive in a later chunk than the name.
public string Id { get; set; } = "";
// Function name; last non-null write wins across chunks.
public string Name { get; set; } = "";
// Argument JSON assembled from string fragments across chunks.
public StringBuilder Arguments { get; } = new();
// Set once this call has been turned into a ContentBlock, to prevent duplicates.
public bool Emitted { get; set; }
}
/// <summary>
/// Reads an OpenAI/vLLM-compatible SSE tool-use response, incrementally assembling
/// <c>delta.tool_calls</c> fragments so tool calls can be detected (and optionally
/// prefetched) before the stream ends. Also handles IBM deployment payload shapes
/// (<c>status</c>/<c>results</c>) and full non-delta messages embedded in the stream.
/// </summary>
/// <param name="resp">HTTP response whose body is the SSE stream (sent with ResponseHeadersRead).</param>
/// <param name="usesIbmDeploymentApi">True when IBM deployment /text/chat response shapes must be handled.</param>
/// <param name="prefetchToolCallAsync">Optional callback that speculatively executes a tool call as soon as it is fully assembled.</param>
/// <param name="ct">Cancellation token; cancellation exits the read loop.</param>
/// <returns>Text and tool_use content blocks parsed from the stream.</returns>
/// <exception cref="TimeoutException">No first chunk arrived within FirstChunkTimeout.</exception>
/// <exception cref="ToolCallNotSupportedException">IBM stream reported status=error.</exception>
private async Task<List<ContentBlock>> ReadOpenAiToolBlocksFromStreamAsync(
HttpResponseMessage resp,
bool usesIbmDeploymentApi,
Func<ContentBlock, Task<ToolPrefetchResult?>>? prefetchToolCallAsync,
CancellationToken ct)
{
using var stream = await resp.Content.ReadAsStreamAsync(ct);
using var reader = new StreamReader(stream);
var firstChunkReceived = false;
var textBuilder = new StringBuilder();
var toolAccumulators = new Dictionary<int, ToolCallAccumulator>();
var emittedTools = new List<ContentBlock>();
// IBM streams may resend cumulative generated_text; tracks the last snapshot for suffix-dedup.
var lastIbmGeneratedText = "";
// NOTE(review): StreamReader.EndOfStream can block synchronously on a network stream
// until data arrives, bypassing the timeout below — consider relying solely on
// ReadLineWithTimeoutAsync returning null. TODO confirm.
while (!reader.EndOfStream && !ct.IsCancellationRequested)
{
// Allow a longer wait for the first chunk (model warm-up) than between chunks.
var timeout = firstChunkReceived ? SubsequentChunkTimeout : FirstChunkTimeout;
var line = await ReadLineWithTimeoutAsync(reader, ct, timeout);
if (line == null)
{
// Timed out before anything arrived: hard failure. After data: treat as end-of-stream.
if (!firstChunkReceived)
throw new TimeoutException("도구 호출 응답 첫 청크를 받지 못했습니다.");
break;
}
// NOTE(review): only "data: " with a trailing space is recognized; the SSE spec also
// permits "data:" without a space — confirm the servers in use always emit the space.
if (string.IsNullOrWhiteSpace(line) || !line.StartsWith("data: ", StringComparison.Ordinal))
continue;
firstChunkReceived = true;
var data = line["data: ".Length..].Trim();
// OpenAI-style stream terminator.
if (string.Equals(data, "[DONE]", StringComparison.OrdinalIgnoreCase))
break;
using var doc = JsonDocument.Parse(data);
var root = doc.RootElement;
// Capture token-usage info when the chunk carries it.
TryParseOpenAiUsage(root);
// IBM deployment error payload: surface as "tool calls unsupported" so the caller
// can fall back to a plain (no-tools) request.
if (usesIbmDeploymentApi &&
root.TryGetProperty("status", out var statusEl) &&
string.Equals(statusEl.GetString(), "error", StringComparison.OrdinalIgnoreCase))
{
var detail = root.TryGetProperty("message", out var msgEl)
? msgEl.GetString()
: "IBM vLLM 도구 호출 응답 오류";
throw new ToolCallNotSupportedException(detail ?? "IBM vLLM 도구 호출 응답 오류");
}
// Ollama-style chunk with a complete root.message (text and/or tool_calls):
// consumed whole, nothing further to extract from this chunk.
if (TryExtractMessageToolBlocks(root, textBuilder, emittedTools))
continue;
// IBM results[] shape: generated_text is cumulative — append only the new suffix,
// or reset the buffer when the server restarts the text.
if (usesIbmDeploymentApi &&
root.TryGetProperty("results", out var resultsEl) &&
resultsEl.ValueKind == JsonValueKind.Array &&
resultsEl.GetArrayLength() > 0)
{
var first = resultsEl[0];
var generatedText = first.TryGetProperty("generated_text", out var generatedTextEl)
? generatedTextEl.GetString()
: first.TryGetProperty("output_text", out var outputTextEl)
? outputTextEl.GetString()
: null;
if (!string.IsNullOrEmpty(generatedText))
{
if (generatedText.StartsWith(lastIbmGeneratedText, StringComparison.Ordinal))
{
textBuilder.Append(generatedText[lastIbmGeneratedText.Length..]);
lastIbmGeneratedText = generatedText;
}
else
{
textBuilder.Clear();
textBuilder.Append(generatedText);
lastIbmGeneratedText = generatedText;
}
}
}
// Standard OpenAI streaming shape: choices[0].delta carries content and/or
// tool_calls fragments.
if (root.TryGetProperty("choices", out var choicesEl) &&
choicesEl.ValueKind == JsonValueKind.Array &&
choicesEl.GetArrayLength() > 0)
{
var firstChoice = choicesEl[0];
if (firstChoice.TryGetProperty("delta", out var deltaEl))
{
if (deltaEl.TryGetProperty("content", out var contentEl) &&
contentEl.ValueKind == JsonValueKind.String)
{
var chunk = contentEl.GetString();
if (!string.IsNullOrEmpty(chunk))
textBuilder.Append(chunk);
}
if (deltaEl.TryGetProperty("tool_calls", out var toolCallsEl) &&
toolCallsEl.ValueKind == JsonValueKind.Array)
{
foreach (var toolCallEl in toolCallsEl.EnumerateArray())
{
// Fragments of one call share an index; fall back to a fresh slot if absent.
var index = toolCallEl.TryGetProperty("index", out var indexEl) &&
indexEl.TryGetInt32(out var parsedIndex)
? parsedIndex
: toolAccumulators.Count;
if (!toolAccumulators.TryGetValue(index, out var acc))
{
acc = new ToolCallAccumulator { Index = index };
toolAccumulators[index] = acc;
}
if (toolCallEl.TryGetProperty("id", out var idEl) && idEl.ValueKind == JsonValueKind.String)
acc.Id = idEl.GetString() ?? acc.Id;
if (toolCallEl.TryGetProperty("function", out var functionEl))
{
if (functionEl.TryGetProperty("name", out var nameEl) && nameEl.ValueKind == JsonValueKind.String)
acc.Name = nameEl.GetString() ?? acc.Name;
if (functionEl.TryGetProperty("arguments", out var argumentsEl))
{
// OpenAI streams arguments as string fragments; some servers send
// a whole JSON object/array in one chunk — append its raw text.
if (argumentsEl.ValueKind == JsonValueKind.String)
acc.Arguments.Append(argumentsEl.GetString());
else if (argumentsEl.ValueKind is JsonValueKind.Object or JsonValueKind.Array)
acc.Arguments.Append(argumentsEl.GetRawText());
}
}
// Try an early emit (and prefetch) as soon as the arguments parse as complete JSON.
await TryEmitCompletedToolCallAsync(acc, emittedTools, prefetchToolCallAsync).ConfigureAwait(false);
}
}
}
// Some servers place a full (non-delta) message inside a choice.
if (firstChoice.TryGetProperty("message", out var messageEl))
TryExtractMessageToolBlocks(messageEl, textBuilder, emittedTools);
}
}
// End of stream: flush any accumulators not yet emitted, preserving call order.
foreach (var acc in toolAccumulators.Values.OrderBy(a => a.Index))
await TryEmitCompletedToolCallAsync(acc, emittedTools, prefetchToolCallAsync, forceEmit: true).ConfigureAwait(false);
var blocks = new List<ContentBlock>();
var text = textBuilder.ToString().Trim();
if (!string.IsNullOrWhiteSpace(text))
blocks.Add(new ContentBlock { Type = "text", Text = text });
blocks.AddRange(emittedTools);
// Fallback: no structured tool_calls were found — try to parse text-form tool
// calls (e.g. Qwen <tool_call> tags) out of the accumulated text.
if (!blocks.Any(b => b.Type == "tool_use"))
{
var textBlock = blocks.FirstOrDefault(b => b.Type == "text" && !string.IsNullOrWhiteSpace(b.Text));
if (textBlock != null)
{
var extracted = TryExtractToolCallsFromText(textBlock.Text);
if (extracted.Count > 0)
{
foreach (var block in extracted)
{
if (prefetchToolCallAsync != null)
block.PrefetchedExecutionTask = prefetchToolCallAsync(block);
}
blocks.AddRange(extracted);
LogService.Debug($"[ToolUse] 텍스트에서 도구 호출 {extracted.Count}건 추출 (SSE 폴백 파싱)");
}
}
}
return blocks;
}
/// <summary>
/// Extracts a non-streamed message payload (text content and/or a completed
/// <c>tool_calls</c> array) from either an Ollama-style root (<c>{"message": {...}}</c>)
/// or a bare message object, appending text to <paramref name="textBuilder"/> and
/// tool_use blocks to <paramref name="emittedTools"/>.
/// </summary>
/// <param name="messageOrRoot">Either the response root or the message object itself.</param>
/// <param name="textBuilder">Accumulator for text content.</param>
/// <param name="emittedTools">Accumulator for tool_use ContentBlocks.</param>
/// <returns>true when any text or tool_calls were consumed from the element.</returns>
private static bool TryExtractMessageToolBlocks(
JsonElement messageOrRoot,
StringBuilder textBuilder,
List<ContentBlock> emittedTools)
{
    // Bug fix: JsonElement.TryGetProperty throws InvalidOperationException on non-Object
    // elements, and SSE chunks are not guaranteed to be objects — guard so one malformed
    // chunk cannot abort the whole stream read.
    if (messageOrRoot.ValueKind != JsonValueKind.Object)
        return false;

    JsonElement message = messageOrRoot;
    // Ollama wraps the payload as root.message; fall back to the root element when the
    // wrapper is absent or not an object.
    if (messageOrRoot.TryGetProperty("message", out var nestedMessage) &&
        nestedMessage.ValueKind == JsonValueKind.Object)
        message = nestedMessage;

    var consumed = false;

    // Plain text content.
    if (message.TryGetProperty("content", out var contentEl) &&
        contentEl.ValueKind == JsonValueKind.String)
    {
        var text = contentEl.GetString();
        if (!string.IsNullOrWhiteSpace(text))
        {
            textBuilder.Append(text);
            consumed = true;
        }
    }

    // Completed tool_calls array (non-delta form).
    if (message.TryGetProperty("tool_calls", out var toolCallsEl) &&
        toolCallsEl.ValueKind == JsonValueKind.Array)
    {
        foreach (var tc in toolCallsEl.EnumerateArray())
        {
            if (tc.ValueKind != JsonValueKind.Object ||
                !tc.TryGetProperty("function", out var functionEl) ||
                functionEl.ValueKind != JsonValueKind.Object)
                continue;

            // arguments: OpenAI sends a JSON string; Ollama/Qwen may send a JSON
            // object (or array) directly.
            JsonElement? parsedArgs = null;
            if (functionEl.TryGetProperty("arguments", out var argsEl))
            {
                if (argsEl.ValueKind == JsonValueKind.String)
                {
                    try
                    {
                        using var argsDoc = JsonDocument.Parse(argsEl.GetString() ?? "{}");
                        parsedArgs = argsDoc.RootElement.Clone();
                    }
                    catch { parsedArgs = null; }
                }
                else if (argsEl.ValueKind is JsonValueKind.Object or JsonValueKind.Array)
                {
                    parsedArgs = argsEl.Clone();
                }
            }

            emittedTools.Add(new ContentBlock
            {
                Type = "tool_use",
                ToolName = functionEl.TryGetProperty("name", out var nameEl) && nameEl.ValueKind == JsonValueKind.String
                    ? nameEl.GetString() ?? ""
                    : "",
                // Guard the id's ValueKind too: GetString() throws on non-string kinds.
                ToolId = tc.TryGetProperty("id", out var idEl) && idEl.ValueKind == JsonValueKind.String
                    ? idEl.GetString() ?? Guid.NewGuid().ToString("N")[..12]
                    : Guid.NewGuid().ToString("N")[..12],
                ToolInput = parsedArgs,
            });
        }
        consumed = true;
    }

    return consumed;
}
/// <summary>
/// Heuristically checks whether an accumulated arguments string is a complete,
/// parseable JSON object or array (used to decide when a streamed tool call can
/// be emitted early).
/// </summary>
private static bool LooksLikeCompleteJson(string json)
{
    if (string.IsNullOrWhiteSpace(json))
        return false;

    var candidate = json.Trim();
    // Only object/array payloads count: tool arguments are never bare scalars.
    var firstChar = candidate[0];
    if (firstChar != '{' && firstChar != '[')
        return false;

    // A full parse is the only reliable completeness check for streamed fragments.
    try
    {
        using var _ = JsonDocument.Parse(candidate);
        return true;
    }
    catch
    {
        return false;
    }
}
/// <summary>
/// Emits a streamed tool call as a tool_use ContentBlock once it is complete, optionally
/// starting speculative execution via <paramref name="prefetchToolCallAsync"/>.
/// Mid-stream (forceEmit=false) a call is emitted only when its accumulated arguments parse
/// as complete JSON; at end-of-stream (forceEmit=true) it is emitted regardless, with a null
/// input when the arguments never became valid JSON.
/// </summary>
/// <param name="acc">Accumulator holding the fragments received so far.</param>
/// <param name="emittedTools">Destination list for emitted blocks.</param>
/// <param name="prefetchToolCallAsync">Optional speculative-execution callback.</param>
/// <param name="forceEmit">True at end-of-stream: emit even with incomplete/invalid arguments.</param>
private static async Task TryEmitCompletedToolCallAsync(
    ToolCallAccumulator acc,
    List<ContentBlock> emittedTools,
    Func<ContentBlock, Task<ToolPrefetchResult?>>? prefetchToolCallAsync,
    bool forceEmit = false)
{
    // Already emitted, or the function name has not arrived yet.
    if (acc.Emitted || string.IsNullOrWhiteSpace(acc.Name))
        return;

    var argsJson = acc.Arguments.ToString().Trim();

    // Bug fix: the first delta chunk often carries id+name while arguments are still empty.
    // Emitting at that point locks in ToolInput = null (Emitted blocks re-emission) and
    // silently discards argument chunks that arrive later. Wait for end-of-stream instead;
    // genuinely argument-less calls are still flushed by the forceEmit pass.
    if (!forceEmit && string.IsNullOrEmpty(argsJson))
        return;

    JsonElement? parsedArgs = null;
    if (!string.IsNullOrEmpty(argsJson))
    {
        // Cheap completeness probe before the authoritative parse below.
        if (!forceEmit && !LooksLikeCompleteJson(argsJson))
            return;
        try
        {
            using var argsDoc = JsonDocument.Parse(argsJson);
            parsedArgs = argsDoc.RootElement.Clone();
        }
        catch
        {
            // Mid-stream parse failure means the arguments are still partial — keep
            // accumulating. At forceEmit, emit with null input as a best effort.
            if (!forceEmit)
                return;
        }
    }

    var block = new ContentBlock
    {
        Type = "tool_use",
        ToolName = acc.Name,
        ToolId = string.IsNullOrWhiteSpace(acc.Id) ? Guid.NewGuid().ToString("N")[..12] : acc.Id,
        ToolInput = parsedArgs,
    };

    // Kick off speculative execution immediately so the tool runs while the stream finishes.
    if (prefetchToolCallAsync != null)
        block.PrefetchedExecutionTask = prefetchToolCallAsync(block);

    emittedTools.Add(block);
    acc.Emitted = true;
}
// ─── 공통 헬퍼 ─────────────────────────────────────────────────────
/// <summary>ToolProperty를 LLM API용 스키마 객체로 변환. array/enum/items 포함.</summary>