def convert__openai_chat_to_anthropic_messages__response(
response: openai_models.ChatCompletionResponse,
) -> anthropic_models.MessageResponse:
"""Convert OpenAI ChatCompletionResponse to Anthropic MessageResponse."""
text_content = ""
finish_reason = None
tool_contents: list[anthropic_models.ToolUseBlock] = []
if response.choices:
choice = response.choices[0]
finish_reason = getattr(choice, "finish_reason", None)
msg = getattr(choice, "message", None)
if msg is not None:
content_val = getattr(msg, "content", None)
if isinstance(content_val, str):
text_content = content_val
elif isinstance(content_val, list):
parts: list[str] = []
for part in content_val:
if isinstance(part, dict) and part.get("type") == "text":
t = part.get("text")
if isinstance(t, str):
parts.append(t)
text_content = "".join(parts)
# Extract OpenAI Chat tool calls (strict JSON parsing)
tool_calls = getattr(msg, "tool_calls", None)
if isinstance(tool_calls, list):
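                # Each entry may be a typed object or a plain dict;
                # read every field defensively from both shapes.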
for i, tc in enumerate(tool_calls):
fn = getattr(tc, "function", None)
if fn is None and isinstance(tc, dict):
fn = tc.get("function")
if not fn:
continue
name = getattr(fn, "name", None)
if name is None and isinstance(fn, dict):
name = fn.get("name")
args_raw = getattr(fn, "arguments", None)
if args_raw is None and isinstance(fn, dict):
args_raw = fn.get("arguments")
args = strict_parse_tool_arguments(args_raw)
tool_id = getattr(tc, "id", None)
if tool_id is None and isinstance(tc, dict):
tool_id = tc.get("id")
tool_contents.append(
anthropic_models.ToolUseBlock(
type="tool_use",
id=tool_id or f"call_{i}",
name=name or "function",
input=args,
)
)
            # Legacy single function call (deprecated OpenAI `function_call` field)
            legacy_fn = getattr(msg, "function_call", None) or getattr(
                msg, "function", None
            )
            if legacy_fn:
name = getattr(legacy_fn, "name", None)
args_raw = getattr(legacy_fn, "arguments", None)
args = strict_parse_tool_arguments(args_raw)
tool_contents.append(
anthropic_models.ToolUseBlock(
type="tool_use",
id="call_0",
name=name or "function",
input=args,
)
)
content_blocks: list[anthropic_models.ResponseContentBlock] = []
if text_content:
content_blocks.append(
anthropic_models.TextBlock(type="text", text=text_content)
)
# Append tool blocks after text (order matches Responses path patterns)
content_blocks.extend(tool_contents)
# Map usage via shared utility
usage = openai_usage_to_anthropic_usage(getattr(response, "usage", None))
stop_reason = map_openai_finish_to_anthropic_stop(finish_reason)
return anthropic_models.MessageResponse(
id=getattr(response, "id", "msg_1") or "msg_1",
type="message",
role="assistant",
model=getattr(response, "model", "") or "",
content=content_blocks,
stop_reason=stop_reason,
stop_sequence=None,
usage=usage,
)
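

# Illustrative usage sketch, not part of the library API: because the converter
# reads fields via getattr, a duck-typed payload is enough to exercise it here.
# The SimpleNamespace shapes and field values below are assumptions for
# demonstration only; real callers pass an openai_models.ChatCompletionResponse.
if __name__ == "__main__":
    from types import SimpleNamespace

    _demo = SimpleNamespace(
        id="chatcmpl-demo",
        model="gpt-4o-mini",
        usage=None,
        choices=[
            SimpleNamespace(
                finish_reason="tool_calls",
                message=SimpleNamespace(
                    content="Checking the forecast now.",
                    tool_calls=[
                        {
                            "id": "call_demo_1",
                            "function": {
                                "name": "get_weather",
                                "arguments": '{"city": "Paris"}',
                            },
                        }
                    ],
                ),
            )
        ],
    )
    print(convert__openai_chat_to_anthropic_messages__response(_demo))  # type: ignore[arg-type]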