# ics-simlab-config-gen-claude/services/agent_call.py

from __future__ import annotations
from dataclasses import dataclass
from typing import Any, Dict, Optional
from openai import OpenAI
# ----------------------------
# Low-level (pass-through)
# ----------------------------
def agent_call_req(client: OpenAI, req: Dict[str, Any]) -> Any:
    """
    Forward a fully pre-built request dict to the OpenAI Responses API.

    This is the lowest-level entry point: the caller is responsible for
    assembling `req` with whatever fields the API expects, e.g.:
      - model, input, max_output_tokens
      - text.format (json_schema / json_object)
      - reasoning / temperature, etc.

    Returns the raw Responses API response object, untouched.
    """
    response = client.responses.create(**req)
    return response
# ----------------------------
# High-level convenience (optional)
# ----------------------------
@dataclass
class AgentCallResult:
    """Result of a high-level `agent_call`: the model's concatenated text
    output plus a flag recording whether structured output was requested."""

    # Concatenated text output of the response (`resp.output_text`).
    text: str
    # True when a JSON schema was supplied to the call, False otherwise.
    used_structured_output: bool
def agent_call(
    client: OpenAI,
    model: str,
    prompt: str,
    max_output_tokens: int,
    schema: Optional[dict] = None,
) -> AgentCallResult:
    """
    Convenience wrapper for simple Responses API calls.

    Builds the request in the same `text: {format: ...}` shape used by the
    rest of the pipeline (see `agent_call_req`), rather than the Chat
    Completions `response_format=...` parameter, which the Responses API
    does not accept.

    Args:
        client: OpenAI client instance.
        model: Model name to use.
        prompt: The input text for the model.
        max_output_tokens: Cap on generated output tokens.
        schema: Optional structured-output spec. When provided, its keys
            (e.g. "name", "schema", "strict") are merged into the flattened
            Responses-API format object — assumes the caller passes the
            flattened fields, not a nested {"json_schema": ...} wrapper.

    Returns:
        AgentCallResult with the response's output text and a flag telling
        whether structured output was requested.
    """
    req: Dict[str, Any] = {
        "model": model,
        "input": prompt,
        "max_output_tokens": max_output_tokens,
    }
    if schema:
        # Responses API structured output: text.format is a single flattened
        # object — {"type": "json_schema", "name": ..., "schema": ..., ...}.
        req["text"] = {"format": {"type": "json_schema", **schema}}
    resp = client.responses.create(**req)
    return AgentCallResult(text=resp.output_text, used_structured_output=bool(schema))