切换成new-api方式进行llm调用

This commit is contained in:
zhaoawd
2025-12-08 23:11:43 +08:00
parent eefaf91ed1
commit f261121845
7 changed files with 145 additions and 57 deletions

View File

@ -24,6 +24,7 @@ from app.services import LLMGateway
from app.settings import DEFAULT_IMPORT_MODEL
from app.services.import_analysis import (
IMPORT_GATEWAY_BASE_URL,
build_import_gateway_headers,
resolve_provider_from_model,
)
from app.utils.llm_usage import extract_usage as extract_llm_usage
@ -532,6 +533,7 @@ async def _call_chat_completions(
temperature: float = 0.2,
timeout_seconds: Optional[float] = None,
) -> Any:
# Normalize model spec to provider+model and issue the unified chat call.
provider, model_name = resolve_provider_from_model(model_spec)
payload = {
"provider": provider.value,
@ -545,16 +547,17 @@ async def _call_chat_completions(
payload_size_bytes = len(json.dumps(payload, ensure_ascii=False).encode("utf-8"))
url = f"{IMPORT_GATEWAY_BASE_URL.rstrip('/')}/v1/chat/completions"
headers = build_import_gateway_headers()
try:
# Log the full request info (endpoint, model, payload size, payload)
logger.info(
"Calling chat completions API %s with model %s and size %s and payload %s",
"Calling chat completions API %s with model=%s payload_size=%sB",
url,
model_name,
payload_size_bytes,
payload,
)
response = await client.post(url, json=payload, timeout=timeout_seconds)
response = await client.post(
url, json=payload, timeout=timeout_seconds, headers=headers
)
response.raise_for_status()
except httpx.HTTPError as exc:
@ -703,6 +706,7 @@ async def _run_action_with_callback(
input_payload: Any = None,
model_spec: Optional[str] = None,
) -> Any:
# Execute a pipeline action and always emit a callback capturing success/failure.
if input_payload is not None:
logger.info(
"Pipeline action %s input: %s",