from __future__ import annotations

import os
from typing import Any, Dict, List

import httpx

from app.exceptions import ProviderAPICallError
from app.models import LLMChoice, LLMMessage, LLMProvider, LLMRequest, LLMResponse
from app.providers.base import LLMProviderClient


class OpenRouterProvider(LLMProviderClient):
    """Provider client for the OpenRouter chat-completions API.

    Sends non-streaming (and, per ``supports_stream``, stream-flagged)
    chat requests to OpenRouter and maps the JSON reply into the
    project's ``LLMResponse`` model.
    """

    name = LLMProvider.OPENROUTER.value
    api_key_env = "OPENROUTER_API_KEY"
    supports_stream = True
    base_url = "https://openrouter.ai/api/v1/chat/completions"

    def __init__(self, api_key: str | None):
        """Store the API key and read optional attribution settings.

        OpenRouter uses the site URL / app name headers for app
        attribution; both are optional and come from the environment.
        """
        super().__init__(api_key)
        self.site_url = os.getenv("OPENROUTER_SITE_URL")
        self.app_name = os.getenv("OPENROUTER_APP_NAME")

    async def chat(
        self, request: LLMRequest, client: httpx.AsyncClient
    ) -> LLMResponse:
        """Execute one chat-completion call against OpenRouter.

        Args:
            request: Normalized LLM request (model, messages, sampling knobs).
            client: Shared async HTTP client used for the POST.

        Returns:
            An ``LLMResponse`` carrying the parsed choices and raw payload.

        Raises:
            ProviderAPICallError: On any transport or HTTP-status failure.
        """
        self.ensure_stream_supported(request.stream)

        body = {
            "model": request.model,
            "messages": [message.model_dump() for message in request.messages],
            "temperature": request.temperature,
            "top_p": request.top_p,
            "max_tokens": request.max_tokens,
            "stream": request.stream,
        }
        # merge_payload layers caller-supplied extra_params over the defaults.
        payload = self.merge_payload(body, request.extra_params)

        headers: Dict[str, str] = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
        }
        # Optional attribution headers — only sent when configured.
        if self.site_url:
            headers["HTTP-Referer"] = self.site_url
        if self.app_name:
            headers["X-Title"] = self.app_name

        try:
            response = await client.post(self.base_url, json=payload, headers=headers)
            # HTTPStatusError is a subclass of HTTPError, so 4xx/5xx is
            # folded into the same provider error as transport failures.
            response.raise_for_status()
        except httpx.HTTPError as exc:
            raise ProviderAPICallError(f"OpenRouter request failed: {exc}") from exc

        data: Dict[str, Any] = response.json()

        return LLMResponse(
            provider=LLMProvider.OPENROUTER,
            model=data.get("model", request.model),
            choices=self._build_choices(data.get("choices", [])),
            raw=data,
        )

    @staticmethod
    def _build_choices(choices: List[dict[str, Any]]) -> List[LLMChoice]:
        """Map raw OpenRouter choice dicts onto ``LLMChoice`` models.

        Missing fields fall back to sensible defaults: the list position
        for ``index``, ``"assistant"`` for the role, and ``""`` for content.
        """
        result: List[LLMChoice] = []
        for position, raw_choice in enumerate(choices):
            raw_message = raw_choice.get("message") or {}
            result.append(
                LLMChoice(
                    index=raw_choice.get("index", position),
                    message=LLMMessage(
                        role=raw_message.get("role", "assistant"),
                        content=raw_message.get("content", ""),
                    ),
                )
            )
        return result
|