Model-related parameters and defaults

Author: zhaoawd
Date: 2025-10-29 23:43:26 +08:00
parent a78c8b9446
commit 2d158750fa
3 changed files with 59 additions and 2 deletions

.env (new file, 16 additions)

@@ -0,0 +1,16 @@
# LLM provider API keys
OPENAI_API_KEY=
ANTHROPIC_API_KEY=
OPENROUTER_API_KEY=
OPENROUTER_SITE_URL=
OPENROUTER_APP_NAME=
GEMINI_API_KEY=
QWEN_API_KEY=
DEEPSEEK_API_KEY="sk-657f0752a1564563be7ce35b6a0a7b46"
# Data import analysis defaults
IMPORT_SUPPORTED_MODELS=openai:gpt-5,deepseek:deepseek-chat,openrouter:anthropic/claude-4.0-sonnet
DEFAULT_IMPORT_MODEL=deepseek:deepseek-chat
# Service configuration
IMPORT_GATEWAY_BASE_URL=http://localhost:8000
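
For illustration only (not part of this commit): the identifiers above follow a "provider:model" convention, so downstream code could split them along these lines. split_model_identifier is a hypothetical helper name, not something the diff defines.

# Hypothetical helper: splits "deepseek:deepseek-chat" into ("deepseek", "deepseek-chat").
def split_model_identifier(identifier: str) -> tuple[str, str]:
    provider, _, model = identifier.partition(":")
    if not model:
        # No ":" separator; treat the whole string as a model with no provider prefix.
        return "", identifier
    return provider, model

# Example usage: prints ('deepseek', 'deepseek-chat')
print(split_model_identifier("deepseek:deepseek-chat"))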

(modified file, 4 additions and 2 deletions)

@@ -5,6 +5,8 @@ from typing import Any, Dict, List, Optional, Union
 from pydantic import BaseModel, Field, HttpUrl
+from app.settings import DEFAULT_IMPORT_MODEL
 class LLMRole(str, Enum):
     USER = "user"
@@ -117,8 +119,8 @@ class DataImportAnalysisJobRequest(BaseModel):
         description="URL to notify when the analysis completes. Receives JSON payload with status/results.",
     )
     llm_model: str = Field(
-        "gpt-4.1-mini",
-        description="Target LLM model identifier. Defaults to gpt-4.1-mini.",
+        DEFAULT_IMPORT_MODEL,
+        description="Target LLM model identifier. Defaults to DEFAULT_IMPORT_MODEL.",
     )
     temperature: Optional[float] = Field(
         None,
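
Illustration only, not part of the diff: a stripped-down model showing how the module-level DEFAULT_IMPORT_MODEL now flows into the pydantic Field default. The class name DemoJobRequest and the omission of the other request fields are assumptions made for brevity.

from pydantic import BaseModel, Field

from app.settings import DEFAULT_IMPORT_MODEL


class DemoJobRequest(BaseModel):
    # Same pattern as DataImportAnalysisJobRequest.llm_model after this commit.
    llm_model: str = Field(
        DEFAULT_IMPORT_MODEL,
        description="Target LLM model identifier. Defaults to DEFAULT_IMPORT_MODEL.",
    )


# With DEFAULT_IMPORT_MODEL=deepseek:deepseek-chat in .env, an empty payload picks up the default:
print(DemoJobRequest().llm_model)  # -> deepseek:deepseek-chat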

app/settings.py (new file, 39 additions)

@@ -0,0 +1,39 @@
from __future__ import annotations

import os
from functools import lru_cache
from typing import Dict, Set

from dotenv import load_dotenv

# Load variables from the project-level .env file into the process environment.
load_dotenv()

# Maps each supported provider name to the environment variable holding its API key.
PROVIDER_KEY_ENV_MAP: Dict[str, str] = {
    "openai": "OPENAI_API_KEY",
    "anthropic": "ANTHROPIC_API_KEY",
    "openrouter": "OPENROUTER_API_KEY",
    "gemini": "GEMINI_API_KEY",
    "qwen": "QWEN_API_KEY",
    "deepseek": "DEEPSEEK_API_KEY",
}

# Fallback model ("provider:model" format) used when a request does not specify one.
DEFAULT_IMPORT_MODEL = os.getenv("DEFAULT_IMPORT_MODEL", "openai:gpt-4.1-mini")


@lru_cache(maxsize=1)
def get_supported_import_models() -> Set[str]:
    # Parse the comma-separated IMPORT_SUPPORTED_MODELS list, dropping empty entries.
    raw = os.getenv("IMPORT_SUPPORTED_MODELS", "")
    return {model.strip() for model in raw.split(",") if model.strip()}


@lru_cache(maxsize=1)
def get_available_provider_keys() -> Dict[str, str]:
    # Return only the providers whose API key is actually set in the environment.
    keys: Dict[str, str] = {}
    for provider, env_name in PROVIDER_KEY_ENV_MAP.items():
        value = os.getenv(env_name)
        if value:
            keys[provider] = value
    return keys
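
A possible consumer of these helpers (a sketch, not code from this commit): accept a requested model only if it is whitelisted in IMPORT_SUPPORTED_MODELS and its provider has an API key configured. validate_import_model is a hypothetical function name.

from typing import Optional

from app.settings import (
    DEFAULT_IMPORT_MODEL,
    get_available_provider_keys,
    get_supported_import_models,
)


def validate_import_model(requested: Optional[str]) -> str:
    # Fall back to the configured default when the request omits a model.
    model = requested or DEFAULT_IMPORT_MODEL
    if model not in get_supported_import_models():
        raise ValueError(f"Unsupported model: {model}")
    provider = model.split(":", 1)[0]
    if provider not in get_available_provider_keys():
        raise ValueError(f"No API key configured for provider: {provider}")
    return model


# Example: returns "deepseek:deepseek-chat" with the .env values shown above.
print(validate_import_model(None))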