"""Environment-backed configuration for model providers and service endpoints."""
from __future__ import annotations
|
|
|
|
import os
|
|
from functools import lru_cache
|
|
from typing import Dict, Set
|
|
|
|
from dotenv import load_dotenv
|
|
|
|
|
|
load_dotenv()
|
|
|
|
|
|
PROVIDER_KEY_ENV_MAP: Dict[str, str] = {
|
|
"openai": "OPENAI_API_KEY",
|
|
"anthropic": "ANTHROPIC_API_KEY",
|
|
"openrouter": "OPENROUTER_API_KEY",
|
|
"gemini": "GEMINI_API_KEY",
|
|
"qwen": "QWEN_API_KEY",
|
|
"deepseek": "DEEPSEEK_API_KEY",
|
|
}
|
|
|
|
|
|
DEFAULT_IMPORT_MODEL = os.getenv("DEFAULT_IMPORT_MODEL", "deepseek:deepseek-chat")
|
|
NEW_API_BASE_URL = os.getenv("NEW_API_BASE_URL")
|
|
NEW_API_AUTH_TOKEN = os.getenv("NEW_API_AUTH_TOKEN")
|
|
RAG_API_BASE_URL = os.getenv("RAG_API_BASE_URL", "https://tchatbi.agentcarrier.cn/chatbi/api")
|
|
RAG_API_AUTH_TOKEN = os.getenv("RAG_API_AUTH_TOKEN")
|
|
|
|
|
|
@lru_cache(maxsize=1)
def get_supported_import_models() -> Set[str]:
    """Return the set of model names allowed for import.

    Parsed from the comma-separated ``IMPORT_SUPPORTED_MODELS`` environment
    variable: entries are whitespace-stripped and empty entries are dropped.
    An unset or empty variable yields an empty set.

    NOTE: the result is cached (``lru_cache``), so changes to the environment
    variable after the first call are not picked up.
    """
    raw = os.getenv("IMPORT_SUPPORTED_MODELS", "")
    return {model.strip() for model in raw.split(",") if model.strip()}
@lru_cache(maxsize=1)
def get_available_provider_keys() -> Dict[str, str]:
    """Return a mapping of provider name -> API key for configured providers.

    Consults ``PROVIDER_KEY_ENV_MAP`` and includes only providers whose
    environment variable is set to a non-empty value.

    NOTE: the result is cached (``lru_cache``), so environment changes after
    the first call are not picked up.
    """
    return {
        provider: value
        for provider, env_name in PROVIDER_KEY_ENV_MAP.items()
        if (value := os.getenv(env_name))
    }