Add logging configuration

zhaoawd
2025-10-30 22:38:23 +08:00
parent 455b884551
commit a0ed43a8b7

@@ -2,7 +2,9 @@ from __future__ import annotations

import asyncio
import logging
import os
from contextlib import asynccontextmanager
from typing import Any

import httpx
from fastapi import Depends, FastAPI, HTTPException, Request
@@ -18,12 +20,89 @@ from app.services import LLMGateway
from app.services.import_analysis import process_import_analysis_job


def _configure_logging() -> None:
    level_name = os.getenv("LOG_LEVEL", "INFO").upper()
    level = getattr(logging, level_name, logging.INFO)
    log_format = os.getenv(
        "LOG_FORMAT",
        "%(asctime)s %(levelname)s %(name)s:%(lineno)d %(message)s",
    )
    root = logging.getLogger()
    if not root.handlers:
        logging.basicConfig(level=level, format=log_format)
    else:
        root.setLevel(level)
        formatter = logging.Formatter(log_format)
        for handler in root.handlers:
            handler.setLevel(level)
            handler.setFormatter(formatter)


_configure_logging()

logger = logging.getLogger(__name__)


def _env_bool(name: str, default: bool) -> bool:
    raw = os.getenv(name)
    if raw is None:
        return default
    return raw.strip().lower() in {"1", "true", "yes", "on"}


def _env_float(name: str, default: float) -> float:
    raw = os.getenv(name)
    if raw is None:
        return default
    try:
        return float(raw)
    except ValueError:
        logger.warning("Invalid value for %s=%r, using default %.2f", name, raw, default)
        return default


def _parse_proxy_config(raw: str | None) -> dict[str, str] | str | None:
    if raw is None:
        return None
    cleaned = raw.strip()
    if not cleaned:
        return None
    # Support comma-separated key=value pairs for scheme-specific proxies.
    if "=" in cleaned:
        proxies: dict[str, str] = {}
        for part in cleaned.split(","):
            key, sep, value = part.partition("=")
            if not sep:
                continue
            key = key.strip()
            value = value.strip()
            if key and value:
                proxies[key] = value
        if proxies:
            return proxies
    return cleaned


def _create_http_client() -> httpx.AsyncClient:
    timeout_seconds = _env_float("HTTP_CLIENT_TIMEOUT", 30.0)
    trust_env = _env_bool("HTTP_CLIENT_TRUST_ENV", True)
    proxies = _parse_proxy_config(os.getenv("HTTP_CLIENT_PROXY"))
    client_kwargs: dict[str, object] = {
        "timeout": httpx.Timeout(timeout_seconds),
        "trust_env": trust_env,
    }
    if proxies:
        client_kwargs["proxies"] = proxies
    return httpx.AsyncClient(**client_kwargs)
@asynccontextmanager
async def lifespan(app: FastAPI):
-    client = httpx.AsyncClient(timeout=httpx.Timeout(30.0))
+    client = _create_http_client()
    gateway = LLMGateway()
    try:
        app.state.http_client = client  # type: ignore[attr-defined]
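
For reference, a minimal sketch of how _parse_proxy_config from the hunk above behaves for different HTTP_CLIENT_PROXY values; the app.main import path is an assumption, adjust it to wherever these helpers actually live:

# Sketch only: illustrates _parse_proxy_config results; the import path is assumed.
from app.main import _parse_proxy_config

# A bare URL is returned as-is and applies to all outbound traffic.
assert _parse_proxy_config("http://proxy.internal:3128") == "http://proxy.internal:3128"

# Comma-separated key=value pairs become a mapping keyed by URL pattern,
# which httpx accepts for scheme-specific proxies.
assert _parse_proxy_config(
    "http://=http://proxy.internal:3128,https://=http://proxy.internal:3129"
) == {
    "http://": "http://proxy.internal:3128",
    "https://": "http://proxy.internal:3129",
}

# Unset or blank values disable the explicit proxy setting entirely.
assert _parse_proxy_config(None) is None
assert _parse_proxy_config("   ") is None
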
@@ -85,6 +164,11 @@ def create_app() -> FastAPI:
        return DataImportAnalysisJobAck(import_record_id=payload.import_record_id, status="accepted")

    @application.post("/__mock__/import-callback")
    async def mock_import_callback(payload: dict[str, Any]) -> dict[str, str]:
        logger.info("Received import analysis callback: %s", payload)
        return {"status": "received"}

    return application
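
A hedged usage sketch for the new mock callback route; the local base URL, port, and payload fields below are illustrative assumptions, not part of this commit:

# Sketch only: assumes the service is already running locally on port 8000.
# Env knobs read at service start-up: LOG_LEVEL, LOG_FORMAT, HTTP_CLIENT_TIMEOUT,
# HTTP_CLIENT_TRUST_ENV, HTTP_CLIENT_PROXY.
import asyncio

import httpx


async def main() -> None:
    async with httpx.AsyncClient(base_url="http://127.0.0.1:8000") as client:
        resp = await client.post(
            "/__mock__/import-callback",
            json={"import_record_id": "demo-1", "status": "completed"},  # arbitrary example payload
        )
        resp.raise_for_status()
        print(resp.json())  # expected: {"status": "received"}


asyncio.run(main())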