add mcp server

This commit is contained in:
ipu 2025-08-12 16:39:59 +03:00
parent 29697f6274
commit 46ec8c7fe3
8 changed files with 1665 additions and 17 deletions

View file

@ -1,5 +1,5 @@
services: services:
lolly-api: api:
build: . build: .
command: uv run fastapi run src/main.py --host 0.0.0.0 --port 8000 command: uv run fastapi run src/main.py --host 0.0.0.0 --port 8000
env_file: env_file:
@ -18,6 +18,24 @@ services:
- db - db
- redis - redis
mcp:
  build: .
  # Runs the new FastMCP server as a module (src/mcp/server.py).
  command: uv run python -m src.mcp.server
  env_file:
    - .env
  ports:
    # NOTE(review): when MCP_PORT is unset this publishes host port 8000,
    # which the `api` service also publishes — confirm MCP_PORT is always
    # set, or pick a different default.
    - "${MCP_PORT:-8000}:8000"
  volumes:
    # Source mounted read-only so code changes don't require an image rebuild.
    - ./src:/app/src:ro
  depends_on:
    - db
    - redis
  networks:
    # NOTE(review): `ai-network` must be declared under a top-level
    # `networks:` key — not visible in this hunk; verify it exists.
    - ai-network
  # NOTE(review): `links` is a legacy option; `depends_on` plus a shared
  # network already provide DNS resolution for db/redis. Consider removing.
  links:
    - db
    - redis
redis: redis:
image: redis:7-alpine image: redis:7-alpine
volumes: volumes:

View file

@ -23,6 +23,10 @@ dependencies = [
"alembic>=1.16.4", "alembic>=1.16.4",
"psycopg2-binary>=2.9.10", "psycopg2-binary>=2.9.10",
"tqdm>=4.67.1", "tqdm>=4.67.1",
"fastmcp>=2.11.2",
# NOTE(review): ipython and ipykernel are interactive/dev tools — consider
# moving them to [project.optional-dependencies] (dev group) so production
# images stay slim.
"ipython>=9.4.0",
"pydantic-ai>=0.6.2",
"ipykernel>=6.30.1",
] ]
[project.optional-dependencies] [project.optional-dependencies]

View file

@ -20,7 +20,8 @@ async def insurance_chat(request: models.InsuranceChatRequest):
session_id=result["session_id"], session_id=result["session_id"],
answer=result["answer"], answer=result["answer"],
sources=result["sources"], sources=result["sources"],
history=result["history"] history=result["history"],
hooks=result["hooks"],
) )
except Exception as e: except Exception as e:

0
src/mcp/__init__.py Normal file
View file

37
src/mcp/server.py Normal file
View file

@ -0,0 +1,37 @@
from fastmcp import FastMCP
from src.cache.redis_cache import fetch_plans, get_plan_by_id
mcp = FastMCP(
name="LollyInsurancePlans",
instructions="""
You are a helpful assistant that can fetch insurance plans from the Lolly API.
You can fetch the plan id list by using get_plan_list() tool.
You can fetch the particular plan information by using get_plan_by_id() tool.
"""
)
@mcp.tool(name="get_plan_by_id")
async def fetch_plan_by_id(plan_id: int) -> dict:
    """Return the full cached record for one insurance plan.

    Args:
        plan_id: Numeric plan id, as returned by the get_plan_list tool.

    Returns:
        The plan document as a dict, as stored in the Redis cache.
    """
    # BUG FIX: the original function was also named `get_plan_by_id`, so the
    # @mcp.tool() decorator rebound the module-level name to the FastMCP tool
    # object, shadowing the redis_cache helper imported at the top of the
    # file — the inner `await get_plan_by_id(...)` then resolved to the tool
    # wrapper instead of the cache. Renaming the function (while keeping the
    # public tool name via `name=`) restores the call to the cache helper.
    plan = await get_plan_by_id(plan_id)
    return plan
@mcp.tool()
async def get_plan_list() -> list[dict]:
    """Return a lightweight summary (id, name, category) of every cached plan."""
    plans = await fetch_plans()
    summaries: list[dict] = []
    for plan in plans:
        summaries.append(
            {
                "id": plan["id"],
                "name": plan["name"],
                "category": plan["category"],
            }
        )
    return summaries
if __name__ == "__main__":
    # Serve the MCP endpoint over streamable HTTP at /mcp on all interfaces,
    # matching the container port published by docker-compose.
    mcp.run(transport="http", host="0.0.0.0", port=8000, path="/mcp")

View file

@ -4,16 +4,16 @@ from datetime import date
class Applicant(BaseModel): class Applicant(BaseModel):
applicant: int applicant: int
firstName: str firstName: str | None = None
lastName: str lastName: str | None = None
midName: Optional[str] = Field("", description="Middle name") midName: str | None
phone: Optional[str] = Field("", description="Phone number") phone: str | None
gender: str gender: str | None = None
dob: date dob: date | None = None
nicotine: bool nicotine: bool | None = None
weight: float weight: float | None = None
heightFt: int heightFt: int | None = None
heightIn: int heightIn: int | None = None
class Plan(BaseModel): class Plan(BaseModel):
id: int id: int
@ -99,11 +99,26 @@ class HistoryItem(BaseModel):
role: str role: str
message: str message: str
class PlansParam(BaseModel):
    """Hook payload selecting insurance plans by id."""
    # Ids of the plans the client-side action should operate on.
    plans: list[int]
class ApplicantParam(BaseModel):
    """Hook payload carrying applicant details."""
    # Applicants the client-side action should operate on.
    applicants: list[Applicant]
class ChatHook(BaseModel):
    """A client-side action suggested by the AI alongside its answer."""
    # Name of the tool/action the client should invoke.
    tool: str
    # Disambiguated by field shape (`plans` vs `applicants`) — NOTE(review):
    # confirm Pydantic's union resolution here matches what callers expect.
    params: PlansParam | ApplicantParam
class AIChatResponse(BaseModel):
    """Parsed payload of the raw AI message: answer text plus optional hooks."""
    answer: str
    # Pydantic deep-copies mutable defaults per instance, so `[]` is safe here.
    hooks: List[ChatHook] = []
class InsuranceChatResponse(BaseModel): class InsuranceChatResponse(BaseModel):
session_id: str session_id: str
answer: str answer: str
sources: List[Source] = [] sources: List[Source] = []
history: List[HistoryItem] = [] history: List[HistoryItem] = []
hooks: List[ChatHook] = []
class SessionCreateResponse(BaseModel): class SessionCreateResponse(BaseModel):
session_id: str session_id: str

View file

@ -1,3 +1,4 @@
import json
import httpx import httpx
from typing import Dict, Any, List, Optional from typing import Dict, Any, List, Optional
from ..config import settings from ..config import settings
@ -112,13 +113,16 @@ class ChatService:
chat_response = await self.send_message(session_id, message) chat_response = await self.send_message(session_id, message)
history = await self.get_chat_history(session_id) history = await self.get_chat_history(session_id)
sources = self._extract_sources_from_response(chat_response.get("message", "")) ai_response = json.loads(chat_response.get("message", {}))
ai_message = ai_response.get("message", "")
hooks = ai_response.get("hooks", [])
return { return {
"session_id": session_id, "session_id": session_id,
"answer": chat_response.get("message", "No response received"), "answer": ai_message,
"sources": sources, "sources": [],
"history": history "history": history,
"hooks": hooks,
} }
except Exception as e: except Exception as e:

1569
uv.lock generated

File diff suppressed because it is too large Load diff