add mcp server

ipu 2025-08-12 16:39:59 +03:00
parent 29697f6274
commit 46ec8c7fe3
8 changed files with 1665 additions and 17 deletions

View file

@@ -1,5 +1,5 @@
 services:
-  lolly-api:
+  api:
     build: .
     command: uv run fastapi run src/main.py --host 0.0.0.0 --port 8000
     env_file:
@@ -18,6 +18,24 @@ services:
       - db
       - redis
+  mcp:
+    build: .
+    command: uv run python -m src.mcp.server
+    env_file:
+      - .env
+    ports:
+      - "${MCP_PORT:-8000}:8000"
+    volumes:
+      - ./src:/app/src:ro
+    depends_on:
+      - db
+      - redis
+    networks:
+      - ai-network
+    links:
+      - db
+      - redis
   redis:
     image: redis:7-alpine
     volumes:
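On ai-network the other containers can reach this server by its compose service name, while the host goes through the published ${MCP_PORT:-8000} mapping. A minimal connectivity sketch using fastmcp's client, assuming the "mcp" hostname and the /mcp path from the config above; run it from another container on the same network, or swap in http://localhost:8000/mcp (or whatever MCP_PORT is set to) from the host:

# Hypothetical check script, not part of this commit.
import asyncio

from fastmcp import Client

async def main():
    # Connects over streamable HTTP to the mcp service defined above.
    async with Client("http://mcp:8000/mcp") as client:
        tools = await client.list_tools()
        print([tool.name for tool in tools])

asyncio.run(main())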

View file

@@ -23,6 +23,10 @@ dependencies = [
     "alembic>=1.16.4",
     "psycopg2-binary>=2.9.10",
     "tqdm>=4.67.1",
+    "fastmcp>=2.11.2",
+    "ipython>=9.4.0",
+    "pydantic-ai>=0.6.2",
+    "ipykernel>=6.30.1",
 ]

 [project.optional-dependencies]

View file

@@ -20,7 +20,8 @@ async def insurance_chat(request: models.InsuranceChatRequest):
             session_id=result["session_id"],
             answer=result["answer"],
             sources=result["sources"],
-            history=result["history"]
+            history=result["history"],
+            hooks=result["hooks"],
         )
     except Exception as e:

0 src/mcp/__init__.py Normal file
View file

37 src/mcp/server.py Normal file
View file

@@ -0,0 +1,37 @@
+from fastmcp import FastMCP
+
+# Alias the cache helper so the MCP tool of the same name below does not shadow it.
+from src.cache.redis_cache import fetch_plans, get_plan_by_id as fetch_plan_by_id
+
+mcp = FastMCP(
+    name="LollyInsurancePlans",
+    instructions="""
+    You are a helpful assistant that can fetch insurance plans from the Lolly API.
+    You can fetch the plan id list by using the get_plan_list() tool.
+    You can fetch a particular plan's information by using the get_plan_by_id() tool.
+    """
+)
+
+
+@mcp.tool()
+async def get_plan_by_id(plan_id: int) -> dict:
+    plan = await fetch_plan_by_id(plan_id)
+    return plan
+
+
+@mcp.tool()
+async def get_plan_list() -> list[dict]:
+    plans = await fetch_plans()
+    return [
+        {
+            "id": p["id"],
+            "name": p["name"],
+            "category": p["category"],
+        } for p in plans
+    ]
+
+
+if __name__ == "__main__":
+    mcp.run(
+        "http",
+        host="0.0.0.0",
+        port=8000,
+        path="/mcp",
+    )
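For a quick check of the tools without going through Docker or HTTP, fastmcp's client can also talk to this server object in-process. A rough sketch, assuming the Redis behind src.cache.redis_cache is reachable and using plan_id=1 purely as an example value:

# Hypothetical local test, not part of this commit.
import asyncio

from fastmcp import Client

from src.mcp.server import mcp  # the __main__ guard keeps the HTTP server from starting on import

async def main():
    # In-memory transport: the client calls the FastMCP instance directly.
    async with Client(mcp) as client:
        plans = await client.call_tool("get_plan_list", {})
        print(plans)
        plan = await client.call_tool("get_plan_by_id", {"plan_id": 1})
        print(plan)

asyncio.run(main())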

View file

@@ -4,16 +4,16 @@ from datetime import date
 class Applicant(BaseModel):
     applicant: int
-    firstName: str
-    lastName: str
-    midName: Optional[str] = Field("", description="Middle name")
-    phone: Optional[str] = Field("", description="Phone number")
-    gender: str
-    dob: date
-    nicotine: bool
-    weight: float
-    heightFt: int
-    heightIn: int
+    firstName: str | None = None
+    lastName: str | None = None
+    midName: str | None
+    phone: str | None
+    gender: str | None = None
+    dob: date | None = None
+    nicotine: bool | None = None
+    weight: float | None = None
+    heightFt: int | None = None
+    heightIn: int | None = None


 class Plan(BaseModel):
     id: int
@@ -99,11 +99,26 @@ class HistoryItem(BaseModel):
     role: str
     message: str


+class PlansParam(BaseModel):
+    plans: list[int]
+
+
+class ApplicantParam(BaseModel):
+    applicants: list[Applicant]
+
+
+class ChatHook(BaseModel):
+    tool: str
+    params: PlansParam | ApplicantParam
+
+
 class AIChatResponse(BaseModel):
     answer: str
+    hooks: List[ChatHook] = []


 class InsuranceChatResponse(BaseModel):
     session_id: str
     answer: str
     sources: List[Source] = []
     history: List[HistoryItem] = []
+    hooks: List[ChatHook] = []


 class SessionCreateResponse(BaseModel):
     session_id: str
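For orientation, a small sketch of how the new hook models compose; the values and the "fetch_plans" tool name are made up, and the import of ChatHook / PlansParam / InsuranceChatResponse is left implicit because the models module's path is not visible in this diff:

# Hypothetical usage, not part of this commit.
hook = ChatHook.model_validate(
    {"tool": "fetch_plans", "params": {"plans": [12, 34]}}  # example payload
)
# Pydantic resolves the params union to PlansParam because the payload carries "plans".
assert isinstance(hook.params, PlansParam)

response = InsuranceChatResponse(
    session_id="example-session",   # placeholder id
    answer="Here are two plans worth comparing.",
    hooks=[hook],                   # sources and history keep their [] defaults
)
print(response.model_dump_json())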

View file

@@ -1,3 +1,4 @@
+import json
 import httpx
 from typing import Dict, Any, List, Optional
 from ..config import settings
@@ -112,13 +113,16 @@ class ChatService:
             chat_response = await self.send_message(session_id, message)
             history = await self.get_chat_history(session_id)
-            sources = self._extract_sources_from_response(chat_response.get("message", ""))
+            ai_response = json.loads(chat_response.get("message", "{}"))
+            ai_message = ai_response.get("message", "")
+            hooks = ai_response.get("hooks", [])

             return {
                 "session_id": session_id,
-                "answer": chat_response.get("message", "No response received"),
-                "sources": sources,
-                "history": history
+                "answer": ai_message,
+                "sources": [],
+                "history": history,
+                "hooks": hooks,
             }
         except Exception as e:
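The new code assumes the agent's reply is always a JSON envelope with "message" and "hooks" keys; if the model ever answers in plain text, json.loads raises and the request falls through to the exception handler. A hedged sketch of a more forgiving variant (the helper name and the fallback behaviour are suggestions, not part of the commit):

# Hypothetical helper the service could call instead of parsing inline.
import json
from typing import Any, Dict

def parse_agent_reply(raw: str | None) -> Dict[str, Any]:
    """Parse the agent's JSON envelope, degrading to plain text on failure."""
    try:
        payload = json.loads(raw or "{}")
    except (TypeError, json.JSONDecodeError):
        # The model replied with plain text rather than the expected envelope.
        return {"message": raw or "No response received", "hooks": []}
    if not isinstance(payload, dict):
        return {"message": str(payload), "hooks": []}
    return {
        "message": payload.get("message", ""),
        "hooks": payload.get("hooks", []),
    }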

1569 uv.lock generated

File diff suppressed because it is too large.