gpt/mcp/scripts/server.py
# server.py
#from fastapi import FastAPI
#from fastapi_mcp import FastApiMCP
#
#app = FastAPI()
#
#@app.get("/items/{item_id}", operation_id="get_item")
#async def read_item(item_id: int):
# return {"item_id": item_id, "name": f"Item {item_id}"}
#
## Create the MCP server and mount it on the FastAPI app
#mcp = FastApiMCP(
#    app,
#    name="My API MCP",
#    description="My API description"
#)
#mcp.mount()
#
#if __name__ == "__main__":
#    import uvicorn
#    uvicorn.run(app, host="0.0.0.0", port=8000)
from fastapi import FastAPI
from fastapi_mcp import FastApiMCP
from pydantic import BaseModel
from memory_store import save_message, load_messages
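
# memory_store is a local helper module not shown in this file. A minimal sketch
# of the assumed interface follows; the JSON-file persistence and the MEMORY_FILE
# name are illustrative assumptions inferred from the call sites below, not the
# actual implementation:
#
#   # memory_store.py
#   import json
#   import os
#
#   MEMORY_FILE = "memory.json"  # hypothetical storage location
#
#   def load_messages() -> list:
#       # Return all stored messages, or an empty list if nothing is saved yet.
#       if not os.path.exists(MEMORY_FILE):
#           return []
#       with open(MEMORY_FILE, encoding="utf-8") as f:
#           return json.load(f)
#
#   def save_message(sender: str, message: str) -> None:
#       # Append one message and persist the full history.
#       messages = load_messages()
#       messages.append({"sender": sender, "message": message})
#       with open(MEMORY_FILE, "w", encoding="utf-8") as f:
#           json.dump(messages, f, ensure_ascii=False, indent=2)
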
app = FastAPI()
mcp = FastApiMCP(app, name="aigpt-agent", description="MCP Server for AI memory")

# --- Model definitions ---
class ChatInput(BaseModel):
    message: str

class MemoryInput(BaseModel):
    sender: str
    message: str

# --- Tool (endpoint) definitions ---
@app.post("/chat", operation_id="chat")
async def chat(input: ChatInput):
    # Store the user's message, build a simple acknowledgement, and store that too.
    save_message("user", input.message)
    response = f'AI: received "{input.message}"!'
    save_message("ai", response)
    return {"response": response}
@app.post("/memory", operation_id="save_memory")
async def memory_post(input: MemoryInput):
save_message(input.sender, input.message)
return {"status": "saved"}
@app.get("/memory", operation_id="get_memory")
async def memory_get():
return {"messages": load_messages()}

# --- MCP initialization ---
# Mount the MCP server onto the FastAPI app so the endpoints above are exposed as MCP tools.
mcp.mount()

if __name__ == "__main__":
    import uvicorn
    print("🚀 Starting MCP server...")
    uvicorn.run(app, host="127.0.0.1", port=5000)
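
# A quick manual smoke test, assuming the server is running on 127.0.0.1:5000.
# The snippet below is illustrative only (requests is an extra dependency, not
# used by this server):
#
#   import requests
#
#   print(requests.post("http://127.0.0.1:5000/chat",
#                       json={"message": "hello"}).json())
#   print(requests.post("http://127.0.0.1:5000/memory",
#                       json={"sender": "user", "message": "remember this"}).json())
#   print(requests.get("http://127.0.0.1:5000/memory").json())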