commit f94b377130 (parent 22d497661e)
2025-05-21 19:21:15 +09:00
13 changed files with 345 additions and 13 deletions

3
mcp/cli.py Normal file

@@ -0,0 +1,3 @@
# cli.py
def main():
    print("Hello MCP!")

30
mcp/scripts/ask.py Normal file

@@ -0,0 +1,30 @@
import os
import sys

import httpx

from context_loader import load_context_from_repo
from prompt_template import PROMPT_TEMPLATE

# Ollama endpoint and model are configurable via environment variables.
OLLAMA_HOST = os.getenv("OLLAMA_HOST", "http://localhost:11434")
OLLAMA_URL = f"{OLLAMA_HOST}/api/generate"
OLLAMA_MODEL = os.getenv("OLLAMA_MODEL", "syui/ai")


def ask_question(question, repo_path="."):
    # Gather repository files as context and truncate to keep the prompt small.
    context = load_context_from_repo(repo_path)
    prompt = PROMPT_TEMPLATE.format(context=context[:10000], question=question)
    payload = {
        "model": OLLAMA_MODEL,
        "prompt": prompt,
        "stream": False,
    }
    # Non-streaming generation; allow up to 60 seconds for the model to answer.
    response = httpx.post(OLLAMA_URL, json=payload, timeout=60.0)
    result = response.json()
    return result.get("response", "No response was returned.")


if __name__ == "__main__":
    question = " ".join(sys.argv[1:])
    answer = ask_question(question)
    print("\n🧠 Answer:\n", answer)

11
mcp/scripts/context_loader.py Normal file

@@ -0,0 +1,11 @@
import os


def load_context_from_repo(repo_path: str, extensions=(".rs", ".toml", ".md")) -> str:
    """Concatenate the contents of matching files under repo_path into one string."""
    context = ""
    for root, dirs, files in os.walk(repo_path):
        for file in files:
            if any(file.endswith(ext) for ext in extensions):
                path = os.path.join(root, file)
                with open(path, "r", encoding="utf-8", errors="ignore") as f:
                    content = f.read()
                # Prefix each file with a marker so the model can tell files apart.
                context += f"\n\n# FILE: {path}\n{content}"
    return context
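
As an illustration, a hypothetical usage sketch (not part of this commit) showing how the loader's output relates to the 10,000-character truncation applied in ask.py; it assumes the script sits next to context_loader.py so the import resolves:

# context_loader_example.py — hypothetical sketch.
from context_loader import load_context_from_repo

context = load_context_from_repo(".", extensions=(".md", ".toml"))
print(f"collected {len(context)} characters before truncation")
# Each file is introduced by a "# FILE: <path>" marker, so individual
# files can be located with a plain string search.
print(context[:200])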

11
mcp/scripts/prompt_template.py Normal file

@@ -0,0 +1,11 @@
PROMPT_TEMPLATE = """
あなたは優秀なAIアシスタントです。
以下のコードベースの情報を参考にして、質問に答えてください。
[コードコンテキスト]
{context}
[質問]
{question}
"""

12
mcp/setup.py Normal file

@@ -0,0 +1,12 @@
from setuptools import setup

setup(
    name='mcp',
    version='0.1.0',
    py_modules=['cli'],
    entry_points={
        'console_scripts': [
            # Expose an `mcp` command that invokes cli.main().
            'mcp = cli:main',
        ],
    },
)
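
For context, a minimal sketch of what the entry point wires up (hypothetical, not part of this commit): after an editable install from the mcp/ directory, the installed mcp console script resolves to cli:main, which is equivalent to the following:

# entry_point_check.py — hypothetical sketch, assumed to be run from the
# mcp/ directory so that cli.py is importable as a top-level module.
import cli

cli.main()  # prints "Hello MCP!", the same as running the installed `mcp` command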