From 7618f88a9be1b55d0d95cb65688584ba5abe67c0 Mon Sep 17 00:00:00 2001
From: syui
Date: Wed, 21 May 2025 19:26:56 +0900
Subject: [PATCH] fix mcp asset

---
 .gitignore                     |  1 -
 mcp/cli.py                     |  3 +++
 mcp/scripts/ask.py             | 30 ++++++++++++++++++++++++++++++
 mcp/scripts/context_loader.py  | 11 +++++++++++
 mcp/scripts/prompt_template.py | 11 +++++++++++
 mcp/setup.py                   | 12 ++++++++++++
 src/commands/mcp.rs            |  2 +-
 7 files changed, 68 insertions(+), 2 deletions(-)
 create mode 100644 mcp/cli.py
 create mode 100644 mcp/scripts/ask.py
 create mode 100644 mcp/scripts/context_loader.py
 create mode 100644 mcp/scripts/prompt_template.py
 create mode 100644 mcp/setup.py

diff --git a/.gitignore b/.gitignore
index ae397b7..d088031 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,4 +2,3 @@
 **.lock
 output.json
 config/*.db
-MCP
diff --git a/mcp/cli.py b/mcp/cli.py
new file mode 100644
index 0000000..5164c4c
--- /dev/null
+++ b/mcp/cli.py
@@ -0,0 +1,3 @@
+# cli.py
+def main():
+    print("Hello MCP!")
diff --git a/mcp/scripts/ask.py b/mcp/scripts/ask.py
new file mode 100644
index 0000000..c2eab45
--- /dev/null
+++ b/mcp/scripts/ask.py
@@ -0,0 +1,30 @@
+import httpx
+import os
+import json
+from context_loader import load_context_from_repo
+from prompt_template import PROMPT_TEMPLATE
+
+OLLAMA_HOST = os.getenv("OLLAMA_HOST", "http://localhost:11434")
+OLLAMA_URL = f"{OLLAMA_HOST}/api/generate"
+OLLAMA_MODEL = os.getenv("OLLAMA_MODEL", "syui/ai")
+
+def ask_question(question, repo_path="."):
+    context = load_context_from_repo(repo_path)
+    prompt = PROMPT_TEMPLATE.format(context=context[:10000], question=question)
+
+    payload = {
+        "model": OLLAMA_MODEL,
+        "prompt": prompt,
+        "stream": False
+    }
+
+    #response = httpx.post(OLLAMA_URL, json=payload)
+    response = httpx.post(OLLAMA_URL, json=payload, timeout=60.0)
+    result = response.json()
+    return result.get("response", "返答がありませんでした。")
+
+if __name__ == "__main__":
+    import sys
+    question = " ".join(sys.argv[1:])
+    answer = ask_question(question)
+    print("\n🧠 回答:\n", answer)
diff --git a/mcp/scripts/context_loader.py b/mcp/scripts/context_loader.py
new file mode 100644
index 0000000..439cab4
--- /dev/null
+++ b/mcp/scripts/context_loader.py
@@ -0,0 +1,11 @@
+import os
+
+def load_context_from_repo(repo_path: str, extensions={".rs", ".toml", ".md"}) -> str:
+    context = ""
+    for root, dirs, files in os.walk(repo_path):
+        for file in files:
+            if any(file.endswith(ext) for ext in extensions):
+                with open(os.path.join(root, file), "r", encoding="utf-8", errors="ignore") as f:
+                    content = f.read()
+                    context += f"\n\n# FILE: {os.path.join(root, file)}\n{content}"
+    return context
diff --git a/mcp/scripts/prompt_template.py b/mcp/scripts/prompt_template.py
new file mode 100644
index 0000000..0d11280
--- /dev/null
+++ b/mcp/scripts/prompt_template.py
@@ -0,0 +1,11 @@
+PROMPT_TEMPLATE = """
+あなたは優秀なAIアシスタントです。
+
+以下のコードベースの情報を参考にして、質問に答えてください。
+
+[コードコンテキスト]
+{context}
+
+[質問]
+{question}
+"""
diff --git a/mcp/setup.py b/mcp/setup.py
new file mode 100644
index 0000000..345f87b
--- /dev/null
+++ b/mcp/setup.py
@@ -0,0 +1,12 @@
+from setuptools import setup
+
+setup(
+    name='mcp',
+    version='0.1.0',
+    py_modules=['cli'],
+    entry_points={
+        'console_scripts': [
+            'mcp = cli:main',
+        ],
+    },
+)
diff --git a/src/commands/mcp.rs b/src/commands/mcp.rs
index 1ec7bb0..ee2d93e 100644
--- a/src/commands/mcp.rs
+++ b/src/commands/mcp.rs
@@ -24,7 +24,7 @@ pub fn mcp_setup() {
         assert!(status.success(), "git clone 実行時にエラーが発生しました");
     }
 
-    let asset_base = PathBuf::from("assets/mcp");
+    let asset_base = PathBuf::from("mcp");
     let files_to_copy = vec![
         "cli.py",
         "setup.py",