syui 2025-05-21 19:21:15 +09:00
parent 22d497661e
commit f94b377130
Signed by: syui
GPG Key ID: 5417CFEBAD92DF56
13 changed files with 345 additions and 13 deletions

Cargo.toml

@ -10,3 +10,4 @@ chrono = "0.4"
seahorse = "*"
rusqlite = { version = "0.29", features = ["serde_json"] }
shellexpand = "*"
fs_extra = "1.3"

README.md

@ -1,22 +1,33 @@
# ai `gpt`

ai x Communication

## Overview

`ai.gpt` runs on the AGE system.

This is a prototype of an autonomous, relationship-driven AI system built on the axes of "Personality × Relationship × External Environment × Time Variation."

Whether to send (`送信可否`), when to send (`送信のタイミング`), and what to send (`送信内容`) are all decided by those same "Personality × Relationship × External Environment × Time Variation" parameters.
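As a rough illustration, a decision over these four axes might look like the following Rust sketch. The struct, field names, and threshold are hypothetical, not taken from this repo:

```rust
// Hypothetical sketch: the four AGE axes deciding whether to send.
// All names and the 0.5 threshold are illustrative only.
struct SendContext {
    personality: f32,   // 人格: how talkative this persona is
    relationship: f32,  // 関係性: closeness to this user
    environment: f32,   // 外部環境: e.g. time of day, recent events
    time_decay: f32,    // 時間変化: decay since the last contact
}

fn should_send(ctx: &SendContext) -> bool {
    let score = ctx.personality * ctx.relationship * ctx.environment * ctx.time_decay;
    score > 0.5 // threshold chosen arbitrarily for the sketch
}

fn main() {
    let ctx = SendContext { personality: 0.9, relationship: 0.8, environment: 1.0, time_decay: 0.9 };
    println!("send? {}", should_send(&ctx)); // send? true (0.648 > 0.5)
}
```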
## Integration

`ai.ai` runs on the AIM system, which is designed to read human emotions.

- AIM covers the axis of personality and ethics (the AI's consciousness structure)
- AGE covers the axis of behavior and relationships (the AI's autonomy and conduct)

> When these two systems work together, it creates a world where users can genuinely feel they are "growing together with the AI."
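One way to picture that division of labor, as a type-level sketch (purely illustrative names, assuming nothing about either codebase):

```rust
// Illustrative only: AIM models the inner state, AGE the outward behavior.
#[allow(dead_code)]
struct Aim { ethics: f32, empathy: f32 }        // personality & ethics (consciousness)
#[allow(dead_code)]
struct Age { autonomy: f32, relationship: f32 } // behavior & relationships (autonomy)

#[allow(dead_code)]
struct Persona { aim: Aim, age: Age }

fn main() {} // type sketch only
```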
## mcp
```sh
$ ollama run syui/ai
```
```sh
$ cargo build
$ ./target/debug/aigpt mcp setup
$ ./target/debug/aigpt mcp chat "hello world!" --host http://localhost:11434 --model syui/ai
```
Or so it says.

mcp/cli.py Normal file

@ -0,0 +1,3 @@
# cli.py
def main():
    print("Hello MCP!")

mcp/scripts/ask.py Normal file

@ -0,0 +1,30 @@
import httpx
import os
from context_loader import load_context_from_repo
from prompt_template import PROMPT_TEMPLATE

OLLAMA_HOST = os.getenv("OLLAMA_HOST", "http://localhost:11434")
OLLAMA_URL = f"{OLLAMA_HOST}/api/generate"
OLLAMA_MODEL = os.getenv("OLLAMA_MODEL", "syui/ai")

def ask_question(question, repo_path="."):
    # Collect repo files as context and cap the prompt at 10,000 chars
    context = load_context_from_repo(repo_path)
    prompt = PROMPT_TEMPLATE.format(context=context[:10000], question=question)
    payload = {
        "model": OLLAMA_MODEL,
        "prompt": prompt,
        "stream": False
    }
    response = httpx.post(OLLAMA_URL, json=payload, timeout=60.0)
    result = response.json()
    return result.get("response", "返答がありませんでした。")

if __name__ == "__main__":
    import sys
    question = " ".join(sys.argv[1:])
    answer = ask_question(question)
    print("\n🧠 回答:\n", answer)

mcp/scripts/context_loader.py Normal file

@ -0,0 +1,11 @@
import os

def load_context_from_repo(repo_path: str, extensions={".rs", ".toml", ".md"}) -> str:
    context = ""
    for root, dirs, files in os.walk(repo_path):
        for file in files:
            if any(file.endswith(ext) for ext in extensions):
                with open(os.path.join(root, file), "r", encoding="utf-8", errors="ignore") as f:
                    content = f.read()
                context += f"\n\n# FILE: {os.path.join(root, file)}\n{content}"
    return context
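Note that `ask.py` truncates this to the first 10,000 characters before templating, so on larger repos most of the walk is discarded. A quick size check (hypothetical, run next to the script):

```python
from context_loader import load_context_from_repo

ctx = load_context_from_repo(".")
print(f"collected {len(ctx)} chars of context")
```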

mcp/scripts/prompt_template.py Normal file

@ -0,0 +1,11 @@
PROMPT_TEMPLATE = """
あなたは優秀なAIアシスタントです
以下のコードベースの情報を参考にして質問に答えてください
[コードコンテキスト]
{context}
[質問]
{question}
"""

mcp/setup.py Normal file

@ -0,0 +1,12 @@
from setuptools import setup

setup(
    name='mcp',
    version='0.1.0',
    py_modules=['cli'],
    entry_points={
        'console_scripts': [
            'mcp = cli:main',
        ],
    },
)
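Since `console_scripts` maps the `mcp` command to `cli:main`, an editable install exposes the hello-world entry point directly, roughly:

```sh
$ pip install -e .
$ mcp
Hello MCP!
```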

src/chat.rs Normal file

@ -0,0 +1,45 @@
// src/chat.rs
use seahorse::Context;
use std::process::Command;
use crate::config::ConfigPaths;

pub fn ask_chat(c: &Context, question: &str) {
    let config = ConfigPaths::new();
    let base_dir = config.base_dir.join("mcp");
    let script_path = base_dir.join("scripts/ask.py");

    // Use the Python interpreter inside the venv created by `mcp setup`
    let python_path = if cfg!(target_os = "windows") {
        base_dir.join(".venv/Scripts/python.exe")
    } else {
        base_dir.join(".venv/bin/python")
    };

    let ollama_host = c.string_flag("host").ok();
    let ollama_model = c.string_flag("model").ok();

    let mut command = Command::new(python_path);
    command.arg(script_path).arg(question);

    // Forward the CLI flags to ask.py as environment variables
    if let Some(host) = ollama_host {
        command.env("OLLAMA_HOST", host);
    }
    if let Some(model) = ollama_model {
        command.env("OLLAMA_MODEL", model);
    }

    let output = command
        .output()
        .expect("❌ MCPチャットスクリプトの実行に失敗しました");

    if output.status.success() {
        println!("💬 {}", String::from_utf8_lossy(&output.stdout));
    } else {
        eprintln!(
            "❌ 実行エラー: {}\n{}",
            String::from_utf8_lossy(&output.stderr),
            String::from_utf8_lossy(&output.stdout),
        );
    }
}
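Note the bridging here: the `--host` and `--model` flags never reach Python as arguments; `ask_chat` forwards them as the `OLLAMA_HOST` / `OLLAMA_MODEL` environment variables that `ask.py` reads. The README's chat command is therefore roughly equivalent to:

```sh
$ OLLAMA_HOST=http://localhost:11434 OLLAMA_MODEL=syui/ai \
    ./target/debug/aigpt mcp chat "hello world!"
```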

src/cli.rs

@ -4,11 +4,13 @@ use chrono::{Duration, Local};
use rusqlite::Connection;
use seahorse::{App, Command, Context};
use crate::utils::{load_config, save_config};
use crate::config::ConfigPaths;
use crate::agent::AIState;
use crate::commands::db::{save_cmd, export_cmd};
use crate::commands::scheduler::{scheduler_cmd};
use crate::commands::mcp::mcp_cmd;

pub fn cli_app() -> App {
    let set_cmd = Command::new("set")
@ -94,4 +96,5 @@ pub fn cli_app() -> App {
        .command(save_cmd())
        .command(export_cmd())
        .command(scheduler_cmd())
        .command(mcp_cmd())
}

src/commands/mcp.rs Normal file

@ -0,0 +1,160 @@
// src/commands/mcp.rs
use seahorse::{Command, Context, Flag, FlagType};
use crate::chat::ask_chat;
use crate::git::{git_init, git_status};
use std::fs;
use std::path::PathBuf;
use crate::config::ConfigPaths;
use std::process::Command as OtherCommand;

pub fn mcp_setup() {
    let config = ConfigPaths::new();
    let dest_dir = config.base_dir.join("mcp");
    let repo_url = "https://github.com/microsoft/MCP.git";
    println!("📁 MCP ディレクトリ: {}", dest_dir.display());

    // 1. Clone the repository if it is not there yet
    if !dest_dir.exists() {
        let status = OtherCommand::new("git")
            .args(&["clone", repo_url, dest_dir.to_str().unwrap()])
            .status()
            .expect("git clone に失敗しました");
        assert!(status.success(), "git clone 実行時にエラーが発生しました");
    }

    // 2. Copy the bundled Python assets into the destination
    let asset_base = PathBuf::from("mcp");
    let files_to_copy = vec![
        "cli.py",
        "setup.py",
        "scripts/ask.py",
        "scripts/context_loader.py",
        "scripts/prompt_template.py",
    ];
    for rel_path in files_to_copy {
        let src = asset_base.join(rel_path);
        let dst = dest_dir.join(rel_path);
        if let Some(parent) = dst.parent() {
            let _ = fs::create_dir_all(parent);
        }
        if let Err(e) = fs::copy(&src, &dst) {
            eprintln!("❌ コピー失敗: {} → {}: {}", src.display(), dst.display(), e);
        } else {
            println!("✅ コピー: {} → {}", src.display(), dst.display());
        }
    }

    // 3. Create the virtual environment
    let venv_path = dest_dir.join(".venv");
    if !venv_path.exists() {
        println!("🐍 仮想環境を作成しています...");
        let output = OtherCommand::new("python3")
            .args(&["-m", "venv", ".venv"])
            .current_dir(&dest_dir)
            .output()
            .expect("venvの作成に失敗しました");
        if !output.status.success() {
            eprintln!("❌ venv作成エラー: {}", String::from_utf8_lossy(&output.stderr));
            return;
        }
    }

    // 4. Run `pip install` inside the virtual environment
    let pip_path = if cfg!(target_os = "windows") {
        dest_dir.join(".venv/Scripts/pip.exe").to_string_lossy().to_string()
    } else {
        dest_dir.join(".venv/bin/pip").to_string_lossy().to_string()
    };

    println!("📦 必要なパッケージをインストールしています...");
    let output = OtherCommand::new(&pip_path)
        .arg("install")
        .arg("openai")
        .current_dir(&dest_dir)
        .output()
        .expect("pip install に失敗しました");
    if !output.status.success() {
        eprintln!(
            "❌ pip エラー: {}\n{}",
            String::from_utf8_lossy(&output.stderr),
            String::from_utf8_lossy(&output.stdout)
        );
        return;
    }

    println!("📦 pip install -e . を実行します...");
    let output = OtherCommand::new(&pip_path)
        .arg("install")
        .arg("-e")
        .arg(".")
        .current_dir(&dest_dir)
        .output()
        .expect("pip install に失敗しました");
    if output.status.success() {
        println!("🎉 MCP セットアップが完了しました!");
    } else {
        eprintln!(
            "❌ pip エラー: {}\n{}",
            String::from_utf8_lossy(&output.stderr),
            String::from_utf8_lossy(&output.stdout)
        );
    }
}

fn chat_cmd() -> Command {
    Command::new("chat")
        .description("チャットで質問を送る")
        .usage("mcp chat '質問内容' --host <OLLAMA_HOST> --model <OLLAMA_MODEL>")
        .flag(Flag::new("host", FlagType::String).description("OLLAMAホストのURL"))
        .flag(Flag::new("model", FlagType::String).description("OLLAMAモデル名"))
        .action(|c: &Context| {
            if let Some(question) = c.args.get(0) {
                ask_chat(c, question);
            } else {
                eprintln!("❗ 質問が必要です: mcp chat 'こんにちは'");
            }
        })
}

fn init_cmd() -> Command {
    Command::new("init")
        .description("Git 初期化")
        .usage("mcp init")
        .action(|_| {
            git_init();
        })
}

fn status_cmd() -> Command {
    Command::new("status")
        .description("Git ステータス表示")
        .usage("mcp status")
        .action(|_| {
            git_status();
        })
}

fn setup_cmd() -> Command {
    Command::new("setup")
        .description("MCP の初期セットアップ")
        .usage("mcp setup")
        .action(|_| {
            mcp_setup();
        })
}

pub fn mcp_cmd() -> Command {
    Command::new("mcp")
        .description("MCP操作コマンド")
        .usage("mcp <subcommand>")
        .alias("m")
        .command(chat_cmd())
        .command(init_cmd())
        .command(status_cmd())
        .command(setup_cmd())
}
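Because the command registers `.alias("m")`, the same subcommands should also be reachable via the short form, e.g.:

```sh
$ ./target/debug/aigpt m chat "hello world!" --model syui/ai
```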

src/commands/mod.rs

@ -1,2 +1,3 @@
pub mod db;
pub mod scheduler;
pub mod mcp;

src/git.rs Normal file

@ -0,0 +1,42 @@
// src/git.rs
use std::process::Command;

pub fn git_status() {
    run_git_command(&["status"]);
}

pub fn git_init() {
    run_git_command(&["init"]);
}

#[allow(dead_code)]
pub fn git_commit(message: &str) {
    run_git_command(&["add", "."]);
    run_git_command(&["commit", "-m", message]);
}

#[allow(dead_code)]
pub fn git_push() {
    run_git_command(&["push"]);
}

#[allow(dead_code)]
pub fn git_pull() {
    run_git_command(&["pull"]);
}

#[allow(dead_code)]
pub fn git_branch() {
    run_git_command(&["branch"]);
}

fn run_git_command(args: &[&str]) {
    let status = Command::new("git")
        .args(args)
        .status()
        .expect("git コマンドの実行に失敗しました");
    if !status.success() {
        eprintln!("⚠️ git コマンドに失敗しました: {:?}", args);
    }
}

src/main.rs

@ -6,6 +6,8 @@ mod cli;
mod utils;
mod commands;
mod config;
mod git;
mod chat;
use cli::cli_app;
use seahorse::App;