diff --git a/Cargo.toml b/Cargo.toml
index 20d1b33..8185d43 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -4,12 +4,10 @@ version = "0.1.0"
 edition = "2021"
 
 [dependencies]
-serde = { version = "1.0", features = ["derive"] }
-serde_json = "1.0"
-chrono = { version = "0.4", features = ["serde"] }
-seahorse = "*"
-rusqlite = { version = "0.29", features = ["serde_json"] }
+reqwest = { version = "*", features = ["json"] }
+serde = { version = "*", features = ["derive"] }
+serde_json = "*"
+tokio = { version = "*", features = ["full"] }
+clap = { version = "*", features = ["derive"] }
 shellexpand = "*"
-fs_extra = "1.3"
-rand = "0.9.1"
-reqwest = { version = "*", features = ["blocking", "json"] }
+fs_extra = "*"
diff --git a/README.md b/README.md
deleted file mode 100644
index c3b84c0..0000000
--- a/README.md
+++ /dev/null
@@ -1,47 +0,0 @@
-# ai `gpt`
-
-ai x Communication
-
-## Overview
-
-`ai.gpt` runs on the AGE system.
-
-This is a prototype of an autonomous, relationship-driven AI system based on the axes of "Personality × Relationship × External Environment × Time Variation."
-
-The parameters of "Send Permission," "Send Timing," and "Send Content" are determined by the factors of "Personality x Relationship x External Environment x Time Variation."
-
-## Integration
-
-`ai.ai` runs on the AIM system, which is designed to read human emotions.
-
-- AIM focuses on the axis of personality and ethics (AI's consciousness structure)
-- AGE focuses on the axis of behavior and relationships (AI's autonomy and behavior)
-
-> When these two systems work together, it creates a world where users can feel like they are "growing together with AI."
-
-## mcp
-
-```sh
-$ ollama run syui/ai
-```
-
-```sh
-$ cargo build
-$ ./aigpt mcp setup
-$ ./aigpt mcp chat "hello world!"
-$ ./aigpt mcp chat "hello world!" --host http://localhost:11434 --model syui/ai
-
----
-# openai api
-$ ./aigpt mcp set-api --api sk-abc123
-$ ./aigpt mcp chat "こんにちは" -p openai -m gpt-4o-mini
-
----
-# git管理されているファイルをAIに読ませる
-./aigpt mcp chat --host http://localhost:11434 --repo git@git.syui.ai:ai/gpt
-**改善案と次のステップ:**
-1. **README.md の大幅な改善:**
-**次のステップ:**
-1. **README.md の作成:** 1. の指示に従って、README.md ファイルを作成します。
-```
-
diff --git a/example.json b/example.json
deleted file mode 100644
index a25b06b..0000000
--- a/example.json
+++ /dev/null
@@ -1,40 +0,0 @@
-{
-  "personality": {
-    "kind": "positive",
-    "strength": 0.8
-  },
-  "relationship": {
-    "trust": 0.2,
-    "intimacy": 0.6,
-    "curiosity": 0.5,
-    "threshold": 1.5
-  },
-  "environment": {
-    "luck_today": 0.9,
-    "luck_history": [0.9, 0.9, 0.9],
-    "level": 1
-  },
-  "messaging": {
-    "enabled": true,
-    "schedule_time": "08:00",
-    "decay_rate": 0.1,
-    "templates": [
-      "おはよう!今日もがんばろう!",
-      "ねえ、話したいことがあるの。"
-    ],
-    "sent_today": false,
-    "last_sent_date": null
-  },
-  "last_interaction": "2025-05-21T23:15:00Z",
-  "memory": {
-    "recent_messages": [],
-    "long_term_notes": []
-  },
-  "metrics": {
-    "trust": 0.5,
-    "intimacy": 0.5,
-    "energy": 0.5,
-    "can_send": true,
-    "last_updated": "2025-05-21T15:52:06.590981Z"
-  }
-}
diff --git a/gpt.json b/gpt.json
deleted file mode 100644
index 5f546aa..0000000
--- a/gpt.json
+++ /dev/null
@@ -1 +0,0 @@
-{ "system_name": "AGE system", "full_name": "Autonomous Generative Entity", "description": "人格・関係性・環境・時間に基づき、AIが自律的にユーザーにメッセージを送信する自律人格システム。AIM systemと連携して、自然な会話や気づきをもたらす。", "core_components": { "personality": { "type": "enum", "variants": ["positive", "negative", "logical", "emotional", "mixed"], "parameters": { "message_trigger_style": "運勢や関係性による送信傾向", "decay_rate_modifier": "関係性スコアの時間減衰への影響" } }, "relationship": { "parameters": ["trust", "affection", "intimacy"], "properties": { "persistent": true, "hidden": true, "irreversible": false, "decay_over_time": true }, "decay_function": "exp(-t / strength)" }, "environment": { "daily_luck": { "type": "float", "range": [0.1, 1.0], "update": "daily", "streak_mechanism": { "trigger": "min_or_max_luck_3_times_in_a_row", "effect": "personality_strength_roll", "chance": 0.5 } } }, "memory": { "long_term_memory": "user_relationship_log", "short_term_context": "recent_interactions", "usage_in_generation": true }, "message_trigger": { "condition": { "relationship_threshold": { "trust": 0.8, "affection": 0.6 }, "time_decay": true, "environment_luck": "personality_dependent" }, "timing": { "based_on": ["time_of_day", "personality", "recent_interaction"], "modifiers": { "emotional": "morning or night", "logical": "daytime" } } }, "message_generation": { "style_variants": ["thought", "casual", "encouragement", "watchful"], "influenced_by": ["personality", "relationship", "daily_luck", "memory"], "llm_integration": true }, "state_transition": { "states": ["idle", "ready", "sending", "cooldown"], "transitions": { "ready_if": "thresholds_met", "sending_if": "timing_matched", "cooldown_after": "message_sent" } } }, "extensions": { "persistence": { "database": "sqlite", "storage_items": ["relationship", "personality_level", "daily_luck_log"] }, "api": { "llm": "openai / local LLM", "mode": "rust_cli", "external_event_trigger": true }, "scheduler": { "async_event_loop": true, "interval_check": 3600, "time_decay_check": true }, "integration_with_aim": { "input_from_aim": ["intent_score", "motivation_score"], "usage": "trigger_adjustment, message_personalization" } }, "note": "AGE systemは“話しかけてくるAI”の人格として機能し、AIMによる心の状態評価と連動して、プレイヤーと深い関係を築いていく存在となる。" }
diff --git a/img/ai_r.png b/img/ai_r.png
deleted file mode 100644
index 623709b..0000000
Binary files a/img/ai_r.png and /dev/null differ
diff --git a/img/image.png b/img/image.png
deleted file mode 100644
index 0b36a19..0000000
Binary files a/img/image.png and /dev/null differ
diff --git a/mcp/chat.py b/mcp/chat.py
new file mode 100644
index 0000000..0822c38
--- /dev/null
+++ b/mcp/chat.py
@@ -0,0 +1,125 @@
+# mcp/chat.py
+"""
+Chat client for aigpt CLI
+"""
+import sys
+import json
+import requests
+from datetime import datetime
+from config import init_directories, load_config, MEMORY_DIR
+
+def save_conversation(user_message, ai_response):
+    """会話をファイルに保存"""
+    init_directories()
+
+    conversation = {
+        "timestamp": datetime.now().isoformat(),
+        "user": user_message,
+        "ai": ai_response
+    }
+
+    # 日付ごとのファイルに保存
+    today = datetime.now().strftime("%Y-%m-%d")
+    chat_file = MEMORY_DIR / f"chat_{today}.jsonl"
+
+    with open(chat_file, "a", encoding="utf-8") as f:
+        f.write(json.dumps(conversation, ensure_ascii=False) + "\n")
+
+def chat_with_ollama(config, message):
+    """Ollamaとチャット"""
+    try:
+        payload = {
+            "model": config["model"],
+            "prompt": message,
+            "stream": False
+        }
+
+        response = requests.post(config["url"], json=payload, timeout=30)
+        response.raise_for_status()
+
+        result = response.json()
+        return result.get("response", "No response received")
+
+    except requests.exceptions.RequestException as e:
+        return f"Error connecting to Ollama: {e}"
+    except Exception as e:
+        return f"Error: {e}"
+
+def chat_with_openai(config, message):
+    """OpenAIとチャット"""
+    try:
+        headers = {
+            "Authorization": f"Bearer {config['api_key']}",
+            "Content-Type": "application/json"
+        }
+
+        payload = {
+            "model": config["model"],
+            "messages": [
+                {"role": "user", "content": message}
+            ]
+        }
+
+        response = requests.post(config["url"], json=payload, headers=headers, timeout=30)
+        response.raise_for_status()
+
+        result = response.json()
+        return result["choices"][0]["message"]["content"]
+
+    except requests.exceptions.RequestException as e:
+        return f"Error connecting to OpenAI: {e}"
+    except Exception as e:
+        return f"Error: {e}"
+
+def chat_with_mcp(config, message):
+    """MCPサーバーとチャット"""
+    try:
+        payload = {
+            "message": message,
+            "model": config["model"]
+        }
+
+        response = requests.post(config["url"], json=payload, timeout=30)
+        response.raise_for_status()
+
+        result = response.json()
+        return result.get("response", "No response received")
+
+    except requests.exceptions.RequestException as e:
+        return f"Error connecting to MCP server: {e}"
+    except Exception as e:
+        return f"Error: {e}"
+
+def main():
+    if len(sys.argv) != 2:
+        print("Usage: python chat.py <message>", file=sys.stderr)
+        sys.exit(1)
+
+    message = sys.argv[1]
+
+    try:
+        config = load_config()
+        print(f"🤖 Using {config['provider']} with model {config['model']}", file=sys.stderr)
+
+        # プロバイダに応じてチャット実行
+        if config["provider"] == "ollama":
+            response = chat_with_ollama(config, message)
+        elif config["provider"] == "openai":
+            response = chat_with_openai(config, message)
+        elif config["provider"] == "mcp":
+            response = chat_with_mcp(config, message)
+        else:
+            response = f"Unsupported provider: {config['provider']}"
+
+        # 会話を保存
+        save_conversation(message, response)
+
+        # レスポンスを出力
+        print(response)
+
+    except Exception as e:
+        print(f"❌ Error: {e}", file=sys.stderr)
+        sys.exit(1)
+
+if __name__ == "__main__":
+    main()
diff --git a/mcp/cli.py b/mcp/cli.py
deleted file mode 100644
index cca5620..0000000
--- a/mcp/cli.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# cli.py
-import sys
-import subprocess
-from pathlib import Path
-
-SCRIPT_DIR = Path.home() / ".config" / "aigpt" / "mcp" / "scripts"
-def run_script(name):
-    script_path = SCRIPT_DIR / f"{name}.py"
-    if not script_path.exists():
-        print(f"❌ スクリプトが見つかりません: {script_path}")
-        sys.exit(1)
-
-    args = sys.argv[2:]  # ← "ask" の後の引数を取り出す
-    result = subprocess.run(["python", str(script_path)] + args, capture_output=True, text=True)
-    print(result.stdout)
-    if result.stderr:
-        print(result.stderr)
-def main():
-    if len(sys.argv) < 2:
-        print("Usage: mcp