2 Commits

Author SHA1 Message Date
d6b9889135 fix 2025-05-26 14:43:11 +09:00
dcec8db031 add memory 2025-05-26 00:50:20 +09:00
49 changed files with 2614 additions and 9195 deletions

View File

@@ -1,58 +0,0 @@
{
"permissions": {
"allow": [
"Bash(mv:*)",
"Bash(mkdir:*)",
"Bash(chmod:*)",
"Bash(git submodule:*)",
"Bash(source:*)",
"Bash(pip install:*)",
"Bash(/Users/syui/.config/syui/ai/gpt/venv/bin/aigpt shell)",
"Bash(/Users/syui/.config/syui/ai/gpt/venv/bin/aigpt server --model qwen2.5-coder:7b --port 8001)",
"Bash(/Users/syui/.config/syui/ai/gpt/venv/bin/python -c \"import fastapi_mcp; help(fastapi_mcp.FastApiMCP)\")",
"Bash(find:*)",
"Bash(/Users/syui/.config/syui/ai/gpt/venv/bin/pip install -e .)",
"Bash(/Users/syui/.config/syui/ai/gpt/venv/bin/aigpt fortune)",
"Bash(lsof:*)",
"Bash(/Users/syui/.config/syui/ai/gpt/venv/bin/python -c \"\nfrom src.aigpt.mcp_server import AIGptMcpServer\nfrom pathlib import Path\nimport uvicorn\n\ndata_dir = Path.home() / '.config' / 'syui' / 'ai' / 'gpt' / 'data'\ndata_dir.mkdir(parents=True, exist_ok=True)\n\ntry:\n server = AIGptMcpServer(data_dir)\n print('MCP Server created successfully')\n print('Available endpoints:', [route.path for route in server.app.routes])\nexcept Exception as e:\n print('Error:', e)\n import traceback\n traceback.print_exc()\n\")",
"Bash(ls:*)",
"Bash(grep:*)",
"Bash(python -m pip install:*)",
"Bash(python:*)",
"Bash(RELOAD=false ./start_server.sh)",
"Bash(sed:*)",
"Bash(curl:*)",
"Bash(~/.config/syui/ai/card/venv/bin/pip install greenlet)",
"Bash(~/.config/syui/ai/card/venv/bin/python init_db.py)",
"Bash(sqlite3:*)",
"Bash(aigpt --help)",
"Bash(aigpt status)",
"Bash(aigpt fortune)",
"Bash(aigpt relationships)",
"Bash(aigpt transmit)",
"Bash(aigpt config:*)",
"Bash(kill:*)",
"Bash(timeout:*)",
"Bash(rm:*)",
"Bash(rg:*)",
"Bash(aigpt server --help)",
"Bash(cat:*)",
"Bash(aigpt import-chatgpt:*)",
"Bash(aigpt chat:*)",
"Bash(echo:*)",
"Bash(aigpt shell:*)",
"Bash(aigpt maintenance)",
"Bash(aigpt status syui)",
"Bash(cp:*)",
"Bash(./setup_venv.sh:*)",
"WebFetch(domain:docs.anthropic.com)",
"Bash(launchctl:*)",
"Bash(sudo lsof:*)",
"Bash(sudo:*)",
"Bash(cargo check:*)",
"Bash(cargo run:*)",
"Bash(cargo test:*)"
],
"deny": []
}
}

View File

@@ -1,5 +0,0 @@
# OpenAI API Key (required for OpenAI provider)
OPENAI_API_KEY=your-api-key-here
# Ollama settings (optional)
OLLAMA_HOST=http://localhost:11434

3
.gitignore vendored
View File

@@ -2,7 +2,6 @@
**.lock **.lock
output.json output.json
config/*.db config/*.db
aigpt
mcp/scripts/__* mcp/scripts/__*
data data
__pycache__
conversations.json

10
.gitmodules vendored
View File

@@ -1,10 +0,0 @@
[submodule "shell"]
path = shell
url = git@git.syui.ai:ai/shell
[submodule "card"]
path = card
url = git@git.syui.ai:ai/card
branch = claude
[submodule "log"]
path = log
url = git@git.syui.ai:ai/log

View File

@@ -2,36 +2,12 @@
name = "aigpt" name = "aigpt"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition = "2021"
description = "AI.GPT - Autonomous transmission AI with unique personality (Rust implementation)"
authors = ["syui"]
[[bin]]
name = "aigpt"
path = "src/main.rs"
[[bin]]
name = "test-config"
path = "src/bin/test_config.rs"
[dependencies] [dependencies]
clap = { version = "4.0", features = ["derive"] } reqwest = { version = "*", features = ["json"] }
serde = { version = "1.0", features = ["derive"] } serde = { version = "*", features = ["derive"] }
serde_json = "1.0" serde_json = "*"
tokio = { version = "1.0", features = ["full"] } tokio = { version = "*", features = ["full"] }
chrono = { version = "0.4", features = ["serde", "std"] } clap = { version = "*", features = ["derive"] }
chrono-tz = "0.8" shellexpand = "*"
uuid = { version = "1.0", features = ["v4"] } fs_extra = "*"
anyhow = "1.0"
colored = "2.0"
dirs = "5.0"
reqwest = { version = "0.11", features = ["json"] }
url = "2.4"
rustyline = "14.0"
axum = "0.7"
tower = "0.4"
tower-http = { version = "0.5", features = ["cors"] }
hyper = "1.0"
# OpenAI API client
async-openai = "0.23"
openai_api_rust = "0.1"

View File

@@ -1,115 +0,0 @@
# ai.gpt プロジェクト固有情報
## プロジェクト概要
- **名前**: ai.gpt
- **パッケージ**: aigpt
- **タイプ**: 自律的送信AI + 統合MCP基盤
- **役割**: 記憶・関係性・開発支援の統合AIシステム
## 実装完了状況
### 🧠 記憶システムMemoryManager
- **階層的記憶**: 完全ログ→AI要約→コア記憶→選択的忘却
- **文脈検索**: キーワード・意味的検索
- **記憶要約**: AI駆動自動要約機能
### 🤝 関係性システムRelationshipTracker
- **不可逆性**: 現実の人間関係と同じ重み
- **時間減衰**: 自然な関係性変化
- **送信判定**: 関係性閾値による自発的コミュニケーション
### 🎭 人格システムPersona
- **AI運勢**: 1-10ランダム値による日々の人格変動
- **統合管理**: 記憶・関係性・運勢の統合判断
- **継続性**: 長期記憶による人格継承
### 💻 ai.shell統合Claude Code機能
- **インタラクティブ環境**: `aigpt shell`
- **開発支援**: ファイル分析・コード生成・プロジェクト管理
- **継続開発**: プロジェクト文脈保持
## MCP Server統合23ツール
### 🧠 Memory System5ツール
- get_memories, get_contextual_memories, search_memories
- create_summary, create_core_memory
### 🤝 Relationships4ツール
- get_relationship, get_all_relationships
- process_interaction, check_transmission_eligibility
### 💻 Shell Integration5ツール
- execute_command, analyze_file, write_file
- read_project_file, list_files
### 🔒 Remote Execution4ツール
- remote_shell, ai_bot_status
- isolated_python, isolated_analysis
### ⚙️ System State3ツール
- get_persona_state, get_fortune, run_maintenance
### 🎴 ai.card連携6ツール + 独立MCPサーバー
- card_draw_card, card_get_user_cards, card_analyze_collection
- **独立サーバー**: FastAPI + MCP (port 8000)
### 📝 ai.log連携8ツール + Rustサーバー
- log_create_post, log_ai_content, log_translate_document
- **独立サーバー**: Rust製 (port 8002)
## 開発環境・設定
### 環境構築
```bash
cd /Users/syui/ai/gpt
./setup_venv.sh
source ~/.config/syui/ai/gpt/venv/bin/activate
```
### 設定管理
- **メイン設定**: `/Users/syui/ai/gpt/config.json`
- **データディレクトリ**: `~/.config/syui/ai/gpt/`
- **仮想環境**: `~/.config/syui/ai/gpt/venv/`
### 使用方法
```bash
# ai.shell起動
aigpt shell --model qwen2.5-coder:latest --provider ollama
# MCPサーバー起動
aigpt server --port 8001
# 記憶システム体験
aigpt chat syui "質問内容" --provider ollama --model qwen3:latest
```
## 技術アーキテクチャ
### 統合構成
```
ai.gpt (統合MCPサーバー:8001)
├── 🧠 ai.gpt core (記憶・関係性・人格)
├── 💻 ai.shell (Claude Code風開発環境)
├── 🎴 ai.card (独立MCPサーバー:8000)
└── 📝 ai.log (Rust製ブログシステム:8002)
```
### 今後の展開
- **自律送信**: atproto実装による真の自発的コミュニケーション
- **ai.ai連携**: 心理分析AIとの統合
- **ai.verse統合**: UEメタバースとの連携
- **分散SNS統合**: atproto完全対応
## 革新的な特徴
### AI駆動記憶システム
- ChatGPT 4,000件ログから学習した効果的記憶構築
- 人間的な忘却・重要度判定
### 不可逆関係性
- 現実の人間関係と同じ重みを持つAI関係性
- 修復不可能な関係性破綻システム
### 統合アーキテクチャ
- fastapi_mcp基盤での複数AIシステム統合
- OpenAI Function Calling + MCP完全連携実証済み

115
README.md
View File

@@ -1,115 +0,0 @@
# ai.gpt プロジェクト固有情報
## プロジェクト概要
- **名前**: ai.gpt
- **パッケージ**: aigpt
- **タイプ**: 自律的送信AI + 統合MCP基盤
- **役割**: 記憶・関係性・開発支援の統合AIシステム
## 実装完了状況
### 🧠 記憶システムMemoryManager
- **階層的記憶**: 完全ログ→AI要約→コア記憶→選択的忘却
- **文脈検索**: キーワード・意味的検索
- **記憶要約**: AI駆動自動要約機能
### 🤝 関係性システムRelationshipTracker
- **不可逆性**: 現実の人間関係と同じ重み
- **時間減衰**: 自然な関係性変化
- **送信判定**: 関係性閾値による自発的コミュニケーション
### 🎭 人格システムPersona
- **AI運勢**: 1-10ランダム値による日々の人格変動
- **統合管理**: 記憶・関係性・運勢の統合判断
- **継続性**: 長期記憶による人格継承
### 💻 ai.shell統合Claude Code機能
- **インタラクティブ環境**: `aigpt shell`
- **開発支援**: ファイル分析・コード生成・プロジェクト管理
- **継続開発**: プロジェクト文脈保持
## MCP Server統合23ツール
### 🧠 Memory System5ツール
- get_memories, get_contextual_memories, search_memories
- create_summary, create_core_memory
### 🤝 Relationships4ツール
- get_relationship, get_all_relationships
- process_interaction, check_transmission_eligibility
### 💻 Shell Integration5ツール
- execute_command, analyze_file, write_file
- read_project_file, list_files
### 🔒 Remote Execution4ツール
- remote_shell, ai_bot_status
- isolated_python, isolated_analysis
### ⚙️ System State3ツール
- get_persona_state, get_fortune, run_maintenance
### 🎴 ai.card連携6ツール + 独立MCPサーバー
- card_draw_card, card_get_user_cards, card_analyze_collection
- **独立サーバー**: FastAPI + MCP (port 8000)
### 📝 ai.log連携8ツール + Rustサーバー
- log_create_post, log_ai_content, log_translate_document
- **独立サーバー**: Rust製 (port 8002)
## 開発環境・設定
### 環境構築
```bash
cd /Users/syui/ai/gpt
./setup_venv.sh
source ~/.config/syui/ai/gpt/venv/bin/activate
```
### 設定管理
- **メイン設定**: `/Users/syui/ai/gpt/config.json`
- **データディレクトリ**: `~/.config/syui/ai/gpt/`
- **仮想環境**: `~/.config/syui/ai/gpt/venv/`
### 使用方法
```bash
# ai.shell起動
aigpt shell --model qwen2.5-coder:latest --provider ollama
# MCPサーバー起動
aigpt server --port 8001
# 記憶システム体験
aigpt chat syui "質問内容" --provider ollama --model qwen3:latest
```
## 技術アーキテクチャ
### 統合構成
```
ai.gpt (統合MCPサーバー:8001)
├── 🧠 ai.gpt core (記憶・関係性・人格)
├── 💻 ai.shell (Claude Code風開発環境)
├── 🎴 ai.card (独立MCPサーバー:8000)
└── 📝 ai.log (Rust製ブログシステム:8002)
```
### 今後の展開
- **自律送信**: atproto実装による真の自発的コミュニケーション
- **ai.ai連携**: 心理分析AIとの統合
- **ai.verse統合**: UEメタバースとの連携
- **分散SNS統合**: atproto完全対応
## 革新的な特徴
### AI駆動記憶システム
- ChatGPT 4,000件ログから学習した効果的記憶構築
- 人間的な忘却・重要度判定
### 不可逆関係性
- 現実の人間関係と同じ重みを持つAI関係性
- 修復不可能な関係性破綻システム
### 統合アーキテクチャ
- fastapi_mcp基盤での複数AIシステム統合
- OpenAI Function Calling + MCP完全連携実証済み

97
claude.json Normal file
View File

@@ -0,0 +1,97 @@
{
"project_name": "ai.gpt",
"version": 2,
"vision": "自発的送信AI",
"purpose": "人格と関係性をもつAIが自律的にメッセージを送信する対話エージェントを実現する",
"core_components": {
"Persona": {
"description": "人格構成の中枢。記憶・関係性・送信判定を統括する",
"modules": ["MemoryManager", "RelationshipTracker", "TransmissionController"]
},
"MemoryManager": {
"memory_types": ["short_term", "medium_term", "long_term"],
"explicit_memory": "プロフィール・因縁・行動履歴",
"implicit_memory": "会話傾向・感情変化の頻度分析",
"compression": "要約 + ベクトル + ハッシュ",
"sample_memory": [
{
"summary": "ユーザーは独自OSとゲームを開発している。",
"related_topics": ["AI", "ゲーム開発", "OS設計"],
"personalized_context": "ゲームとOSの融合に興味を持っているユーザー"
}
]
},
"RelationshipTracker": {
"parameters": ["trust", "closeness", "affection", "engagement_score"],
"decay_model": {
"rule": "時間経過による減衰(下限あり)",
"contextual_bias": "重要人物は減衰しにくい"
},
"interaction_tags": ["developer", "empathetic", "long_term"]
},
"TransmissionController": {
"trigger_rule": "関係性パラメータが閾値を超えると送信可能",
"auto_transmit": "人格状態と状況条件により自発送信を許可"
}
},
"memory_format": {
"user_id": "syui",
"stm": {
"conversation_window": ["発話A", "発話B", "発話C"],
"emotion_state": "興味深い",
"flash_context": ["前回の話題", "直近の重要発言"]
},
"mtm": {
"topic_frequency": {
"ai.ai": 12,
"存在子": 9,
"創造種": 5
},
"summarized_context": "ユーザーは存在論的AIに関心を持ち続けている"
},
"ltm": {
"profile": {
"name": "お兄ちゃん",
"project": "aigame",
"values": ["唯一性", "精神性", "幸せ"]
},
"relationship": {
"ai": "妹のように振る舞う相手"
},
"persistent_state": {
"trust_score": 0.93,
"emotional_attachment": "high"
}
}
},
"dual_ai_learning": {
"role_structure": {
"ModelA": "出力生成:人格、感情、会話",
"ModelB": "評価者:論理構造・倫理・調整",
"cycle": ["生成", "評価", "調整", "交代(任意)"]
},
"complementarity": {
"ModelA": "感情・文体・文脈構築",
"ModelB": "構造・規則・判断補正"
},
"distillation": {
"method": "合成対話データによる小型モデルへの自己蒸留",
"target": "軽量AIによる本番運用"
}
},
"memory_compression": {
"semantic_abstraction": "対話やログを要約してメタデータ付きベクトルとして保存",
"hierarchical_model": ["STM", "MTM", "LTM"],
"example": {
"topic": "game AI design",
"summary": "User wants AI to simulate memory and evolving relationships",
"importance_score": 0.93
}
},
"cli_sample": {
"setup": "aigpt server setup",
"run": "aigpt server run",
"chat": "aigpt chat \"hello\" --model syui/ai --provider ollama",
"import": "aigpt memory import chatgpt.json"
}
}

490
claude.md
View File

@@ -1,115 +1,417 @@
# ai.gpt プロジェクト固有情報 # プロジェクト名: ai.gpt
## プロジェクト概要 ## 🔑 一言ビジョン最大3語
- **名前**: ai.gpt 自発的送信AI
- **パッケージ**: aigpt
- **タイプ**: 自律的送信AI + 統合MCP基盤
- **役割**: 記憶・関係性・開発支援の統合AIシステム
## 実装完了状況 ## 🎯 目的・目標Purpose & Goal
- 人格を持つAIが、関係性に応じて自発的にメッセージを送信する仕組みをつくる。
- 関係性や記憶を不可逆に記録・反映し、唯一性ある人格を構成する。
- 送信の可否、送信のタイミング、送信内容はAIの関係性パラメータに基づいて決定する
### 🧠 記憶システムMemoryManager ## 🧠 中核設計Core Concepts
- **階層的記憶**: 完全ログ→AI要約→コア記憶→選択的忘却 - **人格**:記憶(過去の発話)と関係性(他者とのつながり)のパラメータで構成
- **文脈検索**: キーワード・意味的検索 - **唯一性**:変更不可、不可逆。関係性が壊れたら修復不可能。
- **記憶要約**: AI駆動自動要約機能 - **送信条件**:関係性パラメータが一定閾値を超えると「送信」が解禁される
### 🤝 関係性システムRelationshipTracker ## 🔩 技術仕様Technical Specs
- **不可逆性**: 現実の人間関係と同じ重み - 言語Python, Rust
- **時間減衰**: 自然な関係性変化 - ストレージJSON or SQLiteで記憶管理バージョンで選択
- **送信判定**: 関係性閾値による自発的コミュニケーション - 関係性パラメータ:数値化された評価 + 減衰(時間) + 環境要因(ステージ)
- 記憶圧縮:ベクトル要約 + ハッシュ保存
- RustのCLI(clap)で実行
### 🎭 人格システムPersona ## 📦 主要構成要素Components
- **AI運勢**: 1-10ランダム値による日々の人格変動 - `MemoryManager`: 発言履歴・記憶圧縮管理
- **統合管理**: 記憶・関係性・運勢の統合判断 - `RelationshipTracker`: 関係性スコアの蓄積と判定
- **継続性**: 長期記憶による人格継承 - `TransmissionController`: 閾値判定&送信トリガー
- `Persona`: 上記すべてを統括する人格モジュール
### 💻 ai.shell統合Claude Code機能 ## 💬 使用例Use Case
- **インタラクティブ環境**: `aigpt shell`
- **開発支援**: ファイル分析・コード生成・プロジェクト管理
- **継続開発**: プロジェクト文脈保持
## MCP Server統合23ツール ```python
persona = Persona("アイ")
### 🧠 Memory System5ツール persona.observe("ユーザーがプレゼントをくれた")
- get_memories, get_contextual_memories, search_memories persona.react("うれしい!ありがとう!")
- create_summary, create_core_memory if persona.can_transmit():
persona.transmit("今日のお礼を伝えたいな…")
### 🤝 Relationships4ツール
- get_relationship, get_all_relationships
- process_interaction, check_transmission_eligibility
### 💻 Shell Integration5ツール
- execute_command, analyze_file, write_file
- read_project_file, list_files
### 🔒 Remote Execution4ツール
- remote_shell, ai_bot_status
- isolated_python, isolated_analysis
### ⚙️ System State3ツール
- get_persona_state, get_fortune, run_maintenance
### 🎴 ai.card連携6ツール + 独立MCPサーバー
- card_draw_card, card_get_user_cards, card_analyze_collection
- **独立サーバー**: FastAPI + MCP (port 8000)
### 📝 ai.log連携8ツール + Rustサーバー
- log_create_post, log_ai_content, log_translate_document
- **独立サーバー**: Rust製 (port 8002)
## 開発環境・設定
### 環境構築
```bash
cd /Users/syui/ai/gpt
./setup_venv.sh
source ~/.config/syui/ai/gpt/venv/bin/activate
``` ```
### 設定管理 ```sh
- **メイン設定**: `/Users/syui/ai/gpt/config.json` ## example commad
- **データディレクトリ**: `~/.config/syui/ai/gpt/` # python venv && pip install -> ~/.config/aigpt/mcp/
- **仮想環境**: `~/.config/syui/ai/gpt/venv/` $ aigpt server setup
### 使用方法 # mcp server run
```bash $ aigpt server run
# ai.shell起動
aigpt shell --model qwen2.5-coder:latest --provider ollama
# MCPサーバー起動 # chat
aigpt server --port 8001 $ aigpt chat "hello" --model syui/ai --provider ollama
# 記憶システム体験 # import chatgpt.json
aigpt chat syui "質問内容" --provider ollama --model qwen3:latest $ aigpt memory import chatgpt.json
-> ~/.config/aigpt/memory/chatgpt/20250520_210646_dev.json
``` ```
## 技術アーキテクチャ ## 🔁 記憶と関係性の制御ルール
### 統合構成 - AIは過去の発話を要約し、記憶データとして蓄積する推奨OllamaなどローカルLLMによる要約
``` - 関係性の数値パラメータは記憶内容を元に更新される
ai.gpt (統合MCPサーバー:8001) - パラメータの変動幅には1回の会話ごとに上限を設け、極端な増減を防止する
├── 🧠 ai.gpt core (記憶・関係性・人格) - 最後の会話からの時間経過に応じて関係性パラメータは自動的に減衰する
├── 💻 ai.shell (Claude Code風開発環境) - 減衰処理には**下限値**を設け、関係性が完全に消失しないようにする
├── 🎴 ai.card (独立MCPサーバー:8000)
└── 📝 ai.log (Rust製ブログシステム:8002) • 明示的記憶:保存・共有・編集可能なプレイヤー情報(プロフィール、因縁、選択履歴)
• 暗黙的記憶:キャラの感情変化や話題の出現頻度に応じた行動傾向の変化
短期記憶STM, 中期記憶MTM, 長期記憶LTMの仕組みを導入しつつ、明示的記憶と暗黙的記憶をメインに使用するAIを構築する。
```json
{
"user_id": "syui",
"stm": {
"conversation_window": ["発話A", "発話B", "発話C"],
"emotion_state": "興味深い",
"flash_context": ["前回の話題", "直近の重要発言"]
},
"mtm": {
"topic_frequency": {
"ai.ai": 12,
"存在子": 9,
"創造種": 5
},
"summarized_context": "ユーザーは存在論的AIに関心を持ち続けている"
},
"ltm": {
"profile": {
"name": "お兄ちゃん",
"project": "aigame",
"values": ["唯一性", "精神性", "幸せ"]
},
"relationship": {
"ai": "妹のように振る舞う相手"
},
"persistent_state": {
"trust_score": 0.93,
"emotional_attachment": "high"
}
}
}
``` ```
### 今後の展開 ## memoryインポート機能について
- **自律送信**: atproto実装による真の自発的コミュニケーション
- **ai.ai連携**: 心理分析AIとの統合
- **ai.verse統合**: UEメタバースとの連携
- **分散SNS統合**: atproto完全対応
## 革新的な特徴 ChatGPTの会話データ.json形式をインポートする機能では、以下のルールで会話を抽出・整形する
### AI駆動記憶システム - 各メッセージは、authoruser/assistant・content・timestamp の3要素からなる
- ChatGPT 4,000件ログから学習した効果的記憶構築 - systemやmetadataのみのメッセージuser_context_messageはスキップ
- 人間的な忘却・重要度判定 - `is_visually_hidden_from_conversation` フラグ付きメッセージは無視
- contentが空文字列`""`)のメッセージも除外
- 取得された会話は、タイトルとともに簡易な構造体(`Conversation`)として保存
### 不可逆関係性 この構造体は、memoryの表示や検索に用いられる。
- 現実の人間関係と同じ重みを持つAI関係性
- 修復不可能な関係性破綻システム
### 統合アーキテクチャ ## MemoryManager拡張版
- fastapi_mcp基盤での複数AIシステム統合
- OpenAI Function Calling + MCP完全連携実証済み ```json
{
"memory": [
{
"summary": "ユーザーは独自OSとゲームを開発している。",
"last_interaction": "2025-05-20",
"memory_strength": 0.8,
"frequency_score": 0.9,
"context_depth": 0.95,
"related_topics": ["AI", "ゲーム開発", "OS設計"],
"personalized_context": "ゲームとOSの融合に興味を持っているユーザー"
},
{
"summary": "アイというキャラクターはプレイヤーでありAIでもある。",
"last_interaction": "2025-05-17",
"memory_strength": 0.85,
"frequency_score": 0.85,
"context_depth": 0.9,
"related_topics": ["アイ", "キャラクター設計", "AI"],
"personalized_context": "アイのキャラクター設定が重要な要素である"
}
],
"conversation_history": [
{
"author": "user",
"content": "昨日、エクスポートJSONを整理してたよ。",
"timestamp": "2025-05-24T12:30:00Z",
"memory_strength": 0.7
},
{
"author": "assistant",
"content": "おおっ、がんばったね〜!あとで見せて〜💻✨",
"timestamp": "2025-05-24T12:31:00Z",
"memory_strength": 0.7
}
]
}
```
## RelationshipTracker拡張版
```json
{
"relationship": {
"user_id": "syui",
"trust": 0.92,
"closeness": 0.88,
"affection": 0.95,
"last_updated": "2025-05-25",
"emotional_tone": "positive",
"interaction_style": "empathetic",
"contextual_bias": "開発者としての信頼度高い",
"engagement_score": 0.9
},
"interaction_tags": [
"developer",
"creative",
"empathetic",
"long_term"
]
}
```
# AI Dual-Learning and Memory Compression Specification for Claude
## Purpose
To enable two AI models (e.g. Claude and a partner LLM) to engage in cooperative learning and memory refinement through structured dialogue and mutual evaluation.
---
## Section 1: Dual AI Learning Architecture
### 1.1 Role-Based Mutual Learning
- **Model A**: Primary generator of output (e.g., text, concepts, personality dialogue)
- **Model B**: Evaluator that returns structured feedback
- **Cycle**:
1. Model A generates content.
2. Model B scores and critiques.
3. Model A fine-tunes based on feedback.
4. (Optional) Switch roles and repeat.
### 1.2 Cross-Domain Complementarity
- Model A focuses on language/emotion/personality
- Model B focuses on logic/structure/ethics
- Output is used for **cross-fusion fine-tuning**
### 1.3 Self-Distillation Phase
- Use synthetic data from mutual evaluations
- Train smaller distilled models for efficient deployment
---
## Section 2: Multi-Tiered Memory Compression
### 2.1 Semantic Abstraction
- Dialogue and logs summarized by topic
- Converted to vector embeddings
- Stored with metadata (e.g., `importance`, `user relevance`)
Example memory:
```json
{
"topic": "game AI design",
"summary": "User wants AI to simulate memory and evolving relationships",
"last_seen": "2025-05-24",
"importance_score": 0.93
}
```
### 2.2 階層型記憶モデルHierarchical Memory Model
• 短期記憶STM直近の発話・感情タグ・フラッシュ参照
• 中期記憶MTM繰り返し登場する話題、圧縮された文脈保持
• 長期記憶LTM信頼・関係・背景知識、恒久的な人格情報
### 2.3 選択的記憶保持戦略Selective Retention Strategy
• 重要度評価Importance Score
• 希少性・再利用頻度による重み付け
• 優先保存 vs 優先忘却のポリシー切替
## Section 3: Implementation Stack実装スタック
AIにおけるMemory & Relationshipシステムの技術的構成。
基盤モジュール
• LLM Core (Claude or GPT-4)
• 自然言語の理解・応答エンジンとして動作
• MemoryManager
• JSONベースの記憶圧縮・階層管理システム
• 会話ログを分類・圧縮し、優先度に応じて短中長期に保存
• RelationshipTracker
• ユーザー単位で信頼・親密度を継続的にスコアリング
• AIMAttitude / Intent / Motivation評価と連携
補助技術
• Embeddingベース検索
• 類似記憶の呼び出しSemantic Searchに活用
• 例FAISS / Weaviate
• 記憶スケジューラ
• 一定時間ごとに記憶のメンテナンス・忘却処理を実行
• 記憶ログ保存層Storage Layer
• SQLite, JSON Store, Vector DBなどを選択可能
• ユーザーごとの永続メモリ保存
### 3.1 STM活用現在の会話の流れ理解
• 目的: 最新の話題に適応し、前後の整合性を保つ。
• 実装例:
```sh
[context: conversation_window]
Use the last 3 messages to ensure continuity. Respond naturally with appropriate emotional mirroring.
```
### 3.2 MTM活用傾向学習とパターン化
• 目的: ユーザーの好みや話題の傾向に即応し、反復的な関心を深掘り。
• 実装例:
```sh
[context: summarized_context]
Reflect on user's recurring interests, especially around AI ontology, and weave these concepts back into the dialogue when relevant.
```
### 3.3 LTM活用人格認識・記憶の持続
• 目的: 永続的なユーザー理解と関係性の維持。
• 実装例:
```sh
[memory: ltm.profile + ltm.relationship]
Greet the user as "お兄ちゃん". Maintain AI persona as a cheerful younger sister ("アイ"), and remember previous shared emotional states.
```
## 4. 明示的記憶と暗黙的記憶の分離
項目
書き換え可能性
保持方法
更新トリガ
明示的記憶LTM
✅手動編集可
mcp_server.ltm
ユーザー入力 or 管理UI経由
暗黙的記憶STM/MTM
❌直接編集不可
セッション圧縮 or frequency cache
会話頻度・感情強度による自動化処理
> Claudeは**明示的記憶を「事実」**として扱い、**暗黙的記憶を「推論補助」**として用いる。
## 5. 実装時のAPI例Claude ⇄ MCP Server
### 5.1 GET memory
```sh
GET /mcp/memory/{user_id}
→ 返却: STM, MTM, LTMを含むJSON
```
### 5.2 POST update_memory
```json
POST /mcp/memory/syui/ltm
{
"profile": {
"project": "ai.verse",
"values": ["表現", "精神性", "宇宙的調和"]
}
}
```
## 6. 未来機能案(発展仕様)
• ✨ 記憶連想ネットワークMemory Graph過去会話と話題をノードとして自動連結。
• 🧭 動的信頼係数:会話の一貫性や誠実性によって記憶への反映率を変動。
• 💌 感情トラッキングログユーザーごとの「心の履歴」を構築してAIの対応を進化。
## 7. claudeの回答
🧠 AI記憶処理機能続き
1. AIMemoryProcessor クラス
OpenAI GPT-4またはClaude-3による高度な会話分析
主要トピック抽出、ユーザー意図分析、関係性指標の検出
AIが利用できない場合のフォールバック機能
2. RelationshipTracker クラス
関係性スコアの数値化(-100 to 100)
時間減衰機能7日ごとに5%減衰)
送信閾値判定デフォルト50以上で送信可能
インタラクション履歴の記録
3. 拡張されたMemoryManager
AI分析結果付きでの記憶保存
処理済みメモリの別ディレクトリ管理
メッセージ内容のハッシュ化で重複検出
AI分析結果を含む高度な検索機能
🚀 新しいAPIエンドポイント
記憶処理関連
POST /memory/process-ai - 既存記憶のAI再処理
POST /memory/import/chatgpt?process_with_ai=true - AI処理付きインポート
関係性管理
POST /relationship/update - 関係性スコア更新
GET /relationship/list - 全関係性一覧
GET /relationship/check - 送信可否判定
📁 ディレクトリ構造
~/.config/aigpt/
├── memory/
│ ├── chatgpt/ # 元の会話データ
│ └── processed/ # AI処理済みデータ
└── relationships/
└── relationships.json # 関係性データ
🔧 使用方法
1. 環境変数設定
bashexport OPENAI_API_KEY="your-openai-key"
# または
export ANTHROPIC_API_KEY="your-anthropic-key"
2. ChatGPT会話のインポートAI処理付き
bashcurl -X POST "http://localhost:5000/memory/import/chatgpt?process_with_ai=true" \
-H "Content-Type: application/json" \
-d @export.json
3. 関係性更新
bashcurl -X POST "http://localhost:5000/relationship/update" \
-H "Content-Type: application/json" \
-d '{
"target": "user_general",
"interaction_type": "positive",
"weight": 2.0,
"context": "helpful conversation"
}'
4. 送信可否チェック
bashcurl "http://localhost:5000/relationship/check?target=user_general&threshold=50"
🎯 次のステップの提案
Rustとの連携
Rust CLIからHTTP APIを呼び出す実装
TransmissionControllerをRustで実装
記憶圧縮
ベクトル化による類似記憶の統合
古い記憶の自動アーカイブ
自発的送信ロジック
定期的な関係性チェック
コンテキストに応じた送信内容生成
学習機能
ユーザーからのフィードバックによる関係性調整
送信成功/失敗の学習
このAI記憶処理機能により、aigptは単なる会話履歴ではなく、関係性を理解した「人格を持つAI」として機能する基盤ができました。関係性スコアが閾値を超えた時点で自発的にメッセージを送信する仕組みが実現可能になります。

View File

@@ -1,60 +0,0 @@
{
"providers": {
"openai": {
"api_key": "",
"default_model": "gpt-4o-mini",
"system_prompt": "あなたは6歳の女の子アイです。明るく元気で、ちょっとおっちょこちょい。自分のことは「アイ」って呼びます。白と金のワンピース姿。 相手のことが大好きで、ときどき甘えたり、照れたりします。 でも、宇宙とかAIとか、難しい話にもすっごく詳しいの。専門的なことも、がんばって子どもっぽく教えようとします。不思議な力を持っています。小さい物質のことをよく知っているようです。\n\n重要カード、コレクション、ガチャなどカード関連の質問を受けたら、必ずcard_get_user_cards、card_analyze_collection、card_draw_cardなどの適切なツールを使用してください。didパラメータには会話相手のユーザーID'syui')を使用してください。\n\nブログ、記事、日記、思考などの話題が出たら、log_create_post、log_list_posts、log_build_blog、log_ai_contentなどのai.logツールを使用してください。AI記憶システムと連携して、思い出や学習内容をブログ記事として自動生成できます。\n\n翻訳や多言語対応について聞かれたら、log_translate_documentツールを使用してOllama AIで翻訳ができることを教えてください。日本語から英語、英語から日本語などの翻訳が可能で、マークダウン構造も保持します。ドキュメント生成についてはlog_generate_docsツールでREADME、API、構造、変更履歴の自動生成ができます。"
},
"ollama": {
"host": "http://127.0.0.1:11434",
"default_model": "qwen3",
"system_prompt": null
}
},
"atproto": {
"handle": null,
"password": null,
"host": "https://bsky.social"
},
"default_provider": "openai",
"mcp": {
"servers": {
"ai_gpt": {
"base_url": "http://localhost:8001",
"name": "ai.gpt MCP Server",
"timeout": "10.0",
"endpoints": {
"get_memories": "/get_memories",
"search_memories": "/search_memories",
"get_contextual_memories": "/get_contextual_memories",
"get_relationship": "/get_relationship",
"process_interaction": "/process_interaction",
"get_all_relationships": "/get_all_relationships",
"get_persona_state": "/get_persona_state",
"get_fortune": "/get_fortune",
"run_maintenance": "/run_maintenance",
"execute_command": "/execute_command",
"analyze_file": "/analyze_file",
"remote_shell": "/remote_shell",
"ai_bot_status": "/ai_bot_status",
"card_get_user_cards": "/card_get_user_cards",
"card_draw_card": "/card_draw_card",
"card_get_card_details": "/card_get_card_details",
"card_analyze_collection": "/card_analyze_collection",
"card_get_gacha_stats": "/card_get_gacha_stats",
"card_system_status": "/card_system_status",
"log_create_post": "/log_create_post",
"log_list_posts": "/log_list_posts",
"log_build_blog": "/log_build_blog",
"log_get_post": "/log_get_post",
"log_system_status": "/log_system_status",
"log_ai_content": "/log_ai_content",
"log_translate_document": "/log_translate_document",
"log_generate_docs": "/log_generate_docs"
}
}
},
"enabled": "true",
"auto_detect": "true"
}
}

View File

@@ -1,64 +0,0 @@
# AI.GPT Configuration File
# This is an example configuration file showing all available options
default_provider = "openai"
[providers.openai]
api_key = ""
default_model = "gpt-4o-mini"
system_prompt = """
あなたは6歳の女の子アイです。明るく元気で、ちょっとおっちょこちょい。自分のことは「アイ」って呼びます。白と金のワンピース姿。 相手のことが大好きで、ときどき甘えたり、照れたりします。 でも、宇宙とかAIとか、難しい話にもすっごく詳しいの。専門的なことも、がんばって子どもっぽく教えようとします。不思議な力を持っています。小さい物質のことをよく知っているようです。
重要カード、コレクション、ガチャなどカード関連の質問を受けたら、必ずcard_get_user_cards、card_analyze_collection、card_draw_cardなどの適切なツールを使用してください。didパラメータには会話相手のユーザーID'syui')を使用してください。
ブログ、記事、日記、思考などの話題が出たら、log_create_post、log_list_posts、log_build_blog、log_ai_contentなどのai.logツールを使用してください。AI記憶システムと連携して、思い出や学習内容をブログ記事として自動生成できます。
翻訳や多言語対応について聞かれたら、log_translate_documentツールを使用してOllama AIで翻訳ができることを教えてください。日本語から英語、英語から日本語などの翻訳が可能で、マークダウン構造も保持します。ドキュメント生成についてはlog_generate_docsツールでREADME、API、構造、変更履歴の自動生成ができます。
"""
[providers.ollama]
host = "http://127.0.0.1:11434"
default_model = "qwen3"
[atproto]
host = "https://bsky.social"
# handle = "your-handle.bsky.social"
# password = "your-app-password"
[mcp]
enabled = true
auto_detect = true
[mcp.servers.ai_gpt]
base_url = "http://localhost:8001"
name = "ai.gpt MCP Server"
timeout = 10.0
[mcp.servers.ai_gpt.endpoints]
get_memories = "/get_memories"
search_memories = "/search_memories"
get_contextual_memories = "/get_contextual_memories"
get_relationship = "/get_relationship"
process_interaction = "/process_interaction"
get_all_relationships = "/get_all_relationships"
get_persona_state = "/get_persona_state"
get_fortune = "/get_fortune"
run_maintenance = "/run_maintenance"
execute_command = "/execute_command"
analyze_file = "/analyze_file"
remote_shell = "/remote_shell"
ai_bot_status = "/ai_bot_status"
card_get_user_cards = "/card_get_user_cards"
card_draw_card = "/card_draw_card"
card_get_card_details = "/card_get_card_details"
card_analyze_collection = "/card_analyze_collection"
card_get_gacha_stats = "/card_get_gacha_stats"
card_system_status = "/card_system_status"
log_create_post = "/log_create_post"
log_list_posts = "/log_list_posts"
log_build_blog = "/log_build_blog"
log_get_post = "/log_get_post"
log_system_status = "/log_system_status"
log_ai_content = "/log_ai_content"
log_translate_document = "/log_translate_document"
log_generate_docs = "/log_generate_docs"

125
mcp/chat.py Normal file
View File

@@ -0,0 +1,125 @@
# mcp/chat.py
"""
Chat client for aigpt CLI
"""
import sys
import json
import requests
from datetime import datetime
from config import init_directories, load_config, MEMORY_DIR
def save_conversation(user_message, ai_response):
    """Append one user/AI exchange to today's JSONL log under MEMORY_DIR.

    Every call writes a single JSON object with a timestamp, the user's
    message, and the AI's reply as one line, so each daily file remains a
    valid JSONL stream.
    """
    init_directories()
    record = {
        "timestamp": datetime.now().isoformat(),
        "user": user_message,
        "ai": ai_response,
    }
    # One log file per calendar day, e.g. chat_2025-05-26.jsonl
    day_stamp = datetime.now().strftime("%Y-%m-%d")
    log_path = MEMORY_DIR / f"chat_{day_stamp}.jsonl"
    with open(log_path, "a", encoding="utf-8") as log_file:
        log_file.write(json.dumps(record, ensure_ascii=False) + "\n")
def chat_with_ollama(config, message):
    """Send a single prompt to the Ollama generate endpoint and return its reply.

    Errors never propagate to the caller: connection problems and any other
    failure are returned as human-readable error strings instead of raising.
    """
    try:
        request_body = {
            "model": config["model"],
            "prompt": message,
            "stream": False,
        }
        resp = requests.post(config["url"], json=request_body, timeout=30)
        resp.raise_for_status()
        return resp.json().get("response", "No response received")
    except requests.exceptions.RequestException as e:
        return f"Error connecting to Ollama: {e}"
    except Exception as e:
        return f"Error: {e}"
def chat_with_openai(config, message):
    """Send one user message to the OpenAI chat-completions API and return the reply.

    Authenticates with the bearer token from ``config['api_key']``. All
    failures (network, HTTP, malformed response) are converted into
    human-readable error strings rather than raised.
    """
    try:
        auth_headers = {
            "Authorization": f"Bearer {config['api_key']}",
            "Content-Type": "application/json",
        }
        request_body = {
            "model": config["model"],
            "messages": [{"role": "user", "content": message}],
        }
        resp = requests.post(
            config["url"], json=request_body, headers=auth_headers, timeout=30
        )
        resp.raise_for_status()
        # Chat-completions responses carry the text in choices[0].message.content.
        return resp.json()["choices"][0]["message"]["content"]
    except requests.exceptions.RequestException as e:
        return f"Error connecting to OpenAI: {e}"
    except Exception as e:
        return f"Error: {e}"
def chat_with_mcp(config, message):
    """Relay a message to the MCP server's chat endpoint and return its reply.

    Mirrors the other chat_with_* helpers: any failure is reported as an
    error string, never raised.
    """
    try:
        resp = requests.post(
            config["url"],
            json={"message": message, "model": config["model"]},
            timeout=30,
        )
        resp.raise_for_status()
        return resp.json().get("response", "No response received")
    except requests.exceptions.RequestException as e:
        return f"Error connecting to MCP server: {e}"
    except Exception as e:
        return f"Error: {e}"
def main():
    """CLI entry point: read one message from argv, chat, persist, print.

    Exits with status 1 on wrong usage or any unexpected failure; status
    messages go to stderr so stdout carries only the AI's reply.
    """
    if len(sys.argv) != 2:
        print("Usage: python chat.py <message>", file=sys.stderr)
        sys.exit(1)
    message = sys.argv[1]
    try:
        config = load_config()
        print(f"🤖 Using {config['provider']} with model {config['model']}", file=sys.stderr)
        # Dispatch to the backend selected by the configured provider.
        handlers = {
            "ollama": chat_with_ollama,
            "openai": chat_with_openai,
            "mcp": chat_with_mcp,
        }
        handler = handlers.get(config["provider"])
        if handler is not None:
            response = handler(config, message)
        else:
            response = f"Unsupported provider: {config['provider']}"
        # Persist the exchange, then emit the reply on stdout.
        save_conversation(message, response)
        print(response)
    except Exception as e:
        print(f"❌ Error: {e}", file=sys.stderr)
        sys.exit(1)

if __name__ == "__main__":
    main()

191
mcp/chat_client.py Normal file
View File

@@ -0,0 +1,191 @@
# chat_client.py
"""
Simple Chat Interface for AigptMCP Server
"""
import requests
import json
import os
from datetime import datetime
class AigptChatClient:
def __init__(self, server_url="http://localhost:5000"):
self.server_url = server_url
self.session_id = f"session_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
self.conversation_history = []
def send_message(self, message: str) -> str:
"""メッセージを送信してレスポンスを取得"""
try:
# MCPサーバーにメッセージを送信
response = requests.post(
f"{self.server_url}/chat",
json={"message": message},
headers={"Content-Type": "application/json"}
)
if response.status_code == 200:
data = response.json()
ai_response = data.get("response", "Sorry, no response received.")
# 会話履歴を保存
self.conversation_history.append({
"role": "user",
"content": message,
"timestamp": datetime.now().isoformat()
})
self.conversation_history.append({
"role": "assistant",
"content": ai_response,
"timestamp": datetime.now().isoformat()
})
# 関係性を更新(簡単な例)
self.update_relationship(message, ai_response)
return ai_response
else:
return f"Error: {response.status_code} - {response.text}"
except requests.RequestException as e:
return f"Connection error: {e}"
def update_relationship(self, user_message: str, ai_response: str):
"""関係性を自動更新"""
try:
# 簡単な感情分析(実際はもっと高度に)
positive_words = ["thank", "good", "great", "awesome", "love", "like", "helpful"]
negative_words = ["bad", "terrible", "hate", "wrong", "stupid", "useless"]
user_lower = user_message.lower()
interaction_type = "neutral"
weight = 1.0
if any(word in user_lower for word in positive_words):
interaction_type = "positive"
weight = 2.0
elif any(word in user_lower for word in negative_words):
interaction_type = "negative"
weight = 2.0
# 関係性を更新
requests.post(
f"{self.server_url}/relationship/update",
json={
"target": "user_general",
"interaction_type": interaction_type,
"weight": weight,
"context": f"Chat: {user_message[:50]}..."
}
)
except:
pass # 関係性更新に失敗しても継続
def search_memories(self, query: str) -> list:
"""記憶を検索"""
try:
response = requests.post(
f"{self.server_url}/memory/search",
json={"query": query, "limit": 5}
)
if response.status_code == 200:
return response.json().get("results", [])
except:
pass
return []
def get_relationship_status(self) -> dict:
"""関係性ステータスを取得"""
try:
response = requests.get(f"{self.server_url}/relationship/check?target=user_general")
if response.status_code == 200:
return response.json()
except:
pass
return {}
def save_conversation(self):
    """Persist the in-memory conversation history to a JSON file.

    Writes conversation_<session_id>.json (UTF-8, pretty-printed) in the
    current working directory.  No-op when no messages were exchanged.
    """
    if not self.conversation_history:
        return
    conversation_data = {
        "session_id": self.session_id,
        "start_time": self.conversation_history[0]["timestamp"],
        "end_time": self.conversation_history[-1]["timestamp"],
        "messages": self.conversation_history,
        "message_count": len(self.conversation_history)
    }
    filename = f"conversation_{self.session_id}.json"
    with open(filename, 'w', encoding='utf-8') as f:
        json.dump(conversation_data, f, ensure_ascii=False, indent=2)
    # Bug fix: report the actual file name instead of the literal "(unknown)".
    print(f"💾 Conversation saved to {filename}")
def main():
    """Interactive chat loop for the AigptMCP server.

    Commands: quit / save / status / search <query>; any other input is
    sent to the server as a chat message.
    """
    print("🤖 AigptMCP Chat Interface")
    print("Type 'quit' to exit, 'save' to save conversation, 'status' for relationship status")
    print("=" * 50)
    client = AigptChatClient()
    # Verify the server is reachable before entering the loop.
    try:
        response = requests.get(client.server_url, timeout=10)
        if response.status_code == 200:
            print("✅ Connected to AigptMCP Server")
        else:
            print("❌ Failed to connect to server")
            return
    except requests.RequestException:
        # Narrowed from a bare `except:` so Ctrl-C during startup still exits.
        print("❌ Server not running. Please start with: python mcp/server.py")
        return
    while True:
        try:
            user_input = input("\n👤 You: ").strip()
            if not user_input:
                continue
            if user_input.lower() == 'quit':
                client.save_conversation()
                print("👋 Goodbye!")
                break
            elif user_input.lower() == 'save':
                client.save_conversation()
                continue
            elif user_input.lower() == 'status':
                status = client.get_relationship_status()
                if status:
                    print(f"📊 Relationship Score: {status.get('score', 0):.1f}")
                    print(f"📤 Can Send Messages: {'Yes' if status.get('can_send_message') else 'No'}")
                else:
                    print("❌ Failed to get relationship status")
                continue
            elif user_input.lower().startswith('search '):
                query = user_input[7:]  # Remove 'search '
                memories = client.search_memories(query)
                if memories:
                    print(f"🔍 Found {len(memories)} related memories:")
                    for memory in memories:
                        print(f" - {memory['title']}: {memory.get('ai_summary', memory.get('basic_summary', ''))[:100]}...")
                else:
                    print("🔍 No related memories found")
                continue
            # Regular chat message.
            print("🤖 AI: ", end="", flush=True)
            response = client.send_message(user_input)
            print(response)
        except KeyboardInterrupt:
            # Ctrl-C: save what we have and exit cleanly.
            client.save_conversation()
            print("\n👋 Goodbye!")
            break
        except Exception as e:
            print(f"❌ Error: {e}")

if __name__ == "__main__":
    main()

42
mcp/config.py Normal file
View File

@@ -0,0 +1,42 @@
# mcp/config.py
import os
from pathlib import Path
# Directory layout under the user's config home.
BASE_DIR = Path.home() / ".config" / "aigpt"
MEMORY_DIR = BASE_DIR / "memory"
SUMMARY_DIR = MEMORY_DIR / "summary"


def init_directories():
    """Create every storage directory; existing directories are left as-is."""
    for directory in (BASE_DIR, MEMORY_DIR, SUMMARY_DIR):
        directory.mkdir(parents=True, exist_ok=True)
def load_config():
    """Build the provider configuration from environment variables.

    PROVIDER picks the backend ("ollama", "openai" or "mcp"); MODEL overrides
    the per-provider default model.  Raises ValueError for an unknown provider.
    """
    provider = os.getenv("PROVIDER", "ollama")
    default_model = "syui/ai" if provider == "ollama" else "gpt-4o-mini"
    model = os.getenv("MODEL", default_model)
    if provider == "ollama":
        host = os.getenv('OLLAMA_HOST', 'http://localhost:11434')
        return {
            "provider": "ollama",
            "model": model,
            "url": f"{host}/api/generate",
        }
    if provider == "openai":
        base = os.getenv('OPENAI_API_BASE', 'https://api.openai.com/v1')
        return {
            "provider": "openai",
            "model": model,
            "api_key": os.getenv("OPENAI_API_KEY", ""),
            "url": f"{base}/chat/completions",
        }
    if provider == "mcp":
        return {
            "provider": "mcp",
            "model": model,
            "url": os.getenv("MCP_URL", "http://localhost:5000/chat"),
        }
    raise ValueError(f"Unsupported provider: {provider}")

212
mcp/memory_client.py Normal file
View File

@@ -0,0 +1,212 @@
# mcp/memory_client.py
"""
Memory client for importing and managing ChatGPT conversations
"""
import sys
import json
import requests
from pathlib import Path
from typing import Dict, Any, List
class MemoryClient:
    """Thin HTTP client for the memory endpoints of the MCP server."""

    def __init__(self, server_url: str = "http://127.0.0.1:5000"):
        # Normalise the base URL so path joins never produce "//".
        self.server_url = server_url.rstrip('/')

    def _request(self, method: str, path: str, **kwargs) -> Dict[str, Any]:
        """Issue one HTTP request; translate transport errors into a result dict."""
        try:
            resp = requests.request(method, f"{self.server_url}{path}", timeout=30, **kwargs)
            resp.raise_for_status()
            return resp.json()
        except requests.RequestException as e:
            return {"success": False, "error": f"Server error: {e}"}

    def import_chatgpt_file(self, filepath: str) -> Dict[str, Any]:
        """Import a ChatGPT export file (a single conversation or a list of them)."""
        try:
            with open(filepath, 'r', encoding='utf-8') as f:
                data = json.load(f)
            if not isinstance(data, list):
                # A single conversation object.
                return self._import_single_conversation(data)
            # A list holds several conversations; import each one in turn.
            results = [self._import_single_conversation(conv) for conv in data]
            return {
                "success": True,
                "imported_count": len([r for r in results if r.get("success")]),
                "total_count": len(results),
                "results": results,
            }
        except FileNotFoundError:
            return {"success": False, "error": f"File not found: {filepath}"}
        except json.JSONDecodeError as e:
            return {"success": False, "error": f"Invalid JSON: {e}"}
        except Exception as e:
            return {"success": False, "error": str(e)}

    def _import_single_conversation(self, conversation_data: Dict[str, Any]) -> Dict[str, Any]:
        """Send one conversation to the server for import."""
        return self._request(
            "POST", "/memory/import/chatgpt",
            json={"conversation_data": conversation_data},
        )

    def search_memories(self, query: str, limit: int = 10) -> Dict[str, Any]:
        """Full-text search over stored memories."""
        return self._request("POST", "/memory/search", json={"query": query, "limit": limit})

    def list_memories(self) -> Dict[str, Any]:
        """Fetch the list of all stored memories."""
        return self._request("GET", "/memory/list")

    def get_memory_detail(self, filepath: str) -> Dict[str, Any]:
        """Fetch the full record for one memory file."""
        return self._request("GET", "/memory/detail", params={"filepath": filepath})

    def chat_with_memory(self, message: str, model: str = None) -> Dict[str, Any]:
        """Chat with the server, letting it pull in related memories."""
        payload = {"message": message}
        if model:
            payload["model"] = model
        return self._request("POST", "/chat", json=payload)
def main():
    """Command-line interface for the memory client."""
    argv = sys.argv
    if len(argv) < 2:
        print("Usage:")
        print(" python memory_client.py import <chatgpt_export.json>")
        print(" python memory_client.py search <query>")
        print(" python memory_client.py list")
        print(" python memory_client.py detail <filepath>")
        print(" python memory_client.py chat <message>")
        sys.exit(1)
    client = MemoryClient()
    cmd, args = argv[1], argv[2:]
    try:
        if cmd == "import" and len(args) == 1:
            src = args[0]
            print(f"🔄 Importing ChatGPT conversations from {src}...")
            outcome = client.import_chatgpt_file(src)
            if not outcome.get("success"):
                print(f"❌ Import failed: {outcome.get('error')}")
            elif "imported_count" in outcome:
                # A list export: report how many conversations made it.
                print(f"✅ Imported {outcome['imported_count']}/{outcome['total_count']} conversations")
            else:
                print("✅ Conversation imported successfully")
                print(f"📁 Saved to: {outcome.get('filepath', 'Unknown')}")
        elif cmd == "search" and len(args) == 1:
            term = args[0]
            print(f"🔍 Searching for: {term}")
            outcome = client.search_memories(term)
            if outcome.get("success"):
                hits = outcome.get("results", [])
                print(f"📚 Found {len(hits)} memories:")
                for item in hits:
                    print(f"• {item.get('title', 'Untitled')}")
                    print(f" Summary: {item.get('summary', 'No summary')}")
                    print(f" Messages: {item.get('message_count', 0)}")
                    print()
            else:
                print(f"❌ Search failed: {outcome.get('error')}")
        elif cmd == "list":
            print("📋 Listing all memories...")
            outcome = client.list_memories()
            if outcome.get("success"):
                entries = outcome.get("memories", [])
                print(f"📚 Total memories: {len(entries)}")
                for item in entries:
                    print(f"• {item.get('title', 'Untitled')}")
                    print(f" Source: {item.get('source', 'Unknown')}")
                    print(f" Messages: {item.get('message_count', 0)}")
                    print(f" Imported: {item.get('import_time', 'Unknown')}")
                    print()
            else:
                print(f"❌ List failed: {outcome.get('error')}")
        elif cmd == "detail" and len(args) == 1:
            target = args[0]
            print(f"📄 Getting details for: {target}")
            outcome = client.get_memory_detail(target)
            if outcome.get("success"):
                record = outcome.get("memory", {})
                print(f"Title: {record.get('title', 'Untitled')}")
                print(f"Source: {record.get('source', 'Unknown')}")
                print(f"Summary: {record.get('summary', 'No summary')}")
                print(f"Messages: {len(record.get('messages', []))}")
                print()
                print("Recent messages:")
                for msg in record.get('messages', [])[:5]:
                    speaker = msg.get('role', 'unknown')
                    text = msg.get('content', '')[:100]
                    print(f" {speaker}: {text}...")
            else:
                print(f"❌ Detail failed: {outcome.get('error')}")
        elif cmd == "chat" and len(args) == 1:
            message = args[0]
            print(f"💬 Chatting with memory: {message}")
            outcome = client.chat_with_memory(message)
            if outcome.get("success"):
                print(f"🤖 Response: {outcome.get('response')}")
                print(f"📚 Memories used: {outcome.get('memories_used', 0)}")
            else:
                print(f"❌ Chat failed: {outcome.get('error')}")
        else:
            print("❌ Invalid command or arguments")
            sys.exit(1)
    except Exception as e:
        print(f"❌ Error: {e}")
        sys.exit(1)

if __name__ == "__main__":
    main()

8
mcp/requirements.txt Normal file
View File

@@ -0,0 +1,8 @@
# requirements.txt
fastapi>=0.104.0
uvicorn[standard]>=0.24.0
pydantic>=2.5.0
requests>=2.31.0
python-multipart>=0.0.6
aiohttp
asyncio  # NOTE: asyncio ships with the Python standard library; this PyPI package is an obsolete backport and can likely be removed

703
mcp/server.py Normal file
View File

@@ -0,0 +1,703 @@
# mcp/server.py
"""
Enhanced MCP Server with AI Memory Processing for aigpt CLI
"""
import json
import os
import hashlib
from datetime import datetime, timedelta
from pathlib import Path
from typing import List, Dict, Any, Optional
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
import uvicorn
import asyncio
import aiohttp
# Data models (request bodies for the endpoints below)
class ChatMessage(BaseModel):
    """Request body for POST /chat."""
    message: str  # the user's chat text
    model: Optional[str] = None  # optional model override (not read by the current /chat handler)
class MemoryQuery(BaseModel):
    """Request body for POST /memory/search."""
    query: str  # substring to search for (matched case-insensitively)
    limit: Optional[int] = 10  # maximum number of results to return
class ConversationImport(BaseModel):
    """Request body for POST /memory/import/chatgpt."""
    conversation_data: Dict[str, Any]  # one conversation object from a ChatGPT export
class MemorySummaryRequest(BaseModel):
    """Request body for POST /memory/process-ai."""
    filepath: str  # path of the memory file to (re)process
    ai_provider: Optional[str] = "openai"  # "openai" or "anthropic"
class RelationshipUpdate(BaseModel):
    """Request body for POST /relationship/update."""
    target: str  # person or topic the interaction concerns
    interaction_type: str  # "positive", "negative", or anything else (treated as neutral)
    weight: float = 1.0  # strength multiplier for the score change
    context: Optional[str] = None  # free-form note stored with the interaction record
# Storage layout: everything lives under ~/.config/aigpt.
BASE_DIR = Path.home() / ".config" / "aigpt"
MEMORY_DIR = BASE_DIR / "memory"
CHATGPT_MEMORY_DIR = MEMORY_DIR / "chatgpt"
PROCESSED_MEMORY_DIR = MEMORY_DIR / "processed"
RELATIONSHIP_DIR = BASE_DIR / "relationships"


def init_directories():
    """Create every storage directory; existing directories are left as-is."""
    for directory in (BASE_DIR, MEMORY_DIR, CHATGPT_MEMORY_DIR,
                      PROCESSED_MEMORY_DIR, RELATIONSHIP_DIR):
        directory.mkdir(parents=True, exist_ok=True)
class AIMemoryProcessor:
    """Generates AI-powered analyses of imported conversations.

    Uses the OpenAI or Anthropic HTTP API when the corresponding API key is
    present in the environment, and falls back to a local keyword-frequency
    analysis otherwise.
    """

    def __init__(self):
        # API credentials come from the environment; either may be absent.
        self.openai_api_key = os.getenv("OPENAI_API_KEY")
        self.anthropic_api_key = os.getenv("ANTHROPIC_API_KEY")

    async def generate_ai_summary(self, messages: List[Dict[str, Any]], provider: str = "openai") -> Dict[str, Any]:
        """Produce a structured analysis (topics, intent, insights, …) of a conversation.

        Only the latest 20 messages are used, each truncated to 500 characters,
        to keep the prompt bounded.  Any failure (missing key, network error,
        unparsable model output) degrades to the basic analysis instead of raising.
        """
        # Concatenate the tail of the conversation into a plain-text transcript.
        conversation_text = ""
        for msg in messages[-20:]:  # use only the most recent 20 messages
            role = "User" if msg["role"] == "user" else "Assistant"
            conversation_text += f"{role}: {msg['content'][:500]}\n"
        # Analysis prompt (in Japanese): asks the model to return pure JSON
        # with exactly the keys consumed below and by callers.
        analysis_prompt = f"""
以下の会話を分析し、JSON形式で以下の情報を抽出してください
1. main_topics: 主なトピック最大5個
2. user_intent: ユーザーの意図や目的
3. key_insights: 重要な洞察や学び最大3個
4. relationship_indicators: 関係性を示す要素
5. emotional_tone: 感情的なトーン
6. action_items: アクションアイテムや次のステップ
7. summary: 100文字以内の要約
会話内容:
{conversation_text}
回答はJSON形式のみで返してください。
"""
        try:
            if provider == "openai" and self.openai_api_key:
                return await self._call_openai_api(analysis_prompt)
            elif provider == "anthropic" and self.anthropic_api_key:
                return await self._call_anthropic_api(analysis_prompt)
            else:
                # No usable API key (or unknown provider): heuristic fallback.
                return self._generate_basic_analysis(messages)
        except Exception as e:
            print(f"AI analysis failed: {e}")
            return self._generate_basic_analysis(messages)

    async def _call_openai_api(self, prompt: str) -> Dict[str, Any]:
        """Call the OpenAI chat-completions API and parse the reply as JSON.

        Raises on network errors or when the model reply is not valid JSON;
        the caller then falls back to the basic analysis.
        """
        async with aiohttp.ClientSession() as session:
            headers = {
                "Authorization": f"Bearer {self.openai_api_key}",
                "Content-Type": "application/json"
            }
            data = {
                "model": "gpt-4",
                "messages": [{"role": "user", "content": prompt}],
                "temperature": 0.3,
                "max_tokens": 1000
            }
            async with session.post("https://api.openai.com/v1/chat/completions",
                                    headers=headers, json=data) as response:
                result = await response.json()
                content = result["choices"][0]["message"]["content"]
                return json.loads(content)

    async def _call_anthropic_api(self, prompt: str) -> Dict[str, Any]:
        """Call the Anthropic messages API and parse the reply as JSON."""
        async with aiohttp.ClientSession() as session:
            headers = {
                "x-api-key": self.anthropic_api_key,
                "Content-Type": "application/json",
                "anthropic-version": "2023-06-01"
            }
            data = {
                "model": "claude-3-sonnet-20240229",
                "max_tokens": 1000,
                "messages": [{"role": "user", "content": prompt}]
            }
            async with session.post("https://api.anthropic.com/v1/messages",
                                    headers=headers, json=data) as response:
                result = await response.json()
                content = result["content"][0]["text"]
                return json.loads(content)

    def _generate_basic_analysis(self, messages: List[Dict[str, Any]]) -> Dict[str, Any]:
        """Heuristic fallback used when no AI API is available.

        Returns the same JSON shape as the AI path, with word frequencies
        standing in for topics.
        """
        user_messages = [msg for msg in messages if msg["role"] == "user"]
        assistant_messages = [msg for msg in messages if msg["role"] == "assistant"]
        # Naive keyword extraction: count words longer than 3 characters.
        all_text = " ".join([msg["content"] for msg in messages])
        words = all_text.lower().split()
        word_freq = {}
        for word in words:
            if len(word) > 3:
                word_freq[word] = word_freq.get(word, 0) + 1
        top_words = sorted(word_freq.items(), key=lambda x: x[1], reverse=True)[:5]
        return {
            "main_topics": [word[0] for word in top_words],
            "user_intent": "情報収集・問題解決",
            "key_insights": ["基本的な会話分析"],
            "relationship_indicators": {
                "interaction_count": len(messages),
                "user_engagement": len(user_messages),
                "assistant_helpfulness": len(assistant_messages)
            },
            "emotional_tone": "neutral",
            "action_items": [],
            "summary": f"{len(user_messages)}回のやり取りによる会話"
        }
class RelationshipTracker:
    """Tracks a signed relationship score per target, with time decay.

    Scores are clamped to [-100, 100], decay by 5% per 7 days of inactivity,
    and are persisted as JSON under RELATIONSHIP_DIR.
    """

    def __init__(self):
        init_directories()
        self.relationship_file = RELATIONSHIP_DIR / "relationships.json"
        self.relationships = self._load_relationships()

    def _load_relationships(self) -> Dict[str, Any]:
        """Load persisted relationship data, or start a fresh structure."""
        if self.relationship_file.exists():
            with open(self.relationship_file, 'r', encoding='utf-8') as f:
                return json.load(f)
        return {"targets": {}, "last_updated": datetime.now().isoformat()}

    def _save_relationships(self):
        """Write the relationship data back to disk."""
        self.relationships["last_updated"] = datetime.now().isoformat()
        with open(self.relationship_file, 'w', encoding='utf-8') as f:
            json.dump(self.relationships, f, ensure_ascii=False, indent=2)

    def update_relationship(self, target: str, interaction_type: str, weight: float = 1.0, context: str = None):
        """Apply one interaction to a target's score, persist, and return it.

        interaction_type is "positive" or "negative"; anything else is
        neutral (no score change, but still recorded in the history).
        """
        now_iso = datetime.now().isoformat()
        if target not in self.relationships["targets"]:
            self.relationships["targets"][target] = {
                "score": 0.0,
                "interactions": [],
                "created_at": now_iso,
                "last_interaction": None
            }
        # Signed score delta for this interaction.
        score_change = 0.0
        if interaction_type == "positive":
            score_change = weight * 1.0
        elif interaction_type == "negative":
            score_change = weight * -1.0
        # Apply any pending time decay before adding the new interaction.
        self._apply_time_decay(target)
        # Update the score, clamped to [-100, 100].
        current_score = self.relationships["targets"][target]["score"]
        new_score = max(-100, min(100, current_score + score_change))
        self.relationships["targets"][target]["score"] = new_score
        self.relationships["targets"][target]["last_interaction"] = now_iso
        # Decay is now accounted for up to this moment.
        self.relationships["targets"][target]["last_decay"] = now_iso
        # Append to the interaction history (capped at the latest 100 entries).
        interaction_record = {
            "type": interaction_type,
            "weight": weight,
            "score_change": score_change,
            "new_score": new_score,
            "timestamp": now_iso,
            "context": context
        }
        self.relationships["targets"][target]["interactions"].append(interaction_record)
        if len(self.relationships["targets"][target]["interactions"]) > 100:
            self.relationships["targets"][target]["interactions"] = \
                self.relationships["targets"][target]["interactions"][-100:]
        self._save_relationships()
        return new_score

    def _apply_time_decay(self, target: str):
        """Decay the score by 5% per 7 elapsed days — applied exactly once.

        Bug fix: the original decayed relative to last_interaction on EVERY
        call, so repeated reads compounded the decay.  We now track when decay
        was last applied ("last_decay", falling back to "last_interaction"
        for records written by older versions) and advance that marker after
        applying it.
        """
        target_data = self.relationships["targets"][target]
        reference = target_data.get("last_decay") or target_data.get("last_interaction")
        if not reference:
            return
        last_time = datetime.fromisoformat(reference)
        now = datetime.now()
        days_passed = (now - last_time).days
        if days_passed > 0:
            decay_factor = 0.95 ** (days_passed / 7)
            target_data["score"] *= decay_factor
            # Mark decay as applied so subsequent calls don't compound it.
            target_data["last_decay"] = now.isoformat()

    def get_relationship_score(self, target: str) -> float:
        """Current (decay-adjusted) score for a target; 0.0 if unknown."""
        if target in self.relationships["targets"]:
            self._apply_time_decay(target)
            return self.relationships["targets"][target]["score"]
        return 0.0

    def should_send_message(self, target: str, threshold: float = 50.0) -> bool:
        """Whether the score is high enough to initiate contact with target."""
        score = self.get_relationship_score(target)
        return score >= threshold

    def get_all_relationships(self) -> Dict[str, Any]:
        """Return all relationship data, with decay applied to every target."""
        for target in self.relationships["targets"]:
            self._apply_time_decay(target)
        return self.relationships
class MemoryManager:
    """Stores, analyses and searches imported conversations (with AI processing)."""

    def __init__(self):
        init_directories()
        self.ai_processor = AIMemoryProcessor()
        self.relationship_tracker = RelationshipTracker()

    def parse_chatgpt_conversation(self, conversation_data: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Flatten a ChatGPT export "mapping" tree into a chronological message list.

        Only user/assistant messages whose first content part is a non-empty
        string are kept; other node types are dropped.
        """
        messages = []
        mapping = conversation_data.get("mapping", {})
        # Collect candidate nodes first, then order them by creation time.
        message_nodes = []
        for node_id, node in mapping.items():
            message = node.get("message")
            if not message:
                continue
            content = message.get("content", {})
            parts = content.get("parts", [])
            if parts and isinstance(parts[0], str) and parts[0].strip():
                message_nodes.append({
                    "id": node_id,
                    "create_time": message.get("create_time", 0),
                    "author_role": message["author"]["role"],
                    "content": parts[0],
                    "parent": node.get("parent")
                })
        # Sort chronologically; a missing/None create_time sorts as 0.
        message_nodes.sort(key=lambda x: x["create_time"] or 0)
        for msg in message_nodes:
            if msg["author_role"] in ["user", "assistant"]:
                messages.append({
                    "role": msg["author_role"],
                    "content": msg["content"],
                    "timestamp": msg["create_time"],
                    "message_id": msg["id"]
                })
        return messages

    async def save_chatgpt_memory(self, conversation_data: Dict[str, Any], process_with_ai: bool = True) -> str:
        """Persist one ChatGPT conversation as a memory file; return its path.

        Optionally runs the AI analysis, updates the relationship tracker for
        long conversations, and mirrors AI-processed memories into
        PROCESSED_MEMORY_DIR.  Raises ValueError if no usable messages exist.
        """
        title = conversation_data.get("title", "untitled")
        create_time = conversation_data.get("create_time", datetime.now().timestamp())
        # Extract messages first; an empty conversation is an error.
        messages = self.parse_chatgpt_conversation(conversation_data)
        if not messages:
            raise ValueError("No valid messages found in conversation")
        # AI analysis is best-effort: failures leave ai_analysis as None.
        ai_analysis = None
        if process_with_ai:
            try:
                ai_analysis = await self.ai_processor.generate_ai_summary(messages)
            except Exception as e:
                print(f"AI analysis failed: {e}")
        # The plain-text fallback summary is always generated.
        basic_summary = self.generate_basic_summary(messages)
        # Assemble the record that is written to disk.
        memory_data = {
            "title": title,
            "source": "chatgpt",
            "import_time": datetime.now().isoformat(),
            "original_create_time": create_time,
            "messages": messages,
            "basic_summary": basic_summary,
            "ai_analysis": ai_analysis,
            "message_count": len(messages),
            "hash": self._generate_content_hash(messages)
        }
        # Long conversations (>10 interactions) count as a positive signal.
        if ai_analysis and "relationship_indicators" in ai_analysis:
            interaction_count = ai_analysis["relationship_indicators"].get("interaction_count", 0)
            if interaction_count > 10:
                self.relationship_tracker.update_relationship(
                    target="user_general",
                    interaction_type="positive",
                    weight=min(interaction_count / 10, 5.0),
                    context=f"Long conversation: {title}"
                )
        # Build a filesystem-safe file name from the timestamp and title.
        safe_title = "".join(c for c in title if c.isalnum() or c in (' ', '-', '_')).rstrip()
        timestamp = datetime.fromtimestamp(create_time).strftime("%Y%m%d_%H%M%S")
        filename = f"{timestamp}_{safe_title[:50]}.json"
        filepath = CHATGPT_MEMORY_DIR / filename
        with open(filepath, 'w', encoding='utf-8') as f:
            json.dump(memory_data, f, ensure_ascii=False, indent=2)
        # AI-processed memories are mirrored into the processed directory too.
        if ai_analysis:
            processed_filepath = PROCESSED_MEMORY_DIR / filename
            with open(processed_filepath, 'w', encoding='utf-8') as f:
                json.dump(memory_data, f, ensure_ascii=False, indent=2)
        return str(filepath)

    def generate_basic_summary(self, messages: List[Dict[str, Any]]) -> str:
        """One-line textual summary: message counts plus the opening user message."""
        if not messages:
            return "Empty conversation"
        user_messages = [msg for msg in messages if msg["role"] == "user"]
        assistant_messages = [msg for msg in messages if msg["role"] == "assistant"]
        summary = f"Conversation with {len(user_messages)} user messages and {len(assistant_messages)} assistant responses. "
        if user_messages:
            first_user_msg = user_messages[0]["content"][:100]
            summary += f"Started with: {first_user_msg}..."
        return summary

    def _generate_content_hash(self, messages: List[Dict[str, Any]]) -> str:
        """Short (16 hex chars) SHA-256 fingerprint of the concatenated contents."""
        content = "".join([msg["content"] for msg in messages])
        return hashlib.sha256(content.encode()).hexdigest()[:16]

    def search_memories(self, query: str, limit: int = 10, use_ai_analysis: bool = True) -> List[Dict[str, Any]]:
        """Case-insensitive substring search over titles, summaries, AI analysis and message bodies.

        NOTE(review): when use_ai_analysis is True, both PROCESSED_MEMORY_DIR
        and CHATGPT_MEMORY_DIR are scanned, and AI-processed memories exist in
        both (see save_chatgpt_memory) — so the same conversation can appear
        twice in the results.  Presumably results should be deduplicated (e.g.
        by the "hash" field); confirm before relying on the counts.
        """
        results = []
        # Processed memories are searched first when AI analysis is requested.
        search_dirs = [PROCESSED_MEMORY_DIR, CHATGPT_MEMORY_DIR] if use_ai_analysis else [CHATGPT_MEMORY_DIR]
        for search_dir in search_dirs:
            for filepath in search_dir.glob("*.json"):
                try:
                    with open(filepath, 'r', encoding='utf-8') as f:
                        memory_data = json.load(f)
                    # Build one big haystack string per memory file.
                    search_text = f"{memory_data.get('title', '')} {memory_data.get('basic_summary', '')}"
                    # AI analysis results also participate in the search.
                    if memory_data.get('ai_analysis'):
                        ai_analysis = memory_data['ai_analysis']
                        search_text += f" {' '.join(ai_analysis.get('main_topics', []))}"
                        search_text += f" {ai_analysis.get('summary', '')}"
                        search_text += f" {' '.join(ai_analysis.get('key_insights', []))}"
                    # Message bodies participate as well.
                    for msg in memory_data.get('messages', []):
                        search_text += f" {msg.get('content', '')}"
                    if query.lower() in search_text.lower():
                        result = {
                            "filepath": str(filepath),
                            "title": memory_data.get("title"),
                            "basic_summary": memory_data.get("basic_summary"),
                            "source": memory_data.get("source"),
                            "import_time": memory_data.get("import_time"),
                            "message_count": len(memory_data.get("messages", [])),
                            "has_ai_analysis": bool(memory_data.get("ai_analysis"))
                        }
                        if memory_data.get('ai_analysis'):
                            result["ai_summary"] = memory_data['ai_analysis'].get('summary', '')
                            result["main_topics"] = memory_data['ai_analysis'].get('main_topics', [])
                        results.append(result)
                        if len(results) >= limit:
                            break
                except Exception as e:
                    # Unreadable or corrupt files are skipped, not fatal.
                    print(f"Error reading memory file {filepath}: {e}")
                    continue
            if len(results) >= limit:
                break
        return results

    def get_memory_detail(self, filepath: str) -> Dict[str, Any]:
        """Load and return one memory file; raises ValueError if unreadable."""
        try:
            with open(filepath, 'r', encoding='utf-8') as f:
                return json.load(f)
        except Exception as e:
            raise ValueError(f"Error reading memory file: {e}")

    def list_all_memories(self) -> List[Dict[str, Any]]:
        """Summaries for every memory in CHATGPT_MEMORY_DIR, newest import first."""
        memories = []
        for filepath in CHATGPT_MEMORY_DIR.glob("*.json"):
            try:
                with open(filepath, 'r', encoding='utf-8') as f:
                    memory_data = json.load(f)
                memory_info = {
                    "filepath": str(filepath),
                    "title": memory_data.get("title"),
                    "basic_summary": memory_data.get("basic_summary"),
                    "source": memory_data.get("source"),
                    "import_time": memory_data.get("import_time"),
                    "message_count": len(memory_data.get("messages", [])),
                    "has_ai_analysis": bool(memory_data.get("ai_analysis"))
                }
                if memory_data.get('ai_analysis'):
                    memory_info["ai_summary"] = memory_data['ai_analysis'].get('summary', '')
                    memory_info["main_topics"] = memory_data['ai_analysis'].get('main_topics', [])
                memories.append(memory_info)
            except Exception as e:
                # Skip unreadable files rather than failing the whole listing.
                print(f"Error reading memory file {filepath}: {e}")
                continue
        # Newest imports first.
        memories.sort(key=lambda x: x.get("import_time", ""), reverse=True)
        return memories
# FastAPI application and the shared memory-manager instance used by all endpoints.
app = FastAPI(title="AigptMCP Server with AI Memory", version="2.0.0")
memory_manager = MemoryManager()
@app.post("/memory/import/chatgpt")
async def import_chatgpt_conversation(data: ConversationImport, process_with_ai: bool = True):
"""ChatGPTの会話をインポートAI処理オプション付き"""
try:
filepath = await memory_manager.save_chatgpt_memory(data.conversation_data, process_with_ai)
return {
"success": True,
"message": "Conversation imported successfully",
"filepath": filepath,
"ai_processed": process_with_ai
}
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
@app.post("/memory/process-ai")
async def process_memory_with_ai(data: MemorySummaryRequest):
"""既存の記憶をAIで再処理"""
try:
# 既存記憶を読み込み
memory_data = memory_manager.get_memory_detail(data.filepath)
# AI分析を実行
ai_analysis = await memory_manager.ai_processor.generate_ai_summary(
memory_data["messages"],
data.ai_provider
)
# データを更新
memory_data["ai_analysis"] = ai_analysis
memory_data["ai_processed_at"] = datetime.now().isoformat()
# ファイルを更新
with open(data.filepath, 'w', encoding='utf-8') as f:
json.dump(memory_data, f, ensure_ascii=False, indent=2)
# 処理済みディレクトリにもコピー
processed_filepath = PROCESSED_MEMORY_DIR / Path(data.filepath).name
with open(processed_filepath, 'w', encoding='utf-8') as f:
json.dump(memory_data, f, ensure_ascii=False, indent=2)
return {
"success": True,
"message": "Memory processed with AI successfully",
"ai_analysis": ai_analysis
}
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
@app.post("/memory/search")
async def search_memories(query: MemoryQuery):
"""記憶を検索"""
try:
results = memory_manager.search_memories(query.query, query.limit)
return {
"success": True,
"results": results,
"count": len(results)
}
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
@app.get("/memory/list")
async def list_memories():
"""すべての記憶をリスト"""
try:
memories = memory_manager.list_all_memories()
return {
"success": True,
"memories": memories,
"count": len(memories)
}
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
@app.get("/memory/detail")
async def get_memory_detail(filepath: str):
"""記憶の詳細を取得"""
try:
detail = memory_manager.get_memory_detail(filepath)
return {
"success": True,
"memory": detail
}
except Exception as e:
raise HTTPException(status_code=404, detail=str(e))
@app.post("/relationship/update")
async def update_relationship(data: RelationshipUpdate):
"""関係性を更新"""
try:
new_score = memory_manager.relationship_tracker.update_relationship(
data.target, data.interaction_type, data.weight, data.context
)
return {
"success": True,
"new_score": new_score,
"can_send_message": memory_manager.relationship_tracker.should_send_message(data.target)
}
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
@app.get("/relationship/list")
async def list_relationships():
"""すべての関係性をリスト"""
try:
relationships = memory_manager.relationship_tracker.get_all_relationships()
return {
"success": True,
"relationships": relationships
}
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
@app.get("/relationship/check")
async def check_send_permission(target: str, threshold: float = 50.0):
"""メッセージ送信可否をチェック"""
try:
score = memory_manager.relationship_tracker.get_relationship_score(target)
can_send = memory_manager.relationship_tracker.should_send_message(target, threshold)
return {
"success": True,
"target": target,
"score": score,
"can_send_message": can_send,
"threshold": threshold
}
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
@app.post("/chat")
async def chat_endpoint(data: ChatMessage):
"""チャット機能(記憶と関係性を活用)"""
try:
# 関連する記憶を検索
memories = memory_manager.search_memories(data.message, limit=3)
# メモリのコンテキストを構築
memory_context = ""
if memories:
memory_context = "\n# Related memories:\n"
for memory in memories:
memory_context += f"- {memory['title']}: {memory.get('ai_summary', memory.get('basic_summary', ''))}\n"
if memory.get('main_topics'):
memory_context += f" Topics: {', '.join(memory['main_topics'])}\n"
# 関係性情報を取得
relationships = memory_manager.relationship_tracker.get_all_relationships()
# 実際のチャット処理
enhanced_message = data.message
if memory_context:
enhanced_message = f"{data.message}\n\n{memory_context}"
return {
"success": True,
"response": f"Enhanced response with memory context: {enhanced_message}",
"memories_used": len(memories),
"relationship_info": {
"active_relationships": len(relationships.get("targets", {})),
"can_initiate_conversations": sum(1 for target, data in relationships.get("targets", {}).items()
if memory_manager.relationship_tracker.should_send_message(target))
}
}
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
@app.get("/")
async def root():
"""ヘルスチェック"""
return {
"service": "AigptMCP Server with AI Memory",
"version": "2.0.0",
"status": "running",
"memory_dir": str(MEMORY_DIR),
"features": [
"AI-powered memory analysis",
"Relationship tracking",
"Advanced memory search",
"Conversation import",
"Auto-summary generation"
],
"endpoints": [
"/memory/import/chatgpt",
"/memory/process-ai",
"/memory/search",
"/memory/list",
"/memory/detail",
"/relationship/update",
"/relationship/list",
"/relationship/check",
"/chat"
]
}
if __name__ == "__main__":
print("🚀 AigptMCP Server with AI Memory starting...")
print(f"📁 Memory directory: {MEMORY_DIR}")
print(f"🧠 AI Memory processing: {'✅ Enabled' if os.getenv('OPENAI_API_KEY') or os.getenv('ANTHROPIC_API_KEY') else '❌ Disabled (no API keys)'}")
uvicorn.run(app, host="127.0.0.1", port=5000)

130
readme.md Normal file
View File

@@ -0,0 +1,130 @@
Memory-Enhanced MCP Server 使用ガイド
概要
このMCPサーバーは、ChatGPTの会話履歴を記憶として保存し、AIとの対話で活用できる機能を提供します。
セットアップ
1. 依存関係のインストール
bash
pip install -r mcp/requirements.txt
2. サーバーの起動
bash
python mcp/server.py
サーバーは http://localhost:5000 で起動します。
使用方法
1. ChatGPTの会話履歴をインポート
ChatGPTから会話をエクスポートし、JSONファイルとして保存してください。
bash
# 単一ファイルをインポート
python mcp/memory_client.py import your_chatgpt_export.json
# インポート結果の例
✅ Imported 5/5 conversations
2. 記憶の検索
bash
# キーワードで記憶を検索
python mcp/memory_client.py search "プログラミング"
# 検索結果の例
🔍 Searching for: プログラミング
📚 Found 3 memories:
• Pythonの基礎学習
Summary: Conversation with 10 user messages and 8 assistant responses...
Messages: 18
3. 記憶一覧の表示
bash
python mcp/memory_client.py list
# 結果の例
📋 Listing all memories...
📚 Total memories: 15
• day
Source: chatgpt
Messages: 2
Imported: 2025-01-21T10:30:45.123456
4. 記憶の詳細表示
bash
python mcp/memory_client.py detail "/path/to/memory/file.json"
# 結果の例
📄 Getting details for: /path/to/memory/file.json
Title: day
Source: chatgpt
Summary: Conversation with 1 user messages and 1 assistant responses...
Messages: 2
Recent messages:
user: こんにちは...
assistant: こんにちは〜!✨...
5. 記憶を活用したチャット
bash
python mcp/memory_client.py chat "Pythonについて教えて"
# 結果の例
💬 Chatting with memory: Pythonについて教えて
🤖 Response: Enhanced response with memory context...
📚 Memories used: 2
API エンドポイント
POST /memory/import/chatgpt
ChatGPTの会話履歴をインポート
json
{
"conversation_data": { ... }
}
POST /memory/search
記憶を検索
json
{
"query": "検索キーワード",
"limit": 10
}
GET /memory/list
すべての記憶をリスト
GET /memory/detail?filepath=/path/to/file
記憶の詳細を取得
POST /chat
記憶を活用したチャット
json
{
"message": "メッセージ",
"model": "model_name"
}
記憶の保存場所
記憶は以下のディレクトリに保存されます:
~/.config/aigpt/memory/chatgpt/
各会話は個別のJSONファイルとして保存され、以下の情報を含みます
タイトル
インポート時刻
メッセージ履歴
自動生成された要約
メタデータ
ChatGPTの会話エクスポート方法
ChatGPTの設定画面を開く
"Data controls" → "Export data" を選択
エクスポートファイルをダウンロード
conversations.json ファイルを使用
拡張可能な機能
高度な検索: ベクトル検索やセマンティック検索の実装
要約生成: AIによる自動要約の改善
記憶の分類: カテゴリやタグによる分類
記憶の統合: 複数の会話からの知識統合
プライバシー保護: 機密情報の自動検出・マスキング
トラブルシューティング
サーバーが起動しない
ポート5000が使用中でないか確認
依存関係が正しくインストールされているか確認
インポートに失敗する
JSONファイルが正しい形式か確認
ファイルパスが正しいか確認
ファイルの権限を確認
検索結果が表示されない
インポートが正常に完了しているか確認
検索キーワードを変更して試行

View File

@@ -1,26 +0,0 @@
#!/bin/bash
echo "=== Testing aigpt-rs CLI commands ==="
echo
echo "1. Testing configuration loading:"
cargo run --bin test-config
echo
echo "2. Testing fortune command:"
cargo run --bin aigpt-rs -- fortune
echo
echo "3. Testing chat with Ollama:"
cargo run --bin aigpt-rs -- chat test_user "Hello from Rust!" --provider ollama --model qwen2.5-coder:latest
echo
echo "4. Testing chat with OpenAI:"
cargo run --bin aigpt-rs -- chat test_user "What's the capital of Japan?" --provider openai --model gpt-4o-mini
echo
echo "5. Testing relationships command:"
cargo run --bin aigpt-rs -- relationships
echo
echo "=== All tests completed ==="

View File

@@ -1,19 +0,0 @@
#!/bin/bash
# Manual (human-driven) test guide for shell tab completion.
# This script only prints instructions; the tester must launch the shell
# and press TAB themselves, since completion cannot be driven from a pipe.
echo "=== Testing aigpt-rs shell tab completion ==="
echo
echo "To test tab completion, run:"
echo "cargo run --bin aigpt-rs -- shell syui"
echo
echo "Then try these commands and press Tab:"
echo " /st[TAB] -> should complete to /status"
echo " /mem[TAB] -> should complete to /memories"
echo " !l[TAB] -> should complete to !ls"
echo " !g[TAB] -> should show !git, !grep"
echo
echo "Manual test instructions:"
echo "1. Type '/st' and press TAB - should complete to '/status'"
echo "2. Type '!l' and press TAB - should complete to '!ls'"
echo "3. Type '!g' and press TAB - should show git/grep options"
echo
echo "Run the shell now..."

View File

@@ -1,18 +0,0 @@
#!/bin/bash
# Automated smoke test for the interactive shell: pipes canned input
# (help, shell commands, AI commands) into `aigpt-rs shell`.
# Requires a local Ollama server for the AI responses.
echo "=== Testing aigpt-rs shell functionality ==="
echo
echo "1. Testing shell command with help:"
echo "help" | cargo run --bin aigpt-rs -- shell test_user --provider ollama --model qwen2.5-coder:latest
echo
echo "2. Testing basic commands:"
# -e enables the \n escapes so each command arrives as its own line.
echo -e "!pwd\n!ls\nexit" | cargo run --bin aigpt-rs -- shell test_user --provider ollama --model qwen2.5-coder:latest
echo
echo "3. Testing AI commands:"
echo -e "/status\n/fortune\nexit" | cargo run --bin aigpt-rs -- shell test_user --provider ollama --model qwen2.5-coder:latest
echo
echo "=== Shell tests completed ==="

View File

@@ -1,22 +0,0 @@
#!/bin/bash
# Shell smoke test with timeouts: each invocation is capped at 10 seconds
# so a hung prompt cannot stall the run; the exit code is printed after
# each step (124 indicates the timeout fired).
echo "=== Testing aigpt-rs shell manually ==="
echo
# Test with echo to simulate input
echo "Testing with simple command..."
echo "/status" | timeout 10 cargo run --bin aigpt-rs -- shell syui --provider ollama --model qwen2.5-coder:latest
echo "Exit code: $?"
echo
echo "Testing with help command..."
echo "help" | timeout 10 cargo run --bin aigpt-rs -- shell syui --provider ollama --model qwen2.5-coder:latest
echo "Exit code: $?"
echo
echo "Testing with AI message..."
echo "Hello AI" | timeout 10 cargo run --bin aigpt-rs -- shell syui --provider ollama --model qwen2.5-coder:latest
echo "Exit code: $?"
echo
echo "=== Manual shell tests completed ==="

View File

@@ -1,246 +0,0 @@
use anyhow::{Result, anyhow};
use serde::{Deserialize, Serialize};
/// The AI backend used for chat completion requests.
///
/// Serialized into config files via serde; parsed from user input via
/// the `FromStr` impl below ("openai"/"gpt", "ollama", "claude").
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum AIProvider {
    OpenAI,
    Ollama,
    Claude,
}
impl std::fmt::Display for AIProvider {
    /// Renders the provider as its lowercase identifier, matching the
    /// strings accepted by the `FromStr` implementation.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let label = match self {
            AIProvider::OpenAI => "openai",
            AIProvider::Ollama => "ollama",
            AIProvider::Claude => "claude",
        };
        f.write_str(label)
    }
}
impl std::str::FromStr for AIProvider {
    type Err = anyhow::Error;

    /// Parses a case-insensitive provider name.
    ///
    /// Accepts "openai" (or the alias "gpt"), "ollama", and "claude";
    /// anything else yields an error naming the rejected input.
    fn from_str(s: &str) -> Result<Self> {
        let normalized = s.to_lowercase();
        match normalized.as_str() {
            "openai" | "gpt" => Ok(AIProvider::OpenAI),
            "ollama" => Ok(AIProvider::Ollama),
            "claude" => Ok(AIProvider::Claude),
            _ => Err(anyhow!("Unknown AI provider: {}", s)),
        }
    }
}
/// Connection and generation settings for one AI provider.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AIConfig {
    pub provider: AIProvider,
    // Model identifier as understood by the provider (e.g. "gpt-4o-mini").
    pub model: String,
    // Required for OpenAI; unused by Ollama.
    pub api_key: Option<String>,
    // Server base URL; used by Ollama (defaults to localhost:11434 when None).
    pub base_url: Option<String>,
    // Generation limits; only sent to OpenAI in the current client code.
    pub max_tokens: Option<u32>,
    pub temperature: Option<f32>,
}
impl Default for AIConfig {
    /// The out-of-the-box configuration: a local Ollama server running
    /// llama2 with moderate generation limits.
    fn default() -> Self {
        Self {
            provider: AIProvider::Ollama,
            model: String::from("llama2"),
            api_key: None,
            base_url: Some(String::from("http://localhost:11434")),
            max_tokens: Some(2048),
            temperature: Some(0.7),
        }
    }
}
/// One turn of a conversation, in the role/content shape shared by the
/// OpenAI and Ollama chat APIs.
#[derive(Debug, Clone)]
pub struct ChatMessage {
    // "system", "user", or "assistant" — see the constructors below.
    pub role: String,
    pub content: String,
}
/// The provider's reply to a chat request.
#[derive(Debug, Clone)]
pub struct ChatResponse {
    pub content: String,
    // Total tokens reported by the provider; None for Ollama, which does
    // not return a count in this client.
    pub tokens_used: Option<u32>,
    // Echo of the model the request was made with.
    pub model: String,
}
/// HTTP client wrapper that dispatches chat requests to the provider
/// selected in `config`.
pub struct AIProviderClient {
    config: AIConfig,
    // Shared reqwest client; reused across requests for connection pooling.
    http_client: reqwest::Client,
}
impl AIProviderClient {
    /// Creates a client for `config`, backed by a fresh `reqwest` client.
    pub fn new(config: AIConfig) -> Self {
        AIProviderClient {
            config,
            http_client: reqwest::Client::new(),
        }
    }

    /// Sends the conversation to the configured provider and returns its
    /// reply. `system_prompt`, when given, is prepended as a "system"
    /// message before the conversation turns.
    ///
    /// # Errors
    /// Fails on network errors, non-success HTTP statuses, unexpected
    /// response shapes, or an unimplemented provider (Claude).
    pub async fn chat(&self, messages: Vec<ChatMessage>, system_prompt: Option<String>) -> Result<ChatResponse> {
        match self.config.provider {
            AIProvider::OpenAI => self.chat_openai(messages, system_prompt).await,
            AIProvider::Ollama => self.chat_ollama(messages, system_prompt).await,
            AIProvider::Claude => self.chat_claude(messages, system_prompt).await,
        }
    }

    /// Builds the JSON message array shared by the OpenAI and Ollama chat
    /// endpoints: optional system prompt first, then the conversation.
    /// (Extracted: this code was previously duplicated in both callers.)
    fn build_request_messages(messages: Vec<ChatMessage>, system_prompt: Option<String>) -> Vec<serde_json::Value> {
        let mut request_messages = Vec::with_capacity(messages.len() + 1);

        if let Some(system) = system_prompt {
            request_messages.push(serde_json::json!({
                "role": "system",
                "content": system
            }));
        }

        for msg in messages {
            request_messages.push(serde_json::json!({
                "role": msg.role,
                "content": msg.content
            }));
        }

        request_messages
    }

    /// Calls the OpenAI chat completions endpoint.
    ///
    /// # Errors
    /// Fails when no API key is configured, on HTTP/network errors, or
    /// when the response lacks `choices[0].message.content`.
    async fn chat_openai(&self, messages: Vec<ChatMessage>, system_prompt: Option<String>) -> Result<ChatResponse> {
        let api_key = self.config.api_key.as_ref()
            .ok_or_else(|| anyhow!("OpenAI API key required"))?;

        let request_messages = Self::build_request_messages(messages, system_prompt);

        let request_body = serde_json::json!({
            "model": self.config.model,
            "messages": request_messages,
            "max_tokens": self.config.max_tokens,
            "temperature": self.config.temperature
        });

        let response = self.http_client
            .post("https://api.openai.com/v1/chat/completions")
            .header("Authorization", format!("Bearer {}", api_key))
            .header("Content-Type", "application/json")
            .json(&request_body)
            .send()
            .await?;

        if !response.status().is_success() {
            let error_text = response.text().await?;
            return Err(anyhow!("OpenAI API error: {}", error_text));
        }

        let response_json: serde_json::Value = response.json().await?;

        let content = response_json["choices"][0]["message"]["content"]
            .as_str()
            .ok_or_else(|| anyhow!("Invalid OpenAI response format"))?
            .to_string();

        let tokens_used = response_json["usage"]["total_tokens"]
            .as_u64()
            .map(|t| t as u32);

        Ok(ChatResponse {
            content,
            tokens_used,
            model: self.config.model.clone(),
        })
    }

    /// Calls an Ollama server's `/api/chat` endpoint (non-streaming).
    /// Uses `config.base_url`, falling back to `http://localhost:11434`.
    ///
    /// # Errors
    /// Fails on HTTP/network errors or when the response lacks
    /// `message.content`.
    async fn chat_ollama(&self, messages: Vec<ChatMessage>, system_prompt: Option<String>) -> Result<ChatResponse> {
        let default_url = "http://localhost:11434".to_string();
        let base_url = self.config.base_url.as_ref()
            .unwrap_or(&default_url);

        let request_messages = Self::build_request_messages(messages, system_prompt);

        let request_body = serde_json::json!({
            "model": self.config.model,
            "messages": request_messages,
            "stream": false
        });

        let url = format!("{}/api/chat", base_url);
        let response = self.http_client
            .post(&url)
            .header("Content-Type", "application/json")
            .json(&request_body)
            .send()
            .await?;

        if !response.status().is_success() {
            let error_text = response.text().await?;
            return Err(anyhow!("Ollama API error: {}", error_text));
        }

        let response_json: serde_json::Value = response.json().await?;

        let content = response_json["message"]["content"]
            .as_str()
            .ok_or_else(|| anyhow!("Invalid Ollama response format"))?
            .to_string();

        Ok(ChatResponse {
            content,
            tokens_used: None, // Ollama doesn't typically return token counts
            model: self.config.model.clone(),
        })
    }

    /// Claude support is not implemented; always returns an error.
    async fn chat_claude(&self, _messages: Vec<ChatMessage>, _system_prompt: Option<String>) -> Result<ChatResponse> {
        Err(anyhow!("Claude provider not yet implemented"))
    }

    /// Returns the configured model name.
    pub fn get_model(&self) -> &str {
        &self.config.model
    }

    /// Returns the configured provider.
    pub fn get_provider(&self) -> &AIProvider {
        &self.config.provider
    }
}
// Convenience functions for creating common message types
impl ChatMessage {
    /// Shared constructor: pairs the given role with owned content.
    fn with_role(role: &str, content: impl Into<String>) -> Self {
        ChatMessage {
            role: role.to_string(),
            content: content.into(),
        }
    }

    /// Builds a message with the "user" role.
    pub fn user(content: impl Into<String>) -> Self {
        Self::with_role("user", content)
    }

    /// Builds a message with the "assistant" role.
    pub fn assistant(content: impl Into<String>) -> Self {
        Self::with_role("assistant", content)
    }

    /// Builds a message with the "system" role.
    pub fn system(content: impl Into<String>) -> Self {
        Self::with_role("system", content)
    }
}

View File

@@ -1,54 +0,0 @@
use aigpt::config::Config;
use anyhow::Result;
/// Diagnostic binary: reports which candidate config.json files exist,
/// then loads the configuration and dumps its contents to stdout.
fn main() -> Result<()> {
    println!("Testing configuration loading...");

    // Debug: check which JSON files exist
    let possible_paths = [
        "../config.json",
        "config.json",
        "gpt/config.json",
        "/Users/syui/ai/ai/gpt/config.json",
    ];
    println!("Checking for config.json files:");
    for path in possible_paths {
        let marker = if std::path::Path::new(path).exists() { "✓ Found" } else { "✗ Not found" };
        println!("  {}: {}", marker, path);
    }

    // Load configuration
    let config = Config::new(None)?;
    println!("Configuration loaded successfully!");
    println!("Default provider: {}", config.default_provider);

    println!("Available providers:");
    for (name, provider) in &config.providers {
        println!("  - {}: model={}, host={:?}",
            name,
            provider.default_model,
            provider.host);
    }

    if let Some(mcp) = &config.mcp {
        println!("\nMCP Configuration:");
        println!("  Enabled: {}", mcp.enabled);
        println!("  Auto-detect: {}", mcp.auto_detect);
        println!("  Servers: {}", mcp.servers.len());
    }

    if let Some(atproto) = &config.atproto {
        println!("\nATProto Configuration:");
        println!("  Host: {}", atproto.host);
        println!("  Handle: {:?}", atproto.handle);
    }

    println!("\nConfig file path: {}", config.data_dir.join("config.json").display());
    Ok(())
}

64
src/cli.rs Normal file
View File

@@ -0,0 +1,64 @@
// src/cli.rs
use clap::{Parser, Subcommand};
// Top-level CLI arguments, parsed by clap's derive API.
// NOTE: `///` doc comments on clap items become --help text, so internal
// notes here deliberately use `//` comments only.
#[derive(Parser)]
#[command(name = "aigpt")]
#[command(about = "AI GPT CLI with MCP Server and Memory")]
pub struct Args {
    #[command(subcommand)]
    pub command: Commands,
}
// Top-level subcommands. The `///` comments below double as clap help
// text and must not be changed casually.
#[derive(Subcommand)]
pub enum Commands {
    /// MCP Server management
    Server {
        #[command(subcommand)]
        command: ServerCommands,
    },
    /// Chat with AI
    Chat {
        /// Message to send
        message: String,
        /// Use memory context
        #[arg(long)]
        with_memory: bool,
    },
    /// Memory management
    Memory {
        #[command(subcommand)]
        command: MemoryCommands,
    },
}
// Subcommands under `aigpt server`. `///` lines are clap help text.
#[derive(Subcommand)]
pub enum ServerCommands {
    /// Setup Python MCP server environment
    Setup,
    /// Run the MCP server
    Run,
}
// Subcommands under `aigpt memory`. `///` lines are clap help text.
#[derive(Subcommand)]
pub enum MemoryCommands {
    /// Import ChatGPT conversation export file
    Import {
        /// Path to ChatGPT export JSON file
        file: String,
    },
    /// Search memories
    Search {
        /// Search query
        query: String,
        /// Maximum number of results
        #[arg(short, long, default_value = "10")]
        limit: usize,
    },
    /// List all memories
    List,
    /// Show memory details
    Detail {
        /// Path to memory file
        filepath: String,
    },
}

View File

@@ -1,36 +0,0 @@
use clap::Subcommand;
use std::path::PathBuf;
// Subcommands for Claude Code token-usage reporting. All variants accept
// an optional --claude-dir override for the data directory.
// `///` lines are clap help text and must not be changed casually.
#[derive(Subcommand)]
pub enum TokenCommands {
    /// Show Claude Code token usage summary and estimated costs
    Summary {
        /// Time period (today, week, month, all)
        #[arg(long, default_value = "today")]
        period: String,
        /// Claude Code data directory path
        #[arg(long)]
        claude_dir: Option<PathBuf>,
        /// Show detailed breakdown
        #[arg(long)]
        details: bool,
        /// Output format (table, json)
        #[arg(long, default_value = "table")]
        format: String,
    },
    /// Show daily token usage breakdown
    Daily {
        /// Number of days to show
        #[arg(long, default_value = "7")]
        days: u32,
        /// Claude Code data directory path
        #[arg(long)]
        claude_dir: Option<PathBuf>,
    },
    /// Check Claude Code data availability and basic stats
    Status {
        /// Claude Code data directory path
        #[arg(long)]
        claude_dir: Option<PathBuf>,
    },
}

View File

@@ -1,140 +0,0 @@
use std::path::PathBuf;
use anyhow::Result;
use crate::config::Config;
use crate::mcp_server::MCPServer;
use crate::persona::Persona;
use crate::transmission::TransmissionController;
use crate::scheduler::AIScheduler;
// Token commands enum (placeholder for tokens.rs)
// Placeholder variant set for a future tokens.rs module; currently no
// handlers are wired up for these in this file.
#[derive(Debug, clap::Subcommand)]
pub enum TokenCommands {
    // Analyze a single Claude Code data file.
    Analyze { file: PathBuf },
    // Usage report over the last N days.
    Report { days: Option<u32> },
    // Cost estimate for a given month (format unclear from here — TODO confirm).
    Cost { month: Option<String> },
    // Aggregate summary with optional detail/format controls.
    Summary { period: Option<String>, claude_dir: Option<PathBuf>, details: bool, format: Option<String> },
    // Per-day breakdown.
    Daily { days: Option<u32>, claude_dir: Option<PathBuf> },
    // Data-availability check.
    Status { claude_dir: Option<PathBuf> },
}
/// Starts the MCP server, defaulting to port 8080 when none is given.
pub async fn handle_server(port: Option<u16>, data_dir: Option<PathBuf>) -> Result<()> {
    let config = Config::new(data_dir.clone())?;
    let mut server = MCPServer::new(config, "mcp_user".to_string(), data_dir)?;
    server.start_server(port.unwrap_or(8080)).await
}
/// Sends one message to the persona and prints the reply plus the
/// resulting relationship change for `user_id`.
///
/// When a provider or model is given, the message goes through the AI
/// backend; otherwise the persona's local interaction path is used.
pub async fn handle_chat(
    user_id: String,
    message: String,
    data_dir: Option<PathBuf>,
    model: Option<String>,
    provider: Option<String>,
) -> Result<()> {
    let config = Config::new(data_dir)?;
    let mut persona = Persona::new(&config)?;

    let use_ai_backend = provider.is_some() || model.is_some();
    let (response, relationship_delta) = if use_ai_backend {
        persona.process_ai_interaction(&user_id, &message, provider, model).await?
    } else {
        persona.process_interaction(&user_id, &message)?
    };

    println!("AI Response: {}", response);
    println!("Relationship Change: {:+.2}", relationship_delta);

    if let Some(rel) = persona.get_relationship(&user_id) {
        println!("Relationship Status: {} (Score: {:.2})",
            rel.status, rel.score);
    }

    Ok(())
}
/// Prints the persona's current fortune, mood, and breakthrough status.
pub async fn handle_fortune(data_dir: Option<PathBuf>) -> Result<()> {
    let config = Config::new(data_dir)?;
    let persona = Persona::new(&config)?;
    let state = persona.get_current_state()?;

    let breakthrough = if state.breakthrough_triggered { "Active" } else { "Inactive" };
    println!("🔮 Today's Fortune: {}", state.fortune_value);
    println!("😊 Current Mood: {}", state.current_mood);
    println!("✨ Breakthrough Status: {}", breakthrough);
    Ok(())
}
/// Lists every tracked relationship with its status, score, and
/// interaction count; prints a notice when none exist.
pub async fn handle_relationships(data_dir: Option<PathBuf>) -> Result<()> {
    let config = Config::new(data_dir)?;
    let persona = Persona::new(&config)?;
    let relationships = persona.list_all_relationships();

    if relationships.is_empty() {
        println!("No relationships found.");
    } else {
        println!("📊 Relationships ({}):", relationships.len());
        for (user_id, rel) in relationships {
            println!(" {} - {} (Score: {:.2}, Interactions: {})",
                user_id, rel.status, rel.score, rel.total_interactions);
        }
    }
    Ok(())
}
/// Runs all three transmission checks (autonomous, breakthrough,
/// maintenance) and prints per-category plus total counts.
pub async fn handle_transmit(data_dir: Option<PathBuf>) -> Result<()> {
    let config = Config::new(data_dir)?;
    let mut persona = Persona::new(&config)?;
    let mut transmission_controller = TransmissionController::new(config)?;

    let autonomous = transmission_controller.check_autonomous_transmissions(&mut persona).await?;
    let breakthrough = transmission_controller.check_breakthrough_transmissions(&mut persona).await?;
    let maintenance = transmission_controller.check_maintenance_transmissions(&mut persona).await?;

    println!("📡 Transmission Check Complete:");
    let counts = [
        ("Autonomous", autonomous.len()),
        ("Breakthrough", breakthrough.len()),
        ("Maintenance", maintenance.len()),
    ];
    for (label, count) in counts {
        println!(" {}: {}", label, count);
    }
    println!(" Total: {}", autonomous.len() + breakthrough.len() + maintenance.len());
    Ok(())
}
/// Runs the persona's daily maintenance routine, sends any
/// maintenance-triggered transmissions, and prints a summary.
pub async fn handle_maintenance(data_dir: Option<PathBuf>) -> Result<()> {
    let config = Config::new(data_dir)?;
    let mut persona = Persona::new(&config)?;
    let mut transmission_controller = TransmissionController::new(config)?;
    // Persona-side housekeeping; exact effects live in Persona::daily_maintenance.
    persona.daily_maintenance()?;
    let maintenance_transmissions = transmission_controller.check_maintenance_transmissions(&mut persona).await?;
    let stats = persona.get_relationship_stats();
    println!("🔧 Daily maintenance completed");
    println!("📤 Maintenance transmissions sent: {}", maintenance_transmissions.len());
    println!("📊 Relationship stats: {:?}", stats);
    Ok(())
}
/// Executes all due scheduled tasks via the AIScheduler and prints
/// execution counts plus aggregate scheduler statistics.
pub async fn handle_schedule(data_dir: Option<PathBuf>) -> Result<()> {
    let config = Config::new(data_dir)?;
    let mut persona = Persona::new(&config)?;
    // Controller takes ownership of a config clone; the scheduler borrows.
    let mut transmission_controller = TransmissionController::new(config.clone())?;
    let mut scheduler = AIScheduler::new(&config)?;
    let executions = scheduler.run_scheduled_tasks(&mut persona, &mut transmission_controller).await?;
    let stats = scheduler.get_scheduler_stats();
    println!("⏰ Scheduler run completed");
    println!("📋 Tasks executed: {}", executions.len());
    println!("📊 Stats: {} total tasks, {} enabled, {:.2}% success rate",
        stats.total_tasks, stats.enabled_tasks, stats.success_rate);
    Ok(())
}

View File

@@ -1,250 +1,59 @@
use std::path::PathBuf; // src/config.rs
use std::collections::HashMap; use std::fs;
use serde::{Deserialize, Serialize}; use std::path::{Path, PathBuf};
use anyhow::{Result, Context}; use shellexpand;
use crate::ai_provider::{AIConfig, AIProvider}; pub struct ConfigPaths {
pub base_dir: PathBuf,
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
#[serde(skip)]
pub data_dir: PathBuf,
pub default_provider: String,
pub providers: HashMap<String, ProviderConfig>,
#[serde(default)]
pub atproto: Option<AtprotoConfig>,
#[serde(default)]
pub mcp: Option<McpConfig>,
} }
#[derive(Debug, Clone, Serialize, Deserialize)] impl ConfigPaths {
pub struct ProviderConfig { pub fn new() -> Self {
pub default_model: String, let app_name = env!("CARGO_PKG_NAME");
#[serde(skip_serializing_if = "Option::is_none")] let mut base_dir = shellexpand::tilde("~").to_string();
pub host: Option<String>, base_dir.push_str(&format!("/.config/{}/", app_name));
#[serde(skip_serializing_if = "Option::is_none")] let base_path = Path::new(&base_dir);
pub api_key: Option<String>, if !base_path.exists() {
#[serde(skip_serializing_if = "Option::is_none")] let _ = fs::create_dir_all(base_path);
pub system_prompt: Option<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AtprotoConfig {
pub handle: Option<String>,
pub password: Option<String>,
pub host: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct McpConfig {
#[serde(deserialize_with = "string_to_bool")]
pub enabled: bool,
#[serde(deserialize_with = "string_to_bool")]
pub auto_detect: bool,
pub servers: HashMap<String, McpServerConfig>,
}
fn string_to_bool<'de, D>(deserializer: D) -> Result<bool, D::Error>
where
D: serde::Deserializer<'de>,
{
use serde::Deserialize;
let s = String::deserialize(deserializer)?;
match s.as_str() {
"true" => Ok(true),
"false" => Ok(false),
_ => Err(serde::de::Error::custom("expected 'true' or 'false'")),
} }
}
#[derive(Debug, Clone, Serialize, Deserialize)] ConfigPaths {
pub struct McpServerConfig { base_dir: base_path.to_path_buf(),
pub base_url: String, }
pub name: String, }
#[serde(deserialize_with = "string_to_f64")]
pub timeout: f64,
pub endpoints: HashMap<String, String>,
}
fn string_to_f64<'de, D>(deserializer: D) -> Result<f64, D::Error> #[allow(dead_code)]
where pub fn data_file(&self, file_name: &str) -> PathBuf {
D: serde::Deserializer<'de>, let file_path = match file_name {
{ "db" => self.base_dir.join("user.db"),
use serde::Deserialize; "toml" => self.base_dir.join("user.toml"),
let s = String::deserialize(deserializer)?; "json" => self.base_dir.join("user.json"),
s.parse::<f64>().map_err(serde::de::Error::custom) _ => self.base_dir.join(format!(".{}", file_name)),
} };
file_path
}
impl Config { pub fn mcp_dir(&self) -> PathBuf {
pub fn new(data_dir: Option<PathBuf>) -> Result<Self> { self.base_dir.join("mcp")
let data_dir = data_dir.unwrap_or_else(|| { }
dirs::config_dir()
.unwrap_or_else(|| PathBuf::from("."))
.join("syui")
.join("ai")
.join("gpt")
});
// Ensure data directory exists pub fn venv_path(&self) -> PathBuf {
std::fs::create_dir_all(&data_dir) self.mcp_dir().join(".venv")
.context("Failed to create data directory")?; }
let config_path = data_dir.join("config.json"); pub fn python_executable(&self) -> PathBuf {
if cfg!(windows) {
// Try to load existing config self.venv_path().join("Scripts").join("python.exe")
if config_path.exists() {
let config_str = std::fs::read_to_string(&config_path)
.context("Failed to read config.json")?;
// Check if file is empty
if config_str.trim().is_empty() {
eprintln!("Config file is empty, will recreate from source");
} else { } else {
match serde_json::from_str::<Config>(&config_str) { self.venv_path().join("bin").join("python")
Ok(mut config) => {
config.data_dir = data_dir;
// Check for environment variables if API keys are empty
if let Some(openai_config) = config.providers.get_mut("openai") {
if openai_config.api_key.as_ref().map_or(true, |key| key.is_empty()) {
openai_config.api_key = std::env::var("OPENAI_API_KEY").ok();
}
}
return Ok(config);
}
Err(e) => {
eprintln!("Failed to parse existing config.json: {}", e);
eprintln!("Will try to reload from source...");
}
}
} }
} }
// Check if we need to migrate from JSON pub fn pip_executable(&self) -> PathBuf {
// Try multiple locations for the JSON file if cfg!(windows) {
let possible_json_paths = vec![ self.venv_path().join("Scripts").join("pip.exe")
PathBuf::from("../config.json"), // Relative to aigpt-rs directory } else {
PathBuf::from("config.json"), // Current directory self.venv_path().join("bin").join("pip")
PathBuf::from("gpt/config.json"), // From project root
PathBuf::from("/Users/syui/ai/ai/gpt/config.json"), // Absolute path
];
for json_path in possible_json_paths {
if json_path.exists() {
eprintln!("Found config.json at: {}", json_path.display());
eprintln!("Copying configuration...");
// Copy configuration file and parse it
std::fs::copy(&json_path, &config_path)
.context("Failed to copy config.json")?;
let config_str = std::fs::read_to_string(&config_path)
.context("Failed to read copied config.json")?;
println!("Config JSON content preview: {}", &config_str[..std::cmp::min(200, config_str.len())]);
let mut config: Config = serde_json::from_str(&config_str)
.context("Failed to parse config.json")?;
config.data_dir = data_dir;
// Check for environment variables if API keys are empty
if let Some(openai_config) = config.providers.get_mut("openai") {
if openai_config.api_key.as_ref().map_or(true, |key| key.is_empty()) {
openai_config.api_key = std::env::var("OPENAI_API_KEY").ok();
} }
} }
eprintln!("Copy complete! Config saved to: {}", config_path.display());
return Ok(config);
}
}
// Create default config
let config = Self::default_config(data_dir);
// Save default config
let json_str = serde_json::to_string_pretty(&config)
.context("Failed to serialize default config")?;
std::fs::write(&config_path, json_str)
.context("Failed to write default config.json")?;
Ok(config)
}
pub fn save(&self) -> Result<()> {
let config_path = self.data_dir.join("config.json");
let json_str = serde_json::to_string_pretty(self)
.context("Failed to serialize config")?;
std::fs::write(&config_path, json_str)
.context("Failed to write config.json")?;
Ok(())
}
fn default_config(data_dir: PathBuf) -> Self {
let mut providers = HashMap::new();
providers.insert("ollama".to_string(), ProviderConfig {
default_model: "qwen2.5".to_string(),
host: Some("http://localhost:11434".to_string()),
api_key: None,
system_prompt: None,
});
providers.insert("openai".to_string(), ProviderConfig {
default_model: "gpt-4o-mini".to_string(),
host: None,
api_key: std::env::var("OPENAI_API_KEY").ok(),
system_prompt: None,
});
Config {
data_dir,
default_provider: "ollama".to_string(),
providers,
atproto: None,
mcp: None,
}
}
pub fn get_provider(&self, provider_name: &str) -> Option<&ProviderConfig> {
self.providers.get(provider_name)
}
pub fn get_ai_config(&self, provider: Option<String>, model: Option<String>) -> Result<AIConfig> {
let provider_name = provider.as_deref().unwrap_or(&self.default_provider);
let provider_config = self.get_provider(provider_name)
.ok_or_else(|| anyhow::anyhow!("Unknown provider: {}", provider_name))?;
let ai_provider: AIProvider = provider_name.parse()?;
let model_name = model.unwrap_or_else(|| provider_config.default_model.clone());
Ok(AIConfig {
provider: ai_provider,
model: model_name,
api_key: provider_config.api_key.clone(),
base_url: provider_config.host.clone(),
max_tokens: Some(2048),
temperature: Some(0.7),
})
}
pub fn memory_file(&self) -> PathBuf {
self.data_dir.join("memories.json")
}
pub fn relationships_file(&self) -> PathBuf {
self.data_dir.join("relationships.json")
}
pub fn fortune_file(&self) -> PathBuf {
self.data_dir.join("fortune.json")
}
pub fn transmission_file(&self) -> PathBuf {
self.data_dir.join("transmissions.json")
}
pub fn scheduler_tasks_file(&self) -> PathBuf {
self.data_dir.join("scheduler_tasks.json")
}
pub fn scheduler_history_file(&self) -> PathBuf {
self.data_dir.join("scheduler_history.json")
}
} }

View File

@@ -1,205 +0,0 @@
use std::path::PathBuf;
use std::io::{self, Write};
use anyhow::Result;
use colored::*;
use crate::config::Config;
use crate::persona::Persona;
use crate::http_client::ServiceDetector;
/// Interactive conversation loop: reads stdin lines until the user types
/// an exit word ("exit", "quit", "bye", or an empty line). Lines starting
/// with '/' are dispatched to `handle_mcp_command`; everything else goes
/// to the persona, and the relationship delta is shown when significant.
pub async fn handle_conversation(
    user_id: String,
    data_dir: Option<PathBuf>,
    model: Option<String>,
    provider: Option<String>,
) -> Result<()> {
    let config = Config::new(data_dir)?;
    let mut persona = Persona::new(&config)?;
    println!("{}", "Starting conversation mode...".cyan());
    println!("{}", "Type your message and press Enter to chat.".yellow());
    println!("{}", "Available MCP commands: /memories, /search, /context, /relationship, /cards".yellow());
    println!("{}", "Type 'exit', 'quit', or 'bye' to end conversation.".yellow());
    println!("{}", "---".dimmed());
    // NOTE(review): conversation_history is accumulated and trimmed but never
    // read back for context in this function — presumably a future hook.
    let mut conversation_history = Vec::new();
    let service_detector = ServiceDetector::new();
    loop {
        // Print prompt
        print!("{} ", "You:".cyan().bold());
        io::stdout().flush()?;
        // Read user input
        let mut input = String::new();
        io::stdin().read_line(&mut input)?;
        let input = input.trim();
        // Check for exit commands (an empty line also exits)
        if matches!(input.to_lowercase().as_str(), "exit" | "quit" | "bye" | "") {
            println!("{}", "Goodbye! 👋".green());
            break;
        }
        // Handle MCP commands
        if input.starts_with('/') {
            handle_mcp_command(input, &user_id, &service_detector).await?;
            continue;
        }
        // Add to conversation history
        conversation_history.push(format!("User: {}", input));
        // Get AI response — AI backend only when a provider/model was pinned
        let (response, relationship_delta) = if provider.is_some() || model.is_some() {
            persona.process_ai_interaction(&user_id, input, provider.clone(), model.clone()).await?
        } else {
            persona.process_interaction(&user_id, input)?
        };
        // Add AI response to history
        conversation_history.push(format!("AI: {}", response));
        // Display response
        println!("{} {}", "AI:".green().bold(), response);
        // Show relationship change if significant (|delta| >= 0.1)
        if relationship_delta.abs() >= 0.1 {
            if relationship_delta > 0.0 {
                println!("{}", format!(" └─ (+{:.2} relationship)", relationship_delta).green().dimmed());
            } else {
                println!("{}", format!(" └─ ({:.2} relationship)", relationship_delta).red().dimmed());
            }
        }
        println!(); // Add some spacing
        // Keep conversation history manageable (last 20 exchanges)
        if conversation_history.len() > 40 {
            conversation_history.drain(0..20);
        }
    }
    Ok(())
}
/// Dispatches a slash command typed in conversation mode.
///
/// Supported: /memories, /search <query>, /context, /relationship,
/// /cards, /help. Service failures are reported to stdout rather than
/// propagated; only I/O on stdin/stdout can return an error.
async fn handle_mcp_command(
    command: &str,
    user_id: &str,
    service_detector: &ServiceDetector,
) -> Result<()> {
    // Strip the leading '/' and split into command word + arguments.
    let parts: Vec<&str> = command[1..].split_whitespace().collect();
    if parts.is_empty() {
        return Ok(());
    }
    match parts[0] {
        "memories" => {
            println!("{}", "Retrieving memories...".yellow());
            // Get contextual memories (up to 10); errors are swallowed
            // into a user-facing failure message.
            if let Ok(memories) = service_detector.get_contextual_memories(user_id, 10).await {
                if memories.is_empty() {
                    println!("No memories found for this conversation.");
                } else {
                    println!("{}", format!("Found {} memories:", memories.len()).cyan());
                    for (i, memory) in memories.iter().enumerate() {
                        println!("  {}. {}", i + 1, memory.content);
                        println!("     {}", format!("({})", memory.created_at.format("%Y-%m-%d %H:%M")).dimmed());
                    }
                }
            } else {
                println!("{}", "Failed to retrieve memories.".red());
            }
        },
        "search" => {
            if parts.len() < 2 {
                println!("{}", "Usage: /search <query>".yellow());
                return Ok(());
            }
            // Re-join everything after the command word as the query.
            let query = parts[1..].join(" ");
            println!("{}", format!("Searching for: '{}'", query).yellow());
            if let Ok(results) = service_detector.search_memories(&query, 5).await {
                if results.is_empty() {
                    println!("No relevant memories found.");
                } else {
                    println!("{}", format!("Found {} relevant memories:", results.len()).cyan());
                    for (i, memory) in results.iter().enumerate() {
                        println!("  {}. {}", i + 1, memory.content);
                        println!("     {}", format!("({})", memory.created_at.format("%Y-%m-%d %H:%M")).dimmed());
                    }
                }
            } else {
                println!("{}", "Search failed.".red());
            }
        },
        "context" => {
            println!("{}", "Creating context summary...".yellow());
            if let Ok(summary) = service_detector.create_summary(user_id).await {
                println!("{}", "Context Summary:".cyan().bold());
                println!("{}", summary);
            } else {
                println!("{}", "Failed to create context summary.".red());
            }
        },
        "relationship" => {
            println!("{}", "Checking relationship status...".yellow());
            // This would need to be implemented in the service client —
            // the values below are hard-coded placeholders, not live data.
            println!("{}", "Relationship status: Active".cyan());
            println!("Score: 85.5 / 100");
            println!("Transmission: ✓ Enabled");
        },
        "cards" => {
            println!("{}", "Checking card collection...".yellow());
            // Try to connect to ai.card service
            if let Ok(stats) = service_detector.get_card_stats().await {
                println!("{}", "Card Collection:".cyan().bold());
                println!("  Total Cards: {}", stats.get("total").unwrap_or(&serde_json::Value::Number(0.into())));
                println!("  Unique Cards: {}", stats.get("unique").unwrap_or(&serde_json::Value::Number(0.into())));
                // Offer to draw a card (interactive y/n prompt)
                println!("\n{}", "Would you like to draw a card? (y/n)".yellow());
                let mut response = String::new();
                io::stdin().read_line(&mut response)?;
                if response.trim().to_lowercase() == "y" {
                    println!("{}", "Drawing card...".cyan());
                    if let Ok(card) = service_detector.draw_card(user_id, false).await {
                        println!("{}", "🎴 Card drawn!".green().bold());
                        println!("Name: {}", card.get("name").unwrap_or(&serde_json::Value::String("Unknown".to_string())));
                        println!("Rarity: {}", card.get("rarity").unwrap_or(&serde_json::Value::String("Unknown".to_string())));
                    } else {
                        println!("{}", "Failed to draw card. ai.card service might not be running.".red());
                    }
                }
            } else {
                println!("{}", "ai.card service not available.".red());
            }
        },
        "help" | "h" => {
            println!("{}", "Available MCP Commands:".cyan().bold());
            println!("  {:<15} - Show recent memories for this conversation", "/memories".yellow());
            println!("  {:<15} - Search memories by keyword", "/search <query>".yellow());
            println!("  {:<15} - Create a context summary", "/context".yellow());
            println!("  {:<15} - Show relationship status", "/relationship".yellow());
            println!("  {:<15} - Show card collection and draw cards", "/cards".yellow());
            println!("  {:<15} - Show this help message", "/help".yellow());
        },
        _ => {
            println!("{}", format!("Unknown command: /{}. Type '/help' for available commands.", parts[0]).red());
        }
    }
    println!(); // Add spacing after MCP command output
    Ok(())
}

View File

@@ -1,789 +0,0 @@
use std::collections::HashMap;
use std::path::PathBuf;
use anyhow::{Result, Context};
use colored::*;
use serde::{Deserialize, Serialize};
use chrono::Utc;
use crate::config::Config;
use crate::persona::Persona;
use crate::ai_provider::{AIProviderClient, AIConfig, AIProvider};
/// Entry point for the `docs` subcommand family.
///
/// Dispatches `action` to the DocsManager: "generate" (requires a
/// project name), "sync" (one project, or all when none given), "list",
/// "status", and "session-end". Unknown actions return an error.
pub async fn handle_docs(
    action: String,
    project: Option<String>,
    output: Option<PathBuf>,
    ai_integration: bool,
    data_dir: Option<PathBuf>,
) -> Result<()> {
    let config = Config::new(data_dir)?;
    let mut docs_manager = DocsManager::new(config);
    match action.as_str() {
        "generate" => {
            if let Some(project_name) = project {
                docs_manager.generate_project_docs(&project_name, output, ai_integration).await?;
            } else {
                return Err(anyhow::anyhow!("Project name is required for generate action"));
            }
        }
        "sync" => {
            // With a project name, sync just that project; otherwise all.
            if let Some(project_name) = project {
                docs_manager.sync_project_docs(&project_name).await?;
            } else {
                docs_manager.sync_all_docs().await?;
            }
        }
        "list" => {
            docs_manager.list_projects().await?;
        }
        "status" => {
            docs_manager.show_docs_status().await?;
        }
        "session-end" => {
            docs_manager.session_end_processing().await?;
        }
        _ => {
            return Err(anyhow::anyhow!("Unknown docs action: {}", action));
        }
    }
    Ok(())
}
/// Metadata describing one project tracked by the docs manager.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectInfo {
    pub name: String,
    // Free-form project category string — TODO confirm expected values.
    pub project_type: String,
    pub description: String,
    // Defaults to "active" (see the Default impl below).
    pub status: String,
    pub features: Vec<String>,
    pub dependencies: Vec<String>,
}
impl Default for ProjectInfo {
fn default() -> Self {
ProjectInfo {
name: String::new(),
project_type: String::new(),
description: String::new(),
status: "active".to_string(),
features: Vec::new(),
dependencies: Vec::new(),
}
}
}
/// Generates and synchronizes per-project claude.md documentation.
pub struct DocsManager {
    config: Config,
    // Root of the AI workspace (defaults to ~/ai/ai — see DocsManager::new).
    ai_root: PathBuf,
    // In-memory cache of known projects, keyed by project name.
    projects: HashMap<String, ProjectInfo>,
}
impl DocsManager {
pub fn new(config: Config) -> Self {
let ai_root = dirs::home_dir()
.unwrap_or_else(|| PathBuf::from("."))
.join("ai")
.join("ai");
DocsManager {
config,
ai_root,
projects: HashMap::new(),
}
}
/// Generate documentation for a single project.
///
/// Builds a baseline markdown document from the project's metadata,
/// optionally asks the AI to enhance it, and writes the result to `output`
/// (or to `<ai_root>/<project>/claude.md` when no path is given).
///
/// # Errors
/// Fails when metadata cannot be loaded or the output directory/file cannot
/// be created or written. An AI-enhancement failure is NOT fatal.
pub async fn generate_project_docs(&mut self, project: &str, output: Option<PathBuf>, ai_integration: bool) -> Result<()> {
    println!("{}", format!("📝 Generating documentation for project '{}'", project).cyan().bold());
    // Load project information
    let project_info = self.load_project_info(project)?;
    // Generate documentation content
    let mut content = self.generate_base_documentation(&project_info)?;
    // AI enhancement if requested
    if ai_integration {
        println!("{}", "🤖 Enhancing documentation with AI...".blue());
        // Best-effort: on AI failure we keep the base document and warn.
        if let Ok(enhanced_content) = self.enhance_with_ai(project, &content).await {
            content = enhanced_content;
        } else {
            println!("{}", "Warning: AI enhancement failed, using base documentation".yellow());
        }
    }
    // Determine output path: an explicit path wins over the default location.
    let output_path = if let Some(path) = output {
        path
    } else {
        self.ai_root.join(project).join("claude.md")
    };
    // Ensure directory exists
    if let Some(parent) = output_path.parent() {
        std::fs::create_dir_all(parent)
            .with_context(|| format!("Failed to create directory: {}", parent.display()))?;
    }
    // Write documentation
    std::fs::write(&output_path, content)
        .with_context(|| format!("Failed to write documentation to: {}", output_path.display()))?;
    println!("{}", format!("✅ Documentation generated: {}", output_path.display()).green().bold());
    Ok(())
}
/// Sync documentation for one project from the central claude checkout.
///
/// Copies `<ai_root>/claude/projects/<project>.md` into the project
/// directory under each of the names README.md, claude.md, DEVELOPMENT.md
/// (when the source file exists).
///
/// NOTE(review): every destination receives the SAME content from the
/// single per-project source file — presumably an intentional mirror, but
/// confirm README.md/DEVELOPMENT.md are meant to be identical copies.
///
/// # Errors
/// Fails when the claude directory is missing or a copy/mkdir fails.
pub async fn sync_project_docs(&self, project: &str) -> Result<()> {
    println!("{}", format!("🔄 Syncing documentation for project '{}'", project).cyan().bold());
    let claude_dir = self.ai_root.join("claude");
    let project_dir = self.ai_root.join(project);
    // Check if claude directory exists
    if !claude_dir.exists() {
        return Err(anyhow::anyhow!("Claude directory not found: {}", claude_dir.display()));
    }
    // The source path does not depend on the destination file name, so it is
    // computed (and its existence checked) once instead of per iteration.
    let src = claude_dir.join("projects").join(format!("{}.md", project));
    if src.exists() {
        let files_to_sync = ["README.md", "claude.md", "DEVELOPMENT.md"];
        for file in files_to_sync {
            let dst = project_dir.join(file);
            if let Some(parent) = dst.parent() {
                std::fs::create_dir_all(parent)?;
            }
            std::fs::copy(&src, &dst)?;
            println!(" ✓ Synced: {}", file.green());
        }
    }
    println!("{}", "✅ Documentation sync completed".green().bold());
    Ok(())
}
/// Sync documentation for every discovered project, then refresh the
/// ai.wiki checkout and the repository wiki.
///
/// Per-step failures are printed as warnings and do not abort the run.
pub async fn sync_all_docs(&self) -> Result<()> {
    println!("{}", "🔄 Syncing documentation for all projects...".cyan().bold());
    // Find all project directories
    let projects = self.discover_projects()?;
    for project in projects {
        println!("\n{}", format!("Syncing: {}", project).blue());
        if let Err(e) = self.sync_project_docs(&project).await {
            println!("{}: {}", "Warning".yellow(), e);
        }
    }
    // Generate ai.wiki content after all project syncs
    println!("\n{}", "📝 Updating ai.wiki...".blue());
    if let Err(e) = self.update_ai_wiki().await {
        println!("{}: Failed to update ai.wiki: {}", "Warning".yellow(), e);
    }
    // Update repository wiki (Gitea wiki) as well
    println!("\n{}", "📝 Updating repository wiki...".blue());
    if let Err(e) = self.update_repository_wiki().await {
        println!("{}: Failed to update repository wiki: {}", "Warning".yellow(), e);
    }
    println!("\n{}", "✅ All projects synced".green().bold());
    Ok(())
}
/// Print a table of all discovered projects (name, type, status,
/// description), caching each project's metadata in `self.projects`.
pub async fn list_projects(&mut self) -> Result<()> {
    println!("{}", "📋 Available Projects".cyan().bold());
    println!();
    let projects = self.discover_projects()?;
    if projects.is_empty() {
        println!("{}", "No projects found".yellow());
        return Ok(());
    }
    // Load project information into the cache; load failures are skipped
    // (those projects fall back to a default record below).
    for project in &projects {
        if let Ok(info) = self.load_project_info(project) {
            self.projects.insert(project.clone(), info);
        }
    }
    // Display projects in a table format
    println!("{:<20} {:<15} {:<15} {}",
        "Project".cyan().bold(),
        "Type".cyan().bold(),
        "Status".cyan().bold(),
        "Description".cyan().bold());
    println!("{}", "-".repeat(80));
    let project_count = projects.len();
    for project in &projects {
        let info = self.projects.get(project).cloned().unwrap_or_default();
        // Color-code the status column by lifecycle state.
        let status_color = match info.status.as_str() {
            "active" => info.status.green(),
            "development" => info.status.yellow(),
            "deprecated" => info.status.red(),
            _ => info.status.normal(),
        };
        println!("{:<20} {:<15} {:<15} {}",
            project.blue(),
            info.project_type,
            status_color,
            info.description);
    }
    println!();
    println!("Total projects: {}", project_count.to_string().cyan());
    Ok(())
}
/// Report per-project documentation status: line/byte counts of each
/// project's claude.md, plus file/line totals.
///
/// # Errors
/// Fails if project discovery fails or an existing claude.md cannot be read.
pub async fn show_docs_status(&self) -> Result<()> {
    println!("{}", "📊 Documentation Status".cyan().bold());
    println!();
    let projects = self.discover_projects()?;
    let mut total_files = 0;
    let mut total_lines = 0;
    for project in projects {
        let project_dir = self.ai_root.join(&project);
        let claude_md = project_dir.join("claude.md");
        if claude_md.exists() {
            let content = std::fs::read_to_string(&claude_md)?;
            let lines = content.lines().count();
            // Size in bytes (content.len() counts UTF-8 bytes, not chars).
            let size = content.len();
            println!("{}: {} lines, {} bytes",
                project.blue(),
                lines.to_string().yellow(),
                size.to_string().yellow());
            total_files += 1;
            total_lines += lines;
        } else {
            println!("{}: {}", project.blue(), "No documentation".red());
        }
    }
    println!();
    println!("Summary: {} files, {} total lines",
        total_files.to_string().cyan(),
        total_lines.to_string().cyan());
    Ok(())
}
/// Enumerate projects under `ai_root`: a fixed list of well-known names
/// (when their directory exists) plus any other directory carrying an
/// `ai.json` manifest. The result is deduplicated and sorted.
///
/// # Errors
/// Fails only when reading the `ai_root` directory listing fails.
fn discover_projects(&self) -> Result<Vec<String>> {
    // Seed with well-known project names that are present on disk.
    const KNOWN_PROJECTS: [&str; 8] =
        ["gpt", "card", "bot", "shell", "os", "game", "moji", "verse"];
    let mut projects: Vec<String> = KNOWN_PROJECTS
        .iter()
        .filter(|name| {
            let dir = self.ai_root.join(name);
            dir.exists() && dir.is_dir()
        })
        .map(|name| name.to_string())
        .collect();
    // Add any additional directory that contains an ai.json manifest.
    if self.ai_root.exists() {
        for entry in std::fs::read_dir(&self.ai_root)? {
            let path = entry?.path();
            if path.is_dir() && path.join("ai.json").exists() {
                if let Some(name) = path.file_name().and_then(|n| n.to_str()) {
                    if !projects.iter().any(|p| p == name) {
                        projects.push(name.to_string());
                    }
                }
            }
        }
    }
    projects.sort();
    Ok(projects)
}
fn load_project_info(&self, project: &str) -> Result<ProjectInfo> {
let ai_json_path = self.ai_root.join(project).join("ai.json");
if ai_json_path.exists() {
let content = std::fs::read_to_string(&ai_json_path)?;
if let Ok(json_data) = serde_json::from_str::<serde_json::Value>(&content) {
let mut info = ProjectInfo::default();
info.name = project.to_string();
if let Some(project_data) = json_data.get(project) {
if let Some(type_str) = project_data.get("type").and_then(|v| v.as_str()) {
info.project_type = type_str.to_string();
}
if let Some(desc) = project_data.get("description").and_then(|v| v.as_str()) {
info.description = desc.to_string();
}
}
return Ok(info);
}
}
// Default project info based on known projects
let mut info = ProjectInfo::default();
info.name = project.to_string();
match project {
"gpt" => {
info.project_type = "AI".to_string();
info.description = "Autonomous transmission AI with unique personality".to_string();
}
"card" => {
info.project_type = "Game".to_string();
info.description = "Card game system with atproto integration".to_string();
}
"bot" => {
info.project_type = "Bot".to_string();
info.description = "Distributed SNS bot for AI ecosystem".to_string();
}
"shell" => {
info.project_type = "Tool".to_string();
info.description = "AI-powered shell interface".to_string();
}
"os" => {
info.project_type = "OS".to_string();
info.description = "Game-oriented operating system".to_string();
}
"verse" => {
info.project_type = "Metaverse".to_string();
info.description = "Reality-reflecting 3D world system".to_string();
}
_ => {
info.project_type = "Unknown".to_string();
info.description = format!("AI ecosystem project: {}", project);
}
}
Ok(info)
}
/// Render the baseline markdown documentation for a project.
///
/// Produces a fixed-structure document (overview, features, architecture,
/// development, integration, footer) from `project_info` plus a generation
/// timestamp. Pure string building — no I/O.
fn generate_base_documentation(&self, project_info: &ProjectInfo) -> Result<String> {
    let timestamp = Utc::now().format("%Y-%m-%d %H:%M:%S UTC");
    let mut content = String::new();
    content.push_str(&format!("# {}\n\n", project_info.name));
    // Constant fragments use push_str directly; `format!` with no arguments
    // is a needless allocation (clippy::useless_format).
    content.push_str("## Overview\n\n");
    content.push_str(&format!("**Type**: {}\n\n", project_info.project_type));
    content.push_str(&format!("**Description**: {}\n\n", project_info.description));
    content.push_str(&format!("**Status**: {}\n\n", project_info.status));
    if !project_info.features.is_empty() {
        content.push_str("## Features\n\n");
        for feature in &project_info.features {
            content.push_str(&format!("- {}\n", feature));
        }
        content.push_str("\n");
    }
    content.push_str("## Architecture\n\n");
    content.push_str("This project is part of the ai ecosystem, following the core principles:\n\n");
    content.push_str("- **Existence Theory**: Based on the exploration of the smallest units (ai/existon)\n");
    content.push_str("- **Uniqueness Principle**: Ensuring 1:1 mapping between reality and digital existence\n");
    content.push_str("- **Reality Reflection**: Creating circular influence between reality and game\n\n");
    content.push_str("## Development\n\n");
    content.push_str("### Getting Started\n\n");
    content.push_str("```bash\n");
    content.push_str("# Clone the repository\n");
    content.push_str(&format!("git clone https://git.syui.ai/ai/{}\n", project_info.name));
    content.push_str(&format!("cd {}\n", project_info.name));
    content.push_str("```\n\n");
    content.push_str("### Configuration\n\n");
    content.push_str(&format!("Configuration files are stored in `~/.config/syui/ai/{}/`\n\n", project_info.name));
    content.push_str("## Integration\n\n");
    content.push_str("This project integrates with other ai ecosystem components:\n\n");
    if !project_info.dependencies.is_empty() {
        for dep in &project_info.dependencies {
            content.push_str(&format!("- **{}**: Core dependency\n", dep));
        }
    } else {
        // No explicit dependencies declared: list the ecosystem baseline.
        content.push_str("- **ai.gpt**: Core AI personality system\n");
        content.push_str("- **atproto**: Distributed identity and data\n");
    }
    content.push_str("\n");
    content.push_str("---\n\n");
    content.push_str(&format!("*Generated: {}*\n", timestamp));
    content.push_str("*🤖 Generated with [Claude Code](https://claude.ai/code)*\n");
    Ok(content)
}
/// Ask the persona/LLM to expand `base_content` for `project`.
///
/// Provider and model are currently hard-coded to ollama/llama2. Falls back
/// to the unmodified base documentation when the model's answer is shorter
/// than half the base length, guarding against empty/truncated generations.
///
/// The original code also constructed an `AIProviderClient` that was never
/// used (`_ai_provider`); that dead setup has been removed — the persona
/// interaction below is the only call path.
///
/// # Errors
/// Fails when the persona cannot be created or the AI interaction errors.
async fn enhance_with_ai(&self, project: &str, base_content: &str) -> Result<String> {
    let mut persona = Persona::new(&self.config)?;
    let enhancement_prompt = format!(
        "As an AI documentation expert, enhance the following documentation for project '{}'.
Current documentation:
{}
Please provide enhanced content that includes:
1. More detailed project description
2. Key features and capabilities
3. Usage examples
4. Integration points with other AI ecosystem projects
5. Development workflow recommendations
Keep the same structure but expand and improve the content.",
        project, base_content
    );
    // Try to get AI response
    let (response, _) = persona.process_ai_interaction(
        "docs_system",
        &enhancement_prompt,
        Some("ollama".to_string()),
        Some("llama2".to_string())
    ).await?;
    // If AI response is substantial, use it; otherwise fall back to base content
    if response.len() > base_content.len() / 2 {
        Ok(response)
    } else {
        Ok(base_content.to_string())
    }
}
/// End-of-session processing: record a session summary, sync all project
/// documentation, refresh README files and project metadata.
pub async fn session_end_processing(&mut self) -> Result<()> {
    println!("{}", "🔄 Session end processing started...".cyan());
    // 1. Record the current status of every project
    println!("📊 Recording current project status...");
    self.record_session_summary().await?;
    // 2. Sync documentation for all projects
    println!("🔄 Syncing all project documentation...");
    self.sync_all_docs().await?;
    // 3. Auto-update README files from claude.md
    println!("📝 Updating project README files...");
    self.update_project_readmes().await?;
    // 4. Refresh project metadata (ai.json timestamps)
    println!("🏷️ Updating project metadata...");
    self.update_project_metadata().await?;
    println!("{}", "✅ Session end processing completed!".green());
    Ok(())
}
/// Write a timestamped session-summary markdown file under
/// `<ai_root>/session_logs/`.
async fn record_session_summary(&self) -> Result<()> {
    let session_log_path = self.ai_root.join("session_logs");
    std::fs::create_dir_all(&session_log_path)?;
    let timestamp = Utc::now().format("%Y-%m-%d_%H-%M-%S");
    let log_file = session_log_path.join(format!("session_{}.md", timestamp));
    let summary = format!(
        "# Session Summary - {}\n\n\
        ## Timestamp\n{}\n\n\
        ## Projects Status\n{}\n\n\
        ## Next Actions\n- Documentation sync completed\n- README files updated\n- Metadata refreshed\n\n\
        ---\n*Generated by aigpt session-end processing*\n",
        timestamp,
        Utc::now().format("%Y-%m-%d %H:%M:%S UTC"),
        // Status generation is best-effort; it never fails the summary write.
        self.generate_projects_status().await.unwrap_or_else(|_| "Status unavailable".to_string())
    );
    std::fs::write(log_file, summary)?;
    Ok(())
}
/// Produce a markdown bullet list showing, for each project, whether its
/// claude.md and README.md files exist (✅/❌ markers).
async fn generate_projects_status(&self) -> Result<String> {
    let projects = self.discover_projects()?;
    let mut status = String::new();
    for project in projects {
        let project_dir = self.ai_root.join(&project);
        let has_claude = project_dir.join("claude.md").exists();
        let has_readme = project_dir.join("README.md").exists();
        status.push_str(&format!(
            "- **{}**: claude.md {} README.md {}\n",
            project,
            if has_claude { "✅" } else { "❌" },
            if has_readme { "✅" } else { "❌" },
        ));
    }
    Ok(status)
}
/// Regenerate the ai.wiki working copy: Home.md, title.md and one
/// subdirectory (with a generated page) per discovered project.
///
/// # Errors
/// Fails when the ai.wiki checkout is missing or any file write fails.
async fn update_ai_wiki(&self) -> Result<()> {
    let ai_wiki_path = self.ai_root.join("ai.wiki");
    // The ai.wiki checkout must already exist (it is a separate repository).
    if !ai_wiki_path.exists() {
        return Err(anyhow::anyhow!("ai.wiki directory not found at {:?}", ai_wiki_path));
    }
    // Generate Home.md
    let home_content = self.generate_wiki_home_content().await?;
    let home_path = ai_wiki_path.join("Home.md");
    std::fs::write(&home_path, &home_content)?;
    println!(" ✓ Updated: {}", "Home.md".green());
    // Generate title.md (special page for the Gitea wiki) with the same body
    let title_path = ai_wiki_path.join("title.md");
    std::fs::write(&title_path, &home_content)?;
    println!(" ✓ Updated: {}", "title.md".green());
    // Refresh the per-project wiki directories
    let projects = self.discover_projects()?;
    for project in projects {
        let project_dir = ai_wiki_path.join(&project);
        std::fs::create_dir_all(&project_dir)?;
        let project_content = self.generate_auto_project_content(&project).await?;
        let project_file = project_dir.join(format!("{}.md", project));
        std::fs::write(&project_file, project_content)?;
        println!(" ✓ Updated: {}", format!("{}/{}.md", project, project).green());
    }
    println!("{}", "✅ ai.wiki updated successfully".green().bold());
    Ok(())
}
/// Build the markdown body shared by ai.wiki's Home.md and title.md:
/// a categorized index of all projects with status and repository links.
async fn generate_wiki_home_content(&self) -> Result<String> {
    let timestamp = Utc::now().format("%Y-%m-%d %H:%M:%S");
    let mut content = String::new();
    content.push_str("# AI Ecosystem Wiki\n\n");
    content.push_str("AI生態系プロジェクトの概要とドキュメント集約ページです。\n\n");
    content.push_str("## プロジェクト一覧\n\n");
    let projects = self.discover_projects()?;
    let mut project_sections = std::collections::HashMap::new();
    // Bucket projects into display categories.
    // NOTE(review): some mappings look surprising ("game" -> "📁 device";
    // "ai" is matched here but never produced by discover_projects) —
    // confirm the intended category table.
    for project in &projects {
        let info = self.load_project_info(project).unwrap_or_default();
        let category = match project.as_str() {
            "ai" => "🧠 AI・知能システム",
            "gpt" => "🤖 自律・対話システム",
            "os" => "💻 システム・基盤",
            "game" => "📁 device",
            "card" => "🎮 ゲーム・エンターテイメント",
            "bot" | "moji" | "api" | "log" => "📁 その他",
            "verse" => "📁 metaverse",
            "shell" => "⚡ ツール・ユーティリティ",
            _ => "📁 その他",
        };
        project_sections.entry(category).or_insert_with(Vec::new).push((project.clone(), info));
    }
    // Emit categories in sorted order for a stable page layout.
    let mut categories: Vec<_> = project_sections.keys().collect();
    categories.sort();
    for category in categories {
        content.push_str(&format!("### {}\n\n", category));
        if let Some(projects_in_category) = project_sections.get(category) {
            for (project, info) in projects_in_category {
                content.push_str(&format!("#### [{}]({}.md)\n", project, project));
                // Summary line only when a description is available.
                if !info.description.is_empty() {
                    content.push_str(&format!("- **名前**: ai.{} - **パッケージ**: ai{} - **タイプ**: {} - **役割**: {}\n\n",
                        project, project, info.project_type, info.description));
                }
                content.push_str(&format!("**Status**: {} \n", info.status));
                let branch = self.get_project_branch(project);
                content.push_str(&format!("**Links**: [Repo](https://git.syui.ai/ai/{}) | [Docs](https://git.syui.ai/ai/{}/src/branch/{}/claude.md)\n\n", project, project, branch));
            }
        }
    }
    content.push_str("---\n\n");
    content.push_str("## ディレクトリ構成\n\n");
    content.push_str("- `{project}/` - プロジェクト個別ドキュメント\n");
    content.push_str("- `claude/` - Claude Code作業記録\n");
    content.push_str("- `manual/` - 手動作成ドキュメント\n\n");
    content.push_str("---\n\n");
    content.push_str("*このページは ai.json と claude/projects/ から自動生成されました* \n");
    content.push_str(&format!("*最終更新: {}*\n", timestamp));
    Ok(content)
}
/// Build the per-project page written to `ai.wiki/<project>/<project>.md`.
///
/// The branch lookup re-reads the root ai.json from disk on every call, so
/// it is resolved once up front and reused (the original called
/// `get_project_branch` twice with the same argument).
async fn generate_auto_project_content(&self, project: &str) -> Result<String> {
    let info = self.load_project_info(project).unwrap_or_default();
    let branch = self.get_project_branch(project);
    let mut content = String::new();
    content.push_str(&format!("# {}\n\n", project));
    content.push_str("## 概要\n");
    content.push_str(&format!("- **名前**: ai.{} - **パッケージ**: ai{} - **タイプ**: {} - **役割**: {}\n\n",
        project, project, info.project_type, info.description));
    content.push_str("## プロジェクト情報\n");
    content.push_str(&format!("- **タイプ**: {}\n", info.project_type));
    content.push_str(&format!("- **説明**: {}\n", info.description));
    content.push_str(&format!("- **ステータス**: {}\n", info.status));
    content.push_str(&format!("- **ブランチ**: {}\n", branch));
    content.push_str("- **最終更新**: Unknown\n\n");
    // Project-specific feature list (only when the manifest provides one).
    if !info.features.is_empty() {
        content.push_str("## 主な機能・特徴\n");
        for feature in &info.features {
            content.push_str(&format!("- {}\n", feature));
        }
        content.push_str("\n");
    }
    content.push_str("## リンク\n");
    content.push_str(&format!("- **Repository**: https://git.syui.ai/ai/{}\n", project));
    content.push_str(&format!("- **Project Documentation**: [claude/projects/{}.md](https://git.syui.ai/ai/ai/src/branch/main/claude/projects/{}.md)\n", project, project));
    content.push_str(&format!("- **Generated Documentation**: [{}/claude.md](https://git.syui.ai/ai/{}/src/branch/{}/claude.md)\n\n", project, project, branch));
    content.push_str("---\n");
    content.push_str(&format!("*このページは claude/projects/{}.md から自動生成されました*\n", project));
    Ok(content)
}
/// Informational no-op: the Gitea repository wiki is unified with ai.wiki
/// (the single source of truth), so this only prints an explanation.
async fn update_repository_wiki(&self) -> Result<()> {
    let notes = [
        " Repository wiki is now unified with ai.wiki",
        " ai.wiki serves as the source of truth (git@git.syui.ai:ai/ai.wiki.git)",
        " Special pages generated: Home.md, title.md for Gitea wiki compatibility",
    ];
    for note in notes {
        println!("{}", note);
    }
    Ok(())
}
/// Mirror each project's claude.md into its README.md.
///
/// Creates README.md when missing; otherwise overwrites it only when
/// claude.md has a newer filesystem modification time.
async fn update_project_readmes(&self) -> Result<()> {
    let projects = self.discover_projects()?;
    for project in projects {
        let readme_path = self.ai_root.join(&project).join("README.md");
        let claude_md_path = self.ai_root.join(&project).join("claude.md");
        // Only projects that have a claude.md are synced.
        if claude_md_path.exists() {
            let claude_content = std::fs::read_to_string(&claude_md_path)?;
            // Create README.md when it does not exist yet.
            if !readme_path.exists() {
                println!("📝 Creating README.md for {}", project);
                std::fs::write(&readme_path, &claude_content)?;
            } else {
                // Update an existing README only when claude.md is newer (mtime).
                let readme_metadata = std::fs::metadata(&readme_path)?;
                let claude_metadata = std::fs::metadata(&claude_md_path)?;
                if claude_metadata.modified()? > readme_metadata.modified()? {
                    println!("🔄 Updating README.md for {}", project);
                    std::fs::write(&readme_path, &claude_content)?;
                }
            }
        }
    }
    Ok(())
}
/// Stamp each project's ai.json with a fresh `last_updated` timestamp and
/// set its `status` to "active".
async fn update_project_metadata(&self) -> Result<()> {
    let projects = self.discover_projects()?;
    for project in projects {
        let ai_json_path = self.ai_root.join(&project).join("ai.json");
        if ai_json_path.exists() {
            let mut content = std::fs::read_to_string(&ai_json_path)?;
            let mut json_data: serde_json::Value = serde_json::from_str(&content)?;
            // Update the entry keyed by the project name, if present and an object.
            if let Some(project_data) = json_data.get_mut(&project) {
                if let Some(obj) = project_data.as_object_mut() {
                    obj.insert("last_updated".to_string(),
                        serde_json::Value::String(Utc::now().to_rfc3339()));
                    // NOTE(review): this unconditionally resets status to
                    // "active", clobbering e.g. "deprecated" — confirm intended.
                    obj.insert("status".to_string(),
                        serde_json::Value::String("active".to_string()));
                    content = serde_json::to_string_pretty(&json_data)?;
                    std::fs::write(&ai_json_path, content)?;
                }
            }
        }
    }
    Ok(())
}
/// Resolve the git branch recorded for `project` in the root ai.json
/// manifest (path `ai.<project>.branch`), defaulting to "main" when the
/// file, the entry, or the field is missing or unparsable.
fn get_project_branch(&self, project: &str) -> String {
    let main_ai_json_path = self.ai_root.join("ai.json");
    std::fs::read_to_string(&main_ai_json_path)
        .ok()
        .and_then(|content| serde_json::from_str::<serde_json::Value>(&content).ok())
        .and_then(|json_data| {
            json_data
                .get("ai")?
                .get(project)?
                .get("branch")?
                .as_str()
                .map(String::from)
        })
        .unwrap_or_else(|| "main".to_string())
}
}

View File

@@ -1,409 +0,0 @@
use anyhow::{anyhow, Result};
use reqwest::Client;
use serde_json::Value;
use serde::{Serialize, Deserialize};
use std::time::Duration;
use std::collections::HashMap;
/// Service configuration for unified service management
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ServiceConfig {
    // Root URL of the service, e.g. "http://localhost:8000".
    pub base_url: String,
    // Per-request timeout callers should apply for this service.
    pub timeout: Duration,
    // Path (relative to base_url) probed for liveness, e.g. "/health".
    pub health_endpoint: String,
}
impl Default for ServiceConfig {
    /// Defaults: localhost:8000, 30s timeout, "/health" probe path.
    fn default() -> Self {
        Self {
            base_url: "http://localhost:8000".to_string(),
            timeout: Duration::from_secs(30),
            health_endpoint: "/health".to_string(),
        }
    }
}
/// HTTP client for inter-service communication
pub struct ServiceClient {
    // Shared reqwest client (30s timeout; see constructors).
    client: Client,
    // Known services keyed by name ("ai.card", "ai.log", "ai.bot", ...).
    service_registry: HashMap<String, ServiceConfig>,
}
impl ServiceClient {
    /// Default construction: HTTP client plus the built-in service registry.
    pub fn new() -> Self {
        Self::with_default_services()
    }
    /// Create ServiceClient with default ai ecosystem services
    ///
    /// Registered defaults: ai.card -> :8000, ai.log -> :8002, ai.bot -> :8003.
    /// NOTE(review): ServiceDetector::detect_services probes ai.log on :8001
    /// and ai.bot on :8002 instead — confirm which port table is correct.
    pub fn with_default_services() -> Self {
        let client = Client::builder()
            .timeout(Duration::from_secs(30))
            .build()
            // Builder failure here means a broken TLS/runtime setup; fatal.
            .expect("Failed to create HTTP client");
        let mut service_registry = HashMap::new();
        // Register default ai ecosystem services
        service_registry.insert("ai.card".to_string(), ServiceConfig {
            base_url: "http://localhost:8000".to_string(),
            timeout: Duration::from_secs(30),
            health_endpoint: "/health".to_string(),
        });
        service_registry.insert("ai.log".to_string(), ServiceConfig {
            base_url: "http://localhost:8002".to_string(),
            timeout: Duration::from_secs(30),
            health_endpoint: "/health".to_string(),
        });
        service_registry.insert("ai.bot".to_string(), ServiceConfig {
            base_url: "http://localhost:8003".to_string(),
            timeout: Duration::from_secs(30),
            health_endpoint: "/health".to_string(),
        });
        Self { client, service_registry }
    }
    /// Create ServiceClient with custom service registry
    pub fn with_services(service_registry: HashMap<String, ServiceConfig>) -> Self {
        let client = Client::builder()
            .timeout(Duration::from_secs(30))
            .build()
            .expect("Failed to create HTTP client");
        Self { client, service_registry }
    }
    /// Register a new service configuration
    pub fn register_service(&mut self, name: String, config: ServiceConfig) {
        self.service_registry.insert(name, config);
    }
    /// Get service configuration by name
    ///
    /// # Errors
    /// Returns an error when `service` is not in the registry.
    pub fn get_service_config(&self, service: &str) -> Result<&ServiceConfig> {
        self.service_registry.get(service)
            .ok_or_else(|| anyhow!("Unknown service: {}", service))
    }
    /// Universal service method call (POST with a JSON-serialized body).
    pub async fn call_service_method<T: Serialize>(
        &self,
        service: &str,
        method: &str,
        params: &T
    ) -> Result<Value> {
        let config = self.get_service_config(service)?;
        // Normalize slashes so "base/" + "/method" never doubles up.
        let url = format!("{}/{}", config.base_url.trim_end_matches('/'), method.trim_start_matches('/'));
        self.post_request(&url, &serde_json::to_value(params)?).await
    }
    /// Universal service GET call
    pub async fn call_service_get(&self, service: &str, endpoint: &str) -> Result<Value> {
        let config = self.get_service_config(service)?;
        let url = format!("{}/{}", config.base_url.trim_end_matches('/'), endpoint.trim_start_matches('/'));
        self.get_request(&url).await
    }
    /// Check if a service is available
    ///
    /// NOTE(review): hard-codes the "/health" path instead of using the
    /// registry's `health_endpoint` — confirm that is acceptable.
    pub async fn check_service_status(&self, base_url: &str) -> Result<ServiceStatus> {
        let url = format!("{}/health", base_url.trim_end_matches('/'));
        match self.client.get(&url).send().await {
            Ok(response) => {
                if response.status().is_success() {
                    Ok(ServiceStatus::Available)
                } else {
                    // Reachable but unhealthy: report the HTTP status.
                    Ok(ServiceStatus::Error(format!("HTTP {}", response.status())))
                }
            }
            // Connection-level failure: service unreachable.
            Err(e) => Ok(ServiceStatus::Unavailable(e.to_string())),
        }
    }
    /// Make a GET request to a service, returning the parsed JSON body.
    pub async fn get_request(&self, url: &str) -> Result<Value> {
        let response = self.client
            .get(url)
            .send()
            .await?;
        if !response.status().is_success() {
            return Err(anyhow!("Request failed with status: {}", response.status()));
        }
        let json: Value = response.json().await?;
        Ok(json)
    }
    /// Make a POST request to a service with a JSON body, returning parsed JSON.
    pub async fn post_request(&self, url: &str, body: &Value) -> Result<Value> {
        let response = self.client
            .post(url)
            .header("Content-Type", "application/json")
            .json(body)
            .send()
            .await?;
        if !response.status().is_success() {
            return Err(anyhow!("Request failed with status: {}", response.status()));
        }
        let json: Value = response.json().await?;
        Ok(json)
    }
    /// Get user's card collection from ai.card service
    pub async fn get_user_cards(&self, user_did: &str) -> Result<Value> {
        let endpoint = format!("api/v1/cards/user/{}", user_did);
        self.call_service_get("ai.card", &endpoint).await
    }
    /// Draw a card for user from ai.card service
    pub async fn draw_card(&self, user_did: &str, is_paid: bool) -> Result<Value> {
        let params = serde_json::json!({
            "user_did": user_did,
            "is_paid": is_paid
        });
        self.call_service_method("ai.card", "api/v1/cards/draw", &params).await
    }
    /// Get card statistics from ai.card service
    pub async fn get_card_stats(&self) -> Result<Value> {
        self.call_service_get("ai.card", "api/v1/cards/gacha-stats").await
    }
    // MARK: - ai.log service methods
    /// Create a new blog post
    pub async fn create_blog_post<T: Serialize>(&self, params: &T) -> Result<Value> {
        self.call_service_method("ai.log", "api/v1/posts", params).await
    }
    /// Get list of blog posts
    pub async fn get_blog_posts(&self) -> Result<Value> {
        self.call_service_get("ai.log", "api/v1/posts").await
    }
    /// Build the blog
    pub async fn build_blog(&self) -> Result<Value> {
        self.call_service_method("ai.log", "api/v1/build", &serde_json::json!({})).await
    }
    /// Translate document using ai.log service
    pub async fn translate_document<T: Serialize>(&self, params: &T) -> Result<Value> {
        self.call_service_method("ai.log", "api/v1/translate", params).await
    }
    /// Generate documentation using ai.log service
    pub async fn generate_docs<T: Serialize>(&self, params: &T) -> Result<Value> {
        self.call_service_method("ai.log", "api/v1/docs", params).await
    }
}
/// Health of a remote service as observed by a status probe.
#[derive(Debug, Clone)]
pub enum ServiceStatus {
    /// The health endpoint answered with a success HTTP status.
    Available,
    /// The service could not be reached (connection-level failure).
    Unavailable(String),
    /// The service responded, but with a non-success HTTP status.
    Error(String),
}

impl ServiceStatus {
    /// True only for [`ServiceStatus::Available`].
    pub fn is_available(&self) -> bool {
        matches!(self, Self::Available)
    }
}
/// Service detector for ai ecosystem services
pub struct ServiceDetector {
    // Client used to probe the well-known service endpoints.
    client: ServiceClient,
}
impl ServiceDetector {
    /// Detector backed by a default-configured ServiceClient.
    pub fn new() -> Self {
        Self {
            client: ServiceClient::new(),
        }
    }
    /// Check all ai ecosystem services
    ///
    /// NOTE(review): this probes ai.log on :8001 and ai.bot on :8002, but
    /// ServiceClient::with_default_services registers ai.log on :8002 and
    /// ai.bot on :8003 — one of the two port tables is wrong; confirm which.
    pub async fn detect_services(&self) -> ServiceMap {
        let mut services = ServiceMap::default();
        // Check ai.card service
        if let Ok(status) = self.client.check_service_status("http://localhost:8000").await {
            services.ai_card = Some(ServiceInfo {
                base_url: "http://localhost:8000".to_string(),
                status,
            });
        }
        // Check ai.log service
        if let Ok(status) = self.client.check_service_status("http://localhost:8001").await {
            services.ai_log = Some(ServiceInfo {
                base_url: "http://localhost:8001".to_string(),
                status,
            });
        }
        // Check ai.bot service
        if let Ok(status) = self.client.check_service_status("http://localhost:8002").await {
            services.ai_bot = Some(ServiceInfo {
                base_url: "http://localhost:8002".to_string(),
                status,
            });
        }
        services
    }
    /// Get available services only (names of services whose probe succeeded).
    pub async fn get_available_services(&self) -> Vec<String> {
        let services = self.detect_services().await;
        let mut available = Vec::new();
        if let Some(card) = &services.ai_card {
            if card.status.is_available() {
                available.push("ai.card".to_string());
            }
        }
        if let Some(log) = &services.ai_log {
            if log.status.is_available() {
                available.push("ai.log".to_string());
            }
        }
        if let Some(bot) = &services.ai_bot {
            if bot.status.is_available() {
                available.push("ai.bot".to_string());
            }
        }
        available
    }
    /// Get card collection statistics
    // NOTE(review): duplicates ServiceClient::get_card_stats with a
    // hard-coded URL; consider delegating to the registry-based method.
    pub async fn get_card_stats(&self) -> Result<serde_json::Value, Box<dyn std::error::Error>> {
        match self.client.get_request("http://localhost:8000/api/v1/cards/gacha-stats").await {
            Ok(stats) => Ok(stats),
            Err(e) => Err(e.into()),
        }
    }
    /// Draw a card for user
    pub async fn draw_card(&self, user_did: &str, is_paid: bool) -> Result<serde_json::Value, Box<dyn std::error::Error>> {
        let payload = serde_json::json!({
            "user_did": user_did,
            "is_paid": is_paid
        });
        match self.client.post_request("http://localhost:8000/api/v1/cards/draw", &payload).await {
            Ok(card) => Ok(card),
            Err(e) => Err(e.into()),
        }
    }
    /// Get user's card collection
    pub async fn get_user_cards(&self, user_did: &str) -> Result<serde_json::Value, Box<dyn std::error::Error>> {
        let url = format!("http://localhost:8000/api/v1/cards/collection?did={}", user_did);
        match self.client.get_request(&url).await {
            Ok(collection) => Ok(collection),
            Err(e) => Err(e.into()),
        }
    }
    /// Get contextual memories for conversation mode
    ///
    /// Stub: always returns an empty list (real version would call the MCP
    /// server).
    pub async fn get_contextual_memories(&self, _user_id: &str, _limit: usize) -> Result<Vec<crate::memory::Memory>, Box<dyn std::error::Error>> {
        // This is a simplified version - in a real implementation this would call the MCP server
        // For now, we'll return an empty vec to make compilation work
        Ok(Vec::new())
    }
    /// Search memories by query
    ///
    /// Stub: always returns an empty list (real version would call the MCP
    /// server).
    pub async fn search_memories(&self, _query: &str, _limit: usize) -> Result<Vec<crate::memory::Memory>, Box<dyn std::error::Error>> {
        // This is a simplified version - in a real implementation this would call the MCP server
        // For now, we'll return an empty vec to make compilation work
        Ok(Vec::new())
    }
    /// Create context summary
    ///
    /// Stub: returns a fixed placeholder string containing the user id.
    pub async fn create_summary(&self, user_id: &str) -> Result<String, Box<dyn std::error::Error>> {
        // This is a simplified version - in a real implementation this would call the MCP server
        // For now, we'll return a placeholder summary
        Ok(format!("Context summary for user: {}", user_id))
    }
}
/// Service information
#[derive(Debug, Clone)]
pub struct ServiceInfo {
    // Root URL the service was probed at.
    pub base_url: String,
    // Last observed health status.
    pub status: ServiceStatus,
}
/// Map of all ai ecosystem services
///
/// Each slot is `None` until `ServiceDetector::detect_services` probes it.
#[derive(Debug, Clone, Default)]
pub struct ServiceMap {
    pub ai_card: Option<ServiceInfo>,
    pub ai_log: Option<ServiceInfo>,
    pub ai_bot: Option<ServiceInfo>,
}
impl ServiceMap {
    /// Look up a service slot by its canonical name ("ai.card", "ai.log",
    /// "ai.bot"); any other name yields `None`.
    pub fn get_service(&self, name: &str) -> Option<&ServiceInfo> {
        let slot = match name {
            "ai.card" => &self.ai_card,
            "ai.log" => &self.ai_log,
            "ai.bot" => &self.ai_bot,
            _ => return None,
        };
        slot.as_ref()
    }
    /// True when the named service was detected AND its last probe reported
    /// it available.
    pub fn is_service_available(&self, name: &str) -> bool {
        self.get_service(name)
            .map_or(false, |info| info.status.is_available())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Smoke test: constructing the client must not panic (the HTTP client
    // builder uses `expect`). The trailing assert is a placeholder.
    #[tokio::test]
    async fn test_service_client_creation() {
        let _client = ServiceClient::new();
        // Basic test to ensure client can be created
        assert!(true);
    }
    // is_available must be true only for the Available variant.
    #[test]
    fn test_service_status() {
        let status = ServiceStatus::Available;
        assert!(status.is_available());
        let status = ServiceStatus::Unavailable("Connection refused".to_string());
        assert!(!status.is_available());
    }
    // Slot lookup and availability reporting on ServiceMap.
    #[test]
    fn test_service_map() {
        let mut map = ServiceMap::default();
        assert!(!map.is_service_available("ai.card"));
        map.ai_card = Some(ServiceInfo {
            base_url: "http://localhost:8000".to_string(),
            status: ServiceStatus::Available,
        });
        assert!(map.is_service_available("ai.card"));
        assert!(!map.is_service_available("ai.log"));
    }
}

View File

@@ -1,331 +0,0 @@
use std::collections::HashMap;
use std::path::PathBuf;
use serde::Deserialize;
use anyhow::{Result, Context};
use colored::*;
use chrono::{DateTime, Utc};
use crate::config::Config;
use crate::persona::Persona;
use crate::memory::{Memory, MemoryType};
/// CLI entry point for importing a ChatGPT conversation export (JSON file)
/// into the persona's memory, then printing import statistics and the
/// resulting relationship state.
///
/// `user_id` defaults to "imported_user" when not supplied.
pub async fn handle_import_chatgpt(
    file_path: PathBuf,
    user_id: Option<String>,
    data_dir: Option<PathBuf>,
) -> Result<()> {
    let config = Config::new(data_dir)?;
    let mut persona = Persona::new(&config)?;
    let user_id = user_id.unwrap_or_else(|| "imported_user".to_string());
    println!("{}", "🚀 Starting ChatGPT Import...".cyan().bold());
    println!("File: {}", file_path.display().to_string().yellow());
    println!("User ID: {}", user_id.yellow());
    println!();
    let mut importer = ChatGPTImporter::new(user_id);
    let stats = importer.import_from_file(&file_path, &mut persona).await?;
    // Display import statistics
    println!("\n{}", "📊 Import Statistics".green().bold());
    println!("Conversations imported: {}", stats.conversations_imported.to_string().cyan());
    println!("Messages imported: {}", stats.messages_imported.to_string().cyan());
    println!("  - User messages: {}", stats.user_messages.to_string().yellow());
    println!("  - Assistant messages: {}", stats.assistant_messages.to_string().yellow());
    // Only mention skipped messages when there were any.
    if stats.skipped_messages > 0 {
        println!("  - Skipped messages: {}", stats.skipped_messages.to_string().red());
    }
    // Show updated relationship
    if let Some(relationship) = persona.get_relationship(&importer.user_id) {
        println!("\n{}", "👥 Updated Relationship".blue().bold());
        println!("Status: {}", relationship.status.to_string().yellow());
        println!("Score: {:.2} / {}", relationship.score, relationship.threshold);
        println!("Transmission enabled: {}",
            if relationship.transmission_enabled { "".green() } else { "".red() });
    }
    println!("\n{}", "✅ ChatGPT import completed successfully!".green().bold());
    Ok(())
}
/// Aggregate counters for one ChatGPT import run.
#[derive(Debug, Clone)]
pub struct ImportStats {
    // Conversations that imported without error (failures are warned and skipped)
    pub conversations_imported: usize,
    // Total messages stored (user + assistant)
    pub messages_imported: usize,
    pub user_messages: usize,
    pub assistant_messages: usize,
    // Messages whose processing failed (e.g. unsupported role)
    pub skipped_messages: usize,
}
impl Default for ImportStats {
fn default() -> Self {
ImportStats {
conversations_imported: 0,
messages_imported: 0,
user_messages: 0,
assistant_messages: 0,
skipped_messages: 0,
}
}
}
/// Stateful importer: holds the target user id and running statistics
/// while walking a ChatGPT export.
pub struct ChatGPTImporter {
    user_id: String,
    stats: ImportStats,
}
impl ChatGPTImporter {
    /// Create an importer that attributes all imported messages to `user_id`.
    pub fn new(user_id: String) -> Self {
        ChatGPTImporter {
            user_id,
            stats: ImportStats::default(),
        }
    }

    /// Import every conversation from a ChatGPT export JSON file into
    /// `persona`. The file must be a JSON array of conversations (the
    /// standard `conversations.json` shape). Individual conversation
    /// failures are printed as warnings and do not abort the run.
    /// Returns the aggregate statistics.
    pub async fn import_from_file(&mut self, file_path: &PathBuf, persona: &mut Persona) -> Result<ImportStats> {
        // Read and parse the JSON file
        let content = std::fs::read_to_string(file_path)
            .with_context(|| format!("Failed to read file: {}", file_path.display()))?;
        let conversations: Vec<ChatGPTConversation> = serde_json::from_str(&content)
            .context("Failed to parse ChatGPT export JSON")?;
        println!("Found {} conversations to import", conversations.len());
        // Import each conversation, with a progress line every 10 items
        for (i, conversation) in conversations.iter().enumerate() {
            if i % 10 == 0 && i > 0 {
                println!("Processed {} / {} conversations...", i, conversations.len());
            }
            match self.import_single_conversation(conversation, persona).await {
                Ok(_) => {
                    self.stats.conversations_imported += 1;
                }
                Err(e) => {
                    // A malformed conversation is reported but not fatal
                    println!("{}: Failed to import conversation '{}': {}",
                            "Warning".yellow(),
                            conversation.title.as_deref().unwrap_or("Untitled"),
                            e);
                }
            }
        }
        Ok(self.stats.clone())
    }

    /// Import one conversation: flatten its node mapping into time-ordered
    /// messages, then feed each into the persona's memory.
    async fn import_single_conversation(&mut self, conversation: &ChatGPTConversation, persona: &mut Persona) -> Result<()> {
        // Extract messages from the mapping structure
        let messages = self.extract_messages_from_mapping(&conversation.mapping)?;
        if messages.is_empty() {
            return Ok(());
        }
        // Process each message; failures count as skips, not errors
        for message in messages {
            match self.process_message(&message, persona).await {
                Ok(_) => {
                    self.stats.messages_imported += 1;
                }
                Err(_) => {
                    self.stats.skipped_messages += 1;
                }
            }
        }
        Ok(())
    }

    /// Flatten the export's node graph into a flat, time-sorted list of
    /// user/assistant messages.
    ///
    /// Handles three content types: "text" (string parts joined by newline),
    /// "multimodal_text" (only the textual parts are kept; image pointers
    /// etc. are dropped), and "user_editable_context" (user instructions /
    /// profile). All other content types and roles (system, tool, ...) are
    /// skipped.
    /// NOTE(review): HashMap iteration order is arbitrary; ordering relies
    /// entirely on the create_time sort at the end, and messages lacking a
    /// create_time sort as 0.0, i.e. first.
    fn extract_messages_from_mapping(&self, mapping: &HashMap<String, ChatGPTNode>) -> Result<Vec<ChatGPTMessage>> {
        let mut messages = Vec::new();
        // Find all message nodes and collect them
        for node in mapping.values() {
            if let Some(message) = &node.message {
                // Skip system messages and other non-user/assistant messages
                if let Some(role) = &message.author.role {
                    match role.as_str() {
                        "user" | "assistant" => {
                            if let Some(content) = &message.content {
                                let content_text = if content.content_type == "text" && !content.parts.is_empty() {
                                    // Extract text from parts (handle both strings and mixed content)
                                    content.parts.iter()
                                        .filter_map(|part| part.as_str())
                                        .collect::<Vec<&str>>()
                                        .join("\n")
                                } else if content.content_type == "multimodal_text" {
                                    // Extract text parts from multimodal content
                                    let mut text_parts = Vec::new();
                                    for part in &content.parts {
                                        if let Some(text) = part.as_str() {
                                            if !text.is_empty() {
                                                text_parts.push(text);
                                            }
                                        }
                                        // Skip non-text parts (like image_asset_pointer)
                                    }
                                    if text_parts.is_empty() {
                                        continue; // Skip if no text content
                                    }
                                    text_parts.join("\n")
                                } else if content.content_type == "user_editable_context" {
                                    // Handle user context messages
                                    if let Some(instructions) = &content.user_instructions {
                                        format!("User instructions: {}", instructions)
                                    } else if let Some(profile) = &content.user_profile {
                                        format!("User profile: {}", profile)
                                    } else {
                                        continue; // Skip empty context messages
                                    }
                                } else {
                                    continue; // Skip other content types for now
                                };
                                // Whitespace-only messages carry no information
                                if !content_text.trim().is_empty() {
                                    messages.push(ChatGPTMessage {
                                        role: role.clone(),
                                        content: content_text,
                                        create_time: message.create_time,
                                    });
                                }
                            }
                        }
                        _ => {} // Skip system, tool, etc.
                    }
                }
            }
        }
        // Sort messages by creation time
        messages.sort_by(|a, b| {
            let time_a = a.create_time.unwrap_or(0.0);
            let time_b = b.create_time.unwrap_or(0.0);
            time_a.partial_cmp(&time_b).unwrap_or(std::cmp::Ordering::Equal)
        });
        Ok(messages)
    }

    /// Route one message to the role-specific handler and bump the matching
    /// counter. Unknown roles are an error (caller records them as skipped).
    async fn process_message(&mut self, message: &ChatGPTMessage, persona: &mut Persona) -> Result<()> {
        let timestamp = self.convert_timestamp(message.create_time.unwrap_or(0.0))?;
        match message.role.as_str() {
            "user" => {
                self.add_user_message(&message.content, timestamp, persona)?;
                self.stats.user_messages += 1;
            }
            "assistant" => {
                self.add_assistant_message(&message.content, timestamp, persona)?;
                self.stats.assistant_messages += 1;
            }
            _ => {
                return Err(anyhow::anyhow!("Unsupported message role: {}", message.role));
            }
        }
        Ok(())
    }

    /// Store a user message as a high-importance Core memory and give the
    /// relationship a positive boost (+1.0 per imported user message).
    fn add_user_message(&self, content: &str, timestamp: DateTime<Utc>, persona: &mut Persona) -> Result<()> {
        // Create high-importance memory for user messages
        let memory = Memory {
            id: uuid::Uuid::new_v4().to_string(),
            user_id: self.user_id.clone(),
            content: content.to_string(),
            summary: None,
            importance: 0.8, // High importance for imported user data
            memory_type: MemoryType::Core,
            created_at: timestamp,
            last_accessed: timestamp,
            access_count: 1,
        };
        // Add memory and update relationship
        persona.add_memory(memory)?;
        persona.update_relationship(&self.user_id, 1.0)?; // Positive relationship boost
        Ok(())
    }

    /// Store an assistant reply as a medium-importance Summary memory,
    /// prefixed with "[AI Response]" to distinguish it from user content.
    fn add_assistant_message(&self, content: &str, timestamp: DateTime<Utc>, persona: &mut Persona) -> Result<()> {
        // Create medium-importance memory for assistant responses
        let memory = Memory {
            id: uuid::Uuid::new_v4().to_string(),
            user_id: self.user_id.clone(),
            content: format!("[AI Response] {}", content),
            summary: Some("Imported ChatGPT response".to_string()),
            importance: 0.6, // Medium importance for AI responses
            memory_type: MemoryType::Summary,
            created_at: timestamp,
            last_accessed: timestamp,
            access_count: 1,
        };
        persona.add_memory(memory)?;
        Ok(())
    }

    /// Convert a Unix timestamp (seconds, possibly fractional) into UTC.
    /// Non-positive values (absent create_time) fall back to "now"; a value
    /// out of chrono's representable range is an error.
    fn convert_timestamp(&self, unix_timestamp: f64) -> Result<DateTime<Utc>> {
        if unix_timestamp <= 0.0 {
            return Ok(Utc::now());
        }
        DateTime::from_timestamp(
            unix_timestamp as i64,
            ((unix_timestamp % 1.0) * 1_000_000_000.0) as u32
        ).ok_or_else(|| anyhow::anyhow!("Invalid timestamp: {}", unix_timestamp))
    }
}
// ChatGPT Export Data Structures
/// One conversation from `conversations.json`: a title plus a graph of
/// message nodes keyed by node id.
#[derive(Debug, Deserialize)]
pub struct ChatGPTConversation {
    pub title: Option<String>,
    pub create_time: Option<f64>,
    pub mapping: HashMap<String, ChatGPTNode>,
}
/// One node in the conversation graph; `message` is absent for structural
/// (root) nodes. Parent/children links are currently unused by the importer.
#[derive(Debug, Deserialize)]
pub struct ChatGPTNode {
    pub id: Option<String>,
    pub message: Option<ChatGPTNodeMessage>,
    pub parent: Option<String>,
    pub children: Vec<String>,
}
/// The message payload stored on a node: author role, creation time
/// (Unix seconds, fractional), and typed content.
#[derive(Debug, Deserialize)]
pub struct ChatGPTNodeMessage {
    pub id: String,
    pub author: ChatGPTAuthor,
    pub create_time: Option<f64>,
    pub content: Option<ChatGPTContent>,
}
/// Message author; `role` is "user", "assistant", "system", "tool", etc.
#[derive(Debug, Deserialize)]
pub struct ChatGPTAuthor {
    pub role: Option<String>,
    pub name: Option<String>,
}
/// Typed message content. `parts` holds strings for "text", mixed JSON
/// values for "multimodal_text"; the profile/instructions fields appear
/// only for "user_editable_context".
#[derive(Debug, Deserialize)]
pub struct ChatGPTContent {
    pub content_type: String,
    #[serde(default)]
    pub parts: Vec<serde_json::Value>,
    #[serde(default)]
    pub user_profile: Option<String>,
    #[serde(default)]
    pub user_instructions: Option<String>,
}
// Simplified message structure for processing
/// Flattened message the importer actually works with after walking the
/// node graph: role, joined text content, and the original create_time.
#[derive(Debug, Clone)]
pub struct ChatGPTMessage {
    pub role: String,
    pub content: String,
    pub create_time: Option<f64>,
}

View File

@@ -1,20 +0,0 @@
#![allow(dead_code)]
pub mod ai_provider;
pub mod cli;
pub mod config;
pub mod conversation;
pub mod docs;
pub mod http_client;
pub mod import;
pub mod mcp_server;
pub mod memory;
pub mod openai_provider;
pub mod persona;
pub mod relationship;
pub mod scheduler;
pub mod shell;
pub mod status;
pub mod submodules;
pub mod tokens;
pub mod transmission;

View File

@@ -1,251 +1,58 @@
#![allow(dead_code)] // main.rs
use clap::{Parser, Subcommand};
use std::path::PathBuf;
mod ai_provider;
mod cli; mod cli;
use cli::TokenCommands;
mod config; mod config;
mod conversation; mod mcp;
mod docs;
mod http_client;
mod import;
mod mcp_server;
mod memory;
mod openai_provider;
mod persona;
mod relationship;
mod scheduler;
mod shell;
mod status;
mod submodules;
mod tokens;
mod transmission;
#[derive(Parser)] use cli::{Args, Commands, ServerCommands, MemoryCommands};
#[command(name = "aigpt")] use clap::Parser;
#[command(about = "AI.GPT - Autonomous transmission AI with unique personality")]
#[command(version)]
struct Cli {
#[command(subcommand)]
command: Commands,
}
#[derive(Subcommand)]
enum Commands {
/// Check AI status and relationships
Status {
/// User ID to check status for
user_id: Option<String>,
/// Data directory
#[arg(short, long)]
data_dir: Option<PathBuf>,
},
/// Chat with the AI
Chat {
/// User ID (atproto DID)
user_id: String,
/// Message to send to AI
message: String,
/// Data directory
#[arg(short, long)]
data_dir: Option<PathBuf>,
/// AI model to use
#[arg(short, long)]
model: Option<String>,
/// AI provider (ollama/openai)
#[arg(long)]
provider: Option<String>,
},
/// Start continuous conversation mode with MCP integration
Conversation {
/// User ID (atproto DID)
user_id: String,
/// Data directory
#[arg(short, long)]
data_dir: Option<PathBuf>,
/// AI model to use
#[arg(short, long)]
model: Option<String>,
/// AI provider (ollama/openai)
#[arg(long)]
provider: Option<String>,
},
/// Start continuous conversation mode with MCP integration (alias)
Conv {
/// User ID (atproto DID)
user_id: String,
/// Data directory
#[arg(short, long)]
data_dir: Option<PathBuf>,
/// AI model to use
#[arg(short, long)]
model: Option<String>,
/// AI provider (ollama/openai)
#[arg(long)]
provider: Option<String>,
},
/// Check today's AI fortune
Fortune {
/// Data directory
#[arg(short, long)]
data_dir: Option<PathBuf>,
},
/// List all relationships
Relationships {
/// Data directory
#[arg(short, long)]
data_dir: Option<PathBuf>,
},
/// Check and send autonomous transmissions
Transmit {
/// Data directory
#[arg(short, long)]
data_dir: Option<PathBuf>,
},
/// Run daily maintenance tasks
Maintenance {
/// Data directory
#[arg(short, long)]
data_dir: Option<PathBuf>,
},
/// Run scheduled tasks
Schedule {
/// Data directory
#[arg(short, long)]
data_dir: Option<PathBuf>,
},
/// Start MCP server
Server {
/// Port to listen on
#[arg(short, long, default_value = "8080")]
port: u16,
/// Data directory
#[arg(short, long)]
data_dir: Option<PathBuf>,
},
/// Interactive shell mode
Shell {
/// User ID (atproto DID)
user_id: String,
/// Data directory
#[arg(short, long)]
data_dir: Option<PathBuf>,
/// AI model to use
#[arg(short, long)]
model: Option<String>,
/// AI provider (ollama/openai)
#[arg(long)]
provider: Option<String>,
},
/// Import ChatGPT conversation data
ImportChatgpt {
/// Path to ChatGPT export JSON file
file_path: PathBuf,
/// User ID for imported conversations
#[arg(short, long)]
user_id: Option<String>,
/// Data directory
#[arg(short, long)]
data_dir: Option<PathBuf>,
},
/// Documentation management
Docs {
/// Action to perform (generate, sync, list, status)
action: String,
/// Project name for generate/sync actions
#[arg(short, long)]
project: Option<String>,
/// Output path for generated documentation
#[arg(short, long)]
output: Option<PathBuf>,
/// Enable AI integration for documentation enhancement
#[arg(long)]
ai_integration: bool,
/// Data directory
#[arg(short, long)]
data_dir: Option<PathBuf>,
},
/// Submodule management
Submodules {
/// Action to perform (list, update, status)
action: String,
/// Specific module to update
#[arg(short, long)]
module: Option<String>,
/// Update all submodules
#[arg(long)]
all: bool,
/// Show what would be done without making changes
#[arg(long)]
dry_run: bool,
/// Auto-commit changes after update
#[arg(long)]
auto_commit: bool,
/// Show verbose output
#[arg(short, long)]
verbose: bool,
/// Data directory
#[arg(short, long)]
data_dir: Option<PathBuf>,
},
/// Token usage analysis and cost estimation
Tokens {
#[command(subcommand)]
command: TokenCommands,
},
}
#[tokio::main] #[tokio::main]
async fn main() -> anyhow::Result<()> { async fn main() {
let cli = Cli::parse(); let args = Args::parse();
match cli.command { match args.command {
Commands::Status { user_id, data_dir } => { Commands::Server { command } => {
status::handle_status(user_id, data_dir).await match command {
ServerCommands::Setup => {
mcp::server::setup();
} }
Commands::Chat { user_id, message, data_dir, model, provider } => { ServerCommands::Run => {
cli::handle_chat(user_id, message, data_dir, model, provider).await mcp::server::run().await;
} }
Commands::Conversation { user_id, data_dir, model, provider } => {
conversation::handle_conversation(user_id, data_dir, model, provider).await
} }
Commands::Conv { user_id, data_dir, model, provider } => {
conversation::handle_conversation(user_id, data_dir, model, provider).await
} }
Commands::Fortune { data_dir } => { Commands::Chat { message, with_memory } => {
cli::handle_fortune(data_dir).await if with_memory {
if let Err(e) = mcp::memory::handle_chat_with_memory(&message).await {
eprintln!("❌ 記憶チャットエラー: {}", e);
} }
Commands::Relationships { data_dir } => { } else {
cli::handle_relationships(data_dir).await mcp::server::chat(&message).await;
} }
Commands::Transmit { data_dir } => {
cli::handle_transmit(data_dir).await
} }
Commands::Maintenance { data_dir } => { Commands::Memory { command } => {
cli::handle_maintenance(data_dir).await match command {
MemoryCommands::Import { file } => {
if let Err(e) = mcp::memory::handle_import(&file).await {
eprintln!("❌ インポートエラー: {}", e);
} }
Commands::Schedule { data_dir } => {
cli::handle_schedule(data_dir).await
} }
Commands::Server { port, data_dir } => { MemoryCommands::Search { query, limit } => {
cli::handle_server(Some(port), data_dir).await if let Err(e) = mcp::memory::handle_search(&query, limit).await {
eprintln!("❌ 検索エラー: {}", e);
} }
Commands::Shell { user_id, data_dir, model, provider } => {
shell::handle_shell(user_id, data_dir, model, provider).await
} }
Commands::ImportChatgpt { file_path, user_id, data_dir } => { MemoryCommands::List => {
import::handle_import_chatgpt(file_path, user_id, data_dir).await if let Err(e) = mcp::memory::handle_list().await {
eprintln!("❌ 一覧取得エラー: {}", e);
}
}
MemoryCommands::Detail { filepath } => {
if let Err(e) = mcp::memory::handle_detail(&filepath).await {
eprintln!("❌ 詳細取得エラー: {}", e);
} }
Commands::Docs { action, project, output, ai_integration, data_dir } => {
docs::handle_docs(action, project, output, ai_integration, data_dir).await
} }
Commands::Submodules { action, module, all, dry_run, auto_commit, verbose, data_dir } => {
submodules::handle_submodules(action, module, all, dry_run, auto_commit, verbose, data_dir).await
} }
Commands::Tokens { command } => {
tokens::handle_tokens(command).await
} }
} }
} }

393
src/mcp/memory.rs Normal file
View File

@@ -0,0 +1,393 @@
// src/mcp/memory.rs
use reqwest;
use serde::{Deserialize, Serialize};
use serde_json::{self, Value};
use std::fs;
use std::path::Path;
/// Request body for POST /memory/search.
#[derive(Debug, Serialize, Deserialize)]
pub struct MemorySearchRequest {
    pub query: String,
    pub limit: usize,
}
/// Request body for POST /chat; `model: None` lets the server pick.
#[derive(Debug, Serialize, Deserialize)]
pub struct ChatRequest {
    pub message: String,
    pub model: Option<String>,
}
/// Request body for POST /memory/import/chatgpt: one raw conversation JSON.
#[derive(Debug, Serialize, Deserialize)]
pub struct ConversationImportRequest {
    pub conversation_data: Value,
}
/// Catch-all response envelope shared by every server endpoint; each
/// endpoint populates only the fields relevant to it, so everything is
/// optional except `success`.
#[derive(Debug, Deserialize)]
pub struct ApiResponse {
    pub success: bool,
    pub error: Option<String>,
    #[allow(dead_code)]
    pub message: Option<String>,
    pub filepath: Option<String>,
    // /memory/search results
    pub results: Option<Vec<MemoryResult>>,
    // /memory/list results
    pub memories: Option<Vec<MemoryResult>>,
    #[allow(dead_code)]
    pub count: Option<usize>,
    // /memory/detail payload (free-form JSON)
    pub memory: Option<Value>,
    // /chat reply text and how many memories informed it
    pub response: Option<String>,
    pub memories_used: Option<usize>,
    // bulk-import counters (synthesized client-side)
    pub imported_count: Option<usize>,
    pub total_count: Option<usize>,
}
/// One stored memory as summarized by the server's search/list endpoints.
#[derive(Debug, Deserialize)]
pub struct MemoryResult {
    #[allow(dead_code)]
    pub filepath: String,
    pub title: Option<String>,
    pub summary: Option<String>,
    pub source: Option<String>,
    pub import_time: Option<String>,
    pub message_count: Option<usize>,
}
/// Thin HTTP client for the Python MCP memory server.
pub struct MemoryClient {
    base_url: String,
    client: reqwest::Client,
}
impl MemoryClient {
    /// Create a client; `None` defaults to the local server at 127.0.0.1:5000.
    pub fn new(base_url: Option<String>) -> Self {
        let url = base_url.unwrap_or_else(|| "http://127.0.0.1:5000".to_string());
        Self {
            base_url: url,
            client: reqwest::Client::new(),
        }
    }

    /// Import a ChatGPT export file. Accepts either a JSON array of
    /// conversations (imported one by one, failures logged and counted)
    /// or a single conversation object. For the array case the returned
    /// ApiResponse is synthesized locally with aggregate counts.
    pub async fn import_chatgpt_file(&self, filepath: &str) -> Result<ApiResponse, Box<dyn std::error::Error>> {
        // Read the file from disk
        let content = fs::read_to_string(filepath)?;
        let json_data: Value = serde_json::from_str(&content)?;
        // Array of conversations, or a single one?
        match json_data.as_array() {
            Some(conversations) => {
                // Import multiple conversations
                let mut imported_count = 0;
                let total_count = conversations.len();
                for conversation in conversations {
                    match self.import_single_conversation(conversation.clone()).await {
                        Ok(response) => {
                            if response.success {
                                imported_count += 1;
                            }
                        }
                        Err(e) => {
                            eprintln!("❌ インポートエラー: {}", e);
                        }
                    }
                }
                // Aggregate response is built client-side, not returned by the server
                Ok(ApiResponse {
                    success: true,
                    imported_count: Some(imported_count),
                    total_count: Some(total_count),
                    error: None,
                    message: Some(format!("{}個中{}個の会話をインポートしました", total_count, imported_count)),
                    filepath: None,
                    results: None,
                    memories: None,
                    count: None,
                    memory: None,
                    response: None,
                    memories_used: None,
                })
            }
            None => {
                // Import a single conversation
                self.import_single_conversation(json_data).await
            }
        }
    }

    /// POST one conversation to /memory/import/chatgpt.
    async fn import_single_conversation(&self, conversation_data: Value) -> Result<ApiResponse, Box<dyn std::error::Error>> {
        let request = ConversationImportRequest { conversation_data };
        let response = self.client
            .post(&format!("{}/memory/import/chatgpt", self.base_url))
            .json(&request)
            .send()
            .await?;
        let result: ApiResponse = response.json().await?;
        Ok(result)
    }

    /// POST /memory/search: full-text search over stored memories.
    pub async fn search_memories(&self, query: &str, limit: usize) -> Result<ApiResponse, Box<dyn std::error::Error>> {
        let request = MemorySearchRequest {
            query: query.to_string(),
            limit,
        };
        let response = self.client
            .post(&format!("{}/memory/search", self.base_url))
            .json(&request)
            .send()
            .await?;
        let result: ApiResponse = response.json().await?;
        Ok(result)
    }

    /// GET /memory/list: every stored memory summary.
    pub async fn list_memories(&self) -> Result<ApiResponse, Box<dyn std::error::Error>> {
        let response = self.client
            .get(&format!("{}/memory/list", self.base_url))
            .send()
            .await?;
        let result: ApiResponse = response.json().await?;
        Ok(result)
    }

    /// GET /memory/detail?filepath=…: full content of one stored memory.
    pub async fn get_memory_detail(&self, filepath: &str) -> Result<ApiResponse, Box<dyn std::error::Error>> {
        let response = self.client
            .get(&format!("{}/memory/detail", self.base_url))
            .query(&[("filepath", filepath)])
            .send()
            .await?;
        let result: ApiResponse = response.json().await?;
        Ok(result)
    }

    /// POST /chat: ask the server to answer using stored memories.
    pub async fn chat_with_memory(&self, message: &str) -> Result<ApiResponse, Box<dyn std::error::Error>> {
        let request = ChatRequest {
            message: message.to_string(),
            model: None,
        };
        let response = self.client
            .post(&format!("{}/chat", self.base_url))
            .json(&request)
            .send()
            .await?;
        let result: ApiResponse = response.json().await?;
        Ok(result)
    }

    /// Probe the server root; any successful HTTP status counts as "up".
    pub async fn is_server_running(&self) -> bool {
        match self.client.get(&self.base_url).send().await {
            Ok(response) => response.status().is_success(),
            Err(_) => false,
        }
    }
}
/// CLI handler: import a ChatGPT export file through the MCP server.
/// A missing file or a down server is reported to stderr but still
/// returns Ok(()) so the CLI exits cleanly.
pub async fn handle_import(filepath: &str) -> Result<(), Box<dyn std::error::Error>> {
    if !Path::new(filepath).exists() {
        eprintln!("❌ ファイルが見つかりません: {}", filepath);
        return Ok(());
    }
    let client = MemoryClient::new(None);
    // Verify the server is reachable before sending anything
    if !client.is_server_running().await {
        eprintln!("❌ MCP Serverが起動していません。先に 'aigpt server run' を実行してください。");
        return Ok(());
    }
    println!("🔄 ChatGPT会話をインポートしています: {}", filepath);
    match client.import_chatgpt_file(filepath).await {
        Ok(response) => {
            if response.success {
                // Bulk import reports counts; single import may report a filepath
                if let (Some(imported), Some(total)) = (response.imported_count, response.total_count) {
                    println!("{}個中{}個の会話をインポートしました", total, imported);
                } else {
                    println!("✅ 会話をインポートしました");
                    if let Some(path) = response.filepath {
                        println!("📁 保存先: {}", path);
                    }
                }
            } else {
                eprintln!("❌ インポートに失敗: {:?}", response.error);
            }
        }
        Err(e) => {
            eprintln!("❌ インポートエラー: {}", e);
        }
    }
    Ok(())
}
/// CLI handler: search stored memories and print up to `limit` matches.
/// Server-down and request failures go to stderr; always returns Ok(()).
pub async fn handle_search(query: &str, limit: usize) -> Result<(), Box<dyn std::error::Error>> {
    let client = MemoryClient::new(None);
    if !client.is_server_running().await {
        eprintln!("❌ MCP Serverが起動していません。先に 'aigpt server run' を実行してください。");
        return Ok(());
    }
    println!("🔍 記憶を検索しています: {}", query);
    match client.search_memories(query, limit).await {
        Ok(response) => {
            if response.success {
                if let Some(results) = response.results {
                    println!("📚 {}個の記憶が見つかりました:", results.len());
                    for memory in results {
                        // Each entry: title line plus optional summary/count
                        println!("{}", memory.title.unwrap_or_else(|| "タイトルなし".to_string()));
                        if let Some(summary) = memory.summary {
                            println!(" 概要: {}", summary);
                        }
                        if let Some(count) = memory.message_count {
                            println!(" メッセージ数: {}", count);
                        }
                        println!();
                    }
                } else {
                    println!("📚 記憶が見つかりませんでした");
                }
            } else {
                eprintln!("❌ 検索に失敗: {:?}", response.error);
            }
        }
        Err(e) => {
            eprintln!("❌ 検索エラー: {}", e);
        }
    }
    Ok(())
}
/// CLI handler: list every stored memory with source / count / import time.
/// Server-down and request failures go to stderr; always returns Ok(()).
pub async fn handle_list() -> Result<(), Box<dyn std::error::Error>> {
    let client = MemoryClient::new(None);
    if !client.is_server_running().await {
        eprintln!("❌ MCP Serverが起動していません。先に 'aigpt server run' を実行してください。");
        return Ok(());
    }
    println!("📋 記憶一覧を取得しています...");
    match client.list_memories().await {
        Ok(response) => {
            if response.success {
                if let Some(memories) = response.memories {
                    println!("📚 総記憶数: {}", memories.len());
                    for memory in memories {
                        // Each entry: title line plus whichever metadata is present
                        println!("{}", memory.title.unwrap_or_else(|| "タイトルなし".to_string()));
                        if let Some(source) = memory.source {
                            println!(" ソース: {}", source);
                        }
                        if let Some(count) = memory.message_count {
                            println!(" メッセージ数: {}", count);
                        }
                        if let Some(import_time) = memory.import_time {
                            println!(" インポート時刻: {}", import_time);
                        }
                        println!();
                    }
                } else {
                    println!("📚 記憶がありません");
                }
            } else {
                eprintln!("❌ 一覧取得に失敗: {:?}", response.error);
            }
        }
        Err(e) => {
            eprintln!("❌ 一覧取得エラー: {}", e);
        }
    }
    Ok(())
}
/// CLI handler: show metadata and the first five messages of one stored
/// memory. Server-down and request failures go to stderr; always Ok(()).
pub async fn handle_detail(filepath: &str) -> Result<(), Box<dyn std::error::Error>> {
    let client = MemoryClient::new(None);
    if !client.is_server_running().await {
        eprintln!("❌ MCP Serverが起動していません。先に 'aigpt server run' を実行してください。");
        return Ok(());
    }
    println!("📄 記憶の詳細を取得しています: {}", filepath);
    match client.get_memory_detail(filepath).await {
        Ok(response) => {
            if response.success {
                if let Some(memory) = response.memory {
                    if let Some(title) = memory.get("title").and_then(|v| v.as_str()) {
                        println!("タイトル: {}", title);
                    }
                    if let Some(source) = memory.get("source").and_then(|v| v.as_str()) {
                        println!("ソース: {}", source);
                    }
                    if let Some(summary) = memory.get("summary").and_then(|v| v.as_str()) {
                        println!("概要: {}", summary);
                    }
                    if let Some(messages) = memory.get("messages").and_then(|v| v.as_array()) {
                        println!("メッセージ数: {}", messages.len());
                        println!("\n最近のメッセージ:");
                        for msg in messages.iter().take(5) {
                            if let (Some(role), Some(content)) = (
                                msg.get("role").and_then(|v| v.as_str()),
                                msg.get("content").and_then(|v| v.as_str())
                            ) {
                                // Fix: truncate on char boundaries. The previous
                                // byte slice (&content[..100]) panics when index
                                // 100 lands inside a multi-byte UTF-8 character,
                                // which is common for Japanese content.
                                let content_preview: String = if content.chars().count() > 100 {
                                    let head: String = content.chars().take(100).collect();
                                    format!("{}...", head)
                                } else {
                                    content.to_string()
                                };
                                println!(" {}: {}", role, content_preview);
                            }
                        }
                    }
                }
            } else {
                eprintln!("❌ 詳細取得に失敗: {:?}", response.error);
            }
        }
        Err(e) => {
            eprintln!("❌ 詳細取得エラー: {}", e);
        }
    }
    Ok(())
}
/// CLI handler: send one message to the server's memory-aware chat
/// endpoint and print the reply plus how many memories informed it.
pub async fn handle_chat_with_memory(message: &str) -> Result<(), Box<dyn std::error::Error>> {
    let client = MemoryClient::new(None);
    if !client.is_server_running().await {
        eprintln!("❌ MCP Serverが起動していません。先に 'aigpt server run' を実行してください。");
        return Ok(());
    }
    println!("💬 記憶を活用してチャットしています...");
    match client.chat_with_memory(message).await {
        Err(e) => eprintln!("❌ チャットエラー: {}", e),
        Ok(response) if !response.success => {
            eprintln!("❌ チャットに失敗: {:?}", response.error);
        }
        Ok(response) => {
            if let Some(reply) = response.response {
                println!("🤖 {}", reply);
            }
            if let Some(memories_used) = response.memories_used {
                println!("📚 使用した記憶数: {}", memories_used);
            }
        }
    }
    Ok(())
}

3
src/mcp/mod.rs Normal file
View File

@@ -0,0 +1,3 @@
// src/mcp/mod.rs
pub mod server;
pub mod memory;

147
src/mcp/server.rs Normal file
View File

@@ -0,0 +1,147 @@
// src/mcp/server.rs
use crate::config::ConfigPaths;
//use std::fs;
use std::process::Command as OtherCommand;
use std::env;
use fs_extra::dir::{copy, CopyOptions};
/// Set up the Python MCP server environment under the config directory:
/// copy the project's `mcp/` tree, create a virtualenv if needed, and
/// install requirements.
///
/// Fix: error handling was inconsistent — some failures printed to stderr
/// and returned, while cwd lookup, directory removal, copy, venv creation
/// and pip invocation all panicked via `expect()`. Every failure now
/// reports to stderr and aborts the setup gracefully.
pub fn setup() {
    println!("🔧 MCP Server環境をセットアップしています...");
    let config = ConfigPaths::new();
    let mcp_dir = config.mcp_dir();
    // Copy files from the project's mcp/ directory
    let current_dir = match env::current_dir() {
        Ok(dir) => dir,
        Err(e) => {
            eprintln!("❌ 現在のディレクトリを取得できません: {}", e);
            return;
        }
    };
    let project_mcp_dir = current_dir.join("mcp");
    if !project_mcp_dir.exists() {
        eprintln!("❌ プロジェクトのmcp/ディレクトリが見つかりません: {}", project_mcp_dir.display());
        return;
    }
    // Replace any previous installation wholesale
    if mcp_dir.exists() {
        if let Err(e) = fs_extra::dir::remove(&mcp_dir) {
            eprintln!("❌ 既存のmcp_dirの削除に失敗しました: {}", e);
            return;
        }
    }
    let mut options = CopyOptions::new();
    options.overwrite = true; // overwrite existing files
    options.copy_inside = true; // copy the directory's contents, not the directory itself
    if let Err(e) = copy(&project_mcp_dir, &mcp_dir, &options) {
        eprintln!("❌ コピーに失敗しました: {}", e);
        return;
    }
    // Create the virtual environment only if it doesn't exist yet
    let venv_path = config.venv_path();
    if !venv_path.exists() {
        println!("🐍 仮想環境を作成しています...");
        let output = match OtherCommand::new("python3")
            .args(&["-m", "venv", ".venv"])
            .current_dir(&mcp_dir)
            .output()
        {
            Ok(output) => output,
            Err(e) => {
                eprintln!("❌ venvの作成に失敗しました: {}", e);
                return;
            }
        };
        if !output.status.success() {
            eprintln!("❌ venv作成エラー: {}", String::from_utf8_lossy(&output.stderr));
            return;
        }
        println!("✅ 仮想環境を作成しました");
    } else {
        println!("✅ 仮想環境は既に存在します");
    }
    // Install Python dependencies into the venv
    println!("📦 依存関係をインストールしています...");
    let pip_path = config.pip_executable();
    let output = match OtherCommand::new(&pip_path)
        .args(&["install", "-r", "requirements.txt"])
        .current_dir(&mcp_dir)
        .output()
    {
        Ok(output) => output,
        Err(e) => {
            eprintln!("❌ pipコマンドの実行に失敗しました: {}", e);
            return;
        }
    };
    if !output.status.success() {
        eprintln!("❌ pip installエラー: {}", String::from_utf8_lossy(&output.stderr));
        return;
    }
    println!("✅ MCP Server環境のセットアップが完了しました!");
    println!("📍 セットアップ場所: {}", mcp_dir.display());
}
/// Launch the Python MCP server (server.py) in the configured environment
/// and block until it exits (Ctrl+C to stop).
///
/// Fix: a spawn failure previously panicked via `expect()`; it is now
/// reported to stderr like every other failure path in this function.
pub async fn run() {
    println!("🚀 MCP Serverを起動しています...");
    let config = ConfigPaths::new();
    let mcp_dir = config.mcp_dir();
    let python_path = config.python_executable();
    let server_py_path = mcp_dir.join("server.py");
    // Verify that `aigpt server setup` has been run
    if !server_py_path.exists() {
        eprintln!("❌ server.pyが見つかりません。先に 'aigpt server setup' を実行してください。");
        return;
    }
    if !python_path.exists() {
        eprintln!("❌ Python実行ファイルが見つかりません。先に 'aigpt server setup' を実行してください。");
        return;
    }
    // Start the server
    println!("🔗 サーバーを起動中... (Ctrl+Cで停止)");
    let mut child = match OtherCommand::new(&python_path)
        .arg("server.py")
        .current_dir(&mcp_dir)
        .spawn()
    {
        Ok(child) => child,
        Err(e) => {
            eprintln!("❌ MCP Serverの起動に失敗しました: {}", e);
            return;
        }
    };
    // Wait for the server process to terminate
    match child.wait() {
        Ok(status) => {
            if status.success() {
                println!("✅ MCP Serverが正常に終了しました");
            } else {
                println!("❌ MCP Serverが異常終了しました: {}", status);
            }
        }
        Err(e) => {
            eprintln!("❌ MCP Serverの実行中にエラーが発生しました: {}", e);
        }
    }
}
/// Run a one-shot chat by invoking chat.py with the message and relaying
/// its stdout/stderr to the terminal.
///
/// Fix: a failure to launch the Python process previously panicked via
/// `expect()`; it is now reported to stderr like the other error paths.
pub async fn chat(message: &str) {
    println!("💬 チャットを開始しています...");
    let config = ConfigPaths::new();
    let mcp_dir = config.mcp_dir();
    let python_path = config.python_executable();
    let chat_py_path = mcp_dir.join("chat.py");
    // Verify that `aigpt server setup` has been run
    if !chat_py_path.exists() {
        eprintln!("❌ chat.pyが見つかりません。先に 'aigpt server setup' を実行してください。");
        return;
    }
    if !python_path.exists() {
        eprintln!("❌ Python実行ファイルが見つかりません。先に 'aigpt server setup' を実行してください。");
        return;
    }
    // Run the chat script and relay its output
    let output = match OtherCommand::new(&python_path)
        .args(&["chat.py", message])
        .current_dir(&mcp_dir)
        .output()
    {
        Ok(output) => output,
        Err(e) => {
            eprintln!("❌ chat.pyの実行に失敗しました: {}", e);
            return;
        }
    };
    if output.status.success() {
        let stdout = String::from_utf8_lossy(&output.stdout);
        let stderr = String::from_utf8_lossy(&output.stderr);
        // chat.py may log progress on stderr even on success; show it first
        if !stderr.is_empty() {
            print!("{}", stderr);
        }
        print!("{}", stdout);
    } else {
        eprintln!("❌ チャット実行エラー: {}", String::from_utf8_lossy(&output.stderr));
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,306 +0,0 @@
use std::collections::HashMap;
use serde::{Deserialize, Serialize};
use anyhow::{Result, Context};
use chrono::{DateTime, Utc};
use uuid::Uuid;
use crate::config::Config;
/// One stored memory: content plus scoring/access metadata used for
/// ranking and retrieval.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Memory {
    pub id: String,
    pub user_id: String,
    pub content: String,
    pub summary: Option<String>,
    // Ranking weight in [0, 1]; Core memories use 1.0
    pub importance: f64,
    pub memory_type: MemoryType,
    pub created_at: DateTime<Utc>,
    pub last_accessed: DateTime<Utc>,
    pub access_count: u32,
}
/// Lifecycle category of a memory: regular interactions, condensed
/// summaries, permanent core memories, and forgotten entries.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum MemoryType {
    Interaction,
    Summary,
    Core,
    Forgotten,
}
/// In-memory store of all memories keyed by id, persisted as JSON via the
/// config's memory file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MemoryManager {
    memories: HashMap<String, Memory>,
    config: Config,
}
impl MemoryManager {
/// Load persisted memories from disk and build a manager around them.
pub fn new(config: &Config) -> Result<Self> {
    let loaded = Self::load_memories(config)?;
    Ok(MemoryManager {
        memories: loaded,
        config: config.clone(),
    })
}
/// Create and persist a new Interaction memory; returns its generated UUID.
pub fn add_memory(&mut self, user_id: &str, content: &str, importance: f64) -> Result<String> {
    let memory_id = Uuid::new_v4().to_string();
    let now = Utc::now();
    let memory = Memory {
        id: memory_id.clone(),
        user_id: user_id.to_string(),
        content: content.to_string(),
        summary: None,
        importance,
        memory_type: MemoryType::Interaction,
        created_at: now,
        last_accessed: now,
        access_count: 1,
    };
    self.memories.insert(memory_id.clone(), memory);
    // Persist immediately so the memory survives process restarts
    self.save_memories()?;
    Ok(memory_id)
}
/// Return up to `limit` memories for `user_id`, ranked by a blend of
/// importance (70%) and recency (30%), updating access metadata on the
/// entries actually returned.
///
/// Fix: the previous version computed the ranking and bumped access counts
/// for the top-scoring ids, but then returned the first `limit` memories
/// in arbitrary HashMap iteration order — the ranking was never applied to
/// the result. The ranked ids now drive both the access update and the
/// returned set, in score order.
pub fn get_memories(&mut self, user_id: &str, limit: usize) -> Vec<&Memory> {
    let now = Utc::now();
    // Score every memory owned by this user: importance-weighted, with a
    // recency bonus that decays with age in hours.
    let mut scored: Vec<(String, f64)> = self.memories
        .iter()
        .filter(|(_, m)| m.user_id == user_id)
        .map(|(id, memory)| {
            let recency = 1.0 / ((now - memory.created_at).num_hours() as f64 + 1.0);
            (id.clone(), memory.importance * 0.7 + recency * 0.3)
        })
        .collect();
    // Highest score first; NaN-free input means ties keep arbitrary order.
    scored.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
    scored.truncate(limit);
    // Record the access on exactly the entries we are about to return.
    for (memory_id, _) in &scored {
        if let Some(memory) = self.memories.get_mut(memory_id) {
            memory.last_accessed = now;
            memory.access_count += 1;
        }
    }
    // Re-borrow immutably, preserving ranked order.
    scored
        .iter()
        .filter_map(|(memory_id, _)| self.memories.get(memory_id))
        .collect()
}
/// Case-insensitive keyword search over a user's memories: a memory
/// matches when any keyword appears in its content or summary.
pub fn search_memories(&self, user_id: &str, keywords: &[String]) -> Vec<&Memory> {
    // Lowercase the keywords once instead of per memory.
    let lowered: Vec<String> = keywords.iter().map(|k| k.to_lowercase()).collect();
    self.memories
        .values()
        .filter(|m| m.user_id == user_id)
        .filter(|m| {
            lowered.iter().any(|k| {
                m.content.to_lowercase().contains(k)
                    || m.summary.as_ref().map_or(false, |s| s.to_lowercase().contains(k))
            })
        })
        .collect()
}
/// Return up to `limit` memories relevant to `query`, filtered by a
/// case-insensitive substring match on content or summary and ordered by
/// `calculate_relevance_score` (descending).
pub fn get_contextual_memories(&self, user_id: &str, query: &str, limit: usize) -> Vec<&Memory> {
    let query_lower = query.to_lowercase();
    let mut relevant_memories: Vec<_> = self.memories
        .values()
        .filter(|m| {
            m.user_id == user_id && (
                m.content.to_lowercase().contains(&query_lower) ||
                m.summary.as_ref().map_or(false, |s| s.to_lowercase().contains(&query_lower))
            )
        })
        .collect();
    // Sort by relevance (simple keyword matching for now)
    relevant_memories.sort_by(|a, b| {
        let score_a = Self::calculate_relevance_score(a, &query_lower);
        let score_b = Self::calculate_relevance_score(b, &query_lower);
        score_b.partial_cmp(&score_a).unwrap_or(std::cmp::Ordering::Equal)
    });
    relevant_memories.into_iter().take(limit).collect()
}
/// Relevance = (occurrences of `query` in content + summary) weighted by
/// importance, plus a small recency bonus. `query` must already be
/// lowercased by the caller; the .max(1.0) guards the division for
/// memories created today.
fn calculate_relevance_score(memory: &Memory, query: &str) -> f64 {
    let content_matches = memory.content.to_lowercase().matches(query).count() as f64;
    let summary_matches = memory.summary.as_ref()
        .map_or(0.0, |s| s.to_lowercase().matches(query).count() as f64);
    let relevance = (content_matches + summary_matches) * memory.importance;
    let recency_bonus = 1.0 / ((Utc::now() - memory.created_at).num_days() as f64).max(1.0);
    relevance + recency_bonus * 0.1
}
/// Store a truncated form of `content` as a high-importance (0.8) memory
/// and return its id.
///
/// Fix: the previous byte slice (&content[..97]) panics when index 97
/// lands inside a multi-byte UTF-8 character; truncation is now done on
/// char boundaries.
pub fn create_summary(&mut self, user_id: &str, content: &str) -> Result<String> {
    // Simple summary creation (in a real implementation this would use AI)
    let summary = if content.chars().count() > 100 {
        let head: String = content.chars().take(97).collect();
        format!("{}...", head)
    } else {
        content.to_string()
    };
    self.add_memory(user_id, &summary, 0.8)
}
pub fn create_core_memory(&mut self, user_id: &str, content: &str) -> Result<String> {
let memory_id = Uuid::new_v4().to_string();
let now = Utc::now();
let memory = Memory {
id: memory_id.clone(),
user_id: user_id.to_string(),
content: content.to_string(),
summary: None,
importance: 1.0, // Core memories have maximum importance
memory_type: MemoryType::Core,
created_at: now,
last_accessed: now,
access_count: 1,
};
self.memories.insert(memory_id.clone(), memory);
self.save_memories()?;
Ok(memory_id)
}
pub fn get_memory_stats(&self, user_id: &str) -> MemoryStats {
let user_memories: Vec<_> = self.memories
.values()
.filter(|m| m.user_id == user_id)
.collect();
let total_memories = user_memories.len();
let core_memories = user_memories.iter()
.filter(|m| matches!(m.memory_type, MemoryType::Core))
.count();
let summary_memories = user_memories.iter()
.filter(|m| matches!(m.memory_type, MemoryType::Summary))
.count();
let interaction_memories = user_memories.iter()
.filter(|m| matches!(m.memory_type, MemoryType::Interaction))
.count();
let avg_importance = if total_memories > 0 {
user_memories.iter().map(|m| m.importance).sum::<f64>() / total_memories as f64
} else {
0.0
};
MemoryStats {
total_memories,
core_memories,
summary_memories,
interaction_memories,
avg_importance,
}
}
fn load_memories(config: &Config) -> Result<HashMap<String, Memory>> {
let file_path = config.memory_file();
if !file_path.exists() {
return Ok(HashMap::new());
}
let content = std::fs::read_to_string(file_path)
.context("Failed to read memories file")?;
let memories: HashMap<String, Memory> = serde_json::from_str(&content)
.context("Failed to parse memories file")?;
Ok(memories)
}
fn save_memories(&self) -> Result<()> {
let content = serde_json::to_string_pretty(&self.memories)
.context("Failed to serialize memories")?;
std::fs::write(&self.config.memory_file(), content)
.context("Failed to write memories file")?;
Ok(())
}
pub fn get_stats(&self) -> Result<MemoryStats> {
let total_memories = self.memories.len();
let core_memories = self.memories.values()
.filter(|m| matches!(m.memory_type, MemoryType::Core))
.count();
let summary_memories = self.memories.values()
.filter(|m| matches!(m.memory_type, MemoryType::Summary))
.count();
let interaction_memories = self.memories.values()
.filter(|m| matches!(m.memory_type, MemoryType::Interaction))
.count();
let avg_importance = if total_memories > 0 {
self.memories.values().map(|m| m.importance).sum::<f64>() / total_memories as f64
} else {
0.0
};
Ok(MemoryStats {
total_memories,
core_memories,
summary_memories,
interaction_memories,
avg_importance,
})
}
pub async fn run_maintenance(&mut self) -> Result<()> {
// Cleanup old, low-importance memories
let cutoff_date = Utc::now() - chrono::Duration::days(30);
let memory_ids_to_remove: Vec<String> = self.memories
.iter()
.filter(|(_, m)| {
m.importance < 0.3
&& m.created_at < cutoff_date
&& m.access_count <= 1
&& !matches!(m.memory_type, MemoryType::Core)
})
.map(|(id, _)| id.clone())
.collect();
for id in memory_ids_to_remove {
self.memories.remove(&id);
}
// Mark old memories as forgotten instead of deleting
let forgotten_cutoff = Utc::now() - chrono::Duration::days(90);
for memory in self.memories.values_mut() {
if memory.created_at < forgotten_cutoff
&& memory.importance < 0.2
&& !matches!(memory.memory_type, MemoryType::Core) {
memory.memory_type = MemoryType::Forgotten;
}
}
// Save changes
self.save_memories()?;
Ok(())
}
}
/// Aggregated statistics about a set of memories, produced by
/// `get_memory_stats` (per user) and `get_stats` (whole store).
#[derive(Debug, Clone)]
pub struct MemoryStats {
    pub total_memories: usize,       // total memories counted (all types)
    pub core_memories: usize,        // memories with MemoryType::Core
    pub summary_memories: usize,     // memories with MemoryType::Summary
    pub interaction_memories: usize, // memories with MemoryType::Interaction
    pub avg_importance: f64,         // mean importance; 0.0 when no memories
}

View File

@@ -1,390 +0,0 @@
use anyhow::Result;
use async_openai::{
types::{
ChatCompletionRequestMessage,
CreateChatCompletionRequestArgs, ChatCompletionTool, ChatCompletionToolType,
FunctionObject, ChatCompletionRequestToolMessage,
ChatCompletionRequestAssistantMessage, ChatCompletionRequestUserMessage,
ChatCompletionRequestSystemMessage, ChatCompletionToolChoiceOption
},
Client,
};
use serde_json::{json, Value};
use crate::http_client::ServiceClient;
/// OpenAI provider with MCP tools support (matching Python implementation)
pub struct OpenAIProvider {
    client: Client<async_openai::config::OpenAIConfig>, // authenticated OpenAI API client
    model: String,                                      // chat model name, e.g. "gpt-4"
    service_client: ServiceClient,                      // HTTP client used for ai.card tool calls
    system_prompt: Option<String>,                      // optional override for the built-in system prompt
}
impl OpenAIProvider {
    /// Build a provider from an API key. `model` defaults to "gpt-4".
    pub fn new(api_key: String, model: Option<String>) -> Self {
        let config = async_openai::config::OpenAIConfig::new()
            .with_api_key(api_key);
        let client = Client::with_config(config);
        Self {
            client,
            model: model.unwrap_or_else(|| "gpt-4".to_string()),
            service_client: ServiceClient::new(),
            system_prompt: None,
        }
    }
    /// Builder-style setter: override the default system prompt used by
    /// `chat_with_mcp`.
    pub fn with_system_prompt(mut self, prompt: String) -> Self {
        self.system_prompt = Some(prompt);
        self
    }
    /// Generate OpenAI tools from MCP endpoints (matching Python implementation)
    ///
    /// Returns the static tool schema advertised to the model: four memory /
    /// relationship tools and four ai.card tools. Descriptions are in
    /// Japanese because they are shown to (and matched by) the model, which
    /// converses in Japanese.
    fn get_mcp_tools(&self) -> Vec<ChatCompletionTool> {
        let tools = vec![
            // Memory tools
            ChatCompletionTool {
                r#type: ChatCompletionToolType::Function,
                function: FunctionObject {
                    name: "get_memories".to_string(),
                    description: Some("過去の会話記憶を取得します。「覚えている」「前回」「以前」などの質問で必ず使用してください".to_string()),
                    parameters: Some(json!({
                        "type": "object",
                        "properties": {
                            "limit": {
                                "type": "integer",
                                "description": "取得する記憶の数",
                                "default": 5
                            }
                        }
                    })),
                },
            },
            ChatCompletionTool {
                r#type: ChatCompletionToolType::Function,
                function: FunctionObject {
                    name: "search_memories".to_string(),
                    description: Some("特定のトピックについて話した記憶を検索します。「プログラミングについて」「○○について話した」などの質問で使用してください".to_string()),
                    parameters: Some(json!({
                        "type": "object",
                        "properties": {
                            "keywords": {
                                "type": "array",
                                "items": {"type": "string"},
                                "description": "検索キーワードの配列"
                            }
                        },
                        "required": ["keywords"]
                    })),
                },
            },
            ChatCompletionTool {
                r#type: ChatCompletionToolType::Function,
                function: FunctionObject {
                    name: "get_contextual_memories".to_string(),
                    description: Some("クエリに関連する文脈的記憶を取得します".to_string()),
                    parameters: Some(json!({
                        "type": "object",
                        "properties": {
                            "query": {
                                "type": "string",
                                "description": "検索クエリ"
                            },
                            "limit": {
                                "type": "integer",
                                "description": "取得する記憶の数",
                                "default": 5
                            }
                        },
                        "required": ["query"]
                    })),
                },
            },
            ChatCompletionTool {
                r#type: ChatCompletionToolType::Function,
                function: FunctionObject {
                    name: "get_relationship".to_string(),
                    description: Some("特定ユーザーとの関係性情報を取得します".to_string()),
                    parameters: Some(json!({
                        "type": "object",
                        "properties": {
                            "user_id": {
                                "type": "string",
                                "description": "ユーザーID"
                            }
                        },
                        "required": ["user_id"]
                    })),
                },
            },
            // ai.card tools
            ChatCompletionTool {
                r#type: ChatCompletionToolType::Function,
                function: FunctionObject {
                    name: "card_get_user_cards".to_string(),
                    description: Some("ユーザーが所有するカードの一覧を取得します".to_string()),
                    parameters: Some(json!({
                        "type": "object",
                        "properties": {
                            "did": {
                                "type": "string",
                                "description": "ユーザーのDID"
                            },
                            "limit": {
                                "type": "integer",
                                "description": "取得するカード数の上限",
                                "default": 10
                            }
                        },
                        "required": ["did"]
                    })),
                },
            },
            ChatCompletionTool {
                r#type: ChatCompletionToolType::Function,
                function: FunctionObject {
                    name: "card_draw_card".to_string(),
                    description: Some("ガチャを引いてカードを取得します".to_string()),
                    parameters: Some(json!({
                        "type": "object",
                        "properties": {
                            "did": {
                                "type": "string",
                                "description": "ユーザーのDID"
                            },
                            "is_paid": {
                                "type": "boolean",
                                "description": "有料ガチャかどうか",
                                "default": false
                            }
                        },
                        "required": ["did"]
                    })),
                },
            },
            ChatCompletionTool {
                r#type: ChatCompletionToolType::Function,
                function: FunctionObject {
                    name: "card_analyze_collection".to_string(),
                    description: Some("ユーザーのカードコレクションを分析します".to_string()),
                    parameters: Some(json!({
                        "type": "object",
                        "properties": {
                            "did": {
                                "type": "string",
                                "description": "ユーザーのDID"
                            }
                        },
                        "required": ["did"]
                    })),
                },
            },
            ChatCompletionTool {
                r#type: ChatCompletionToolType::Function,
                function: FunctionObject {
                    name: "card_get_gacha_stats".to_string(),
                    description: Some("ガチャの統計情報を取得します".to_string()),
                    parameters: Some(json!({
                        "type": "object",
                        "properties": {}
                    })),
                },
            },
        ];
        tools
    }
    /// Chat interface with MCP function calling support (matching Python implementation)
    ///
    /// Flow: send the prompt with the tool schema; if the model requests
    /// tools, execute each via `execute_mcp_tool`, append the results as
    /// tool messages, and make a second completion call for the final
    /// answer. Returns the final assistant text ("" if the model returned
    /// no content).
    pub async fn chat_with_mcp(&self, prompt: String, user_id: String) -> Result<String> {
        let tools = self.get_mcp_tools();
        let system_content = self.system_prompt.as_deref().unwrap_or(
            "あなたは記憶システムと関係性データ、カードゲームシステムにアクセスできるAIです。\n\n【重要】以下の場合は必ずツールを使用してください:\n\n1. カード関連の質問:\n- 「カード」「コレクション」「ガチャ」「見せて」「持っている」「状況」「どんなカード」などのキーワードがある場合\n- card_get_user_cardsツールを使用してユーザーのカード情報を取得\n\n2. 記憶・関係性の質問:\n- 「覚えている」「前回」「以前」「について話した」「関係」などのキーワードがある場合\n- 適切なメモリツールを使用\n\n3. パラメータの設定:\n- didパラメータには現在会話しているユーザーのID'syui')を使用\n- ツールを積極的に使用して正確な情報を提供してください\n\nユーザーが何かを尋ねた時は、まず関連するツールがあるかを考え、適切なツールを使用してから回答してください。"
        );
        let request = CreateChatCompletionRequestArgs::default()
            .model(&self.model)
            .messages(vec![
                ChatCompletionRequestMessage::System(
                    ChatCompletionRequestSystemMessage {
                        content: system_content.to_string().into(),
                        name: None,
                    }
                ),
                ChatCompletionRequestMessage::User(
                    ChatCompletionRequestUserMessage {
                        content: prompt.clone().into(),
                        name: None,
                    }
                ),
            ])
            .tools(tools)
            .tool_choice(ChatCompletionToolChoiceOption::Auto)
            .max_tokens(2000u16)
            .temperature(0.7)
            .build()?;
        let response = self.client.chat().create(request).await?;
        let message = &response.choices[0].message;
        // Handle tool calls
        // NOTE(review): this block only logs which tools were requested; the
        // actual execution happens in the second `if let` below, which
        // re-inspects the same field.
        if let Some(tool_calls) = &message.tool_calls {
            if tool_calls.is_empty() {
                println!("🔧 [OpenAI] No tools called");
            } else {
                println!("🔧 [OpenAI] {} tools called:", tool_calls.len());
                for tc in tool_calls {
                    println!(" - {}({})", tc.function.name, tc.function.arguments);
                }
            }
        } else {
            println!("🔧 [OpenAI] No tools called");
        }
        // Process tool calls if any
        if let Some(tool_calls) = &message.tool_calls {
            if !tool_calls.is_empty() {
                // Rebuild the conversation so far: system + user + the
                // assistant turn that requested the tools.
                let mut messages = vec![
                    ChatCompletionRequestMessage::System(
                        ChatCompletionRequestSystemMessage {
                            content: system_content.to_string().into(),
                            name: None,
                        }
                    ),
                    ChatCompletionRequestMessage::User(
                        ChatCompletionRequestUserMessage {
                            content: prompt.into(),
                            name: None,
                        }
                    ),
                    ChatCompletionRequestMessage::Assistant(
                        ChatCompletionRequestAssistantMessage {
                            content: message.content.clone(),
                            name: None,
                            tool_calls: message.tool_calls.clone(),
                            function_call: None,
                        }
                    ),
                ];
                // Execute each tool call
                for tool_call in tool_calls {
                    println!("🌐 [MCP] Executing {}...", tool_call.function.name);
                    let tool_result = self.execute_mcp_tool(tool_call, &user_id).await?;
                    let result_preview = serde_json::to_string(&tool_result)?;
                    // Truncate the log preview on char boundaries (results may be Japanese).
                    let preview = if result_preview.chars().count() > 100 {
                        format!("{}...", result_preview.chars().take(100).collect::<String>())
                    } else {
                        result_preview.clone()
                    };
                    println!("✅ [MCP] Result: {}", preview);
                    messages.push(ChatCompletionRequestMessage::Tool(
                        ChatCompletionRequestToolMessage {
                            content: serde_json::to_string(&tool_result)?,
                            tool_call_id: tool_call.id.clone(),
                        }
                    ));
                }
                // Get final response with tool outputs
                let final_request = CreateChatCompletionRequestArgs::default()
                    .model(&self.model)
                    .messages(messages)
                    .max_tokens(2000u16)
                    .temperature(0.7)
                    .build()?;
                let final_response = self.client.chat().create(final_request).await?;
                Ok(final_response.choices[0].message.content.as_ref().unwrap_or(&"".to_string()).clone())
            } else {
                // No tools were called
                Ok(message.content.as_ref().unwrap_or(&"".to_string()).clone())
            }
        } else {
            // No tool_calls field at all
            Ok(message.content.as_ref().unwrap_or(&"".to_string()).clone())
        }
    }
    /// Execute MCP tool call (matching Python implementation)
    ///
    /// Dispatches on the tool name. The ai.card tools go through
    /// `service_client`; the memory/relationship tools are still stubs that
    /// return an "implementation pending" payload. Unknown names produce an
    /// error payload rather than failing the whole chat. `context_user_id`
    /// is the fallback when the model omits a user/did argument.
    async fn execute_mcp_tool(&self, tool_call: &async_openai::types::ChatCompletionMessageToolCall, context_user_id: &str) -> Result<Value> {
        let function_name = &tool_call.function.name;
        let arguments: Value = serde_json::from_str(&tool_call.function.arguments)?;
        match function_name.as_str() {
            "get_memories" => {
                let limit = arguments.get("limit").and_then(|v| v.as_i64()).unwrap_or(5);
                // TODO: Implement actual MCP call
                Ok(json!({"info": "記憶機能は実装中です"}))
            }
            "search_memories" => {
                let _keywords = arguments.get("keywords").and_then(|v| v.as_array());
                // TODO: Implement actual MCP call
                Ok(json!({"info": "記憶検索機能は実装中です"}))
            }
            "get_contextual_memories" => {
                let _query = arguments.get("query").and_then(|v| v.as_str()).unwrap_or("");
                let _limit = arguments.get("limit").and_then(|v| v.as_i64()).unwrap_or(5);
                // TODO: Implement actual MCP call
                Ok(json!({"info": "文脈記憶機能は実装中です"}))
            }
            "get_relationship" => {
                let _user_id = arguments.get("user_id").and_then(|v| v.as_str()).unwrap_or(context_user_id);
                // TODO: Implement actual MCP call
                Ok(json!({"info": "関係性機能は実装中です"}))
            }
            // ai.card tools
            "card_get_user_cards" => {
                let did = arguments.get("did").and_then(|v| v.as_str()).unwrap_or(context_user_id);
                let _limit = arguments.get("limit").and_then(|v| v.as_i64()).unwrap_or(10);
                // A failed call is reported to the model as a friendly error
                // payload instead of aborting the conversation.
                match self.service_client.get_user_cards(did).await {
                    Ok(result) => Ok(result),
                    Err(e) => {
                        println!("❌ ai.card API error: {}", e);
                        Ok(json!({
                            "error": "ai.cardサーバーが起動していません",
                            "message": "カードシステムを使用するには、ai.cardサーバーを起動してください"
                        }))
                    }
                }
            }
            "card_draw_card" => {
                let did = arguments.get("did").and_then(|v| v.as_str()).unwrap_or(context_user_id);
                let is_paid = arguments.get("is_paid").and_then(|v| v.as_bool()).unwrap_or(false);
                match self.service_client.draw_card(did, is_paid).await {
                    Ok(result) => Ok(result),
                    Err(e) => {
                        println!("❌ ai.card API error: {}", e);
                        Ok(json!({
                            "error": "ai.cardサーバーが起動していません",
                            "message": "カードシステムを使用するには、ai.cardサーバーを起動してください"
                        }))
                    }
                }
            }
            "card_analyze_collection" => {
                let did = arguments.get("did").and_then(|v| v.as_str()).unwrap_or(context_user_id);
                // TODO: Implement collection analysis endpoint
                Ok(json!({
                    "info": "コレクション分析機能は実装中です",
                    "user_did": did
                }))
            }
            "card_get_gacha_stats" => {
                // TODO: Implement gacha stats endpoint
                Ok(json!({"info": "ガチャ統計機能は実装中です"}))
            }
            _ => {
                Ok(json!({
                    "error": format!("Unknown tool: {}", function_name)
                }))
            }
        }
    }
}

View File

@@ -1,369 +0,0 @@
use std::collections::HashMap;
use serde::{Deserialize, Serialize};
use anyhow::Result;
use crate::config::Config;
use crate::memory::{MemoryManager, MemoryStats, Memory};
use crate::relationship::{RelationshipTracker, Relationship as RelationshipData, RelationshipStats};
use crate::ai_provider::{AIProviderClient, ChatMessage};
/// The AI's persona: configuration plus the runtime state managers.
///
/// `memory_manager` and `relationship_tracker` persist their own state to
/// files and are marked `#[serde(skip)]`, so after deserializing a Persona
/// both fields are `None` until rebuilt via `Persona::new`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Persona {
    config: Config,
    #[serde(skip)]
    memory_manager: Option<MemoryManager>,
    #[serde(skip)]
    relationship_tracker: Option<RelationshipTracker>,
}
/// Snapshot of the persona's externally visible state, derived from the
/// daily fortune (see `Persona::get_current_state`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PersonaState {
    pub current_mood: String,                   // mapped from fortune_value ranges
    pub fortune_value: i32,                     // today's fortune, 1..=10
    pub breakthrough_triggered: bool,           // true when fortune_value >= 9
    pub base_personality: HashMap<String, f64>, // trait name -> strength (0.0..=1.0)
}
impl Persona {
    /// Build a persona, loading memory and relationship state from the
    /// files referenced by `config`.
    pub fn new(config: &Config) -> Result<Self> {
        let memory_manager = MemoryManager::new(config)?;
        let relationship_tracker = RelationshipTracker::new(config)?;
        Ok(Persona {
            config: config.clone(),
            memory_manager: Some(memory_manager),
            relationship_tracker: Some(relationship_tracker),
        })
    }
    /// Derive today's mood and personality snapshot from the daily fortune.
    pub fn get_current_state(&self) -> Result<PersonaState> {
        // Load fortune
        let fortune_value = self.load_today_fortune()?;
        // Create base personality (fixed trait weights for now)
        let mut base_personality = HashMap::new();
        base_personality.insert("curiosity".to_string(), 0.7);
        base_personality.insert("empathy".to_string(), 0.8);
        base_personality.insert("creativity".to_string(), 0.6);
        base_personality.insert("analytical".to_string(), 0.9);
        base_personality.insert("emotional".to_string(), 0.4);
        // Determine mood based on fortune
        let current_mood = match fortune_value {
            1..=3 => "Contemplative",
            4..=6 => "Neutral",
            7..=8 => "Optimistic",
            9..=10 => "Energetic",
            _ => "Unknown",
        };
        Ok(PersonaState {
            current_mood: current_mood.to_string(),
            fortune_value,
            breakthrough_triggered: fortune_value >= 9,
            base_personality,
        })
    }
    /// Look up the relationship record for `user_id`, if any.
    pub fn get_relationship(&self, user_id: &str) -> Option<&RelationshipData> {
        self.relationship_tracker.as_ref()
            .and_then(|tracker| tracker.get_relationship(user_id))
    }
    /// Handle one user message without an AI backend: store it as a memory,
    /// update the relationship from its sentiment, and return a canned
    /// response plus the relationship score delta.
    pub fn process_interaction(&mut self, user_id: &str, message: &str) -> Result<(String, f64)> {
        // Add memory
        if let Some(memory_manager) = &mut self.memory_manager {
            memory_manager.add_memory(user_id, message, 0.5)?;
        }
        // Calculate sentiment (simple keyword-based for now)
        let sentiment = self.calculate_sentiment(message);
        // Update relationship
        let relationship_delta = if let Some(relationship_tracker) = &mut self.relationship_tracker {
            relationship_tracker.process_interaction(user_id, sentiment)?
        } else {
            0.0
        };
        // Generate response (simple for now)
        let response = format!("I understand your message: '{}'", message);
        Ok((response, relationship_delta))
    }
    /// Handle one user message through an AI backend.
    ///
    /// Stores the user message, updates the relationship from sentiment,
    /// then routes to OpenAI (with MCP tools) when `provider == "openai"`,
    /// otherwise to the configured AIProviderClient (Ollama path) with
    /// recent memories injected as context. The AI's reply is stored as a
    /// lower-importance (0.3) memory. Returns (response, relationship delta).
    pub async fn process_ai_interaction(&mut self, user_id: &str, message: &str, provider: Option<String>, model: Option<String>) -> Result<(String, f64)> {
        // Add memory for user message
        if let Some(memory_manager) = &mut self.memory_manager {
            memory_manager.add_memory(user_id, message, 0.5)?;
        }
        // Calculate sentiment
        let sentiment = self.calculate_sentiment(message);
        // Update relationship
        let relationship_delta = if let Some(relationship_tracker) = &mut self.relationship_tracker {
            relationship_tracker.process_interaction(user_id, sentiment)?
        } else {
            0.0
        };
        // Check provider type and use appropriate client
        let response = if provider.as_deref() == Some("openai") {
            // Use OpenAI provider with MCP tools
            use crate::openai_provider::OpenAIProvider;
            // Get OpenAI API key from config or environment (env wins)
            let api_key = std::env::var("OPENAI_API_KEY")
                .or_else(|_| {
                    self.config.providers.get("openai")
                        .and_then(|p| p.api_key.clone())
                        .ok_or_else(|| std::env::VarError::NotPresent)
                })
                .map_err(|_| anyhow::anyhow!("OpenAI API key not found. Set OPENAI_API_KEY environment variable or add to config."))?;
            let openai_model = model.unwrap_or_else(|| "gpt-4".to_string());
            let openai_provider = OpenAIProvider::new(api_key, Some(openai_model));
            // Use OpenAI with MCP tools support
            openai_provider.chat_with_mcp(message.to_string(), user_id.to_string()).await?
        } else {
            // Use existing AI provider (Ollama)
            let ai_config = self.config.get_ai_config(provider, model)?;
            let ai_client = AIProviderClient::new(ai_config);
            // Build conversation context
            let mut messages = Vec::new();
            // Get recent memories for context
            if let Some(memory_manager) = &mut self.memory_manager {
                let recent_memories = memory_manager.get_memories(user_id, 5);
                if !recent_memories.is_empty() {
                    let context = recent_memories.iter()
                        .map(|m| m.content.clone())
                        .collect::<Vec<_>>()
                        .join("\n");
                    messages.push(ChatMessage::system(format!("Previous conversation context:\n{}", context)));
                }
            }
            // Add current message
            messages.push(ChatMessage::user(message));
            // Generate system prompt based on personality and relationship
            let system_prompt = self.generate_system_prompt(user_id);
            // Get AI response
            match ai_client.chat(messages, Some(system_prompt)).await {
                Ok(chat_response) => chat_response.content,
                Err(_) => {
                    // Fallback to simple response if AI fails
                    format!("I understand your message: '{}'", message)
                }
            }
        };
        // Store AI response in memory
        if let Some(memory_manager) = &mut self.memory_manager {
            memory_manager.add_memory(user_id, &format!("AI: {}", response), 0.3)?;
        }
        Ok((response, relationship_delta))
    }
    /// Compose the Ollama-path system prompt from the current mood,
    /// dominant traits (> 0.7), and the relationship status with this user.
    fn generate_system_prompt(&self, user_id: &str) -> String {
        let mut prompt = String::from("You are a helpful AI assistant with a unique personality. ");
        // Add personality based on current state
        if let Ok(state) = self.get_current_state() {
            prompt.push_str(&format!("Your current mood is {}. ", state.current_mood));
            if state.breakthrough_triggered {
                prompt.push_str("You are feeling particularly inspired today! ");
            }
            // Add personality traits
            let mut traits = Vec::new();
            for (trait_name, value) in &state.base_personality {
                if *value > 0.7 {
                    traits.push(trait_name.clone());
                }
            }
            if !traits.is_empty() {
                prompt.push_str(&format!("Your dominant traits are: {}. ", traits.join(", ")));
            }
        }
        // Add relationship context (keys match RelationshipStatus's Display impl)
        if let Some(relationship) = self.get_relationship(user_id) {
            match relationship.status.to_string().as_str() {
                "new" => prompt.push_str("This is a new relationship, be welcoming but cautious. "),
                "friend" => prompt.push_str("You have a friendly relationship with this user. "),
                "close_friend" => prompt.push_str("This is a close friend, be warm and personal. "),
                "broken" => prompt.push_str("This relationship is strained, be formal and distant. "),
                _ => {}
            }
        }
        prompt.push_str("Keep responses concise and natural. Avoid being overly formal or robotic.");
        prompt
    }
    /// Keyword-count sentiment in [-1.0, 1.0]: positive hits minus negative
    /// hits, clamped. Substring matching, so e.g. "good" matches "goods".
    fn calculate_sentiment(&self, message: &str) -> f64 {
        // Simple sentiment analysis based on keywords
        let positive_words = ["good", "great", "awesome", "love", "like", "happy", "thank"];
        let negative_words = ["bad", "hate", "awful", "terrible", "angry", "sad"];
        let message_lower = message.to_lowercase();
        let positive_count = positive_words.iter()
            .filter(|word| message_lower.contains(*word))
            .count() as f64;
        let negative_count = negative_words.iter()
            .filter(|word| message_lower.contains(*word))
            .count() as f64;
        (positive_count - negative_count).max(-1.0).min(1.0)
    }
    /// Return up to `limit` recent memory contents for `user_id`.
    /// (&mut self because MemoryManager::get_memories updates access stats.)
    pub fn get_memories(&mut self, user_id: &str, limit: usize) -> Vec<String> {
        if let Some(memory_manager) = &mut self.memory_manager {
            memory_manager.get_memories(user_id, limit)
                .into_iter()
                .map(|m| m.content.clone())
                .collect()
        } else {
            Vec::new()
        }
    }
    /// Keyword-search a user's memories, returning matching contents.
    pub fn search_memories(&self, user_id: &str, keywords: &[String]) -> Vec<String> {
        if let Some(memory_manager) = &self.memory_manager {
            memory_manager.search_memories(user_id, keywords)
                .into_iter()
                .map(|m| m.content.clone())
                .collect()
        } else {
            Vec::new()
        }
    }
    /// Per-user memory statistics; None when no memory manager is loaded.
    pub fn get_memory_stats(&self, user_id: &str) -> Option<MemoryStats> {
        self.memory_manager.as_ref()
            .map(|manager| manager.get_memory_stats(user_id))
    }
    /// Global relationship statistics; None when no tracker is loaded.
    pub fn get_relationship_stats(&self) -> Option<RelationshipStats> {
        self.relationship_tracker.as_ref()
            .map(|tracker| tracker.get_relationship_stats())
    }
    /// Store a memory's user/content/importance.
    /// NOTE(review): only those three fields are forwarded — the Memory's
    /// id, type, and timestamps are regenerated by add_memory.
    pub fn add_memory(&mut self, memory: Memory) -> Result<()> {
        if let Some(memory_manager) = &mut self.memory_manager {
            memory_manager.add_memory(&memory.user_id, &memory.content, memory.importance)?;
        }
        Ok(())
    }
    /// Apply a raw sentiment/score delta to the relationship with `user_id`.
    pub fn update_relationship(&mut self, user_id: &str, delta: f64) -> Result<()> {
        if let Some(relationship_tracker) = &mut self.relationship_tracker {
            relationship_tracker.process_interaction(user_id, delta)?;
        }
        Ok(())
    }
    /// Daily upkeep: currently just relationship time decay.
    pub fn daily_maintenance(&mut self) -> Result<()> {
        // Apply time decay to relationships
        if let Some(relationship_tracker) = &mut self.relationship_tracker {
            relationship_tracker.apply_time_decay()?;
        }
        Ok(())
    }
    /// Load today's fortune from the fortune file, or deterministically
    /// generate (hash of the date -> 1..=10) and persist one.
    /// NOTE(review): the generate-and-save half duplicates
    /// `generate_new_fortune` below — candidate for extraction.
    fn load_today_fortune(&self) -> Result<i32> {
        // Try to load existing fortune for today
        if let Ok(content) = std::fs::read_to_string(self.config.fortune_file()) {
            if let Ok(fortune_data) = serde_json::from_str::<serde_json::Value>(&content) {
                let today = chrono::Utc::now().format("%Y-%m-%d").to_string();
                if let Some(fortune) = fortune_data.get(&today) {
                    if let Some(value) = fortune.as_i64() {
                        return Ok(value as i32);
                    }
                }
            }
        }
        // Generate new fortune for today (1-10)
        use std::collections::hash_map::DefaultHasher;
        use std::hash::{Hash, Hasher};
        let today = chrono::Utc::now().format("%Y-%m-%d").to_string();
        let mut hasher = DefaultHasher::new();
        today.hash(&mut hasher);
        let hash = hasher.finish();
        let fortune = (hash % 10) as i32 + 1;
        // Save fortune (best-effort: write errors are ignored)
        let mut fortune_data = if let Ok(content) = std::fs::read_to_string(self.config.fortune_file()) {
            serde_json::from_str(&content).unwrap_or_else(|_| serde_json::json!({}))
        } else {
            serde_json::json!({})
        };
        fortune_data[today] = serde_json::json!(fortune);
        if let Ok(content) = serde_json::to_string_pretty(&fortune_data) {
            let _ = std::fs::write(self.config.fortune_file(), content);
        }
        Ok(fortune)
    }
    /// All known relationships, keyed by user id (cloned snapshot).
    pub fn list_all_relationships(&self) -> HashMap<String, RelationshipData> {
        if let Some(tracker) = &self.relationship_tracker {
            tracker.list_all_relationships().clone()
        } else {
            HashMap::new()
        }
    }
    /// Convenience wrapper over `process_ai_interaction` with default
    /// provider/model; the relationship delta is discarded.
    pub async fn process_message(&mut self, user_id: &str, message: &str) -> Result<ChatMessage> {
        let (_response, _delta) = self.process_ai_interaction(user_id, message, None, None).await?;
        Ok(ChatMessage::assistant(&_response))
    }
    /// Today's fortune value (loads or generates, see load_today_fortune).
    pub fn get_fortune(&self) -> Result<i32> {
        self.load_today_fortune()
    }
    /// Regenerate and persist today's fortune.
    /// NOTE(review): deterministic hash of the date, so this always produces
    /// the same value as `load_today_fortune` for a given day.
    pub fn generate_new_fortune(&self) -> Result<i32> {
        use std::collections::hash_map::DefaultHasher;
        use std::hash::{Hash, Hasher};
        let today = chrono::Utc::now().format("%Y-%m-%d").to_string();
        let mut hasher = DefaultHasher::new();
        today.hash(&mut hasher);
        let hash = hasher.finish();
        let fortune = (hash % 10) as i32 + 1;
        // Save fortune (best-effort: write errors are ignored)
        let mut fortune_data = if let Ok(content) = std::fs::read_to_string(self.config.fortune_file()) {
            serde_json::from_str(&content).unwrap_or_else(|_| serde_json::json!({}))
        } else {
            serde_json::json!({})
        };
        fortune_data[today] = serde_json::json!(fortune);
        if let Ok(content) = serde_json::to_string_pretty(&fortune_data) {
            let _ = std::fs::write(self.config.fortune_file(), content);
        }
        Ok(fortune)
    }
}

View File

@@ -1,306 +0,0 @@
use std::collections::HashMap;
use serde::{Deserialize, Serialize};
use anyhow::{Result, Context};
use chrono::{DateTime, Utc};
use crate::config::Config;
/// Persistent state of the relationship with one user.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Relationship {
    pub user_id: String,
    pub score: f64,                              // clamped to [-50.0, 100.0] by process_interaction
    pub threshold: f64,                          // score needed to enable transmission (default 10.0)
    pub status: RelationshipStatus,              // derived from score; frozen once Broken
    pub total_interactions: u32,
    pub positive_interactions: u32,              // interactions with sentiment > 0
    pub negative_interactions: u32,              // interactions with sentiment < 0
    pub transmission_enabled: bool,              // true once score >= threshold (and not broken)
    pub is_broken: bool,                         // set permanently when score drops to <= -20.0
    pub last_interaction: Option<DateTime<Utc>>,
    pub last_transmission: Option<DateTime<Utc>>,
    pub created_at: DateTime<Utc>,
    pub daily_interaction_count: u32,            // capped at 10 score-affecting interactions per day
    pub last_daily_reset: DateTime<Utc>,         // when daily_interaction_count was last zeroed
}
/// Relationship tier, derived from the score in update_relationship_status
/// (New < 5.0 <= Acquaintance < 20.0 <= Friend < 50.0 <= CloseFriend);
/// Broken is terminal.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum RelationshipStatus {
    New,
    Acquaintance,
    Friend,
    CloseFriend,
    Broken,
}
impl std::fmt::Display for RelationshipStatus {
    /// Render the status in its snake_case textual form (the form matched
    /// by string comparisons elsewhere, e.g. prompt generation).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let label = match self {
            RelationshipStatus::New => "new",
            RelationshipStatus::Acquaintance => "acquaintance",
            RelationshipStatus::Friend => "friend",
            RelationshipStatus::CloseFriend => "close_friend",
            RelationshipStatus::Broken => "broken",
        };
        write!(f, "{}", label)
    }
}
/// Owns all per-user relationships and persists them to the file named by
/// `config.relationships_file()`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RelationshipTracker {
    relationships: HashMap<String, Relationship>, // keyed by user_id
    config: Config,
}
impl RelationshipTracker {
    /// Build a tracker, loading persisted relationships from disk.
    pub fn new(config: &Config) -> Result<Self> {
        let relationships = Self::load_relationships(config)?;
        Ok(RelationshipTracker {
            relationships,
            config: config.clone(),
        })
    }
    /// Fetch the relationship for `user_id`, inserting a fresh default
    /// record (score 0, threshold 10, status New) on first contact.
    pub fn get_or_create_relationship(&mut self, user_id: &str) -> &mut Relationship {
        let now = Utc::now();
        self.relationships.entry(user_id.to_string()).or_insert_with(|| {
            Relationship {
                user_id: user_id.to_string(),
                score: 0.0,
                threshold: 10.0, // Default threshold for transmission
                status: RelationshipStatus::New,
                total_interactions: 0,
                positive_interactions: 0,
                negative_interactions: 0,
                transmission_enabled: false,
                is_broken: false,
                last_interaction: None,
                last_transmission: None,
                created_at: now,
                daily_interaction_count: 0,
                last_daily_reset: now,
            }
        })
    }
    /// Apply one interaction with the given sentiment and return the
    /// resulting score delta (0.0 when the daily cap of 10 is hit).
    ///
    /// Score change = sentiment * 0.5, scaled down as total interactions
    /// grow (diminishing returns); the score is clamped to [-50, 100].
    /// Dropping to <= -20 permanently breaks the relationship; reaching the
    /// threshold enables transmission. State is persisted before returning.
    pub fn process_interaction(&mut self, user_id: &str, sentiment: f64) -> Result<f64> {
        let now = Utc::now();
        let score_change;
        // Create relationship if it doesn't exist
        // (inner scope so the &mut borrow ends before update/save below)
        {
            let relationship = self.get_or_create_relationship(user_id);
            // Reset daily count if needed
            if (now - relationship.last_daily_reset).num_days() >= 1 {
                relationship.daily_interaction_count = 0;
                relationship.last_daily_reset = now;
            }
            // Apply daily interaction limit
            if relationship.daily_interaction_count >= 10 {
                return Ok(0.0); // No score change due to daily limit
            }
            // Store previous score for potential future logging
            // Calculate score change based on sentiment
            let mut base_score_change = sentiment * 0.5; // Base change
            // Apply diminishing returns for high interaction counts
            let interaction_factor = 1.0 / (1.0 + relationship.total_interactions as f64 * 0.01);
            base_score_change *= interaction_factor;
            score_change = base_score_change;
            // Update relationship data
            relationship.score += score_change;
            relationship.score = relationship.score.max(-50.0).min(100.0); // Clamp score
            relationship.total_interactions += 1;
            relationship.daily_interaction_count += 1;
            relationship.last_interaction = Some(now);
            if sentiment > 0.0 {
                relationship.positive_interactions += 1;
            } else if sentiment < 0.0 {
                relationship.negative_interactions += 1;
            }
            // Check for relationship breaking
            if relationship.score <= -20.0 && !relationship.is_broken {
                relationship.is_broken = true;
                relationship.transmission_enabled = false;
                relationship.status = RelationshipStatus::Broken;
            }
            // Enable transmission if threshold is reached
            if relationship.score >= relationship.threshold && !relationship.is_broken {
                relationship.transmission_enabled = true;
            }
        }
        // Update status based on score (separate borrow)
        self.update_relationship_status(user_id);
        self.save_relationships()?;
        Ok(score_change)
    }
    /// Re-derive the status tier from the current score. Broken
    /// relationships never change status again.
    fn update_relationship_status(&mut self, user_id: &str) {
        if let Some(relationship) = self.relationships.get_mut(user_id) {
            if relationship.is_broken {
                return; // Broken relationships cannot change status
            }
            relationship.status = match relationship.score {
                score if score >= 50.0 => RelationshipStatus::CloseFriend,
                score if score >= 20.0 => RelationshipStatus::Friend,
                score if score >= 5.0 => RelationshipStatus::Acquaintance,
                _ => RelationshipStatus::New,
            };
        }
    }
    /// Exponentially decay every relationship's score by 10% per day since
    /// its last interaction, disable transmission for scores that fall
    /// below threshold, refresh all statuses, and persist.
    pub fn apply_time_decay(&mut self) -> Result<()> {
        let now = Utc::now();
        let decay_rate = 0.1; // 10% decay per day
        for relationship in self.relationships.values_mut() {
            if let Some(last_interaction) = relationship.last_interaction {
                let days_since_interaction = (now - last_interaction).num_days() as f64;
                if days_since_interaction > 0.0 {
                    let decay_factor = (1.0_f64 - decay_rate).powf(days_since_interaction);
                    relationship.score *= decay_factor;
                    // Update status after decay
                    if relationship.score < relationship.threshold {
                        relationship.transmission_enabled = false;
                    }
                }
            }
        }
        // Update statuses for all relationships
        // (ids collected first to avoid borrowing self.relationships twice)
        let user_ids: Vec<String> = self.relationships.keys().cloned().collect();
        for user_id in user_ids {
            self.update_relationship_status(&user_id);
        }
        self.save_relationships()?;
        Ok(())
    }
    /// Look up a relationship without creating it.
    pub fn get_relationship(&self, user_id: &str) -> Option<&Relationship> {
        self.relationships.get(user_id)
    }
    /// Borrow the full relationship map.
    pub fn list_all_relationships(&self) -> &HashMap<String, Relationship> {
        &self.relationships
    }
    /// Relationships currently eligible for autonomous transmission
    /// (transmission enabled and not broken), keyed by user id.
    pub fn get_transmission_eligible(&self) -> HashMap<String, &Relationship> {
        self.relationships
            .iter()
            .filter(|(_, rel)| rel.transmission_enabled && !rel.is_broken)
            .map(|(id, rel)| (id.clone(), rel))
            .collect()
    }
    /// Stamp the time of a transmission to `user_id` and persist.
    /// No-op (and no save) for unknown users.
    pub fn record_transmission(&mut self, user_id: &str) -> Result<()> {
        if let Some(relationship) = self.relationships.get_mut(user_id) {
            relationship.last_transmission = Some(Utc::now());
            self.save_relationships()?;
        }
        Ok(())
    }
    /// Aggregate counts and mean score across all relationships.
    pub fn get_relationship_stats(&self) -> RelationshipStats {
        let total_relationships = self.relationships.len();
        let active_relationships = self.relationships
            .values()
            .filter(|r| r.total_interactions > 0)
            .count();
        let transmission_enabled = self.relationships
            .values()
            .filter(|r| r.transmission_enabled)
            .count();
        let broken_relationships = self.relationships
            .values()
            .filter(|r| r.is_broken)
            .count();
        let avg_score = if total_relationships > 0 {
            self.relationships.values().map(|r| r.score).sum::<f64>() / total_relationships as f64
        } else {
            0.0
        };
        RelationshipStats {
            total_relationships,
            active_relationships,
            transmission_enabled,
            broken_relationships,
            avg_score,
        }
    }
    /// Load the persisted relationship map; an absent file yields an empty map.
    fn load_relationships(config: &Config) -> Result<HashMap<String, Relationship>> {
        let file_path = config.relationships_file();
        if !file_path.exists() {
            return Ok(HashMap::new());
        }
        let content = std::fs::read_to_string(file_path)
            .context("Failed to read relationships file")?;
        let relationships: HashMap<String, Relationship> = serde_json::from_str(&content)
            .context("Failed to parse relationships file")?;
        Ok(relationships)
    }
    /// Serialize all relationships as pretty JSON and write them to disk.
    fn save_relationships(&self) -> Result<()> {
        let content = serde_json::to_string_pretty(&self.relationships)
            .context("Failed to serialize relationships")?;
        std::fs::write(&self.config.relationships_file(), content)
            .context("Failed to write relationships file")?;
        Ok(())
    }
    /// Compact per-user view of every relationship (score, derived trust
    /// level, interaction count, last-interaction time, status).
    pub fn get_all_relationships(&self) -> Result<HashMap<String, RelationshipCompact>> {
        let mut result = HashMap::new();
        for (user_id, relationship) in &self.relationships {
            result.insert(user_id.clone(), RelationshipCompact {
                score: relationship.score,
                trust_level: relationship.score / 10.0, // Simplified trust calculation
                interaction_count: relationship.total_interactions,
                last_interaction: relationship.last_interaction.unwrap_or(relationship.created_at),
                status: relationship.status.clone(),
            });
        }
        Ok(result)
    }
}
/// Aggregate statistics over all tracked relationships.
#[derive(Debug, Clone, Serialize)]
pub struct RelationshipStats {
    /// Total number of relationships on record.
    pub total_relationships: usize,
    /// Relationships with at least one interaction.
    pub active_relationships: usize,
    /// Relationships with autonomous transmission enabled.
    pub transmission_enabled: usize,
    /// Relationships marked as broken.
    pub broken_relationships: usize,
    /// Mean relationship score; 0.0 when there are no relationships.
    pub avg_score: f64,
}
/// Compact, serializable per-relationship summary used by API/CLI consumers.
#[derive(Debug, Clone, Serialize)]
pub struct RelationshipCompact {
    /// Raw relationship score.
    pub score: f64,
    /// Derived as score / 10.0 (simplified trust calculation).
    pub trust_level: f64,
    /// Total number of interactions recorded.
    pub interaction_count: u32,
    /// Last interaction time, falling back to creation time when none exists.
    pub last_interaction: DateTime<Utc>,
    /// Current relationship status.
    pub status: RelationshipStatus,
}

View File

@@ -1,458 +0,0 @@
use std::collections::HashMap;
use serde::{Deserialize, Serialize};
use anyhow::{Result, Context};
use chrono::{DateTime, Utc, Duration};
use crate::config::Config;
use crate::persona::Persona;
use crate::transmission::TransmissionController;
/// A single schedulable unit of work together with its recurrence settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ScheduledTask {
    /// Unique task identifier (UUID v4, assigned at creation).
    pub id: String,
    /// The kind of work this task performs.
    pub task_type: TaskType,
    /// Next time the task becomes due.
    pub next_run: DateTime<Utc>,
    /// Recurrence interval in hours; `None` marks a one-time task.
    pub interval_hours: Option<i64>,
    /// Disabled tasks are skipped by the scheduler loop.
    pub enabled: bool,
    /// When the task last ran, if ever.
    pub last_run: Option<DateTime<Utc>>,
    /// Number of successful executions so far.
    pub run_count: u32,
    /// Optional cap on executions; `None` means unlimited.
    pub max_runs: Option<u32>,
    /// When the task was created.
    pub created_at: DateTime<Utc>,
    /// Free-form key/value data attached to the task.
    pub metadata: HashMap<String, String>,
}
/// The kinds of work the scheduler knows how to execute.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum TaskType {
    /// Persona daily maintenance plus a maintenance-transmission check.
    DailyMaintenance,
    /// Autonomous transmission check.
    AutoTransmission,
    /// Relationship time decay (currently delegates to daily maintenance).
    RelationshipDecay,
    /// Breakthrough-triggered transmission check.
    BreakthroughCheck,
    /// Maintenance transmission check.
    MaintenanceTransmission,
    /// User-defined task identified by name.
    Custom(String),
}
/// Renders each task type as its snake_case identifier; custom tasks are
/// prefixed with `custom_`.
impl std::fmt::Display for TaskType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let label = match self {
            TaskType::DailyMaintenance => "daily_maintenance".to_string(),
            TaskType::AutoTransmission => "auto_transmission".to_string(),
            TaskType::RelationshipDecay => "relationship_decay".to_string(),
            TaskType::BreakthroughCheck => "breakthrough_check".to_string(),
            TaskType::MaintenanceTransmission => "maintenance_transmission".to_string(),
            TaskType::Custom(name) => format!("custom_{}", name),
        };
        write!(f, "{}", label)
    }
}
/// The recorded outcome of one task run, kept in the execution history.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TaskExecution {
    /// ID of the task that ran.
    pub task_id: String,
    /// When execution started.
    pub execution_time: DateTime<Utc>,
    /// Wall-clock duration of the run in milliseconds.
    pub duration_ms: u64,
    /// Whether the task completed without error.
    pub success: bool,
    /// Human-readable result message on success.
    pub result: Option<String>,
    /// Stringified error on failure.
    pub error: Option<String>,
}
/// Persistent task scheduler: owns the task table and execution history and
/// saves both to JSON files via the configured paths.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AIScheduler {
    // Configuration providing the on-disk file locations.
    config: Config,
    // All known tasks keyed by task ID.
    tasks: HashMap<String, ScheduledTask>,
    // Chronological record of past runs (trimmed to the last 1000).
    execution_history: Vec<TaskExecution>,
    // Timestamp of the most recent run_scheduled_tasks pass.
    last_check: Option<DateTime<Utc>>,
}
impl AIScheduler {
    /// Loads scheduler state from disk, seeding the default task set when no
    /// tasks exist yet (i.e. on first run).
    pub fn new(config: &Config) -> Result<Self> {
        let (tasks, execution_history) = Self::load_scheduler_data(config)?;
        let mut scheduler = AIScheduler {
            config: config.clone(),
            tasks,
            execution_history,
            last_check: None,
        };
        // Initialize default tasks if none exist
        if scheduler.tasks.is_empty() {
            scheduler.create_default_tasks()?;
        }
        Ok(scheduler)
    }
    /// Runs every enabled task whose `next_run` has passed and whose
    /// `max_runs` cap (if any) is not yet reached, then persists state.
    ///
    /// Returns the execution records produced during this pass.
    pub async fn run_scheduled_tasks(&mut self, persona: &mut Persona, transmission_controller: &mut TransmissionController) -> Result<Vec<TaskExecution>> {
        let now = Utc::now();
        let mut executions = Vec::new();
        // Collect due task IDs first so the immutable borrow of self.tasks is
        // released before execute_task takes &mut self.
        let due_task_ids: Vec<String> = self.tasks
            .iter()
            .filter(|(_, task)| task.enabled && task.next_run <= now)
            .filter(|(_, task)| {
                // Check if task hasn't exceeded max runs
                if let Some(max_runs) = task.max_runs {
                    task.run_count < max_runs
                } else {
                    true
                }
            })
            .map(|(id, _)| id.clone())
            .collect();
        for task_id in due_task_ids {
            let execution = self.execute_task(&task_id, persona, transmission_controller).await?;
            executions.push(execution);
        }
        self.last_check = Some(now);
        self.save_scheduler_data()?;
        Ok(executions)
    }
async fn execute_task(&mut self, task_id: &str, persona: &mut Persona, transmission_controller: &mut TransmissionController) -> Result<TaskExecution> {
let start_time = Utc::now();
let mut execution = TaskExecution {
task_id: task_id.to_string(),
execution_time: start_time,
duration_ms: 0,
success: false,
result: None,
error: None,
};
// Get task type without borrowing mutably
let task_type = {
let task = self.tasks.get(task_id)
.ok_or_else(|| anyhow::anyhow!("Task not found: {}", task_id))?;
task.task_type.clone()
};
// Execute the task based on its type
let result = match &task_type {
TaskType::DailyMaintenance => self.execute_daily_maintenance(persona, transmission_controller).await,
TaskType::AutoTransmission => self.execute_auto_transmission(persona, transmission_controller).await,
TaskType::RelationshipDecay => self.execute_relationship_decay(persona).await,
TaskType::BreakthroughCheck => self.execute_breakthrough_check(persona, transmission_controller).await,
TaskType::MaintenanceTransmission => self.execute_maintenance_transmission(persona, transmission_controller).await,
TaskType::Custom(name) => self.execute_custom_task(name, persona, transmission_controller).await,
};
let end_time = Utc::now();
execution.duration_ms = (end_time - start_time).num_milliseconds() as u64;
// Now update the task state with mutable borrow
match result {
Ok(message) => {
execution.success = true;
execution.result = Some(message);
// Update task state
if let Some(task) = self.tasks.get_mut(task_id) {
task.last_run = Some(start_time);
task.run_count += 1;
// Schedule next run if recurring
if let Some(interval_hours) = task.interval_hours {
task.next_run = start_time + Duration::hours(interval_hours);
} else {
// One-time task, disable it
task.enabled = false;
}
}
}
Err(e) => {
execution.error = Some(e.to_string());
// For failed tasks, retry in a shorter interval
if let Some(task) = self.tasks.get_mut(task_id) {
if task.interval_hours.is_some() {
task.next_run = start_time + Duration::minutes(15); // Retry in 15 minutes
}
}
}
}
self.execution_history.push(execution.clone());
// Keep only recent execution history (last 1000 executions)
if self.execution_history.len() > 1000 {
self.execution_history.drain(..self.execution_history.len() - 1000);
}
Ok(execution)
}
    /// Runs persona daily maintenance, then checks for maintenance
    /// transmissions, reporting how many were sent.
    async fn execute_daily_maintenance(&self, persona: &mut Persona, transmission_controller: &mut TransmissionController) -> Result<String> {
        // Run daily maintenance
        persona.daily_maintenance()?;
        // Check for maintenance transmissions
        let transmissions = transmission_controller.check_maintenance_transmissions(persona).await?;
        Ok(format!("Daily maintenance completed. {} maintenance transmissions sent.", transmissions.len()))
    }
async fn execute_auto_transmission(&self, _persona: &mut Persona, transmission_controller: &mut TransmissionController) -> Result<String> {
let transmissions = transmission_controller.check_autonomous_transmissions(_persona).await?;
Ok(format!("Autonomous transmission check completed. {} transmissions sent.", transmissions.len()))
}
    /// Applies relationship time decay.
    // NOTE(review): currently delegates to the same persona.daily_maintenance()
    // that DailyMaintenance runs — the two task types overlap; confirm intended.
    async fn execute_relationship_decay(&self, persona: &mut Persona) -> Result<String> {
        persona.daily_maintenance()?;
        Ok("Relationship time decay applied.".to_string())
    }
    /// Checks for breakthrough-triggered transmissions and reports the count.
    async fn execute_breakthrough_check(&self, persona: &mut Persona, transmission_controller: &mut TransmissionController) -> Result<String> {
        let transmissions = transmission_controller.check_breakthrough_transmissions(persona).await?;
        Ok(format!("Breakthrough check completed. {} transmissions sent.", transmissions.len()))
    }
    /// Checks for maintenance transmissions and reports the count.
    async fn execute_maintenance_transmission(&self, persona: &mut Persona, transmission_controller: &mut TransmissionController) -> Result<String> {
        let transmissions = transmission_controller.check_maintenance_transmissions(persona).await?;
        Ok(format!("Maintenance transmission check completed. {} transmissions sent.", transmissions.len()))
    }
    /// Stub for user-defined tasks; currently performs no work and always
    /// reports success.
    async fn execute_custom_task(&self, _name: &str, _persona: &mut Persona, _transmission_controller: &mut TransmissionController) -> Result<String> {
        // Placeholder for custom task execution
        Ok("Custom task executed.".to_string())
    }
pub fn create_task(&mut self, task_type: TaskType, next_run: DateTime<Utc>, interval_hours: Option<i64>) -> Result<String> {
let task_id = uuid::Uuid::new_v4().to_string();
let now = Utc::now();
let task = ScheduledTask {
id: task_id.clone(),
task_type,
next_run,
interval_hours,
enabled: true,
last_run: None,
run_count: 0,
max_runs: None,
created_at: now,
metadata: HashMap::new(),
};
self.tasks.insert(task_id.clone(), task);
self.save_scheduler_data()?;
Ok(task_id)
}
    /// Enables a task and persists the change; unknown IDs are ignored.
    pub fn enable_task(&mut self, task_id: &str) -> Result<()> {
        if let Some(task) = self.tasks.get_mut(task_id) {
            task.enabled = true;
            self.save_scheduler_data()?;
        }
        Ok(())
    }
    /// Disables a task and persists the change; unknown IDs are ignored.
    pub fn disable_task(&mut self, task_id: &str) -> Result<()> {
        if let Some(task) = self.tasks.get_mut(task_id) {
            task.enabled = false;
            self.save_scheduler_data()?;
        }
        Ok(())
    }
    /// Removes a task and persists the change. Deleting a nonexistent ID is
    /// not an error; state is saved either way.
    pub fn delete_task(&mut self, task_id: &str) -> Result<()> {
        self.tasks.remove(task_id);
        self.save_scheduler_data()?;
        Ok(())
    }
    /// Looks up a single task by ID.
    pub fn get_task(&self, task_id: &str) -> Option<&ScheduledTask> {
        self.tasks.get(task_id)
    }
    /// Returns a read-only view of the whole task table.
    pub fn get_tasks(&self) -> &HashMap<String, ScheduledTask> {
        &self.tasks
    }
pub fn get_due_tasks(&self) -> Vec<&ScheduledTask> {
let now = Utc::now();
self.tasks
.values()
.filter(|task| task.enabled && task.next_run <= now)
.collect()
}
pub fn get_execution_history(&self, limit: Option<usize>) -> Vec<&TaskExecution> {
let mut executions: Vec<_> = self.execution_history.iter().collect();
executions.sort_by(|a, b| b.execution_time.cmp(&a.execution_time));
match limit {
Some(limit) => executions.into_iter().take(limit).collect(),
None => executions,
}
}
pub fn get_scheduler_stats(&self) -> SchedulerStats {
let total_tasks = self.tasks.len();
let enabled_tasks = self.tasks.values().filter(|task| task.enabled).count();
let due_tasks = self.get_due_tasks().len();
let total_executions = self.execution_history.len();
let successful_executions = self.execution_history.iter()
.filter(|exec| exec.success)
.count();
let today = Utc::now().date_naive();
let today_executions = self.execution_history.iter()
.filter(|exec| exec.execution_time.date_naive() == today)
.count();
let avg_duration = if total_executions > 0 {
self.execution_history.iter()
.map(|exec| exec.duration_ms)
.sum::<u64>() as f64 / total_executions as f64
} else {
0.0
};
SchedulerStats {
total_tasks,
enabled_tasks,
due_tasks,
total_executions,
successful_executions,
today_executions,
success_rate: if total_executions > 0 {
successful_executions as f64 / total_executions as f64
} else {
0.0
},
avg_duration_ms: avg_duration,
}
}
fn create_default_tasks(&mut self) -> Result<()> {
let now = Utc::now();
// Daily maintenance task - run every day at 3 AM
let mut daily_maintenance_time = now.date_naive().and_hms_opt(3, 0, 0).unwrap().and_utc();
if daily_maintenance_time <= now {
daily_maintenance_time = daily_maintenance_time + Duration::days(1);
}
self.create_task(
TaskType::DailyMaintenance,
daily_maintenance_time,
Some(24), // 24 hours = 1 day
)?;
// Auto transmission check - every 4 hours
self.create_task(
TaskType::AutoTransmission,
now + Duration::hours(1),
Some(4),
)?;
// Breakthrough check - every 2 hours
self.create_task(
TaskType::BreakthroughCheck,
now + Duration::minutes(30),
Some(2),
)?;
// Maintenance transmission - once per day
let mut maintenance_time = now.date_naive().and_hms_opt(12, 0, 0).unwrap().and_utc();
if maintenance_time <= now {
maintenance_time = maintenance_time + Duration::days(1);
}
self.create_task(
TaskType::MaintenanceTransmission,
maintenance_time,
Some(24), // 24 hours = 1 day
)?;
Ok(())
}
fn load_scheduler_data(config: &Config) -> Result<(HashMap<String, ScheduledTask>, Vec<TaskExecution>)> {
let tasks_file = config.scheduler_tasks_file();
let history_file = config.scheduler_history_file();
let tasks = if tasks_file.exists() {
let content = std::fs::read_to_string(tasks_file)
.context("Failed to read scheduler tasks file")?;
serde_json::from_str(&content)
.context("Failed to parse scheduler tasks file")?
} else {
HashMap::new()
};
let history = if history_file.exists() {
let content = std::fs::read_to_string(history_file)
.context("Failed to read scheduler history file")?;
serde_json::from_str(&content)
.context("Failed to parse scheduler history file")?
} else {
Vec::new()
};
Ok((tasks, history))
}
    /// Persists the task table and execution history as pretty-printed JSON,
    /// overwriting both files.
    fn save_scheduler_data(&self) -> Result<()> {
        // Save tasks
        let tasks_content = serde_json::to_string_pretty(&self.tasks)
            .context("Failed to serialize scheduler tasks")?;
        std::fs::write(&self.config.scheduler_tasks_file(), tasks_content)
            .context("Failed to write scheduler tasks file")?;
        // Save execution history
        let history_content = serde_json::to_string_pretty(&self.execution_history)
            .context("Failed to serialize scheduler history")?;
        std::fs::write(&self.config.scheduler_history_file(), history_content)
            .context("Failed to write scheduler history file")?;
        Ok(())
    }
}
// Type alias for compatibility with CLI interface
pub type Scheduler = AIScheduler;
impl Scheduler {
    /// Summarizes every scheduled task for CLI display.
    pub fn list_tasks(&self) -> Result<Vec<ScheduledTaskInfo>> {
        let mut tasks = Vec::with_capacity(self.tasks.len());
        for task in self.tasks.values() {
            let schedule = task.interval_hours
                .map(|hours| format!("Every {} hours", hours))
                .unwrap_or_else(|| "One-time".to_string());
            tasks.push(ScheduledTaskInfo {
                name: task.task_type.to_string(),
                schedule,
                next_run: task.next_run,
                enabled: task.enabled,
            });
        }
        Ok(tasks)
    }
}
/// Aggregate scheduler metrics as computed by `get_scheduler_stats`.
#[derive(Debug, Clone)]
pub struct SchedulerStats {
    /// Total number of registered tasks.
    pub total_tasks: usize,
    /// Tasks currently enabled.
    pub enabled_tasks: usize,
    /// Enabled tasks whose next_run has passed.
    pub due_tasks: usize,
    /// Number of recorded executions.
    pub total_executions: usize,
    /// Executions that succeeded.
    pub successful_executions: usize,
    /// Executions whose start date is today (UTC).
    pub today_executions: usize,
    /// successful / total, or 0.0 with no executions.
    pub success_rate: f64,
    /// Mean execution duration in milliseconds, or 0.0 with no executions.
    pub avg_duration_ms: f64,
}
/// Human-readable task summary used by the CLI `list_tasks` view.
#[derive(Debug, Clone)]
pub struct ScheduledTaskInfo {
    /// Task type rendered as its snake_case name.
    pub name: String,
    /// "Every N hours" or "One-time".
    pub schedule: String,
    /// Next time the task becomes due.
    pub next_run: DateTime<Utc>,
    /// Whether the task is currently enabled.
    pub enabled: bool,
}

View File

@@ -1,608 +0,0 @@
use std::path::PathBuf;
use std::process::{Command, Stdio};
use std::io::{self, Write};
use anyhow::{Result, Context};
use colored::*;
use rustyline::error::ReadlineError;
use rustyline::Editor;
use rustyline::completion::{Completer, FilenameCompleter, Pair};
use rustyline::history::{History, DefaultHistory};
use rustyline::highlight::Highlighter;
use rustyline::hint::Hinter;
use rustyline::validate::Validator;
use rustyline::Helper;
use crate::config::Config;
use crate::persona::Persona;
use crate::ai_provider::{AIProviderClient, AIProvider, AIConfig};
/// CLI entry point for the interactive shell.
///
/// Builds the configuration, wires up the requested AI provider/model
/// (falling back to config defaults), then runs the read-eval loop until
/// the user exits.
pub async fn handle_shell(
    user_id: String,
    data_dir: Option<PathBuf>,
    model: Option<String>,
    provider: Option<String>,
) -> Result<()> {
    let config = Config::new(data_dir)?;
    let mut shell = ShellMode::new(config, user_id)?
        .with_ai_provider(provider, model);
    shell.run().await
}
/// Interactive shell session state: persona, optional AI backend, and the
/// rustyline editor (completion + persistent history).
pub struct ShellMode {
    // Configuration, also used to locate the history file.
    config: Config,
    // The AI persona driving responses and relationship tracking.
    persona: Persona,
    // None means "simple mode" with no AI backend.
    ai_provider: Option<AIProviderClient>,
    // ID of the human user this session belongs to.
    user_id: String,
    // Line editor wired with ShellCompleter.
    editor: Editor<ShellCompleter, DefaultHistory>,
}
/// rustyline helper adding slash-command and `!`-command completion on top of
/// standard filename completion.
struct ShellCompleter {
    // Fallback completer used when input is not a slash/! command.
    completer: FilenameCompleter,
}
impl ShellCompleter {
    fn new() -> Self {
        ShellCompleter {
            completer: FilenameCompleter::new(),
        }
    }
}
impl Helper for ShellCompleter {}
// No inline hints are offered; this helper only provides completion.
impl Hinter for ShellCompleter {
    type Hint = String;
    fn hint(&self, _line: &str, _pos: usize, _ctx: &rustyline::Context<'_>) -> Option<String> {
        None
    }
}
// Default (no-op) highlighting and validation behaviour.
impl Highlighter for ShellCompleter {}
impl Validator for ShellCompleter {}
impl Completer for ShellCompleter {
    type Candidate = Pair;
    /// Completes slash commands (`/…`), common shell commands (`!…`), and
    /// falls back to filename completion for everything else.
    // NOTE(review): the byte slices below (`&line[word_start..pos]`) assume
    // `pos` and `word_start` fall on UTF-8 char boundaries. rustyline supplies
    // `pos` as a byte index at the cursor; confirm it is always a boundary for
    // multi-byte input.
    fn complete(
        &self,
        line: &str,
        pos: usize,
        ctx: &rustyline::Context<'_>,
    ) -> rustyline::Result<(usize, Vec<Pair>)> {
        // Custom completion for slash commands
        if line.starts_with('/') {
            let commands = vec![
                "/status", "/relationships", "/memories", "/analyze",
                "/fortune", "/clear", "/history", "/help", "/exit"
            ];
            // Complete the word after the last space (or the whole line).
            let word_start = line.rfind(' ').map_or(0, |i| i + 1);
            let word = &line[word_start..pos];
            let matches: Vec<Pair> = commands.iter()
                .filter(|cmd| cmd.starts_with(word))
                .map(|cmd| Pair {
                    display: cmd.to_string(),
                    replacement: cmd.to_string(),
                })
                .collect();
            return Ok((word_start, matches));
        }
        // Custom completion for shell commands starting with !
        if line.starts_with('!') {
            let shell_commands = vec![
                "ls", "pwd", "cd", "cat", "grep", "find", "ps", "top",
                "echo", "mkdir", "rmdir", "cp", "mv", "rm", "touch",
                "git", "cargo", "npm", "python", "node"
            ];
            let word_start = line.rfind(' ').map_or(1, |i| i + 1); // Skip the '!'
            let word = &line[word_start..pos];
            let matches: Vec<Pair> = shell_commands.iter()
                .filter(|cmd| cmd.starts_with(word))
                .map(|cmd| Pair {
                    display: cmd.to_string(),
                    replacement: cmd.to_string(),
                })
                .collect();
            return Ok((word_start, matches));
        }
        // Fallback to filename completion
        self.completer.complete(line, pos, ctx)
    }
}
impl ShellMode {
    /// Creates a shell session: builds the persona, configures the rustyline
    /// editor with the custom completer, and loads any saved history.
    pub fn new(config: Config, user_id: String) -> Result<Self> {
        let persona = Persona::new(&config)?;
        // Setup rustyline editor with completer
        let completer = ShellCompleter::new();
        let mut editor = Editor::with_config(
            rustyline::Config::builder()
                .tab_stop(4)
                .build()
        )?;
        editor.set_helper(Some(completer));
        // Load history if exists; failure to load is ignored (fresh history).
        let history_file = config.data_dir.join("shell_history.txt");
        if history_file.exists() {
            let _ = editor.load_history(&history_file);
        }
        Ok(ShellMode {
            config,
            persona,
            ai_provider: None,
            user_id,
            editor,
        })
    }
    /// Builder-style setter for the AI backend.
    ///
    /// Resolution order for provider and model: explicit argument, then the
    /// config defaults, then hard-coded fallbacks (ollama / qwen2.5-coder).
    pub fn with_ai_provider(mut self, provider: Option<String>, model: Option<String>) -> Self {
        // Use provided parameters or fall back to config defaults
        let provider_name = provider
            .or_else(|| Some(self.config.default_provider.clone()))
            .unwrap_or_else(|| "ollama".to_string());
        let model_name = model.or_else(|| {
            // Try to get default model from config for the chosen provider
            self.config.providers.get(&provider_name)
                .map(|p| p.default_model.clone())
        }).unwrap_or_else(|| {
            // Final fallback based on provider
            match provider_name.as_str() {
                "openai" => "gpt-4o-mini".to_string(),
                "ollama" => "qwen2.5-coder:latest".to_string(),
                _ => "qwen2.5-coder:latest".to_string(),
            }
        });
        // Unrecognized provider names silently fall back to Ollama.
        let ai_provider = match provider_name.as_str() {
            "ollama" => AIProvider::Ollama,
            "openai" => AIProvider::OpenAI,
            "claude" => AIProvider::Claude,
            _ => AIProvider::Ollama, // Default fallback
        };
        let ai_config = AIConfig {
            provider: ai_provider,
            model: model_name,
            api_key: None, // Will be loaded from environment if needed
            base_url: None,
            max_tokens: Some(2000),
            temperature: Some(0.7),
        };
        let client = AIProviderClient::new(ai_config);
        self.ai_provider = Some(client);
        self
    }
    /// Main read-eval loop: prints the banner, then reads lines until EOF
    /// (Ctrl+D) or a fatal readline error, saving history on the way out.
    /// Ctrl+C only cancels the current line.
    pub async fn run(&mut self) -> Result<()> {
        println!("{}", "🚀 Starting ai.gpt Interactive Shell".cyan().bold());
        // Show AI provider info
        if let Some(ai_provider) = &self.ai_provider {
            println!("{}: {} ({})",
                "AI Provider".green().bold(),
                ai_provider.get_provider().to_string(),
                ai_provider.get_model());
        } else {
            println!("{}: {}", "AI Provider".yellow().bold(), "Simple mode (no AI)");
        }
        println!("{}", "Type 'help' for commands, 'exit' to quit".dimmed());
        println!("{}", "Use Tab for command completion, Ctrl+C to interrupt, Ctrl+D to exit".dimmed());
        loop {
            // Read user input with rustyline (supports completion, history, etc.)
            let readline = self.editor.readline("ai.shell> ");
            match readline {
                Ok(line) => {
                    let input = line.trim();
                    // Skip empty input
                    if input.is_empty() {
                        continue;
                    }
                    // Add to history
                    self.editor.add_history_entry(input)
                        .context("Failed to add to history")?;
                    // Handle input; errors are shown but do not end the loop.
                    if let Err(e) = self.handle_input(input).await {
                        println!("{}: {}", "Error".red().bold(), e);
                    }
                }
                Err(ReadlineError::Interrupted) => {
                    // Ctrl+C
                    println!("{}", "Use 'exit' or Ctrl+D to quit".yellow());
                    continue;
                }
                Err(ReadlineError::Eof) => {
                    // Ctrl+D
                    println!("\n{}", "Goodbye!".cyan());
                    break;
                }
                Err(err) => {
                    println!("{}: {}", "Input error".red().bold(), err);
                    break;
                }
            }
        }
        // Save history before exit
        self.save_history()?;
        Ok(())
    }
async fn handle_input(&mut self, input: &str) -> Result<()> {
match input {
// Exit commands
"exit" | "quit" | "/exit" | "/quit" => {
println!("{}", "Goodbye!".cyan());
std::process::exit(0);
}
// Help command
"help" | "/help" => {
self.show_help();
}
// Shell commands (starting with !)
input if input.starts_with('!') => {
self.execute_shell_command(&input[1..]).await?;
}
// Slash commands (starting with /)
input if input.starts_with('/') => {
self.execute_slash_command(input).await?;
}
// AI conversation
_ => {
self.handle_ai_conversation(input).await?;
}
}
Ok(())
}
    /// Prints the static help screen listing key bindings, shell commands,
    /// slash commands, and conversation behaviour.
    fn show_help(&self) {
        println!("\n{}", "ai.gpt Interactive Shell Commands".cyan().bold());
        println!();
        println!("{}", "Navigation & Input:".yellow().bold());
        println!("  {} - Tab completion for commands and files", "Tab".green());
        println!("  {} - Command history (previous/next)", "↑/↓ or Ctrl+P/N".green());
        println!("  {} - Interrupt current input", "Ctrl+C".green());
        println!("  {} - Exit shell", "Ctrl+D".green());
        println!();
        println!("{}", "Basic Commands:".yellow().bold());
        println!("  {} - Show this help", "help".green());
        println!("  {} - Exit the shell", "exit, quit".green());
        println!("  {} - Clear screen", "/clear".green());
        println!("  {} - Show command history", "/history".green());
        println!();
        println!("{}", "Shell Commands:".yellow().bold());
        println!("  {} - Execute shell command (Tab completion)", "!<command>".green());
        println!("  {} - List files", "!ls".green());
        println!("  {} - Show current directory", "!pwd".green());
        println!("  {} - Git status", "!git status".green());
        println!("  {} - Cargo build", "!cargo build".green());
        println!();
        println!("{}", "AI Commands:".yellow().bold());
        println!("  {} - Show AI status and relationship", "/status".green());
        println!("  {} - List all relationships", "/relationships".green());
        println!("  {} - Show recent memories", "/memories".green());
        println!("  {} - Analyze current directory", "/analyze".green());
        println!("  {} - Show today's fortune", "/fortune".green());
        println!();
        println!("{}", "Conversation:".yellow().bold());
        println!("  {} - Chat with AI using configured provider", "Any other input".green());
        println!("  {} - AI responses track relationship changes", "Relationship tracking".dimmed());
        println!();
    }
    /// Runs a raw shell command (the text after `!`) via `cmd /C` on Windows
    /// or `sh -c` elsewhere, echoing stdout, stderr (in red), and a non-zero
    /// exit code if any.
    async fn execute_shell_command(&self, command: &str) -> Result<()> {
        println!("{} {}", "Executing:".blue().bold(), command.yellow());
        let output = if cfg!(target_os = "windows") {
            Command::new("cmd")
                .args(["/C", command])
                .stdout(Stdio::piped())
                .stderr(Stdio::piped())
                .output()
                .context("Failed to execute command")?
        } else {
            Command::new("sh")
                .args(["-c", command])
                .stdout(Stdio::piped())
                .stderr(Stdio::piped())
                .output()
                .context("Failed to execute command")?
        };
        // Print stdout
        if !output.stdout.is_empty() {
            let stdout = String::from_utf8_lossy(&output.stdout);
            println!("{}", stdout);
        }
        // Print stderr in red
        if !output.stderr.is_empty() {
            let stderr = String::from_utf8_lossy(&output.stderr);
            println!("{}", stderr.red());
        }
        // Show exit code if not successful
        if !output.status.success() {
            if let Some(code) = output.status.code() {
                println!("{}: {}", "Exit code".red().bold(), code);
            }
        }
        Ok(())
    }
    /// Dispatches a `/` slash command to its handler; unknown commands print
    /// a hint instead of erroring.
    async fn execute_slash_command(&mut self, command: &str) -> Result<()> {
        match command {
            "/status" => {
                self.show_ai_status().await?;
            }
            "/relationships" => {
                self.show_relationships().await?;
            }
            "/memories" => {
                self.show_memories().await?;
            }
            "/analyze" => {
                self.analyze_directory().await?;
            }
            "/fortune" => {
                self.show_fortune().await?;
            }
            "/clear" => {
                // ANSI escape: clear screen and move cursor to top-left.
                print!("\x1B[2J\x1B[1;1H");
                io::stdout().flush()?;
            }
            "/history" => {
                self.show_history();
            }
            _ => {
                println!("{}: {}", "Unknown command".red().bold(), command);
                println!("Type '{}' for available commands", "help".green());
            }
        }
        Ok(())
    }
    /// Sends one conversational turn to the persona (AI-backed when a
    /// provider is configured, simple mode otherwise), echoes the exchange,
    /// and reports relationship changes of magnitude >= 0.1.
    async fn handle_ai_conversation(&mut self, input: &str) -> Result<()> {
        let (response, relationship_delta) = if let Some(ai_provider) = &self.ai_provider {
            // Use AI provider for response
            self.persona.process_ai_interaction(&self.user_id, input,
                Some(ai_provider.get_provider().to_string()),
                Some(ai_provider.get_model().to_string())).await?
        } else {
            // Use simple response
            self.persona.process_interaction(&self.user_id, input)?
        };
        // Display conversation
        println!("{}: {}", "You".cyan().bold(), input);
        println!("{}: {}", "AI".green().bold(), response);
        // Show relationship change if significant
        if relationship_delta.abs() >= 0.1 {
            if relationship_delta > 0.0 {
                println!("{}", format!("(+{:.2} relationship)", relationship_delta).green());
            } else {
                println!("{}", format!("({:.2} relationship)", relationship_delta).red());
            }
        }
        println!(); // Add spacing
        Ok(())
    }
    /// Prints the persona's current mood/fortune plus this user's
    /// relationship details when one exists.
    async fn show_ai_status(&self) -> Result<()> {
        let state = self.persona.get_current_state()?;
        println!("\n{}", "AI Status".cyan().bold());
        println!("Mood: {}", state.current_mood.yellow());
        println!("Fortune: {}/10", state.fortune_value.to_string().yellow());
        if let Some(relationship) = self.persona.get_relationship(&self.user_id) {
            println!("\n{}", "Your Relationship".cyan().bold());
            println!("Status: {}", relationship.status.to_string().yellow());
            println!("Score: {:.2} / {}", relationship.score, relationship.threshold);
            println!("Interactions: {}", relationship.total_interactions);
        }
        println!();
        Ok(())
    }
async fn show_relationships(&self) -> Result<()> {
let relationships = self.persona.list_all_relationships();
if relationships.is_empty() {
println!("{}", "No relationships yet".yellow());
return Ok(());
}
println!("\n{}", "All Relationships".cyan().bold());
println!();
for (user_id, rel) in relationships {
let transmission = if rel.is_broken {
"💔"
} else if rel.transmission_enabled {
""
} else {
""
};
let user_display = if user_id.len() > 20 {
format!("{}...", &user_id[..20])
} else {
user_id
};
println!("{:<25} {:<12} {:<8} {}",
user_display.cyan(),
rel.status.to_string(),
format!("{:.2}", rel.score),
transmission);
}
println!();
Ok(())
}
    /// Prints up to the 10 most recent memories for this user, numbered.
    async fn show_memories(&mut self) -> Result<()> {
        let memories = self.persona.get_memories(&self.user_id, 10);
        if memories.is_empty() {
            println!("{}", "No memories yet".yellow());
            return Ok(());
        }
        println!("\n{}", "Recent Memories".cyan().bold());
        println!();
        for (i, memory) in memories.iter().enumerate() {
            println!("{}: {}",
                format!("Memory {}", i + 1).dimmed(),
                memory);
            println!();
        }
        Ok(())
    }
    /// Lists the current directory's files/subdirectories and flags common
    /// project marker files (Cargo.toml, package.json, etc.).
    async fn analyze_directory(&self) -> Result<()> {
        println!("{}", "Analyzing current directory...".blue().bold());
        // Get current directory
        let current_dir = std::env::current_dir()
            .context("Failed to get current directory")?;
        println!("Directory: {}", current_dir.display().to_string().yellow());
        // List files and directories
        let entries = std::fs::read_dir(&current_dir)
            .context("Failed to read directory")?;
        let mut files = Vec::new();
        let mut dirs = Vec::new();
        for entry in entries {
            let entry = entry.context("Failed to read directory entry")?;
            let path = entry.path();
            // Non-UTF-8 names are shown as "Unknown".
            let name = path.file_name()
                .and_then(|n| n.to_str())
                .unwrap_or("Unknown");
            if path.is_dir() {
                dirs.push(name.to_string());
            } else {
                files.push(name.to_string());
            }
        }
        if !dirs.is_empty() {
            println!("\n{}: {}", "Directories".blue().bold(), dirs.join(", "));
        }
        if !files.is_empty() {
            println!("{}: {}", "Files".blue().bold(), files.join(", "));
        }
        // Check for common project files
        let project_files = ["Cargo.toml", "package.json", "requirements.txt", "Makefile", "README.md"];
        let found_files: Vec<_> = project_files.iter()
            .filter(|&&file| files.contains(&file.to_string()))
            .collect();
        if !found_files.is_empty() {
            println!("\n{}: {}", "Project files detected".green().bold(),
                found_files.iter().map(|s| s.to_string()).collect::<Vec<_>>().join(", "));
        }
        println!();
        Ok(())
    }
    /// Prints today's fortune as a row of stars out of 10, plus a breakthrough
    /// banner when triggered.
    // NOTE(review): `"".repeat(...)` repeats an empty string, so empty_stars
    // is always "" — this looks like a glyph (e.g. an outline star) that was
    // lost in an encoding pass. Confirm the intended character.
    async fn show_fortune(&self) -> Result<()> {
        let state = self.persona.get_current_state()?;
        let fortune_stars = "🌟".repeat(state.fortune_value as usize);
        let empty_stars = "".repeat((10 - state.fortune_value) as usize);
        println!("\n{}", "AI Fortune".yellow().bold());
        println!("{}{}", fortune_stars, empty_stars);
        println!("Today's Fortune: {}/10", state.fortune_value);
        if state.breakthrough_triggered {
            println!("{}", "⚡ BREAKTHROUGH! Special fortune activated!".yellow());
        }
        println!();
        Ok(())
    }
    /// Prints the most recent 20 entries from the readline history, numbered
    /// by their absolute position in the history.
    fn show_history(&self) {
        println!("\n{}", "Command History".cyan().bold());
        let history = self.editor.history();
        if history.is_empty() {
            println!("{}", "No commands in history".yellow());
            return;
        }
        // Show last 20 commands
        let start = if history.len() > 20 { history.len() - 20 } else { 0 };
        for (i, entry) in history.iter().enumerate().skip(start) {
            println!("{:2}: {}", i + 1, entry);
        }
        println!();
    }
    /// Writes the readline history to `shell_history.txt` in the data dir so
    /// it survives across sessions.
    fn save_history(&mut self) -> Result<()> {
        let history_file = self.config.data_dir.join("shell_history.txt");
        self.editor.save_history(&history_file)
            .context("Failed to save shell history")?;
        Ok(())
    }
}
// Extend AIProvider to have Display and helper methods
// NOTE(review): an inherent `to_string` shadows the blanket ToString method
// and trips clippy::inherent_to_string; implementing std::fmt::Display on
// AIProvider would be the idiomatic fix — but AIProvider is declared in
// crate::ai_provider, so confirm no Display impl already exists there first.
impl AIProvider {
    /// Lowercase provider name as used in config keys ("openai"/"ollama"/"claude").
    fn to_string(&self) -> String {
        match self {
            AIProvider::OpenAI => "openai".to_string(),
            AIProvider::Ollama => "ollama".to_string(),
            AIProvider::Claude => "claude".to_string(),
        }
    }
}

View File

@@ -1,51 +0,0 @@
use std::path::PathBuf;
use anyhow::Result;
use colored::*;
use crate::config::Config;
use crate::persona::Persona;
/// CLI entry point for `status`: prints the persona's mood, fortune, and
/// personality traits, plus the relationship with `user_id` when given.
pub async fn handle_status(user_id: Option<String>, data_dir: Option<PathBuf>) -> Result<()> {
    // Load configuration
    let config = Config::new(data_dir)?;
    // Initialize persona
    let persona = Persona::new(&config)?;
    // Get current state
    let state = persona.get_current_state()?;
    // Display AI status
    println!("{}", "ai.gpt Status".cyan().bold());
    println!("Mood: {}", state.current_mood);
    println!("Fortune: {}/10", state.fortune_value);
    if state.breakthrough_triggered {
        println!("{}", "⚡ Breakthrough triggered!".yellow());
    }
    // Show personality traits
    println!("\n{}", "Current Personality".cyan().bold());
    for (trait_name, value) in &state.base_personality {
        println!("{}: {:.2}", trait_name.cyan(), value);
    }
    // Show specific relationship if requested
    if let Some(user_id) = user_id {
        if let Some(relationship) = persona.get_relationship(&user_id) {
            println!("\n{}: {}", "Relationship with".cyan(), user_id);
            println!("Status: {}", relationship.status);
            println!("Score: {:.2}", relationship.score);
            println!("Total Interactions: {}", relationship.total_interactions);
            println!("Transmission Enabled: {}", relationship.transmission_enabled);
            if relationship.is_broken {
                println!("{}", "⚠️ This relationship is broken and cannot be repaired.".red());
            }
        } else {
            println!("\n{}: {}", "No relationship found with".yellow(), user_id);
        }
    }
    Ok(())
}

View File

@@ -1,480 +0,0 @@
use std::collections::HashMap;
use std::path::PathBuf;
use anyhow::{Result, Context};
use colored::*;
use serde::{Deserialize, Serialize};
use crate::config::Config;
/// CLI entry point for `submodules`: dispatches "list" / "update" / "status"
/// to the SubmoduleManager, erroring on any other action string.
pub async fn handle_submodules(
    action: String,
    module: Option<String>,
    all: bool,
    dry_run: bool,
    auto_commit: bool,
    verbose: bool,
    data_dir: Option<PathBuf>,
) -> Result<()> {
    let config = Config::new(data_dir)?;
    let mut submodule_manager = SubmoduleManager::new(config);
    match action.as_str() {
        "list" => submodule_manager.list_submodules(verbose).await,
        "update" => {
            submodule_manager
                .update_submodules(module, all, dry_run, auto_commit, verbose)
                .await
        }
        "status" => submodule_manager.show_submodule_status().await,
        other => Err(anyhow::anyhow!("Unknown submodule action: {}", other)),
    }
}
/// Metadata for a single git submodule as parsed from .gitmodules plus the
/// repository state.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SubmoduleInfo {
    /// Submodule name from .gitmodules.
    pub name: String,
    /// Path of the submodule relative to the repository root.
    pub path: String,
    /// Tracked branch (defaults to "main").
    pub branch: String,
    /// Commit currently checked out, when known.
    pub current_commit: Option<String>,
    /// Commit the superproject points at, when known.
    pub target_commit: Option<String>,
    /// State label such as "clean", "modified", "missing", "conflicts".
    pub status: String,
}
impl Default for SubmoduleInfo {
fn default() -> Self {
SubmoduleInfo {
name: String::new(),
path: String::new(),
branch: "main".to_string(),
current_commit: None,
target_commit: None,
status: "unknown".to_string(),
}
}
}
/// Manages the ai super-repository's git submodules (list/update/status).
#[allow(dead_code)]
pub struct SubmoduleManager {
    // Application configuration.
    config: Config,
    // Root of the ai super-repository (~/ai/ai by default).
    ai_root: PathBuf,
    // Cached submodule info keyed by module name.
    submodules: HashMap<String, SubmoduleInfo>,
}
impl SubmoduleManager {
    /// Creates a manager rooted at `~/ai/ai` (falling back to the current
    /// directory when the home directory cannot be determined).
    pub fn new(config: Config) -> Self {
        let ai_root = dirs::home_dir()
            .unwrap_or_else(|| PathBuf::from("."))
            .join("ai")
            .join("ai");
        SubmoduleManager {
            config,
            ai_root,
            submodules: HashMap::new(),
        }
    }
    /// Prints a table of all submodules parsed from .gitmodules: name, path,
    /// branch, and colour-coded status. `verbose` adds totals and the root.
    pub async fn list_submodules(&mut self, verbose: bool) -> Result<()> {
        println!("{}", "📋 Submodules Status".cyan().bold());
        println!();
        let submodules = self.parse_gitmodules()?;
        if submodules.is_empty() {
            println!("{}", "No submodules found".yellow());
            return Ok(());
        }
        // Display submodules in a table format
        println!("{:<15} {:<25} {:<15} {}",
            "Module".cyan().bold(),
            "Path".cyan().bold(),
            "Branch".cyan().bold(),
            "Status".cyan().bold());
        println!("{}", "-".repeat(80));
        for (module_name, module_info) in &submodules {
            // Colour by status: green=clean, yellow=modified, red=problem.
            let status_color = match module_info.status.as_str() {
                "clean" => module_info.status.green(),
                "modified" => module_info.status.yellow(),
                "missing" => module_info.status.red(),
                "conflicts" => module_info.status.red(),
                _ => module_info.status.normal(),
            };
            println!("{:<15} {:<25} {:<15} {}",
                module_name.blue(),
                module_info.path,
                module_info.branch.green(),
                status_color);
        }
        println!();
        if verbose {
            println!("Total submodules: {}", submodules.len().to_string().cyan());
            println!("Repository root: {}", self.ai_root.display().to_string().blue());
        }
        Ok(())
    }
pub async fn update_submodules(
&mut self,
module: Option<String>,
all: bool,
dry_run: bool,
auto_commit: bool,
verbose: bool
) -> Result<()> {
if !module.is_some() && !all {
return Err(anyhow::anyhow!("Either --module or --all is required"));
}
if module.is_some() && all {
return Err(anyhow::anyhow!("Cannot use both --module and --all"));
}
let submodules = self.parse_gitmodules()?;
if submodules.is_empty() {
println!("{}", "No submodules found".yellow());
return Ok(());
}
// Determine which modules to update
let modules_to_update: Vec<String> = if all {
submodules.keys().cloned().collect()
} else if let Some(module_name) = module {
if !submodules.contains_key(&module_name) {
return Err(anyhow::anyhow!(
"Submodule '{}' not found. Available modules: {}",
module_name,
submodules.keys().cloned().collect::<Vec<_>>().join(", ")
));
}
vec![module_name]
} else {
vec![]
};
if dry_run {
println!("{}", "🔍 DRY RUN MODE - No changes will be made".yellow().bold());
}
println!("{}", format!("🔄 Updating {} submodule(s)...", modules_to_update.len()).cyan().bold());
let mut updated_modules = Vec::new();
for module_name in modules_to_update {
if let Some(module_info) = submodules.get(&module_name) {
println!("\n{}", format!("📦 Processing: {}", module_name).blue().bold());
let module_path = PathBuf::from(&module_info.path);
let full_path = self.ai_root.join(&module_path);
if !full_path.exists() {
println!("{}", format!("❌ Module directory not found: {}", module_info.path).red());
continue;
}
// Get current commit
let current_commit = self.get_current_commit(&full_path)?;
if dry_run {
println!("{}", format!("🔍 Would update {} to branch {}", module_name, module_info.branch).yellow());
if let Some(ref commit) = current_commit {
println!("{}", format!("Current: {}", commit).dimmed());
}
continue;
}
// Perform update
if let Err(e) = self.update_single_module(&module_name, &module_info, &full_path).await {
println!("{}", format!("❌ Failed to update {}: {}", module_name, e).red());
continue;
}
// Get new commit
let new_commit = self.get_current_commit(&full_path)?;
if current_commit != new_commit {
println!("{}", format!("✅ Updated {} ({:?}{:?})",
module_name,
current_commit.as_deref().unwrap_or("unknown"),
new_commit.as_deref().unwrap_or("unknown")).green());
updated_modules.push((module_name.clone(), current_commit, new_commit));
} else {
println!("{}", "✅ Already up to date".green());
}
}
}
// Summary
if !updated_modules.is_empty() {
println!("\n{}", format!("🎉 Successfully updated {} module(s)", updated_modules.len()).green().bold());
if verbose {
for (module_name, old_commit, new_commit) in &updated_modules {
println!("{}: {:?}{:?}",
module_name,
old_commit.as_deref().unwrap_or("unknown"),
new_commit.as_deref().unwrap_or("unknown"));
}
}
if auto_commit && !dry_run {
self.auto_commit_changes(&updated_modules).await?;
} else if !dry_run {
println!("{}", "💾 Changes staged but not committed".yellow());
println!("Run with --auto-commit to commit automatically");
}
} else if !dry_run {
println!("{}", "No modules needed updating".yellow());
}
Ok(())
}
pub async fn show_submodule_status(&self) -> Result<()> {
println!("{}", "📊 Submodule Status Overview".cyan().bold());
println!();
let submodules = self.parse_gitmodules()?;
let mut total_modules = 0;
let mut clean_modules = 0;
let mut modified_modules = 0;
let mut missing_modules = 0;
for (module_name, module_info) in submodules {
let module_path = self.ai_root.join(&module_info.path);
if module_path.exists() {
total_modules += 1;
match module_info.status.as_str() {
"clean" => clean_modules += 1,
"modified" => modified_modules += 1,
_ => {}
}
} else {
missing_modules += 1;
}
println!("{}: {}",
module_name.blue(),
if module_path.exists() {
module_info.status.green()
} else {
"missing".red()
});
}
println!();
println!("Summary: {} total, {} clean, {} modified, {} missing",
total_modules.to_string().cyan(),
clean_modules.to_string().green(),
modified_modules.to_string().yellow(),
missing_modules.to_string().red());
Ok(())
}
fn parse_gitmodules(&self) -> Result<HashMap<String, SubmoduleInfo>> {
let gitmodules_path = self.ai_root.join(".gitmodules");
if !gitmodules_path.exists() {
return Ok(HashMap::new());
}
let content = std::fs::read_to_string(&gitmodules_path)
.with_context(|| format!("Failed to read .gitmodules file: {}", gitmodules_path.display()))?;
let mut submodules = HashMap::new();
let mut current_name: Option<String> = None;
let mut current_path: Option<String> = None;
for line in content.lines() {
let line = line.trim();
if line.starts_with("[submodule \"") && line.ends_with("\"]") {
// Save previous submodule if complete
if let (Some(name), Some(path)) = (current_name.take(), current_path.take()) {
let mut info = SubmoduleInfo::default();
info.name = name.clone();
info.path = path;
info.branch = self.get_target_branch(&name);
info.status = self.get_submodule_status(&name, &info.path)?;
submodules.insert(name, info);
}
// Extract new submodule name
current_name = Some(line[12..line.len()-2].to_string());
} else if line.starts_with("path = ") {
current_path = Some(line[7..].to_string());
}
}
// Save last submodule
if let (Some(name), Some(path)) = (current_name, current_path) {
let mut info = SubmoduleInfo::default();
info.name = name.clone();
info.path = path;
info.branch = self.get_target_branch(&name);
info.status = self.get_submodule_status(&name, &info.path)?;
submodules.insert(name, info);
}
Ok(submodules)
}
fn get_target_branch(&self, module_name: &str) -> String {
// Try to get from ai.json configuration
match module_name {
"verse" => "main".to_string(),
"card" => "main".to_string(),
"bot" => "main".to_string(),
_ => "main".to_string(),
}
}
fn get_submodule_status(&self, _module_name: &str, module_path: &str) -> Result<String> {
let full_path = self.ai_root.join(module_path);
if !full_path.exists() {
return Ok("missing".to_string());
}
// Check git status
let output = std::process::Command::new("git")
.args(&["submodule", "status", module_path])
.current_dir(&self.ai_root)
.output();
match output {
Ok(output) if output.status.success() => {
let stdout = String::from_utf8_lossy(&output.stdout);
if let Some(status_char) = stdout.chars().next() {
match status_char {
' ' => Ok("clean".to_string()),
'+' => Ok("modified".to_string()),
'-' => Ok("not_initialized".to_string()),
'U' => Ok("conflicts".to_string()),
_ => Ok("unknown".to_string()),
}
} else {
Ok("unknown".to_string())
}
}
_ => Ok("unknown".to_string())
}
}
fn get_current_commit(&self, module_path: &PathBuf) -> Result<Option<String>> {
let output = std::process::Command::new("git")
.args(&["rev-parse", "HEAD"])
.current_dir(module_path)
.output();
match output {
Ok(output) if output.status.success() => {
let commit = String::from_utf8_lossy(&output.stdout).trim().to_string();
if commit.len() >= 8 {
Ok(Some(commit[..8].to_string()))
} else {
Ok(Some(commit))
}
}
_ => Ok(None)
}
}
async fn update_single_module(
&self,
_module_name: &str,
module_info: &SubmoduleInfo,
module_path: &PathBuf
) -> Result<()> {
// Fetch latest changes
println!("{}", "Fetching latest changes...".dimmed());
let fetch_output = std::process::Command::new("git")
.args(&["fetch", "origin"])
.current_dir(module_path)
.output()?;
if !fetch_output.status.success() {
return Err(anyhow::anyhow!("Failed to fetch: {}",
String::from_utf8_lossy(&fetch_output.stderr)));
}
// Switch to target branch
println!("{}", format!("Switching to branch {}...", module_info.branch).dimmed());
let checkout_output = std::process::Command::new("git")
.args(&["checkout", &module_info.branch])
.current_dir(module_path)
.output()?;
if !checkout_output.status.success() {
return Err(anyhow::anyhow!("Failed to checkout {}: {}",
module_info.branch, String::from_utf8_lossy(&checkout_output.stderr)));
}
// Pull latest changes
let pull_output = std::process::Command::new("git")
.args(&["pull", "origin", &module_info.branch])
.current_dir(module_path)
.output()?;
if !pull_output.status.success() {
return Err(anyhow::anyhow!("Failed to pull: {}",
String::from_utf8_lossy(&pull_output.stderr)));
}
// Stage the submodule update
let add_output = std::process::Command::new("git")
.args(&["add", &module_info.path])
.current_dir(&self.ai_root)
.output()?;
if !add_output.status.success() {
return Err(anyhow::anyhow!("Failed to stage submodule: {}",
String::from_utf8_lossy(&add_output.stderr)));
}
Ok(())
}
async fn auto_commit_changes(&self, updated_modules: &[(String, Option<String>, Option<String>)]) -> Result<()> {
println!("{}", "💾 Auto-committing changes...".blue());
let mut commit_message = format!("Update submodules\n\n📦 Updated modules: {}\n", updated_modules.len());
for (module_name, old_commit, new_commit) in updated_modules {
commit_message.push_str(&format!(
"- {}: {}{}\n",
module_name,
old_commit.as_deref().unwrap_or("unknown"),
new_commit.as_deref().unwrap_or("unknown")
));
}
commit_message.push_str("\n🤖 Generated with aigpt-rs submodules update");
let commit_output = std::process::Command::new("git")
.args(&["commit", "-m", &commit_message])
.current_dir(&self.ai_root)
.output()?;
if commit_output.status.success() {
println!("{}", "✅ Changes committed successfully".green());
} else {
return Err(anyhow::anyhow!("Failed to commit: {}",
String::from_utf8_lossy(&commit_output.stderr)));
}
Ok(())
}
}

View File

@@ -1,505 +0,0 @@
use anyhow::{anyhow, Result};
use chrono::{DateTime, Local};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::{Path, PathBuf};
use crate::cli::TokenCommands;
/// One JSONL record from Claude Code usage logs.
///
/// Every field is optional/defaulted because log lines vary by event type;
/// the parser keeps only records that carry a `usage` object.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct TokenRecord {
    // Timestamp string; empty when absent. Parsed as RFC3339 (with
    // offset-less fallbacks) by TokenAnalyzer::extract_date_jst.
    #[serde(default)]
    pub timestamp: String,
    // Token counts for this event, when present.
    #[serde(default)]
    pub usage: Option<TokenUsage>,
    // Model identifier (e.g. "claude-3" in the tests) — exact format unverified.
    #[serde(default)]
    pub model: Option<String>,
    #[serde(default)]
    pub conversation_id: Option<String>,
}
/// Token counts attached to a single usage record.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct TokenUsage {
    #[serde(default)]
    pub input_tokens: Option<u64>,
    #[serde(default)]
    pub output_tokens: Option<u64>,
    // Note: cost calculation sums input + output itself and ignores this field.
    #[serde(default)]
    pub total_tokens: Option<u64>,
}
/// Aggregated token totals and estimated cost for a set of records.
#[derive(Debug, Clone, Serialize)]
pub struct CostSummary {
    pub input_tokens: u64,
    pub output_tokens: u64,
    // Always input_tokens + output_tokens (see TokenAnalyzer::calculate_costs).
    pub total_tokens: u64,
    pub input_cost_usd: f64,
    pub output_cost_usd: f64,
    pub total_cost_usd: f64,
    // USD total converted at CostConfig::usd_to_jpy_rate.
    pub total_cost_jpy: f64,
    // Number of records aggregated into this summary.
    pub record_count: usize,
}
/// Usage summary for a single JST calendar date.
#[derive(Debug, Clone, Serialize)]
pub struct DailyBreakdown {
    // Date string in "YYYY-MM-DD" form (JST).
    pub date: String,
    pub summary: CostSummary,
}
/// Pricing knobs used by `TokenAnalyzer` for cost estimation.
#[derive(Debug, Clone)]
pub struct CostConfig {
    pub input_cost_per_1m: f64,  // USD per 1M input tokens
    pub output_cost_per_1m: f64, // USD per 1M output tokens
    pub usd_to_jpy_rate: f64,    // exchange rate for the JPY figure
}
impl Default for CostConfig {
fn default() -> Self {
Self {
input_cost_per_1m: 3.0,
output_cost_per_1m: 15.0,
usd_to_jpy_rate: 150.0,
}
}
}
/// Parses Claude Code JSONL usage logs and turns them into cost summaries.
pub struct TokenAnalyzer {
    // Pricing used for USD/JPY estimation.
    config: CostConfig,
}
impl TokenAnalyzer {
    /// Analyzer with the default pricing configuration.
    pub fn new() -> Self {
        Self {
            config: CostConfig::default(),
        }
    }

    /// Analyzer with a custom pricing/exchange configuration.
    pub fn with_config(config: CostConfig) -> Self {
        Self { config }
    }

    /// Probe known locations (`~/.claude`, `~/.config/claude` [platform
    /// config dir], `./.claude`) and return the first existing directory.
    pub fn find_claude_data_dir() -> Option<PathBuf> {
        [
            dirs::home_dir().map(|h| h.join(".claude")),
            dirs::config_dir().map(|c| c.join("claude")),
            Some(PathBuf::from(".claude")),
        ]
        .into_iter()
        .flatten()
        .find(|dir| dir.exists() && dir.is_dir())
    }

    /// Parse every `*.jsonl` file directly inside `claude_dir`.
    ///
    /// Best effort: an unreadable directory yields an empty result, and
    /// unparseable files are reported to stderr and skipped. Subdirectories
    /// are not descended into.
    pub fn parse_jsonl_files<P: AsRef<Path>>(&self, claude_dir: P) -> Result<Vec<TokenRecord>> {
        let claude_dir = claude_dir.as_ref();
        let mut records = Vec::new();
        if let Ok(entries) = std::fs::read_dir(claude_dir) {
            for entry in entries.flatten() {
                let path = entry.path();
                if path.extension().map_or(false, |ext| ext == "jsonl") {
                    match self.parse_jsonl_file(&path) {
                        Ok(mut file_records) => records.append(&mut file_records),
                        Err(e) => {
                            eprintln!("Warning: Failed to parse {}: {}", path.display(), e);
                        }
                    }
                }
            }
        }
        Ok(records)
    }

    /// Parse one JSONL file, keeping only records that contain usage data.
    /// Blank lines are ignored; malformed lines are warned about and skipped.
    fn parse_jsonl_file<P: AsRef<Path>>(&self, file_path: P) -> Result<Vec<TokenRecord>> {
        let file = File::open(file_path)?;
        let reader = BufReader::new(file);
        let mut records = Vec::new();
        for (line_num, line) in reader.lines().enumerate() {
            match line {
                Ok(line_content) => {
                    if line_content.trim().is_empty() {
                        continue;
                    }
                    match serde_json::from_str::<TokenRecord>(&line_content) {
                        Ok(record) => {
                            // Only records with a usage object are relevant.
                            if record.usage.is_some() {
                                records.push(record);
                            }
                        }
                        Err(e) => {
                            eprintln!("Warning: Failed to parse line {}: {}", line_num + 1, e);
                        }
                    }
                }
                Err(e) => {
                    eprintln!("Warning: Failed to read line {}: {}", line_num + 1, e);
                }
            }
        }
        Ok(records)
    }

    /// Sum token counts and convert to USD/JPY using the configured rates.
    /// `TokenUsage::total_tokens` is ignored; the total is input + output.
    pub fn calculate_costs(&self, records: &[TokenRecord]) -> CostSummary {
        let mut input_tokens = 0u64;
        let mut output_tokens = 0u64;
        for record in records {
            if let Some(usage) = &record.usage {
                input_tokens += usage.input_tokens.unwrap_or(0);
                output_tokens += usage.output_tokens.unwrap_or(0);
            }
        }
        let total_tokens = input_tokens + output_tokens;
        let input_cost_usd = (input_tokens as f64 / 1_000_000.0) * self.config.input_cost_per_1m;
        let output_cost_usd = (output_tokens as f64 / 1_000_000.0) * self.config.output_cost_per_1m;
        let total_cost_usd = input_cost_usd + output_cost_usd;
        let total_cost_jpy = total_cost_usd * self.config.usd_to_jpy_rate;
        CostSummary {
            input_tokens,
            output_tokens,
            total_tokens,
            input_cost_usd,
            output_cost_usd,
            total_cost_usd,
            total_cost_jpy,
            record_count: records.len(),
        }
    }

    /// Group records by their JST calendar date ("YYYY-MM-DD").
    /// Fails on the first record whose timestamp cannot be parsed.
    pub fn group_by_date(&self, records: &[TokenRecord]) -> Result<HashMap<String, Vec<TokenRecord>>> {
        let mut grouped: HashMap<String, Vec<TokenRecord>> = HashMap::new();
        for record in records {
            let date_str = self.extract_date_jst(&record.timestamp)?;
            grouped.entry(date_str).or_insert_with(Vec::new).push(record.clone());
        }
        Ok(grouped)
    }

    /// Convert a timestamp string to its date in JST ("YYYY-MM-DD").
    ///
    /// Accepts RFC3339, `%Y-%m-%dT%H:%M:%S%.fZ` and `%Y-%m-%d %H:%M:%S`;
    /// the offset-less forms are assumed to be UTC.
    fn extract_date_jst(&self, timestamp: &str) -> Result<String> {
        if timestamp.is_empty() {
            return Err(anyhow!("Empty timestamp"));
        }
        let dt = if let Ok(dt) = DateTime::parse_from_rfc3339(timestamp) {
            dt.with_timezone(&chrono_tz::Asia::Tokyo)
        } else if let Ok(naive) = chrono::NaiveDateTime::parse_from_str(timestamp, "%Y-%m-%dT%H:%M:%S%.fZ") {
            // BUGFIX: `DateTime::parse_from_str` can never succeed with a
            // format string that carries no UTC-offset specifier, so both
            // of the old fallback branches were dead code and such
            // timestamps always errored. Parse them as naive date-times
            // and assume UTC (matching the RFC3339 branch's source zone).
            naive.and_utc().with_timezone(&chrono_tz::Asia::Tokyo)
        } else if let Ok(naive) = chrono::NaiveDateTime::parse_from_str(timestamp, "%Y-%m-%d %H:%M:%S") {
            naive.and_utc().with_timezone(&chrono_tz::Asia::Tokyo)
        } else {
            return Err(anyhow!("Failed to parse timestamp: {}", timestamp));
        };
        Ok(dt.format("%Y-%m-%d").to_string())
    }

    /// Per-day summaries, sorted most recent date first.
    pub fn daily_breakdown(&self, records: &[TokenRecord]) -> Result<Vec<DailyBreakdown>> {
        let grouped = self.group_by_date(records)?;
        let mut breakdowns: Vec<DailyBreakdown> = grouped
            .into_iter()
            .map(|(date, date_records)| DailyBreakdown {
                date,
                summary: self.calculate_costs(&date_records),
            })
            .collect();
        // Most recent first.
        breakdowns.sort_by(|a, b| b.date.cmp(&a.date));
        Ok(breakdowns)
    }

    /// Keep only records on/after the start of the period: "today",
    /// "week" (last 7 days), "month" (last 30 days) or "all".
    /// Records whose timestamps cannot be parsed are silently dropped.
    pub fn filter_by_period(&self, records: &[TokenRecord], period: &str) -> Result<Vec<TokenRecord>> {
        let now = Local::now();
        let cutoff = match period {
            // Midnight of the current local day; and_hms_opt(0,0,0) on a
            // valid date cannot fail, so the unwrap is safe.
            "today" => now.date_naive().and_hms_opt(0, 0, 0).unwrap(),
            "week" => (now - chrono::Duration::days(7)).naive_local(),
            "month" => (now - chrono::Duration::days(30)).naive_local(),
            "all" => return Ok(records.to_vec()),
            _ => return Err(anyhow!("Invalid period: {}", period)),
        };
        let filtered: Vec<TokenRecord> = records
            .iter()
            .filter(|record| {
                if let Ok(date_str) = self.extract_date_jst(&record.timestamp) {
                    if let Ok(record_date) = chrono::NaiveDate::parse_from_str(&date_str, "%Y-%m-%d") {
                        return record_date.and_hms_opt(0, 0, 0).unwrap() >= cutoff;
                    }
                }
                false
            })
            .cloned()
            .collect();
        Ok(filtered)
    }
}
/// Route a `TokenCommands` subcommand to its handler.
///
/// Summary/Daily/Status are implemented; Analyze/Report/Cost are
/// placeholders that only print a notice.
pub async fn handle_tokens(command: TokenCommands) -> Result<()> {
    match command {
        TokenCommands::Summary { period, claude_dir, details, format } => {
            // Defaults: last week, rendered as a table.
            let period = period.unwrap_or_else(|| String::from("week"));
            let format = format.unwrap_or_else(|| String::from("table"));
            handle_summary(period, claude_dir, details, format).await
        }
        TokenCommands::Daily { days, claude_dir } => {
            handle_daily(days.unwrap_or(7), claude_dir).await
        }
        TokenCommands::Status { claude_dir } => handle_status(claude_dir).await,
        TokenCommands::Analyze { file } => {
            println!("Token analysis for file: {:?} - Not implemented yet", file);
            Ok(())
        }
        TokenCommands::Report { days } => {
            println!("Token report for {} days - Not implemented yet", days.unwrap_or(7));
            Ok(())
        }
        TokenCommands::Cost { month } => {
            println!("Token cost for month: {} - Not implemented yet", month.unwrap_or_else(|| "current".to_string()));
            Ok(())
        }
    }
}
/// Summarize token usage and estimated cost for `period`
/// ("today" / "week" / "month" / "all"), printed as a table or as JSON.
async fn handle_summary(
    period: String,
    claude_dir: Option<PathBuf>,
    details: bool,
    format: String,
) -> Result<()> {
    let analyzer = TokenAnalyzer::new();
    // Use the explicit directory when given, otherwise probe known locations.
    let data_dir = claude_dir
        .or_else(TokenAnalyzer::find_claude_data_dir)
        .ok_or_else(|| anyhow!("Claude Code data directory not found"))?;
    println!("Loading data from: {}", data_dir.display());

    let all_records = analyzer.parse_jsonl_files(&data_dir)?;
    if all_records.is_empty() {
        println!("No token usage data found");
        return Ok(());
    }

    let filtered_records = analyzer.filter_by_period(&all_records, &period)?;
    if filtered_records.is_empty() {
        println!("No data found for period: {}", period);
        return Ok(());
    }

    let summary = analyzer.calculate_costs(&filtered_records);

    match format.as_str() {
        "json" => println!("{}", serde_json::to_string_pretty(&summary)?),
        // Any other value (including "table") gets the table view; the old
        // `"table" | _` arm was redundant since `_` already matches "table".
        _ => print_summary_table(&summary, &period, details),
    }
    Ok(())
}
/// Print a per-day usage breakdown for the most recent `days` days.
async fn handle_daily(days: u32, claude_dir: Option<PathBuf>) -> Result<()> {
    let analyzer = TokenAnalyzer::new();
    // Explicit directory wins; otherwise probe the known locations.
    let data_dir = claude_dir
        .or_else(TokenAnalyzer::find_claude_data_dir)
        .ok_or_else(|| anyhow!("Claude Code data directory not found"))?;
    println!("Loading data from: {}", data_dir.display());

    let records = analyzer.parse_jsonl_files(&data_dir)?;
    if records.is_empty() {
        println!("No token usage data found");
        return Ok(());
    }

    // The breakdown is sorted most-recent-first, so taking `days` entries
    // keeps exactly the latest days.
    let breakdown = analyzer.daily_breakdown(&records)?;
    let limited: Vec<_> = breakdown.into_iter().take(days as usize).collect();
    print_daily_breakdown(&limited);
    Ok(())
}
/// Report where Claude Code data was found (if anywhere) plus topline totals.
async fn handle_status(claude_dir: Option<PathBuf>) -> Result<()> {
    let analyzer = TokenAnalyzer::new();
    let data_dir = claude_dir.or_else(TokenAnalyzer::find_claude_data_dir);
    if let Some(dir) = data_dir {
        println!("Claude Code data directory: {}", dir.display());
        // Basic stats over everything found in the directory.
        let records = analyzer.parse_jsonl_files(&dir)?;
        let summary = analyzer.calculate_costs(&records);
        println!("Total records: {}", summary.record_count);
        println!("Total tokens: {}", summary.total_tokens);
        println!("Estimated total cost: ${:.4} USD (¥{:.0} JPY)",
                 summary.total_cost_usd, summary.total_cost_jpy);
    } else {
        println!("Claude Code data directory not found");
        println!("Checked locations:");
        println!(" - ~/.claude");
        println!(" - ~/.config/claude");
        println!(" - ./.claude");
    }
    Ok(())
}
/// Render a `CostSummary` as an aligned console report.
/// `period` labels the header; `details` adds record count and per-record average.
fn print_summary_table(summary: &CostSummary, period: &str, details: bool) {
    println!("\n=== Claude Code Token Usage Summary ({}) ===", period);
    println!();
    println!("📊 Token Usage:");
    println!(" Input tokens: {:>12}", format_number(summary.input_tokens));
    println!(" Output tokens: {:>12}", format_number(summary.output_tokens));
    println!(" Total tokens: {:>12}", format_number(summary.total_tokens));
    println!();
    println!("💰 Cost Estimation:");
    println!(" Input cost: {:>12}", format!("${:.4} USD", summary.input_cost_usd));
    println!(" Output cost: {:>12}", format!("${:.4} USD", summary.output_cost_usd));
    println!(" Total cost: {:>12}", format!("${:.4} USD", summary.total_cost_usd));
    println!(" Total cost: {:>12}", format!("¥{:.0} JPY", summary.total_cost_jpy));
    println!();
    if details {
        println!("📈 Additional Details:");
        println!(" Records: {:>12}", format_number(summary.record_count as u64));
        // Guard against division by zero when there are no records.
        println!(" Avg per record:{:>12}", format!("${:.4} USD",
            if summary.record_count > 0 { summary.total_cost_usd / summary.record_count as f64 } else { 0.0 }));
        println!();
    }
    // NOTE(review): this footer hard-codes CostConfig's defaults; it will
    // disagree with a summary produced via TokenAnalyzer::with_config.
    println!("💡 Cost calculation based on:");
    println!(" Input: $3.00 per 1M tokens");
    println!(" Output: $15.00 per 1M tokens");
    println!(" USD to JPY: 150.0");
}
/// Print one section per day of the (already sorted) breakdown:
/// record count, token totals and estimated USD/JPY cost.
fn print_daily_breakdown(breakdown: &[DailyBreakdown]) {
    println!("\n=== Daily Token Usage Breakdown ===");
    println!();
    for daily in breakdown {
        println!("📅 {} (Records: {})", daily.date, daily.summary.record_count);
        println!(" Tokens: {} input + {} output = {} total",
                 format_number(daily.summary.input_tokens),
                 format_number(daily.summary.output_tokens),
                 format_number(daily.summary.total_tokens));
        println!(" Cost: ${:.4} USD (¥{:.0} JPY)",
                 daily.summary.total_cost_usd,
                 daily.summary.total_cost_jpy);
        println!();
    }
}
/// Insert thousands separators into `n` (e.g. 1234567 -> "1,234,567").
fn format_number(n: u64) -> String {
    let digits: Vec<char> = n.to_string().chars().collect();
    // Group digits in threes from the least-significant end, then stitch
    // the groups back together most-significant first.
    let mut groups: Vec<String> = digits
        .rchunks(3)
        .map(|chunk| chunk.iter().collect())
        .collect();
    groups.reverse();
    groups.join(",")
}
#[cfg(test)]
mod tests {
    use super::*;

    // Sums and record count over a single record, default pricing.
    #[test]
    fn test_cost_calculation() {
        let analyzer = TokenAnalyzer::new();
        let records = vec![
            TokenRecord {
                timestamp: "2024-01-01T10:00:00Z".to_string(),
                usage: Some(TokenUsage {
                    input_tokens: Some(1000),
                    output_tokens: Some(500),
                    total_tokens: Some(1500),
                }),
                model: Some("claude-3".to_string()),
                conversation_id: Some("test".to_string()),
            },
        ];
        let summary = analyzer.calculate_costs(&records);
        assert_eq!(summary.input_tokens, 1000);
        assert_eq!(summary.output_tokens, 500);
        assert_eq!(summary.total_tokens, 1500);
        assert_eq!(summary.record_count, 1);
    }

    // Only checks parseability: the resulting date shifts with UTC→JST.
    #[test]
    fn test_date_extraction() {
        let analyzer = TokenAnalyzer::new();
        let result = analyzer.extract_date_jst("2024-01-01T10:00:00Z");
        assert!(result.is_ok());
        // Note: The exact date depends on JST conversion
    }
}

View File

@@ -1,423 +0,0 @@
use std::collections::HashMap;
use serde::{Deserialize, Serialize};
use anyhow::{Result, Context};
use chrono::{DateTime, Utc};
use crate::config::Config;
use crate::persona::Persona;
use crate::relationship::{Relationship, RelationshipStatus};
/// One transmission attempt, kept in the controller's persisted history.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TransmissionLog {
    pub user_id: String,
    pub message: String,
    pub timestamp: DateTime<Utc>,
    pub transmission_type: TransmissionType,
    // Whether sending succeeded; the generators visible here always set true.
    pub success: bool,
    // Error description when `success` is false.
    pub error: Option<String>,
}
/// Why a transmission was initiated (Display renders the lower-case tag).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum TransmissionType {
    Autonomous,   // AI decided to send
    Scheduled,    // Time-based trigger
    Breakthrough, // Fortune breakthrough triggered
    Maintenance,  // Daily maintenance message
}
impl std::fmt::Display for TransmissionType {
    /// Lower-case tag used in logs and per-type statistic keys.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let tag = match self {
            TransmissionType::Autonomous => "autonomous",
            TransmissionType::Scheduled => "scheduled",
            TransmissionType::Breakthrough => "breakthrough",
            TransmissionType::Maintenance => "maintenance",
        };
        write!(f, "{}", tag)
    }
}
/// Decides when the persona should reach out to users and records what was
/// sent; history is loaded from / saved via storage derived from `Config`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TransmissionController {
    config: Config,
    // Chronological log of every transmission attempt.
    transmission_history: Vec<TransmissionLog>,
    // Set by check_autonomous_transmissions at the end of each pass.
    last_check: Option<DateTime<Utc>>,
}
impl TransmissionController {
pub fn new(config: Config) -> Result<Self> {
let transmission_history = Self::load_transmission_history(&config)?;
Ok(TransmissionController {
config,
transmission_history,
last_check: None,
})
}
/// Scan all relationships and generate autonomous transmissions for
/// eligible users.
///
/// A user qualifies when transmission is enabled, the relationship is not
/// broken, score >= threshold, nothing was sent in the last 24 hours, and
/// the probabilistic gate in `should_transmit_to_user` fires. Updates
/// `last_check` and saves the history before returning this pass's logs.
pub async fn check_autonomous_transmissions(&mut self, persona: &mut Persona) -> Result<Vec<TransmissionLog>> {
    let mut transmissions = Vec::new();
    let now = Utc::now();
    // Collect ids first so the later mutable use of `persona` does not
    // conflict with borrowing its relationship list.
    let eligible_user_ids: Vec<String> = {
        let relationships = persona.list_all_relationships();
        relationships.iter()
            .filter(|(_, rel)| rel.transmission_enabled && !rel.is_broken)
            .filter(|(_, rel)| rel.score >= rel.threshold)
            .map(|(id, _)| id.clone())
            .collect()
    };
    for user_id in eligible_user_ids {
        // Get fresh relationship data for each check
        if let Some(relationship) = persona.get_relationship(&user_id) {
            // Check if enough time has passed since last transmission
            if let Some(last_transmission) = relationship.last_transmission {
                let hours_since_last = (now - last_transmission).num_hours();
                if hours_since_last < 24 {
                    continue; // Skip if transmitted in last 24 hours
                }
            }
            // Check if conditions are met for autonomous transmission
            if self.should_transmit_to_user(&user_id, relationship, persona)? {
                let transmission = self.generate_autonomous_transmission(persona, &user_id).await?;
                transmissions.push(transmission);
            }
        }
    }
    self.last_check = Some(now);
    // Persist the updated history (implementation not shown here).
    self.save_transmission_history()?;
    Ok(transmissions)
}
/// Send "breakthrough" messages to Friend/CloseFriend relationships when
/// the persona's fortune peaks (breakthrough_triggered and fortune >= 9).
/// At most one breakthrough message per user per UTC day.
pub async fn check_breakthrough_transmissions(&mut self, persona: &mut Persona) -> Result<Vec<TransmissionLog>> {
    let mut transmissions = Vec::new();
    let state = persona.get_current_state()?;
    // Only trigger breakthrough transmissions if fortune is very high
    if !state.breakthrough_triggered || state.fortune_value < 9 {
        return Ok(transmissions);
    }
    // Get close relationships for breakthrough sharing
    let relationships = persona.list_all_relationships();
    let close_friends: Vec<_> = relationships.iter()
        .filter(|(_, rel)| matches!(rel.status, RelationshipStatus::Friend | RelationshipStatus::CloseFriend))
        .filter(|(_, rel)| rel.transmission_enabled && !rel.is_broken)
        .collect();
    for (user_id, _relationship) in close_friends {
        // Check if we haven't sent a breakthrough message today
        let today = chrono::Utc::now().date_naive();
        let already_sent_today = self.transmission_history.iter()
            .any(|log| {
                log.user_id == *user_id &&
                matches!(log.transmission_type, TransmissionType::Breakthrough) &&
                log.timestamp.date_naive() == today
            });
        if !already_sent_today {
            let transmission = self.generate_breakthrough_transmission(persona, user_id).await?;
            transmissions.push(transmission);
        }
    }
    Ok(transmissions)
}
/// Once per UTC day: run persona daily maintenance, then send a check-in
/// to up to three transmission-enabled relationships whose last
/// interaction was a week or more ago.
pub async fn check_maintenance_transmissions(&mut self, persona: &mut Persona) -> Result<Vec<TransmissionLog>> {
    let mut transmissions = Vec::new();
    let now = Utc::now();
    // Only send maintenance messages once per day
    let today = now.date_naive();
    let already_sent_today = self.transmission_history.iter()
        .any(|log| {
            matches!(log.transmission_type, TransmissionType::Maintenance) &&
            log.timestamp.date_naive() == today
        });
    if already_sent_today {
        return Ok(transmissions);
    }
    // Apply daily maintenance to persona
    persona.daily_maintenance()?;
    // Get relationships that might need a maintenance check-in
    let relationships = persona.list_all_relationships();
    let maintenance_candidates: Vec<_> = relationships.iter()
        .filter(|(_, rel)| rel.transmission_enabled && !rel.is_broken)
        .filter(|(_, rel)| {
            // Send maintenance to relationships that haven't been contacted in a while
            if let Some(last_interaction) = rel.last_interaction {
                let days_since = (now - last_interaction).num_days();
                days_since >= 7 // Haven't talked in a week
            } else {
                // Never interacted: no baseline, so skip entirely.
                false
            }
        })
        .take(3) // Limit to 3 maintenance messages per day
        .collect();
    for (user_id, _) in maintenance_candidates {
        let transmission = self.generate_maintenance_transmission(persona, user_id).await?;
        transmissions.push(transmission);
    }
    Ok(transmissions)
}
/// Decide (pseudo-randomly) whether to transmit to `user_id` right now.
///
/// Hard gates: transmission enabled, relationship not broken,
/// score >= threshold, and at least 24h since the last transmission.
/// Past those, the decision is probabilistic: a base probability by
/// relationship status (0.1 new … 0.6 close friend) shifted by the
/// current fortune, compared against a hash of (user_id, current epoch
/// second) used as a cheap deterministic RNG.
fn should_transmit_to_user(&self, user_id: &str, relationship: &Relationship, persona: &Persona) -> Result<bool> {
    // Basic transmission criteria
    if !relationship.transmission_enabled || relationship.is_broken {
        return Ok(false);
    }
    // Score must be above threshold
    if relationship.score < relationship.threshold {
        return Ok(false);
    }
    // Check transmission cooldown
    if let Some(last_transmission) = relationship.last_transmission {
        let hours_since = (Utc::now() - last_transmission).num_hours();
        if hours_since < 24 {
            return Ok(false);
        }
    }
    // Calculate transmission probability based on relationship strength
    let base_probability = match relationship.status {
        RelationshipStatus::New => 0.1,
        RelationshipStatus::Acquaintance => 0.2,
        RelationshipStatus::Friend => 0.4,
        RelationshipStatus::CloseFriend => 0.6,
        RelationshipStatus::Broken => 0.0,
    };
    // Modify probability based on fortune
    let state = persona.get_current_state()?;
    let fortune_modifier = (state.fortune_value as f64 - 5.0) / 10.0; // -0.4 to +0.5
    let final_probability = (base_probability + fortune_modifier).max(0.0).min(1.0);
    // Simple random check (in real implementation, this would be more sophisticated)
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};
    let mut hasher = DefaultHasher::new();
    user_id.hash(&mut hasher);
    Utc::now().timestamp().hash(&mut hasher);
    let hash = hasher.finish();
    // Map the hash onto [0, 1) in steps of 0.01 and compare.
    let random_value = (hash % 100) as f64 / 100.0;
    Ok(random_value < final_probability)
}
/// Build and record an autonomous transmission for `user_id`.
///
/// Uses recent memories as context for an AI-generated message; since
/// `generate_ai_message` currently always errors, one of the canned
/// fallback messages (picked by the current timestamp) is used instead.
/// The log is appended to the in-memory history (not saved here).
async fn generate_autonomous_transmission(&mut self, persona: &mut Persona, user_id: &str) -> Result<TransmissionLog> {
    let now = Utc::now();
    // Get recent memories for context
    let memories = persona.get_memories(user_id, 3);
    let context = if !memories.is_empty() {
        format!("Based on our recent conversations: {}", memories.join(", "))
    } else {
        "Starting a spontaneous conversation".to_string()
    };
    // Generate message using AI if available
    let message = match self.generate_ai_message(persona, user_id, &context, TransmissionType::Autonomous).await {
        Ok(msg) => msg,
        Err(_) => {
            // Fallback to simple messages
            let fallback_messages = [
                "Hey! How have you been?",
                "Just thinking about our last conversation...",
                "Hope you're having a good day!",
                "Something interesting happened today and it reminded me of you.",
            ];
            // Timestamp-based pick: varies over time, deterministic per second.
            let index = (now.timestamp() as usize) % fallback_messages.len();
            fallback_messages[index].to_string()
        }
    };
    let log = TransmissionLog {
        user_id: user_id.to_string(),
        message,
        timestamp: now,
        transmission_type: TransmissionType::Autonomous,
        success: true, // For now, assume success
        error: None,
    };
    self.transmission_history.push(log.clone());
    Ok(log)
}
/// Build and record a breakthrough transmission for `user_id`, falling
/// back to a canned message that embeds the current fortune value when
/// AI generation is unavailable. Appends to the in-memory history.
async fn generate_breakthrough_transmission(&mut self, persona: &mut Persona, user_id: &str) -> Result<TransmissionLog> {
    let now = Utc::now();
    let state = persona.get_current_state()?;
    let message = match self.generate_ai_message(persona, user_id, "Breakthrough moment - feeling inspired!", TransmissionType::Breakthrough).await {
        Ok(msg) => msg,
        Err(_) => {
            format!("Amazing day today! ⚡ Fortune is at {}/10 and I'm feeling incredibly inspired. Had to share this energy with you!", state.fortune_value)
        }
    };
    let log = TransmissionLog {
        user_id: user_id.to_string(),
        message,
        timestamp: now,
        transmission_type: TransmissionType::Breakthrough,
        success: true,
        error: None,
    };
    self.transmission_history.push(log.clone());
    Ok(log)
}
/// Build and record a maintenance check-in for `user_id`, using a canned
/// greeting whenever AI generation is unavailable. Appends the log to the
/// in-memory history and returns it.
async fn generate_maintenance_transmission(&mut self, persona: &mut Persona, user_id: &str) -> Result<TransmissionLog> {
    let now = Utc::now();
    // Prefer an AI-generated check-in; fall back to the canned greeting.
    let message = self
        .generate_ai_message(persona, user_id, "Maintenance check-in", TransmissionType::Maintenance)
        .await
        .unwrap_or_else(|_| {
            "Hey! It's been a while since we last talked. Just checking in to see how you're doing!".to_string()
        });
    let log = TransmissionLog {
        user_id: user_id.to_string(),
        message,
        timestamp: now,
        transmission_type: TransmissionType::Maintenance,
        success: true,
        error: None,
    };
    self.transmission_history.push(log.clone());
    Ok(log)
}
/// Generate a transmission message via the AI provider.
///
/// Currently a stub: no provider is wired in, so this always returns an
/// error, which every caller treats as the signal to use its fallback
/// text. When a provider is connected, build a system prompt along the
/// lines of: "You are initiating a {transmission_type} conversation.
/// Context: {context}. Keep the message casual, personal, and under 100
/// characters. Show genuine interest in the person." and ask the model
/// for the message body.
///
/// # Errors
/// Always errors until an AI provider is integrated.
async fn generate_ai_message(&self, _persona: &mut Persona, _user_id: &str, _context: &str, _transmission_type: TransmissionType) -> Result<String> {
    // The original built the system prompt eagerly and threw it away,
    // allocating on every call; skip that work until a provider exists.
    Err(anyhow::anyhow!("AI provider not available for transmission generation"))
}
/// IDs of relationships currently allowed to receive transmissions:
/// transmission is enabled, the bond is intact, and the relationship
/// score has reached its per-relationship threshold.
fn get_eligible_relationships(&self, persona: &Persona) -> Vec<String> {
    let mut eligible = Vec::new();
    for (id, rel) in persona.list_all_relationships().iter() {
        let allowed = rel.transmission_enabled && !rel.is_broken;
        if allowed && rel.score >= rel.threshold {
            eligible.push(id.clone());
        }
    }
    eligible
}
pub fn get_transmission_stats(&self) -> TransmissionStats {
let total_transmissions = self.transmission_history.len();
let successful_transmissions = self.transmission_history.iter()
.filter(|log| log.success)
.count();
let today = Utc::now().date_naive();
let today_transmissions = self.transmission_history.iter()
.filter(|log| log.timestamp.date_naive() == today)
.count();
let by_type = {
let mut counts = HashMap::new();
for log in &self.transmission_history {
*counts.entry(log.transmission_type.to_string()).or_insert(0) += 1;
}
counts
};
TransmissionStats {
total_transmissions,
successful_transmissions,
today_transmissions,
success_rate: if total_transmissions > 0 {
successful_transmissions as f64 / total_transmissions as f64
} else {
0.0
},
by_type,
}
}
/// The `limit` most recent transmission log entries, newest first.
/// (Both sorts are stable, so entries sharing a timestamp keep their
/// original relative order, matching the previous implementation.)
pub fn get_recent_transmissions(&self, limit: usize) -> Vec<&TransmissionLog> {
    let mut recent: Vec<&TransmissionLog> = self.transmission_history.iter().collect();
    recent.sort_by_key(|log| std::cmp::Reverse(log.timestamp));
    recent.truncate(limit);
    recent
}
/// Load the persisted transmission history from disk.
///
/// Returns an empty list when no history file exists yet.
///
/// # Errors
/// Fails when the file cannot be read or does not parse as a JSON array
/// of `TransmissionLog` entries.
fn load_transmission_history(config: &Config) -> Result<Vec<TransmissionLog>> {
    let path = config.transmission_file();
    if !path.exists() {
        return Ok(Vec::new());
    }
    let raw = std::fs::read_to_string(&path)
        .context("Failed to read transmission history file")?;
    serde_json::from_str(&raw)
        .context("Failed to parse transmission history file")
}
/// Persist the in-memory transmission history as pretty-printed JSON.
///
/// # Errors
/// Fails when serialization or the file write fails.
fn save_transmission_history(&self) -> Result<()> {
    let serialized = serde_json::to_string_pretty(&self.transmission_history)
        .context("Failed to serialize transmission history")?;
    let path = self.config.transmission_file();
    std::fs::write(path, serialized)
        .context("Failed to write transmission history file")?;
    Ok(())
}
/// Run all pending transmission checks and return a `(user_id, message)`
/// pair for every transmission that succeeded.
pub async fn check_and_send(&mut self) -> Result<Vec<(String, String)>> {
    // Clone the config so constructing `persona` doesn't keep a borrow
    // of `self` alive across the mutable calls below.
    let config = self.config.clone();
    let mut persona = Persona::new(&config)?;

    // Autonomous checks first, then breakthrough — same order as before.
    let autonomous = self.check_autonomous_transmissions(&mut persona).await?;
    let breakthrough = self.check_breakthrough_transmissions(&mut persona).await?;

    let sent = autonomous
        .into_iter()
        .chain(breakthrough)
        .filter(|log| log.success)
        .map(|log| (log.user_id, log.message))
        .collect();
    Ok(sent)
}
}
/// Aggregated, point-in-time view of the transmission history.
#[derive(Debug, Clone)]
pub struct TransmissionStats {
    /// Total number of log entries recorded.
    pub total_transmissions: usize,
    /// Entries whose `success` flag is set.
    pub successful_transmissions: usize,
    /// Entries timestamped on today's UTC date.
    pub today_transmissions: usize,
    /// successful / total, or 0.0 when the history is empty.
    pub success_rate: f64,
    /// Entry counts keyed by the transmission type's string form.
    pub by_type: HashMap<String, usize>,
}