Compare commits

42 Commits: chatgpt ... 62b91e5e5a

SHA1:
62b91e5e5a
4620d0862a
93b523b1ba
45c65e03b3
73c516ab28
e2e2758a83
5564db014a
6dadc41da7
64e519d719
ed6d6e0d47
582b983a32
b410c83605
334e17a53e
df86fb827e
5a441e847d
948bbc24ea
d4de0d4917
3487535e08
1755dc2bec
42c85fc820
4a441279fb
e7e57b7b4b
6081ed069f
8c0961ab2f
c9005f5240
cba52b6171
b642588696
ebd2582b92
79d1e1943f
76d90c7cf7
06fb70fffa
62f941a958
98ca92d85d
1c555a706b
7c3b05501f
a7b61fe07d
9866da625d
797ae7ef69
abd2ad79bd
979e55cfce
cd25af7bf0
58e202fa1e
.gitignore (vendored, 31 changed lines)
@@ -1,7 +1,24 @@
**target
**.lock
output.json
config/*.db
aigpt
mcp/scripts/__*
data
# Rust
target/
Cargo.lock

# Database files
*.db
*.db-shm
*.db-wal

# IDE
.idea/
.vscode/
*.swp
*.swo

# OS
.DS_Store
Thumbs.db

# Logs
*.log
json
gpt
.claude
Cargo.toml (45 changed lines)
@@ -2,14 +2,47 @@
name = "aigpt"
version = "0.1.0"
edition = "2021"
authors = ["syui"]
description = "Simple memory storage for Claude with MCP"


[[bin]]
name = "aigpt"
path = "src/main.rs"

[[bin]]
name = "memory-mcp"
path = "src/bin/mcp_server.rs"

[[bin]]
name = "memory-mcp-extended"
path = "src/bin/mcp_server_extended.rs"

[dependencies]
# CLI and async
clap = { version = "4.5", features = ["derive"] }
tokio = { version = "1.40", features = ["full"] }

# JSON and serialization
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

# Date/time and UUID
chrono = { version = "0.4", features = ["serde"] }
seahorse = "*"
rusqlite = { version = "0.29", features = ["serde_json"] }
shellexpand = "*"
fs_extra = "1.3"
rand = "0.9.1"
reqwest = { version = "*", features = ["blocking", "json"] }
uuid = { version = "1.10", features = ["v4"] }

# Error handling and utilities
anyhow = "1.0"
dirs = "5.0"

# Extended features (optional)
reqwest = { version = "0.11", features = ["json"], optional = true }
scraper = { version = "0.18", optional = true }
openai = { version = "1.1", optional = true }

[features]
default = []
extended = ["semantic-search", "ai-analysis", "web-integration"]
semantic-search = ["openai"]
ai-analysis = ["openai"]
web-integration = ["reqwest", "scraper"]
README.md (206 changed lines)
@@ -1,47 +1,177 @@
# ai `gpt`
# aigpt - Claude Memory MCP Server

ai x Communication
A simple memory storage system for Claude Desktop/Code, modeled on ChatGPT's memory feature.

## Overview
## Features

`ai.gpt` runs on the AGE system.
- **Memory CRUD operations**: create, update, delete, and search memories
- **ChatGPT JSON import**: extract memories from ChatGPT conversation history
- **stdio MCP implementation**: straightforward integration with Claude Desktop/Code
- **JSON file storage**: simple file-based data persistence

This is a prototype of an autonomous, relationship-driven AI system based on the axes of "Personality × Relationship × External Environment × Time Variation."
## Installation

The parameters of "Send Permission," "Send Timing," and "Send Content" are determined by the factors of "Personality x Relationship x External Environment x Time Variation."

## Integration

`ai.ai` runs on the AIM system, which is designed to read human emotions.

- AIM focuses on the axis of personality and ethics (AI's consciousness structure)
- AGE focuses on the axis of behavior and relationships (AI's autonomy and behavior)

> When these two systems work together, it creates a world where users can feel like they are "growing together with AI."

## mcp

```sh
$ ollama run syui/ai
1. Install Rust (if you have not already):
```bash
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
```

```sh
$ cargo build
$ ./aigpt mcp setup
$ ./aigpt mcp chat "hello world!"
$ ./aigpt mcp chat "hello world!" --host http://localhost:11434 --model syui/ai

---
# openai api
$ ./aigpt mcp set-api --api sk-abc123
$ ./aigpt mcp chat "こんにちは" -p openai -m gpt-4o-mini

---
# Have the AI read files tracked by git
./aigpt mcp chat --host http://localhost:11434 --repo git@git.syui.ai:ai/gpt
**Improvement proposals and next steps:**
1. **Major improvements to README.md:**
**Next steps:**
1. **Create README.md:** Create the README.md file following the instructions in 1.
2. Build the project:
```bash
cargo build --release
```

3. Copy the binary somewhere on your PATH (optional):
```bash
cp target/release/aigpt $HOME/.cargo/bin/
```

4. Add it to Claude Code/Desktop

```sh
# For Claude Code
claude mcp add aigpt $HOME/.cargo/bin/aigpt server

# or
claude mcp add aigpt $HOME/.cargo/bin/aigpt serve
```

## Usage

### Show help
```bash
aigpt --help
```

### Run as an MCP server
```bash
# Start the MCP server (either form works)
aigpt server
aigpt serve
```

### Import ChatGPT conversations
```bash
# Import ChatGPT conversations.json
aigpt import path/to/conversations.json
```

## Configuring Claude Desktop/Code

1. Open the Claude Desktop config file:
   - macOS: `~/Library/Application Support/Claude/claude_desktop_config.json`
   - Windows: `%APPDATA%\Claude\claude_desktop_config.json`
   - Linux: `~/.config/Claude/claude_desktop_config.json`

2. Add the following configuration:
```json
{
  "mcpServers": {
    "aigpt": {
      "command": "/Users/syui/.cargo/bin/aigpt",
      "args": ["server"]
    }
  }
}
```

## Provided MCP tools

1. **create_memory** - create a new memory
2. **update_memory** - update an existing memory
3. **delete_memory** - delete a memory
4. **search_memories** - search memories
5. **list_conversations** - list imported conversations

## Tool usage examples

Use the tools from Claude Desktop/Code like this:

### Create a memory
```
Use the MCP tool to create a memory saying "今日は良い天気です"
```

### Search memories
```
Use the MCP tool to search memories about "天気"
```

### List conversations
```
Use the MCP tool to list the imported conversations
```

## Data storage

- Default path: `~/.config/syui/ai/gpt/memory.json`
- Data is stored in a JSON file
- The directory and file are created automatically

### Data structure

```json
{
  "memories": {
    "uuid": {
      "id": "uuid",
      "content": "メモリーの内容",
      "created_at": "2024-01-01T00:00:00Z",
      "updated_at": "2024-01-01T00:00:00Z"
    }
  },
  "conversations": {
    "conversation_id": {
      "id": "conversation_id",
      "title": "会話のタイトル",
      "created_at": "2024-01-01T00:00:00Z",
      "message_count": 10
    }
  }
}
```

## Development

```bash
# Run in development mode
cargo run -- server

# Test the ChatGPT import
cargo run -- import json/conversations.json

# Run tests
cargo test

# Format
cargo fmt

# Lint
cargo clippy
```

## Troubleshooting

### The MCP server does not start
```bash
# Check that the binary exists
ls -la ~/.cargo/bin/aigpt

# Test it manually
echo '{"jsonrpc": "2.0", "method": "tools/list", "id": 1}' | aigpt server
```

### Claude Desktop cannot find the tools
1. Fully restart Claude Desktop
2. Check that the path in the config file is correct
3. Check the log file: `~/Library/Logs/Claude/mcp-server-aigpt.log`

### Import fails
```bash
# Inspect the JSON file format
head -100 conversations.json | jq '.[0] | keys'
```

## License

MIT
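The memory.json layout documented in the README's "Data structure" section maps naturally onto serde structs. A minimal sketch (the field names follow the JSON above; the struct names are illustrative and not taken from the repository's source):

```rust
use std::collections::HashMap;

use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};

// Sketch of the memory.json layout from the README's "Data structure" section.
// Only the field names come from the document; struct names are assumptions.
#[derive(Serialize, Deserialize)]
struct Memory {
    id: String,
    content: String,
    created_at: DateTime<Utc>,
    updated_at: DateTime<Utc>,
}

#[derive(Serialize, Deserialize)]
struct Conversation {
    id: String,
    title: String,
    created_at: DateTime<Utc>,
    message_count: u32,
}

#[derive(Serialize, Deserialize)]
struct MemoryStore {
    memories: HashMap<String, Memory>,
    conversations: HashMap<String, Conversation>,
}

fn main() {
    // Serializing an empty store reproduces the top-level shape shown in the README.
    let store = MemoryStore {
        memories: HashMap::new(),
        conversations: HashMap::new(),
    };
    println!("{}", serde_json::to_string_pretty(&store).unwrap());
}
```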
claude.md (new file, 95 lines)
@@ -0,0 +1,95 @@
# Building a memory store for Claude

Build a memory feature usable from Claude Desktop and Claude Code via MCP.

1. Modeled on ChatGPT's memory feature
2. Can import ChatGPT JSON from @json/
3. Written in Rust

## Auto-execution settings
```json
{
  "mcp_auto_execute": true,
  "memory_auto_save": true,
  "memory_auto_search": true,
  "trigger_sensitivity": "high"
}
```

## Automatic memory-save rules (enhanced)

**Execute immediately** - the moment any of the following appears in the conversation, automatically run the create_memory MCP tool:

### 1. **User personal information**
**Trigger words**: 名前、誕生日、住所、年齢、職業、家族、出身
- Basic info: name, birthday, address, age, gender
- Relationships: family structure, partner, friendships
- Preferences: likes, dislikes, hobbies, music, movies, books
- Habits: daily routines, schedules
- Values: beliefs, ways of thinking, outlook on life

### 2. **Important decisions**
**Trigger words**: 決めた、決定、方針、計画、予定、目標
- Project policy decisions
- Technology choices
- Configuration and environment changes
- Future roadmap
- Division of work and roles

### 3. **Technical solutions**
**Trigger words**: 解決、修正、対処、設定、インストール、手順
- How errors were resolved
- Useful commands and scripts
- Setup and installation procedures
- Debugging techniques
- Optimization techniques

### 4. **Learning and discoveries**
**Trigger words**: 学んだ、わかった、発見、理解、気づき
- Understanding of new knowledge and concepts
- How to use tools and libraries
- Best practices
- Lessons learned from failures

## Automatic memory-search rules (enhanced)

**Run at conversation start** - run search_memories to retrieve context

**Execute immediately** - automatically run the search_memories MCP tool when:

### 1. **Past-reference keywords detected**
**Trigger words**: 前に、以前、昔、過去、先ほど、さっき、この間
- "前に話した〜"
- "以前設定した〜"
- "昔やった〜"

### 2. **Memory-recall keywords**
**Trigger words**: 覚えている、記録、メモ、保存、履歴
- "覚えていますか?"
- "記録していた〜"
- "メモした〜"

### 3. **Preference and configuration checks**
**Trigger words**: 好み、設定、環境、構成、preferences
- When the user's preferences need to be confirmed
- When past settings need to be referenced
- When the environment configuration needs to be checked

### 4. **Unclear references**
- When the user makes a non-specific reference
- Vague expressions such as 「あれ」「それ」「例のやつ」
- When context suggests past information is needed

## Auto-execution timing

1. **At conversation start**: run search_memories to retrieve context
2. **In real time**: run the MCP tool immediately after a trigger word is detected
3. **At conversation end**: save important information with create_memory
4. **Periodically**: in long conversations, consolidate memories at the midpoint

## Error handling

- If the MCP tools are unavailable, continue the conversation normally
- Notify the user when saving a memory fails
- Handle empty search results gracefully
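The trigger-word rules above and the `trigger_words` maps in the configs below are plain keyword lists. A minimal sketch of how such substring-based detection could look on the Rust side (the function name, categories, and return type are illustrative assumptions, not code from this repository):

```rust
/// Minimal sketch: substring-based trigger detection over one conversation turn.
/// Returns the categories whose keyword lists match the message.
fn detect_triggers<'a>(
    message: &str,
    trigger_words: &'a [(&'a str, &'a [&'a str])],
) -> Vec<&'a str> {
    trigger_words
        .iter()
        .filter(|(_, words)| words.iter().any(|w| message.contains(w)))
        .map(|(category, _)| *category)
        .collect()
}

fn main() {
    // Two of the categories from the configs, with a few of their keywords.
    let triggers: &[(&str, &[&str])] = &[
        ("personal_info", &["名前", "誕生日", "趣味"]),
        ("past_reference", &["前に", "以前", "さっき"]),
    ];
    // This message should fire both categories.
    let hit = detect_triggers("前に話した名前を覚えてる?", triggers);
    println!("{:?}", hit);
}
```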
docs/README_CONFIG.md (new file, 125 lines)
@@ -0,0 +1,125 @@
# Claude Memory MCP configuration guide

## Choosing a mode

### Simple Mode
- Basic memory features only
- Lightweight and fast
- Minimal dependencies

### Extended Mode
- AI analysis
- Semantic search
- Web integration
- Advanced insight extraction

## Building and running

### Simple Mode
```bash
# Start the MCP server
cargo run --bin memory-mcp

# Run the CLI
cargo run --bin aigpt -- create "メモリー内容"
```

### Extended Mode
```bash
# Start the MCP server
cargo run --bin memory-mcp-extended --features extended

# Run the CLI
cargo run --bin aigpt-extended --features extended -- create "メモリー内容" --analyze
```

## Placing the configuration files

### Simple Mode

#### Claude Desktop
```bash
# macOS
cp claude_desktop_config.json ~/.config/claude-desktop/claude_desktop_config.json

# Windows
cp claude_desktop_config.json %APPDATA%\Claude\claude_desktop_config.json
```

#### Claude Code
```bash
# Project root or global configuration
cp claude_code_config.json .claude/config.json
# or
cp claude_code_config.json ~/.claude/config.json
```

### Extended Mode

#### Claude Desktop
```bash
# macOS
cp claude_desktop_config_extended.json ~/.config/claude-desktop/claude_desktop_config.json

# Windows
cp claude_desktop_config_extended.json %APPDATA%\Claude\claude_desktop_config.json
```

#### Claude Code
```bash
# Project root or global configuration
cp claude_code_config_extended.json .claude/config.json
# or
cp claude_code_config_extended.json ~/.claude/config.json
```

## Environment variables

```bash
export MEMORY_AUTO_EXECUTE=true
export MEMORY_AUTO_SAVE=true
export MEMORY_AUTO_SEARCH=true
export TRIGGER_SENSITIVITY=high
export MEMORY_DB_PATH=~/.claude/memory.db
```

## Configuration options

### auto_execute
- `true`: run the MCP tools automatically
- `false`: manual execution only

### trigger_sensitivity
- `high`: react to many keywords
- `medium`: moderate reaction
- `low`: only explicit keywords

### max_memories
Maximum number of stored memories.

### search_limit
Maximum number of search results shown.

## Customization

Trigger words can be customized in the `trigger_words` section:

```json
"trigger_words": {
  "custom_category": ["カスタム", "キーワード", "リスト"]
}
```

## Troubleshooting

1. If the MCP server does not start:
   - Check that Rust is installed
   - Check that `cargo build --release` succeeds

2. If tools are not executed automatically:
   - Check that the environment variables are set correctly
   - Check that a trigger word is actually present

3. If memories are not saved:
   - Check that the database file path is correct
   - Check that you have write permission
docs/claude_code_config.json (new file, 58 lines)
@@ -0,0 +1,58 @@
{
  "mcpServers": {
    "memory": {
      "command": "cargo",
      "args": ["run", "--release", "--bin", "memory-mcp"],
      "cwd": "/Users/syui/ai/ai/gpt",
      "env": {
        "MEMORY_AUTO_EXECUTE": "true",
        "MEMORY_AUTO_SAVE": "true",
        "MEMORY_AUTO_SEARCH": "true",
        "TRIGGER_SENSITIVITY": "high",
        "MEMORY_DB_PATH": "~/.claude/memory.db"
      }
    }
  },
  "tools": {
    "memory": {
      "enabled": true,
      "auto_execute": true
    }
  },
  "workspace": {
    "memory_integration": true,
    "auto_save_on_file_change": true,
    "auto_search_on_context_switch": true
  },
  "memory": {
    "auto_execute": true,
    "auto_save": true,
    "auto_search": true,
    "trigger_sensitivity": "high",
    "max_memories": 10000,
    "search_limit": 50,
    "session_memory": true,
    "cross_session_memory": true,
    "trigger_words": {
      "personal_info": ["名前", "誕生日", "住所", "年齢", "職業", "家族", "出身", "好き", "嫌い", "趣味"],
      "decisions": ["決めた", "決定", "方針", "計画", "予定", "目標"],
      "solutions": ["解決", "修正", "対処", "設定", "インストール", "手順"],
      "learning": ["学んだ", "わかった", "発見", "理解", "気づき"],
      "past_reference": ["前に", "以前", "昔", "過去", "先ほど", "さっき", "この間"],
      "memory_recall": ["覚えている", "記録", "メモ", "保存", "履歴"],
      "preferences": ["好み", "設定", "環境", "構成", "preferences"],
      "vague_reference": ["あれ", "それ", "例のやつ"]
    }
  },
  "hooks": {
    "on_conversation_start": [
      "search_memories --limit 10 --recent"
    ],
    "on_trigger_word": [
      "auto_execute_memory_tools"
    ],
    "on_conversation_end": [
      "save_important_memories"
    ]
  }
}
docs/claude_code_config_extended.json (new file, 81 lines)
@@ -0,0 +1,81 @@
{
  "mcpServers": {
    "memory-extended": {
      "command": "cargo",
      "args": ["run", "--bin", "memory-mcp-extended", "--features", "extended"],
      "cwd": "/Users/syui/ai/ai/gpt",
      "env": {
        "MEMORY_AUTO_EXECUTE": "true",
        "MEMORY_AUTO_SAVE": "true",
        "MEMORY_AUTO_SEARCH": "true",
        "TRIGGER_SENSITIVITY": "high",
        "MEMORY_DB_PATH": "~/.claude/memory.db",
        "OPENAI_API_KEY": "${OPENAI_API_KEY}"
      }
    }
  },
  "tools": {
    "memory": {
      "enabled": true,
      "auto_execute": true,
      "mode": "extended"
    }
  },
  "workspace": {
    "memory_integration": true,
    "auto_save_on_file_change": true,
    "auto_search_on_context_switch": true,
    "ai_analysis_on_code_review": true,
    "web_integration_for_docs": true
  },
  "memory": {
    "mode": "extended",
    "auto_execute": true,
    "auto_save": true,
    "auto_search": true,
    "trigger_sensitivity": "high",
    "max_memories": 10000,
    "search_limit": 50,
    "session_memory": true,
    "cross_session_memory": true,
    "features": {
      "ai_analysis": true,
      "semantic_search": true,
      "web_integration": true,
      "sentiment_analysis": true,
      "pattern_recognition": true,
      "code_analysis": true,
      "documentation_import": true
    },
    "trigger_words": {
      "personal_info": ["名前", "誕生日", "住所", "年齢", "職業", "家族", "出身", "好き", "嫌い", "趣味"],
      "decisions": ["決めた", "決定", "方針", "計画", "予定", "目標"],
      "solutions": ["解決", "修正", "対処", "設定", "インストール", "手順"],
      "learning": ["学んだ", "わかった", "発見", "理解", "気づき"],
      "past_reference": ["前に", "以前", "昔", "過去", "先ほど", "さっき", "この間"],
      "memory_recall": ["覚えている", "記録", "メモ", "保存", "履歴"],
      "preferences": ["好み", "設定", "環境", "構成", "preferences"],
      "vague_reference": ["あれ", "それ", "例のやつ"],
      "web_content": ["URL", "リンク", "サイト", "ページ", "記事", "ドキュメント"],
      "analysis_request": ["分析", "パターン", "傾向", "インサイト", "統計", "レビュー"],
      "code_related": ["関数", "クラス", "メソッド", "変数", "バグ", "リファクタリング"]
    }
  },
  "hooks": {
    "on_conversation_start": [
      "search_memories --limit 10 --recent --semantic"
    ],
    "on_trigger_word": [
      "auto_execute_memory_tools --with-analysis"
    ],
    "on_conversation_end": [
      "save_important_memories --with-insights"
    ],
    "on_code_change": [
      "analyze_code_patterns --auto-save"
    ],
    "on_web_reference": [
      "import_webpage --auto-categorize"
    ]
  }
}
docs/claude_desktop_config.json (new file, 34 lines)
@@ -0,0 +1,34 @@
{
  "mcpServers": {
    "memory": {
      "command": "cargo",
      "args": ["run", "--release", "--bin", "memory-mcp"],
      "cwd": "/Users/syui/ai/ai/gpt",
      "env": {
        "MEMORY_AUTO_EXECUTE": "true",
        "MEMORY_AUTO_SAVE": "true",
        "MEMORY_AUTO_SEARCH": "true",
        "TRIGGER_SENSITIVITY": "high",
        "MEMORY_DB_PATH": "~/.claude/memory.db"
      }
    }
  },
  "memory": {
    "auto_execute": true,
    "auto_save": true,
    "auto_search": true,
    "trigger_sensitivity": "high",
    "max_memories": 10000,
    "search_limit": 50,
    "trigger_words": {
      "personal_info": ["名前", "誕生日", "住所", "年齢", "職業", "家族", "出身", "好き", "嫌い", "趣味"],
      "decisions": ["決めた", "決定", "方針", "計画", "予定", "目標"],
      "solutions": ["解決", "修正", "対処", "設定", "インストール", "手順"],
      "learning": ["学んだ", "わかった", "発見", "理解", "気づき"],
      "past_reference": ["前に", "以前", "昔", "過去", "先ほど", "さっき", "この間"],
      "memory_recall": ["覚えている", "記録", "メモ", "保存", "履歴"],
      "preferences": ["好み", "設定", "環境", "構成", "preferences"],
      "vague_reference": ["あれ", "それ", "例のやつ"]
    }
  }
}
docs/claude_desktop_config_extended.json (new file, 45 lines)
@@ -0,0 +1,45 @@
{
  "mcpServers": {
    "memory-extended": {
      "command": "cargo",
      "args": ["run", "--bin", "memory-mcp-extended", "--features", "extended"],
      "cwd": "/Users/syui/ai/ai/gpt",
      "env": {
        "MEMORY_AUTO_EXECUTE": "true",
        "MEMORY_AUTO_SAVE": "true",
        "MEMORY_AUTO_SEARCH": "true",
        "TRIGGER_SENSITIVITY": "high",
        "MEMORY_DB_PATH": "~/.claude/memory.db",
        "OPENAI_API_KEY": "${OPENAI_API_KEY}"
      }
    }
  },
  "memory": {
    "mode": "extended",
    "auto_execute": true,
    "auto_save": true,
    "auto_search": true,
    "trigger_sensitivity": "high",
    "max_memories": 10000,
    "search_limit": 50,
    "features": {
      "ai_analysis": true,
      "semantic_search": true,
      "web_integration": true,
      "sentiment_analysis": true,
      "pattern_recognition": true
    },
    "trigger_words": {
      "personal_info": ["名前", "誕生日", "住所", "年齢", "職業", "家族", "出身", "好き", "嫌い", "趣味"],
      "decisions": ["決めた", "決定", "方針", "計画", "予定", "目標"],
      "solutions": ["解決", "修正", "対処", "設定", "インストール", "手順"],
      "learning": ["学んだ", "わかった", "発見", "理解", "気づき"],
      "past_reference": ["前に", "以前", "昔", "過去", "先ほど", "さっき", "この間"],
      "memory_recall": ["覚えている", "記録", "メモ", "保存", "履歴"],
      "preferences": ["好み", "設定", "環境", "構成", "preferences"],
      "vague_reference": ["あれ", "それ", "例のやつ"],
      "web_content": ["URL", "リンク", "サイト", "ページ", "記事"],
      "analysis_request": ["分析", "パターン", "傾向", "インサイト", "統計"]
    }
  }
}
example.json (deleted, 40 lines)
@@ -1,40 +0,0 @@
{
  "personality": {
    "kind": "positive",
    "strength": 0.8
  },
  "relationship": {
    "trust": 0.2,
    "intimacy": 0.6,
    "curiosity": 0.5,
    "threshold": 1.5
  },
  "environment": {
    "luck_today": 0.9,
    "luck_history": [0.9, 0.9, 0.9],
    "level": 1
  },
  "messaging": {
    "enabled": true,
    "schedule_time": "08:00",
    "decay_rate": 0.1,
    "templates": [
      "おはよう!今日もがんばろう!",
      "ねえ、話したいことがあるの。"
    ],
    "sent_today": false,
    "last_sent_date": null
  },
  "last_interaction": "2025-05-21T23:15:00Z",
  "memory": {
    "recent_messages": [],
    "long_term_notes": []
  },
  "metrics": {
    "trust": 0.5,
    "intimacy": 0.5,
    "energy": 0.5,
    "can_send": true,
    "last_updated": "2025-05-21T15:52:06.590981Z"
  }
}
gpt.json (deleted, 1 line)
@@ -1 +0,0 @@
{ "system_name": "AGE system", "full_name": "Autonomous Generative Entity", "description": "人格・関係性・環境・時間に基づき、AIが自律的にユーザーにメッセージを送信する自律人格システム。AIM systemと連携して、自然な会話や気づきをもたらす。", "core_components": { "personality": { "type": "enum", "variants": ["positive", "negative", "logical", "emotional", "mixed"], "parameters": { "message_trigger_style": "運勢や関係性による送信傾向", "decay_rate_modifier": "関係性スコアの時間減衰への影響" } }, "relationship": { "parameters": ["trust", "affection", "intimacy"], "properties": { "persistent": true, "hidden": true, "irreversible": false, "decay_over_time": true }, "decay_function": "exp(-t / strength)" }, "environment": { "daily_luck": { "type": "float", "range": [0.1, 1.0], "update": "daily", "streak_mechanism": { "trigger": "min_or_max_luck_3_times_in_a_row", "effect": "personality_strength_roll", "chance": 0.5 } } }, "memory": { "long_term_memory": "user_relationship_log", "short_term_context": "recent_interactions", "usage_in_generation": true }, "message_trigger": { "condition": { "relationship_threshold": { "trust": 0.8, "affection": 0.6 }, "time_decay": true, "environment_luck": "personality_dependent" }, "timing": { "based_on": ["time_of_day", "personality", "recent_interaction"], "modifiers": { "emotional": "morning or night", "logical": "daytime" } } }, "message_generation": { "style_variants": ["thought", "casual", "encouragement", "watchful"], "influenced_by": ["personality", "relationship", "daily_luck", "memory"], "llm_integration": true }, "state_transition": { "states": ["idle", "ready", "sending", "cooldown"], "transitions": { "ready_if": "thresholds_met", "sending_if": "timing_matched", "cooldown_after": "message_sent" } } }, "extensions": { "persistence": { "database": "sqlite", "storage_items": ["relationship", "personality_level", "daily_luck_log"] }, "api": { "llm": "openai / local LLM", "mode": "rust_cli", "external_event_trigger": true }, "scheduler": { "async_event_loop": true, "interval_check": 3600, "time_decay_check": true }, "integration_with_aim": { "input_from_aim": ["intent_score", "motivation_score"], "usage": "trigger_adjustment, message_personalization" } }, "note": "AGE systemは“話しかけてくるAI”の人格として機能し、AIMによる心の状態評価と連動して、プレイヤーと深い関係を築いていく存在となる。" }
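The AGE spec above gives the relationship decay as `exp(-t / strength)`. A minimal sketch of that formula in Rust (the function and parameter names are illustrative, not taken from the repository):

```rust
/// Sketch of the decay curve from gpt.json: score(t) = score(0) * exp(-t / strength).
/// `t_days` is elapsed time in days; a larger `strength` makes the relationship fade more slowly.
fn decayed_score(initial: f64, t_days: f64, strength: f64) -> f64 {
    initial * (-t_days / strength).exp()
}

fn main() {
    // With strength = 10, a score of 0.8 falls to roughly 0.59 after 3 days.
    println!("{:.3}", decayed_score(0.8, 3.0, 10.0));
}
```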
img/ai_r.png (binary, deleted, not shown; size before: 1.8 MiB)
img/image.png (binary, deleted, not shown; size before: 1.8 MiB)
mcp/cli.py (deleted, 28 lines)
@@ -1,28 +0,0 @@
# cli.py
import sys
import subprocess
from pathlib import Path

SCRIPT_DIR = Path.home() / ".config" / "aigpt" / "mcp" / "scripts"
def run_script(name):
    script_path = SCRIPT_DIR / f"{name}.py"
    if not script_path.exists():
        print(f"❌ スクリプトが見つかりません: {script_path}")
        sys.exit(1)

    args = sys.argv[2:]  # ← "ask" の後の引数を取り出す
    result = subprocess.run(["python", str(script_path)] + args, capture_output=True, text=True)
    print(result.stdout)
    if result.stderr:
        print(result.stderr)
def main():
    if len(sys.argv) < 2:
        print("Usage: mcp <script>")
        return

    command = sys.argv[1]

    if command in {"summarize", "ask", "setup", "server"}:
        run_script(command)
    else:
        print(f"❓ 未知のコマンド: {command}")
mcp/scripts/ask.py (deleted, 198 lines)
@@ -1,198 +0,0 @@
## scripts/ask.py
import sys
import json
import requests
from config import load_config
from datetime import datetime, timezone

def build_payload_openai(cfg, message: str):
    return {
        "model": cfg["model"],
        "tools": [
            {
                "type": "function",
                "function": {
                    "name": "ask_message",
                    "description": "過去の記憶を検索します",
                    "parameters": {
                        "type": "object",
                        "properties": {
                            "query": {
                                "type": "string",
                                "description": "検索したい語句"
                            }
                        },
                        "required": ["query"]
                    }
                }
            }
        ],
        "tool_choice": "auto",
        "messages": [
            {"role": "system", "content": "あなたは親しみやすいAIで、必要に応じて記憶から情報を検索して応答します。"},
            {"role": "user", "content": message}
        ]
    }

def build_payload_mcp(message: str):
    return {
        "tool": "ask_message",  # MCPサーバー側で定義されたツール名
        "input": {
            "message": message
        }
    }

def build_payload_openai(cfg, message: str):
    return {
        "model": cfg["model"],
        "messages": [
            {"role": "system", "content": "あなたは思いやりのあるAIです。"},
            {"role": "user", "content": message}
        ],
        "temperature": 0.7
    }

def call_mcp(cfg, message: str):
    payload = build_payload_mcp(message)
    headers = {"Content-Type": "application/json"}
    response = requests.post(cfg["url"], headers=headers, json=payload)
    response.raise_for_status()
    return response.json().get("output", {}).get("response", "❓ 応答が取得できませんでした")

def call_openai(cfg, message: str):
    # ツール定義
    tools = [
        {
            "type": "function",
            "function": {
                "name": "memory",
                "description": "記憶を検索する",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "query": {
                            "type": "string",
                            "description": "検索する語句"
                        }
                    },
                    "required": ["query"]
                }
            }
        }
    ]

    # 最初のメッセージ送信
    payload = {
        "model": cfg["model"],
        "messages": [
            {"role": "system", "content": "あなたはAIで、必要に応じてツールmemoryを使って記憶を検索します。"},
            {"role": "user", "content": message}
        ],
        "tools": tools,
        "tool_choice": "auto"
    }

    headers = {
        "Authorization": f"Bearer {cfg['api_key']}",
        "Content-Type": "application/json",
    }

    res1 = requests.post(cfg["url"], headers=headers, json=payload)
    res1.raise_for_status()
    result = res1.json()

    # 🧠 tool_call されたか確認
    if "tool_calls" in result["choices"][0]["message"]:
        tool_call = result["choices"][0]["message"]["tool_calls"][0]
        if tool_call["function"]["name"] == "memory":
            args = json.loads(tool_call["function"]["arguments"])
            query = args.get("query", "")
            print(f"🛠️ ツール実行: memory(query='{query}')")

            # MCPエンドポイントにPOST
            memory_res = requests.post("http://127.0.0.1:5000/memory/search", json={"query": query})
            memory_json = memory_res.json()
            tool_output = memory_json.get("result", "なし")

            # tool_outputをAIに返す
            followup = {
                "model": cfg["model"],
                "messages": [
                    {"role": "system", "content": "あなたはAIで、必要に応じてツールmemoryを使って記憶を検索します。"},
                    {"role": "user", "content": message},
                    {"role": "assistant", "tool_calls": result["choices"][0]["message"]["tool_calls"]},
                    {"role": "tool", "tool_call_id": tool_call["id"], "name": "memory", "content": tool_output}
                ]
            }

            res2 = requests.post(cfg["url"], headers=headers, json=followup)
            res2.raise_for_status()
            final_response = res2.json()
            return final_response["choices"][0]["message"]["content"]
            #print(tool_output)
            #print(cfg["model"])
            #print(final_response)

    # ツール未使用 or 通常応答
    return result["choices"][0]["message"]["content"]

def call_ollama(cfg, message: str):
    payload = {
        "model": cfg["model"],
        "prompt": message,  # `prompt` → `message` にすべき(変数未定義エラー回避)
        "stream": False
    }
    headers = {"Content-Type": "application/json"}
    response = requests.post(cfg["url"], headers=headers, json=payload)
    response.raise_for_status()
    return response.json().get("response", "❌ 応答が取得できませんでした")
def main():
    if len(sys.argv) < 2:
        print("Usage: ask.py 'your message'")
        return

    message = sys.argv[1]
    cfg = load_config()

    print(f"🔍 使用プロバイダー: {cfg['provider']}")

    try:
        if cfg["provider"] == "openai":
            response = call_openai(cfg, message)
        elif cfg["provider"] == "mcp":
            response = call_mcp(cfg, message)
        elif cfg["provider"] == "ollama":
            response = call_ollama(cfg, message)
        else:
            raise ValueError(f"未対応のプロバイダー: {cfg['provider']}")

        print("💬 応答:")
        print(response)

        # ログ保存(オプション)
        save_log(message, response)

    except Exception as e:
        print(f"❌ 実行エラー: {e}")

def save_log(user_msg, ai_msg):
    from config import MEMORY_DIR
    date_str = datetime.now().strftime("%Y-%m-%d")
    path = MEMORY_DIR / f"{date_str}.json"
    path.parent.mkdir(parents=True, exist_ok=True)

    if path.exists():
        with open(path, "r") as f:
            logs = json.load(f)
    else:
        logs = []

    now = datetime.now(timezone.utc).isoformat()
    logs.append({"timestamp": now, "sender": "user", "message": user_msg})
    logs.append({"timestamp": now, "sender": "ai", "message": ai_msg})

    with open(path, "w") as f:
        json.dump(logs, f, indent=2, ensure_ascii=False)

if __name__ == "__main__":
    main()
mcp/scripts/config.py (deleted, 41 lines)
@@ -1,41 +0,0 @@
# scripts/config.py
# scripts/config.py
import os
from pathlib import Path

# ディレクトリ設定
BASE_DIR = Path.home() / ".config" / "aigpt"
MEMORY_DIR = BASE_DIR / "memory"
SUMMARY_DIR = MEMORY_DIR / "summary"

def init_directories():
    BASE_DIR.mkdir(parents=True, exist_ok=True)
    MEMORY_DIR.mkdir(parents=True, exist_ok=True)
    SUMMARY_DIR.mkdir(parents=True, exist_ok=True)

def load_config():
    provider = os.getenv("PROVIDER", "ollama")
    model = os.getenv("MODEL", "syui/ai" if provider == "ollama" else "gpt-4o-mini")
    api_key = os.getenv("OPENAI_API_KEY", "")

    if provider == "ollama":
        return {
            "provider": "ollama",
            "model": model,
            "url": f"{os.getenv('OLLAMA_HOST', 'http://localhost:11434')}/api/generate"
        }
    elif provider == "openai":
        return {
            "provider": "openai",
            "model": model,
            "api_key": api_key,
            "url": f"{os.getenv('OPENAI_API_BASE', 'https://api.openai.com/v1')}/chat/completions"
        }
    elif provider == "mcp":
        return {
            "provider": "mcp",
            "model": model,
            "url": os.getenv("MCP_URL", "http://localhost:5000/chat")
        }
    else:
        raise ValueError(f"Unsupported provider: {provider}")
mcp/scripts/context_loader.py (deleted, 11 lines)
@@ -1,11 +0,0 @@
import os

def load_context_from_repo(repo_path: str, extensions={".rs", ".toml", ".md"}) -> str:
    context = ""
    for root, dirs, files in os.walk(repo_path):
        for file in files:
            if any(file.endswith(ext) for ext in extensions):
                with open(os.path.join(root, file), "r", encoding="utf-8", errors="ignore") as f:
                    content = f.read()
                    context += f"\n\n# FILE: {os.path.join(root, file)}\n{content}"
    return context
mcp/scripts/memory_store.py (deleted, 92 lines)
@@ -1,92 +0,0 @@
# scripts/memory_store.py
import json
from pathlib import Path
from config import MEMORY_DIR
from datetime import datetime, timezone

def load_logs(date_str=None):
    if date_str is None:
        date_str = datetime.now().strftime("%Y-%m-%d")
    path = MEMORY_DIR / f"{date_str}.json"
    if path.exists():
        with open(path, "r") as f:
            return json.load(f)
    return []

def save_message(sender, message):
    date_str = datetime.now().strftime("%Y-%m-%d")
    path = MEMORY_DIR / f"{date_str}.json"
    logs = load_logs(date_str)
    now = datetime.now(timezone.utc).isoformat()
    logs.append({"timestamp": now, "sender": sender, "message": message})
    with open(path, "w") as f:
        json.dump(logs, f, indent=2, ensure_ascii=False)

def search_memory(query: str):
    from glob import glob
    all_logs = []
    pattern = re.compile(re.escape(query), re.IGNORECASE)

    for file_path in sorted(MEMORY_DIR.glob("*.json")):
        with open(file_path, "r") as f:
            logs = json.load(f)
            matched = [entry for entry in logs if pattern.search(entry["message"])]
            all_logs.extend(matched)

    return all_logs[-5:]

# scripts/memory_store.py
import json
from datetime import datetime
from pathlib import Path
from config import MEMORY_DIR

# ログを読み込む(指定日または当日)
def load_logs(date_str=None):
    if date_str is None:
        date_str = datetime.now().strftime("%Y-%m-%d")
    path = MEMORY_DIR / f"{date_str}.json"
    if path.exists():
        with open(path, "r") as f:
            return json.load(f)
    return []

# メッセージを保存する
def save_message(sender, message):
    date_str = datetime.now().strftime("%Y-%m-%d")
    path = MEMORY_DIR / f"{date_str}.json"
    logs = load_logs(date_str)
    #now = datetime.utcnow().isoformat() + "Z"
    now = datetime.now(timezone.utc).isoformat()
    logs.append({"timestamp": now, "sender": sender, "message": message})
    with open(path, "w") as f:
        json.dump(logs, f, indent=2, ensure_ascii=False)

def search_memory(query: str):
    from glob import glob
    all_logs = []
    for file_path in sorted(MEMORY_DIR.glob("*.json")):
        with open(file_path, "r") as f:
            logs = json.load(f)
            matched = [
                entry for entry in logs
                if entry["sender"] == "user" and query in entry["message"]
            ]
            all_logs.extend(matched)
    return all_logs[-5:]  # 最新5件だけ返す
def search_memory(query: str):
    from glob import glob
    all_logs = []
    seen_messages = set()  # すでに見たメッセージを保持

    for file_path in sorted(MEMORY_DIR.glob("*.json")):
        with open(file_path, "r") as f:
            logs = json.load(f)
            for entry in logs:
                if entry["sender"] == "user" and query in entry["message"]:
                    # すでに同じメッセージが結果に含まれていなければ追加
                    if entry["message"] not in seen_messages:
                        all_logs.append(entry)
                        seen_messages.add(entry["message"])

    return all_logs[-5:]  # 最新5件だけ返す
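The last `search_memory` variant above scans the per-day JSON logs and deduplicates matching user messages before returning the most recent five. Since the project is moving to Rust, a comparable sketch of the same scan-and-dedup logic in Rust may be useful for reference; the Rust side of this diff does not ship such a function, so the name, path handling, and types here are assumptions:

```rust
// Sketch only: scan daily log files, keep unique user messages containing the
// query, and return the last five, mirroring the Python search_memory above.
use std::collections::HashSet;
use std::fs;
use std::path::Path;

use serde::Deserialize;

#[derive(Deserialize)]
struct LogEntry {
    sender: String,
    message: String,
}

fn search_memory(memory_dir: &Path, query: &str) -> Vec<LogEntry> {
    let mut seen = HashSet::new();
    let mut hits = Vec::new();

    // Collect *.json files in date order (file names are YYYY-MM-DD.json).
    let mut files: Vec<_> = fs::read_dir(memory_dir)
        .into_iter()
        .flatten()
        .flatten()
        .map(|e| e.path())
        .filter(|p| p.extension().map_or(false, |ext| ext == "json"))
        .collect();
    files.sort();

    for path in files {
        let Ok(text) = fs::read_to_string(&path) else { continue };
        let Ok(entries) = serde_json::from_str::<Vec<LogEntry>>(&text) else { continue };
        for entry in entries {
            if entry.sender == "user"
                && entry.message.contains(query)
                && seen.insert(entry.message.clone())
            {
                hits.push(entry);
            }
        }
    }

    // Keep only the five most recent unique matches.
    let start = hits.len().saturating_sub(5);
    hits.split_off(start)
}

fn main() {
    // Hypothetical local directory; the real logs live under the config dir.
    for hit in search_memory(Path::new("memory"), "天気") {
        println!("{}: {}", hit.sender, hit.message);
    }
}
```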
mcp/scripts/prompt_template.py (deleted, 11 lines)
@@ -1,11 +0,0 @@
PROMPT_TEMPLATE = """
あなたは優秀なAIアシスタントです。

以下のコードベースの情報を参考にして、質問に答えてください。

[コードコンテキスト]
{context}

[質問]
{question}
"""
mcp/scripts/server.py (deleted, 56 lines)
@@ -1,56 +0,0 @@
# server.py
from fastapi import FastAPI, Body
from fastapi_mcp import FastApiMCP
from pydantic import BaseModel
from memory_store import save_message, load_logs, search_memory as do_search_memory

app = FastAPI()
mcp = FastApiMCP(app, name="aigpt-agent", description="MCP Server for AI memory")

class ChatInput(BaseModel):
    message: str

class MemoryInput(BaseModel):
    sender: str
    message: str

class MemoryQuery(BaseModel):
    query: str

@app.post("/chat", operation_id="chat")
async def chat(input: ChatInput):
    save_message("user", input.message)
    response = f"AI: 「{input.message}」を受け取りました!"
    save_message("ai", response)
    return {"response": response}

@app.post("/memory", operation_id="save_memory")
async def memory_post(input: MemoryInput):
    save_message(input.sender, input.message)
    return {"status": "saved"}

@app.get("/memory", operation_id="get_memory")
async def memory_get():
    return {"messages": load_messages()}

@app.post("/ask_message", operation_id="ask_message")
async def ask_message(input: MemoryQuery):
    results = search_memory(input.query)
    return {
        "response": f"🔎 記憶から {len(results)} 件ヒット:\n" + "\n".join([f"{r['sender']}: {r['message']}" for r in results])
    }

@app.post("/memory/search", operation_id="memory")
async def memory_search(query: MemoryQuery):
    hits = do_search_memory(query.query)
    if not hits:
        return {"result": "🔍 記憶の中に該当する内容は見つかりませんでした。"}
    summary = "\n".join([f"{e['sender']}: {e['message']}" for e in hits])
    return {"result": f"🔎 見つかった記憶:\n{summary}"}

mcp.mount()

if __name__ == "__main__":
    import uvicorn
    print("🚀 Starting MCP server...")
    uvicorn.run(app, host="127.0.0.1", port=5000)
mcp/scripts/summarize.py (deleted, 76 lines)
@@ -1,76 +0,0 @@
# scripts/summarize.py
import json
from datetime import datetime
from config import MEMORY_DIR, SUMMARY_DIR, load_config
import requests

def load_memory(date_str):
    path = MEMORY_DIR / f"{date_str}.json"
    if not path.exists():
        print(f"⚠️ メモリファイルが見つかりません: {path}")
        return None
    with open(path, "r") as f:
        return json.load(f)

def save_summary(date_str, content):
    SUMMARY_DIR.mkdir(parents=True, exist_ok=True)
    path = SUMMARY_DIR / f"{date_str}_summary.json"
    with open(path, "w") as f:
        json.dump(content, f, indent=2, ensure_ascii=False)
    print(f"✅ 要約を保存しました: {path}")

def build_prompt(logs):
    messages = [
        {"role": "system", "content": "あなたは要約AIです。以下の会話ログを要約してください。"},
        {"role": "user", "content": "\n".join(f"{entry['sender']}: {entry['message']}" for entry in logs)}
    ]
    return messages

def summarize_with_llm(messages):
    cfg = load_config()
    if cfg["provider"] == "openai":
        headers = {
            "Authorization": f"Bearer {cfg['api_key']}",
            "Content-Type": "application/json",
        }
        payload = {
            "model": cfg["model"],
            "messages": messages,
            "temperature": 0.7
        }
        response = requests.post(cfg["url"], headers=headers, json=payload)
        response.raise_for_status()
        return response.json()["choices"][0]["message"]["content"]

    elif cfg["provider"] == "ollama":
        payload = {
            "model": cfg["model"],
            "prompt": "\n".join(m["content"] for m in messages),
            "stream": False,
        }
        response = requests.post(cfg["url"], json=payload)
        response.raise_for_status()
        return response.json()["response"]

    else:
        raise ValueError(f"Unsupported provider: {cfg['provider']}")

def main():
    date_str = datetime.now().strftime("%Y-%m-%d")
    logs = load_memory(date_str)
    if not logs:
        return

    prompt_messages = build_prompt(logs)
    summary_text = summarize_with_llm(prompt_messages)

    summary = {
        "date": date_str,
        "summary": summary_text,
        "total_messages": len(logs)
    }

    save_summary(date_str, summary)

if __name__ == "__main__":
    main()
mcp/setup.py (deleted, 12 lines)
@@ -1,12 +0,0 @@
# setup.py
from setuptools import setup

setup(
    name='aigpt-mcp',
    py_modules=['cli'],
    entry_points={
        'console_scripts': [
            'mcp = cli:main',
        ],
    },
)
src/agent.rs (deleted, 37 lines)
@@ -1,37 +0,0 @@
use chrono::{NaiveDateTime};

#[allow(dead_code)]
#[derive(Debug)]
pub struct AIState {
    pub relation_score: f32,
    pub previous_score: f32,
    pub decay_rate: f32,
    pub sensitivity: f32,
    pub message_threshold: f32,
    pub last_message_time: NaiveDateTime,
}

#[allow(dead_code)]
impl AIState {
    pub fn update(&mut self, now: NaiveDateTime) {
        let days_passed = (now - self.last_message_time).num_days() as f32;
        let decay = self.decay_rate * days_passed;
        self.previous_score = self.relation_score;
        self.relation_score -= decay;
        self.relation_score = self.relation_score.clamp(0.0, 100.0);
    }

    pub fn should_talk(&self) -> bool {
        let delta = self.previous_score - self.relation_score;
        delta > self.message_threshold && self.sensitivity > 0.5
    }

    pub fn generate_message(&self) -> String {
        match self.relation_score as i32 {
            80..=100 => "ふふっ、最近どうしてる?会いたくなっちゃった!".to_string(),
            60..=79 => "ちょっとだけ、さみしかったんだよ?".to_string(),
            40..=59 => "えっと……話せる時間ある?".to_string(),
            _ => "ううん、もしかして私のこと、忘れちゃったのかな……".to_string(),
        }
    }
}
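For reference, `AIState` is driven by the `talk` command in the removed `src/cli.rs` shown further below. A condensed usage sketch, assuming the `AIState` definition above is in scope (the literal values here are illustrative):

```rust
use chrono::{Duration, Local};

// Condensed from the talk command in src/cli.rs: build a state, apply time
// decay for the elapsed days, then decide whether the AI speaks.
fn main() {
    let now = Local::now().naive_local();
    let mut state = AIState {
        relation_score: 80.0,
        previous_score: 80.0,
        decay_rate: 0.1,
        sensitivity: 0.8,
        message_threshold: 5.0,
        last_message_time: now - Duration::days(4),
    };

    state.update(now);
    if state.should_talk() {
        println!("{}", state.generate_message());
    } else {
        println!("(no message today)");
    }
}
```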
src/bin/mcp_server.rs (new file, 38 lines)
@@ -0,0 +1,38 @@
use anyhow::Result;
use std::env;

use aigpt::mcp::BaseMCPServer;

#[tokio::main]
async fn main() -> Result<()> {
    // 環境変数から設定を読み込み
    let auto_execute = env::var("MEMORY_AUTO_EXECUTE")
        .unwrap_or_else(|_| "false".to_string())
        .parse::<bool>()
        .unwrap_or(false);

    let auto_save = env::var("MEMORY_AUTO_SAVE")
        .unwrap_or_else(|_| "false".to_string())
        .parse::<bool>()
        .unwrap_or(false);

    let auto_search = env::var("MEMORY_AUTO_SEARCH")
        .unwrap_or_else(|_| "false".to_string())
        .parse::<bool>()
        .unwrap_or(false);

    let trigger_sensitivity = env::var("TRIGGER_SENSITIVITY")
        .unwrap_or_else(|_| "medium".to_string());

    // 設定をログ出力
    eprintln!("Memory MCP Server (Standard) starting with config:");
    eprintln!("  AUTO_EXECUTE: {}", auto_execute);
    eprintln!("  AUTO_SAVE: {}", auto_save);
    eprintln!("  AUTO_SEARCH: {}", auto_search);
    eprintln!("  TRIGGER_SENSITIVITY: {}", trigger_sensitivity);

    let mut server = BaseMCPServer::new().await?;
    server.run().await?;

    Ok(())
}
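The same three boolean flags are parsed again in the extended binary below. If that were ever factored out, a small shared helper could look like this; it is a hypothetical sketch, not code that exists in the repository:

```rust
use std::env;

// Hypothetical helper: read an env var such as MEMORY_AUTO_SAVE and fall back
// to `default` when it is missing or cannot be parsed as a bool.
fn env_flag(name: &str, default: bool) -> bool {
    env::var(name)
        .ok()
        .and_then(|v| v.parse::<bool>().ok())
        .unwrap_or(default)
}

fn main() {
    let auto_save = env_flag("MEMORY_AUTO_SAVE", false);
    eprintln!("AUTO_SAVE: {}", auto_save);
}
```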
src/bin/mcp_server_extended.rs (new file, 45 lines)
@@ -0,0 +1,45 @@
use anyhow::Result;
use std::env;

use aigpt::mcp::ExtendedMCPServer;

#[tokio::main]
async fn main() -> Result<()> {
    // 環境変数から拡張機能の設定を読み込み
    let auto_execute = env::var("MEMORY_AUTO_EXECUTE")
        .unwrap_or_else(|_| "false".to_string())
        .parse::<bool>()
        .unwrap_or(false);

    let auto_save = env::var("MEMORY_AUTO_SAVE")
        .unwrap_or_else(|_| "false".to_string())
        .parse::<bool>()
        .unwrap_or(false);

    let auto_search = env::var("MEMORY_AUTO_SEARCH")
        .unwrap_or_else(|_| "false".to_string())
        .parse::<bool>()
        .unwrap_or(false);

    let trigger_sensitivity = env::var("TRIGGER_SENSITIVITY")
        .unwrap_or_else(|_| "medium".to_string());

    let enable_ai_analysis = cfg!(feature = "ai-analysis");
    let enable_semantic_search = cfg!(feature = "semantic-search");
    let enable_web_integration = cfg!(feature = "web-integration");

    // 拡張設定をログ出力
    eprintln!("Memory MCP Server (Extended) starting with config:");
    eprintln!("  AUTO_EXECUTE: {}", auto_execute);
    eprintln!("  AUTO_SAVE: {}", auto_save);
    eprintln!("  AUTO_SEARCH: {}", auto_search);
    eprintln!("  TRIGGER_SENSITIVITY: {}", trigger_sensitivity);
    eprintln!("  AI_ANALYSIS: {}", enable_ai_analysis);
    eprintln!("  SEMANTIC_SEARCH: {}", enable_semantic_search);
    eprintln!("  WEB_INTEGRATION: {}", enable_web_integration);

    let mut server = ExtendedMCPServer::new().await?;
    server.run().await?;

    Ok(())
}
src/chat.rs (deleted, 140 lines)
@@ -1,140 +0,0 @@
// src/chat.rs
use std::fs;
use std::process::Command;
use serde::Deserialize;
use seahorse::Context;
use crate::config::ConfigPaths;
use crate::metrics::{load_user_data, save_user_data, update_metrics_decay};
//use std::process::Stdio;
//use std::io::Write;
//use std::time::Duration;
//use std::net::TcpStream;

#[derive(Debug, Clone, PartialEq)]
pub enum Provider {
    OpenAI,
    Ollama,
    MCP,
}

impl Provider {
    pub fn from_str(s: &str) -> Option<Self> {
        match s.to_lowercase().as_str() {
            "openai" => Some(Provider::OpenAI),
            "ollama" => Some(Provider::Ollama),
            "mcp" => Some(Provider::MCP),
            _ => None,
        }
    }

    pub fn as_str(&self) -> &'static str {
        match self {
            Provider::OpenAI => "openai",
            Provider::Ollama => "ollama",
            Provider::MCP => "mcp",
        }
    }
}

#[derive(Deserialize)]
struct OpenAIKey {
    token: String,
}

fn load_openai_api_key() -> Option<String> {
    let config = ConfigPaths::new();
    let path = config.base_dir.join("openai.json");
    let data = fs::read_to_string(path).ok()?;
    let parsed: OpenAIKey = serde_json::from_str(&data).ok()?;
    Some(parsed.token)
}

pub fn ask_chat(c: &Context, question: &str) -> Option<String> {
    let config = ConfigPaths::new();
    let base_dir = config.base_dir.join("mcp");
    let user_path = config.base_dir.join("user.json");

    let mut user = load_user_data(&user_path);
    user.metrics = update_metrics_decay();

    // 各種オプション
    let ollama_host = c.string_flag("host").ok();
    let ollama_model = c.string_flag("model").ok();
    let provider_str = c.string_flag("provider").unwrap_or_else(|_| "ollama".to_string());
    let provider = Provider::from_str(&provider_str).unwrap_or(Provider::Ollama);
    let api_key = c.string_flag("api-key").ok().or_else(load_openai_api_key);

    println!("🔍 使用プロバイダー: {}", provider.as_str());

    match provider {
        Provider::MCP => {
            let client = reqwest::blocking::Client::new();
            let url = std::env::var("MCP_URL").unwrap_or("http://127.0.0.1:5000/chat".to_string());
            let res = client.post(url)
                .json(&serde_json::json!({"message": question}))
                .send();

            match res {
                Ok(resp) => {
                    if resp.status().is_success() {
                        let json: serde_json::Value = resp.json().ok()?;
                        let text = json.get("response")?.as_str()?.to_string();
                        user.metrics.intimacy += 0.01;
                        user.metrics.last_updated = chrono::Utc::now();
                        save_user_data(&user_path, &user);
                        Some(text)
                    } else {
                        eprintln!("❌ MCPエラー: HTTP {}", resp.status());
                        None
                    }
                }
                Err(e) => {
                    eprintln!("❌ MCP接続失敗: {}", e);
                    None
                }
            }
        }
        _ => {
            // Python 実行パス
            let python_path = if cfg!(target_os = "windows") {
                base_dir.join(".venv/Scripts/mcp.exe")
            } else {
                base_dir.join(".venv/bin/mcp")
            };

            let mut command = Command::new(python_path);
            command.arg("ask").arg(question);

            if let Some(host) = ollama_host {
                command.env("OLLAMA_HOST", host);
            }
            if let Some(model) = ollama_model {
                command.env("OLLAMA_MODEL", model.clone());
                command.env("OPENAI_MODEL", model);
            }
            command.env("PROVIDER", provider.as_str());

            if let Some(key) = api_key {
                command.env("OPENAI_API_KEY", key);
            }

            let output = command.output().expect("❌ MCPチャットスクリプトの実行に失敗しました");

            if output.status.success() {
                let response = String::from_utf8_lossy(&output.stdout).to_string();
                user.metrics.intimacy += 0.01;
                user.metrics.last_updated = chrono::Utc::now();
                save_user_data(&user_path, &user);

                Some(response)
            } else {
                eprintln!(
                    "❌ 実行エラー: {}\n{}",
                    String::from_utf8_lossy(&output.stderr),
                    String::from_utf8_lossy(&output.stdout),
                );
                None
            }
        }
    }
}
src/cli.rs (deleted, 100 lines)
@@ -1,100 +0,0 @@
// src/cli.rs
use std::path::{Path};
use chrono::{Duration, Local};
use rusqlite::Connection;

use seahorse::{App, Command, Context};

use crate::utils::{load_config, save_config};
use crate::config::ConfigPaths;
use crate::agent::AIState;
use crate::commands::db::{save_cmd, export_cmd};
use crate::commands::scheduler::{scheduler_cmd};
use crate::commands::mcp::mcp_cmd;

pub fn cli_app() -> App {
    let set_cmd = Command::new("set")
        .usage("set [trust|intimacy|curiosity] [value]")
        .action(|c: &Context| {
            if c.args.len() != 2 {
                eprintln!("Usage: set [trust|intimacy|curiosity] [value]");
                std::process::exit(1);
            }

            let field = &c.args[0];
            let value: f32 = c.args[1].parse().unwrap_or_else(|_| {
                eprintln!("数値で入力してください");
                std::process::exit(1);
            });

            // ConfigPathsを使って設定ファイルのパスを取得
            let config_paths = ConfigPaths::new();
            let json_path = config_paths.data_file("json");
            // まだ user.json がない場合、example.json をコピー
            config_paths.ensure_file_exists("json", Path::new("example.json"));
            let db_path = config_paths.data_file("db");
            let mut ai = load_config(json_path.to_str().unwrap());

            match field.as_str() {
                "trust" => ai.relationship.trust = value,
                "intimacy" => ai.relationship.intimacy = value,
                "curiosity" => ai.relationship.curiosity = value,
                _ => {
                    eprintln!("trust / intimacy / curiosity のいずれかを指定してください");
                    std::process::exit(1);
                }
            }
            save_config(json_path.to_str().unwrap(), &ai);

            let conn = Connection::open(db_path.to_str().unwrap()).expect("DB接続失敗");
            ai.save_to_db(&conn).expect("DB保存失敗");

            println!("✅ {field} を {value} に更新しました");
        });

    let show_cmd = Command::new("show")
        .usage("show")
        .action(|_c: &Context| {
            // ConfigPathsを使って設定ファイルのパスを取得
            let config_paths = ConfigPaths::new();
            let ai = load_config(config_paths.data_file("json").to_str().unwrap());
            println!("🧠 現在のAI状態:\n{:#?}", ai);
        });

    let talk_cmd = Command::new("talk")
        .usage("talk")
        .action(|_c: &Context| {
            let config_paths = ConfigPaths::new();
            let ai = load_config(config_paths.data_file("json").to_str().unwrap());

            let now = Local::now().naive_local();
            let mut state = AIState {
                relation_score: 80.0,
                previous_score: 80.0,
                decay_rate: ai.messaging.decay_rate,
                sensitivity: ai.personality.strength,
                message_threshold: 5.0,
                last_message_time: now - Duration::days(4),
            };

            state.update(now);

            if state.should_talk() {
                println!("💬 AI発話: {}", state.generate_message());
            } else {
                println!("🤫 今日は静かにしているみたい...");
            }
        });

    App::new("aigpt")
        .version("0.1.0")
        .description("AGE system CLI controller")
        .author("syui")
        .command(set_cmd)
        .command(show_cmd)
        .command(talk_cmd)
        .command(save_cmd())
        .command(export_cmd())
        .command(scheduler_cmd())
        .command(mcp_cmd())
}
src/commands/db.rs (deleted, 44 lines)
@@ -1,44 +0,0 @@
// src/commands/db.rs
use seahorse::{Command, Context};
use crate::utils::{load_config};
use crate::model::AiSystem;
use crate::config::ConfigPaths;

use rusqlite::Connection;
use std::fs;

pub fn save_cmd() -> Command {
    Command::new("save")
        .usage("save")
        .action(|_c: &Context| {
            let paths = ConfigPaths::new();

            let json_path = paths.data_file("json");
            let db_path = paths.data_file("db");

            let ai = load_config(json_path.to_str().unwrap());
            let conn = Connection::open(db_path).expect("DB接続失敗");

            ai.save_to_db(&conn).expect("DB保存失敗");
            println!("💾 DBに保存完了");
        })
}

pub fn export_cmd() -> Command {
    Command::new("export")
        .usage("export [output.json]")
        .action(|c: &Context| {
            let output_path = c.args.get(0).map(|s| s.as_str()).unwrap_or("output.json");

            let paths = ConfigPaths::new();
            let db_path = paths.data_file("db");

            let conn = Connection::open(db_path).expect("DB接続失敗");
            let ai = AiSystem::load_from_db(&conn).expect("DB読み込み失敗");

            let json = serde_json::to_string_pretty(&ai).expect("JSON変換失敗");
            fs::write(output_path, json).expect("ファイル書き込み失敗");

            println!("📤 JSONにエクスポート完了: {output_path}");
        })
}
src/commands/git_repo.rs (deleted, 17 lines)
@@ -1,17 +0,0 @@
// src/commands/git_repo.rs
use std::fs;

// Gitリポジトリ内の全てのファイルを取得し、内容を読み取る
pub fn read_all_git_files(repo_path: &str) -> String {
    let mut content = String::new();
    for entry in fs::read_dir(repo_path).expect("ディレクトリ読み込み失敗") {
        let entry = entry.expect("エントリ読み込み失敗");
        let path = entry.path();
        if path.is_file() {
            if let Ok(file_content) = fs::read_to_string(&path) {
                content.push_str(&format!("\n\n# File: {}\n{}", path.display(), file_content));
            }
        }
    }
    content
}
@@ -1,277 +0,0 @@
// src/commands/mcp.rs

use std::fs;
use std::path::{PathBuf};
use std::process::Command as OtherCommand;
use serde_json::json;
use seahorse::{Command, Context, Flag, FlagType};
use crate::chat::ask_chat;
use crate::git::{git_init, git_status};
use crate::config::ConfigPaths;
use crate::commands::git_repo::read_all_git_files;
use crate::metrics::{load_user_data, save_user_data};
use crate::memory::{log_message};

pub fn mcp_setup() {
    let config = ConfigPaths::new();
    let dest_dir = config.base_dir.join("mcp");
    let repo_url = "https://github.com/microsoft/MCP.git";
    println!("📁 MCP ディレクトリ: {}", dest_dir.display());

    // 1. git clone(もしまだなければ)
    if !dest_dir.exists() {
        let status = OtherCommand::new("git")
            .args(&["clone", repo_url, dest_dir.to_str().unwrap()])
            .status()
            .expect("git clone に失敗しました");
        assert!(status.success(), "git clone 実行時にエラーが発生しました");
    }

    let asset_base = PathBuf::from("mcp");
    let files_to_copy = vec![
        "cli.py",
        "setup.py",
        "scripts/ask.py",
        "scripts/server.py",
        "scripts/config.py",
        "scripts/summarize.py",
        "scripts/context_loader.py",
        "scripts/prompt_template.py",
        "scripts/memory_store.py",
    ];

    for rel_path in files_to_copy {
        let src = asset_base.join(rel_path);
        let dst = dest_dir.join(rel_path);
        if let Some(parent) = dst.parent() {
            let _ = fs::create_dir_all(parent);
        }
        if let Err(e) = fs::copy(&src, &dst) {
            eprintln!("❌ コピー失敗: {} → {}: {}", src.display(), dst.display(), e);
        } else {
            println!("✅ コピー: {} → {}", src.display(), dst.display());
        }
    }

    // venvの作成
    let venv_path = dest_dir.join(".venv");
    if !venv_path.exists() {
        println!("🐍 仮想環境を作成しています...");
        let output = OtherCommand::new("python3")
            .args(&["-m", "venv", ".venv"])
            .current_dir(&dest_dir)
            .output()
            .expect("venvの作成に失敗しました");

        if !output.status.success() {
            eprintln!("❌ venv作成エラー: {}", String::from_utf8_lossy(&output.stderr));
            return;
        }
    }

    // `pip install -e .` を仮想環境で実行
    let pip_path = if cfg!(target_os = "windows") {
        dest_dir.join(".venv/Scripts/pip.exe").to_string_lossy().to_string()
    } else {
        dest_dir.join(".venv/bin/pip").to_string_lossy().to_string()
    };

    println!("📦 必要なパッケージをインストールしています...");
    let output = OtherCommand::new(&pip_path)
        .arg("install")
        .arg("openai")
        .arg("requests")
        .arg("fastmcp")
        .arg("uvicorn")
        .arg("fastapi")
        .arg("fastapi_mcp")
        .arg("mcp")
        .current_dir(&dest_dir)
        .output()
        .expect("pip install に失敗しました");

    if !output.status.success() {
        eprintln!(
            "❌ pip エラー: {}\n{}",
            String::from_utf8_lossy(&output.stderr),
            String::from_utf8_lossy(&output.stdout)
        );
        return;
    }

    println!("📦 pip install -e . を実行します...");
    let output = OtherCommand::new(&pip_path)
        .arg("install")
        .arg("-e")
        .arg(".")
        .current_dir(&dest_dir)
        .output()
        .expect("pip install に失敗しました");

    if output.status.success() {
        println!("🎉 MCP セットアップが完了しました!");
    } else {
        eprintln!(
            "❌ pip エラー: {}\n{}",
            String::from_utf8_lossy(&output.stderr),
            String::from_utf8_lossy(&output.stdout)
        );
    }
}

fn set_api_key_cmd() -> Command {
    Command::new("set-api")
        .description("OpenAI APIキーを設定")
        .usage("mcp set-api --api <API_KEY>")
        .flag(Flag::new("api", FlagType::String).description("OpenAI APIキー").alias("a"))
        .action(|c: &Context| {
            if let Ok(api_key) = c.string_flag("api") {
                let config = ConfigPaths::new();
                let path = config.base_dir.join("openai.json");
                let json_data = json!({ "token": api_key });

                if let Err(e) = fs::write(&path, serde_json::to_string_pretty(&json_data).unwrap()) {
                    eprintln!("❌ ファイル書き込み失敗: {}", e);
                } else {
                    println!("✅ APIキーを保存しました: {}", path.display());
                }
            } else {
                eprintln!("❗ APIキーを --api で指定してください");
            }
        })
}

fn chat_cmd() -> Command {
    Command::new("chat")
        .description("チャットで質問を送る")
        .usage("mcp chat '質問内容' --host <OLLAMA_HOST> --model <MODEL> [--provider <ollama|openai>] [--api-key <KEY>] [--repo <REPO_URL>]")
        .flag(
            Flag::new("host", FlagType::String)
                .description("OLLAMAホストのURL")
                .alias("H"),
        )
        .flag(
            Flag::new("model", FlagType::String)
                .description("モデル名 (OLLAMA_MODEL / OPENAI_MODEL)")
                .alias("m"),
        )
        .flag(
            Flag::new("provider", FlagType::String)
                .description("使用するプロバイダ (ollama / openai)")
                .alias("p"),
        )
        .flag(
            Flag::new("api-key", FlagType::String)
                .description("OpenAI APIキー")
                .alias("k"),
        )
        .flag(
            Flag::new("repo", FlagType::String)
                .description("Gitリポジトリのパスを指定 (すべてのコードを読み込む)")
                .alias("r"),
        )
        .action(|c: &Context| {
            let config = ConfigPaths::new();
            let user_path = config.data_file("json");
            let mut user = load_user_data(&user_path);
            // repoがある場合は、コードベース読み込みモード
            if let Ok(repo_url) = c.string_flag("repo") {
                let repo_base = config.base_dir.join("repos");
                let repo_dir = repo_base.join(sanitize_repo_name(&repo_url));

                if !repo_dir.exists() {
                    println!("📥 Gitリポジトリをクローン中: {}", repo_url);
                    let status = OtherCommand::new("git")
                        .args(&["clone", &repo_url, repo_dir.to_str().unwrap()])
                        .status()
                        .expect("❌ Gitのクローンに失敗しました");
                    assert!(status.success(), "Git clone エラー");
                } else {
                    println!("✔ リポジトリはすでに存在します: {}", repo_dir.display());
                }

                let files = read_all_git_files(repo_dir.to_str().unwrap());
                let prompt = format!(
                    "以下のコードベースを読み込んで、改善案や次のステップを提案してください:\n{}",
                    files
                );

                if let Some(response) = ask_chat(c, &prompt) {
                    println!("💬 提案:\n{}", response);
                } else {
                    eprintln!("❗ 提案が取得できませんでした");
                }

                return;
            }

            // 通常のチャット処理(repoが指定されていない場合)
            match c.args.get(0) {
                Some(question) => {
                    log_message(&config.base_dir, "user", question);
                    let response = ask_chat(c, question);

                    if let Some(ref text) = response {
                        println!("💬 応答:\n{}", text);
                        // 返答内容に基づいて増減(返答の感情解析)
                        if text.contains("thank") || text.contains("great") {
                            user.metrics.trust += 0.05;
                        } else if text.contains("hate") || text.contains("bad") {
                            user.metrics.trust -= 0.05;
                        }
                        log_message(&config.base_dir, "ai", &text);
                        save_user_data(&user_path, &user);
                    } else {
                        eprintln!("❗ 応答が取得できませんでした");
                    }
                }
                None => {
                    eprintln!("❗ 質問が必要です: mcp chat 'こんにちは'");
                }
            }
        })
}

fn init_cmd() -> Command {
    Command::new("init")
        .description("Git 初期化")
        .usage("mcp init")
        .action(|_| {
            git_init();
        })
}

fn status_cmd() -> Command {
    Command::new("status")
        .description("Git ステータス表示")
        .usage("mcp status")
        .action(|_| {
            git_status();
        })
}

fn setup_cmd() -> Command {
    Command::new("setup")
        .description("MCP の初期セットアップ")
        .usage("mcp setup")
        .action(|_| {
            mcp_setup();
        })
}

pub fn mcp_cmd() -> Command {
    Command::new("mcp")
        .description("MCP操作コマンド")
        .usage("mcp <subcommand>")
        .alias("m")
        .command(chat_cmd())
        .command(init_cmd())
        .command(status_cmd())
        .command(setup_cmd())
        .command(set_api_key_cmd())
}

// ファイル名として安全な形に変換
fn sanitize_repo_name(repo_url: &str) -> String {
    repo_url.replace("://", "_").replace("/", "_").replace("@", "_")
}
@@ -1,4 +0,0 @@
pub mod db;
pub mod scheduler;
pub mod mcp;
pub mod git_repo;
@@ -1,127 +0,0 @@
// src/commands/scheduler.rs
use seahorse::{Command, Context};
use std::thread;
use std::time::Duration;
use chrono::{Local, Utc, Timelike};
use crate::metrics::{load_user_data, save_user_data};
use crate::config::ConfigPaths;
use crate::chat::ask_chat;
use rand::prelude::*;
use rand::rng;

fn send_scheduled_message() {
    let config = ConfigPaths::new();
    let user_path = config.data_file("json");
    let mut user = load_user_data(&user_path);

    if !user.metrics.can_send {
        println!("🚫 送信条件を満たしていないため、スケジュール送信スキップ");
        return;
    }

    // 日付の比較(1日1回制限)
    let today = Local::now().format("%Y-%m-%d").to_string();
    if let Some(last_date) = &user.messaging.last_sent_date {
        if last_date != &today {
            user.messaging.sent_today = false;
        }
    } else {
        user.messaging.sent_today = false;
    }

    if user.messaging.sent_today {
        println!("🔁 本日はすでに送信済みです: {}", today);
        return;
    }

    if let Some(schedule_str) = &user.messaging.schedule_time {
        let now = Local::now();
        let target: Vec<&str> = schedule_str.split(':').collect();

        if target.len() != 2 {
            println!("⚠️ schedule_time形式が無効です: {}", schedule_str);
            return;
        }

        let (sh, sm) = (target[0].parse::<u32>(), target[1].parse::<u32>());
        if let (Ok(sh), Ok(sm)) = (sh, sm) {
            if now.hour() == sh && now.minute() == sm {
                if let Some(msg) = user.messaging.templates.choose(&mut rng()) {
                    println!("💬 自動送信メッセージ: {}", msg);
                    let dummy_context = Context::new(vec![], None, "".to_string());
                    ask_chat(&dummy_context, msg);
                    user.metrics.intimacy += 0.03;

                    // 送信済みのフラグ更新
                    user.messaging.sent_today = true;
                    user.messaging.last_sent_date = Some(today);

                    save_user_data(&user_path, &user);
                }
            }
        }
    }
}

pub fn scheduler_cmd() -> Command {
    Command::new("scheduler")
        .usage("scheduler [interval_sec]")
        .alias("s")
        .description("定期的に送信条件をチェックし、自発的なメッセージ送信を試みる")
        .action(|c: &Context| {
            let interval = c.args.get(0)
                .and_then(|s| s.parse::<u64>().ok())
                .unwrap_or(3600); // デフォルト: 1時間(テストしやすく)

            println!("⏳ スケジューラー開始({}秒ごと)...", interval);

            loop {
                let config = ConfigPaths::new();
                let user_path = config.data_file("json");
                let mut user = load_user_data(&user_path);

                let now = Utc::now();
                let elapsed = now.signed_duration_since(user.metrics.last_updated);
                let hours = elapsed.num_minutes() as f32 / 60.0;

                let speed_factor = if hours > 48.0 {
                    2.0
                } else if hours > 24.0 {
                    1.5
                } else {
                    1.0
                };

                user.metrics.trust = (user.metrics.trust - 0.01 * speed_factor).clamp(0.0, 1.0);
                user.metrics.intimacy = (user.metrics.intimacy - 0.01 * speed_factor).clamp(0.0, 1.0);
                user.metrics.energy = (user.metrics.energy - 0.01 * speed_factor).clamp(0.0, 1.0);

                user.metrics.can_send =
                    user.metrics.trust >= 0.5 &&
                    user.metrics.intimacy >= 0.5 &&
                    user.metrics.energy >= 0.5;

                user.metrics.last_updated = now;

                if user.metrics.can_send {
                    println!("💡 AIメッセージ送信条件を満たしています(信頼:{:.2}, 親密:{:.2}, エネルギー:{:.2})",
                        user.metrics.trust,
                        user.metrics.intimacy,
                        user.metrics.energy
                    );
                    send_scheduled_message();
                } else {
                    println!("🤫 条件未達成のため送信スキップ: trust={:.2}, intimacy={:.2}, energy={:.2}",
                        user.metrics.trust,
                        user.metrics.intimacy,
                        user.metrics.energy
                    );
                }

                save_user_data(&user_path, &user);
                thread::sleep(Duration::from_secs(interval));
            }
        })
}
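For reference, the decay applied by this (now removed) scheduler is linear: each tick subtracts 0.01 × speed_factor from every metric, with speed_factor stepping up after 24 h and 48 h of inactivity. A minimal sketch of that arithmetic as a standalone program; the 0.5 cutoff mirrors the can_send condition above, everything else is illustrative:

```rust
// Sketch of the removed scheduler's per-tick decay (not part of the new crate).
fn tick(metric: f32, hours_since_update: f32) -> f32 {
    let speed_factor = if hours_since_update > 48.0 {
        2.0
    } else if hours_since_update > 24.0 {
        1.5
    } else {
        1.0
    };
    // Linear decay, clamped to the valid [0.0, 1.0] range.
    (metric - 0.01 * speed_factor).clamp(0.0, 1.0)
}

fn main() {
    // With hourly ticks and speed_factor 1.0, a metric at 1.0 needs ~50 ticks
    // to fall below the 0.5 send threshold.
    let mut trust = 1.0_f32;
    for hour in 1..=60 {
        trust = tick(trust, 1.0);
        if trust < 0.5 {
            println!("trust fell below 0.5 after {} hourly ticks", hour);
            break;
        }
    }
}
```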
@@ -1,46 +0,0 @@
// src/config.rs
use std::fs;
use std::path::{Path, PathBuf};
use shellexpand;

pub struct ConfigPaths {
    pub base_dir: PathBuf,
}

impl ConfigPaths {
    pub fn new() -> Self {
        let app_name = env!("CARGO_PKG_NAME");
        let mut base_dir = shellexpand::tilde("~").to_string();
        base_dir.push_str(&format!("/.config/{}/", app_name));
        let base_path = Path::new(&base_dir);
        if !base_path.exists() {
            let _ = fs::create_dir_all(base_path);
        }

        ConfigPaths {
            base_dir: base_path.to_path_buf(),
        }
    }

    pub fn data_file(&self, file_name: &str) -> PathBuf {
        let file_path = match file_name {
            "db" => self.base_dir.join("user.db"),
            "toml" => self.base_dir.join("user.toml"),
            "json" => self.base_dir.join("user.json"),
            _ => self.base_dir.join(format!(".{}", file_name)),
        };

        file_path
    }

    /// 設定ファイルがなければ `example.json` をコピーする
    pub fn ensure_file_exists(&self, file_name: &str, template_path: &Path) {
        let target = self.data_file(file_name);
        if !target.exists() {
            if let Err(e) = fs::copy(template_path, &target) {
                eprintln!("⚠️ 設定ファイルの初期化に失敗しました: {}", e);
            } else {
                println!("📄 {} を {} にコピーしました", template_path.display(), target.display());
            }
        }
    }
}
42 src/git.rs
@@ -1,42 +0,0 @@
// src/git.rs
use std::process::Command;

pub fn git_status() {
    run_git_command(&["status"]);
}

pub fn git_init() {
    run_git_command(&["init"]);
}

#[allow(dead_code)]
pub fn git_commit(message: &str) {
    run_git_command(&["add", "."]);
    run_git_command(&["commit", "-m", message]);
}

#[allow(dead_code)]
pub fn git_push() {
    run_git_command(&["push"]);
}

#[allow(dead_code)]
pub fn git_pull() {
    run_git_command(&["pull"]);
}

#[allow(dead_code)]
pub fn git_branch() {
    run_git_command(&["branch"]);
}

fn run_git_command(args: &[&str]) {
    let status = Command::new("git")
        .args(args)
        .status()
        .expect("git コマンドの実行に失敗しました");

    if !status.success() {
        eprintln!("⚠️ git コマンドに失敗しました: {:?}", args);
    }
}
2 src/lib.rs Normal file
@@ -0,0 +1,2 @@
pub mod memory;
pub mod mcp;
13 src/logic.rs
@@ -1,13 +0,0 @@
//src/logic.rs
use crate::model::AiSystem;

#[allow(dead_code)]
pub fn should_send(ai: &AiSystem) -> bool {
    let r = &ai.relationship;
    let env = &ai.environment;
    let score = r.trust + r.intimacy + r.curiosity;
    let relationship_ok = score >= r.threshold;
    let luck_ok = env.luck_today > 0.5;

    ai.messaging.enabled && relationship_ok && luck_ok
}
64 src/main.rs
@@ -1,21 +1,49 @@
//src/main.rs
mod model;
mod logic;
mod agent;
mod cli;
mod utils;
mod commands;
mod config;
mod git;
mod chat;
mod metrics;
mod memory;
use anyhow::Result;
use clap::{Parser, Subcommand};
use std::path::PathBuf;

use cli::cli_app;
use seahorse::App;
pub mod memory;
pub mod mcp;

fn main() {
    let args: Vec<String> = std::env::args().collect();
    let app: App = cli_app();
    app.run(args);
use memory::MemoryManager;
use mcp::BaseMCPServer;

#[derive(Parser)]
#[command(name = "aigpt")]
#[command(about = "Simple memory storage for Claude with MCP")]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}

#[derive(Subcommand)]
enum Commands {
    /// Start MCP server
    Server,
    /// Start MCP server (alias for server)
    Serve,
    /// Import ChatGPT conversations
    Import {
        /// Path to conversations.json file
        file: PathBuf,
    },
}

#[tokio::main]
async fn main() -> Result<()> {
    let cli = Cli::parse();

    match cli.command {
        Commands::Server | Commands::Serve => {
            let mut server = BaseMCPServer::new().await?;
            server.run().await?;
        }
        Commands::Import { file } => {
            let mut memory_manager = MemoryManager::new().await?;
            memory_manager.import_chatgpt_conversations(&file).await?;
            println!("Import completed successfully");
        }
    }

    Ok(())
}
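Since src/lib.rs now re-exports memory and mcp, the same code paths that the clap-based main() dispatches to can be driven from a small program of your own. A minimal sketch, assuming aigpt is pulled in as a library dependency and that a conversations.json exists at the path shown (both are assumptions for illustration):

```rust
// Sketch: exercising the code paths behind `aigpt import <file>` and
// `aigpt server`, using aigpt as a library (src/lib.rs exports memory and mcp).
use std::path::PathBuf;

use aigpt::mcp::BaseMCPServer;
use aigpt::memory::MemoryManager;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Equivalent of `aigpt import conversations.json` (path is illustrative).
    let file = PathBuf::from("conversations.json");
    let mut manager = MemoryManager::new().await?;
    manager.import_chatgpt_conversations(&file).await?;

    // Equivalent of `aigpt server`: blocks and serves MCP over stdio.
    let mut server = BaseMCPServer::new().await?;
    server.run().await?;
    Ok(())
}
```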
280 src/mcp/base.rs Normal file
@@ -0,0 +1,280 @@
use anyhow::Result;
use serde_json::{json, Value};
use std::io::{self, BufRead, Write};

use crate::memory::MemoryManager;

pub struct BaseMCPServer {
    pub memory_manager: MemoryManager,
}

impl BaseMCPServer {
    pub async fn new() -> Result<Self> {
        let memory_manager = MemoryManager::new().await?;
        Ok(BaseMCPServer { memory_manager })
    }

    pub async fn run(&mut self) -> Result<()> {
        let stdin = io::stdin();
        let mut stdout = io::stdout();

        let reader = stdin.lock();
        let lines = reader.lines();

        for line_result in lines {
            match line_result {
                Ok(line) => {
                    let trimmed = line.trim();
                    if trimmed.is_empty() {
                        continue;
                    }

                    if let Ok(request) = serde_json::from_str::<Value>(&trimmed) {
                        let response = self.handle_request(request).await;
                        let response_str = serde_json::to_string(&response)?;
                        stdout.write_all(response_str.as_bytes())?;
                        stdout.write_all(b"\n")?;
                        stdout.flush()?;
                    }
                }
                Err(_) => break,
            }
        }

        Ok(())
    }

    pub async fn handle_request(&mut self, request: Value) -> Value {
        let method = request["method"].as_str().unwrap_or("");
        let id = request["id"].clone();

        match method {
            "initialize" => self.handle_initialize(id),
            "tools/list" => self.handle_tools_list(id),
            "tools/call" => self.handle_tools_call(request, id).await,
            _ => self.handle_unknown_method(id),
        }
    }

    // 初期化ハンドラ
    fn handle_initialize(&self, id: Value) -> Value {
        json!({
            "jsonrpc": "2.0",
            "id": id,
            "result": {
                "protocolVersion": "2024-11-05",
                "capabilities": {
                    "tools": {}
                },
                "serverInfo": {
                    "name": "aigpt",
                    "version": "0.1.0"
                }
            }
        })
    }

    // ツールリストハンドラ (拡張可能)
    pub fn handle_tools_list(&self, id: Value) -> Value {
        let tools = self.get_available_tools();
        json!({
            "jsonrpc": "2.0",
            "id": id,
            "result": {
                "tools": tools
            }
        })
    }

    // 基本ツール定義 (拡張で上書き可能)
    pub fn get_available_tools(&self) -> Vec<Value> {
        vec![
            json!({
                "name": "create_memory",
                "description": "Create a new memory entry",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "content": {
                            "type": "string",
                            "description": "Content of the memory"
                        }
                    },
                    "required": ["content"]
                }
            }),
            json!({
                "name": "search_memories",
                "description": "Search memories by content",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "query": {
                            "type": "string",
                            "description": "Search query"
                        }
                    },
                    "required": ["query"]
                }
            }),
            json!({
                "name": "update_memory",
                "description": "Update an existing memory entry",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "id": {
                            "type": "string",
                            "description": "ID of the memory to update"
                        },
                        "content": {
                            "type": "string",
                            "description": "New content for the memory"
                        }
                    },
                    "required": ["id", "content"]
                }
            }),
            json!({
                "name": "delete_memory",
                "description": "Delete a memory entry",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "id": {
                            "type": "string",
                            "description": "ID of the memory to delete"
                        }
                    },
                    "required": ["id"]
                }
            }),
            json!({
                "name": "list_conversations",
                "description": "List all imported conversations",
                "inputSchema": {
                    "type": "object",
                    "properties": {}
                }
            })
        ]
    }

    // ツール呼び出しハンドラ
    async fn handle_tools_call(&mut self, request: Value, id: Value) -> Value {
        let tool_name = request["params"]["name"].as_str().unwrap_or("");
        let arguments = &request["params"]["arguments"];

        let result = self.execute_tool(tool_name, arguments).await;

        json!({
            "jsonrpc": "2.0",
            "id": id,
            "result": {
                "content": [{
                    "type": "text",
                    "text": result.to_string()
                }]
            }
        })
    }

    // ツール実行 (拡張で上書き可能)
    pub async fn execute_tool(&mut self, tool_name: &str, arguments: &Value) -> Value {
        match tool_name {
            "create_memory" => self.tool_create_memory(arguments),
            "search_memories" => self.tool_search_memories(arguments),
            "update_memory" => self.tool_update_memory(arguments),
            "delete_memory" => self.tool_delete_memory(arguments),
            "list_conversations" => self.tool_list_conversations(),
            _ => json!({
                "success": false,
                "error": format!("Unknown tool: {}", tool_name)
            })
        }
    }

    // 基本ツール実装
    fn tool_create_memory(&mut self, arguments: &Value) -> Value {
        let content = arguments["content"].as_str().unwrap_or("");
        match self.memory_manager.create_memory(content) {
            Ok(id) => json!({
                "success": true,
                "id": id,
                "message": "Memory created successfully"
            }),
            Err(e) => json!({
                "success": false,
                "error": e.to_string()
            })
        }
    }

    fn tool_search_memories(&self, arguments: &Value) -> Value {
        let query = arguments["query"].as_str().unwrap_or("");
        let memories = self.memory_manager.search_memories(query);
        json!({
            "success": true,
            "memories": memories.into_iter().map(|m| json!({
                "id": m.id,
                "content": m.content,
                "created_at": m.created_at,
                "updated_at": m.updated_at
            })).collect::<Vec<_>>()
        })
    }

    fn tool_update_memory(&mut self, arguments: &Value) -> Value {
        let id = arguments["id"].as_str().unwrap_or("");
        let content = arguments["content"].as_str().unwrap_or("");
        match self.memory_manager.update_memory(id, content) {
            Ok(()) => json!({
                "success": true,
                "message": "Memory updated successfully"
            }),
            Err(e) => json!({
                "success": false,
                "error": e.to_string()
            })
        }
    }

    fn tool_delete_memory(&mut self, arguments: &Value) -> Value {
        let id = arguments["id"].as_str().unwrap_or("");
        match self.memory_manager.delete_memory(id) {
            Ok(()) => json!({
                "success": true,
                "message": "Memory deleted successfully"
            }),
            Err(e) => json!({
                "success": false,
                "error": e.to_string()
            })
        }
    }

    fn tool_list_conversations(&self) -> Value {
        let conversations = self.memory_manager.list_conversations();
        json!({
            "success": true,
            "conversations": conversations.into_iter().map(|c| json!({
                "id": c.id,
                "title": c.title,
                "created_at": c.created_at,
                "message_count": c.message_count
            })).collect::<Vec<_>>()
        })
    }

    // 不明なメソッドハンドラ
    fn handle_unknown_method(&self, id: Value) -> Value {
        json!({
            "jsonrpc": "2.0",
            "id": id,
            "error": {
                "code": -32601,
                "message": "Method not found"
            }
        })
    }
}
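Because handle_request is public, the JSON-RPC shapes the stdio loop expects can also be exercised directly, which is handy for a quick smoke test without wiring up a client. A minimal sketch, assuming aigpt as a library dependency; the ids and memory content are illustrative, and note that tools/call writes to the real memory.json:

```rust
// Sketch: feeding MCP requests straight into BaseMCPServer::handle_request.
use aigpt::mcp::BaseMCPServer;
use serde_json::json;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let mut server = BaseMCPServer::new().await?;

    // MCP handshake: returns protocolVersion, capabilities, serverInfo.
    let init = json!({ "jsonrpc": "2.0", "id": 1, "method": "initialize" });
    println!("{}", server.handle_request(init).await);

    // Invoke the create_memory tool the same way tools/call would.
    let call = json!({
        "jsonrpc": "2.0",
        "id": 2,
        "method": "tools/call",
        "params": {
            "name": "create_memory",
            "arguments": { "content": "Rust + MCP memo" }
        }
    });
    println!("{}", server.handle_request(call).await);
    Ok(())
}
```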
293 src/mcp/extended.rs Normal file
@@ -0,0 +1,293 @@
use anyhow::Result;
use serde_json::{json, Value};

use super::base::BaseMCPServer;

pub struct ExtendedMCPServer {
    base: BaseMCPServer,
}

impl ExtendedMCPServer {
    pub async fn new() -> Result<Self> {
        let base = BaseMCPServer::new().await?;
        Ok(ExtendedMCPServer { base })
    }

    pub async fn run(&mut self) -> Result<()> {
        self.base.run().await
    }

    pub async fn handle_request(&mut self, request: Value) -> Value {
        self.base.handle_request(request).await
    }

    // 拡張ツールを追加
    pub fn get_available_tools(&self) -> Vec<Value> {
        #[allow(unused_mut)]
        let mut tools = self.base.get_available_tools();

        // AI分析ツールを追加
        #[cfg(feature = "ai-analysis")]
        {
            tools.push(json!({
                "name": "analyze_sentiment",
                "description": "Analyze sentiment of memories",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "period": {
                            "type": "string",
                            "description": "Time period to analyze"
                        }
                    }
                }
            }));

            tools.push(json!({
                "name": "extract_insights",
                "description": "Extract insights and patterns from memories",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "category": {
                            "type": "string",
                            "description": "Category to analyze"
                        }
                    }
                }
            }));
        }

        // Web統合ツールを追加
        #[cfg(feature = "web-integration")]
        {
            tools.push(json!({
                "name": "import_webpage",
                "description": "Import content from a webpage",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "url": {
                            "type": "string",
                            "description": "URL to import from"
                        }
                    },
                    "required": ["url"]
                }
            }));
        }

        // セマンティック検索強化
        #[cfg(feature = "semantic-search")]
        {
            // create_memoryを拡張版で上書き
            if let Some(pos) = tools.iter().position(|tool| tool["name"] == "create_memory") {
                tools[pos] = json!({
                    "name": "create_memory",
                    "description": "Create a new memory entry with optional AI analysis",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "content": {
                                "type": "string",
                                "description": "Content of the memory"
                            },
                            "analyze": {
                                "type": "boolean",
                                "description": "Enable AI analysis for this memory"
                            }
                        },
                        "required": ["content"]
                    }
                });
            }

            // search_memoriesを拡張版で上書き
            if let Some(pos) = tools.iter().position(|tool| tool["name"] == "search_memories") {
                tools[pos] = json!({
                    "name": "search_memories",
                    "description": "Search memories with advanced options",
                    "inputSchema": {
                        "type": "object",
                        "properties": {
                            "query": {
                                "type": "string",
                                "description": "Search query"
                            },
                            "semantic": {
                                "type": "boolean",
                                "description": "Use semantic search"
                            },
                            "category": {
                                "type": "string",
                                "description": "Filter by category"
                            },
                            "time_range": {
                                "type": "string",
                                "description": "Filter by time range (e.g., '1week', '1month')"
                            }
                        },
                        "required": ["query"]
                    }
                });
            }
        }

        tools
    }

    // 拡張ツール実行
    pub async fn execute_tool(&mut self, tool_name: &str, arguments: &Value) -> Value {
        match tool_name {
            // 拡張機能
            #[cfg(feature = "ai-analysis")]
            "analyze_sentiment" => self.tool_analyze_sentiment(arguments).await,
            #[cfg(feature = "ai-analysis")]
            "extract_insights" => self.tool_extract_insights(arguments).await,
            #[cfg(feature = "web-integration")]
            "import_webpage" => self.tool_import_webpage(arguments).await,

            // 拡張版の基本ツール (AI分析付き)
            "create_memory" => self.tool_create_memory_extended(arguments).await,
            "search_memories" => self.tool_search_memories_extended(arguments).await,

            // 基本ツールにフォールバック
            _ => self.base.execute_tool(tool_name, arguments).await,
        }
    }

    // 拡張ツール実装
    async fn tool_create_memory_extended(&mut self, arguments: &Value) -> Value {
        let content = arguments["content"].as_str().unwrap_or("");
        let analyze = arguments["analyze"].as_bool().unwrap_or(false);

        let final_content = if analyze {
            #[cfg(feature = "ai-analysis")]
            {
                format!("[AI分析] 感情: neutral, カテゴリ: general\n{}", content)
            }
            #[cfg(not(feature = "ai-analysis"))]
            {
                content.to_string()
            }
        } else {
            content.to_string()
        };

        match self.base.memory_manager.create_memory(&final_content) {
            Ok(id) => json!({
                "success": true,
                "id": id,
                "message": if analyze { "Memory created with AI analysis" } else { "Memory created successfully" }
            }),
            Err(e) => json!({
                "success": false,
                "error": e.to_string()
            })
        }
    }

    async fn tool_search_memories_extended(&mut self, arguments: &Value) -> Value {
        let query = arguments["query"].as_str().unwrap_or("");
        let semantic = arguments["semantic"].as_bool().unwrap_or(false);

        let memories = if semantic {
            #[cfg(feature = "semantic-search")]
            {
                // モックセマンティック検索
                self.base.memory_manager.search_memories(query)
            }
            #[cfg(not(feature = "semantic-search"))]
            {
                self.base.memory_manager.search_memories(query)
            }
        } else {
            self.base.memory_manager.search_memories(query)
        };

        json!({
            "success": true,
            "memories": memories.into_iter().map(|m| json!({
                "id": m.id,
                "content": m.content,
                "created_at": m.created_at,
                "updated_at": m.updated_at
            })).collect::<Vec<_>>(),
            "search_type": if semantic { "semantic" } else { "keyword" }
        })
    }

    #[cfg(feature = "ai-analysis")]
    async fn tool_analyze_sentiment(&mut self, _arguments: &Value) -> Value {
        json!({
            "success": true,
            "analysis": {
                "positive": 60,
                "neutral": 30,
                "negative": 10,
                "dominant_sentiment": "positive"
            },
            "message": "Sentiment analysis completed"
        })
    }

    #[cfg(feature = "ai-analysis")]
    async fn tool_extract_insights(&mut self, _arguments: &Value) -> Value {
        json!({
            "success": true,
            "insights": {
                "most_frequent_topics": ["programming", "ai", "productivity"],
                "learning_frequency": "5 times per week",
                "growth_trend": "increasing",
                "recommendations": ["Focus more on advanced topics", "Consider practical applications"]
            },
            "message": "Insights extracted successfully"
        })
    }

    #[cfg(feature = "web-integration")]
    async fn tool_import_webpage(&mut self, arguments: &Value) -> Value {
        let url = arguments["url"].as_str().unwrap_or("");
        match self.import_from_web(url).await {
            Ok(content) => {
                match self.base.memory_manager.create_memory(&content) {
                    Ok(id) => json!({
                        "success": true,
                        "id": id,
                        "message": format!("Webpage imported successfully from {}", url)
                    }),
                    Err(e) => json!({
                        "success": false,
                        "error": e.to_string()
                    })
                }
            }
            Err(e) => json!({
                "success": false,
                "error": format!("Failed to import webpage: {}", e)
            })
        }
    }

    #[cfg(feature = "web-integration")]
    async fn import_from_web(&self, url: &str) -> Result<String> {
        let response = reqwest::get(url).await?;
        let content = response.text().await?;

        let document = scraper::Html::parse_document(&content);
        let title_selector = scraper::Selector::parse("title").unwrap();
        let body_selector = scraper::Selector::parse("p").unwrap();

        let title = document.select(&title_selector)
            .next()
            .map(|el| el.inner_html())
            .unwrap_or_else(|| "Untitled".to_string());

        let paragraphs: Vec<String> = document.select(&body_selector)
            .map(|el| el.inner_html())
            .take(5)
            .collect();

        Ok(format!("# {}\nURL: {}\n\n{}", title, url, paragraphs.join("\n\n")))
    }
}
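The extended server only adds tools when the matching cargo features (ai-analysis, web-integration, semantic-search, bundled as extended in Cargo.toml) are compiled in; anything else falls through to the base implementation. A minimal sketch of driving it through execute_tool, assuming aigpt as a library dependency; with the current mock, semantic: true still runs the same keyword search:

```rust
// Sketch: calling the extended tool path directly. analyze_sentiment and
// import_webpage only exist when the corresponding features are enabled,
// so only the always-present search_memories path is shown here.
use aigpt::mcp::ExtendedMCPServer;
use serde_json::json;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let mut server = ExtendedMCPServer::new().await?;

    // Routed to tool_search_memories_extended; the response carries
    // "search_type": "semantic" even though the search itself is keyword-based.
    let result = server
        .execute_tool("search_memories", &json!({ "query": "rust", "semantic": true }))
        .await;
    println!("{}", result);
    Ok(())
}
```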
5 src/mcp/mod.rs Normal file
@@ -0,0 +1,5 @@
pub mod base;
pub mod extended;

pub use base::BaseMCPServer;
pub use extended::ExtendedMCPServer;
276 src/memory.rs
@@ -1,49 +1,241 @@
// src/memory.rs
use chrono::{DateTime, Local, Utc};
use anyhow::{Context, Result};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::fs::{self};
//use std::fs::{self, OpenOptions};
use std::io::{BufReader, BufWriter};
use std::collections::HashMap;
use std::path::PathBuf;
use std::{fs::File};
//use std::{env, fs::File};
use uuid::Uuid;

#[derive(Debug, Serialize, Deserialize)]
pub struct MemoryEntry {
    pub timestamp: DateTime<Utc>,
    pub sender: String,
    pub message: String,
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Memory {
    pub id: String,
    pub content: String,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}

pub fn log_message(base_dir: &PathBuf, sender: &str, message: &str) {
    let now_utc = Utc::now();
    let date_str = Local::now().format("%Y-%m-%d").to_string();
    let mut file_path = base_dir.clone();
    file_path.push("memory");
    let _ = fs::create_dir_all(&file_path);
    file_path.push(format!("{}.json", date_str));

    let new_entry = MemoryEntry {
        timestamp: now_utc,
        sender: sender.to_string(),
        message: message.to_string(),
    };

    let mut entries = if file_path.exists() {
        let file = File::open(&file_path).expect("💥 メモリファイルの読み込み失敗");
        let reader = BufReader::new(file);
        serde_json::from_reader(reader).unwrap_or_else(|_| vec![])
    } else {
        vec![]
    };

    entries.push(new_entry);

    let file = File::create(&file_path).expect("💥 メモリファイルの書き込み失敗");
    let writer = BufWriter::new(file);
    serde_json::to_writer_pretty(writer, &entries).expect("💥 JSONの書き込み失敗");
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Conversation {
    pub id: String,
    pub title: String,
    pub created_at: DateTime<Utc>,
    pub message_count: u32,
}

// 利用例(ask_chatの中)
// log_message(&config.base_dir, "user", question);
// log_message(&config.base_dir, "ai", &response);
#[derive(Debug, Clone, Serialize, Deserialize)]
struct ChatGPTNode {
    id: String,
    children: Vec<String>,
    parent: Option<String>,
    message: Option<ChatGPTMessage>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
struct ChatGPTMessage {
    id: String,
    author: ChatGPTAuthor,
    content: ChatGPTContent,
    create_time: Option<f64>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
struct ChatGPTAuthor {
    role: String,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
enum ChatGPTContent {
    Text {
        content_type: String,
        parts: Vec<String>,
    },
    Other(serde_json::Value),
}

#[derive(Debug, Clone, Serialize, Deserialize)]
struct ChatGPTConversation {
    #[serde(default)]
    id: String,
    #[serde(alias = "conversation_id")]
    conversation_id: Option<String>,
    title: String,
    create_time: f64,
    mapping: HashMap<String, ChatGPTNode>,
}

pub struct MemoryManager {
    memories: HashMap<String, Memory>,
    conversations: HashMap<String, Conversation>,
    data_file: PathBuf,
}

impl MemoryManager {
    pub async fn new() -> Result<Self> {
        let data_dir = dirs::config_dir()
            .context("Could not find config directory")?
            .join("syui")
            .join("ai")
            .join("gpt");

        std::fs::create_dir_all(&data_dir)?;

        let data_file = data_dir.join("memory.json");

        let (memories, conversations) = if data_file.exists() {
            Self::load_data(&data_file)?
        } else {
            (HashMap::new(), HashMap::new())
        };

        Ok(MemoryManager {
            memories,
            conversations,
            data_file,
        })
    }

    pub fn create_memory(&mut self, content: &str) -> Result<String> {
        let id = Uuid::new_v4().to_string();
        let now = Utc::now();

        let memory = Memory {
            id: id.clone(),
            content: content.to_string(),
            created_at: now,
            updated_at: now,
        };

        self.memories.insert(id.clone(), memory);
        self.save_data()?;

        Ok(id)
    }

    pub fn update_memory(&mut self, id: &str, content: &str) -> Result<()> {
        if let Some(memory) = self.memories.get_mut(id) {
            memory.content = content.to_string();
            memory.updated_at = Utc::now();
            self.save_data()?;
            Ok(())
        } else {
            Err(anyhow::anyhow!("Memory not found: {}", id))
        }
    }

    pub fn delete_memory(&mut self, id: &str) -> Result<()> {
        if self.memories.remove(id).is_some() {
            self.save_data()?;
            Ok(())
        } else {
            Err(anyhow::anyhow!("Memory not found: {}", id))
        }
    }

    pub fn search_memories(&self, query: &str) -> Vec<&Memory> {
        let query_lower = query.to_lowercase();
        let mut results: Vec<_> = self.memories
            .values()
            .filter(|memory| memory.content.to_lowercase().contains(&query_lower))
            .collect();

        results.sort_by(|a, b| b.updated_at.cmp(&a.updated_at));
        results
    }

    pub fn list_conversations(&self) -> Vec<&Conversation> {
        let mut conversations: Vec<_> = self.conversations.values().collect();
        conversations.sort_by(|a, b| b.created_at.cmp(&a.created_at));
        conversations
    }

    #[allow(dead_code)]
    pub async fn import_chatgpt_conversations(&mut self, file_path: &PathBuf) -> Result<()> {
        let content = std::fs::read_to_string(file_path)
            .context("Failed to read conversations file")?;

        let chatgpt_conversations: Vec<ChatGPTConversation> = serde_json::from_str(&content)
            .context("Failed to parse ChatGPT conversations")?;

        let mut imported_memories = 0;
        let mut imported_conversations = 0;

        for conv in chatgpt_conversations {
            // Get the actual conversation ID
            let conv_id = if !conv.id.is_empty() {
                conv.id.clone()
            } else if let Some(cid) = conv.conversation_id {
                cid
            } else {
                Uuid::new_v4().to_string()
            };

            // Add conversation
            let conversation = Conversation {
                id: conv_id.clone(),
                title: conv.title.clone(),
                created_at: DateTime::from_timestamp(conv.create_time as i64, 0)
                    .unwrap_or_else(Utc::now),
                message_count: conv.mapping.len() as u32,
            };
            self.conversations.insert(conv_id.clone(), conversation);
            imported_conversations += 1;

            // Extract memories from messages
            for (_, node) in conv.mapping {
                if let Some(message) = node.message {
                    if let ChatGPTContent::Text { parts, .. } = message.content {
                        for part in parts {
                            if !part.trim().is_empty() && part.len() > 10 {
                                let memory_content = format!("[{}] {}", conv.title, part);
                                self.create_memory(&memory_content)?;
                                imported_memories += 1;
                            }
                        }
                    }
                }
            }
        }

        println!("Imported {} conversations and {} memories",
            imported_conversations, imported_memories);

        Ok(())
    }

    fn load_data(file_path: &PathBuf) -> Result<(HashMap<String, Memory>, HashMap<String, Conversation>)> {
        let content = std::fs::read_to_string(file_path)
            .context("Failed to read data file")?;

        #[derive(Deserialize)]
        struct Data {
            memories: HashMap<String, Memory>,
            conversations: HashMap<String, Conversation>,
        }

        let data: Data = serde_json::from_str(&content)
            .context("Failed to parse data file")?;

        Ok((data.memories, data.conversations))
    }

    fn save_data(&self) -> Result<()> {
        #[derive(Serialize)]
        struct Data<'a> {
            memories: &'a HashMap<String, Memory>,
            conversations: &'a HashMap<String, Conversation>,
        }

        let data = Data {
            memories: &self.memories,
            conversations: &self.conversations,
        };

        let content = serde_json::to_string_pretty(&data)
            .context("Failed to serialize data")?;

        std::fs::write(&self.data_file, content)
            .context("Failed to write data file")?;

        Ok(())
    }
}
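A minimal sketch of using MemoryManager directly, assuming aigpt as a library dependency. State is persisted to memory.json under dirs::config_dir()/syui/ai/gpt, so the exact location is platform-dependent (e.g. ~/.config/syui/ai/gpt/memory.json on Linux); the memory text below is illustrative:

```rust
// Sketch: the CRUD surface exposed by MemoryManager.
use aigpt::memory::MemoryManager;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let mut manager = MemoryManager::new().await?;

    // Create, then update a memory; each call persists to memory.json.
    let id = manager.create_memory("Meeting notes: ship the MCP server")?;
    manager.update_memory(&id, "Meeting notes: shipped the MCP server")?;

    // Case-insensitive substring search, newest first.
    for memory in manager.search_memories("mcp") {
        println!("{}: {}", memory.id, memory.content);
    }

    manager.delete_memory(&id)?;
    Ok(())
}
```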
147 src/metrics.rs
@@ -1,147 +0,0 @@
// src/metrics.rs
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path;

use crate::config::ConfigPaths;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Metrics {
    pub trust: f32,
    pub intimacy: f32,
    pub energy: f32,
    pub can_send: bool,
    pub last_updated: DateTime<Utc>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Personality {
    pub kind: String,
    pub strength: f32,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Relationship {
    pub trust: f32,
    pub intimacy: f32,
    pub curiosity: f32,
    pub threshold: f32,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Environment {
    pub luck_today: f32,
    pub luck_history: Vec<f32>,
    pub level: i32,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Messaging {
    pub enabled: bool,
    pub schedule_time: Option<String>,
    pub decay_rate: f32,
    pub templates: Vec<String>,
    pub sent_today: bool, // 追加
    pub last_sent_date: Option<String>, // 追加
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Memory {
    pub recent_messages: Vec<String>,
    pub long_term_notes: Vec<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserData {
    pub personality: Personality,
    pub relationship: Relationship,
    pub environment: Environment,
    pub messaging: Messaging,
    pub last_interaction: DateTime<Utc>,
    pub memory: Memory,
    pub metrics: Metrics,
}

impl Metrics {
    pub fn decay(&mut self) {
        let now = Utc::now();
        let hours = (now - self.last_updated).num_minutes() as f32 / 60.0;
        self.trust = decay_param(self.trust, hours);
        self.intimacy = decay_param(self.intimacy, hours);
        self.energy = decay_param(self.energy, hours);
        self.can_send = self.trust >= 0.5 && self.intimacy >= 0.5 && self.energy >= 0.5;
        self.last_updated = now;
    }
}

pub fn load_user_data(path: &Path) -> UserData {
    let config = ConfigPaths::new();
    let example_path = Path::new("example.json");
    config.ensure_file_exists("json", example_path);

    if !path.exists() {
        return UserData {
            personality: Personality {
                kind: "positive".into(),
                strength: 0.8,
            },
            relationship: Relationship {
                trust: 0.2,
                intimacy: 0.6,
                curiosity: 0.5,
                threshold: 1.5,
            },
            environment: Environment {
                luck_today: 0.9,
                luck_history: vec![0.9, 0.9, 0.9],
                level: 1,
            },
            messaging: Messaging {
                enabled: true,
                schedule_time: Some("08:00".to_string()),
                decay_rate: 0.1,
                templates: vec![
                    "おはよう!今日もがんばろう!".to_string(),
                    "ねえ、話したいことがあるの。".to_string(),
                ],
                sent_today: false,
                last_sent_date: None,
            },
            last_interaction: Utc::now(),
            memory: Memory {
                recent_messages: vec![],
                long_term_notes: vec![],
            },
            metrics: Metrics {
                trust: 0.5,
                intimacy: 0.5,
                energy: 0.5,
                can_send: true,
                last_updated: Utc::now(),
            },
        };
    }

    let content = fs::read_to_string(path).expect("user.json の読み込みに失敗しました");
    serde_json::from_str(&content).expect("user.json のパースに失敗しました")
}

pub fn save_user_data(path: &Path, data: &UserData) {
    let content = serde_json::to_string_pretty(data).expect("user.json のシリアライズ失敗");
    fs::write(path, content).expect("user.json の書き込みに失敗しました");
}

pub fn update_metrics_decay() -> Metrics {
    let config = ConfigPaths::new();
    let path = config.base_dir.join("user.json");
    let mut data = load_user_data(&path);
    data.metrics.decay();
    save_user_data(&path, &data);
    data.metrics
}

fn decay_param(value: f32, hours: f32) -> f32 {
    let decay_rate = 0.05;
    (value * (1.0f32 - decay_rate).powf(hours)).clamp(0.0, 1.0)
}
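The removed decay_param is an exponential decay with a fixed 5% hourly rate: after h hours a metric v becomes v · 0.95^h, clamped to [0, 1], so a value of 0.8 drops to roughly 0.23 after one day. A small worked example (values illustrative):

```rust
// Sketch: the behaviour of the removed decay_param over a few time spans.
fn decay_param(value: f32, hours: f32) -> f32 {
    let decay_rate = 0.05;
    (value * (1.0f32 - decay_rate).powf(hours)).clamp(0.0, 1.0)
}

fn main() {
    // 0.8 * 0.95^24 ≈ 0.23 after 24 hours.
    for hours in [0.0_f32, 6.0, 12.0, 24.0, 48.0] {
        println!("{:>5.1} h -> {:.3}", hours, decay_param(0.8, hours));
    }
}
```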
72 src/model.rs
@@ -1,72 +0,0 @@
//src/model.rs
use rusqlite::{params, Connection, Result as SqlResult};
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
pub struct AiSystem {
    pub personality: Personality,
    pub relationship: Relationship,
    pub environment: Environment,
    pub messaging: Messaging,
}

impl AiSystem {
    pub fn save_to_db(&self, conn: &Connection) -> SqlResult<()> {
        conn.execute(
            "CREATE TABLE IF NOT EXISTS ai_state (id INTEGER PRIMARY KEY, json TEXT)",
            [],
        )?;

        let json_data = serde_json::to_string(self).map_err(|e| {
            rusqlite::Error::ToSqlConversionFailure(Box::new(e))
        })?;

        conn.execute(
            "INSERT OR REPLACE INTO ai_state (id, json) VALUES (?1, ?2)",
            params![1, json_data],
        )?;

        Ok(())
    }

    pub fn load_from_db(conn: &Connection) -> SqlResult<Self> {
        let mut stmt = conn.prepare("SELECT json FROM ai_state WHERE id = ?1")?;
        let json: String = stmt.query_row(params![1], |row| row.get(0))?;

        // ここも serde_json のエラーを map_err で変換
        let system: AiSystem = serde_json::from_str(&json).map_err(|e| {
            rusqlite::Error::FromSqlConversionFailure(0, rusqlite::types::Type::Text, Box::new(e))
        })?;

        Ok(system)
    }
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Personality {
    pub kind: String,   // e.g., "positive", "negative", "neutral"
    pub strength: f32,  // 0.0 - 1.0
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Relationship {
    pub trust: f32,      // 0.0 - 1.0
    pub intimacy: f32,   // 0.0 - 1.0
    pub curiosity: f32,  // 0.0 - 1.0
    pub threshold: f32,  // if sum > threshold, allow messaging
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Environment {
    pub luck_today: f32,        // 0.1 - 1.0
    pub luck_history: Vec<f32>, // last 3 values
    pub level: i32,             // current mental strength level
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Messaging {
    pub enabled: bool,
    pub schedule_time: Option<String>, // e.g., "08:00"
    pub decay_rate: f32,               // how quickly emotion fades (0.0 - 1.0)
    pub templates: Vec<String>,        // message template variations
}
13 src/utils.rs
@@ -1,13 +0,0 @@
// src/utils.rs
use std::fs;
use crate::model::AiSystem;

pub fn load_config(path: &str) -> AiSystem {
    let data = fs::read_to_string(path).expect("JSON読み込み失敗");
    serde_json::from_str(&data).expect("JSONパース失敗")
}

pub fn save_config(path: &str, ai: &AiSystem) {
    let json = serde_json::to_string_pretty(&ai).expect("JSONシリアライズ失敗");
    fs::write(path, json).expect("JSON保存失敗");
}