commit 58e202fa1e (parent 4f55138306)

    first claude

Cargo.toml (14 lines changed)
@@ -4,12 +4,10 @@ version = "0.1.0"
 edition = "2021"
 
 [dependencies]
-serde = { version = "1.0", features = ["derive"] }
-serde_json = "1.0"
-chrono = { version = "0.4", features = ["serde"] }
-seahorse = "*"
-rusqlite = { version = "0.29", features = ["serde_json"] }
+reqwest = { version = "*", features = ["json"] }
+serde = { version = "*", features = ["derive"] }
+serde_json = "*"
+tokio = { version = "*", features = ["full"] }
+clap = { version = "*", features = ["derive"] }
 shellexpand = "*"
-fs_extra = "1.3"
-rand = "0.9.1"
-reqwest = { version = "*", features = ["blocking", "json"] }
+fs_extra = "*"
README.md (deleted, 47 lines)
@@ -1,47 +0,0 @@
# ai `gpt`

ai x Communication

## Overview

`ai.gpt` runs on the AGE system.

This is a prototype of an autonomous, relationship-driven AI system based on the axes of "Personality × Relationship × External Environment × Time Variation."

The parameters "Send Permission," "Send Timing," and "Send Content" are determined by the factors of "Personality × Relationship × External Environment × Time Variation."

## Integration

`ai.ai` runs on the AIM system, which is designed to read human emotions.

- AIM focuses on the axis of personality and ethics (the AI's consciousness structure)
- AGE focuses on the axis of behavior and relationships (the AI's autonomy and behavior)

> When these two systems work together, they create a world where users can feel they are "growing together with AI."

## mcp

```sh
$ ollama run syui/ai
```

```sh
$ cargo build
$ ./aigpt mcp setup
$ ./aigpt mcp chat "hello world!"
$ ./aigpt mcp chat "hello world!" --host http://localhost:11434 --model syui/ai

---
# openai api
$ ./aigpt mcp set-api --api sk-abc123
$ ./aigpt mcp chat "こんにちは" -p openai -m gpt-4o-mini

---
# Let the AI read the files tracked by git
./aigpt mcp chat --host http://localhost:11434 --repo git@git.syui.ai:ai/gpt

**Suggested improvements and next steps:**

1. **Major improvements to README.md:**

**Next steps:**

1. **Create README.md:** create the README.md file following the instructions in step 1.
```
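The Overview above reduces messaging to four axes: personality, relationship, external environment, and time variation. As a rough, non-authoritative sketch of how those axes could combine into a send decision (field names follow example.json below; the weighting, the decay handling, and the threshold comparison are assumptions made for illustration, not the project's actual algorithm):

```python
# Hypothetical sketch only. Field names mirror example.json; the formula is
# an illustrative assumption, not the crate's real logic.
import json
from datetime import datetime, timezone


def can_send(state: dict, now: datetime) -> bool:
    rel = state["relationship"]
    env = state["environment"]
    decay_rate = state["messaging"]["decay_rate"]

    # Time variation: scores fade with the days since the last interaction.
    last = datetime.fromisoformat(state["last_interaction"].replace("Z", "+00:00"))
    days = (now - last).total_seconds() / 86400
    decayed = max(rel["trust"] + rel["intimacy"] + rel["curiosity"] - decay_rate * days, 0.0)

    # Personality strength and today's luck scale the effective score.
    score = decayed * state["personality"]["strength"] * env["luck_today"]
    return score >= rel["threshold"]


if __name__ == "__main__":
    state = json.load(open("example.json"))
    print(can_send(state, datetime.now(timezone.utc)))
```

The crate itself keeps comparable state in user.json (seeded from example.json) and nudges trust, intimacy, and energy after each exchange in src/chat.rs and src/commands/scheduler.rs.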
example.json (deleted, 40 lines)
@@ -1,40 +0,0 @@
{
  "personality": {
    "kind": "positive",
    "strength": 0.8
  },
  "relationship": {
    "trust": 0.2,
    "intimacy": 0.6,
    "curiosity": 0.5,
    "threshold": 1.5
  },
  "environment": {
    "luck_today": 0.9,
    "luck_history": [0.9, 0.9, 0.9],
    "level": 1
  },
  "messaging": {
    "enabled": true,
    "schedule_time": "08:00",
    "decay_rate": 0.1,
    "templates": [
      "おはよう!今日もがんばろう!",
      "ねえ、話したいことがあるの。"
    ],
    "sent_today": false,
    "last_sent_date": null
  },
  "last_interaction": "2025-05-21T23:15:00Z",
  "memory": {
    "recent_messages": [],
    "long_term_notes": []
  },
  "metrics": {
    "trust": 0.5,
    "intimacy": 0.5,
    "energy": 0.5,
    "can_send": true,
    "last_updated": "2025-05-21T15:52:06.590981Z"
  }
}
gpt.json (deleted, 1 line)
@@ -1 +0,0 @@
{ "system_name": "AGE system", "full_name": "Autonomous Generative Entity", "description": "人格・関係性・環境・時間に基づき、AIが自律的にユーザーにメッセージを送信する自律人格システム。AIM systemと連携して、自然な会話や気づきをもたらす。", "core_components": { "personality": { "type": "enum", "variants": ["positive", "negative", "logical", "emotional", "mixed"], "parameters": { "message_trigger_style": "運勢や関係性による送信傾向", "decay_rate_modifier": "関係性スコアの時間減衰への影響" } }, "relationship": { "parameters": ["trust", "affection", "intimacy"], "properties": { "persistent": true, "hidden": true, "irreversible": false, "decay_over_time": true }, "decay_function": "exp(-t / strength)" }, "environment": { "daily_luck": { "type": "float", "range": [0.1, 1.0], "update": "daily", "streak_mechanism": { "trigger": "min_or_max_luck_3_times_in_a_row", "effect": "personality_strength_roll", "chance": 0.5 } } }, "memory": { "long_term_memory": "user_relationship_log", "short_term_context": "recent_interactions", "usage_in_generation": true }, "message_trigger": { "condition": { "relationship_threshold": { "trust": 0.8, "affection": 0.6 }, "time_decay": true, "environment_luck": "personality_dependent" }, "timing": { "based_on": ["time_of_day", "personality", "recent_interaction"], "modifiers": { "emotional": "morning or night", "logical": "daytime" } } }, "message_generation": { "style_variants": ["thought", "casual", "encouragement", "watchful"], "influenced_by": ["personality", "relationship", "daily_luck", "memory"], "llm_integration": true }, "state_transition": { "states": ["idle", "ready", "sending", "cooldown"], "transitions": { "ready_if": "thresholds_met", "sending_if": "timing_matched", "cooldown_after": "message_sent" } } }, "extensions": { "persistence": { "database": "sqlite", "storage_items": ["relationship", "personality_level", "daily_luck_log"] }, "api": { "llm": "openai / local LLM", "mode": "rust_cli", "external_event_trigger": true }, "scheduler": { "async_event_loop": true, "interval_check": 3600, "time_decay_check": true }, "integration_with_aim": { "input_from_aim": ["intent_score", "motivation_score"], "usage": "trigger_adjustment, message_personalization" } }, "note": "AGE systemは“話しかけてくるAI”の人格として機能し、AIMによる心の状態評価と連動して、プレイヤーと深い関係を築いていく存在となる。" }
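The gpt.json spec above names the relationship decay function exp(-t / strength). A small worked example of just that formula follows; treating t as days and taking strength from personality.strength are assumptions for illustration, the spec itself only gives the expression:

```python
# Hypothetical illustration of decay_function = "exp(-t / strength)" from
# gpt.json. Assumes t is in days and strength = personality.strength (0.8 in
# example.json); both are assumptions, only the formula is from the spec.
import math


def decayed(score: float, days_since_contact: float, strength: float) -> float:
    return score * math.exp(-days_since_contact / strength)


# A trust score of 0.5 with strength 0.8 drops to roughly 0.14 after one day
# of silence and to about 0.04 after two days.
for t in (0, 1, 2):
    print(t, round(decayed(0.5, t, 0.8), 3))
```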
img/ai_r.png: binary file not shown (before: 1.8 MiB)
img/image.png: binary file not shown (before: 1.8 MiB)
mcp/chat.py (new file, 125 lines)
@@ -0,0 +1,125 @@
# mcp/chat.py
"""
Chat client for aigpt CLI
"""
import sys
import json
import requests
from datetime import datetime
from config import init_directories, load_config, MEMORY_DIR


def save_conversation(user_message, ai_response):
    """Save the conversation to a file"""
    init_directories()

    conversation = {
        "timestamp": datetime.now().isoformat(),
        "user": user_message,
        "ai": ai_response
    }

    # Save to a per-date file
    today = datetime.now().strftime("%Y-%m-%d")
    chat_file = MEMORY_DIR / f"chat_{today}.jsonl"

    with open(chat_file, "a", encoding="utf-8") as f:
        f.write(json.dumps(conversation, ensure_ascii=False) + "\n")


def chat_with_ollama(config, message):
    """Chat with Ollama"""
    try:
        payload = {
            "model": config["model"],
            "prompt": message,
            "stream": False
        }

        response = requests.post(config["url"], json=payload, timeout=30)
        response.raise_for_status()

        result = response.json()
        return result.get("response", "No response received")

    except requests.exceptions.RequestException as e:
        return f"Error connecting to Ollama: {e}"
    except Exception as e:
        return f"Error: {e}"


def chat_with_openai(config, message):
    """Chat with OpenAI"""
    try:
        headers = {
            "Authorization": f"Bearer {config['api_key']}",
            "Content-Type": "application/json"
        }

        payload = {
            "model": config["model"],
            "messages": [
                {"role": "user", "content": message}
            ]
        }

        response = requests.post(config["url"], json=payload, headers=headers, timeout=30)
        response.raise_for_status()

        result = response.json()
        return result["choices"][0]["message"]["content"]

    except requests.exceptions.RequestException as e:
        return f"Error connecting to OpenAI: {e}"
    except Exception as e:
        return f"Error: {e}"


def chat_with_mcp(config, message):
    """Chat with the MCP server"""
    try:
        payload = {
            "message": message,
            "model": config["model"]
        }

        response = requests.post(config["url"], json=payload, timeout=30)
        response.raise_for_status()

        result = response.json()
        return result.get("response", "No response received")

    except requests.exceptions.RequestException as e:
        return f"Error connecting to MCP server: {e}"
    except Exception as e:
        return f"Error: {e}"


def main():
    if len(sys.argv) != 2:
        print("Usage: python chat.py <message>", file=sys.stderr)
        sys.exit(1)

    message = sys.argv[1]

    try:
        config = load_config()
        print(f"🤖 Using {config['provider']} with model {config['model']}", file=sys.stderr)

        # Run the chat with the configured provider
        if config["provider"] == "ollama":
            response = chat_with_ollama(config, message)
        elif config["provider"] == "openai":
            response = chat_with_openai(config, message)
        elif config["provider"] == "mcp":
            response = chat_with_mcp(config, message)
        else:
            response = f"Unsupported provider: {config['provider']}"

        # Save the conversation
        save_conversation(message, response)

        # Print the response
        print(response)

    except Exception as e:
        print(f"❌ Error: {e}", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()
mcp/cli.py (deleted, 28 lines)
@@ -1,28 +0,0 @@
# cli.py
import sys
import subprocess
from pathlib import Path

SCRIPT_DIR = Path.home() / ".config" / "aigpt" / "mcp" / "scripts"

def run_script(name):
    script_path = SCRIPT_DIR / f"{name}.py"
    if not script_path.exists():
        print(f"❌ スクリプトが見つかりません: {script_path}")
        sys.exit(1)

    args = sys.argv[2:]  # take the arguments that follow the subcommand (e.g. "ask")
    result = subprocess.run(["python", str(script_path)] + args, capture_output=True, text=True)
    print(result.stdout)
    if result.stderr:
        print(result.stderr)

def main():
    if len(sys.argv) < 2:
        print("Usage: mcp <script>")
        return

    command = sys.argv[1]

    if command in {"summarize", "ask", "setup", "server"}:
        run_script(command)
    else:
        print(f"❓ 未知のコマンド: {command}")
mcp/config.py
@@ -1,5 +1,4 @@
-# scripts/config.py
-# scripts/config.py
+# mcp/config.py
 import os
 from pathlib import Path
 
@@ -9,11 +8,13 @@ MEMORY_DIR = BASE_DIR / "memory"
 SUMMARY_DIR = MEMORY_DIR / "summary"
 
 def init_directories():
+    """Create the required directories"""
     BASE_DIR.mkdir(parents=True, exist_ok=True)
     MEMORY_DIR.mkdir(parents=True, exist_ok=True)
     SUMMARY_DIR.mkdir(parents=True, exist_ok=True)
 
 def load_config():
+    """Load settings from environment variables"""
     provider = os.getenv("PROVIDER", "ollama")
     model = os.getenv("MODEL", "syui/ai" if provider == "ollama" else "gpt-4o-mini")
     api_key = os.getenv("OPENAI_API_KEY", "")
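For orientation (not part of the diff above), a minimal sketch of how this config module is consumed. PROVIDER, MODEL, and OPENAI_API_KEY are the variables the hunk reads; the returned keys ("provider", "model", "url", "api_key") are assumed from how mcp/chat.py uses the result:

```python
# Hypothetical usage sketch; assumes mcp/ is on the import path and that
# load_config() returns the dict mcp/chat.py expects.
import os

os.environ.setdefault("PROVIDER", "ollama")
os.environ.setdefault("MODEL", "syui/ai")

from config import init_directories, load_config

init_directories()
cfg = load_config()
print(f"provider={cfg['provider']} model={cfg['model']}")
```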
mcp/requirements.txt (new file, 3 lines)
@@ -0,0 +1,3 @@
fastmcp>=0.1.0
uvicorn>=0.24.0
requests>=2.31.0
@ -1,198 +0,0 @@
|
|||||||
## scripts/ask.py
|
|
||||||
import sys
|
|
||||||
import json
|
|
||||||
import requests
|
|
||||||
from config import load_config
|
|
||||||
from datetime import datetime, timezone
|
|
||||||
|
|
||||||
def build_payload_openai(cfg, message: str):
|
|
||||||
return {
|
|
||||||
"model": cfg["model"],
|
|
||||||
"tools": [
|
|
||||||
{
|
|
||||||
"type": "function",
|
|
||||||
"function": {
|
|
||||||
"name": "ask_message",
|
|
||||||
"description": "過去の記憶を検索します",
|
|
||||||
"parameters": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"query": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "検索したい語句"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"required": ["query"]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"tool_choice": "auto",
|
|
||||||
"messages": [
|
|
||||||
{"role": "system", "content": "あなたは親しみやすいAIで、必要に応じて記憶から情報を検索して応答します。"},
|
|
||||||
{"role": "user", "content": message}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
def build_payload_mcp(message: str):
|
|
||||||
return {
|
|
||||||
"tool": "ask_message", # MCPサーバー側で定義されたツール名
|
|
||||||
"input": {
|
|
||||||
"message": message
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
def build_payload_openai(cfg, message: str):
|
|
||||||
return {
|
|
||||||
"model": cfg["model"],
|
|
||||||
"messages": [
|
|
||||||
{"role": "system", "content": "あなたは思いやりのあるAIです。"},
|
|
||||||
{"role": "user", "content": message}
|
|
||||||
],
|
|
||||||
"temperature": 0.7
|
|
||||||
}
|
|
||||||
|
|
||||||
def call_mcp(cfg, message: str):
|
|
||||||
payload = build_payload_mcp(message)
|
|
||||||
headers = {"Content-Type": "application/json"}
|
|
||||||
response = requests.post(cfg["url"], headers=headers, json=payload)
|
|
||||||
response.raise_for_status()
|
|
||||||
return response.json().get("output", {}).get("response", "❓ 応答が取得できませんでした")
|
|
||||||
|
|
||||||
def call_openai(cfg, message: str):
|
|
||||||
# ツール定義
|
|
||||||
tools = [
|
|
||||||
{
|
|
||||||
"type": "function",
|
|
||||||
"function": {
|
|
||||||
"name": "memory",
|
|
||||||
"description": "記憶を検索する",
|
|
||||||
"parameters": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"query": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "検索する語句"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"required": ["query"]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
|
|
||||||
# 最初のメッセージ送信
|
|
||||||
payload = {
|
|
||||||
"model": cfg["model"],
|
|
||||||
"messages": [
|
|
||||||
{"role": "system", "content": "あなたはAIで、必要に応じてツールmemoryを使って記憶を検索します。"},
|
|
||||||
{"role": "user", "content": message}
|
|
||||||
],
|
|
||||||
"tools": tools,
|
|
||||||
"tool_choice": "auto"
|
|
||||||
}
|
|
||||||
|
|
||||||
headers = {
|
|
||||||
"Authorization": f"Bearer {cfg['api_key']}",
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
}
|
|
||||||
|
|
||||||
res1 = requests.post(cfg["url"], headers=headers, json=payload)
|
|
||||||
res1.raise_for_status()
|
|
||||||
result = res1.json()
|
|
||||||
|
|
||||||
# 🧠 tool_call されたか確認
|
|
||||||
if "tool_calls" in result["choices"][0]["message"]:
|
|
||||||
tool_call = result["choices"][0]["message"]["tool_calls"][0]
|
|
||||||
if tool_call["function"]["name"] == "memory":
|
|
||||||
args = json.loads(tool_call["function"]["arguments"])
|
|
||||||
query = args.get("query", "")
|
|
||||||
print(f"🛠️ ツール実行: memory(query='{query}')")
|
|
||||||
|
|
||||||
# MCPエンドポイントにPOST
|
|
||||||
memory_res = requests.post("http://127.0.0.1:5000/memory/search", json={"query": query})
|
|
||||||
memory_json = memory_res.json()
|
|
||||||
tool_output = memory_json.get("result", "なし")
|
|
||||||
|
|
||||||
# tool_outputをAIに返す
|
|
||||||
followup = {
|
|
||||||
"model": cfg["model"],
|
|
||||||
"messages": [
|
|
||||||
{"role": "system", "content": "あなたはAIで、必要に応じてツールmemoryを使って記憶を検索します。"},
|
|
||||||
{"role": "user", "content": message},
|
|
||||||
{"role": "assistant", "tool_calls": result["choices"][0]["message"]["tool_calls"]},
|
|
||||||
{"role": "tool", "tool_call_id": tool_call["id"], "name": "memory", "content": tool_output}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
res2 = requests.post(cfg["url"], headers=headers, json=followup)
|
|
||||||
res2.raise_for_status()
|
|
||||||
final_response = res2.json()
|
|
||||||
return final_response["choices"][0]["message"]["content"]
|
|
||||||
#print(tool_output)
|
|
||||||
#print(cfg["model"])
|
|
||||||
#print(final_response)
|
|
||||||
|
|
||||||
# ツール未使用 or 通常応答
|
|
||||||
return result["choices"][0]["message"]["content"]
|
|
||||||
|
|
||||||
def call_ollama(cfg, message: str):
|
|
||||||
payload = {
|
|
||||||
"model": cfg["model"],
|
|
||||||
"prompt": message, # `prompt` → `message` にすべき(変数未定義エラー回避)
|
|
||||||
"stream": False
|
|
||||||
}
|
|
||||||
headers = {"Content-Type": "application/json"}
|
|
||||||
response = requests.post(cfg["url"], headers=headers, json=payload)
|
|
||||||
response.raise_for_status()
|
|
||||||
return response.json().get("response", "❌ 応答が取得できませんでした")
|
|
||||||
def main():
|
|
||||||
if len(sys.argv) < 2:
|
|
||||||
print("Usage: ask.py 'your message'")
|
|
||||||
return
|
|
||||||
|
|
||||||
message = sys.argv[1]
|
|
||||||
cfg = load_config()
|
|
||||||
|
|
||||||
print(f"🔍 使用プロバイダー: {cfg['provider']}")
|
|
||||||
|
|
||||||
try:
|
|
||||||
if cfg["provider"] == "openai":
|
|
||||||
response = call_openai(cfg, message)
|
|
||||||
elif cfg["provider"] == "mcp":
|
|
||||||
response = call_mcp(cfg, message)
|
|
||||||
elif cfg["provider"] == "ollama":
|
|
||||||
response = call_ollama(cfg, message)
|
|
||||||
else:
|
|
||||||
raise ValueError(f"未対応のプロバイダー: {cfg['provider']}")
|
|
||||||
|
|
||||||
print("💬 応答:")
|
|
||||||
print(response)
|
|
||||||
|
|
||||||
# ログ保存(オプション)
|
|
||||||
save_log(message, response)
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
print(f"❌ 実行エラー: {e}")
|
|
||||||
|
|
||||||
def save_log(user_msg, ai_msg):
|
|
||||||
from config import MEMORY_DIR
|
|
||||||
date_str = datetime.now().strftime("%Y-%m-%d")
|
|
||||||
path = MEMORY_DIR / f"{date_str}.json"
|
|
||||||
path.parent.mkdir(parents=True, exist_ok=True)
|
|
||||||
|
|
||||||
if path.exists():
|
|
||||||
with open(path, "r") as f:
|
|
||||||
logs = json.load(f)
|
|
||||||
else:
|
|
||||||
logs = []
|
|
||||||
|
|
||||||
now = datetime.now(timezone.utc).isoformat()
|
|
||||||
logs.append({"timestamp": now, "sender": "user", "message": user_msg})
|
|
||||||
logs.append({"timestamp": now, "sender": "ai", "message": ai_msg})
|
|
||||||
|
|
||||||
with open(path, "w") as f:
|
|
||||||
json.dump(logs, f, indent=2, ensure_ascii=False)
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
mcp/scripts/context_loader.py (deleted, 11 lines)
@@ -1,11 +0,0 @@
import os

def load_context_from_repo(repo_path: str, extensions={".rs", ".toml", ".md"}) -> str:
    context = ""
    for root, dirs, files in os.walk(repo_path):
        for file in files:
            if any(file.endswith(ext) for ext in extensions):
                with open(os.path.join(root, file), "r", encoding="utf-8", errors="ignore") as f:
                    content = f.read()
                    context += f"\n\n# FILE: {os.path.join(root, file)}\n{content}"
    return context
@ -1,92 +0,0 @@
|
|||||||
# scripts/memory_store.py
|
|
||||||
import json
|
|
||||||
from pathlib import Path
|
|
||||||
from config import MEMORY_DIR
|
|
||||||
from datetime import datetime, timezone
|
|
||||||
|
|
||||||
def load_logs(date_str=None):
|
|
||||||
if date_str is None:
|
|
||||||
date_str = datetime.now().strftime("%Y-%m-%d")
|
|
||||||
path = MEMORY_DIR / f"{date_str}.json"
|
|
||||||
if path.exists():
|
|
||||||
with open(path, "r") as f:
|
|
||||||
return json.load(f)
|
|
||||||
return []
|
|
||||||
|
|
||||||
def save_message(sender, message):
|
|
||||||
date_str = datetime.now().strftime("%Y-%m-%d")
|
|
||||||
path = MEMORY_DIR / f"{date_str}.json"
|
|
||||||
logs = load_logs(date_str)
|
|
||||||
now = datetime.now(timezone.utc).isoformat()
|
|
||||||
logs.append({"timestamp": now, "sender": sender, "message": message})
|
|
||||||
with open(path, "w") as f:
|
|
||||||
json.dump(logs, f, indent=2, ensure_ascii=False)
|
|
||||||
|
|
||||||
def search_memory(query: str):
|
|
||||||
from glob import glob
|
|
||||||
all_logs = []
|
|
||||||
pattern = re.compile(re.escape(query), re.IGNORECASE)
|
|
||||||
|
|
||||||
for file_path in sorted(MEMORY_DIR.glob("*.json")):
|
|
||||||
with open(file_path, "r") as f:
|
|
||||||
logs = json.load(f)
|
|
||||||
matched = [entry for entry in logs if pattern.search(entry["message"])]
|
|
||||||
all_logs.extend(matched)
|
|
||||||
|
|
||||||
return all_logs[-5:]
|
|
||||||
|
|
||||||
# scripts/memory_store.py
|
|
||||||
import json
|
|
||||||
from datetime import datetime
|
|
||||||
from pathlib import Path
|
|
||||||
from config import MEMORY_DIR
|
|
||||||
|
|
||||||
# ログを読み込む(指定日または当日)
|
|
||||||
def load_logs(date_str=None):
|
|
||||||
if date_str is None:
|
|
||||||
date_str = datetime.now().strftime("%Y-%m-%d")
|
|
||||||
path = MEMORY_DIR / f"{date_str}.json"
|
|
||||||
if path.exists():
|
|
||||||
with open(path, "r") as f:
|
|
||||||
return json.load(f)
|
|
||||||
return []
|
|
||||||
|
|
||||||
# メッセージを保存する
|
|
||||||
def save_message(sender, message):
|
|
||||||
date_str = datetime.now().strftime("%Y-%m-%d")
|
|
||||||
path = MEMORY_DIR / f"{date_str}.json"
|
|
||||||
logs = load_logs(date_str)
|
|
||||||
#now = datetime.utcnow().isoformat() + "Z"
|
|
||||||
now = datetime.now(timezone.utc).isoformat()
|
|
||||||
logs.append({"timestamp": now, "sender": sender, "message": message})
|
|
||||||
with open(path, "w") as f:
|
|
||||||
json.dump(logs, f, indent=2, ensure_ascii=False)
|
|
||||||
|
|
||||||
def search_memory(query: str):
|
|
||||||
from glob import glob
|
|
||||||
all_logs = []
|
|
||||||
for file_path in sorted(MEMORY_DIR.glob("*.json")):
|
|
||||||
with open(file_path, "r") as f:
|
|
||||||
logs = json.load(f)
|
|
||||||
matched = [
|
|
||||||
entry for entry in logs
|
|
||||||
if entry["sender"] == "user" and query in entry["message"]
|
|
||||||
]
|
|
||||||
all_logs.extend(matched)
|
|
||||||
return all_logs[-5:] # 最新5件だけ返す
|
|
||||||
def search_memory(query: str):
|
|
||||||
from glob import glob
|
|
||||||
all_logs = []
|
|
||||||
seen_messages = set() # すでに見たメッセージを保持
|
|
||||||
|
|
||||||
for file_path in sorted(MEMORY_DIR.glob("*.json")):
|
|
||||||
with open(file_path, "r") as f:
|
|
||||||
logs = json.load(f)
|
|
||||||
for entry in logs:
|
|
||||||
if entry["sender"] == "user" and query in entry["message"]:
|
|
||||||
# すでに同じメッセージが結果に含まれていなければ追加
|
|
||||||
if entry["message"] not in seen_messages:
|
|
||||||
all_logs.append(entry)
|
|
||||||
seen_messages.add(entry["message"])
|
|
||||||
|
|
||||||
return all_logs[-5:] # 最新5件だけ返す
|
|
mcp/scripts/prompt_template.py (deleted, 11 lines)
@@ -1,11 +0,0 @@
PROMPT_TEMPLATE = """
あなたは優秀なAIアシスタントです。

以下のコードベースの情報を参考にして、質問に答えてください。

[コードコンテキスト]
{context}

[質問]
{question}
"""
@ -1,56 +0,0 @@
|
|||||||
# server.py
|
|
||||||
from fastapi import FastAPI, Body
|
|
||||||
from fastapi_mcp import FastApiMCP
|
|
||||||
from pydantic import BaseModel
|
|
||||||
from memory_store import save_message, load_logs, search_memory as do_search_memory
|
|
||||||
|
|
||||||
app = FastAPI()
|
|
||||||
mcp = FastApiMCP(app, name="aigpt-agent", description="MCP Server for AI memory")
|
|
||||||
|
|
||||||
class ChatInput(BaseModel):
|
|
||||||
message: str
|
|
||||||
|
|
||||||
class MemoryInput(BaseModel):
|
|
||||||
sender: str
|
|
||||||
message: str
|
|
||||||
|
|
||||||
class MemoryQuery(BaseModel):
|
|
||||||
query: str
|
|
||||||
|
|
||||||
@app.post("/chat", operation_id="chat")
|
|
||||||
async def chat(input: ChatInput):
|
|
||||||
save_message("user", input.message)
|
|
||||||
response = f"AI: 「{input.message}」を受け取りました!"
|
|
||||||
save_message("ai", response)
|
|
||||||
return {"response": response}
|
|
||||||
|
|
||||||
@app.post("/memory", operation_id="save_memory")
|
|
||||||
async def memory_post(input: MemoryInput):
|
|
||||||
save_message(input.sender, input.message)
|
|
||||||
return {"status": "saved"}
|
|
||||||
|
|
||||||
@app.get("/memory", operation_id="get_memory")
|
|
||||||
async def memory_get():
|
|
||||||
return {"messages": load_messages()}
|
|
||||||
|
|
||||||
@app.post("/ask_message", operation_id="ask_message")
|
|
||||||
async def ask_message(input: MemoryQuery):
|
|
||||||
results = search_memory(input.query)
|
|
||||||
return {
|
|
||||||
"response": f"🔎 記憶から {len(results)} 件ヒット:\n" + "\n".join([f"{r['sender']}: {r['message']}" for r in results])
|
|
||||||
}
|
|
||||||
|
|
||||||
@app.post("/memory/search", operation_id="memory")
|
|
||||||
async def memory_search(query: MemoryQuery):
|
|
||||||
hits = do_search_memory(query.query)
|
|
||||||
if not hits:
|
|
||||||
return {"result": "🔍 記憶の中に該当する内容は見つかりませんでした。"}
|
|
||||||
summary = "\n".join([f"{e['sender']}: {e['message']}" for e in hits])
|
|
||||||
return {"result": f"🔎 見つかった記憶:\n{summary}"}
|
|
||||||
|
|
||||||
mcp.mount()
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
import uvicorn
|
|
||||||
print("🚀 Starting MCP server...")
|
|
||||||
uvicorn.run(app, host="127.0.0.1", port=5000)
|
|
@ -1,76 +0,0 @@
|
|||||||
# scripts/summarize.py
|
|
||||||
import json
|
|
||||||
from datetime import datetime
|
|
||||||
from config import MEMORY_DIR, SUMMARY_DIR, load_config
|
|
||||||
import requests
|
|
||||||
|
|
||||||
def load_memory(date_str):
|
|
||||||
path = MEMORY_DIR / f"{date_str}.json"
|
|
||||||
if not path.exists():
|
|
||||||
print(f"⚠️ メモリファイルが見つかりません: {path}")
|
|
||||||
return None
|
|
||||||
with open(path, "r") as f:
|
|
||||||
return json.load(f)
|
|
||||||
|
|
||||||
def save_summary(date_str, content):
|
|
||||||
SUMMARY_DIR.mkdir(parents=True, exist_ok=True)
|
|
||||||
path = SUMMARY_DIR / f"{date_str}_summary.json"
|
|
||||||
with open(path, "w") as f:
|
|
||||||
json.dump(content, f, indent=2, ensure_ascii=False)
|
|
||||||
print(f"✅ 要約を保存しました: {path}")
|
|
||||||
|
|
||||||
def build_prompt(logs):
|
|
||||||
messages = [
|
|
||||||
{"role": "system", "content": "あなたは要約AIです。以下の会話ログを要約してください。"},
|
|
||||||
{"role": "user", "content": "\n".join(f"{entry['sender']}: {entry['message']}" for entry in logs)}
|
|
||||||
]
|
|
||||||
return messages
|
|
||||||
|
|
||||||
def summarize_with_llm(messages):
|
|
||||||
cfg = load_config()
|
|
||||||
if cfg["provider"] == "openai":
|
|
||||||
headers = {
|
|
||||||
"Authorization": f"Bearer {cfg['api_key']}",
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
}
|
|
||||||
payload = {
|
|
||||||
"model": cfg["model"],
|
|
||||||
"messages": messages,
|
|
||||||
"temperature": 0.7
|
|
||||||
}
|
|
||||||
response = requests.post(cfg["url"], headers=headers, json=payload)
|
|
||||||
response.raise_for_status()
|
|
||||||
return response.json()["choices"][0]["message"]["content"]
|
|
||||||
|
|
||||||
elif cfg["provider"] == "ollama":
|
|
||||||
payload = {
|
|
||||||
"model": cfg["model"],
|
|
||||||
"prompt": "\n".join(m["content"] for m in messages),
|
|
||||||
"stream": False,
|
|
||||||
}
|
|
||||||
response = requests.post(cfg["url"], json=payload)
|
|
||||||
response.raise_for_status()
|
|
||||||
return response.json()["response"]
|
|
||||||
|
|
||||||
else:
|
|
||||||
raise ValueError(f"Unsupported provider: {cfg['provider']}")
|
|
||||||
|
|
||||||
def main():
|
|
||||||
date_str = datetime.now().strftime("%Y-%m-%d")
|
|
||||||
logs = load_memory(date_str)
|
|
||||||
if not logs:
|
|
||||||
return
|
|
||||||
|
|
||||||
prompt_messages = build_prompt(logs)
|
|
||||||
summary_text = summarize_with_llm(prompt_messages)
|
|
||||||
|
|
||||||
summary = {
|
|
||||||
"date": date_str,
|
|
||||||
"summary": summary_text,
|
|
||||||
"total_messages": len(logs)
|
|
||||||
}
|
|
||||||
|
|
||||||
save_summary(date_str, summary)
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
mcp/server.py (new file, 79 lines)
@@ -0,0 +1,79 @@
# mcp/server.py
"""
MCP Server for aigpt CLI
"""
from fastmcp import FastMCP
import platform
import os
import sys

mcp = FastMCP("AigptMCP")


@mcp.tool()
def process_text(text: str) -> str:
    """Process the given text"""
    return f"Processed: {text}"


@mcp.tool()
def get_system_info() -> dict:
    """Get system information"""
    return {
        "platform": platform.system(),
        "version": platform.version(),
        "python_version": sys.version,
        "current_dir": os.getcwd()
    }


@mcp.tool()
def execute_command(command: str) -> dict:
    """Execute a safe command"""
    # For security, only allow-listed commands are executed
    allowed_commands = ["ls", "pwd", "date", "whoami"]
    cmd_parts = command.split()

    if not cmd_parts or cmd_parts[0] not in allowed_commands:
        return {
            "error": f"Command '{command}' is not allowed",
            "allowed": allowed_commands
        }

    try:
        import subprocess
        result = subprocess.run(
            cmd_parts,
            capture_output=True,
            text=True,
            timeout=10
        )
        return {
            "stdout": result.stdout,
            "stderr": result.stderr,
            "returncode": result.returncode
        }
    except subprocess.TimeoutExpired:
        return {"error": "Command timed out"}
    except Exception as e:
        return {"error": str(e)}


@mcp.tool()
def file_operations(operation: str, filepath: str, content: str = None) -> dict:
    """Perform a file operation"""
    try:
        if operation == "read":
            with open(filepath, 'r', encoding='utf-8') as f:
                return {"content": f.read(), "success": True}
        elif operation == "write" and content is not None:
            with open(filepath, 'w', encoding='utf-8') as f:
                f.write(content)
            return {"message": f"File written to {filepath}", "success": True}
        elif operation == "exists":
            return {"exists": os.path.exists(filepath), "success": True}
        else:
            return {"error": "Invalid operation or missing content", "success": False}
    except Exception as e:
        return {"error": str(e), "success": False}


if __name__ == "__main__":
    print("🚀 AigptMCP Server starting...")
    mcp.run()
mcp/setup.py (deleted, 12 lines)
@@ -1,12 +0,0 @@
# setup.py
from setuptools import setup

setup(
    name='aigpt-mcp',
    py_modules=['cli'],
    entry_points={
        'console_scripts': [
            'mcp = cli:main',
        ],
    },
)
src/agent.rs (deleted, 37 lines)
@@ -1,37 +0,0 @@
use chrono::{NaiveDateTime};

#[allow(dead_code)]
#[derive(Debug)]
pub struct AIState {
    pub relation_score: f32,
    pub previous_score: f32,
    pub decay_rate: f32,
    pub sensitivity: f32,
    pub message_threshold: f32,
    pub last_message_time: NaiveDateTime,
}

#[allow(dead_code)]
impl AIState {
    pub fn update(&mut self, now: NaiveDateTime) {
        let days_passed = (now - self.last_message_time).num_days() as f32;
        let decay = self.decay_rate * days_passed;
        self.previous_score = self.relation_score;
        self.relation_score -= decay;
        self.relation_score = self.relation_score.clamp(0.0, 100.0);
    }

    pub fn should_talk(&self) -> bool {
        let delta = self.previous_score - self.relation_score;
        delta > self.message_threshold && self.sensitivity > 0.5
    }

    pub fn generate_message(&self) -> String {
        match self.relation_score as i32 {
            80..=100 => "ふふっ、最近どうしてる?会いたくなっちゃった!".to_string(),
            60..=79 => "ちょっとだけ、さみしかったんだよ?".to_string(),
            40..=59 => "えっと……話せる時間ある?".to_string(),
            _ => "ううん、もしかして私のこと、忘れちゃったのかな……".to_string(),
        }
    }
}
src/chat.rs (deleted, 140 lines)
@@ -1,140 +0,0 @@
// src/chat.rs
|
|
||||||
use std::fs;
|
|
||||||
use std::process::Command;
|
|
||||||
use serde::Deserialize;
|
|
||||||
use seahorse::Context;
|
|
||||||
use crate::config::ConfigPaths;
|
|
||||||
use crate::metrics::{load_user_data, save_user_data, update_metrics_decay};
|
|
||||||
//use std::process::Stdio;
|
|
||||||
//use std::io::Write;
|
|
||||||
//use std::time::Duration;
|
|
||||||
//use std::net::TcpStream;
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq)]
|
|
||||||
pub enum Provider {
|
|
||||||
OpenAI,
|
|
||||||
Ollama,
|
|
||||||
MCP,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Provider {
|
|
||||||
pub fn from_str(s: &str) -> Option<Self> {
|
|
||||||
match s.to_lowercase().as_str() {
|
|
||||||
"openai" => Some(Provider::OpenAI),
|
|
||||||
"ollama" => Some(Provider::Ollama),
|
|
||||||
"mcp" => Some(Provider::MCP),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn as_str(&self) -> &'static str {
|
|
||||||
match self {
|
|
||||||
Provider::OpenAI => "openai",
|
|
||||||
Provider::Ollama => "ollama",
|
|
||||||
Provider::MCP => "mcp",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
|
||||||
struct OpenAIKey {
|
|
||||||
token: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
fn load_openai_api_key() -> Option<String> {
|
|
||||||
let config = ConfigPaths::new();
|
|
||||||
let path = config.base_dir.join("openai.json");
|
|
||||||
let data = fs::read_to_string(path).ok()?;
|
|
||||||
let parsed: OpenAIKey = serde_json::from_str(&data).ok()?;
|
|
||||||
Some(parsed.token)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn ask_chat(c: &Context, question: &str) -> Option<String> {
|
|
||||||
let config = ConfigPaths::new();
|
|
||||||
let base_dir = config.base_dir.join("mcp");
|
|
||||||
let user_path = config.base_dir.join("user.json");
|
|
||||||
|
|
||||||
let mut user = load_user_data(&user_path);
|
|
||||||
user.metrics = update_metrics_decay();
|
|
||||||
|
|
||||||
// 各種オプション
|
|
||||||
let ollama_host = c.string_flag("host").ok();
|
|
||||||
let ollama_model = c.string_flag("model").ok();
|
|
||||||
let provider_str = c.string_flag("provider").unwrap_or_else(|_| "ollama".to_string());
|
|
||||||
let provider = Provider::from_str(&provider_str).unwrap_or(Provider::Ollama);
|
|
||||||
let api_key = c.string_flag("api-key").ok().or_else(load_openai_api_key);
|
|
||||||
|
|
||||||
println!("🔍 使用プロバイダー: {}", provider.as_str());
|
|
||||||
|
|
||||||
match provider {
|
|
||||||
Provider::MCP => {
|
|
||||||
let client = reqwest::blocking::Client::new();
|
|
||||||
let url = std::env::var("MCP_URL").unwrap_or("http://127.0.0.1:5000/chat".to_string());
|
|
||||||
let res = client.post(url)
|
|
||||||
.json(&serde_json::json!({"message": question}))
|
|
||||||
.send();
|
|
||||||
|
|
||||||
match res {
|
|
||||||
Ok(resp) => {
|
|
||||||
if resp.status().is_success() {
|
|
||||||
let json: serde_json::Value = resp.json().ok()?;
|
|
||||||
let text = json.get("response")?.as_str()?.to_string();
|
|
||||||
user.metrics.intimacy += 0.01;
|
|
||||||
user.metrics.last_updated = chrono::Utc::now();
|
|
||||||
save_user_data(&user_path, &user);
|
|
||||||
Some(text)
|
|
||||||
} else {
|
|
||||||
eprintln!("❌ MCPエラー: HTTP {}", resp.status());
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
eprintln!("❌ MCP接続失敗: {}", e);
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
// Python 実行パス
|
|
||||||
let python_path = if cfg!(target_os = "windows") {
|
|
||||||
base_dir.join(".venv/Scripts/mcp.exe")
|
|
||||||
} else {
|
|
||||||
base_dir.join(".venv/bin/mcp")
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut command = Command::new(python_path);
|
|
||||||
command.arg("ask").arg(question);
|
|
||||||
|
|
||||||
if let Some(host) = ollama_host {
|
|
||||||
command.env("OLLAMA_HOST", host);
|
|
||||||
}
|
|
||||||
if let Some(model) = ollama_model {
|
|
||||||
command.env("OLLAMA_MODEL", model.clone());
|
|
||||||
command.env("OPENAI_MODEL", model);
|
|
||||||
}
|
|
||||||
command.env("PROVIDER", provider.as_str());
|
|
||||||
|
|
||||||
if let Some(key) = api_key {
|
|
||||||
command.env("OPENAI_API_KEY", key);
|
|
||||||
}
|
|
||||||
|
|
||||||
let output = command.output().expect("❌ MCPチャットスクリプトの実行に失敗しました");
|
|
||||||
|
|
||||||
if output.status.success() {
|
|
||||||
let response = String::from_utf8_lossy(&output.stdout).to_string();
|
|
||||||
user.metrics.intimacy += 0.01;
|
|
||||||
user.metrics.last_updated = chrono::Utc::now();
|
|
||||||
save_user_data(&user_path, &user);
|
|
||||||
|
|
||||||
Some(response)
|
|
||||||
} else {
|
|
||||||
eprintln!(
|
|
||||||
"❌ 実行エラー: {}\n{}",
|
|
||||||
String::from_utf8_lossy(&output.stderr),
|
|
||||||
String::from_utf8_lossy(&output.stdout),
|
|
||||||
);
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
src/cli.rs (120 lines changed)
@@ -1,100 +1,32 @@
// src/cli.rs
|
// src/cli.rs
|
||||||
use std::path::{Path};
|
use clap::{Parser, Subcommand};
|
||||||
use chrono::{Duration, Local};
|
|
||||||
use rusqlite::Connection;
|
|
||||||
|
|
||||||
use seahorse::{App, Command, Context};
|
#[derive(Parser)]
|
||||||
|
#[command(name = "aigpt")]
|
||||||
use crate::utils::{load_config, save_config};
|
#[command(about = "AI GPT CLI with MCP Server")]
|
||||||
use crate::config::ConfigPaths;
|
pub struct Args {
|
||||||
use crate::agent::AIState;
|
#[command(subcommand)]
|
||||||
use crate::commands::db::{save_cmd, export_cmd};
|
pub command: Commands,
|
||||||
use crate::commands::scheduler::{scheduler_cmd};
|
|
||||||
use crate::commands::mcp::mcp_cmd;
|
|
||||||
|
|
||||||
pub fn cli_app() -> App {
|
|
||||||
let set_cmd = Command::new("set")
|
|
||||||
.usage("set [trust|intimacy|curiosity] [value]")
|
|
||||||
.action(|c: &Context| {
|
|
||||||
if c.args.len() != 2 {
|
|
||||||
eprintln!("Usage: set [trust|intimacy|curiosity] [value]");
|
|
||||||
std::process::exit(1);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let field = &c.args[0];
|
#[derive(Subcommand)]
|
||||||
let value: f32 = c.args[1].parse().unwrap_or_else(|_| {
|
pub enum Commands {
|
||||||
eprintln!("数値で入力してください");
|
/// MCP Server management
|
||||||
std::process::exit(1);
|
Server {
|
||||||
});
|
#[command(subcommand)]
|
||||||
|
command: ServerCommands,
|
||||||
// ConfigPathsを使って設定ファイルのパスを取得
|
},
|
||||||
let config_paths = ConfigPaths::new();
|
/// Chat with AI
|
||||||
let json_path = config_paths.data_file("json");
|
Chat {
|
||||||
// まだ user.json がない場合、example.json をコピー
|
/// Message to send
|
||||||
config_paths.ensure_file_exists("json", Path::new("example.json"));
|
message: String,
|
||||||
let db_path = config_paths.data_file("db");
|
},
|
||||||
let mut ai = load_config(json_path.to_str().unwrap());
|
|
||||||
|
|
||||||
match field.as_str() {
|
|
||||||
"trust" => ai.relationship.trust = value,
|
|
||||||
"intimacy" => ai.relationship.intimacy = value,
|
|
||||||
"curiosity" => ai.relationship.curiosity = value,
|
|
||||||
_ => {
|
|
||||||
eprintln!("trust / intimacy / curiosity のいずれかを指定してください");
|
|
||||||
std::process::exit(1);
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
save_config(json_path.to_str().unwrap(), &ai);
|
#[derive(Subcommand)]
|
||||||
|
pub enum ServerCommands {
|
||||||
let conn = Connection::open(db_path.to_str().unwrap()).expect("DB接続失敗");
|
/// Setup Python MCP server environment
|
||||||
ai.save_to_db(&conn).expect("DB保存失敗");
|
Setup,
|
||||||
|
/// Run the MCP server
|
||||||
println!("✅ {field} を {value} に更新しました");
|
Run,
|
||||||
});
|
|
||||||
|
|
||||||
let show_cmd = Command::new("show")
|
|
||||||
.usage("show")
|
|
||||||
.action(|_c: &Context| {
|
|
||||||
// ConfigPathsを使って設定ファイルのパスを取得
|
|
||||||
let config_paths = ConfigPaths::new();
|
|
||||||
let ai = load_config(config_paths.data_file("json").to_str().unwrap());
|
|
||||||
println!("🧠 現在のAI状態:\n{:#?}", ai);
|
|
||||||
});
|
|
||||||
|
|
||||||
let talk_cmd = Command::new("talk")
|
|
||||||
.usage("talk")
|
|
||||||
.action(|_c: &Context| {
|
|
||||||
let config_paths = ConfigPaths::new();
|
|
||||||
let ai = load_config(config_paths.data_file("json").to_str().unwrap());
|
|
||||||
|
|
||||||
let now = Local::now().naive_local();
|
|
||||||
let mut state = AIState {
|
|
||||||
relation_score: 80.0,
|
|
||||||
previous_score: 80.0,
|
|
||||||
decay_rate: ai.messaging.decay_rate,
|
|
||||||
sensitivity: ai.personality.strength,
|
|
||||||
message_threshold: 5.0,
|
|
||||||
last_message_time: now - Duration::days(4),
|
|
||||||
};
|
|
||||||
|
|
||||||
state.update(now);
|
|
||||||
|
|
||||||
if state.should_talk() {
|
|
||||||
println!("💬 AI発話: {}", state.generate_message());
|
|
||||||
} else {
|
|
||||||
println!("🤫 今日は静かにしているみたい...");
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
App::new("aigpt")
|
|
||||||
.version("0.1.0")
|
|
||||||
.description("AGE system CLI controller")
|
|
||||||
.author("syui")
|
|
||||||
.command(set_cmd)
|
|
||||||
.command(show_cmd)
|
|
||||||
.command(talk_cmd)
|
|
||||||
.command(save_cmd())
|
|
||||||
.command(export_cmd())
|
|
||||||
.command(scheduler_cmd())
|
|
||||||
.command(mcp_cmd())
|
|
||||||
}
|
}
|
||||||
|
src/commands/db.rs (deleted, 44 lines)
@@ -1,44 +0,0 @@
// src/commands/db.rs
use seahorse::{Command, Context};
use crate::utils::{load_config};
use crate::model::AiSystem;
use crate::config::ConfigPaths;

use rusqlite::Connection;
use std::fs;

pub fn save_cmd() -> Command {
    Command::new("save")
        .usage("save")
        .action(|_c: &Context| {
            let paths = ConfigPaths::new();

            let json_path = paths.data_file("json");
            let db_path = paths.data_file("db");

            let ai = load_config(json_path.to_str().unwrap());
            let conn = Connection::open(db_path).expect("DB接続失敗");

            ai.save_to_db(&conn).expect("DB保存失敗");
            println!("💾 DBに保存完了");
        })
}

pub fn export_cmd() -> Command {
    Command::new("export")
        .usage("export [output.json]")
        .action(|c: &Context| {
            let output_path = c.args.get(0).map(|s| s.as_str()).unwrap_or("output.json");

            let paths = ConfigPaths::new();
            let db_path = paths.data_file("db");

            let conn = Connection::open(db_path).expect("DB接続失敗");
            let ai = AiSystem::load_from_db(&conn).expect("DB読み込み失敗");

            let json = serde_json::to_string_pretty(&ai).expect("JSON変換失敗");
            fs::write(output_path, json).expect("ファイル書き込み失敗");

            println!("📤 JSONにエクスポート完了: {output_path}");
        })
}
src/commands/git_repo.rs (deleted, 17 lines)
@@ -1,17 +0,0 @@
// src/commands/git_repo.rs
use std::fs;

// Walk the Git repository and read the contents of every file
pub fn read_all_git_files(repo_path: &str) -> String {
    let mut content = String::new();
    for entry in fs::read_dir(repo_path).expect("ディレクトリ読み込み失敗") {
        let entry = entry.expect("エントリ読み込み失敗");
        let path = entry.path();
        if path.is_file() {
            if let Ok(file_content) = fs::read_to_string(&path) {
                content.push_str(&format!("\n\n# File: {}\n{}", path.display(), file_content));
            }
        }
    }
    content
}
@ -1,277 +0,0 @@
|
|||||||
// src/commands/mcp.rs
|
|
||||||
|
|
||||||
use std::fs;
|
|
||||||
use std::path::{PathBuf};
|
|
||||||
use std::process::Command as OtherCommand;
|
|
||||||
use serde_json::json;
|
|
||||||
use seahorse::{Command, Context, Flag, FlagType};
|
|
||||||
use crate::chat::ask_chat;
|
|
||||||
use crate::git::{git_init, git_status};
|
|
||||||
use crate::config::ConfigPaths;
|
|
||||||
use crate::commands::git_repo::read_all_git_files;
|
|
||||||
use crate::metrics::{load_user_data, save_user_data};
|
|
||||||
use crate::memory::{log_message};
|
|
||||||
|
|
||||||
pub fn mcp_setup() {
|
|
||||||
let config = ConfigPaths::new();
|
|
||||||
let dest_dir = config.base_dir.join("mcp");
|
|
||||||
let repo_url = "https://github.com/microsoft/MCP.git";
|
|
||||||
println!("📁 MCP ディレクトリ: {}", dest_dir.display());
|
|
||||||
|
|
||||||
// 1. git clone(もしまだなければ)
|
|
||||||
if !dest_dir.exists() {
|
|
||||||
let status = OtherCommand::new("git")
|
|
||||||
.args(&["clone", repo_url, dest_dir.to_str().unwrap()])
|
|
||||||
.status()
|
|
||||||
.expect("git clone に失敗しました");
|
|
||||||
assert!(status.success(), "git clone 実行時にエラーが発生しました");
|
|
||||||
}
|
|
||||||
|
|
||||||
let asset_base = PathBuf::from("mcp");
|
|
||||||
let files_to_copy = vec![
|
|
||||||
"cli.py",
|
|
||||||
"setup.py",
|
|
||||||
"scripts/ask.py",
|
|
||||||
"scripts/server.py",
|
|
||||||
"scripts/config.py",
|
|
||||||
"scripts/summarize.py",
|
|
||||||
"scripts/context_loader.py",
|
|
||||||
"scripts/prompt_template.py",
|
|
||||||
"scripts/memory_store.py",
|
|
||||||
];
|
|
||||||
|
|
||||||
for rel_path in files_to_copy {
|
|
||||||
let src = asset_base.join(rel_path);
|
|
||||||
let dst = dest_dir.join(rel_path);
|
|
||||||
if let Some(parent) = dst.parent() {
|
|
||||||
let _ = fs::create_dir_all(parent);
|
|
||||||
}
|
|
||||||
if let Err(e) = fs::copy(&src, &dst) {
|
|
||||||
eprintln!("❌ コピー失敗: {} → {}: {}", src.display(), dst.display(), e);
|
|
||||||
} else {
|
|
||||||
println!("✅ コピー: {} → {}", src.display(), dst.display());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// venvの作成
|
|
||||||
let venv_path = dest_dir.join(".venv");
|
|
||||||
if !venv_path.exists() {
|
|
||||||
println!("🐍 仮想環境を作成しています...");
|
|
||||||
let output = OtherCommand::new("python3")
|
|
||||||
.args(&["-m", "venv", ".venv"])
|
|
||||||
.current_dir(&dest_dir)
|
|
||||||
.output()
|
|
||||||
.expect("venvの作成に失敗しました");
|
|
||||||
|
|
||||||
if !output.status.success() {
|
|
||||||
eprintln!("❌ venv作成エラー: {}", String::from_utf8_lossy(&output.stderr));
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// `pip install -e .` を仮想環境で実行
|
|
||||||
let pip_path = if cfg!(target_os = "windows") {
|
|
||||||
dest_dir.join(".venv/Scripts/pip.exe").to_string_lossy().to_string()
|
|
||||||
} else {
|
|
||||||
dest_dir.join(".venv/bin/pip").to_string_lossy().to_string()
|
|
||||||
};
|
|
||||||
|
|
||||||
println!("📦 必要なパッケージをインストールしています...");
|
|
||||||
let output = OtherCommand::new(&pip_path)
|
|
||||||
.arg("install")
|
|
||||||
.arg("openai")
|
|
||||||
.arg("requests")
|
|
||||||
.arg("fastmcp")
|
|
||||||
.arg("uvicorn")
|
|
||||||
.arg("fastapi")
|
|
||||||
.arg("fastapi_mcp")
|
|
||||||
.arg("mcp")
|
|
||||||
.current_dir(&dest_dir)
|
|
||||||
.output()
|
|
||||||
.expect("pip install に失敗しました");
|
|
||||||
|
|
||||||
if !output.status.success() {
|
|
||||||
eprintln!(
|
|
||||||
"❌ pip エラー: {}\n{}",
|
|
||||||
String::from_utf8_lossy(&output.stderr),
|
|
||||||
String::from_utf8_lossy(&output.stdout)
|
|
||||||
);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
println!("📦 pip install -e . を実行します...");
|
|
||||||
let output = OtherCommand::new(&pip_path)
|
|
||||||
.arg("install")
|
|
||||||
.arg("-e")
|
|
||||||
.arg(".")
|
|
||||||
.current_dir(&dest_dir)
|
|
||||||
.output()
|
|
||||||
.expect("pip install に失敗しました");
|
|
||||||
|
|
||||||
if output.status.success() {
|
|
||||||
println!("🎉 MCP セットアップが完了しました!");
|
|
||||||
} else {
|
|
||||||
eprintln!(
|
|
||||||
"❌ pip エラー: {}\n{}",
|
|
||||||
String::from_utf8_lossy(&output.stderr),
|
|
||||||
String::from_utf8_lossy(&output.stdout)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn set_api_key_cmd() -> Command {
|
|
||||||
Command::new("set-api")
|
|
||||||
.description("OpenAI APIキーを設定")
|
|
||||||
.usage("mcp set-api --api <API_KEY>")
|
|
||||||
.flag(Flag::new("api", FlagType::String).description("OpenAI APIキー").alias("a"))
|
|
||||||
.action(|c: &Context| {
|
|
||||||
if let Ok(api_key) = c.string_flag("api") {
|
|
||||||
let config = ConfigPaths::new();
|
|
||||||
let path = config.base_dir.join("openai.json");
|
|
||||||
let json_data = json!({ "token": api_key });
|
|
||||||
|
|
||||||
if let Err(e) = fs::write(&path, serde_json::to_string_pretty(&json_data).unwrap()) {
|
|
||||||
eprintln!("❌ ファイル書き込み失敗: {}", e);
|
|
||||||
} else {
|
|
||||||
println!("✅ APIキーを保存しました: {}", path.display());
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
eprintln!("❗ APIキーを --api で指定してください");
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn chat_cmd() -> Command {
|
|
||||||
Command::new("chat")
|
|
||||||
.description("チャットで質問を送る")
|
|
||||||
.usage("mcp chat '質問内容' --host <OLLAMA_HOST> --model <MODEL> [--provider <ollama|openai>] [--api-key <KEY>] [--repo <REPO_URL>]")
|
|
||||||
.flag(
|
|
||||||
Flag::new("host", FlagType::String)
|
|
||||||
.description("OLLAMAホストのURL")
|
|
||||||
.alias("H"),
|
|
||||||
)
|
|
||||||
.flag(
|
|
||||||
Flag::new("model", FlagType::String)
|
|
||||||
.description("モデル名 (OLLAMA_MODEL / OPENAI_MODEL)")
|
|
||||||
.alias("m"),
|
|
||||||
)
|
|
||||||
.flag(
|
|
||||||
Flag::new("provider", FlagType::String)
|
|
||||||
.description("使用するプロバイダ (ollama / openai)")
|
|
||||||
.alias("p"),
|
|
||||||
)
|
|
||||||
.flag(
|
|
||||||
Flag::new("api-key", FlagType::String)
|
|
||||||
.description("OpenAI APIキー")
|
|
||||||
.alias("k"),
|
|
||||||
)
|
|
||||||
.flag(
|
|
||||||
Flag::new("repo", FlagType::String)
|
|
||||||
.description("Gitリポジトリのパスを指定 (すべてのコードを読み込む)")
|
|
||||||
.alias("r"),
|
|
||||||
)
|
|
||||||
.action(|c: &Context| {
|
|
||||||
let config = ConfigPaths::new();
|
|
||||||
let user_path = config.data_file("json");
|
|
||||||
let mut user = load_user_data(&user_path);
|
|
||||||
// repoがある場合は、コードベース読み込みモード
|
|
||||||
if let Ok(repo_url) = c.string_flag("repo") {
|
|
||||||
let repo_base = config.base_dir.join("repos");
|
|
||||||
let repo_dir = repo_base.join(sanitize_repo_name(&repo_url));
|
|
||||||
|
|
||||||
if !repo_dir.exists() {
|
|
||||||
println!("📥 Gitリポジトリをクローン中: {}", repo_url);
|
|
||||||
let status = OtherCommand::new("git")
|
|
||||||
.args(&["clone", &repo_url, repo_dir.to_str().unwrap()])
|
|
||||||
.status()
|
|
||||||
.expect("❌ Gitのクローンに失敗しました");
|
|
||||||
assert!(status.success(), "Git clone エラー");
|
|
||||||
} else {
|
|
||||||
println!("✔ リポジトリはすでに存在します: {}", repo_dir.display());
|
|
||||||
}
|
|
||||||
|
|
||||||
let files = read_all_git_files(repo_dir.to_str().unwrap());
|
|
||||||
let prompt = format!(
|
|
||||||
"以下のコードベースを読み込んで、改善案や次のステップを提案してください:\n{}",
|
|
||||||
files
|
|
||||||
);
|
|
||||||
|
|
||||||
if let Some(response) = ask_chat(c, &prompt) {
|
|
||||||
println!("💬 提案:\n{}", response);
|
|
||||||
} else {
|
|
||||||
eprintln!("❗ 提案が取得できませんでした");
|
|
||||||
}
|
|
||||||
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// 通常のチャット処理(repoが指定されていない場合)
|
|
||||||
match c.args.get(0) {
|
|
||||||
Some(question) => {
|
|
||||||
log_message(&config.base_dir, "user", question);
|
|
||||||
let response = ask_chat(c, question);
|
|
||||||
|
|
||||||
if let Some(ref text) = response {
|
|
||||||
println!("💬 応答:\n{}", text);
|
|
||||||
// 返答内容に基づいて増減(返答の感情解析)
|
|
||||||
if text.contains("thank") || text.contains("great") {
|
|
||||||
user.metrics.trust += 0.05;
|
|
||||||
} else if text.contains("hate") || text.contains("bad") {
|
|
||||||
user.metrics.trust -= 0.05;
|
|
||||||
}
|
|
||||||
log_message(&config.base_dir, "ai", &text);
|
|
||||||
save_user_data(&user_path, &user);
|
|
||||||
} else {
|
|
||||||
eprintln!("❗ 応答が取得できませんでした");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
None => {
|
|
||||||
eprintln!("❗ 質問が必要です: mcp chat 'こんにちは'");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn init_cmd() -> Command {
|
|
||||||
Command::new("init")
|
|
||||||
.description("Git 初期化")
|
|
||||||
.usage("mcp init")
|
|
||||||
.action(|_| {
|
|
||||||
git_init();
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn status_cmd() -> Command {
|
|
||||||
Command::new("status")
|
|
||||||
.description("Git ステータス表示")
|
|
||||||
.usage("mcp status")
|
|
||||||
.action(|_| {
|
|
||||||
git_status();
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn setup_cmd() -> Command {
|
|
||||||
Command::new("setup")
|
|
||||||
.description("MCP の初期セットアップ")
|
|
||||||
.usage("mcp setup")
|
|
||||||
.action(|_| {
|
|
||||||
mcp_setup();
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn mcp_cmd() -> Command {
|
|
||||||
Command::new("mcp")
|
|
||||||
.description("MCP操作コマンド")
|
|
||||||
.usage("mcp <subcommand>")
|
|
||||||
.alias("m")
|
|
||||||
.command(chat_cmd())
|
|
||||||
.command(init_cmd())
|
|
||||||
.command(status_cmd())
|
|
||||||
.command(setup_cmd())
|
|
||||||
.command(set_api_key_cmd())
|
|
||||||
}
|
|
||||||
|
|
||||||
// ファイル名として安全な形に変換
|
|
||||||
fn sanitize_repo_name(repo_url: &str) -> String {
|
|
||||||
repo_url.replace("://", "_").replace("/", "_").replace("@", "_")
|
|
||||||
}
|
|
src/commands/mod.rs (deleted, 4 lines)
@@ -1,4 +0,0 @@
pub mod db;
pub mod scheduler;
pub mod mcp;
pub mod git_repo;
@ -1,127 +0,0 @@
// src/commands/scheduler.rs
use seahorse::{Command, Context};
use std::thread;
use std::time::Duration;
use chrono::{Local, Utc, Timelike};
use crate::metrics::{load_user_data, save_user_data};
use crate::config::ConfigPaths;
use crate::chat::ask_chat;
use rand::prelude::*;
use rand::rng;

fn send_scheduled_message() {
    let config = ConfigPaths::new();
    let user_path = config.data_file("json");
    let mut user = load_user_data(&user_path);

    if !user.metrics.can_send {
        println!("🚫 送信条件を満たしていないため、スケジュール送信スキップ");
        return;
    }

    // 日付の比較(1日1回制限)
    let today = Local::now().format("%Y-%m-%d").to_string();
    if let Some(last_date) = &user.messaging.last_sent_date {
        if last_date != &today {
            user.messaging.sent_today = false;
        }
    } else {
        user.messaging.sent_today = false;
    }

    if user.messaging.sent_today {
        println!("🔁 本日はすでに送信済みです: {}", today);
        return;
    }

    if let Some(schedule_str) = &user.messaging.schedule_time {
        let now = Local::now();
        let target: Vec<&str> = schedule_str.split(':').collect();

        if target.len() != 2 {
            println!("⚠️ schedule_time形式が無効です: {}", schedule_str);
            return;
        }

        let (sh, sm) = (target[0].parse::<u32>(), target[1].parse::<u32>());
        if let (Ok(sh), Ok(sm)) = (sh, sm) {
            if now.hour() == sh && now.minute() == sm {
                if let Some(msg) = user.messaging.templates.choose(&mut rng()) {
                    println!("💬 自動送信メッセージ: {}", msg);
                    let dummy_context = Context::new(vec![], None, "".to_string());
                    ask_chat(&dummy_context, msg);
                    user.metrics.intimacy += 0.03;

                    // 送信済みのフラグ更新
                    user.messaging.sent_today = true;
                    user.messaging.last_sent_date = Some(today);

                    save_user_data(&user_path, &user);
                }
            }
        }
    }
}

pub fn scheduler_cmd() -> Command {
    Command::new("scheduler")
        .usage("scheduler [interval_sec]")
        .alias("s")
        .description("定期的に送信条件をチェックし、自発的なメッセージ送信を試みる")
        .action(|c: &Context| {
            let interval = c.args.get(0)
                .and_then(|s| s.parse::<u64>().ok())
                .unwrap_or(3600); // デフォルト: 1時間(テストしやすく)

            println!("⏳ スケジューラー開始({}秒ごと)...", interval);

            loop {
                let config = ConfigPaths::new();
                let user_path = config.data_file("json");
                let mut user = load_user_data(&user_path);

                let now = Utc::now();
                let elapsed = now.signed_duration_since(user.metrics.last_updated);
                let hours = elapsed.num_minutes() as f32 / 60.0;

                let speed_factor = if hours > 48.0 {
                    2.0
                } else if hours > 24.0 {
                    1.5
                } else {
                    1.0
                };

                user.metrics.trust = (user.metrics.trust - 0.01 * speed_factor).clamp(0.0, 1.0);
                user.metrics.intimacy = (user.metrics.intimacy - 0.01 * speed_factor).clamp(0.0, 1.0);
                user.metrics.energy = (user.metrics.energy - 0.01 * speed_factor).clamp(0.0, 1.0);

                user.metrics.can_send =
                    user.metrics.trust >= 0.5 &&
                    user.metrics.intimacy >= 0.5 &&
                    user.metrics.energy >= 0.5;

                user.metrics.last_updated = now;

                if user.metrics.can_send {
                    println!("💡 AIメッセージ送信条件を満たしています(信頼:{:.2}, 親密:{:.2}, エネルギー:{:.2})",
                        user.metrics.trust,
                        user.metrics.intimacy,
                        user.metrics.energy
                    );
                    send_scheduled_message();
                } else {
                    println!("🤫 条件未達成のため送信スキップ: trust={:.2}, intimacy={:.2}, energy={:.2}",
                        user.metrics.trust,
                        user.metrics.intimacy,
                        user.metrics.energy
                    );
                }

                save_user_data(&user_path, &user);
                thread::sleep(Duration::from_secs(interval));
            }
        })
}
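The loop above decays each metric by 0.01 × speed_factor per tick, where speed_factor grows with the hours since the last update. A minimal sketch of that arithmetic, with made-up starting values:

```rust
// Self-contained sketch of the per-tick decay used in the scheduler loop above.
// The starting value and elapsed time are arbitrary; only the arithmetic mirrors the hunk.
fn speed_factor(hours_since_update: f32) -> f32 {
    if hours_since_update > 48.0 {
        2.0
    } else if hours_since_update > 24.0 {
        1.5
    } else {
        1.0
    }
}

fn main() {
    let mut trust: f32 = 0.60;
    let hours = 30.0; // pretend the last update was 30 hours ago
    let factor = speed_factor(hours); // 1.5
    trust = (trust - 0.01 * factor).clamp(0.0, 1.0);
    // 0.60 - 0.015 = 0.585; can_send requires every metric to stay >= 0.5
    println!("trust after one tick: {:.3}", trust);
    assert!((trust - 0.585).abs() < 1e-5);
}
```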
@ -22,6 +22,7 @@ impl ConfigPaths {
         }
     }
 
+    #[allow(dead_code)]
     pub fn data_file(&self, file_name: &str) -> PathBuf {
         let file_path = match file_name {
             "db" => self.base_dir.join("user.db"),
@ -29,18 +30,30 @@ impl ConfigPaths {
             "json" => self.base_dir.join("user.json"),
             _ => self.base_dir.join(format!(".{}", file_name)),
         };
 
         file_path
     }
-    /// 設定ファイルがなければ `example.json` をコピーする
-    pub fn ensure_file_exists(&self, file_name: &str, template_path: &Path) {
-        let target = self.data_file(file_name);
-        if !target.exists() {
-            if let Err(e) = fs::copy(template_path, &target) {
-                eprintln!("⚠️ 設定ファイルの初期化に失敗しました: {}", e);
-            } else {
-                println!("📄 {} を {} にコピーしました", template_path.display(), target.display());
-            }
-        }
-    }
+
+    pub fn mcp_dir(&self) -> PathBuf {
+        self.base_dir.join("mcp")
+    }
+
+    pub fn venv_path(&self) -> PathBuf {
+        self.mcp_dir().join(".venv")
+    }
+
+    pub fn python_executable(&self) -> PathBuf {
+        if cfg!(windows) {
+            self.venv_path().join("Scripts").join("python.exe")
+        } else {
+            self.venv_path().join("bin").join("python")
+        }
+    }
+
+    pub fn pip_executable(&self) -> PathBuf {
+        if cfg!(windows) {
+            self.venv_path().join("Scripts").join("pip.exe")
+        } else {
+            self.venv_path().join("bin").join("pip")
+        }
+    }
 }
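The added helpers simply nest path joins under `base_dir`: `mcp_dir()` → `venv_path()` → the interpreter inside the venv. A standalone sketch; the `base_dir` value here is hypothetical, since the real one comes from `ConfigPaths::new()`, which is not part of this hunk:

```rust
use std::path::PathBuf;

// Standalone sketch of how the new ConfigPaths helpers nest; `base_dir` is a made-up value.
struct ConfigPaths {
    base_dir: PathBuf,
}

impl ConfigPaths {
    fn mcp_dir(&self) -> PathBuf {
        self.base_dir.join("mcp")
    }
    fn venv_path(&self) -> PathBuf {
        self.mcp_dir().join(".venv")
    }
    fn python_executable(&self) -> PathBuf {
        if cfg!(windows) {
            self.venv_path().join("Scripts").join("python.exe")
        } else {
            self.venv_path().join("bin").join("python")
        }
    }
}

fn main() {
    let config = ConfigPaths { base_dir: PathBuf::from("/home/user/.config/aigpt") };
    // On Unix this prints /home/user/.config/aigpt/mcp/.venv/bin/python
    println!("{}", config.python_executable().display());
}
```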
42
src/git.rs
@ -1,42 +0,0 @@
// src/git.rs
use std::process::Command;

pub fn git_status() {
    run_git_command(&["status"]);
}

pub fn git_init() {
    run_git_command(&["init"]);
}

#[allow(dead_code)]
pub fn git_commit(message: &str) {
    run_git_command(&["add", "."]);
    run_git_command(&["commit", "-m", message]);
}

#[allow(dead_code)]
pub fn git_push() {
    run_git_command(&["push"]);
}

#[allow(dead_code)]
pub fn git_pull() {
    run_git_command(&["pull"]);
}

#[allow(dead_code)]
pub fn git_branch() {
    run_git_command(&["branch"]);
}

fn run_git_command(args: &[&str]) {
    let status = Command::new("git")
        .args(args)
        .status()
        .expect("git コマンドの実行に失敗しました");

    if !status.success() {
        eprintln!("⚠️ git コマンドに失敗しました: {:?}", args);
    }
}
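A small usage sketch of the pattern in the deleted module: every helper funnels into `run_git_command`, which prints a warning on failure instead of returning a `Result`, so callers simply fire and continue. The commit message string below is illustrative only.

```rust
use std::process::Command;

// Minimal restatement of the call pattern in src/git.rs above.
fn run_git_command(args: &[&str]) {
    let status = Command::new("git")
        .args(args)
        .status()
        .expect("failed to run the git command");
    if !status.success() {
        eprintln!("git command failed: {:?}", args);
    }
}

fn main() {
    // Equivalent to git_commit("update") from the deleted module.
    run_git_command(&["add", "."]);
    run_git_command(&["commit", "-m", "update"]);
}
```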
13
src/logic.rs
@ -1,13 +0,0 @@
//src/logic.rs
use crate::model::AiSystem;

#[allow(dead_code)]
pub fn should_send(ai: &AiSystem) -> bool {
    let r = &ai.relationship;
    let env = &ai.environment;
    let score = r.trust + r.intimacy + r.curiosity;
    let relationship_ok = score >= r.threshold;
    let luck_ok = env.luck_today > 0.5;

    ai.messaging.enabled && relationship_ok && luck_ok
}
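With the default user data defined further down in src/metrics.rs (trust 0.2, intimacy 0.6, curiosity 0.5, threshold 1.5, luck_today 0.9), the relationship score is 1.3 < 1.5, so `should_send` comes out false. A self-contained sketch, with just enough of the model structs inlined to compile:

```rust
// Self-contained sketch of the should_send rule above; the numbers are the defaults
// from src/metrics.rs, and the struct definitions are trimmed stand-ins.
struct Relationship { trust: f32, intimacy: f32, curiosity: f32, threshold: f32 }
struct Environment { luck_today: f32 }
struct Messaging { enabled: bool }

fn should_send(r: &Relationship, env: &Environment, m: &Messaging) -> bool {
    let score = r.trust + r.intimacy + r.curiosity;
    m.enabled && score >= r.threshold && env.luck_today > 0.5
}

fn main() {
    let r = Relationship { trust: 0.2, intimacy: 0.6, curiosity: 0.5, threshold: 1.5 };
    let env = Environment { luck_today: 0.9 };
    let m = Messaging { enabled: true };
    // 0.2 + 0.6 + 0.5 = 1.3 < 1.5, so the relationship gate blocks the message.
    assert!(!should_send(&r, &env, &m));
}
```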
39
src/main.rs
@ -1,21 +1,28 @@
-//src/main.rs
-mod model;
-mod logic;
-mod agent;
+// main.rs
 mod cli;
-mod utils;
-mod commands;
 mod config;
-mod git;
-mod chat;
-mod metrics;
-mod memory;
+mod mcp;
 
-use cli::cli_app;
-use seahorse::App;
+use cli::{Args, Commands, ServerCommands};
+use clap::Parser;
 
-fn main() {
-    let args: Vec<String> = std::env::args().collect();
-    let app: App = cli_app();
-    app.run(args);
+#[tokio::main]
+async fn main() {
+    let args = Args::parse();
+
+    match args.command {
+        Commands::Server { command } => {
+            match command {
+                ServerCommands::Setup => {
+                    mcp::server::setup();
+                }
+                ServerCommands::Run => {
+                    mcp::server::run().await;
+                }
+            }
+        }
+        Commands::Chat { message } => {
+            mcp::server::chat(&message).await;
+        }
+    }
 }
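The new `main.rs` expects `Args`, `Commands`, and `ServerCommands` from `src/cli.rs`, which is not included in this view. The following is only a guess at the clap derive shape those types would need for the `match` above to compile; everything beyond the names used in `main.rs` is an assumption.

```rust
// Hypothetical src/cli.rs matching the new main.rs; the real file is not shown in this
// commit view, so doc comments and structure here are assumptions.
use clap::{Parser, Subcommand};

#[derive(Parser)]
pub struct Args {
    #[command(subcommand)]
    pub command: Commands,
}

#[derive(Subcommand)]
pub enum Commands {
    /// MCP server management
    Server {
        #[command(subcommand)]
        command: ServerCommands,
    },
    /// Send a single chat message
    Chat { message: String },
}

#[derive(Subcommand)]
pub enum ServerCommands {
    Setup,
    Run,
}

fn main() {
    // Parse a sample invocation without touching real process args.
    let args = Args::parse_from(["aigpt", "chat", "hello world!"]);
    match args.command {
        Commands::Chat { message } => println!("chat: {}", message),
        Commands::Server { .. } => println!("server subcommand"),
    }
}
```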
2
src/mcp/mod.rs
Normal file
@ -0,0 +1,2 @@
// src/mcp/mod.rs
pub mod server;
147
src/mcp/server.rs
Normal file
@ -0,0 +1,147 @@
// src/mcp/server.rs
use crate::config::ConfigPaths;
//use std::fs;
use std::process::Command as OtherCommand;
use std::env;
use fs_extra::dir::{copy, CopyOptions};

pub fn setup() {
    println!("🔧 MCP Server環境をセットアップしています...");
    let config = ConfigPaths::new();
    let mcp_dir = config.mcp_dir();

    // プロジェクトのmcp/ディレクトリからファイルをコピー
    let current_dir = env::current_dir().expect("現在のディレクトリを取得できません");
    let project_mcp_dir = current_dir.join("mcp");
    if !project_mcp_dir.exists() {
        eprintln!("❌ プロジェクトのmcp/ディレクトリが見つかりません: {}", project_mcp_dir.display());
        return;
    }

    if mcp_dir.exists() {
        fs_extra::dir::remove(&mcp_dir).expect("既存のmcp_dirの削除に失敗しました");
    }

    let mut options = CopyOptions::new();
    options.overwrite = true; // 上書き
    options.copy_inside = true; // 中身だけコピー

    copy(&project_mcp_dir, &mcp_dir, &options).expect("コピーに失敗しました");

    // 仮想環境の作成
    let venv_path = config.venv_path();
    if !venv_path.exists() {
        println!("🐍 仮想環境を作成しています...");
        let output = OtherCommand::new("python3")
            .args(&["-m", "venv", ".venv"])
            .current_dir(&mcp_dir)
            .output()
            .expect("venvの作成に失敗しました");

        if !output.status.success() {
            eprintln!("❌ venv作成エラー: {}", String::from_utf8_lossy(&output.stderr));
            return;
        }
        println!("✅ 仮想環境を作成しました");
    } else {
        println!("✅ 仮想環境は既に存在します");
    }

    // 依存関係のインストール
    println!("📦 依存関係をインストールしています...");
    let pip_path = config.pip_executable();
    let output = OtherCommand::new(&pip_path)
        .args(&["install", "-r", "requirements.txt"])
        .current_dir(&mcp_dir)
        .output()
        .expect("pipコマンドの実行に失敗しました");

    if !output.status.success() {
        eprintln!("❌ pip installエラー: {}", String::from_utf8_lossy(&output.stderr));
        return;
    }

    println!("✅ MCP Server環境のセットアップが完了しました!");
    println!("📍 セットアップ場所: {}", mcp_dir.display());
}

pub async fn run() {
    println!("🚀 MCP Serverを起動しています...");

    let config = ConfigPaths::new();
    let mcp_dir = config.mcp_dir();
    let python_path = config.python_executable();
    let server_py_path = mcp_dir.join("server.py");

    // セットアップの確認
    if !server_py_path.exists() {
        eprintln!("❌ server.pyが見つかりません。先に 'aigpt server setup' を実行してください。");
        return;
    }

    if !python_path.exists() {
        eprintln!("❌ Python実行ファイルが見つかりません。先に 'aigpt server setup' を実行してください。");
        return;
    }

    // サーバーの起動
    println!("🔗 サーバーを起動中... (Ctrl+Cで停止)");
    let mut child = OtherCommand::new(&python_path)
        .arg("server.py")
        .current_dir(&mcp_dir)
        .spawn()
        .expect("MCP Serverの起動に失敗しました");

    // サーバーの終了を待機
    match child.wait() {
        Ok(status) => {
            if status.success() {
                println!("✅ MCP Serverが正常に終了しました");
            } else {
                println!("❌ MCP Serverが異常終了しました: {}", status);
            }
        }
        Err(e) => {
            eprintln!("❌ MCP Serverの実行中にエラーが発生しました: {}", e);
        }
    }
}

pub async fn chat(message: &str) {
    println!("💬 チャットを開始しています...");

    let config = ConfigPaths::new();
    let mcp_dir = config.mcp_dir();
    let python_path = config.python_executable();
    let chat_py_path = mcp_dir.join("chat.py");

    // セットアップの確認
    if !chat_py_path.exists() {
        eprintln!("❌ chat.pyが見つかりません。先に 'aigpt server setup' を実行してください。");
        return;
    }

    if !python_path.exists() {
        eprintln!("❌ Python実行ファイルが見つかりません。先に 'aigpt server setup' を実行してください。");
        return;
    }

    // チャットの実行
    let output = OtherCommand::new(&python_path)
        .args(&["chat.py", message])
        .current_dir(&mcp_dir)
        .output()
        .expect("chat.pyの実行に失敗しました");

    if output.status.success() {
        let stdout = String::from_utf8_lossy(&output.stdout);
        let stderr = String::from_utf8_lossy(&output.stderr);

        if !stderr.is_empty() {
            print!("{}", stderr);
        }
        print!("{}", stdout);
    } else {
        eprintln!("❌ チャット実行エラー: {}", String::from_utf8_lossy(&output.stderr));
    }
}
@ -1,49 +0,0 @@
// src/memory.rs
use chrono::{DateTime, Local, Utc};
use serde::{Deserialize, Serialize};
use std::fs::{self};
//use std::fs::{self, OpenOptions};
use std::io::{BufReader, BufWriter};
use std::path::PathBuf;
use std::{fs::File};
//use std::{env, fs::File};

#[derive(Debug, Serialize, Deserialize)]
pub struct MemoryEntry {
    pub timestamp: DateTime<Utc>,
    pub sender: String,
    pub message: String,
}

pub fn log_message(base_dir: &PathBuf, sender: &str, message: &str) {
    let now_utc = Utc::now();
    let date_str = Local::now().format("%Y-%m-%d").to_string();
    let mut file_path = base_dir.clone();
    file_path.push("memory");
    let _ = fs::create_dir_all(&file_path);
    file_path.push(format!("{}.json", date_str));

    let new_entry = MemoryEntry {
        timestamp: now_utc,
        sender: sender.to_string(),
        message: message.to_string(),
    };

    let mut entries = if file_path.exists() {
        let file = File::open(&file_path).expect("💥 メモリファイルの読み込み失敗");
        let reader = BufReader::new(file);
        serde_json::from_reader(reader).unwrap_or_else(|_| vec![])
    } else {
        vec![]
    };

    entries.push(new_entry);

    let file = File::create(&file_path).expect("💥 メモリファイルの書き込み失敗");
    let writer = BufWriter::new(file);
    serde_json::to_writer_pretty(writer, &entries).expect("💥 JSONの書き込み失敗");
}

// 利用例(ask_chatの中)
// log_message(&config.base_dir, "user", question);
// log_message(&config.base_dir, "ai", &response);
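`log_message` appends one `MemoryEntry` per call into `memory/<YYYY-MM-DD>.json`. A sketch of the serialized shape; it assumes chrono's `serde` feature (as in the old Cargo.toml), and the message text is made up:

```rust
use chrono::Utc;
use serde::{Deserialize, Serialize};

// Mirrors MemoryEntry above; serialization requires chrono's "serde" feature.
#[derive(Debug, Serialize, Deserialize)]
struct MemoryEntry {
    timestamp: chrono::DateTime<Utc>,
    sender: String,
    message: String,
}

fn main() {
    let entry = MemoryEntry {
        timestamp: Utc::now(),
        sender: "user".to_string(),
        message: "hello world!".to_string(),
    };
    // Prints a pretty-printed object with "timestamp", "sender", and "message" fields,
    // i.e. the shape of each element in the daily memory file.
    println!("{}", serde_json::to_string_pretty(&entry).unwrap());
}
```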
147
src/metrics.rs
@ -1,147 +0,0 @@
// src/metrics.rs
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path;

use crate::config::ConfigPaths;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Metrics {
    pub trust: f32,
    pub intimacy: f32,
    pub energy: f32,
    pub can_send: bool,
    pub last_updated: DateTime<Utc>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Personality {
    pub kind: String,
    pub strength: f32,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Relationship {
    pub trust: f32,
    pub intimacy: f32,
    pub curiosity: f32,
    pub threshold: f32,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Environment {
    pub luck_today: f32,
    pub luck_history: Vec<f32>,
    pub level: i32,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Messaging {
    pub enabled: bool,
    pub schedule_time: Option<String>,
    pub decay_rate: f32,
    pub templates: Vec<String>,
    pub sent_today: bool, // 追加
    pub last_sent_date: Option<String>, // 追加
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Memory {
    pub recent_messages: Vec<String>,
    pub long_term_notes: Vec<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserData {
    pub personality: Personality,
    pub relationship: Relationship,
    pub environment: Environment,
    pub messaging: Messaging,
    pub last_interaction: DateTime<Utc>,
    pub memory: Memory,
    pub metrics: Metrics,
}

impl Metrics {
    pub fn decay(&mut self) {
        let now = Utc::now();
        let hours = (now - self.last_updated).num_minutes() as f32 / 60.0;
        self.trust = decay_param(self.trust, hours);
        self.intimacy = decay_param(self.intimacy, hours);
        self.energy = decay_param(self.energy, hours);
        self.can_send = self.trust >= 0.5 && self.intimacy >= 0.5 && self.energy >= 0.5;
        self.last_updated = now;
    }
}

pub fn load_user_data(path: &Path) -> UserData {
    let config = ConfigPaths::new();
    let example_path = Path::new("example.json");
    config.ensure_file_exists("json", example_path);

    if !path.exists() {
        return UserData {
            personality: Personality {
                kind: "positive".into(),
                strength: 0.8,
            },
            relationship: Relationship {
                trust: 0.2,
                intimacy: 0.6,
                curiosity: 0.5,
                threshold: 1.5,
            },
            environment: Environment {
                luck_today: 0.9,
                luck_history: vec![0.9, 0.9, 0.9],
                level: 1,
            },
            messaging: Messaging {
                enabled: true,
                schedule_time: Some("08:00".to_string()),
                decay_rate: 0.1,
                templates: vec![
                    "おはよう!今日もがんばろう!".to_string(),
                    "ねえ、話したいことがあるの。".to_string(),
                ],
                sent_today: false,
                last_sent_date: None,
            },
            last_interaction: Utc::now(),
            memory: Memory {
                recent_messages: vec![],
                long_term_notes: vec![],
            },
            metrics: Metrics {
                trust: 0.5,
                intimacy: 0.5,
                energy: 0.5,
                can_send: true,
                last_updated: Utc::now(),
            },
        };
    }

    let content = fs::read_to_string(path).expect("user.json の読み込みに失敗しました");
    serde_json::from_str(&content).expect("user.json のパースに失敗しました")
}

pub fn save_user_data(path: &Path, data: &UserData) {
    let content = serde_json::to_string_pretty(data).expect("user.json のシリアライズ失敗");
    fs::write(path, content).expect("user.json の書き込みに失敗しました");
}

pub fn update_metrics_decay() -> Metrics {
    let config = ConfigPaths::new();
    let path = config.base_dir.join("user.json");
    let mut data = load_user_data(&path);
    data.metrics.decay();
    save_user_data(&path, &data);
    data.metrics
}

fn decay_param(value: f32, hours: f32) -> f32 {
    let decay_rate = 0.05;
    (value * (1.0f32 - decay_rate).powf(hours)).clamp(0.0, 1.0)
}
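`decay_param` is plain exponential decay, value × 0.95^hours, clamped to [0, 1]: after 24 hours a metric of 0.5 drops to roughly 0.15, well below the 0.5 gate used for `can_send`. A minimal check:

```rust
// Minimal sketch of decay_param above: value * 0.95^hours, clamped to [0, 1].
fn decay_param(value: f32, hours: f32) -> f32 {
    let decay_rate = 0.05;
    (value * (1.0f32 - decay_rate).powf(hours)).clamp(0.0, 1.0)
}

fn main() {
    // After a full day without interaction, 0.5 * 0.95^24 ≈ 0.146,
    // which is below the 0.5 threshold used for can_send.
    let after_day = decay_param(0.5, 24.0);
    println!("{:.3}", after_day);
    assert!(after_day < 0.5);
}
```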
72
src/model.rs
@ -1,72 +0,0 @@
//src/model.rs
use rusqlite::{params, Connection, Result as SqlResult};
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
pub struct AiSystem {
    pub personality: Personality,
    pub relationship: Relationship,
    pub environment: Environment,
    pub messaging: Messaging,
}

impl AiSystem {
    pub fn save_to_db(&self, conn: &Connection) -> SqlResult<()> {
        conn.execute(
            "CREATE TABLE IF NOT EXISTS ai_state (id INTEGER PRIMARY KEY, json TEXT)",
            [],
        )?;

        let json_data = serde_json::to_string(self).map_err(|e| {
            rusqlite::Error::ToSqlConversionFailure(Box::new(e))
        })?;

        conn.execute(
            "INSERT OR REPLACE INTO ai_state (id, json) VALUES (?1, ?2)",
            params![1, json_data],
        )?;

        Ok(())
    }

    pub fn load_from_db(conn: &Connection) -> SqlResult<Self> {
        let mut stmt = conn.prepare("SELECT json FROM ai_state WHERE id = ?1")?;
        let json: String = stmt.query_row(params![1], |row| row.get(0))?;

        // ここも serde_json のエラーを map_err で変換
        let system: AiSystem = serde_json::from_str(&json).map_err(|e| {
            rusqlite::Error::FromSqlConversionFailure(0, rusqlite::types::Type::Text, Box::new(e))
        })?;

        Ok(system)
    }
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Personality {
    pub kind: String, // e.g., "positive", "negative", "neutral"
    pub strength: f32, // 0.0 - 1.0
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Relationship {
    pub trust: f32, // 0.0 - 1.0
    pub intimacy: f32, // 0.0 - 1.0
    pub curiosity: f32, // 0.0 - 1.0
    pub threshold: f32, // if sum > threshold, allow messaging
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Environment {
    pub luck_today: f32, // 0.1 - 1.0
    pub luck_history: Vec<f32>, // last 3 values
    pub level: i32, // current mental strength level
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Messaging {
    pub enabled: bool,
    pub schedule_time: Option<String>, // e.g., "08:00"
    pub decay_rate: f32, // how quickly emotion fades (0.0 - 1.0)
    pub templates: Vec<String>, // message template variations
}
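The `save_to_db` / `load_from_db` pair serializes the whole state to JSON and stores it in a single-row table. A round-trip sketch against an in-memory SQLite database, using a trimmed stand-in struct instead of the full `AiSystem`:

```rust
use rusqlite::{params, Connection, Result as SqlResult};
use serde::{Deserialize, Serialize};

// Trimmed stand-in for AiSystem, just to exercise the same persistence pattern.
#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct State {
    trust: f32,
}

fn main() -> SqlResult<()> {
    let conn = Connection::open_in_memory()?;
    conn.execute(
        "CREATE TABLE IF NOT EXISTS ai_state (id INTEGER PRIMARY KEY, json TEXT)",
        [],
    )?;

    // Save: serialize to JSON and upsert into the single-row table.
    let state = State { trust: 0.2 };
    let json = serde_json::to_string(&state)
        .map_err(|e| rusqlite::Error::ToSqlConversionFailure(Box::new(e)))?;
    conn.execute(
        "INSERT OR REPLACE INTO ai_state (id, json) VALUES (?1, ?2)",
        params![1, json],
    )?;

    // Load: read the JSON column back and deserialize.
    let loaded: String =
        conn.query_row("SELECT json FROM ai_state WHERE id = ?1", params![1], |row| row.get(0))?;
    let restored: State = serde_json::from_str(&loaded)
        .map_err(|e| rusqlite::Error::FromSqlConversionFailure(0, rusqlite::types::Type::Text, Box::new(e)))?;
    assert_eq!(state, restored);
    Ok(())
}
```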
13
src/utils.rs
@ -1,13 +0,0 @@
// src/utils.rs
use std::fs;
use crate::model::AiSystem;

pub fn load_config(path: &str) -> AiSystem {
    let data = fs::read_to_string(path).expect("JSON読み込み失敗");
    serde_json::from_str(&data).expect("JSONパース失敗")
}

pub fn save_config(path: &str, ai: &AiSystem) {
    let json = serde_json::to_string_pretty(&ai).expect("JSONシリアライズ失敗");
    fs::write(path, json).expect("JSON保存失敗");
}
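`load_config` / `save_config` are a straight serde_json round trip over a file. A self-contained restatement, made generic so it compiles without the deleted `AiSystem` type; `example.json` is the template in the repository root, and `user.json` as an output path is only an example:

```rust
use std::fs;
use serde::{de::DeserializeOwned, Serialize};

// Generic restatement of the load_config / save_config pattern above.
fn load_config<T: DeserializeOwned>(path: &str) -> T {
    let data = fs::read_to_string(path).expect("failed to read JSON");
    serde_json::from_str(&data).expect("failed to parse JSON")
}

fn save_config<T: Serialize>(path: &str, value: &T) {
    let json = serde_json::to_string_pretty(value).expect("failed to serialize JSON");
    fs::write(path, json).expect("failed to write JSON");
}

fn main() {
    // Round-trip an arbitrary JSON value through disk.
    let value: serde_json::Value = load_config("example.json");
    save_config("user.json", &value);
}
```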