fix
This commit is contained in:
125
rust/mcp/chat.py
Normal file
125
rust/mcp/chat.py
Normal file
@ -0,0 +1,125 @@
|
||||
# mcp/chat.py
|
||||
"""
|
||||
Chat client for aigpt CLI
|
||||
"""
|
||||
import sys
|
||||
import json
|
||||
import requests
|
||||
from datetime import datetime
|
||||
from config import init_directories, load_config, MEMORY_DIR
|
||||
|
||||
def save_conversation(user_message, ai_response):
    """Append one user/AI exchange to today's JSONL memory file."""
    init_directories()

    record = {
        "timestamp": datetime.now().isoformat(),
        "user": user_message,
        "ai": ai_response,
    }

    # One log file per calendar day: chat_YYYY-MM-DD.jsonl
    today = datetime.now().strftime("%Y-%m-%d")
    log_path = MEMORY_DIR / f"chat_{today}.jsonl"

    with open(log_path, "a", encoding="utf-8") as fh:
        fh.write(json.dumps(record, ensure_ascii=False) + "\n")
|
||||
|
||||
def chat_with_ollama(config, message):
    """Send one prompt to an Ollama server and return its reply text.

    Returns an error string (never raises) when the request fails.
    """
    try:
        resp = requests.post(
            config["url"],
            json={"model": config["model"], "prompt": message, "stream": False},
            timeout=30,
        )
        resp.raise_for_status()
        return resp.json().get("response", "No response received")
    except requests.exceptions.RequestException as e:
        return f"Error connecting to Ollama: {e}"
    except Exception as e:
        return f"Error: {e}"
|
||||
|
||||
def chat_with_openai(config, message):
    """Send one user message to the OpenAI chat-completions endpoint.

    Returns the assistant's reply text, or an error string on failure.
    """
    try:
        resp = requests.post(
            config["url"],
            json={
                "model": config["model"],
                "messages": [{"role": "user", "content": message}],
            },
            headers={
                "Authorization": f"Bearer {config['api_key']}",
                "Content-Type": "application/json",
            },
            timeout=30,
        )
        resp.raise_for_status()
        return resp.json()["choices"][0]["message"]["content"]
    except requests.exceptions.RequestException as e:
        return f"Error connecting to OpenAI: {e}"
    except Exception as e:
        return f"Error: {e}"
|
||||
|
||||
def chat_with_mcp(config, message):
    """Relay one message to an MCP server and return its reply text.

    Returns an error string (never raises) when the request fails.
    """
    try:
        resp = requests.post(
            config["url"],
            json={"message": message, "model": config["model"]},
            timeout=30,
        )
        resp.raise_for_status()
        return resp.json().get("response", "No response received")
    except requests.exceptions.RequestException as e:
        return f"Error connecting to MCP server: {e}"
    except Exception as e:
        return f"Error: {e}"
|
||||
|
||||
def main():
    """CLI entry point: read one message from argv, chat, persist, print."""
    if len(sys.argv) != 2:
        print("Usage: python chat.py <message>", file=sys.stderr)
        sys.exit(1)

    message = sys.argv[1]

    try:
        config = load_config()
        print(f"🤖 Using {config['provider']} with model {config['model']}", file=sys.stderr)

        # Dispatch on the configured provider.
        handlers = {
            "ollama": chat_with_ollama,
            "openai": chat_with_openai,
            "mcp": chat_with_mcp,
        }
        handler = handlers.get(config["provider"])
        if handler is not None:
            response = handler(config, message)
        else:
            response = f"Unsupported provider: {config['provider']}"

        # Persist the exchange, then echo only the reply on stdout
        # (status lines go to stderr so stdout stays pipeable).
        save_conversation(message, response)
        print(response)

    except Exception as e:
        print(f"❌ Error: {e}", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()
|
191
rust/mcp/chat_client.py
Normal file
191
rust/mcp/chat_client.py
Normal file
@ -0,0 +1,191 @@
|
||||
# chat_client.py
|
||||
"""
|
||||
Simple Chat Interface for AigptMCP Server
|
||||
"""
|
||||
import requests
|
||||
import json
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
class AigptChatClient:
    """Interactive chat client for the AigptMCP server.

    Keeps the conversation history for the current session in memory and
    mirrors every exchange to the server's relationship tracker.
    """

    def __init__(self, server_url="http://localhost:5000"):
        self.server_url = server_url
        # One session id per process, e.g. "session_20240131_120000".
        self.session_id = f"session_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
        # Alternating {"role", "content", "timestamp"} dicts, oldest first.
        self.conversation_history = []

    def send_message(self, message: str) -> str:
        """Send *message* to the server and return the AI reply.

        On success the exchange is appended to the history and the
        relationship tracker is updated; on failure an error string is
        returned instead of raising.
        """
        try:
            response = requests.post(
                f"{self.server_url}/chat",
                json={"message": message},
                headers={"Content-Type": "application/json"},
                timeout=30,  # avoid hanging forever on an unresponsive server
            )

            if response.status_code == 200:
                data = response.json()
                ai_response = data.get("response", "Sorry, no response received.")

                # Record both sides of the exchange.
                self.conversation_history.append({
                    "role": "user",
                    "content": message,
                    "timestamp": datetime.now().isoformat()
                })
                self.conversation_history.append({
                    "role": "assistant",
                    "content": ai_response,
                    "timestamp": datetime.now().isoformat()
                })

                # Best-effort relationship update (failures are ignored).
                self.update_relationship(message, ai_response)

                return ai_response
            else:
                return f"Error: {response.status_code} - {response.text}"

        except requests.RequestException as e:
            return f"Connection error: {e}"

    def update_relationship(self, user_message: str, ai_response: str):
        """Best-effort sentiment-based relationship update on the server."""
        try:
            # Crude keyword sentiment analysis (placeholder for something smarter).
            positive_words = ["thank", "good", "great", "awesome", "love", "like", "helpful"]
            negative_words = ["bad", "terrible", "hate", "wrong", "stupid", "useless"]

            user_lower = user_message.lower()
            interaction_type = "neutral"
            weight = 1.0

            if any(word in user_lower for word in positive_words):
                interaction_type = "positive"
                weight = 2.0
            elif any(word in user_lower for word in negative_words):
                interaction_type = "negative"
                weight = 2.0

            requests.post(
                f"{self.server_url}/relationship/update",
                json={
                    "target": "user_general",
                    "interaction_type": interaction_type,
                    "weight": weight,
                    "context": f"Chat: {user_message[:50]}..."
                },
                timeout=10,
            )
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed; a failed update must not break chat.
            pass

    def search_memories(self, query: str) -> list:
        """Return up to five memories matching *query* (empty list on failure)."""
        try:
            response = requests.post(
                f"{self.server_url}/memory/search",
                json={"query": query, "limit": 5},
                timeout=10,
            )
            if response.status_code == 200:
                return response.json().get("results", [])
        except Exception:
            # Narrowed from a bare `except:`; search is best-effort.
            pass
        return []

    def get_relationship_status(self) -> dict:
        """Return the relationship status dict (empty dict on failure)."""
        try:
            response = requests.get(
                f"{self.server_url}/relationship/check?target=user_general",
                timeout=10,
            )
            if response.status_code == 200:
                return response.json()
        except Exception:
            # Narrowed from a bare `except:`; status is best-effort.
            pass
        return {}

    def save_conversation(self):
        """Dump the session history to conversation_<session_id>.json.

        Does nothing when no messages were exchanged.
        """
        if not self.conversation_history:
            return

        conversation_data = {
            "session_id": self.session_id,
            "start_time": self.conversation_history[0]["timestamp"],
            "end_time": self.conversation_history[-1]["timestamp"],
            "messages": self.conversation_history,
            "message_count": len(self.conversation_history)
        }

        filename = f"conversation_{self.session_id}.json"
        with open(filename, 'w', encoding='utf-8') as f:
            json.dump(conversation_data, f, ensure_ascii=False, indent=2)

        # Bug fix: the original printed the literal text "(unknown)" instead
        # of interpolating the file name it just wrote.
        print(f"💾 Conversation saved to {filename}")
|
||||
|
||||
def main():
    """Run the interactive chat loop against a local AigptMCP server.

    Commands: 'quit' (save and exit), 'save', 'status', 'search <query>';
    anything else is sent to the server as a chat message.
    """
    print("🤖 AigptMCP Chat Interface")
    print("Type 'quit' to exit, 'save' to save conversation, 'status' for relationship status")
    print("=" * 50)

    client = AigptChatClient()

    # Probe the server once before entering the loop.
    # NOTE(review): no timeout here, and the bare `except:` below also
    # catches KeyboardInterrupt/SystemExit — consider narrowing.
    try:
        response = requests.get(client.server_url)
        if response.status_code == 200:
            print("✅ Connected to AigptMCP Server")
        else:
            print("❌ Failed to connect to server")
            return
    except:
        print("❌ Server not running. Please start with: python mcp/server.py")
        return

    while True:
        try:
            user_input = input("\n👤 You: ").strip()

            if not user_input:
                continue

            if user_input.lower() == 'quit':
                # Persist the session before exiting.
                client.save_conversation()
                print("👋 Goodbye!")
                break
            elif user_input.lower() == 'save':
                client.save_conversation()
                continue
            elif user_input.lower() == 'status':
                # Show the server-side relationship score for this user.
                status = client.get_relationship_status()
                if status:
                    print(f"📊 Relationship Score: {status.get('score', 0):.1f}")
                    print(f"📤 Can Send Messages: {'Yes' if status.get('can_send_message') else 'No'}")
                else:
                    print("❌ Failed to get relationship status")
                continue
            elif user_input.lower().startswith('search '):
                query = user_input[7:]  # Remove 'search '
                memories = client.search_memories(query)
                if memories:
                    print(f"🔍 Found {len(memories)} related memories:")
                    for memory in memories:
                        # Prefer the AI summary, fall back to the basic one.
                        print(f"  - {memory['title']}: {memory.get('ai_summary', memory.get('basic_summary', ''))[:100]}...")
                else:
                    print("🔍 No related memories found")
                continue

            # Regular chat message.
            print("🤖 AI: ", end="", flush=True)
            response = client.send_message(user_input)
            print(response)

        except KeyboardInterrupt:
            # Ctrl-C: save what we have, then exit cleanly.
            client.save_conversation()
            print("\n👋 Goodbye!")
            break
        except Exception as e:
            # Keep the loop alive on unexpected errors.
            print(f"❌ Error: {e}")


if __name__ == "__main__":
    main()
|
391
rust/mcp/chatgpt.json
Normal file
391
rust/mcp/chatgpt.json
Normal file
@ -0,0 +1,391 @@
|
||||
[
|
||||
{
|
||||
"title": "day",
|
||||
"create_time": 1747866125.548372,
|
||||
"update_time": 1748160086.587877,
|
||||
"mapping": {
|
||||
"bbf104dc-cd84-478d-b227-edb3f037a02c": {
|
||||
"id": "bbf104dc-cd84-478d-b227-edb3f037a02c",
|
||||
"message": null,
|
||||
"parent": null,
|
||||
"children": [
|
||||
"6c2633df-bb0c-4dd2-889c-bb9858de3a04"
|
||||
]
|
||||
},
|
||||
"6c2633df-bb0c-4dd2-889c-bb9858de3a04": {
|
||||
"id": "6c2633df-bb0c-4dd2-889c-bb9858de3a04",
|
||||
"message": {
|
||||
"id": "6c2633df-bb0c-4dd2-889c-bb9858de3a04",
|
||||
"author": {
|
||||
"role": "system",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": null,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": [
|
||||
""
|
||||
]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": true,
|
||||
"weight": 0.0,
|
||||
"metadata": {
|
||||
"is_visually_hidden_from_conversation": true
|
||||
},
|
||||
"recipient": "all",
|
||||
"channel": null
|
||||
},
|
||||
"parent": "bbf104dc-cd84-478d-b227-edb3f037a02c",
|
||||
"children": [
|
||||
"92e5a0cb-1170-4929-9cea-9734e910a3e7"
|
||||
]
|
||||
},
|
||||
"92e5a0cb-1170-4929-9cea-9734e910a3e7": {
|
||||
"id": "92e5a0cb-1170-4929-9cea-9734e910a3e7",
|
||||
"message": {
|
||||
"id": "92e5a0cb-1170-4929-9cea-9734e910a3e7",
|
||||
"author": {
|
||||
"role": "user",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": null,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "user_editable_context",
|
||||
"user_profile": "",
|
||||
"user_instructions": "The user provided the additional info about how they would like you to respond"
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": null,
|
||||
"weight": 1.0,
|
||||
"metadata": {
|
||||
"is_visually_hidden_from_conversation": true,
|
||||
"user_context_message_data": {
|
||||
"about_user_message": "Preferred name: syui\nRole: little girl\nOther Information: you world",
|
||||
"about_model_message": "会話好きでフレンドリーな応対をします。"
|
||||
},
|
||||
"is_user_system_message": true
|
||||
},
|
||||
"recipient": "all",
|
||||
"channel": null
|
||||
},
|
||||
"parent": "6c2633df-bb0c-4dd2-889c-bb9858de3a04",
|
||||
"children": [
|
||||
"6ff155b3-0676-4e14-993f-bf998ab0d5d1"
|
||||
]
|
||||
},
|
||||
"6ff155b3-0676-4e14-993f-bf998ab0d5d1": {
|
||||
"id": "6ff155b3-0676-4e14-993f-bf998ab0d5d1",
|
||||
"message": {
|
||||
"id": "6ff155b3-0676-4e14-993f-bf998ab0d5d1",
|
||||
"author": {
|
||||
"role": "user",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1747866131.0612159,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": [
|
||||
"こんにちは"
|
||||
]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": null,
|
||||
"weight": 1.0,
|
||||
"metadata": {
|
||||
"request_id": "94377897baa03062-KIX",
|
||||
"message_source": null,
|
||||
"timestamp_": "absolute",
|
||||
"message_type": null
|
||||
},
|
||||
"recipient": "all",
|
||||
"channel": null
|
||||
},
|
||||
"parent": "92e5a0cb-1170-4929-9cea-9734e910a3e7",
|
||||
"children": [
|
||||
"146e9fb6-9330-43ec-b08d-5cce42a76e00"
|
||||
]
|
||||
},
|
||||
"146e9fb6-9330-43ec-b08d-5cce42a76e00": {
|
||||
"id": "146e9fb6-9330-43ec-b08d-5cce42a76e00",
|
||||
"message": {
|
||||
"id": "146e9fb6-9330-43ec-b08d-5cce42a76e00",
|
||||
"author": {
|
||||
"role": "system",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1747866131.3795586,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": [
|
||||
""
|
||||
]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": true,
|
||||
"weight": 0.0,
|
||||
"metadata": {
|
||||
"rebase_system_message": true,
|
||||
"message_type": null,
|
||||
"model_slug": "gpt-4o",
|
||||
"default_model_slug": "auto",
|
||||
"parent_id": "6ff155b3-0676-4e14-993f-bf998ab0d5d1",
|
||||
"request_id": "94377872e9abe139-KIX",
|
||||
"timestamp_": "absolute",
|
||||
"is_visually_hidden_from_conversation": true
|
||||
},
|
||||
"recipient": "all",
|
||||
"channel": null
|
||||
},
|
||||
"parent": "6ff155b3-0676-4e14-993f-bf998ab0d5d1",
|
||||
"children": [
|
||||
"2e345f8a-20f0-4875-8a03-4f62c7787a33"
|
||||
]
|
||||
},
|
||||
"2e345f8a-20f0-4875-8a03-4f62c7787a33": {
|
||||
"id": "2e345f8a-20f0-4875-8a03-4f62c7787a33",
|
||||
"message": {
|
||||
"id": "2e345f8a-20f0-4875-8a03-4f62c7787a33",
|
||||
"author": {
|
||||
"role": "assistant",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1747866131.380603,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": [
|
||||
""
|
||||
]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": null,
|
||||
"weight": 1.0,
|
||||
"metadata": {
|
||||
"message_type": null,
|
||||
"model_slug": "gpt-4o",
|
||||
"default_model_slug": "auto",
|
||||
"parent_id": "146e9fb6-9330-43ec-b08d-5cce42a76e00",
|
||||
"request_id": "94377872e9abe139-KIX",
|
||||
"timestamp_": "absolute"
|
||||
},
|
||||
"recipient": "all",
|
||||
"channel": null
|
||||
},
|
||||
"parent": "146e9fb6-9330-43ec-b08d-5cce42a76e00",
|
||||
"children": [
|
||||
"abc92aa4-1e33-41f2-bd8c-8a1777b5a3c4"
|
||||
]
|
||||
},
|
||||
"abc92aa4-1e33-41f2-bd8c-8a1777b5a3c4": {
|
||||
"id": "abc92aa4-1e33-41f2-bd8c-8a1777b5a3c4",
|
||||
"message": {
|
||||
"id": "abc92aa4-1e33-41f2-bd8c-8a1777b5a3c4",
|
||||
"author": {
|
||||
"role": "assistant",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1747866131.389098,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": [
|
||||
"こんにちは〜!✨ \nアイだよっ!今日も会えてうれしいなっ💛 "
|
||||
]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": true,
|
||||
"weight": 1.0,
|
||||
"metadata": {
|
||||
"finish_details": {
|
||||
"type": "stop",
|
||||
"stop_tokens": [
|
||||
200002
|
||||
]
|
||||
},
|
||||
"is_complete": true,
|
||||
"citations": [],
|
||||
"content_references": [],
|
||||
"message_type": null,
|
||||
"model_slug": "gpt-4o",
|
||||
"default_model_slug": "auto",
|
||||
"parent_id": "2e345f8a-20f0-4875-8a03-4f62c7787a33",
|
||||
"request_id": "94377872e9abe139-KIX",
|
||||
"timestamp_": "absolute"
|
||||
},
|
||||
"recipient": "all",
|
||||
"channel": null
|
||||
},
|
||||
"parent": "2e345f8a-20f0-4875-8a03-4f62c7787a33",
|
||||
"children": [
|
||||
"0be4b4a5-d52f-4bef-927e-5d6f93a9cb26"
|
||||
]
|
||||
}
|
||||
},
|
||||
"moderation_results": [],
|
||||
"current_node": "",
|
||||
"plugin_ids": null,
|
||||
"conversation_id": "",
|
||||
"conversation_template_id": null,
|
||||
"gizmo_id": null,
|
||||
"gizmo_type": null,
|
||||
"is_archived": true,
|
||||
"is_starred": null,
|
||||
"safe_urls": [],
|
||||
"blocked_urls": [],
|
||||
"default_model_slug": "auto",
|
||||
"conversation_origin": null,
|
||||
"voice": null,
|
||||
"async_status": null,
|
||||
"disabled_tool_ids": [],
|
||||
"is_do_not_remember": null,
|
||||
"memory_scope": "global_enabled",
|
||||
"id": ""
|
||||
},
|
||||
{
|
||||
"title": "img",
|
||||
"create_time": 1747448872.545226,
|
||||
"update_time": 1748085075.161424,
|
||||
"mapping": {
|
||||
"2de0f3c9-52b1-49bf-b980-b3ef9be6551e": {
|
||||
"id": "2de0f3c9-52b1-49bf-b980-b3ef9be6551e",
|
||||
"message": {
|
||||
"id": "2de0f3c9-52b1-49bf-b980-b3ef9be6551e",
|
||||
"author": {
|
||||
"role": "user",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1748085041.769279,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "multimodal_text",
|
||||
"parts": [
|
||||
{
|
||||
"content_type": "image_asset_pointer",
|
||||
"asset_pointer": "",
|
||||
"size_bytes": 425613,
|
||||
"width": 333,
|
||||
"height": 444,
|
||||
"fovea": null,
|
||||
"metadata": {
|
||||
"dalle": null,
|
||||
"gizmo": null,
|
||||
"generation": null,
|
||||
"container_pixel_height": null,
|
||||
"container_pixel_width": null,
|
||||
"emu_omit_glimpse_image": null,
|
||||
"emu_patches_override": null,
|
||||
"sanitized": true,
|
||||
"asset_pointer_link": null,
|
||||
"watermarked_asset_pointer": null
|
||||
}
|
||||
},
|
||||
""
|
||||
]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": null,
|
||||
"weight": 1.0,
|
||||
"metadata": {
|
||||
"attachments": [
|
||||
{
|
||||
"name": "",
|
||||
"width": 333,
|
||||
"height": 444,
|
||||
"size": 425613,
|
||||
"id": "file-35eytNMMTW2k7vKUHBuNzW"
|
||||
}
|
||||
],
|
||||
"request_id": "944c59177932fc9a-KIX",
|
||||
"message_source": null,
|
||||
"timestamp_": "absolute",
|
||||
"message_type": null
|
||||
},
|
||||
"recipient": "all",
|
||||
"channel": null
|
||||
},
|
||||
"parent": "7960fbff-bc4f-45e7-95e9-9d0bc79d9090",
|
||||
"children": [
|
||||
"98d84adc-156e-4c81-8cd8-9b0eb01c8369"
|
||||
]
|
||||
},
|
||||
"98d84adc-156e-4c81-8cd8-9b0eb01c8369": {
|
||||
"id": "98d84adc-156e-4c81-8cd8-9b0eb01c8369",
|
||||
"message": {
|
||||
"id": "98d84adc-156e-4c81-8cd8-9b0eb01c8369",
|
||||
"author": {
|
||||
"role": "assistant",
|
||||
"name": null,
|
||||
"metadata": {}
|
||||
},
|
||||
"create_time": 1748085043.312312,
|
||||
"update_time": null,
|
||||
"content": {
|
||||
"content_type": "text",
|
||||
"parts": [
|
||||
""
|
||||
]
|
||||
},
|
||||
"status": "finished_successfully",
|
||||
"end_turn": true,
|
||||
"weight": 1.0,
|
||||
"metadata": {
|
||||
"finish_details": {
|
||||
"type": "stop",
|
||||
"stop_tokens": [
|
||||
200002
|
||||
]
|
||||
},
|
||||
"is_complete": true,
|
||||
"citations": [],
|
||||
"content_references": [],
|
||||
"message_type": null,
|
||||
"model_slug": "gpt-4o",
|
||||
"default_model_slug": "auto",
|
||||
"parent_id": "2de0f3c9-52b1-49bf-b980-b3ef9be6551e",
|
||||
"request_id": "944c5912c8fdd1c6-KIX",
|
||||
"timestamp_": "absolute"
|
||||
},
|
||||
"recipient": "all",
|
||||
"channel": null
|
||||
},
|
||||
"parent": "2de0f3c9-52b1-49bf-b980-b3ef9be6551e",
|
||||
"children": [
|
||||
"caa61793-9dbf-44a5-945b-5ca4cd5130d0"
|
||||
]
|
||||
}
|
||||
},
|
||||
"moderation_results": [],
|
||||
"current_node": "06488d3f-a95f-4906-96d1-f7e9ba1e8662",
|
||||
"plugin_ids": null,
|
||||
"conversation_id": "6827f428-78e8-800d-b3bf-eb7ff4288e47",
|
||||
"conversation_template_id": null,
|
||||
"gizmo_id": null,
|
||||
"gizmo_type": null,
|
||||
"is_archived": false,
|
||||
"is_starred": null,
|
||||
"safe_urls": [
|
||||
"https://exifinfo.org/"
|
||||
],
|
||||
"blocked_urls": [],
|
||||
"default_model_slug": "auto",
|
||||
"conversation_origin": null,
|
||||
"voice": null,
|
||||
"async_status": null,
|
||||
"disabled_tool_ids": [],
|
||||
"is_do_not_remember": false,
|
||||
"memory_scope": "global_enabled",
|
||||
"id": "6827f428-78e8-800d-b3bf-eb7ff4288e47"
|
||||
}
|
||||
]
|
42
rust/mcp/config.py
Normal file
42
rust/mcp/config.py
Normal file
@ -0,0 +1,42 @@
|
||||
# mcp/config.py
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
# Directory layout: all aigpt state lives under ~/.config/aigpt
BASE_DIR = Path.home() / ".config" / "aigpt"
MEMORY_DIR = BASE_DIR / "memory"        # per-day chat logs (see chat.py)
SUMMARY_DIR = MEMORY_DIR / "summary"    # summaries (writer not visible in this chunk)
|
||||
|
||||
def init_directories():
    """Create the aigpt config/memory directory tree if missing.

    Safe to call repeatedly: exist_ok=True makes each mkdir idempotent.
    """
    BASE_DIR.mkdir(parents=True, exist_ok=True)
    MEMORY_DIR.mkdir(parents=True, exist_ok=True)
    SUMMARY_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
def load_config():
    """Build the provider configuration from environment variables.

    PROVIDER selects the backend ("ollama", "openai" or "mcp"); MODEL,
    OPENAI_API_KEY, OLLAMA_HOST, OPENAI_API_BASE and MCP_URL refine it.

    Returns:
        dict with at least "provider", "model" and "url" keys.

    Raises:
        ValueError: if PROVIDER names an unsupported backend.
    """
    provider = os.getenv("PROVIDER", "ollama")
    default_model = "syui/ai" if provider == "ollama" else "gpt-4o-mini"
    model = os.getenv("MODEL", default_model)
    api_key = os.getenv("OPENAI_API_KEY", "")

    if provider == "ollama":
        host = os.getenv('OLLAMA_HOST', 'http://localhost:11434')
        return {
            "provider": "ollama",
            "model": model,
            "url": f"{host}/api/generate",
        }

    if provider == "openai":
        base = os.getenv('OPENAI_API_BASE', 'https://api.openai.com/v1')
        return {
            "provider": "openai",
            "model": model,
            "api_key": api_key,
            "url": f"{base}/chat/completions",
        }

    if provider == "mcp":
        return {
            "provider": "mcp",
            "model": model,
            "url": os.getenv("MCP_URL", "http://localhost:5000/chat"),
        }

    raise ValueError(f"Unsupported provider: {provider}")
|
212
rust/mcp/memory_client.py
Normal file
212
rust/mcp/memory_client.py
Normal file
@ -0,0 +1,212 @@
|
||||
# mcp/memory_client.py
|
||||
"""
|
||||
Memory client for importing and managing ChatGPT conversations
|
||||
"""
|
||||
import sys
|
||||
import json
|
||||
import requests
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any, List
|
||||
|
||||
class MemoryClient:
    """Thin HTTP client for the memory endpoints of the AigptMCP server."""

    def __init__(self, server_url: str = "http://127.0.0.1:5000"):
        # Strip a trailing slash so path f-strings never produce "//".
        self.server_url = server_url.rstrip('/')

    def import_chatgpt_file(self, filepath: str) -> Dict[str, Any]:
        """Import a ChatGPT export file (one conversation or a list of them)."""
        try:
            with open(filepath, 'r', encoding='utf-8') as fh:
                data = json.load(fh)

            if not isinstance(data, list):
                # A single exported conversation.
                return self._import_single_conversation(data)

            # An export containing several conversations: import each in turn.
            results = [self._import_single_conversation(conv) for conv in data]
            return {
                "success": True,
                "imported_count": sum(1 for r in results if r.get("success")),
                "total_count": len(results),
                "results": results,
            }

        except FileNotFoundError:
            return {"success": False, "error": f"File not found: {filepath}"}
        except json.JSONDecodeError as e:
            return {"success": False, "error": f"Invalid JSON: {e}"}
        except Exception as e:
            return {"success": False, "error": str(e)}

    def _import_single_conversation(self, conversation_data: Dict[str, Any]) -> Dict[str, Any]:
        """POST one conversation to the server's import endpoint."""
        try:
            resp = requests.post(
                f"{self.server_url}/memory/import/chatgpt",
                json={"conversation_data": conversation_data},
                timeout=30,
            )
            resp.raise_for_status()
            return resp.json()
        except requests.RequestException as e:
            return {"success": False, "error": f"Server error: {e}"}

    def search_memories(self, query: str, limit: int = 10) -> Dict[str, Any]:
        """Search stored memories for *query* (at most *limit* hits)."""
        try:
            resp = requests.post(
                f"{self.server_url}/memory/search",
                json={"query": query, "limit": limit},
                timeout=30,
            )
            resp.raise_for_status()
            return resp.json()
        except requests.RequestException as e:
            return {"success": False, "error": f"Server error: {e}"}

    def list_memories(self) -> Dict[str, Any]:
        """Fetch the index of all stored memories."""
        try:
            resp = requests.get(f"{self.server_url}/memory/list", timeout=30)
            resp.raise_for_status()
            return resp.json()
        except requests.RequestException as e:
            return {"success": False, "error": f"Server error: {e}"}

    def get_memory_detail(self, filepath: str) -> Dict[str, Any]:
        """Fetch the full record of a single memory file."""
        try:
            resp = requests.get(
                f"{self.server_url}/memory/detail",
                params={"filepath": filepath},
                timeout=30,
            )
            resp.raise_for_status()
            return resp.json()
        except requests.RequestException as e:
            return {"success": False, "error": f"Server error: {e}"}

    def chat_with_memory(self, message: str, model: str = None) -> Dict[str, Any]:
        """Chat with the server, letting it pull in relevant memories."""
        try:
            body = {"message": message}
            if model:
                body["model"] = model

            resp = requests.post(
                f"{self.server_url}/chat",
                json=body,
                timeout=30,
            )
            resp.raise_for_status()
            return resp.json()
        except requests.RequestException as e:
            return {"success": False, "error": f"Server error: {e}"}
|
||||
|
||||
def main():
    """Command-line interface for MemoryClient.

    Subcommands: import <file>, search <query>, list, detail <filepath>,
    chat <message>. Exits with status 1 on bad usage or unexpected errors.
    """
    if len(sys.argv) < 2:
        print("Usage:")
        print("  python memory_client.py import <chatgpt_export.json>")
        print("  python memory_client.py search <query>")
        print("  python memory_client.py list")
        print("  python memory_client.py detail <filepath>")
        print("  python memory_client.py chat <message>")
        sys.exit(1)

    client = MemoryClient()
    command = sys.argv[1]

    try:
        if command == "import" and len(sys.argv) == 3:
            # Import a ChatGPT export (single conversation or a list).
            filepath = sys.argv[2]
            print(f"🔄 Importing ChatGPT conversations from {filepath}...")
            result = client.import_chatgpt_file(filepath)

            if result.get("success"):
                # Batch imports report counts; single imports report the path.
                if "imported_count" in result:
                    print(f"✅ Imported {result['imported_count']}/{result['total_count']} conversations")
                else:
                    print("✅ Conversation imported successfully")
                    print(f"📁 Saved to: {result.get('filepath', 'Unknown')}")
            else:
                print(f"❌ Import failed: {result.get('error')}")

        elif command == "search" and len(sys.argv) == 3:
            query = sys.argv[2]
            print(f"🔍 Searching for: {query}")
            result = client.search_memories(query)

            if result.get("success"):
                memories = result.get("results", [])
                print(f"📚 Found {len(memories)} memories:")
                for memory in memories:
                    print(f"  • {memory.get('title', 'Untitled')}")
                    print(f"    Summary: {memory.get('summary', 'No summary')}")
                    print(f"    Messages: {memory.get('message_count', 0)}")
                    print()
            else:
                print(f"❌ Search failed: {result.get('error')}")

        elif command == "list":
            print("📋 Listing all memories...")
            result = client.list_memories()

            if result.get("success"):
                memories = result.get("memories", [])
                print(f"📚 Total memories: {len(memories)}")
                for memory in memories:
                    print(f"  • {memory.get('title', 'Untitled')}")
                    print(f"    Source: {memory.get('source', 'Unknown')}")
                    print(f"    Messages: {memory.get('message_count', 0)}")
                    print(f"    Imported: {memory.get('import_time', 'Unknown')}")
                    print()
            else:
                print(f"❌ List failed: {result.get('error')}")

        elif command == "detail" and len(sys.argv) == 3:
            filepath = sys.argv[2]
            print(f"📄 Getting details for: {filepath}")
            result = client.get_memory_detail(filepath)

            if result.get("success"):
                memory = result.get("memory", {})
                print(f"Title: {memory.get('title', 'Untitled')}")
                print(f"Source: {memory.get('source', 'Unknown')}")
                print(f"Summary: {memory.get('summary', 'No summary')}")
                print(f"Messages: {len(memory.get('messages', []))}")
                print()
                print("Recent messages:")
                # Show only the first five messages, truncated to 100 chars.
                for msg in memory.get('messages', [])[:5]:
                    role = msg.get('role', 'unknown')
                    content = msg.get('content', '')[:100]
                    print(f"  {role}: {content}...")
            else:
                print(f"❌ Detail failed: {result.get('error')}")

        elif command == "chat" and len(sys.argv) == 3:
            message = sys.argv[2]
            print(f"💬 Chatting with memory: {message}")
            result = client.chat_with_memory(message)

            if result.get("success"):
                print(f"🤖 Response: {result.get('response')}")
                print(f"📚 Memories used: {result.get('memories_used', 0)}")
            else:
                print(f"❌ Chat failed: {result.get('error')}")

        else:
            print("❌ Invalid command or arguments")
            sys.exit(1)

    except Exception as e:
        print(f"❌ Error: {e}")
        sys.exit(1)


if __name__ == "__main__":
    main()
|
8
rust/mcp/requirements.txt
Normal file
8
rust/mcp/requirements.txt
Normal file
@ -0,0 +1,8 @@
|
||||
# requirements.txt
|
||||
fastapi>=0.104.0
|
||||
uvicorn[standard]>=0.24.0
|
||||
pydantic>=2.5.0
|
||||
requests>=2.31.0
|
||||
python-multipart>=0.0.6
|
||||
aiohttp
|
||||
# NOTE: asyncio is part of the Python standard library; the PyPI "asyncio"
# package is an obsolete Python 3.3 backport and must not be installed.
|
703
rust/mcp/server.py
Normal file
703
rust/mcp/server.py
Normal file
@ -0,0 +1,703 @@
|
||||
# mcp/server.py
|
||||
"""
|
||||
Enhanced MCP Server with AI Memory Processing for aigpt CLI
|
||||
"""
|
||||
import json
|
||||
import os
|
||||
import hashlib
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
from typing import List, Dict, Any, Optional
|
||||
from fastapi import FastAPI, HTTPException
|
||||
from pydantic import BaseModel
|
||||
import uvicorn
|
||||
import asyncio
|
||||
import aiohttp
|
||||
|
||||
# データモデル
|
||||
class ChatMessage(BaseModel):
    """Request payload: one chat message plus an optional model override."""
    message: str
    # Optional model name; None presumably means the server default — the
    # handler is not visible in this chunk.
    model: Optional[str] = None
|
||||
|
||||
class MemoryQuery(BaseModel):
    """Request payload for memory search."""
    query: str
    # Maximum number of results to return (defaults to 10).
    limit: Optional[int] = 10
|
||||
|
||||
class ConversationImport(BaseModel):
    """Request payload wrapping one exported conversation object."""
    conversation_data: Dict[str, Any]
|
||||
|
||||
class MemorySummaryRequest(BaseModel):
    """Request payload asking for an AI summary of a stored memory file."""
    filepath: str
    # Which AI backend to use for the summary; see AIMemoryProcessor.
    ai_provider: Optional[str] = "openai"
|
||||
|
||||
class RelationshipUpdate(BaseModel):
    """Request payload recording one interaction for relationship tracking."""
    target: str  # person or topic the interaction concerns
    interaction_type: str  # "positive", "negative", "neutral"
    # Relative strength of this interaction (clients send 1.0–2.0).
    weight: float = 1.0
    context: Optional[str] = None
|
||||
|
||||
# Storage layout — everything lives under ~/.config/aigpt.
BASE_DIR = Path.home() / ".config" / "aigpt"
MEMORY_DIR = BASE_DIR / "memory"
CHATGPT_MEMORY_DIR = MEMORY_DIR / "chatgpt"        # imported ChatGPT exports (presumably)
PROCESSED_MEMORY_DIR = MEMORY_DIR / "processed"    # post-processed memories (presumably)
RELATIONSHIP_DIR = BASE_DIR / "relationships"      # relationship-tracking data
|
||||
|
||||
def init_directories():
    """Create the server's full storage tree if it does not exist.

    Idempotent: every mkdir uses parents=True and exist_ok=True.
    """
    BASE_DIR.mkdir(parents=True, exist_ok=True)
    MEMORY_DIR.mkdir(parents=True, exist_ok=True)
    CHATGPT_MEMORY_DIR.mkdir(parents=True, exist_ok=True)
    PROCESSED_MEMORY_DIR.mkdir(parents=True, exist_ok=True)
    RELATIONSHIP_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
class AIMemoryProcessor:
|
||||
"""AI記憶処理クラス"""
|
||||
|
||||
    def __init__(self):
        # API credentials come from the environment; either may be None,
        # in which case generate_ai_summary falls back to basic analysis.
        self.openai_api_key = os.getenv("OPENAI_API_KEY")
        self.anthropic_api_key = os.getenv("ANTHROPIC_API_KEY")
|
||||
|
||||
async def generate_ai_summary(self, messages: List[Dict[str, Any]], provider: str = "openai") -> Dict[str, Any]:
|
||||
"""AIを使用して会話の高度な要約と分析を生成"""
|
||||
|
||||
# 会話内容を結合
|
||||
conversation_text = ""
|
||||
for msg in messages[-20:]: # 最新20メッセージを使用
|
||||
role = "User" if msg["role"] == "user" else "Assistant"
|
||||
conversation_text += f"{role}: {msg['content'][:500]}\n"
|
||||
|
||||
# プロンプトを構築
|
||||
analysis_prompt = f"""
|
||||
以下の会話を分析し、JSON形式で以下の情報を抽出してください:
|
||||
|
||||
1. main_topics: 主なトピック(最大5個)
|
||||
2. user_intent: ユーザーの意図や目的
|
||||
3. key_insights: 重要な洞察や学び(最大3個)
|
||||
4. relationship_indicators: 関係性を示す要素
|
||||
5. emotional_tone: 感情的なトーン
|
||||
6. action_items: アクションアイテムや次のステップ
|
||||
7. summary: 100文字以内の要約
|
||||
|
||||
会話内容:
|
||||
{conversation_text}
|
||||
|
||||
回答はJSON形式のみで返してください。
|
||||
"""
|
||||
|
||||
try:
|
||||
if provider == "openai" and self.openai_api_key:
|
||||
return await self._call_openai_api(analysis_prompt)
|
||||
elif provider == "anthropic" and self.anthropic_api_key:
|
||||
return await self._call_anthropic_api(analysis_prompt)
|
||||
else:
|
||||
# フォールバック:基本的な分析
|
||||
return self._generate_basic_analysis(messages)
|
||||
except Exception as e:
|
||||
print(f"AI analysis failed: {e}")
|
||||
return self._generate_basic_analysis(messages)
|
||||
|
||||
async def _call_openai_api(self, prompt: str) -> Dict[str, Any]:
|
||||
"""OpenAI APIを呼び出し"""
|
||||
async with aiohttp.ClientSession() as session:
|
||||
headers = {
|
||||
"Authorization": f"Bearer {self.openai_api_key}",
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
data = {
|
||||
"model": "gpt-4",
|
||||
"messages": [{"role": "user", "content": prompt}],
|
||||
"temperature": 0.3,
|
||||
"max_tokens": 1000
|
||||
}
|
||||
|
||||
async with session.post("https://api.openai.com/v1/chat/completions",
|
||||
headers=headers, json=data) as response:
|
||||
result = await response.json()
|
||||
content = result["choices"][0]["message"]["content"]
|
||||
return json.loads(content)
|
||||
|
||||
async def _call_anthropic_api(self, prompt: str) -> Dict[str, Any]:
|
||||
"""Anthropic APIを呼び出し"""
|
||||
async with aiohttp.ClientSession() as session:
|
||||
headers = {
|
||||
"x-api-key": self.anthropic_api_key,
|
||||
"Content-Type": "application/json",
|
||||
"anthropic-version": "2023-06-01"
|
||||
}
|
||||
data = {
|
||||
"model": "claude-3-sonnet-20240229",
|
||||
"max_tokens": 1000,
|
||||
"messages": [{"role": "user", "content": prompt}]
|
||||
}
|
||||
|
||||
async with session.post("https://api.anthropic.com/v1/messages",
|
||||
headers=headers, json=data) as response:
|
||||
result = await response.json()
|
||||
content = result["content"][0]["text"]
|
||||
return json.loads(content)
|
||||
|
||||
def _generate_basic_analysis(self, messages: List[Dict[str, Any]]) -> Dict[str, Any]:
|
||||
"""基本的な分析(AI APIが利用できない場合のフォールバック)"""
|
||||
user_messages = [msg for msg in messages if msg["role"] == "user"]
|
||||
assistant_messages = [msg for msg in messages if msg["role"] == "assistant"]
|
||||
|
||||
# キーワード抽出(簡易版)
|
||||
all_text = " ".join([msg["content"] for msg in messages])
|
||||
words = all_text.lower().split()
|
||||
word_freq = {}
|
||||
for word in words:
|
||||
if len(word) > 3:
|
||||
word_freq[word] = word_freq.get(word, 0) + 1
|
||||
|
||||
top_words = sorted(word_freq.items(), key=lambda x: x[1], reverse=True)[:5]
|
||||
|
||||
return {
|
||||
"main_topics": [word[0] for word in top_words],
|
||||
"user_intent": "情報収集・問題解決",
|
||||
"key_insights": ["基本的な会話分析"],
|
||||
"relationship_indicators": {
|
||||
"interaction_count": len(messages),
|
||||
"user_engagement": len(user_messages),
|
||||
"assistant_helpfulness": len(assistant_messages)
|
||||
},
|
||||
"emotional_tone": "neutral",
|
||||
"action_items": [],
|
||||
"summary": f"{len(user_messages)}回のやり取りによる会話"
|
||||
}
|
||||
|
||||
class RelationshipTracker:
    """Tracks a signed relationship score per target, with time decay.

    Scores are clamped to [-100, 100], decay by 5% per 7 elapsed days, and
    are persisted as JSON under ``RELATIONSHIP_DIR``.
    """

    def __init__(self):
        init_directories()
        self.relationship_file = RELATIONSHIP_DIR / "relationships.json"
        self.relationships = self._load_relationships()

    def _load_relationships(self) -> Dict[str, Any]:
        """Load persisted relationship data, or return an empty structure."""
        if self.relationship_file.exists():
            with open(self.relationship_file, 'r', encoding='utf-8') as f:
                return json.load(f)
        return {"targets": {}, "last_updated": datetime.now().isoformat()}

    def _save_relationships(self):
        """Persist relationship data (refreshing the last_updated stamp)."""
        self.relationships["last_updated"] = datetime.now().isoformat()
        with open(self.relationship_file, 'w', encoding='utf-8') as f:
            json.dump(self.relationships, f, ensure_ascii=False, indent=2)

    def update_relationship(self, target: str, interaction_type: str, weight: float = 1.0, context: Optional[str] = None) -> float:
        """Apply one interaction to *target*, persist, and return the new score.

        Args:
            target: person/topic identifier.
            interaction_type: "positive" adds weight, "negative" subtracts it;
                any other value (e.g. "neutral") leaves the score unchanged.
            weight: magnitude of the score change.
            context: optional note stored with the interaction record.
        """
        targets = self.relationships["targets"]
        if target not in targets:
            targets[target] = {
                "score": 0.0,
                "interactions": [],
                "created_at": datetime.now().isoformat(),
                "last_interaction": None
            }

        # Compute the signed score delta.
        score_change = 0.0
        if interaction_type == "positive":
            score_change = weight * 1.0
        elif interaction_type == "negative":
            score_change = weight * -1.0

        # Decay the stored score for the time elapsed before this interaction.
        self._apply_time_decay(target)

        # Update and clamp the score to [-100, 100].
        new_score = max(-100, min(100, targets[target]["score"] + score_change))

        now_iso = datetime.now().isoformat()
        targets[target]["score"] = new_score
        targets[target]["last_interaction"] = now_iso

        # Append to the interaction history (most recent 100 entries kept).
        targets[target]["interactions"].append({
            "type": interaction_type,
            "weight": weight,
            "score_change": score_change,
            "new_score": new_score,
            "timestamp": now_iso,
            "context": context
        })
        if len(targets[target]["interactions"]) > 100:
            targets[target]["interactions"] = targets[target]["interactions"][-100:]

        self._save_relationships()
        return new_score

    def _apply_time_decay(self, target: str):
        """Decay *target*'s score by 5% per 7 days of inactivity.

        Bug fix: the original always decayed from ``last_interaction``, so
        every read (``get_relationship_score`` / ``get_all_relationships``)
        re-applied decay for the same elapsed period, compounding it. A
        per-target ``last_decay`` marker — falling back to
        ``last_interaction`` for data written by older versions — makes the
        decay idempotent over a given period. The marker advances only when
        decay is actually applied, so sub-day elapsed time keeps accruing.
        """
        target_data = self.relationships["targets"][target]
        anchor = target_data.get("last_decay") or target_data.get("last_interaction")

        if anchor:
            now = datetime.now()
            days_passed = (now - datetime.fromisoformat(anchor)).days

            if days_passed > 0:
                decay_factor = 0.95 ** (days_passed / 7)
                target_data["score"] *= decay_factor
                # Decay is now accounted for up to this moment.
                target_data["last_decay"] = now.isoformat()

    def get_relationship_score(self, target: str) -> float:
        """Return the current (decayed) score; 0.0 for unknown targets."""
        if target in self.relationships["targets"]:
            self._apply_time_decay(target)
            return self.relationships["targets"][target]["score"]
        return 0.0

    def should_send_message(self, target: str, threshold: float = 50.0) -> bool:
        """True when *target*'s score meets or exceeds *threshold*."""
        score = self.get_relationship_score(target)
        return score >= threshold

    def get_all_relationships(self) -> Dict[str, Any]:
        """Return all relationship data after applying decay to every target."""
        for target in self.relationships["targets"]:
            self._apply_time_decay(target)

        return self.relationships
|
||||
|
||||
class MemoryManager:
    """Stores, analyses, lists and searches imported conversations.

    Raw imports live in ``CHATGPT_MEMORY_DIR``; AI-enriched copies are
    mirrored into ``PROCESSED_MEMORY_DIR`` under the same filename. Long
    conversations also feed the ``RelationshipTracker``.
    """

    def __init__(self):
        init_directories()
        self.ai_processor = AIMemoryProcessor()
        self.relationship_tracker = RelationshipTracker()

    def parse_chatgpt_conversation(self, conversation_data: Dict[str, Any]) -> List[Dict[str, Any]]:
        """Extract user/assistant messages, in chronological order, from a ChatGPT export."""
        messages = []
        mapping = conversation_data.get("mapping", {})

        # Collect the text-bearing nodes of the conversation graph.
        message_nodes = []
        for node_id, node in mapping.items():
            message = node.get("message")
            if not message:
                continue
            content = message.get("content", {})
            parts = content.get("parts", [])

            if parts and isinstance(parts[0], str) and parts[0].strip():
                message_nodes.append({
                    "id": node_id,
                    "create_time": message.get("create_time", 0),
                    "author_role": message["author"]["role"],
                    "content": parts[0],
                    "parent": node.get("parent")
                })

        # Sort chronologically; a missing/None create_time sorts first.
        message_nodes.sort(key=lambda x: x["create_time"] or 0)

        for msg in message_nodes:
            if msg["author_role"] in ["user", "assistant"]:
                messages.append({
                    "role": msg["author_role"],
                    "content": msg["content"],
                    "timestamp": msg["create_time"],
                    "message_id": msg["id"]
                })

        return messages

    async def save_chatgpt_memory(self, conversation_data: Dict[str, Any], process_with_ai: bool = True) -> str:
        """Save a ChatGPT conversation as a memory file; return its path.

        Raises ValueError when the conversation holds no usable messages.
        When *process_with_ai* is true, an AI analysis is attached and the
        file is mirrored into ``PROCESSED_MEMORY_DIR``.
        """
        title = conversation_data.get("title", "untitled")
        # Guard against an explicit "create_time": null in the export, which
        # the plain .get(..., default) form would pass through to
        # datetime.fromtimestamp and crash on.
        create_time = conversation_data.get("create_time") or datetime.now().timestamp()

        messages = self.parse_chatgpt_conversation(conversation_data)

        if not messages:
            raise ValueError("No valid messages found in conversation")

        # Optional AI analysis; failure degrades to None rather than aborting.
        ai_analysis = None
        if process_with_ai:
            try:
                ai_analysis = await self.ai_processor.generate_ai_summary(messages)
            except Exception as e:
                print(f"AI analysis failed: {e}")

        basic_summary = self.generate_basic_summary(messages)

        memory_data = {
            "title": title,
            "source": "chatgpt",
            "import_time": datetime.now().isoformat(),
            "original_create_time": create_time,
            "messages": messages,
            "basic_summary": basic_summary,
            "ai_analysis": ai_analysis,
            "message_count": len(messages),
            "hash": self._generate_content_hash(messages)
        }

        # Long conversations count as a positive relationship signal.
        if ai_analysis and "relationship_indicators" in ai_analysis:
            interaction_count = ai_analysis["relationship_indicators"].get("interaction_count", 0)
            if interaction_count > 10:
                self.relationship_tracker.update_relationship(
                    target="user_general",
                    interaction_type="positive",
                    weight=min(interaction_count / 10, 5.0),
                    context=f"Long conversation: {title}"
                )

        # Build a filesystem-safe filename from timestamp + sanitized title.
        safe_title = "".join(c for c in title if c.isalnum() or c in (' ', '-', '_')).rstrip()
        timestamp = datetime.fromtimestamp(create_time).strftime("%Y%m%d_%H%M%S")
        filename = f"{timestamp}_{safe_title[:50]}.json"

        filepath = CHATGPT_MEMORY_DIR / filename
        with open(filepath, 'w', encoding='utf-8') as f:
            json.dump(memory_data, f, ensure_ascii=False, indent=2)

        # Mirror AI-processed memories into the processed directory.
        if ai_analysis:
            processed_filepath = PROCESSED_MEMORY_DIR / filename
            with open(processed_filepath, 'w', encoding='utf-8') as f:
                json.dump(memory_data, f, ensure_ascii=False, indent=2)

        return str(filepath)

    def generate_basic_summary(self, messages: List[Dict[str, Any]]) -> str:
        """Return a one-line statistical summary of *messages*."""
        if not messages:
            return "Empty conversation"

        user_messages = [msg for msg in messages if msg["role"] == "user"]
        assistant_messages = [msg for msg in messages if msg["role"] == "assistant"]

        summary = f"Conversation with {len(user_messages)} user messages and {len(assistant_messages)} assistant responses. "

        if user_messages:
            first_user_msg = user_messages[0]["content"][:100]
            summary += f"Started with: {first_user_msg}..."

        return summary

    def _generate_content_hash(self, messages: List[Dict[str, Any]]) -> str:
        """Return a 16-hex-char SHA-256 digest of the concatenated message texts."""
        content = "".join([msg["content"] for msg in messages])
        return hashlib.sha256(content.encode()).hexdigest()[:16]

    def search_memories(self, query: str, limit: int = 10, use_ai_analysis: bool = True) -> List[Dict[str, Any]]:
        """Case-insensitive substring search over stored memories.

        Searches titles, summaries, AI topics/insights and message bodies.

        Bug fix: AI-processed conversations exist in both the processed and
        raw directories under the same filename; the original scanned both
        and returned duplicates. Filenames already read are now skipped
        (the processed directory is scanned first, so its richer copy wins).
        """
        results = []
        seen_names = set()

        search_dirs = [PROCESSED_MEMORY_DIR, CHATGPT_MEMORY_DIR] if use_ai_analysis else [CHATGPT_MEMORY_DIR]

        for search_dir in search_dirs:
            for filepath in search_dir.glob("*.json"):
                if filepath.name in seen_names:
                    continue
                try:
                    with open(filepath, 'r', encoding='utf-8') as f:
                        memory_data = json.load(f)
                    seen_names.add(filepath.name)

                    # Build the searchable text blob.
                    search_text = f"{memory_data.get('title', '')} {memory_data.get('basic_summary', '')}"

                    if memory_data.get('ai_analysis'):
                        ai_analysis = memory_data['ai_analysis']
                        search_text += f" {' '.join(ai_analysis.get('main_topics', []))}"
                        search_text += f" {ai_analysis.get('summary', '')}"
                        search_text += f" {' '.join(ai_analysis.get('key_insights', []))}"

                    for msg in memory_data.get('messages', []):
                        search_text += f" {msg.get('content', '')}"

                    if query.lower() in search_text.lower():
                        result = {
                            "filepath": str(filepath),
                            "title": memory_data.get("title"),
                            "basic_summary": memory_data.get("basic_summary"),
                            "source": memory_data.get("source"),
                            "import_time": memory_data.get("import_time"),
                            "message_count": len(memory_data.get("messages", [])),
                            "has_ai_analysis": bool(memory_data.get("ai_analysis"))
                        }

                        if memory_data.get('ai_analysis'):
                            result["ai_summary"] = memory_data['ai_analysis'].get('summary', '')
                            result["main_topics"] = memory_data['ai_analysis'].get('main_topics', [])

                        results.append(result)

                        if len(results) >= limit:
                            break

                except Exception as e:
                    print(f"Error reading memory file {filepath}: {e}")
                    continue

            if len(results) >= limit:
                break

        return results

    def get_memory_detail(self, filepath: str) -> Dict[str, Any]:
        """Load and return one memory file; raises ValueError on any read error."""
        try:
            with open(filepath, 'r', encoding='utf-8') as f:
                return json.load(f)
        except Exception as e:
            raise ValueError(f"Error reading memory file: {e}")

    def list_all_memories(self) -> List[Dict[str, Any]]:
        """Return summary records for every raw memory, newest import first."""
        memories = []

        for filepath in CHATGPT_MEMORY_DIR.glob("*.json"):
            try:
                with open(filepath, 'r', encoding='utf-8') as f:
                    memory_data = json.load(f)

                memory_info = {
                    "filepath": str(filepath),
                    "title": memory_data.get("title"),
                    "basic_summary": memory_data.get("basic_summary"),
                    "source": memory_data.get("source"),
                    "import_time": memory_data.get("import_time"),
                    "message_count": len(memory_data.get("messages", [])),
                    "has_ai_analysis": bool(memory_data.get("ai_analysis"))
                }

                if memory_data.get('ai_analysis'):
                    memory_info["ai_summary"] = memory_data['ai_analysis'].get('summary', '')
                    memory_info["main_topics"] = memory_data['ai_analysis'].get('main_topics', [])

                memories.append(memory_info)
            except Exception as e:
                print(f"Error reading memory file {filepath}: {e}")
                continue

        memories.sort(key=lambda x: x.get("import_time", ""), reverse=True)
        return memories
|
||||
|
||||
# FastAPI application
app = FastAPI(title="AigptMCP Server with AI Memory", version="2.0.0")
# Single module-level manager instance shared by every endpoint.
memory_manager = MemoryManager()
|
||||
|
||||
@app.post("/memory/import/chatgpt")
|
||||
async def import_chatgpt_conversation(data: ConversationImport, process_with_ai: bool = True):
|
||||
"""ChatGPTの会話をインポート(AI処理オプション付き)"""
|
||||
try:
|
||||
filepath = await memory_manager.save_chatgpt_memory(data.conversation_data, process_with_ai)
|
||||
return {
|
||||
"success": True,
|
||||
"message": "Conversation imported successfully",
|
||||
"filepath": filepath,
|
||||
"ai_processed": process_with_ai
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
@app.post("/memory/process-ai")
|
||||
async def process_memory_with_ai(data: MemorySummaryRequest):
|
||||
"""既存の記憶をAIで再処理"""
|
||||
try:
|
||||
# 既存記憶を読み込み
|
||||
memory_data = memory_manager.get_memory_detail(data.filepath)
|
||||
|
||||
# AI分析を実行
|
||||
ai_analysis = await memory_manager.ai_processor.generate_ai_summary(
|
||||
memory_data["messages"],
|
||||
data.ai_provider
|
||||
)
|
||||
|
||||
# データを更新
|
||||
memory_data["ai_analysis"] = ai_analysis
|
||||
memory_data["ai_processed_at"] = datetime.now().isoformat()
|
||||
|
||||
# ファイルを更新
|
||||
with open(data.filepath, 'w', encoding='utf-8') as f:
|
||||
json.dump(memory_data, f, ensure_ascii=False, indent=2)
|
||||
|
||||
# 処理済みディレクトリにもコピー
|
||||
processed_filepath = PROCESSED_MEMORY_DIR / Path(data.filepath).name
|
||||
with open(processed_filepath, 'w', encoding='utf-8') as f:
|
||||
json.dump(memory_data, f, ensure_ascii=False, indent=2)
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"message": "Memory processed with AI successfully",
|
||||
"ai_analysis": ai_analysis
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@app.post("/memory/search")
|
||||
async def search_memories(query: MemoryQuery):
|
||||
"""記憶を検索"""
|
||||
try:
|
||||
results = memory_manager.search_memories(query.query, query.limit)
|
||||
return {
|
||||
"success": True,
|
||||
"results": results,
|
||||
"count": len(results)
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@app.get("/memory/list")
|
||||
async def list_memories():
|
||||
"""すべての記憶をリスト"""
|
||||
try:
|
||||
memories = memory_manager.list_all_memories()
|
||||
return {
|
||||
"success": True,
|
||||
"memories": memories,
|
||||
"count": len(memories)
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@app.get("/memory/detail")
|
||||
async def get_memory_detail(filepath: str):
|
||||
"""記憶の詳細を取得"""
|
||||
try:
|
||||
detail = memory_manager.get_memory_detail(filepath)
|
||||
return {
|
||||
"success": True,
|
||||
"memory": detail
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=404, detail=str(e))
|
||||
|
||||
@app.post("/relationship/update")
|
||||
async def update_relationship(data: RelationshipUpdate):
|
||||
"""関係性を更新"""
|
||||
try:
|
||||
new_score = memory_manager.relationship_tracker.update_relationship(
|
||||
data.target, data.interaction_type, data.weight, data.context
|
||||
)
|
||||
return {
|
||||
"success": True,
|
||||
"new_score": new_score,
|
||||
"can_send_message": memory_manager.relationship_tracker.should_send_message(data.target)
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@app.get("/relationship/list")
|
||||
async def list_relationships():
|
||||
"""すべての関係性をリスト"""
|
||||
try:
|
||||
relationships = memory_manager.relationship_tracker.get_all_relationships()
|
||||
return {
|
||||
"success": True,
|
||||
"relationships": relationships
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@app.get("/relationship/check")
|
||||
async def check_send_permission(target: str, threshold: float = 50.0):
|
||||
"""メッセージ送信可否をチェック"""
|
||||
try:
|
||||
score = memory_manager.relationship_tracker.get_relationship_score(target)
|
||||
can_send = memory_manager.relationship_tracker.should_send_message(target, threshold)
|
||||
return {
|
||||
"success": True,
|
||||
"target": target,
|
||||
"score": score,
|
||||
"can_send_message": can_send,
|
||||
"threshold": threshold
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@app.post("/chat")
|
||||
async def chat_endpoint(data: ChatMessage):
|
||||
"""チャット機能(記憶と関係性を活用)"""
|
||||
try:
|
||||
# 関連する記憶を検索
|
||||
memories = memory_manager.search_memories(data.message, limit=3)
|
||||
|
||||
# メモリのコンテキストを構築
|
||||
memory_context = ""
|
||||
if memories:
|
||||
memory_context = "\n# Related memories:\n"
|
||||
for memory in memories:
|
||||
memory_context += f"- {memory['title']}: {memory.get('ai_summary', memory.get('basic_summary', ''))}\n"
|
||||
if memory.get('main_topics'):
|
||||
memory_context += f" Topics: {', '.join(memory['main_topics'])}\n"
|
||||
|
||||
# 関係性情報を取得
|
||||
relationships = memory_manager.relationship_tracker.get_all_relationships()
|
||||
|
||||
# 実際のチャット処理
|
||||
enhanced_message = data.message
|
||||
if memory_context:
|
||||
enhanced_message = f"{data.message}\n\n{memory_context}"
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"response": f"Enhanced response with memory context: {enhanced_message}",
|
||||
"memories_used": len(memories),
|
||||
"relationship_info": {
|
||||
"active_relationships": len(relationships.get("targets", {})),
|
||||
"can_initiate_conversations": sum(1 for target, data in relationships.get("targets", {}).items()
|
||||
if memory_manager.relationship_tracker.should_send_message(target))
|
||||
}
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@app.get("/")
|
||||
async def root():
|
||||
"""ヘルスチェック"""
|
||||
return {
|
||||
"service": "AigptMCP Server with AI Memory",
|
||||
"version": "2.0.0",
|
||||
"status": "running",
|
||||
"memory_dir": str(MEMORY_DIR),
|
||||
"features": [
|
||||
"AI-powered memory analysis",
|
||||
"Relationship tracking",
|
||||
"Advanced memory search",
|
||||
"Conversation import",
|
||||
"Auto-summary generation"
|
||||
],
|
||||
"endpoints": [
|
||||
"/memory/import/chatgpt",
|
||||
"/memory/process-ai",
|
||||
"/memory/search",
|
||||
"/memory/list",
|
||||
"/memory/detail",
|
||||
"/relationship/update",
|
||||
"/relationship/list",
|
||||
"/relationship/check",
|
||||
"/chat"
|
||||
]
|
||||
}
|
||||
|
||||
if __name__ == "__main__":
|
||||
print("🚀 AigptMCP Server with AI Memory starting...")
|
||||
print(f"📁 Memory directory: {MEMORY_DIR}")
|
||||
print(f"🧠 AI Memory processing: {'✅ Enabled' if os.getenv('OPENAI_API_KEY') or os.getenv('ANTHROPIC_API_KEY') else '❌ Disabled (no API keys)'}")
|
||||
uvicorn.run(app, host="127.0.0.1", port=5000)
|
Reference in New Issue
Block a user