Implement AI memory system with psychological priority scoring

Core changes:
- Add AI interpreter module for content interpretation and priority scoring (interface sketch after this list)
- Extend Memory struct with interpreted_content, priority_score (f32: 0.0-1.0), and user_context
- Implement automatic memory pruning based on priority scores
- Add capacity management (default: 100 memories max)
- Create comprehensive design documentation
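
The AI interpreter module itself is not part of the excerpt below; the sketch here is
inferred purely from its call sites in MemoryManager (AIInterpreter::new() and an async
analyze(content, user_context) returning the interpreted text plus a 0.0-1.0 score). The
stub scoring logic and the use of anyhow::Result are assumptions, not the module's
actual implementation:

    // Hypothetical sketch of the ai_interpreter module, inferred from its call sites.
    // The real implementation would likely call an LLM rather than the stub below.
    use anyhow::Result;

    pub struct AIInterpreter;

    impl AIInterpreter {
        pub fn new() -> Self {
            AIInterpreter
        }

        /// Interpret raw content and return (interpreted_content, priority_score in 0.0-1.0).
        pub async fn analyze(
            &self,
            content: &str,
            user_context: Option<&str>,
        ) -> Result<(String, f32)> {
            // Placeholder: echo the content and return a neutral score. A real scorer
            // would weigh emotional impact, user relevance, novelty, and utility.
            let interpreted = match user_context {
                Some(ctx) => format!("[context: {ctx}] {content}"),
                None => content.to_string(),
            };
            Ok((interpreted, 0.5))
        }
    }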

Technical details:
- Change priority_score from u8 (1-100) to f32 (0.0-1.0) for better AI compatibility
- Add create_memory_with_ai() method for AI-enhanced memory creation (usage sketch after this list)
- Implement get_memories_by_priority() for priority-based sorting
- Score evaluation criteria: emotional impact, user relevance, novelty, utility
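
A minimal usage sketch of the new API (the constructor name, surrounding async context,
and sample strings below are assumptions for illustration only):

    // Hypothetical usage; assumes an async runtime such as tokio and a fallible constructor.
    let mut manager = MemoryManager::new()?; // constructor name assumed from context

    // AI-enhanced creation: content is interpreted and scored before storage.
    let id = manager
        .create_memory_with_ai("User mentioned they love hiking", Some("user: alice"))
        .await?;

    // Highest-priority memories come first; low-priority ones are pruned automatically
    // once the store exceeds max_memories (default 100).
    for memory in manager.get_memories_by_priority() {
        println!("{:.2} {}", memory.priority_score, memory.interpreted_content);
    }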

Philosophy:
This implements a "psychological priority memory system" in which the AI interprets
and evaluates memories rather than storing raw content verbatim. It is inspired by
how human memory works: interpreting and prioritizing rather than recording perfectly.
Author: Claude
Date: 2025-11-05 14:09:39 +00:00
commit fd97ba2d81 (parent 62b91e5e5a)
5 changed files with 378 additions and 7 deletions


@@ -4,11 +4,15 @@ use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
use uuid::Uuid;
use crate::ai_interpreter::AIInterpreter;
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Memory {
pub id: String,
pub content: String,
pub interpreted_content: String, // content after AI interpretation
pub priority_score: f32, // psychological priority score (0.0-1.0)
pub user_context: Option<String>, // user-specific context
pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
}
@@ -67,6 +71,9 @@ pub struct MemoryManager {
memories: HashMap<String, Memory>,
conversations: HashMap<String, Conversation>,
data_file: PathBuf,
max_memories: usize, // maximum number of stored memories
min_priority_score: f32, // minimum priority score (0.0-1.0)
ai_interpreter: AIInterpreter, // AI interpretation engine
}
impl MemoryManager {
@@ -91,23 +98,68 @@ impl MemoryManager {
memories,
conversations,
data_file,
max_memories: 100, // default: 100 memories
min_priority_score: 0.3, // default: 0.3 or higher
ai_interpreter: AIInterpreter::new(),
})
}
pub fn create_memory(&mut self, content: &str) -> Result<String> {
let id = Uuid::new_v4().to_string();
let now = Utc::now();
let memory = Memory {
id: id.clone(),
content: content.to_string(),
interpreted_content: content.to_string(), // AI interpretation to be added later
priority_score: 0.5, // psychological scoring to be added later
user_context: None,
created_at: now,
updated_at: now,
};
self.memories.insert(id.clone(), memory);
// enforce the capacity limit
self.prune_memories_if_needed()?;
self.save_data()?;
Ok(id)
}
/// Create a memory using AI interpretation and psychological scoring
pub async fn create_memory_with_ai(
&mut self,
content: &str,
user_context: Option<&str>,
) -> Result<String> {
let id = Uuid::new_v4().to_string();
let now = Utc::now();
// run AI interpretation and psychological scoring
let (interpreted_content, priority_score) = self
.ai_interpreter
.analyze(content, user_context)
.await?;
let memory = Memory {
id: id.clone(),
content: content.to_string(),
interpreted_content,
priority_score,
user_context: user_context.map(|s| s.to_string()),
created_at: now,
updated_at: now,
};
self.memories.insert(id.clone(), memory);
// enforce the capacity limit
self.prune_memories_if_needed()?;
self.save_data()?;
Ok(id)
}
@@ -131,6 +183,34 @@ impl MemoryManager {
}
}
// Capacity limit: evict the lowest-priority memories first
fn prune_memories_if_needed(&mut self) -> Result<()> {
if self.memories.len() <= self.max_memories {
return Ok(());
}
// sort by priority and remove the lowest-scoring entries
let mut sorted_memories: Vec<_> = self.memories.iter()
.map(|(id, mem)| (id.clone(), mem.priority_score))
.collect();
sorted_memories.sort_by(|a, b| a.1.total_cmp(&b.1)); // f32 has no Ord, so use total_cmp
let to_remove = self.memories.len() - self.max_memories;
for (id, _) in sorted_memories.iter().take(to_remove) {
self.memories.remove(id);
}
Ok(())
}
// Get memories sorted by priority, highest first
pub fn get_memories_by_priority(&self) -> Vec<&Memory> {
let mut memories: Vec<_> = self.memories.values().collect();
memories.sort_by(|a, b| b.priority_score.total_cmp(&a.priority_score)); // descending; f32 needs total_cmp
memories
}
pub fn search_memories(&self, query: &str) -> Vec<&Memory> {
let query_lower = query.to_lowercase();
let mut results: Vec<_> = self.memories