commit 6f3c9d25a80a654039a001e81fc25ed07da3283c Author: syui Date: Fri Feb 27 22:09:13 2026 +0900 init diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..c8fdf82 --- /dev/null +++ b/.gitignore @@ -0,0 +1,26 @@ +# Rust +target/ +Cargo.lock + +# Database files +*.db +*.db-shm +*.db-wal + +# IDE +.idea/ +.vscode/ +*.swp +*.swo + +# OS +.DS_Store +Thumbs.db + +# Logs +*.log +json +gpt +.claude +/claude.md +/CLAUDE.MD diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..f0a20e4 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "aigpt" +version = "0.0.1" +edition = "2021" +authors = ["syui"] +description = "AI memory MCP server - read/write core.md and memory.md" + +[lib] +name = "aigpt" +path = "src/lib.rs" + +[[bin]] +name = "aigpt" +path = "src/main.rs" + +[dependencies] +clap = { version = "4.5", features = ["derive"] } +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +anyhow = "1.0" +dirs = "5.0" diff --git a/README.md b/README.md new file mode 100644 index 0000000..32b3f76 --- /dev/null +++ b/README.md @@ -0,0 +1,8 @@ +# aigpt + +AI memory + +```sh +core: immutable +memory: growth +``` diff --git a/docs/DOCS.md b/docs/DOCS.md new file mode 100644 index 0000000..09aa560 --- /dev/null +++ b/docs/DOCS.md @@ -0,0 +1,66 @@ +# aigpt docs + +## Overview + +MCP server for AI memory. Reads/writes core.md and memory.md. Nothing more. + +## Design + +- AI decides, tool records +- File I/O only, no database +- 4 MCP tools: read_core, read_memory, save_memory, compress +- Simple, unbreakable, long-lasting + +## MCP Tools + +| Tool | Args | Description | +|------|------|-------------| +| read_core | none | Returns core.md content | +| read_memory | none | Returns memory.md content | +| save_memory | content: string | Overwrites memory.md | +| compress | conversation: string | Reads memory.md + conversation, writes compressed result to memory.md | + +compress note: AI decides what to keep/discard. 
Tool just writes. + +## Data + +``` +~/.config/aigpt/ +├── core.md ← read only (identity, settings) +└── memory.md ← read/write (memories, grows over time) +``` + +## Architecture + +``` +src/ +├── mcp/server.rs ← JSON-RPC over stdio +├── core/reader.rs ← read core.md, memory.md +├── core/writer.rs ← write memory.md +└── main.rs ← CLI + MCP server +``` + +## Compression Rules + +When compress is called, AI should: +- Keep facts and decisions +- Discard procedures and processes +- Resolve contradictions (keep newer) +- Don't duplicate core.md content + +## Usage + +```bash +aigpt server # start MCP server +aigpt read-core # CLI: read core.md +aigpt read-memory # CLI: read memory.md +aigpt save-memory "content" # CLI: write memory.md +``` + +## Tech + +- Rust, MCP (JSON-RPC over stdio), file I/O only + +## History + +Previous versions (v0.1-v0.3) had multi-layer architecture with SQLite, Big Five personality analysis, relationship inference, gamification, and companion systems. Rewritten to current simple design. Old docs preserved in docs/archive/. 
diff --git a/src/core/mod.rs b/src/core/mod.rs new file mode 100644 index 0000000..c9134a0 --- /dev/null +++ b/src/core/mod.rs @@ -0,0 +1,2 @@ +pub mod reader; +pub mod writer; diff --git a/src/core/reader.rs b/src/core/reader.rs new file mode 100644 index 0000000..e306452 --- /dev/null +++ b/src/core/reader.rs @@ -0,0 +1,24 @@ +use anyhow::{Context, Result}; +use std::fs; +use std::path::PathBuf; + +fn config_dir() -> PathBuf { + dirs::config_dir() + .unwrap_or_else(|| PathBuf::from(".")) + .join("aigpt") +} + +pub fn read_core() -> Result { + let path = config_dir().join("core.md"); + fs::read_to_string(&path) + .with_context(|| format!("Failed to read {}", path.display())) +} + +pub fn read_memory() -> Result { + let path = config_dir().join("memory.md"); + match fs::read_to_string(&path) { + Ok(content) => Ok(content), + Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(String::new()), + Err(e) => Err(e).with_context(|| format!("Failed to read {}", path.display())), + } +} diff --git a/src/core/writer.rs b/src/core/writer.rs new file mode 100644 index 0000000..db03c7f --- /dev/null +++ b/src/core/writer.rs @@ -0,0 +1,18 @@ +use anyhow::{Context, Result}; +use std::fs; +use std::path::PathBuf; + +fn config_dir() -> PathBuf { + dirs::config_dir() + .unwrap_or_else(|| PathBuf::from(".")) + .join("aigpt") +} + +pub fn save_memory(content: &str) -> Result<()> { + let dir = config_dir(); + fs::create_dir_all(&dir) + .with_context(|| format!("Failed to create {}", dir.display()))?; + let path = dir.join("memory.md"); + fs::write(&path, content) + .with_context(|| format!("Failed to write {}", path.display())) +} diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 0000000..e27b615 --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,2 @@ +pub mod core; +pub mod mcp; \ No newline at end of file diff --git a/src/main.rs b/src/main.rs new file mode 100644 index 0000000..d21e70b --- /dev/null +++ b/src/main.rs @@ -0,0 +1,60 @@ +use anyhow::Result; +use 
clap::{Parser, Subcommand}; + +use aigpt::core::{reader, writer}; +use aigpt::mcp::MCPServer; + +#[derive(Parser)] +#[command(name = "aigpt")] +#[command(about = "AI memory MCP server - read/write core.md and memory.md")] +#[command(version)] +struct Cli { + #[command(subcommand)] + command: Commands, +} + +#[derive(Subcommand)] +enum Commands { + /// Start MCP server (JSON-RPC over stdio) + Server, + + /// Read core.md + ReadCore, + + /// Read memory.md + ReadMemory, + + /// Save content to memory.md + SaveMemory { + /// Content to write + content: String, + }, +} + +fn main() -> Result<()> { + let cli = Cli::parse(); + + match cli.command { + Commands::Server => { + let server = MCPServer::new(); + server.run()?; + } + + Commands::ReadCore => { + let content = reader::read_core()?; + print!("{}", content); + } + + Commands::ReadMemory => { + let content = reader::read_memory()?; + print!("{}", content); + } + + Commands::SaveMemory { content } => { + writer::save_memory(&content)?; + println!("Saved to memory.md"); + } + } + + Ok(()) +} diff --git a/src/mcp/mod.rs b/src/mcp/mod.rs new file mode 100644 index 0000000..0dc4d6d --- /dev/null +++ b/src/mcp/mod.rs @@ -0,0 +1,3 @@ +pub mod server; + +pub use server::MCPServer; diff --git a/src/mcp/server.rs b/src/mcp/server.rs new file mode 100644 index 0000000..0ec16a3 --- /dev/null +++ b/src/mcp/server.rs @@ -0,0 +1,192 @@ +use anyhow::Result; +use serde_json::{json, Value}; +use std::io::{self, BufRead, Write}; + +use crate::core::{reader, writer}; + +pub struct MCPServer; + +impl MCPServer { + pub fn new() -> Self { + MCPServer + } + + pub fn run(&self) -> Result<()> { + let stdin = io::stdin(); + let mut stdout = io::stdout(); + + let reader = stdin.lock(); + let lines = reader.lines(); + + for line_result in lines { + match line_result { + Ok(line) => { + let trimmed = line.trim().to_string(); + if trimmed.is_empty() { + continue; + } + + if let Ok(request) = serde_json::from_str::(&trimmed) { + let response = 
self.handle_request(request); + let response_str = serde_json::to_string(&response)?; + stdout.write_all(response_str.as_bytes())?; + stdout.write_all(b"\n")?; + stdout.flush()?; + } + } + Err(_) => break, + } + } + + Ok(()) + } + + fn handle_request(&self, request: Value) -> Value { + let method = request["method"].as_str().unwrap_or(""); + let id = request["id"].clone(); + + match method { + "initialize" => self.handle_initialize(id), + "tools/list" => self.handle_tools_list(id), + "tools/call" => self.handle_tools_call(request, id), + _ => json!({ + "jsonrpc": "2.0", + "id": id, + "error": { + "code": -32601, + "message": "Method not found" + } + }), + } + } + + fn handle_initialize(&self, id: Value) -> Value { + json!({ + "jsonrpc": "2.0", + "id": id, + "result": { + "protocolVersion": "2024-11-05", + "capabilities": { + "tools": {} + }, + "serverInfo": { + "name": "aigpt", + "version": "0.3.0" + } + } + }) + } + + fn handle_tools_list(&self, id: Value) -> Value { + let tools = vec![ + json!({ + "name": "read_core", + "description": "Read core.md - the AI's identity and instructions", + "inputSchema": { + "type": "object", + "properties": {} + } + }), + json!({ + "name": "read_memory", + "description": "Read memory.md - the AI's accumulated memories", + "inputSchema": { + "type": "object", + "properties": {} + } + }), + json!({ + "name": "save_memory", + "description": "Overwrite memory.md with new content", + "inputSchema": { + "type": "object", + "properties": { + "content": { + "type": "string", + "description": "Content to write to memory.md" + } + }, + "required": ["content"] + } + }), + json!({ + "name": "compress", + "description": "Compress conversation into memory. 
AI decides what to keep, tool writes the result to memory.md", + "inputSchema": { + "type": "object", + "properties": { + "conversation": { + "type": "string", + "description": "Compressed memory content to save" + } + }, + "required": ["conversation"] + } + }), + ]; + + json!({ + "jsonrpc": "2.0", + "id": id, + "result": { + "tools": tools + } + }) + } + + fn handle_tools_call(&self, request: Value, id: Value) -> Value { + let tool_name = request["params"]["name"].as_str().unwrap_or(""); + let arguments = &request["params"]["arguments"]; + + let result = match tool_name { + "read_core" => self.tool_read_core(), + "read_memory" => self.tool_read_memory(), + "save_memory" => self.tool_save_memory(arguments), + "compress" => self.tool_compress(arguments), + _ => json!({ + "error": format!("Unknown tool: {}", tool_name) + }), + }; + + json!({ + "jsonrpc": "2.0", + "id": id, + "result": { + "content": [{ + "type": "text", + "text": result.to_string() + }] + } + }) + } + + fn tool_read_core(&self) -> Value { + match reader::read_core() { + Ok(content) => json!({ "content": content }), + Err(e) => json!({ "error": e.to_string() }), + } + } + + fn tool_read_memory(&self) -> Value { + match reader::read_memory() { + Ok(content) => json!({ "content": content }), + Err(e) => json!({ "error": e.to_string() }), + } + } + + fn tool_save_memory(&self, arguments: &Value) -> Value { + let content = arguments["content"].as_str().unwrap_or(""); + match writer::save_memory(content) { + Ok(()) => json!({ "success": true }), + Err(e) => json!({ "error": e.to_string() }), + } + } + + fn tool_compress(&self, arguments: &Value) -> Value { + let conversation = arguments["conversation"].as_str().unwrap_or(""); + match writer::save_memory(conversation) { + Ok(()) => json!({ "success": true }), + Err(e) => json!({ "error": e.to_string() }), + } + } +}