This commit is contained in:
2026-02-27 19:49:07 +09:00
commit 862ba2eb34
11 changed files with 422 additions and 0 deletions

26
.gitignore vendored Normal file
View File

@@ -0,0 +1,26 @@
# Rust
target/
Cargo.lock
# Database files
*.db
*.db-shm
*.db-wal
# IDE
.idea/
.vscode/
*.swp
*.swo
# OS
.DS_Store
Thumbs.db
# Logs
*.log
json
gpt
.claude
/claude.md
/CLAUDE.MD

21
Cargo.toml Normal file
View File

@@ -0,0 +1,21 @@
[package]
name = "aigpt"
version = "0.3.0"
edition = "2021"
authors = ["syui"]
description = "AI memory MCP server - read/write core.md and memory.md"
[lib]
name = "aigpt"
path = "src/lib.rs"
[[bin]]
name = "aigpt"
path = "src/main.rs"
[dependencies]
clap = { version = "4.5", features = ["derive"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
anyhow = "1.0"
dirs = "5.0"

8
README.md Normal file
View File

@@ -0,0 +1,8 @@
# aigpt
AI memory
```sh
core: immutable
memory: growth
```

66
docs/DOCS.md Normal file
View File

@@ -0,0 +1,66 @@
# aigpt docs
## Overview
MCP server for AI memory. Reads/writes core.md and memory.md. Nothing more.
## Design
- AI decides, tool records
- File I/O only, no database
- 4 MCP tools: read_core, read_memory, save_memory, compress
- Simple, unbreakable, long-lasting
## MCP Tools
| Tool | Args | Description |
|------|------|-------------|
| read_core | none | Returns core.md content |
| read_memory | none | Returns memory.md content |
| save_memory | content: string | Overwrites memory.md |
| compress | conversation: string | Reads memory.md + conversation, writes compressed result to memory.md |
compress note: AI decides what to keep/discard. Tool just writes.
## Data
```
~/.config/aigpt/
├── core.md ← read only (identity, settings)
└── memory.md ← read/write (memories, grows over time)
```
## Architecture
```
src/
├── mcp/server.rs ← JSON-RPC over stdio
├── core/reader.rs ← read core.md, memory.md
├── core/writer.rs ← write memory.md
└── main.rs ← CLI + MCP server
```
## Compression Rules
When compress is called, AI should:
- Keep facts and decisions
- Discard procedures and processes
- Resolve contradictions (keep newer)
- Don't duplicate core.md content
## Usage
```bash
aigpt server # start MCP server
aigpt read-core # CLI: read core.md
aigpt read-memory # CLI: read memory.md
aigpt save-memory "content" # CLI: write memory.md
```
## Tech
- Rust, MCP (JSON-RPC over stdio), file I/O only
## History
Previous versions (v0.1-v0.3) had multi-layer architecture with SQLite, Big Five personality analysis, relationship inference, gamification, and companion systems. Rewritten to current simple design. Old docs preserved in docs/archive/.

2
src/core/mod.rs Normal file
View File

@@ -0,0 +1,2 @@
/// Read-only access to core.md and memory.md in the config directory.
pub mod reader;
/// Write access to memory.md (core.md is never written by this crate).
pub mod writer;

24
src/core/reader.rs Normal file
View File

@@ -0,0 +1,24 @@
use anyhow::{Context, Result};
use std::fs;
use std::path::PathBuf;
/// Resolves the aigpt config directory: the platform config dir
/// (e.g. ~/.config on Linux) joined with "aigpt", falling back to
/// "./aigpt" when no platform config dir can be determined.
fn config_dir() -> PathBuf {
    let base = dirs::config_dir().unwrap_or_else(|| PathBuf::from("."));
    base.join("aigpt")
}
/// Reads core.md from the config directory.
///
/// # Errors
/// Fails (with path context) when the file is missing or unreadable —
/// unlike memory.md, core.md is expected to exist.
pub fn read_core() -> Result<String> {
    let path = config_dir().join("core.md");
    let content = fs::read_to_string(&path)
        .with_context(|| format!("Failed to read {}", path.display()))?;
    Ok(content)
}
pub fn read_memory() -> Result<String> {
let path = config_dir().join("memory.md");
match fs::read_to_string(&path) {
Ok(content) => Ok(content),
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(String::new()),
Err(e) => Err(e).with_context(|| format!("Failed to read {}", path.display())),
}
}

18
src/core/writer.rs Normal file
View File

@@ -0,0 +1,18 @@
use anyhow::{Context, Result};
use std::fs;
use std::path::PathBuf;
/// Resolves the aigpt config directory (platform config dir + "aigpt",
/// or "./aigpt" when the platform dir is unavailable).
/// NOTE(review): duplicated in reader.rs — consider a shared helper.
fn config_dir() -> PathBuf {
    match dirs::config_dir() {
        Some(dir) => dir.join("aigpt"),
        None => PathBuf::from(".").join("aigpt"),
    }
}
/// Overwrites memory.md with `content` (wholesale replace, never append),
/// creating the config directory on first use.
///
/// # Errors
/// Fails (with path context) when the directory cannot be created or the
/// file cannot be written.
pub fn save_memory(content: &str) -> Result<()> {
    let path = {
        let dir = config_dir();
        fs::create_dir_all(&dir)
            .with_context(|| format!("Failed to create {}", dir.display()))?;
        dir.join("memory.md")
    };
    fs::write(&path, content)
        .with_context(|| format!("Failed to write {}", path.display()))
}

2
src/lib.rs Normal file
View File

@@ -0,0 +1,2 @@
/// File-backed memory primitives (read core.md/memory.md, write memory.md).
pub mod core;
/// MCP server: JSON-RPC over stdio exposing the memory tools.
pub mod mcp;

60
src/main.rs Normal file
View File

@@ -0,0 +1,60 @@
use anyhow::Result;
use clap::{Parser, Subcommand};
use aigpt::core::{reader, writer};
use aigpt::mcp::MCPServer;
// Top-level CLI parser; clap derives everything from the attributes below.
// NOTE(review): comments here stay `//` on purpose — `///` doc comments on
// clap derive items become user-visible help text.
#[derive(Parser)]
#[command(name = "aigpt")]
#[command(about = "AI memory MCP server - read/write core.md and memory.md")]
#[command(version)]
struct Cli {
    // The required subcommand (clap rejects invocation without one).
    #[command(subcommand)]
    command: Commands,
}
// Subcommands: the MCP server plus direct CLI access to the same files.
// clap renders variants in kebab-case: server, read-core, read-memory,
// save-memory. The `///` lines below are clap help text — do not edit
// casually; they are part of the user-facing interface.
#[derive(Subcommand)]
enum Commands {
    /// Start MCP server (JSON-RPC over stdio)
    Server,
    /// Read core.md
    ReadCore,
    /// Read memory.md
    ReadMemory,
    /// Save content to memory.md
    SaveMemory {
        /// Content to write
        content: String,
    },
}
/// CLI entry point: parse arguments, dispatch to the server or one of the
/// direct file operations, propagating any error via anyhow.
fn main() -> Result<()> {
    let cli = Cli::parse();
    match cli.command {
        // Blocks until stdin closes.
        Commands::Server => MCPServer::new().run()?,
        // `print!` (no trailing newline): file content is emitted verbatim.
        Commands::ReadCore => print!("{}", reader::read_core()?),
        Commands::ReadMemory => print!("{}", reader::read_memory()?),
        Commands::SaveMemory { content } => {
            writer::save_memory(&content)?;
            println!("Saved to memory.md");
        }
    }
    Ok(())
}

3
src/mcp/mod.rs Normal file
View File

@@ -0,0 +1,3 @@
/// JSON-RPC-over-stdio server implementation.
pub mod server;
// Re-export so callers can write `aigpt::mcp::MCPServer`.
pub use server::MCPServer;

192
src/mcp/server.rs Normal file
View File

@@ -0,0 +1,192 @@
use anyhow::Result;
use serde_json::{json, Value};
use std::io::{self, BufRead, Write};
use crate::core::{reader, writer};
/// MCP server speaking JSON-RPC 2.0 over stdio, one message per line.
/// Stateless: every tool call goes straight to the files on disk via
/// `reader`/`writer`.
pub struct MCPServer;

impl Default for MCPServer {
    fn default() -> Self {
        Self::new()
    }
}

impl MCPServer {
    /// Creates a new server instance (no configuration, no state).
    pub fn new() -> Self {
        MCPServer
    }

    /// Runs the request loop: reads newline-delimited JSON-RPC messages
    /// from stdin, writes one JSON response per line to stdout.
    /// Returns Ok(()) when stdin closes or becomes unreadable.
    ///
    /// # Errors
    /// Fails only on stdout write/serialization errors.
    pub fn run(&self) -> Result<()> {
        let stdin = io::stdin();
        let mut stdout = io::stdout();
        let reader = stdin.lock();
        for line_result in reader.lines() {
            let line = match line_result {
                Ok(line) => line,
                Err(_) => break, // stdin gone: shut down quietly
            };
            let trimmed = line.trim();
            if trimmed.is_empty() {
                continue;
            }
            // Unparseable lines are silently ignored. A stricter server would
            // answer with a -32700 Parse error, but well-behaved MCP clients
            // only send valid JSON, so this keeps the loop simple.
            if let Ok(request) = serde_json::from_str::<Value>(trimmed) {
                // JSON-RPC 2.0: a message without an "id" is a notification
                // and MUST NOT receive a response; handle_request returns
                // None for those, so nothing is written.
                if let Some(response) = self.handle_request(request) {
                    let response_str = serde_json::to_string(&response)?;
                    stdout.write_all(response_str.as_bytes())?;
                    stdout.write_all(b"\n")?;
                    stdout.flush()?;
                }
            }
        }
        Ok(())
    }

    /// Dispatches one parsed message. Returns None for notifications
    /// (messages without an "id"), which per JSON-RPC 2.0 get no response.
    fn handle_request(&self, request: Value) -> Option<Value> {
        let method = request["method"].as_str().unwrap_or("");
        // Fix: previously every message was answered, so notifications such
        // as "notifications/initialized" (sent by MCP clients right after
        // "initialize") got a spurious "Method not found" error with
        // "id": null — a JSON-RPC 2.0 violation that can confuse clients.
        let id = request.get("id").cloned()?;
        Some(match method {
            "initialize" => self.handle_initialize(id),
            "tools/list" => self.handle_tools_list(id),
            "tools/call" => self.handle_tools_call(request, id),
            _ => json!({
                "jsonrpc": "2.0",
                "id": id,
                "error": {
                    "code": -32601,
                    "message": "Method not found"
                }
            }),
        })
    }

    /// Handles "initialize": advertises protocol version, tool capability,
    /// and server identity.
    fn handle_initialize(&self, id: Value) -> Value {
        json!({
            "jsonrpc": "2.0",
            "id": id,
            "result": {
                "protocolVersion": "2024-11-05",
                "capabilities": {
                    "tools": {}
                },
                "serverInfo": {
                    "name": "aigpt",
                    "version": "0.3.0"
                }
            }
        })
    }

    /// Handles "tools/list": returns the four tool schemas
    /// (read_core, read_memory, save_memory, compress).
    fn handle_tools_list(&self, id: Value) -> Value {
        let tools = vec![
            json!({
                "name": "read_core",
                "description": "Read core.md - the AI's identity and instructions",
                "inputSchema": {
                    "type": "object",
                    "properties": {}
                }
            }),
            json!({
                "name": "read_memory",
                "description": "Read memory.md - the AI's accumulated memories",
                "inputSchema": {
                    "type": "object",
                    "properties": {}
                }
            }),
            json!({
                "name": "save_memory",
                "description": "Overwrite memory.md with new content",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "content": {
                            "type": "string",
                            "description": "Content to write to memory.md"
                        }
                    },
                    "required": ["content"]
                }
            }),
            json!({
                "name": "compress",
                "description": "Compress conversation into memory. AI decides what to keep, tool writes the result to memory.md",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "conversation": {
                            "type": "string",
                            "description": "Compressed memory content to save"
                        }
                    },
                    "required": ["conversation"]
                }
            }),
        ];
        json!({
            "jsonrpc": "2.0",
            "id": id,
            "result": {
                "tools": tools
            }
        })
    }

    /// Handles "tools/call": dispatches to the named tool and wraps its
    /// JSON result (success or error) as MCP text content. Tool-level
    /// failures are reported inside "result", not as JSON-RPC errors.
    fn handle_tools_call(&self, request: Value, id: Value) -> Value {
        let tool_name = request["params"]["name"].as_str().unwrap_or("");
        let arguments = &request["params"]["arguments"];
        let result = match tool_name {
            "read_core" => self.tool_read_core(),
            "read_memory" => self.tool_read_memory(),
            "save_memory" => self.tool_save_memory(arguments),
            "compress" => self.tool_compress(arguments),
            _ => json!({
                "error": format!("Unknown tool: {}", tool_name)
            }),
        };
        json!({
            "jsonrpc": "2.0",
            "id": id,
            "result": {
                "content": [{
                    "type": "text",
                    "text": result.to_string()
                }]
            }
        })
    }

    /// read_core tool: returns core.md content, or an error object.
    fn tool_read_core(&self) -> Value {
        match reader::read_core() {
            Ok(content) => json!({ "content": content }),
            Err(e) => json!({ "error": e.to_string() }),
        }
    }

    /// read_memory tool: returns memory.md content (empty if absent),
    /// or an error object.
    fn tool_read_memory(&self) -> Value {
        match reader::read_memory() {
            Ok(content) => json!({ "content": content }),
            Err(e) => json!({ "error": e.to_string() }),
        }
    }

    /// save_memory tool: overwrites memory.md with "content".
    /// A missing/non-string argument is treated as an empty string.
    fn tool_save_memory(&self, arguments: &Value) -> Value {
        let content = arguments["content"].as_str().unwrap_or("");
        match writer::save_memory(content) {
            Ok(()) => json!({ "success": true }),
            Err(e) => json!({ "error": e.to_string() }),
        }
    }

    /// compress tool: the AI sends already-compressed memory in
    /// "conversation"; this just overwrites memory.md with it
    /// (identical mechanics to save_memory, different schema).
    fn tool_compress(&self, arguments: &Value) -> Value {
        let conversation = arguments["conversation"].as_str().unwrap_or("");
        match writer::save_memory(conversation) {
            Ok(()) => json!({ "success": true }),
            Err(e) => json!({ "error": e.to_string() }),
        }
    }
}