syui 2025-05-20 18:27:48 +09:00
commit 192aa3e117
Signed by: syui
GPG Key ID: 5417CFEBAD92DF56
14 changed files with 534 additions and 0 deletions

2
.gitignore vendored Normal file

@@ -0,0 +1,2 @@
**target
**.lock

9
Cargo.toml Normal file

@@ -0,0 +1,9 @@
[package]
name = "aigpt"
version = "0.1.0"
edition = "2021"
[dependencies]
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
chrono = "0.4"

7
README.md Normal file

@@ -0,0 +1,7 @@
# ai `gpt`
ai x messaging

A prototype of an autonomous, relationship-driven AI system built around **personality × relationship × external environment × time**.

Whether a message is sent, when it is sent, and what it says are all determined by the personality × relationship × external environment × time parameters.
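As a rough illustration (a minimal sketch only; the actual rule lives in `src/logic.rs`), the relationship scores are summed against a threshold and today's luck gates the result:

```rust
// Sketch of the send decision, assuming the field names used in src/model.rs
// (trust, intimacy, curiosity, threshold, luck_today).
fn can_send(trust: f32, intimacy: f32, curiosity: f32, threshold: f32, luck_today: f32) -> bool {
    // relationship: the three scores together must reach the threshold
    let relationship_ok = trust + intimacy + curiosity >= threshold;
    // environment: today's luck must be favourable
    let luck_ok = luck_today > 0.5;
    relationship_ok && luck_ok
}
```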

26
config/config.json Normal file

@@ -0,0 +1,26 @@
{
"personality": {
"kind": "positive",
"strength": 0.8
},
"relationship": {
"trust": 0.7,
"intimacy": 0.6,
"curiosity": 0.5,
"threshold": 1.5
},
"environment": {
"luck_today": 0.9,
"luck_history": [0.9, 0.9, 0.9],
"level": 1
},
"messaging": {
"enabled": true,
"schedule_time": "08:00",
"decay_rate": 0.1,
"templates": [
"おはよう!今日もがんばろう!",
"ねえ、話したいことがあるの。"
]
}
}
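For reference, a minimal sketch of how this file can be loaded (the helper name `load_ai_config` is hypothetical; `src/main.rs` performs the same steps inline using the `AiSystem` structs from `src/model.rs`):

```rust
use std::fs;
use crate::model::AiSystem; // struct definitions in src/model.rs

// Hypothetical helper: read the JSON config and deserialize it with serde_json.
pub fn load_ai_config(path: &str) -> AiSystem {
    let data = fs::read_to_string(path).expect("failed to read config file");
    serde_json::from_str(&data).expect("failed to parse config JSON")
}
```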

88
gpt.json Normal file

@@ -0,0 +1,88 @@
{
"system_name": "relational_autonomous_ai",
"description": "人格・関係性・環境・時間に基づき、AIが自律的にユーザーにメッセージを送信する仕組み",
"core_components": {
"personality": {
"type": "enum",
"variants": ["positive", "negative", "logical", "emotional", "mixed"],
"parameters": {
"message_trigger_style": "運勢との相互作用による送信傾向",
"decay_rate_modifier": "関係性スコアの時間減衰に対する耐性"
}
},
"relationship": {
"parameters": ["trust", "affection", "intimacy"],
"properties": {
"persistent": true,
"hidden": true,
"irreversible": false,
"decay_over_time": true
},
"decay_function": "exp(-t / strength)"
},
"environment": {
"daily_luck": {
"type": "float",
"range": [0.1, 1.0],
"update": "daily",
"streak_mechanism": {
"trigger": "min_or_max_luck_3_times_in_a_row",
"effect": "personality_strength_roll",
"chance": 0.5
}
}
},
"memory": {
"long_term_memory": "user_relationship_log",
"short_term_context": "recent_interactions",
"usage_in_generation": true
},
"message_trigger": {
"condition": {
"relationship_threshold": {
"trust": 0.8,
"affection": 0.6
},
"time_decay": true,
"environment_luck": "personality_dependent"
},
"timing": {
"based_on": ["time_of_day", "personality", "recent_interaction"],
"modifiers": {
"emotional": "morning or night",
"logical": "daytime"
}
}
},
"message_generation": {
"style_variants": ["thought", "casual", "encouragement", "watchful"],
"influenced_by": ["personality", "relationship", "daily_luck", "memory"],
"llm_integration": true
},
"state_transition": {
"states": ["idle", "ready", "sending", "cooldown"],
"transitions": {
"ready_if": "thresholds_met",
"sending_if": "timing_matched",
"cooldown_after": "message_sent"
}
}
},
"extensions": {
"persistence": {
"database": "sqlite",
"storage_items": ["relationship", "personality_level", "daily_luck_log"]
},
"api": {
"llm": "openai / local LLM",
"mode": "rust_cli",
"external_event_trigger": true
},
"scheduler": {
"async_event_loop": true,
"interval_check": 3600,
"time_decay_check": true
}
},
"note": "このシステムは、感情や関係性のような人間的要素を模倣し、AIが“自然に話しかけてくる”という自律型エージェント設計の土台となる。"
}
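Two details of this spec are worth unpacking: the `decay_function` string `exp(-t / strength)` describes exponential decay of the relationship score, and `state_transition` describes a four-state send cycle. A minimal sketch (the unit of `t` as days and the enum name are assumptions; gpt.json itself does not fix them):

```rust
// Sketch of decay_function "exp(-t / strength)": the score shrinks
// exponentially with elapsed time t, and a larger personality strength
// slows the decay (t is assumed to be in days here).
fn decayed_score(score: f32, t_days: f32, strength: f32) -> f32 {
    score * (-t_days / strength).exp()
}

// The four states listed under state_transition.
#[derive(Debug)]
enum SenderState {
    Idle,     // thresholds not yet met
    Ready,    // thresholds_met
    Sending,  // timing_matched
    Cooldown, // after message_sent
}
```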

133
src/data.rs Normal file

@@ -0,0 +1,133 @@
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

#[derive(Debug, Serialize, Deserialize)]
pub struct RelationalAutonomousAI {
pub system_name: String,
pub description: String,
pub core_components: CoreComponents,
pub extensions: Extensions,
pub note: String,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct CoreComponents {
pub personality: Personality,
pub relationship: Relationship,
pub environment: Environment,
pub memory: Memory,
pub message_trigger: MessageTrigger,
pub message_generation: MessageGeneration,
pub state_transition: StateTransition,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Personality {
pub r#type: String,
pub variants: Vec<String>,
pub parameters: PersonalityParameters,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct PersonalityParameters {
pub message_trigger_style: String,
pub decay_rate_modifier: String,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Relationship {
pub parameters: Vec<String>,
pub properties: RelationshipProperties,
pub decay_function: String,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct RelationshipProperties {
pub persistent: bool,
pub hidden: bool,
pub irreversible: bool,
pub decay_over_time: bool,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Environment {
pub daily_luck: DailyLuck,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct DailyLuck {
pub r#type: String,
pub range: Vec<f32>,
pub update: String,
pub streak_mechanism: StreakMechanism,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct StreakMechanism {
pub trigger: String,
pub effect: String,
pub chance: f32,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Memory {
pub long_term_memory: String,
pub short_term_context: String,
pub usage_in_generation: bool,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct MessageTrigger {
pub condition: TriggerCondition,
pub timing: TriggerTiming,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct TriggerCondition {
pub relationship_threshold: HashMap<String, f32>, // mirrors the {"trust": 0.8, "affection": 0.6} object in gpt.json
pub time_decay: bool,
pub environment_luck: String,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct TriggerTiming {
pub based_on: Vec<String>,
pub modifiers: HashMap<String, String>, // mirrors the {"emotional": ..., "logical": ...} object in gpt.json
}
#[derive(Debug, Serialize, Deserialize)]
pub struct MessageGeneration {
pub style_variants: Vec<String>,
pub influenced_by: Vec<String>,
pub llm_integration: bool,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct StateTransition {
pub states: Vec<String>,
pub transitions: HashMap<String, String>, // mirrors the {"ready_if": ..., "sending_if": ..., "cooldown_after": ...} object in gpt.json
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Extensions {
pub persistence: Persistence,
pub api: Api,
pub scheduler: Scheduler,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Persistence {
pub database: String,
pub storage_items: Vec<String>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Api {
pub llm: String,
pub mode: String,
pub external_event_trigger: bool,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Scheduler {
pub async_event_loop: bool,
pub interval_check: i32,
pub time_decay_check: bool,
}

11
src/logic.rs Normal file

@@ -0,0 +1,11 @@
use crate::model::AiSystem;
pub fn should_send(ai: &AiSystem) -> bool {
let r = &ai.relationship;
let env = &ai.environment;
let score = r.trust + r.intimacy + r.curiosity;
let relationship_ok = score >= r.threshold;
let luck_ok = env.luck_today > 0.5;
ai.messaging.enabled && relationship_ok && luck_ok
}
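A quick sanity check of this rule (a sketch only; the values mirror `config/config.json`, where 0.7 + 0.6 + 0.5 = 1.8 ≥ 1.5 and luck_today = 0.9 > 0.5):

```rust
#[cfg(test)]
mod tests {
    use super::should_send;
    use crate::model::{AiSystem, Environment, Messaging, Personality, Relationship};

    #[test]
    fn sends_when_threshold_and_luck_are_met() {
        let ai = AiSystem {
            personality: Personality { kind: "positive".into(), strength: 0.8 },
            relationship: Relationship { trust: 0.7, intimacy: 0.6, curiosity: 0.5, threshold: 1.5 },
            environment: Environment { luck_today: 0.9, luck_history: vec![0.9, 0.9, 0.9], level: 1 },
            messaging: Messaging {
                enabled: true,
                schedule_time: Some("08:00".into()),
                decay_rate: 0.1,
                templates: vec!["hi".into()],
            },
        };
        assert!(should_send(&ai));
    }
}
```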

110
src/main.rs Normal file

@@ -0,0 +1,110 @@
mod model;
mod logic;
use model::{AiSystem, Environment, Messaging, Personality, Relationship};
use serde_json;
use std::fs;
use std::path::Path;
use chrono::{Duration, Local, NaiveDateTime};
#[derive(Debug)]
struct AIState {
relation_score: f32,
previous_score: f32,
decay_rate: f32,
sensitivity: f32,
message_threshold: f32,
last_message_time: NaiveDateTime,
}
impl AIState {
fn update(&mut self, now: NaiveDateTime) {
let days_passed = (now - self.last_message_time).num_days() as f32;
let decay = self.decay_rate * days_passed;
self.previous_score = self.relation_score;
self.relation_score -= decay;
self.relation_score = self.relation_score.clamp(0.0, 100.0);
}
fn should_talk(&self) -> bool {
let delta = self.previous_score - self.relation_score;
delta > self.message_threshold && self.sensitivity > 0.5
}
fn generate_message(&self) -> String {
let _delta = self.previous_score - self.relation_score;
match self.relation_score as i32 {
80..=100 => "ふふっ、最近どうしてる?会いたくなっちゃった!".to_string(),
60..=79 => "ちょっとだけ、さみしかったんだよ?".to_string(),
40..=59 => "えっと……話せる時間ある?".to_string(),
_ => "ううん、もしかして私のこと、忘れちゃったのかな……".to_string(),
}
}
}
fn main() {
// Initialize a test AiSystem instance
let ai = AiSystem {
personality: Personality {
kind: "positive".to_string(),
strength: 0.8,
},
relationship: Relationship {
trust: 0.7,
intimacy: 0.6,
curiosity: 0.5,
threshold: 1.5,
},
environment: Environment {
luck_today: 0.9,
luck_history: vec![0.9, 0.9, 0.9],
level: 1,
},
messaging: Messaging {
enabled: true,
schedule_time: Some("08:00".to_string()),
decay_rate: 0.1,
templates: vec![
"ねえねえ、今日もがんばろうね!".to_string(),
"そろそろ話したくなっちゃった...".to_string(),
],
},
};
// Serialize to JSON and print it
let json = serde_json::to_string_pretty(&ai).unwrap();
println!("{}", json);
let path = Path::new("config/config.json");
let data = fs::read_to_string(path).expect("JSON読み込み失敗");
let ai: AiSystem = serde_json::from_str(&data).expect("パース失敗");
println!("AI構成読み込み完了: {:?}", ai);
if logic::should_send(&ai) {
let msg = &ai.messaging.templates[0];
println!("✅ メッセージ送信決定: {msg}");
} else {
println!("❌ 今はメッセージを送る条件ではありません。");
}
let now = Local::now().naive_local();
let mut ai = AIState {
relation_score: 80.0,
previous_score: 80.0,
decay_rate: 1.5, // score lost per day
sensitivity: 0.8,
message_threshold: 5.0,
last_message_time: now - Duration::days(4), // 4 days since the last conversation
};
ai.update(now);
if ai.should_talk() {
println!("AI発話: {}", ai.generate_message());
} else {
println!("まだ何も言わずにそっとしているようです...");
}
}
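Worked through with the demo values above: 4 days at a decay_rate of 1.5 removes 1.5 × 4 = 6.0 points, so relation_score drops from 80.0 to 74.0. The delta of 6.0 exceeds message_threshold (5.0) and sensitivity (0.8) is above 0.5, so should_talk() returns true and the 60..=79 branch of generate_message() is printed.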

40
src/model.rs Normal file

@@ -0,0 +1,40 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize)]
pub struct AiSystem {
pub personality: Personality,
pub relationship: Relationship,
pub environment: Environment,
pub messaging: Messaging,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Personality {
pub kind: String, // e.g., "positive", "negative", "neutral"
pub strength: f32, // 0.0 - 1.0
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Relationship {
pub trust: f32, // 0.0 - 1.0
pub intimacy: f32, // 0.0 - 1.0
pub curiosity: f32, // 0.0 - 1.0
pub threshold: f32, // if sum > threshold, allow messaging
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Environment {
pub luck_today: f32, // 0.1 - 1.0
pub luck_history: Vec<f32>, // last 3 values
pub level: i32, // current mental strength level
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Messaging {
pub enabled: bool,
pub schedule_time: Option<String>, // e.g., "08:00"
pub decay_rate: f32, // how quickly emotion fades (0.0 - 1.0)
pub templates: Vec<String>, // message template variations
}

46
src/system.rs Normal file

@@ -0,0 +1,46 @@
use serde::{Deserialize, Serialize};
use std::fs::File;
use std::io::{BufReader, Write};
mod data;
use data::RelationalAutonomousAI; // schema structs are defined in src/data.rs
fn load_config(path: &str) -> std::io::Result<RelationalAutonomousAI> {
let file = File::open(path)?;
let reader = BufReader::new(file);
let config: RelationalAutonomousAI = serde_json::from_reader(reader)?;
Ok(config)
}
fn save_config(config: &RelationalAutonomousAI, path: &str) -> std::io::Result<()> {
let mut file = File::create(path)?;
let json = serde_json::to_string_pretty(config)?;
file.write_all(json.as_bytes())?;
Ok(())
}
fn should_send_message(config: &RelationalAutonomousAI) -> bool {
// Simplified send condition: the relationship parameter list includes "trust" and the upper bound of daily_luck is at least 0.8.
config.core_components.relationship.parameters.contains(&"trust".to_string())
&& config.core_components.environment.daily_luck.range[1] >= 0.8
}
fn main() -> std::io::Result<()> {
let path = "config.json";
let mut config = load_config(path)?;
if should_send_message(&config) {
println!("💌 メッセージを送信できます: {:?}", config.core_components.personality.r#type);
// Example state change: record the post-send transition (transitions is a map in gpt.json)
config.core_components.state_transition.transitions.insert("cooldown_after".to_string(), "message_sent".to_string());
save_config(&config, path)?;
} else {
println!("😶 まだ送信条件に達していません。");
}
Ok(())
}

42
tmp/agent/src/main.rs Normal file

@@ -0,0 +1,42 @@
use std::env;
use std::process::{Command, Stdio};
use std::io::{self, Write};
fn main() {
let args: Vec<String> = env::args().collect();
if args.len() < 2 {
eprintln!("Usage: langchain_cli <prompt>");
std::process::exit(1);
}
let prompt = &args[1];
// Simulate a pipeline stage: e.g., tokenization, reasoning, response generation
let stages = vec!["Tokenize", "Reason", "Generate"];
for stage in &stages {
println!("[Stage: {}] Processing...", stage);
}
// Example call to Python-based LangChain (assuming you have a script or API to call)
// For placeholder purposes, we echo the prompt back.
let output = Command::new("python3")
.arg("-c")
.arg(format!("print(\"LangChain Agent Response for: {}\")", prompt))
.stdout(Stdio::piped())
.spawn()
.expect("failed to execute process")
.wait_with_output()
.expect("failed to wait on child");
io::stdout().write_all(&output.stdout).unwrap();
}
/*
TODO (for future LangChain-style pipeline):
1. Implement trait-based agent components: Tokenizer, Retriever, Reasoner, Generator.
2. Allow config via YAML or TOML to define chain flow.
3. Async pipeline support with Tokio.
4. Optional integration with LLM APIs (OpenAI, Ollama, etc).
5. Rust-native vector search (e.g. using `tantivy`, `qdrant-client`).
*/
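One hedged reading of TODO item 1 (a sketch only; none of these traits exist in the repository yet, and the names simply follow the list above):

```rust
// Hypothetical trait layout for a LangChain-style pipeline: each stage is a
// trait object so a chain could later be assembled from YAML/TOML config.
trait Tokenizer {
    fn tokenize(&self, input: &str) -> Vec<String>;
}

trait Retriever {
    fn retrieve(&self, query: &str) -> Vec<String>;
}

trait Reasoner {
    fn reason(&self, tokens: &[String], context: &[String]) -> String;
}

trait Generator {
    fn generate(&self, reasoning: &str) -> String;
}

// A chain runs the stages in order: tokenize, retrieve, reason, generate.
struct Chain {
    tokenizer: Box<dyn Tokenizer>,
    retriever: Box<dyn Retriever>,
    reasoner: Box<dyn Reasoner>,
    generator: Box<dyn Generator>,
}

impl Chain {
    fn run(&self, prompt: &str) -> String {
        let tokens = self.tokenizer.tokenize(prompt);
        let context = self.retriever.retrieve(prompt);
        let reasoning = self.reasoner.reason(&tokens, &context);
        self.generator.generate(&reasoning)
    }
}
```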

BIN
tmp/img/ai_r_01.png Normal file

Binary file not shown. (Size: 1.9 MiB)

BIN
tmp/img/ai_r_02.png Normal file

Binary file not shown. (Size: 1.8 MiB)

20
tmp/post.json Normal file

File diff suppressed because one or more lines are too long