This commit is contained in:
syui 2025-05-19 21:07:36 +09:00
commit fa7d52e7e8
Signed by: syui
GPG Key ID: 5417CFEBAD92DF56
14 changed files with 335 additions and 0 deletions

2
.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
**target
**.lock

10
README.md Normal file
View File

@ -0,0 +1,10 @@
# ai gpt
ai x 送信
## 説明
**「人格 × 関係性 × 外部環境 × 時間変化」**を軸にした、自律的・関係性駆動のAIシステムの原型
`送信可否`, `送信のタイミング`, `送信内容`が「人格 x 関係性 x 外部環境 x 時間変化」のパラメータで決定されます。

42
agent/main.rs Normal file
View File

@ -0,0 +1,42 @@
use std::env;
use std::process::{Command, Stdio};
use std::io::{self, Write};
/// CLI entry point: takes a single prompt argument, prints the simulated
/// pipeline stages, then delegates response generation to a Python
/// one-liner (placeholder for a real LangChain integration).
///
/// Exits with status 1 and a usage message when no prompt is given.
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() < 2 {
        eprintln!("Usage: langchain_cli <prompt>");
        std::process::exit(1);
    }
    let prompt = &args[1];

    // Simulate a pipeline stage: e.g., tokenization, reasoning, response generation
    let stages = ["Tokenize", "Reason", "Generate"];
    for stage in &stages {
        println!("[Stage: {}] Processing...", stage);
    }

    // Example call to Python-based LangChain (placeholder: echo the prompt back).
    // Pass the prompt as argv[1] instead of interpolating it into the Python
    // source text: the original `format!` version produced a Python syntax
    // error (or arbitrary code execution) whenever the prompt contained a
    // double quote or backslash.
    let output = Command::new("python3")
        .arg("-c")
        .arg("import sys; print(\"LangChain Agent Response for: \" + sys.argv[1])")
        .arg(prompt)
        .stdout(Stdio::piped())
        .spawn()
        .expect("failed to execute process")
        .wait_with_output()
        .expect("failed to wait on child");

    io::stdout().write_all(&output.stdout).unwrap();
}

/*
TODO (for future LangChain-style pipeline):
1. Implement trait-based agent components: Tokenizer, Retriever, Reasoner, Generator.
2. Allow config via YAML or TOML to define chain flow.
3. Async pipeline support with Tokio.
4. Optional integration with LLM APIs (OpenAI, Ollama, etc).
5. Rust-native vector search (e.g. using `tantivy`, `qdrant-client`).
*/

8
ai-messenger/Cargo.toml Normal file
View File

@ -0,0 +1,8 @@
[package]
name = "ai-messenger"
version = "0.1.0"
edition = "2021"
[dependencies]
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

View File

@ -0,0 +1,26 @@
{
"personality": {
"kind": "positive",
"strength": 0.8
},
"relationship": {
"trust": 0.7,
"intimacy": 0.6,
"curiosity": 0.5,
"threshold": 1.5
},
"environment": {
"luck_today": 0.9,
"luck_history": [0.9, 0.9, 0.9],
"level": 1
},
"messaging": {
"enabled": true,
"schedule_time": "08:00",
"decay_rate": 0.1,
"templates": [
"おはよう!今日もがんばろう!",
"ねえ、話したいことがあるの。"
]
}
}

11
ai-messenger/src/logic.rs Normal file
View File

@ -0,0 +1,11 @@
use crate::model::AiSystem;
/// Decide whether the AI is currently allowed to send a message.
///
/// Three conditions must all hold: messaging is enabled, the combined
/// relationship score (trust + intimacy + curiosity) has reached the
/// configured threshold, and today's luck is above 0.5.
pub fn should_send(ai: &AiSystem) -> bool {
    if !ai.messaging.enabled {
        return false;
    }
    let rel = &ai.relationship;
    let bond = rel.trust + rel.intimacy + rel.curiosity;
    bond >= rel.threshold && ai.environment.luck_today > 0.5
}

38
ai-messenger/src/main.rs Normal file
View File

@ -0,0 +1,38 @@
mod model;
use model::{AiSystem, Environment, Messaging, Personality, Relationship};
use serde_json;
/// Demo driver: build a sample `AiSystem` and dump it as pretty-printed JSON.
fn main() {
    // テスト用のAIシステム初期化 — each component is built separately,
    // then assembled into the top-level state.
    let personality = Personality {
        kind: "positive".to_string(),
        strength: 0.8,
    };
    let relationship = Relationship {
        trust: 0.7,
        intimacy: 0.6,
        curiosity: 0.5,
        threshold: 1.5,
    };
    let environment = Environment {
        luck_today: 0.9,
        luck_history: vec![0.9, 0.9, 0.9],
        level: 1,
    };
    let messaging = Messaging {
        enabled: true,
        schedule_time: Some("08:00".to_string()),
        decay_rate: 0.1,
        templates: vec![
            "ねえねえ、今日もがんばろうね!".to_string(),
            "そろそろ話したくなっちゃった...".to_string(),
        ],
    };

    let ai = AiSystem {
        personality,
        relationship,
        environment,
        messaging,
    };

    // Serialize to pretty JSON and print it.
    let json = serde_json::to_string_pretty(&ai).unwrap();
    println!("{}", json);
}

38
ai-messenger/src/model.rs Normal file
View File

@ -0,0 +1,38 @@
use serde::{Deserialize, Serialize};
/// Top-level state of the autonomous messaging AI: the four parameter
/// groups that together decide whether, when, and what to send.
#[derive(Debug, Serialize, Deserialize)]
pub struct AiSystem {
    pub personality: Personality,
    pub relationship: Relationship,
    pub environment: Environment,
    pub messaging: Messaging,
}
/// The AI's base disposition.
#[derive(Debug, Serialize, Deserialize)]
pub struct Personality {
    pub kind: String, // e.g., "positive", "negative", "neutral"
    pub strength: f32, // 0.0 - 1.0
}
/// Relationship parameters toward the user; their sum is compared
/// against `threshold` by `logic::should_send`.
#[derive(Debug, Serialize, Deserialize)]
pub struct Relationship {
    pub trust: f32, // 0.0 - 1.0
    pub intimacy: f32, // 0.0 - 1.0
    pub curiosity: f32, // 0.0 - 1.0
    pub threshold: f32, // if sum > threshold, allow messaging
}
/// External-environment inputs (daily luck and level).
#[derive(Debug, Serialize, Deserialize)]
pub struct Environment {
    pub luck_today: f32, // 0.1 - 1.0
    pub luck_history: Vec<f32>, // last 3 values
    pub level: i32, // current mental strength level
}
/// Messaging configuration: the on/off switch, schedule, decay, and the
/// pool of message templates to choose from.
#[derive(Debug, Serialize, Deserialize)]
pub struct Messaging {
    pub enabled: bool,
    pub schedule_time: Option<String>, // e.g., "08:00"
    pub decay_rate: f32, // how quickly emotion fades (0.0 - 1.0)
    pub templates: Vec<String>, // message template variations
}

88
gpt.json Normal file
View File

@ -0,0 +1,88 @@
{
"system_name": "relational_autonomous_ai",
"description": "人格・関係性・環境・時間に基づき、AIが自律的にユーザーにメッセージを送信する仕組み",
"core_components": {
"personality": {
"type": "enum",
"variants": ["positive", "negative", "logical", "emotional", "mixed"],
"parameters": {
"message_trigger_style": "運勢との相互作用による送信傾向",
"decay_rate_modifier": "関係性スコアの時間減衰に対する耐性"
}
},
"relationship": {
"parameters": ["trust", "affection", "intimacy"],
"properties": {
"persistent": true,
"hidden": true,
"irreversible": false,
"decay_over_time": true
},
"decay_function": "exp(-t / strength)"
},
"environment": {
"daily_luck": {
"type": "float",
"range": [0.1, 1.0],
"update": "daily",
"streak_mechanism": {
"trigger": "min_or_max_luck_3_times_in_a_row",
"effect": "personality_strength_roll",
"chance": 0.5
}
}
},
"memory": {
"long_term_memory": "user_relationship_log",
"short_term_context": "recent_interactions",
"usage_in_generation": true
},
"message_trigger": {
"condition": {
"relationship_threshold": {
"trust": 0.8,
"affection": 0.6
},
"time_decay": true,
"environment_luck": "personality_dependent"
},
"timing": {
"based_on": ["time_of_day", "personality", "recent_interaction"],
"modifiers": {
"emotional": "morning or night",
"logical": "daytime"
}
}
},
"message_generation": {
"style_variants": ["thought", "casual", "encouragement", "watchful"],
"influenced_by": ["personality", "relationship", "daily_luck", "memory"],
"llm_integration": true
},
"state_transition": {
"states": ["idle", "ready", "sending", "cooldown"],
"transitions": {
"ready_if": "thresholds_met",
"sending_if": "timing_matched",
"cooldown_after": "message_sent"
}
}
},
"extensions": {
"persistence": {
"database": "sqlite",
"storage_items": ["relationship", "personality_level", "daily_luck_log"]
},
"api": {
"llm": "openai / local LLM",
"mode": "rust_cli",
"external_event_trigger": true
},
"scheduler": {
"async_event_loop": true,
"interval_check": 3600,
"time_decay_check": true
}
},
"note": "このシステムは、感情や関係性のような人間的要素を模倣し、AIが“自然に話しかけてくる”という自律型エージェント設計の土台となる。"
}

BIN
img/ai_r_01.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.9 MiB

BIN
img/ai_r_02.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.8 MiB

9
post/2025-05-18-gpt.json Normal file

File diff suppressed because one or more lines are too long

7
self_talk_ai/Cargo.toml Normal file
View File

@ -0,0 +1,7 @@
[package]
name = "self_talk_ai"
version = "0.1.0"
edition = "2021"
[dependencies]
chrono = "0.4"

56
self_talk_ai/src/main.rs Normal file
View File

@ -0,0 +1,56 @@
use chrono::{Duration, Local, NaiveDateTime};
/// Mutable state of the self-initiating AI: tracks how the relationship
/// score decays over time and when the AI decides to speak up on its own.
#[derive(Debug)]
struct AIState {
    relation_score: f32, // current score; clamped to 0.0..=100.0 by update()
    previous_score: f32, // score before the last update(); used to measure the drop
    decay_rate: f32, // score lost per day of silence
    sensitivity: f32, // must exceed 0.5 for the AI to speak (see should_talk)
    message_threshold: f32, // minimum score drop that triggers a message
    last_message_time: NaiveDateTime, // timestamp of the last conversation
}
impl AIState {
    /// Apply time decay: reduce `relation_score` by `decay_rate` for each
    /// whole day elapsed since `last_message_time` (partial days are
    /// truncated by `num_days`), clamping the result to 0.0..=100.0.
    fn update(&mut self, now: NaiveDateTime) {
        let days_passed = (now - self.last_message_time).num_days() as f32;
        let decay = self.decay_rate * days_passed;
        self.previous_score = self.relation_score;
        self.relation_score -= decay;
        self.relation_score = self.relation_score.clamp(0.0, 100.0);
    }

    /// The AI speaks when the score dropped by more than `message_threshold`
    /// in the last update AND it is sensitive enough (> 0.5).
    fn should_talk(&self) -> bool {
        let delta = self.previous_score - self.relation_score;
        delta > self.message_threshold && self.sensitivity > 0.5
    }

    /// Pick a message by the current relationship-score band.
    /// (Removed the unused `_delta` computation the original carried here.)
    fn generate_message(&self) -> String {
        match self.relation_score as i32 {
            80..=100 => "ふふっ、最近どうしてる?会いたくなっちゃった!".to_string(),
            60..=79 => "ちょっとだけ、さみしかったんだよ?".to_string(),
            40..=59 => "えっと……話せる時間ある?".to_string(),
            _ => "ううん、もしかして私のこと、忘れちゃったのかな……".to_string(),
        }
    }
}
/// Demo driver: simulate four days of silence, apply score decay, and
/// print either the AI's message or a "staying quiet" notice.
fn main() {
    let now = Local::now().naive_local();

    let mut ai = AIState {
        relation_score: 80.0,
        previous_score: 80.0,
        decay_rate: 1.5, // score lost per elapsed day
        sensitivity: 0.8,
        message_threshold: 5.0,
        last_message_time: now - Duration::days(4), // last conversation was 4 days ago
    };

    ai.update(now);

    let line = if ai.should_talk() {
        format!("AI発話: {}", ai.generate_message())
    } else {
        "まだ何も言わずにそっとしているようです...".to_string()
    };
    println!("{}", line);
}