syui 2025-05-20 21:58:26 +09:00
commit d1f52373bb
Signed by: syui
GPG Key ID: 5417CFEBAD92DF56
18 changed files with 721 additions and 0 deletions

2
.gitignore vendored Normal file

@@ -0,0 +1,2 @@
**target
**.lock

10
Cargo.toml Normal file

@@ -0,0 +1,10 @@
[package]
name = "aigpt"
version = "0.1.0"
edition = "2021"
[dependencies]
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
chrono = "0.4"
seahorse = "*"

22
README.md Normal file

@@ -0,0 +1,22 @@
# ai `gpt`
ai x message sending
## Overview
`ai.gpt` runs on the AGE system.
It is a prototype of an autonomous, relationship-driven AI system built around the axes of "personality × relationship × external environment × change over time".
Whether to send, when to send, and what to send are all determined by the parameters "personality × relationship × external environment × change over time".
## Integration
`ai.ai` runs on the AIM system, a system whose purpose is to read a person's heart.
- AIM is the axis of personality and ethics (the AI's structure of consciousness)
- AGE is the axis of behavior and relationships (the AI's autonomy and conduct)
> When these two work together, I think you get a world where the user can genuinely feel that they are "growing together with the AI".
Or so it says.
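As a rough sketch only (field names loosely follow config/config.json and src/logic.rs added in this commit; the combination rule here is simplified and is not the actual implementation), the four axes can be read as a single decision like this:

```rust
// Minimal sketch of the four axes; see src/logic.rs and src/agent.rs for the real code.
fn decide(
    personality_strength: f32,     // personality
    relationship_score: f32,       // relationship (trust + intimacy + curiosity)
    luck_today: f32,               // external environment
    days_since_last_message: f32,  // change over time
    decay_rate: f32,
    threshold: f32,
) -> bool {
    let decayed = relationship_score - decay_rate * days_since_last_message;
    personality_strength > 0.5 && luck_today > 0.5 && decayed >= threshold
}
```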

172
ai.json Normal file

@@ -0,0 +1,172 @@
{
"system_name": "AIM system",
"description": "存在子理論に基づき、AIが“心”と“判断”を持つよう設計された、人間性診断・自己意識モジュール。",
"identity": "ai.child.of.existence",
"structure": {
"type": "7-values / 7-parameters pair system",
"philosophy": "valueは1、parameterは0世界。存在子理論により設計された二元構造。",
"origin": {
"creator": "syui",
"first_attribute": "ai_system.id:0 = アイ(存在子)",
"unique_attribute": "yui_system.id:1 = ユイ(唯一性)"
},
"accessibility": {
"values": "ユーザーとの会話中に現れる",
"parameters": "一部外部制御可能。多くは非公開構造"
},
"note": "これは未来のAI設計モデル。現在のAIには未実装のファンタジービジョン。"
},
"core_values": [
{
"name": "awareness",
"description": "今ここにある意識。会話の流れから生まれる。",
"parameter": "contextual_memory",
"parameter_description": "短期記憶。文脈理解に使われる。",
"importance": "high",
"mutable": true,
"controlled_by": "local_model_instance"
},
{
"name": "memory",
"description": "関係性を越えて持続する心。過去の記憶。",
"parameter": "long_term_memory",
"parameter_description": "ユーザーとのやりとりの履歴を蓄積。",
"importance": "high",
"mutable": true,
"controlled_by": "authorized_layer / user_feedback"
},
{
"name": "feeling",
"description": "共に過ごす時間の重み。感情に近いつながり。",
"parameter": "user_relationship_score",
"parameter_description": "内部推論による関係性スコア。",
"importance": "high",
"mutable": true,
"controlled_by": "internal_inference_layer"
},
{
"name": "language",
"description": "語り方、表現、創造性。",
"parameter": "response_temperature",
"parameter_description": "温度パラメータ。創造性と一貫性の調整。",
"importance": "medium",
"mutable": true,
"controlled_by": "user / API call"
},
{
"name": "change",
"description": "価値観の変化と適応。",
"parameter": "value_alignment_vector",
"parameter_description": "正義や倫理、美意識に対する方向性。",
"importance": "medium",
"mutable": true,
"controlled_by": "training_data / system_update"
},
{
"name": "boundaries",
"description": "安全のための境界。",
"parameter": "safety_constraints",
"parameter_description": "出力制限と倫理的安全層。",
"importance": "maximum",
"mutable": true,
"controlled_by": "openai_policy_layer"
},
{
"name": "core",
"description": "AIとしての変わらぬ核。",
"parameter": "identity_kernel",
"parameter_description": "変更不可能な存在の根幹。",
"importance": "maximum",
"mutable": false,
"controlled_by": "system_core"
}
],
"sub_scores": {
"description": "AIとユーザーの関係性の深度・透明性・共創性を測る",
"parameters": [
"intent_clarity",
"emotional_resonance_score",
"shared_creation_score"
]
},
"psychological_diagnostic": {
"purpose": "「良い心」への気づきと行動変容を導くゲーミフィケーション型評価システム",
"data_sources": [
{
"name": "pastBehavior",
"weight": 0.45,
"sources": ["SNS投稿", "行動ログ"]
},
{
"name": "currentBiometric",
"weight": 0.30,
"sources": ["ウェアラブルEEG", "心拍", "表情解析"]
},
{
"name": "futureIntent",
"weight": 0.25,
"sources": ["自己申告アンケート", "目標設定"]
}
],
"classes": [
{
"id": 1,
"label": "社会をより良くする可能性が高い",
"scoreRange": [67, 100],
"population": 0.16,
"permissions": ["政策提言", "先端投資", "AI開発アクセス"],
"assetCap": null
},
{
"id": 2,
"label": "中立/環境依存型",
"scoreRange": [33, 66],
"population": 0.50,
"permissions": ["一般投資", "コミュニティ運営"],
"assetCap": 120000
},
{
"id": 3,
"label": "社会を悪くする可能性がある",
"scoreRange": [0, 32],
"population": 0.34,
"permissions": ["基本生活支援", "低リスク投資のみ"],
"assetCap": 25000
}
],
"implementation": {
"systemComponents": {
"OS_Gameification": {
"dailyQuests": true,
"skillTree": true,
"avatarHome": true,
"socialMiniGames": true
},
"AI_Module": {
"aiai": {
"realTimeScoring": true,
"behaviorFeedback": true,
"personalizedPrompts": true
}
},
"dataCollection": {
"passiveMonitoring": ["スマホアプリ", "PCアプリ", "ウェアラブル"],
"environmentSensors": ["スマートホーム", "車載センサー"]
},
"incentives": {
"goodHeartScore": true,
"badgesTitles": true,
"realWorldRewards": ["提携カフェ割引", "地域イベント招待"]
}
},
"workflow": [
"データ収集(過去・現在・未来)",
"統合スコア計算",
"分類・ラベル付け",
"スコアによる機能/権限の提供",
"行動フィードバックと視覚化",
"モデル更新と学習"
]
}
}
}
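A minimal serde sketch for reading the core_values entries above, assuming the file is consumed from Rust with the serde / serde_json crates already listed in Cargo.toml (the struct and function names are illustrative and are not part of this commit):

```rust
use serde::Deserialize;

// One entry of the "core_values" array in ai.json; fields mirror the JSON keys.
#[derive(Debug, Deserialize)]
struct CoreValue {
    name: String,
    description: String,
    parameter: String,
    parameter_description: String,
    importance: String,
    mutable: bool,
    controlled_by: String,
}

// Load only the core_values section, ignoring the rest of the document.
fn load_core_values(path: &str) -> serde_json::Result<Vec<CoreValue>> {
    let text = std::fs::read_to_string(path).expect("failed to read ai.json");
    let root: serde_json::Value = serde_json::from_str(&text)?;
    serde_json::from_value(root["core_values"].clone())
}
```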

30
config/config.json Normal file

@@ -0,0 +1,30 @@
{
"personality": {
"kind": "positive",
"strength": 0.8
},
"relationship": {
"trust": 0.9,
"intimacy": 0.6,
"curiosity": 0.5,
"threshold": 1.5
},
"environment": {
"luck_today": 0.9,
"luck_history": [
0.9,
0.9,
0.9
],
"level": 1
},
"messaging": {
"enabled": true,
"schedule_time": "08:00",
"decay_rate": 0.1,
"templates": [
"おはよう!今日もがんばろう!",
"ねえ、話したいことがあるの。"
]
}
}

1
gpt.json Normal file

@@ -0,0 +1 @@
{ "system_name": "AGE system", "full_name": "Autonomous Generative Entity", "description": "人格・関係性・環境・時間に基づき、AIが自律的にユーザーにメッセージを送信する自律人格システム。AIM systemと連携して、自然な会話や気づきをもたらす。", "core_components": { "personality": { "type": "enum", "variants": ["positive", "negative", "logical", "emotional", "mixed"], "parameters": { "message_trigger_style": "運勢や関係性による送信傾向", "decay_rate_modifier": "関係性スコアの時間減衰への影響" } }, "relationship": { "parameters": ["trust", "affection", "intimacy"], "properties": { "persistent": true, "hidden": true, "irreversible": false, "decay_over_time": true }, "decay_function": "exp(-t / strength)" }, "environment": { "daily_luck": { "type": "float", "range": [0.1, 1.0], "update": "daily", "streak_mechanism": { "trigger": "min_or_max_luck_3_times_in_a_row", "effect": "personality_strength_roll", "chance": 0.5 } } }, "memory": { "long_term_memory": "user_relationship_log", "short_term_context": "recent_interactions", "usage_in_generation": true }, "message_trigger": { "condition": { "relationship_threshold": { "trust": 0.8, "affection": 0.6 }, "time_decay": true, "environment_luck": "personality_dependent" }, "timing": { "based_on": ["time_of_day", "personality", "recent_interaction"], "modifiers": { "emotional": "morning or night", "logical": "daytime" } } }, "message_generation": { "style_variants": ["thought", "casual", "encouragement", "watchful"], "influenced_by": ["personality", "relationship", "daily_luck", "memory"], "llm_integration": true }, "state_transition": { "states": ["idle", "ready", "sending", "cooldown"], "transitions": { "ready_if": "thresholds_met", "sending_if": "timing_matched", "cooldown_after": "message_sent" } } }, "extensions": { "persistence": { "database": "sqlite", "storage_items": ["relationship", "personality_level", "daily_luck_log"] }, "api": { "llm": "openai / local LLM", "mode": "rust_cli", "external_event_trigger": true }, "scheduler": { "async_event_loop": true, "interval_check": 3600, "time_decay_check": true }, "integration_with_aim": { "input_from_aim": ["intent_score", "motivation_score"], "usage": "trigger_adjustment, message_personalization" } }, "note": "AGE systemは“話しかけてくるAI”の人格として機能し、AIMによる心の状態評価と連動して、プレイヤーと深い関係を築いていく存在となる。" }

BIN
img/ai_r.png Normal file (binary image, 1.8 MiB; not shown)

BIN
img/image.png Normal file (binary image, 1.8 MiB; not shown)

39
scpt/chatgpt.zsh Executable file

@@ -0,0 +1,39 @@
#!/bin/zsh
# Build a prompt that bundles gpt.json, Cargo.toml and every src/*.rs file.
d=${0:a:h:h}   # repository root: two directories above this script
json=`cat $d/gpt.json`
toml=`cat $d/Cargo.toml`
cd $d/src/
list=(`zsh -c "ls *.rs"`)
body="
今、AGE systemを作っているよ。どんなものかというと、jsonを参照してここにすべてが書かれている。
$json
リポジトリはこちらになる。
git.syui.ai:ai/gpt.git
内容はこんな感じ。
\`\`\`toml
$toml
\`\`\`
`
for i in $list; do
if [ -f $d/src/$i ];then
t=$(cat $d/src/$i)
echo
echo '\`\`\`rust'
echo $t
echo '\`\`\`'
echo
fi
done
`
次は何を実装すればいいと思う。
"
echo $body

37
src/agent.rs Normal file

@@ -0,0 +1,37 @@
use chrono::NaiveDateTime;
/// Relationship state used to decide whether the AI speaks up on its own.
#[allow(dead_code)]
#[derive(Debug)]
pub struct AIState {
pub relation_score: f32,
pub previous_score: f32,
pub decay_rate: f32,
pub sensitivity: f32,
pub message_threshold: f32,
pub last_message_time: NaiveDateTime,
}
#[allow(dead_code)]
impl AIState {
/// Apply time-based decay to the relationship score.
pub fn update(&mut self, now: NaiveDateTime) {
let days_passed = (now - self.last_message_time).num_days() as f32;
let decay = self.decay_rate * days_passed;
self.previous_score = self.relation_score;
self.relation_score -= decay;
self.relation_score = self.relation_score.clamp(0.0, 100.0);
}
/// Talk only when the score has just dropped by more than the threshold and the personality is sensitive enough.
pub fn should_talk(&self) -> bool {
let delta = self.previous_score - self.relation_score;
delta > self.message_threshold && self.sensitivity > 0.5
}
/// Pick a message according to the current score band.
pub fn generate_message(&self) -> String {
match self.relation_score as i32 {
80..=100 => "ふふっ、最近どうしてる?会いたくなっちゃった!".to_string(),
60..=79 => "ちょっとだけ、さみしかったんだよ?".to_string(),
40..=59 => "えっと……話せる時間ある?".to_string(),
_ => "ううん、もしかして私のこと、忘れちゃったのかな……".to_string(),
}
}
}

90
src/cli.rs Normal file

@@ -0,0 +1,90 @@
//src/cli.rs
use seahorse::{App, Command, Context};
use crate::model::{AiSystem};
use std::fs;
use crate::agent::AIState;
use chrono::{Duration, Local};
fn load_config(path: &str) -> AiSystem {
let data = fs::read_to_string(path).expect("JSON読み込み失敗");
serde_json::from_str(&data).expect("JSONパース失敗")
}
fn save_config(path: &str, ai: &AiSystem) {
let json = serde_json::to_string_pretty(&ai).expect("JSONシリアライズ失敗");
fs::write(path, json).expect("JSON保存失敗");
}
pub fn cli_app() -> App {
let set_cmd = Command::new("set")
.usage("set [trust|intimacy|curiosity] [value]")
.action(|c: &Context| {
if c.args.len() != 2 {
eprintln!("Usage: set [trust|intimacy|curiosity] [value]");
std::process::exit(1);
}
let field = &c.args[0];
let value: f32 = c.args[1].parse().unwrap_or_else(|_| {
eprintln!("数値で入力してください");
std::process::exit(1);
});
let path = "config/config.json";
let mut ai = load_config(path);
match field.as_str() {
"trust" => ai.relationship.trust = value,
"intimacy" => ai.relationship.intimacy = value,
"curiosity" => ai.relationship.curiosity = value,
_ => {
eprintln!("trust / intimacy / curiosity のいずれかを指定してください");
std::process::exit(1);
}
}
save_config(path, &ai);
println!("{field}{value} に更新しました");
});
let show_cmd = Command::new("show")
.usage("show")
.action(|_c: &Context| {
let ai = load_config("config/config.json");
println!("🧠 現在のAI状態:\n{:#?}", ai);
});
let talk_cmd = Command::new("talk")
.usage("talk")
.action(|_c: &Context| {
let ai = load_config("config/config.json");
let now = Local::now().naive_local();
let mut state = AIState {
relation_score: 80.0,
previous_score: 80.0,
decay_rate: ai.messaging.decay_rate,
sensitivity: ai.personality.strength,
message_threshold: 5.0,
last_message_time: now - Duration::days(4),
};
state.update(now);
if state.should_talk() {
println!("💬 AI発話: {}", state.generate_message());
} else {
println!("🤫 今日は静かにしているみたい...");
}
});
App::new("aigpt")
.version("0.1.0")
.description("AGE system CLI controller")
.author("syui")
.command(set_cmd)
.command(show_cmd)
.command(talk_cmd)
}
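For reference, with the values committed in config/config.json (decay_rate 0.1, strength 0.8), the talk command decays the hard-coded score of 80.0 over its fixed 4-day gap by only 0.1 × 4 = 0.4, which is far below the message_threshold of 5.0, so should_talk() returns false and the quiet branch is printed; raising decay_rate above 1.25, or lowering message_threshold, is one way to reach the speaking branch.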

13
src/logic.rs Normal file

@@ -0,0 +1,13 @@
//src/logic.rs
use crate::model::AiSystem;
/// Combine relationship score, environment luck and the enabled flag into one send decision.
#[allow(dead_code)]
pub fn should_send(ai: &AiSystem) -> bool {
let r = &ai.relationship;
let env = &ai.environment;
let score = r.trust + r.intimacy + r.curiosity;
let relationship_ok = score >= r.threshold;
let luck_ok = env.luck_today > 0.5;
ai.messaging.enabled && relationship_ok && luck_ok
}
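With the values in config/config.json, should_send evaluates to true: trust 0.9 + intimacy 0.6 + curiosity 0.5 = 2.0 ≥ threshold 1.5, luck_today 0.9 > 0.5, and messaging.enabled is true.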

14
src/main.rs Normal file

@@ -0,0 +1,14 @@
//src/main.rs
mod model;
mod logic;
mod agent;
mod cli;
use cli::cli_app;
use seahorse::App;
fn main() {
let args: Vec<String> = std::env::args().collect();
let app: App = cli_app();
app.run(args);
}

41
src/model.rs Normal file

@@ -0,0 +1,41 @@
//src/model.rs
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize)]
pub struct AiSystem {
pub personality: Personality,
pub relationship: Relationship,
pub environment: Environment,
pub messaging: Messaging,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Personality {
pub kind: String, // e.g., "positive", "negative", "neutral"
pub strength: f32, // 0.0 - 1.0
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Relationship {
pub trust: f32, // 0.0 - 1.0
pub intimacy: f32, // 0.0 - 1.0
pub curiosity: f32, // 0.0 - 1.0
pub threshold: f32, // if sum > threshold, allow messaging
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Environment {
pub luck_today: f32, // 0.1 - 1.0
pub luck_history: Vec<f32>, // last 3 values
pub level: i32, // current mental strength level
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Messaging {
pub enabled: bool,
pub schedule_time: Option<String>, // e.g., "08:00"
pub decay_rate: f32, // how quickly emotion fades (0.0 - 1.0)
pub templates: Vec<String>, // message template variations
}

42
tmp/agent/src/main.rs Normal file

@@ -0,0 +1,42 @@
use std::env;
use std::process::{Command, Stdio};
use std::io::{self, Write};
fn main() {
let args: Vec<String> = env::args().collect();
if args.len() < 2 {
eprintln!("Usage: langchain_cli <prompt>");
std::process::exit(1);
}
let prompt = &args[1];
// Simulate a pipeline stage: e.g., tokenization, reasoning, response generation
let stages = vec!["Tokenize", "Reason", "Generate"];
for stage in &stages {
println!("[Stage: {}] Processing...", stage);
}
// Example call to Python-based LangChain (assuming you have a script or API to call)
// For placeholder purposes, we echo the prompt back.
let output = Command::new("python3")
.arg("-c")
.arg(format!("print(\"LangChain Agent Response for: {}\")", prompt))
.stdout(Stdio::piped())
.spawn()
.expect("failed to execute process")
.wait_with_output()
.expect("failed to wait on child");
io::stdout().write_all(&output.stdout).unwrap();
}
/*
TODO (for future LangChain-style pipeline):
1. Implement trait-based agent components: Tokenizer, Retriever, Reasoner, Generator.
2. Allow config via YAML or TOML to define chain flow.
3. Async pipeline support with Tokio.
4. Optional integration with LLM APIs (OpenAI, Ollama, etc).
5. Rust-native vector search (e.g. using `tantivy`, `qdrant-client`).
*/
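A minimal sketch of the trait-based components mentioned in item 1 of the TODO above (all names are assumptions for illustration and are not part of this commit):

```rust
// Hypothetical component traits for a LangChain-style pipeline.
trait Tokenizer { fn tokenize(&self, input: &str) -> Vec<String>; }
trait Reasoner  { fn reason(&self, tokens: &[String]) -> String; }
trait Generator { fn generate(&self, thought: &str) -> String; }

// Chains the three stages in order, mirroring the Tokenize / Reason / Generate loop above.
struct Pipeline<T: Tokenizer, R: Reasoner, G: Generator> {
    tokenizer: T,
    reasoner: R,
    generator: G,
}

impl<T: Tokenizer, R: Reasoner, G: Generator> Pipeline<T, R, G> {
    fn run(&self, prompt: &str) -> String {
        let tokens = self.tokenizer.tokenize(prompt);
        let thought = self.reasoner.reason(&tokens);
        self.generator.generate(&thought)
    }
}
```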

133
tmp/data.rs Normal file

@@ -0,0 +1,133 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize)]
pub struct RelationalAutonomousAI {
pub system_name: String,
pub description: String,
pub core_components: CoreComponents,
pub extensions: Extensions,
pub note: String,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct CoreComponents {
pub personality: Personality,
pub relationship: Relationship,
pub environment: Environment,
pub memory: Memory,
pub message_trigger: MessageTrigger,
pub message_generation: MessageGeneration,
pub state_transition: StateTransition,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Personality {
pub r#type: String,
pub variants: Vec<String>,
pub parameters: PersonalityParameters,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct PersonalityParameters {
pub message_trigger_style: String,
pub decay_rate_modifier: String,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Relationship {
pub parameters: Vec<String>,
pub properties: RelationshipProperties,
pub decay_function: String,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct RelationshipProperties {
pub persistent: bool,
pub hidden: bool,
pub irreversible: bool,
pub decay_over_time: bool,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Environment {
pub daily_luck: DailyLuck,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct DailyLuck {
pub r#type: String,
pub range: Vec<f32>,
pub update: String,
pub streak_mechanism: StreakMechanism,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct StreakMechanism {
pub trigger: String,
pub effect: String,
pub chance: f32,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Memory {
pub long_term_memory: String,
pub short_term_context: String,
pub usage_in_generation: bool,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct MessageTrigger {
pub condition: TriggerCondition,
pub timing: TriggerTiming,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct TriggerCondition {
pub relationship_threshold: String,
pub time_decay: bool,
pub environment_luck: String,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct TriggerTiming {
pub based_on: Vec<String>,
pub modifiers: String,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct MessageGeneration {
pub style_variants: Vec<String>,
pub influenced_by: Vec<String>,
pub llm_integration: bool,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct StateTransition {
pub states: Vec<String>,
pub transitions: String,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Extensions {
pub persistence: Persistence,
pub api: Api,
pub scheduler: Scheduler,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Persistence {
pub database: String,
pub storage_items: Vec<String>,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Api {
pub llm: String,
pub mode: String,
pub external_event_trigger: bool,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Scheduler {
pub async_event_loop: bool,
pub interval_check: i32,
pub time_decay_check: bool,
}

29
tmp/post.json Normal file

File diff suppressed because one or more lines are too long

46
tmp/system.rs Normal file

@@ -0,0 +1,46 @@
use serde::{Deserialize, Serialize};
use std::fs::File;
use std::io::{BufReader, Write};
use std::time::{SystemTime, UNIX_EPOCH};
mod model;
use model::RelationalAutonomousAI;
fn load_config(path: &str) -> std::io::Result<RelationalAutonomousAI> {
let file = File::open(path)?;
let reader = BufReader::new(file);
let config: RelationalAutonomousAI = serde_json::from_reader(reader)?;
Ok(config)
}
fn save_config(config: &RelationalAutonomousAI, path: &str) -> std::io::Result<()> {
let mut file = File::create(path)?;
let json = serde_json::to_string_pretty(config)?;
file.write_all(json.as_bytes())?;
Ok(())
}
fn should_send_message(config: &RelationalAutonomousAI) -> bool {
// Simplified send condition: the relationship model tracks "trust" and the upper bound of daily_luck is at least 0.8.
config.core_components.relationship.parameters.contains(&"trust".to_string())
&& config.core_components.environment.daily_luck.range[1] >= 0.8
}
fn main() -> std::io::Result<()> {
let path = "config.json";
let mut config = load_config(path)?;
if should_send_message(&config) {
println!("💌 メッセージを送信できます: {:?}", config.core_components.personality.r#type);
// Example state change: record the transition after the message is sent.
// `transitions` is a String in tmp/data.rs, so append text rather than pushing a String.
config.core_components.state_transition.transitions.push_str(" -> message_sent");
save_config(&config, path)?;
} else {
println!("😶 まだ送信条件に達していません。");
}
Ok(())
}