first
This commit is contained in:
commit
722c0973b5
2
.gitignore
vendored
Normal file
2
.gitignore
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
**target
|
||||
**.lock
|
42
agent/main.rs
Normal file
42
agent/main.rs
Normal file
@ -0,0 +1,42 @@
|
||||
use std::env;
|
||||
use std::process::{Command, Stdio};
|
||||
use std::io::{self, Write};
|
||||
|
||||
/// CLI entry point: takes a prompt as the first argument, prints the
/// simulated pipeline stages, then delegates to a Python one-liner as a
/// placeholder for a real LangChain call and echoes its stdout.
///
/// Exits with status 1 (after printing usage to stderr) when no prompt
/// is supplied.
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() < 2 {
        eprintln!("Usage: langchain_cli <prompt>");
        std::process::exit(1);
    }

    let prompt = &args[1];

    // Simulate a pipeline stage: e.g., tokenization, reasoning, response generation
    let stages = ["Tokenize", "Reason", "Generate"];
    for stage in &stages {
        println!("[Stage: {}] Processing...", stage);
    }

    // Example call to Python-based LangChain (placeholder: echoes the prompt back).
    // SECURITY: pass the prompt as its own argv entry and read it via sys.argv[1]
    // instead of format!-interpolating it into the -c source string — a prompt
    // containing `")` would otherwise break the Python syntax or inject
    // arbitrary Python code.
    let output = Command::new("python3")
        .arg("-c")
        .arg("import sys; print(\"LangChain Agent Response for: \" + sys.argv[1])")
        .arg(prompt)
        .stdout(Stdio::piped())
        .spawn()
        .expect("failed to execute process")
        .wait_with_output()
        .expect("failed to wait on child");

    // Surface a failing child process instead of silently succeeding.
    if !output.status.success() {
        eprintln!("python3 exited with status: {}", output.status);
    }

    io::stdout().write_all(&output.stdout).unwrap();
}
|
||||
|
||||
/*
|
||||
TODO (for future LangChain-style pipeline):
|
||||
1. Implement trait-based agent components: Tokenizer, Retriever, Reasoner, Generator.
|
||||
2. Allow config via YAML or TOML to define chain flow.
|
||||
3. Async pipeline support with Tokio.
|
||||
4. Optional integration with LLM APIs (OpenAI, Ollama, etc).
|
||||
5. Rust-native vector search (e.g. using `tantivy`, `qdrant-client`).
|
||||
*/
|
BIN
img/.DS_Store
vendored
Normal file
BIN
img/.DS_Store
vendored
Normal file
Binary file not shown.
BIN
img/ai_r_01.png
Normal file
BIN
img/ai_r_01.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 1.9 MiB |
BIN
img/ai_r_02.png
Normal file
BIN
img/ai_r_02.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 1.8 MiB |
9
post/2025-05-18-gpt.json
Normal file
9
post/2025-05-18-gpt.json
Normal file
File diff suppressed because one or more lines are too long
7
self_talk_ai/Cargo.toml
Normal file
7
self_talk_ai/Cargo.toml
Normal file
@ -0,0 +1,7 @@
|
||||
[package]
|
||||
name = "self_talk_ai"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
chrono = "0.4"
|
56
self_talk_ai/src/main.rs
Normal file
56
self_talk_ai/src/main.rs
Normal file
@ -0,0 +1,56 @@
|
||||
use chrono::{Duration, Local, NaiveDateTime};
|
||||
|
||||
/// Mutable state of the companion AI's relationship model.
///
/// Scores are on a 0.0–100.0 scale; `update` applies time-based decay and
/// records the pre-decay value so `should_talk` can measure the drop.
#[derive(Debug)]
struct AIState {
    // Current affinity toward the user (clamped to 0.0..=100.0 by `update`).
    relation_score: f32,
    // Affinity as it was before the most recent `update` call.
    previous_score: f32,
    // Score lost per day of silence.
    decay_rate: f32,
    // Willingness to speak up; `should_talk` requires this to exceed 0.5.
    sensitivity: f32,
    // Minimum score drop (previous - current) that triggers a message.
    message_threshold: f32,
    // Timestamp of the last conversation with the user.
    last_message_time: NaiveDateTime,
}
|
||||
|
||||
impl AIState {
|
||||
fn update(&mut self, now: NaiveDateTime) {
|
||||
let days_passed = (now - self.last_message_time).num_days() as f32;
|
||||
let decay = self.decay_rate * days_passed;
|
||||
self.previous_score = self.relation_score;
|
||||
self.relation_score -= decay;
|
||||
self.relation_score = self.relation_score.clamp(0.0, 100.0);
|
||||
}
|
||||
|
||||
fn should_talk(&self) -> bool {
|
||||
let delta = self.previous_score - self.relation_score;
|
||||
delta > self.message_threshold && self.sensitivity > 0.5
|
||||
}
|
||||
|
||||
fn generate_message(&self) -> String {
|
||||
let _delta = self.previous_score - self.relation_score;
|
||||
match self.relation_score as i32 {
|
||||
80..=100 => "ふふっ、最近どうしてる?会いたくなっちゃった!".to_string(),
|
||||
60..=79 => "ちょっとだけ、さみしかったんだよ?".to_string(),
|
||||
40..=59 => "えっと……話せる時間ある?".to_string(),
|
||||
_ => "ううん、もしかして私のこと、忘れちゃったのかな……".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let now = Local::now().naive_local();
|
||||
let mut ai = AIState {
|
||||
relation_score: 80.0,
|
||||
previous_score: 80.0,
|
||||
decay_rate: 1.5, // 1日あたりの減少スコア
|
||||
sensitivity: 0.8,
|
||||
message_threshold: 5.0,
|
||||
last_message_time: now - Duration::days(4), // 最後の会話から4日経過
|
||||
};
|
||||
|
||||
ai.update(now);
|
||||
|
||||
if ai.should_talk() {
|
||||
println!("AI発話: {}", ai.generate_message());
|
||||
} else {
|
||||
println!("まだ何も言わずにそっとしているようです...");
|
||||
}
|
||||
}
|
Loading…
x
Reference in New Issue
Block a user