add metrics
parent 4837de580f
commit f09f3c9144
@@ -6,7 +6,7 @@ edition = "2021"
[dependencies]
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
chrono = "0.4"
chrono = { version = "0.4", features = ["serde"] }
seahorse = "*"
rusqlite = { version = "0.29", features = ["serde_json"] }
shellexpand = "*"
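The only dependency change is enabling chrono's "serde" feature, which provides Serialize/Deserialize impls for DateTime<Utc>; the Metrics struct added later in this commit relies on that for its last_updated field. A minimal sketch of the round-trip this enables (illustrative only, not part of the commit; the Stamp struct is hypothetical):

// Illustrative sketch, not part of the commit: with chrono's "serde" feature,
// a DateTime<Utc> field round-trips through serde_json with no custom impls.
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
struct Stamp {
    last_updated: DateTime<Utc>,
}

fn main() {
    let stamp = Stamp { last_updated: Utc::now() };
    let json = serde_json::to_string(&stamp).unwrap();      // RFC 3339 timestamp string
    let back: Stamp = serde_json::from_str(&json).unwrap(); // parses back into DateTime<Utc>
    assert_eq!(stamp.last_updated, back.last_updated);
}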
src/chat.rs (57 lines changed)
@@ -1,8 +1,10 @@
// src/chat.rs

use seahorse::Context;
use std::fs;
use std::process::Command;
use serde::Deserialize;
use seahorse::Context;
use crate::config::ConfigPaths;
use crate::metrics::{load_metrics, save_metrics, update_metrics_decay};

#[derive(Debug, Clone, PartialEq)]
pub enum Provider {
@@ -27,9 +29,6 @@ impl Provider {
    }
}

use std::fs;
use serde::Deserialize;

#[derive(Deserialize)]
struct OpenAIKey {
    token: String,
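For reference, the OpenAIKey struct above (context lines, unchanged by this commit) deserializes a JSON object with a single token field, which load_openai_api_key then unwraps. A minimal sketch of the shape it expects (the key value is a placeholder):

// Illustrative only: the shape that load_openai_api_key parses.
use serde::Deserialize;

#[derive(Deserialize)]
struct OpenAIKey {
    token: String,
}

fn main() {
    let raw = r#"{ "token": "sk-..." }"#; // placeholder key material
    let parsed: OpenAIKey = serde_json::from_str(raw).unwrap();
    assert_eq!(parsed.token, "sk-...");
}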
@@ -43,10 +42,19 @@ fn load_openai_api_key() -> Option<String> {
    Some(parsed.token)
}

pub fn ask_chat(c: &Context, question: &str) -> String {
pub fn ask_chat(c: &Context, question: &str) -> Option<String> {
    let config = ConfigPaths::new();
    let base_dir = config.base_dir.join("mcp");
    let script_path = base_dir.join("scripts/ask.py");
    let metrics_path = config.base_dir.join("metrics.json");
    let mut metrics = load_metrics(&metrics_path);

    update_metrics_decay(&mut metrics);

    if !metrics.can_send {
        println!("❌ Sending conditions are not met, so no AI message will be sent.");
        return None;
    }

    let python_path = if cfg!(target_os = "windows") {
        base_dir.join(".venv/Scripts/python.exe")
@@ -56,45 +64,48 @@ pub fn ask_chat(c: &Context, question: &str) -> String {

    let ollama_host = c.string_flag("host").ok();
    let ollama_model = c.string_flag("model").ok();
    let api_key = c.string_flag("api-key").ok()
        .or_else(|| load_openai_api_key());

    use crate::chat::Provider;

    let provider_str = c.string_flag("provider").unwrap_or_else(|_| "ollama".to_string());
    let provider = Provider::from_str(&provider_str).unwrap_or(Provider::Ollama);
    //let api_key = c.string_flag("api-key").ok().or_else(|| crate::metrics::load_openai_api_key());
    let api_key = c.string_flag("api-key")
        .ok()
        .or_else(|| load_openai_api_key());

    println!("🔍 Provider in use: {}", provider.as_str());

    // 🛠️ Define the command here
    let mut command = Command::new(python_path);
    command.arg(script_path).arg(question);

    // ✨ Set the environment variables
    command.env("PROVIDER", provider.as_str());

    if let Some(host) = ollama_host {
        command.env("OLLAMA_HOST", host);
    }
    if let Some(model) = ollama_model {
        command.env("OLLAMA_MODEL", model);
        command.env("OLLAMA_MODEL", model.clone());
        command.env("OPENAI_MODEL", model);
    }
    if let Some(api_key) = api_key {
        command.env("OPENAI_API_KEY", api_key);
    command.env("PROVIDER", provider.as_str());

    if let Some(key) = api_key {
        command.env("OPENAI_API_KEY", key);
    }

    let output = command
        .output()
        .expect("❌ Failed to run the MCP chat script");
    let output = command.output().expect("❌ Failed to run the MCP chat script");

    if output.status.success() {
        String::from_utf8_lossy(&output.stdout).to_string()
        let response = String::from_utf8_lossy(&output.stdout).to_string();
        println!("💬 {}", response);

        // Update the metrics after the response
        metrics.intimacy += 0.02;
        metrics.last_updated = chrono::Utc::now();
        save_metrics(&metrics, &metrics_path);
        Some(response)
    } else {
        eprintln!(
            "❌ Execution error: {}\n{}",
            String::from_utf8_lossy(&output.stderr),
            String::from_utf8_lossy(&output.stdout),
        );
        String::from("An error occurred.")
        None
    }
}
@@ -132,7 +132,7 @@ fn set_api_key_cmd() -> Command {
fn chat_cmd() -> Command {
    Command::new("chat")
        .description("Send a question via chat")
        .usage("mcp chat '<question>' --host <OLLAMA_HOST> --model <MODEL> [--provider <ollama|openai>] [--api-key <KEY>]")
        .usage("mcp chat '<question>' --host <OLLAMA_HOST> --model <MODEL> [--provider <ollama|openai>] [--api-key <KEY>] [--repo <REPO_URL>]")
        .flag(
            Flag::new("host", FlagType::String)
                .description("OLLAMA host URL")
@@ -158,16 +158,10 @@ fn chat_cmd() -> Command {
                .description("Specify a Git repository path (reads all of its code)")
                .alias("r"),
        )
        .action(|c: &Context| {
            if let Some(question) = c.args.get(0) {
                let response = ask_chat(c, question);
                println!("💬 Response:\n{}", response);
            } else {
                eprintln!("❗ A question is required: mcp chat 'hello'");
            }
        })
        .action(|c: &Context| {
            let config = ConfigPaths::new();

            // If --repo is given, switch to codebase-reading mode
            if let Ok(repo_url) = c.string_flag("repo") {
                let repo_base = config.base_dir.join("repos");
                let repo_dir = repo_base.join(sanitize_repo_name(&repo_url));
@@ -183,20 +177,30 @@ fn chat_cmd() -> Command {
                println!("✔ The repository already exists: {}", repo_dir.display());
            }

            //let files = read_all_git_files(&repo_dir);
            let files = read_all_git_files(repo_dir.to_str().unwrap());
            let prompt = format!(
                "Please read the following codebase and suggest improvements and next steps:\n{}",
                files
            );

            let response = ask_chat(c, &prompt);
            println!("💡 Suggestions:\n{}", response);
            if let Some(response) = ask_chat(c, &prompt) {
                println!("💬 Suggestions:\n{}", response);
            } else {
            if let Some(question) = c.args.get(0) {
                let response = ask_chat(c, question);
                println!("💬 {}", response);
                eprintln!("❗ No suggestions could be retrieved");
            }
            return;
        }

            // Normal chat handling (when --repo is not given)
            match c.args.get(0) {
                Some(question) => {
                    if let Some(response) = ask_chat(c, question) {
                        println!("💬 Response:\n{}", response);
                    } else {
                        eprintln!("❗ No response could be retrieved");
                    }
                }
                None => {
                    eprintln!("❗ A question is required: mcp chat 'hello'");
                }
            }
@@ -8,6 +8,7 @@ mod commands;
mod config;
mod git;
mod chat;
mod metrics;

use cli::cli_app;
use seahorse::App;
src/metrics.rs (new file, 98 lines)
@@ -0,0 +1,98 @@
// src/metrics.rs
use serde::{Serialize, Deserialize};
use chrono::{DateTime, Utc};
use std::fs;
use std::path::Path;

#[derive(Serialize, Deserialize, Debug)]
pub struct Metrics {
    pub trust: f32,
    pub intimacy: f32,
    pub energy: f32,
    pub can_send: bool,
    pub last_updated: DateTime<Utc>,
}
impl Metrics {
    fn default() -> Self {
        Self {
            trust: 0.5,
            intimacy: 0.5,
            energy: 0.5,
            last_updated: chrono::Utc::now(),
            can_send: true,
        }
    }
    /// Decay the parameters over time and update can_send
    pub fn decay(&mut self) {
        let now = Utc::now();
        let elapsed = now.signed_duration_since(self.last_updated);
        let hours = elapsed.num_minutes() as f32 / 60.0;

        self.trust = decay_param(self.trust, hours);
        self.intimacy = decay_param(self.intimacy, hours);
        self.energy = decay_param(self.energy, hours);

        self.last_updated = now;
        self.can_send = self.trust >= 0.5 && self.intimacy >= 0.5 && self.energy >= 0.5;
    }

    /// Load Metrics from JSON, apply decay, save, and return them
    pub fn load_and_decay(path: &Path) -> Self {
        let mut metrics = if path.exists() {
            let content = fs::read_to_string(path).expect("failed to read metrics.json");
            serde_json::from_str(&content).expect("failed to parse JSON")
        } else {
            println!("⚠️ metrics.json does not exist, creating a new one.");
            Metrics::default()
        };

        metrics.decay();
        metrics.save(path);
        metrics
    }

    /// Save the Metrics to disk
    pub fn save(&self, path: &Path) {
        let data = serde_json::to_string_pretty(self).expect("failed to serialize to JSON");
        fs::write(path, data).expect("failed to write metrics.json");
    }
}
/// Decay a single parameter
fn decay_param(value: f32, hours: f32) -> f32 {
    let decay_rate = 0.01; // decay rate per hour
    (value * (1.0f32 - decay_rate).powf(hours)).clamp(0.0, 1.0)
}
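As an aside (a back-of-the-envelope check, not code from this commit): with the 0.01 hourly rate above, a parameter at 1.0 decays to about 0.89 after 12 hours and crosses the 0.5 can_send threshold after roughly 69 hours, which is what the test below exercises for the 12-hour case.

// Illustrative check of the curve above (same formula, copied so it runs standalone).
fn decay_param(value: f32, hours: f32) -> f32 {
    let decay_rate = 0.01;
    (value * (1.0f32 - decay_rate).powf(hours)).clamp(0.0, 1.0)
}

fn main() {
    assert!((decay_param(1.0, 12.0) - 0.886).abs() < 0.01); // ~0.89 after half a day
    assert!(decay_param(1.0, 68.0) > 0.5);                  // gate still open
    assert!(decay_param(1.0, 70.0) < 0.5);                  // gate closes around ~69 hours
}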
#[cfg(test)]
mod tests {
    use super::*;
    use chrono::Duration;

    #[test]
    fn test_decay_behavior() {
        let mut metrics = Metrics {
            trust: 1.0,
            intimacy: 1.0,
            energy: 1.0,
            can_send: true,
            last_updated: Utc::now() - Duration::hours(12),
        };
        metrics.decay();
        assert!(metrics.trust < 1.0);
        assert!(metrics.can_send); // still true as long as everything stays at or above 0.5 after decay
    }
}
pub fn load_metrics(path: &Path) -> Metrics {
    Metrics::load_and_decay(path)
}

pub fn save_metrics(metrics: &Metrics, path: &Path) {
    metrics.save(path)
}

pub fn update_metrics_decay(metrics: &mut Metrics) {
    metrics.decay()
}
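For context, a caller would drive these three helpers roughly the way ask_chat does in src/chat.rs; the function name below is hypothetical, while the += 0.02 bump and the metrics.json path mirror the commit, and the crate path assumes the mod metrics; declaration added in src/main.rs.

// Hypothetical usage sketch, mirroring what ask_chat does in src/chat.rs.
use crate::metrics::{load_metrics, save_metrics, update_metrics_decay};
use std::path::Path;

fn record_successful_reply(metrics_path: &Path) {
    let mut metrics = load_metrics(metrics_path); // loads (or creates) metrics.json and applies decay
    update_metrics_decay(&mut metrics);           // ask_chat also decays explicitly before checking the gate

    if metrics.can_send {
        metrics.intimacy += 0.02;                 // reward a completed exchange
        metrics.last_updated = chrono::Utc::now();
        save_metrics(&metrics, metrics_path);     // persist back to metrics.json
    }
}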