add openai

2025-05-21 20:40:27 +09:00
parent f94b377130
commit 1122538c73
5 changed files with 151 additions and 22 deletions


@@ -2,9 +2,47 @@
use seahorse::Context;
use std::process::Command;
//use std::env;
use crate::config::ConfigPaths;
#[derive(Debug, Clone, PartialEq)]
pub enum Provider {
OpenAI,
Ollama,
}
impl Provider {
pub fn from_str(s: &str) -> Option<Self> {
match s.to_lowercase().as_str() {
"openai" => Some(Provider::OpenAI),
"ollama" => Some(Provider::Ollama),
_ => None,
}
}
pub fn as_str(&self) -> &'static str {
match self {
Provider::OpenAI => "openai",
Provider::Ollama => "ollama",
}
}
}
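Parsing is case-insensitive and unknown names return None, which ask_chat below maps to the Ollama default. A minimal sketch of that contract (illustrative test only, not part of the commit; assumes it sits in the same module as Provider):

#[cfg(test)]
mod provider_tests {
    use super::Provider;

    #[test]
    fn parses_provider_names_case_insensitively() {
        assert_eq!(Provider::from_str("OpenAI"), Some(Provider::OpenAI));
        assert_eq!(Provider::from_str("ollama"), Some(Provider::Ollama));
        // Unknown names yield None; ask_chat then falls back to Provider::Ollama.
        assert_eq!(Provider::from_str("unknown"), None);
    }
}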
use std::fs;
use serde::Deserialize;
#[derive(Deserialize)]
struct OpenAIKey {
token: String,
}
// Load the OpenAI API key saved by `mcp set-api` (openai.json under the config base dir).
fn load_openai_api_key() -> Option<String> {
let config = ConfigPaths::new();
let path = config.base_dir.join("openai.json");
let data = fs::read_to_string(path).ok()?;
let parsed: OpenAIKey = serde_json::from_str(&data).ok()?;
Some(parsed.token)
}
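For reference, this reads the same single-field layout that set-api writes further down (the exact location depends on ConfigPaths::base_dir; the key value is a placeholder):

openai.json:
{
  "token": "sk-..."
}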
pub fn ask_chat(c: &Context, question: &str) {
let config = ConfigPaths::new();
let base_dir = config.base_dir.join("mcp");
@@ -18,17 +56,34 @@ pub fn ask_chat(c: &Context, question: &str) {
let ollama_host = c.string_flag("host").ok();
let ollama_model = c.string_flag("model").ok();
let api_key = c.string_flag("api-key").ok()
.or_else(|| load_openai_api_key());
use crate::chat::Provider;
let provider_str = c.string_flag("provider").unwrap_or_else(|_| "ollama".to_string());
let provider = Provider::from_str(&provider_str).unwrap_or(Provider::Ollama);
println!("🔍 使用プロバイダー: {}", provider.as_str());
// 🛠️ command の定義をここで行う
let mut command = Command::new(python_path);
command.arg(script_path).arg(question);
// ✨ Set the environment variables
command.env("PROVIDER", provider.as_str());
if let Some(host) = ollama_host {
command.env("OLLAMA_HOST", host);
}
if let Some(model) = ollama_model {
command.env("OLLAMA_MODEL", model);
}
if let Some(api_key) = api_key {
command.env("OPENAI_API_KEY", api_key);
}
// 🔁 Run the script
let output = command
.output()
.expect("❌ MCPチャットスクリプトの実行に失敗しました");


@@ -1,13 +1,14 @@
// src/commands/mcp.rs
use seahorse::{Command, Context, Flag, FlagType};
use crate::chat::ask_chat;
use crate::git::{git_init, git_status};
use std::fs;
use std::path::PathBuf;
use crate::config::ConfigPaths;
use std::process::Command as OtherCommand;
use serde_json::json;
pub fn mcp_setup() {
let config = ConfigPaths::new();
@@ -106,12 +107,52 @@ pub fn mcp_setup() {
}
}
fn set_api_key_cmd() -> Command {
Command::new("set-api")
.description("OpenAI APIキーを設定")
.usage("mcp set-api --api <API_KEY>")
.flag(Flag::new("api", FlagType::String).description("OpenAI APIキー").alias("a"))
.action(|c: &Context| {
if let Ok(api_key) = c.string_flag("api") {
let config = ConfigPaths::new();
let path = config.base_dir.join("openai.json");
let json_data = json!({ "token": api_key });
if let Err(e) = fs::write(&path, serde_json::to_string_pretty(&json_data).unwrap()) {
eprintln!("❌ ファイル書き込み失敗: {}", e);
} else {
println!("✅ APIキーを保存しました: {}", path.display());
}
} else {
eprintln!("❗ APIキーを --api で指定してください");
}
})
}
fn chat_cmd() -> Command {
Command::new("chat")
.description("チャットで質問を送る")
.usage("mcp chat '質問内容' --host <OLLAMA_HOST> --model <OLLAMA_MODEL>")
.flag(Flag::new("host", FlagType::String).description("OLLAMAホストのURL"))
.flag(Flag::new("model", FlagType::String).description("OLLAMAモデル名"))
.usage("mcp chat '質問内容' --host <OLLAMA_HOST> --model <MODEL> [--provider <ollama|openai>] [--api-key <KEY>]")
.flag(
Flag::new("host", FlagType::String)
.description("OLLAMAホストのURL")
.alias("H"),
)
.flag(
Flag::new("model", FlagType::String)
.description("モデル名 (OLLAMA_MODEL / OPENAI_MODEL)")
.alias("m"),
)
.flag(
Flag::new("provider", FlagType::String)
.description("使用するプロバイダ (ollama / openai)")
.alias("p"),
)
.flag(
Flag::new("api-key", FlagType::String)
.description("OpenAI APIキー")
.alias("k"),
)
.action(|c: &Context| {
if let Some(question) = c.args.get(0) {
ask_chat(c, question);
@@ -157,4 +198,5 @@ pub fn mcp_cmd() -> Command {
.command(init_cmd())
.command(status_cmd())
.command(setup_cmd())
.command(set_api_key_cmd())
}
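Taken together, the intended flow is roughly the following (illustrative invocations assembled from the usage strings above; the API key argument is a placeholder):

mcp set-api --api <API_KEY>
mcp chat 'question' --provider openai
mcp chat 'question' --host <OLLAMA_HOST> --model <OLLAMA_MODEL>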