feat: Implement aishell - AI-powered shell automation tool
Implemented a generic alternative to Claude Code with the following features:

Core Implementation:
- Multi-LLM provider support (OpenAI-compatible APIs)
- Function calling for direct tool execution by the LLM
- Interactive REPL shell interface
- MCP server mode for Claude Desktop integration
- Shell executor with bash, read, write, and list tools

Architecture:
- src/cli: Interactive REPL implementation
- src/llm: LLM provider abstraction (OpenAI compatible)
- src/shell: Shell execution engine with duct
- src/mcp: MCP server for Claude Desktop
- src/config: Configuration management

Technical Stack:
- Rust 2021 with tokio async runtime
- clap for CLI framework
- reqwest for HTTP client
- duct for shell execution
- rustyline for REPL interface

This tool integrates with aigpt to form AIOS (AI Operating System), enabling AI-driven OS management and automation. Based on the aigpt architecture for CLI and MCP patterns.
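The diff below only adds the CLI entry point; the src/llm provider abstraction named above is not part of this commit. As a rough sketch of what an OpenAI-compatible, function-calling provider interface could look like (all trait and type names here are hypothetical, not taken from the code):

    // Hypothetical sketch of an OpenAI-compatible provider trait with function calling.
    // All names are illustrative; the real src/llm module is not shown in this commit.
    use anyhow::Result;
    use async_trait::async_trait;
    use serde_json::Value;

    /// A tool the model is allowed to call, described by a JSON Schema for its arguments.
    pub struct ToolSpec {
        pub name: String,
        pub description: String,
        pub parameters: Value, // JSON Schema object
    }

    /// One chat turn ("system", "user", "assistant", or "tool") with its text content.
    pub struct ChatMessage {
        pub role: String,
        pub content: String,
    }

    /// The model either answers in text or asks the host to run one of the declared tools.
    pub enum LlmReply {
        Text(String),
        ToolCall { name: String, arguments: Value },
    }

    #[async_trait]
    pub trait LlmProvider: Send + Sync {
        /// Send the conversation plus the tool definitions and return the model's reply.
        async fn chat(&self, messages: &[ChatMessage], tools: &[ToolSpec]) -> Result<LlmReply>;
    }

A ToolCall-style reply is presumably what lets the REPL dispatch directly to the bash/read/write/list tools and feed the result back to the model, per the "function calling for direct tool execution" bullet.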
src/main.rs (new file, 74 lines)
@@ -0,0 +1,74 @@
use anyhow::Result;
use clap::{Parser, Subcommand};
use tracing_subscriber;

use aishell::cli::Repl;
use aishell::mcp::MCPServer;

#[derive(Parser)]
#[command(name = "aishell")]
#[command(about = "AI-powered shell automation - A generic alternative to Claude Code")]
#[command(version)]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}

#[derive(Subcommand)]
enum Commands {
    /// Start interactive AI shell
    Shell {
        /// LLM provider (openai, anthropic, ollama)
        #[arg(short, long, default_value = "openai")]
        provider: String,

        /// Model name
        #[arg(short, long)]
        model: Option<String>,
    },

    /// Execute a single command via AI
    Exec {
        /// Command prompt
        prompt: String,

        /// LLM provider
        #[arg(short = 'p', long, default_value = "openai")]
        provider: String,
    },

    /// Start MCP server (for Claude Desktop integration)
    Server,
}

#[tokio::main]
async fn main() -> Result<()> {
    // Initialize logging
    tracing_subscriber::fmt()
        .with_env_filter(
            tracing_subscriber::EnvFilter::from_default_env()
                .add_directive(tracing::Level::INFO.into()),
        )
        .init();

    let cli = Cli::parse();

    match cli.command {
        Commands::Shell { provider, model } => {
            let mut repl = Repl::new(&provider, model.as_deref()).await?;
            repl.run().await?;
        }

        Commands::Exec { prompt, provider } => {
            let mut repl = Repl::new(&provider, None).await?;
            repl.execute_once(&prompt).await?;
        }

        Commands::Server => {
            let server = MCPServer::new()?;
            server.run().await?;
        }
    }

    Ok(())
}
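The shell execution engine in src/shell is likewise outside this diff; the commit message only notes that it is built on duct. A minimal sketch of the bash tool under that assumption (run_bash and its exact behavior are illustrative, not confirmed by the commit):

    // Hypothetical sketch of the bash tool in src/shell, assuming duct as the commit
    // message states; run_bash and its behavior are illustrative, not from this diff.
    use anyhow::Result;
    use duct::cmd;

    /// Run a shell command and return its combined stdout/stderr as text for the LLM.
    pub fn run_bash(script: &str) -> Result<String> {
        let output = cmd!("bash", "-lc", script)
            .stderr_to_stdout() // merge stderr so the model also sees error messages
            .unchecked()        // a non-zero exit is not an Err; the model judges the output
            .read()?;           // capture output as a trimmed String
        Ok(output)
    }

With the subcommands defined above, such an executor would ultimately be reached through aishell shell, aishell exec <prompt>, or, over MCP, aishell server.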