shell/src/cli/repl.rs

Commit a50fef9182 (Claude)
feat: Implement aishell - AI-powered shell automation tool

Implemented a generic alternative to Claude Code with the following features:

Core Implementation:
- Multi-LLM provider support (OpenAI-compatible APIs)
- Function calling for direct tool execution by the LLM (tool-definition sketch after this list)
- Interactive REPL shell interface
- MCP server mode for Claude Desktop integration
- Shell executor with bash, read, write, and list tools
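
Function calling here follows the OpenAI tools schema. As a rough sketch, a bash tool definition of the kind get_tool_definitions() plausibly returns could look like this; the exact shape used in src/shell is an assumption:

// Hypothetical sketch: one tool definition in the OpenAI function-calling
// format that get_tool_definitions() plausibly emits. The real struct types
// in src/shell are not shown here, so serde_json::Value stands in.
use serde_json::{json, Value};

fn bash_tool_definition() -> Value {
    json!({
        "type": "function",
        "function": {
            "name": "bash",
            "description": "Run a shell command and return its output",
            "parameters": {
                "type": "object",
                "properties": {
                    "command": {
                        "type": "string",
                        "description": "Command to execute"
                    }
                },
                "required": ["command"]
            }
        }
    })
}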

Architecture:
- src/cli: Interactive REPL implementation
- src/llm: LLM provider abstraction (OpenAI-compatible; message-type sketch after this list)
- src/shell: Shell execution engine with duct
- src/mcp: MCP server for Claude Desktop
- src/config: Configuration management
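
The repl.rs below relies on a small message model from src/llm. Judging only from its call sites (Message::system/user/assistant/tool and the tool_calls field), it plausibly resembles the following; the real definitions may differ in detail:

// Inferred sketch of the message types repl.rs uses; the actual
// definitions live in src/llm and are not confirmed by this file.
#[derive(Clone)]
pub struct ToolFunction {
    pub name: String,
    pub arguments: String, // JSON-encoded arguments, as in the OpenAI API
}

#[derive(Clone)]
pub struct ToolCall {
    pub id: String,
    pub function: ToolFunction,
}

#[derive(Clone)]
pub struct Message {
    pub role: String,
    pub content: String,
    pub tool_calls: Option<Vec<ToolCall>>,
    pub tool_call_id: Option<String>,
}

impl Message {
    pub fn system(content: &str) -> Self { Self::of("system", content) }
    pub fn user(content: &str) -> Self { Self::of("user", content) }
    pub fn assistant(content: impl Into<String>) -> Self {
        Self { role: "assistant".into(), content: content.into(), tool_calls: None, tool_call_id: None }
    }
    pub fn tool(content: String, tool_call_id: String) -> Self {
        Self { role: "tool".into(), content, tool_calls: None, tool_call_id: Some(tool_call_id) }
    }
    fn of(role: &str, content: &str) -> Self {
        Self { role: role.into(), content: content.into(), tool_calls: None, tool_call_id: None }
    }
}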

Technical Stack:
- Rust 2021 with tokio async runtime
- clap for CLI framework
- reqwest for HTTP client
- duct for shell execution (usage sketch after this list)
- rustyline for REPL interface
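
duct keeps the shell side compact. A minimal sketch of how the bash tool might shell out with it, assuming a hypothetical run_bash wrapper rather than the repo's actual ShellExecutor:

// Hypothetical duct-based bash runner, in the spirit of src/shell.
// run_bash is an illustrative name, not the repo's API.
use anyhow::Result;
use duct::cmd;

fn run_bash(script: &str) -> Result<String> {
    let output = cmd("bash", ["-c", script])
        .stderr_to_stdout() // merge stderr so the LLM sees all output
        .unchecked()        // don't turn non-zero exit codes into Err
        .read()?;           // capture stdout as a String
    Ok(output)
}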

This tool integrates with aigpt to form AIOS (AI Operating System),
enabling AI-driven OS management and automation.

Based on the aigpt architecture for its CLI and MCP patterns.
2025-11-09 07:12:08 +00:00

149 lines · 4.8 KiB · Rust

use anyhow::{Context, Result};
use rustyline::error::ReadlineError;
use rustyline::DefaultEditor;

use crate::llm::{create_provider, LLMProvider, Message};
use crate::shell::{execute_tool, get_tool_definitions, ShellExecutor};

pub struct Repl {
    llm: Box<dyn LLMProvider>,
    executor: ShellExecutor,
    messages: Vec<Message>,
}

impl Repl {
    pub async fn new(provider: &str, model: Option<&str>) -> Result<Self> {
        let llm = create_provider(provider, model).await?;
        let executor = ShellExecutor::default();

        let system_prompt = Message::system(
            "You are an AI assistant that helps users interact with their system through shell commands. \
             You have access to tools like bash, read, write, and list to help users accomplish their tasks. \
             When a user asks you to do something, use the appropriate tools to complete the task. \
             Always explain what you're doing and show the results to the user."
        );

        Ok(Self {
            llm,
            executor,
            messages: vec![system_prompt],
        })
    }

    pub async fn run(&mut self) -> Result<()> {
        println!("aishell - AI-powered shell automation");
        println!("Type 'exit' or 'quit' to exit, 'clear' to clear history\n");

        let mut rl = DefaultEditor::new()?;

        loop {
            let readline = rl.readline("aishell> ");
            match readline {
                Ok(line) => {
                    let line = line.trim();
                    if line.is_empty() {
                        continue;
                    }
                    if line == "exit" || line == "quit" {
                        println!("Goodbye!");
                        break;
                    }
                    if line == "clear" {
                        self.messages.truncate(1); // Keep only the system message
                        println!("History cleared.");
                        continue;
                    }

                    rl.add_history_entry(line)?;

                    if let Err(e) = self.process_input(line).await {
                        eprintln!("Error: {}", e);
                    }
                }
                Err(ReadlineError::Interrupted) => {
                    // Ctrl-C cancels the current line but keeps the session alive
                    println!("^C");
                    continue;
                }
                Err(ReadlineError::Eof) => {
                    // Ctrl-D ends the session
                    println!("^D");
                    break;
                }
                Err(err) => {
                    eprintln!("Error: {:?}", err);
                    break;
                }
            }
        }

        Ok(())
    }

    pub async fn execute_once(&mut self, prompt: &str) -> Result<()> {
        self.process_input(prompt).await
    }

    async fn process_input(&mut self, input: &str) -> Result<()> {
        // Add user message
        self.messages.push(Message::user(input));

        let tools = get_tool_definitions();

        // Agent loop: keep calling the LLM until it's done (no more tool calls)
        let max_iterations = 10;
        for iteration in 0..max_iterations {
            tracing::debug!("Agent loop iteration {}", iteration + 1);

            let response = self
                .llm
                .chat(self.messages.clone(), Some(tools.clone()))
                .await
                .context("Failed to get LLM response")?;

            // If there are tool calls, execute them
            if let Some(tool_calls) = response.tool_calls {
                tracing::info!("LLM requested {} tool calls", tool_calls.len());

                // Add assistant message with tool calls
                let mut assistant_msg = Message::assistant(response.content.clone());
                assistant_msg.tool_calls = Some(tool_calls.clone());
                self.messages.push(assistant_msg);

                // Execute each tool call
                for tool_call in tool_calls {
                    let tool_name = &tool_call.function.name;
                    let tool_args = &tool_call.function.arguments;

                    println!("\n[Executing tool: {}]", tool_name);

                    let result = match execute_tool(tool_name, tool_args, &self.executor) {
                        Ok(output) => output,
                        Err(e) => format!("Error executing tool: {}", e),
                    };

                    println!("{}", result);

                    // Add tool result message
                    self.messages.push(Message::tool(result, tool_call.id.clone()));
                }

                // Continue the loop to get the next response
                continue;
            }

            // No tool calls, so the LLM is done
            if !response.content.is_empty() {
                println!("\n{}\n", response.content);
                self.messages.push(Message::assistant(response.content));
            }
            break;
        }

        // If max_iterations is exhausted, return without a final reply;
        // the conversation history is still preserved for the next turn.
        Ok(())
    }
}
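
For context, wiring this Repl into a binary entry point could look roughly like this; the provider and model strings are placeholders, and the clap argument parsing is omitted:

// Hypothetical main.rs wiring; provider/model values are examples only.
// use aishell::cli::Repl; // hypothetical import path, depends on crate layout
use anyhow::Result;

#[tokio::main]
async fn main() -> Result<()> {
    // Repl::new takes a provider name and an optional model override,
    // per the constructor above.
    let mut repl = Repl::new("openai", Some("gpt-4o-mini")).await?;
    repl.run().await?;
    Ok(())
}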