# shell/config/default.toml
# Last updated: 2025-06-02 00:32:00 +09:00
# ai.shell Configuration

[server]
# MCP server endpoint the CLI connects to.
host = "127.0.0.1"
port = 8765
protocol = "http" # http or websocket

[llm]
# Ollama backend used for completions.
ollama_host = "http://localhost:11434"
default_model = "qwen2.5-coder:7b"
timeout_seconds = 300

[logging]
level = "info" # debug, info, warn, error
# NOTE: TOML performs no "~" expansion; the application must expand this path.
file = "~/.config/syui/ai/shell/aishell.log"
max_size_mb = 10
max_backups = 3

[cli]
# NOTE: TOML performs no "~" expansion; the application must expand this path.
history_file = "~/.config/syui/ai/shell/history"
max_history = 1000
prompt = "ai> "
color_output = true

[security]
# Commands the shell is allowed to execute.
allowed_commands = ["ls", "cat", "grep", "find", "git", "cargo", "npm", "python"]
sandbox_mode = false
max_file_size_mb = 50