This commit is contained in:
syui 2025-06-03 01:51:24 +09:00
parent 4a441279fb
commit 42c85fc820
Signed by: syui
GPG Key ID: 5417CFEBAD92DF56
4 changed files with 126 additions and 3 deletions

View File

@ -130,7 +130,12 @@ aigpt config get providers.ollama.default_model
### 会話する
```bash
# 通常の会話(詳細表示)
aigpt chat "did:plc:xxxxx" "こんにちは、今日はどんな気分?"
# 連続会話モード(シンプルな表示)
aigpt conversation syui --provider ollama --model qwen3:latest
aigpt conv syui --provider ollama --model qwen3:latest # 短縮形
```
### ステータス確認
@ -166,6 +171,53 @@ aigpt maintenance
aigpt relationships
```
### 会話モード詳細
#### 通常の会話コマンド
```bash
# 詳細表示モード(関係性スコア・送信状態等も表示)
aigpt chat syui "メッセージ" --provider ollama --model qwen3:latest
```
出力例:
```
╭─────────────────────────── AI Response ───────────────────────────╮
│ AIの返答がここに表示されます │
╰─────────────────────────────────────────────────────────────────╯
Relationship Status: stranger
Score: 28.00 / 100.0
Transmission: ✗ Disabled
```
#### 連続会話モード
```bash
# シンプルな会話画面(関係性情報なし)
aigpt conversation syui --provider ollama --model qwen3:latest
aigpt conv syui # 短縮形、デフォルト設定使用
```
会話画面:
```
Using ollama with model qwen3:latest
Conversation with AI started. Type 'exit' or 'quit' to end.
syui> こんにちは
AI> こんにちは!今日はどんな日でしたか?
syui> 今日は良い天気でした
AI> 良い天気だと気分も晴れやかになりますね!
syui> exit
Conversation ended.
```
#### 会話モードの特徴
- **通常モード**: 詳細な関係性情報とパネル表示
- **連続モード**: シンプルな `ユーザー> ` / `AI> ` 形式
- **履歴保存**: 両モードとも会話履歴を自動保存
- **コマンド補完**: Tab補完とコマンド履歴機能
### ChatGPTデータインポート
```bash
# ChatGPTの会話履歴をインポート

View File

@ -86,7 +86,7 @@ Recent memories:
self.logger.error(f"Ollama generation failed: {e}")
return self._fallback_response(persona_state)
def chat(self, prompt: str, max_tokens: int = 200) -> str:
def chat(self, prompt: str, max_tokens: int = 2000) -> str:
"""Simple chat interface"""
try:
response = self.client.chat(

View File

@ -67,7 +67,7 @@ def chat(
relationship = persona.relationships.get_or_create_relationship(user_id)
# Display response
console.print(Panel(response, title="AI Response", border_style="cyan"))
console.print(Panel(response, title="AI Response", border_style="cyan", expand=True, width=None))
# Show relationship status
status_color = "green" if relationship.transmission_enabled else "yellow"
@ -917,5 +917,76 @@ def import_chatgpt(
raise typer.Exit(1)
@app.command()
def conversation(
    user_id: str = typer.Argument(..., help="User ID (atproto DID)"),
    data_dir: Optional[Path] = typer.Option(None, "--data-dir", "-d", help="Data directory"),
    model: Optional[str] = typer.Option("qwen2.5", "--model", "-m", help="AI model to use"),
    provider: Optional[str] = typer.Option("ollama", "--provider", help="AI provider (ollama/openai)")
):
    """Simple continuous conversation mode.

    Runs a minimal REPL-style chat loop: reads lines as ``<user_id>> ``,
    sends each one through ``persona.process_interaction`` and prints the
    reply as ``AI> ``. Type 'exit', 'quit' or 'bye' (or send EOF) to end.
    Prompt history is persisted to ``<data_dir>/conversation_history.txt``.
    """
    persona = get_persona(data_dir)

    # Create AI provider; fall back to None (persona default) on failure.
    ai_provider = None
    if provider and model:
        try:
            ai_provider = create_ai_provider(provider=provider, model=model)
            console.print(f"[dim]Using {provider} with model {model}[/dim]")
        except Exception as e:
            console.print(f"[yellow]Warning: Could not create AI provider: {e}[/yellow]")

    # Welcome message (plain string — no interpolation needed)
    console.print("[cyan]Conversation with AI started. Type 'exit' or 'quit' to end.[/cyan]\n")

    # Persistent prompt history for this conversation mode.
    actual_data_dir = data_dir if data_dir else DEFAULT_DATA_DIR
    # Make sure the directory exists so FileHistory can write to it.
    actual_data_dir.mkdir(parents=True, exist_ok=True)
    history_file = actual_data_dir / "conversation_history.txt"
    history = FileHistory(str(history_file))

    while True:
        try:
            # Simple prompt with history-based auto-suggestion.
            user_input = ptk_prompt(
                f"{user_id}> ",
                history=history,
                auto_suggest=AutoSuggestFromHistory()
            ).strip()

            if not user_input:
                continue

            # Exit commands
            if user_input.lower() in ['exit', 'quit', 'bye']:
                console.print("[cyan]Conversation ended.[/cyan]")
                break

            # Process interaction; relationship delta is not shown in this mode.
            response, _ = persona.process_interaction(user_id, user_input, ai_provider)

            # Simple AI response display (no Panel, no extra info)
            console.print(f"AI> {response}\n")

        except KeyboardInterrupt:
            # Ctrl-C does not quit — keep the loop alive and hint at 'exit'.
            console.print("\n[yellow]Use 'exit' or 'quit' to end conversation[/yellow]")
        except EOFError:
            # Ctrl-D ends the conversation cleanly.
            console.print("\n[cyan]Conversation ended.[/cyan]")
            break
        except Exception as e:
            # Report per-turn errors without killing the session.
            console.print(f"[red]Error: {e}[/red]")
# Short alias registered as its own typer command.
@app.command()
def conv(
    user_id: str = typer.Argument(..., help="User ID (atproto DID)"),
    data_dir: Optional[Path] = typer.Option(None, "--data-dir", "-d", help="Data directory"),
    model: Optional[str] = typer.Option("qwen2.5", "--model", "-m", help="AI model to use"),
    provider: Optional[str] = typer.Option("ollama", "--provider", help="AI provider (ollama/openai)")
):
    """Alias for conversation command"""
    # Delegate straight to the full command with explicit keyword arguments.
    conversation(
        user_id=user_id,
        data_dir=data_dir,
        model=model,
        provider=provider,
    )
# Script entry point: dispatch to the typer CLI app.
if __name__ == "__main__":
    app()

View File

@ -160,7 +160,7 @@ AI:"""
# Generate response using AI with full context
try:
response = ai_provider.chat(context_prompt, max_tokens=200)
response = ai_provider.chat(context_prompt, max_tokens=2000)
# Clean up response if it includes the prompt echo
if "AI:" in response: