From 42c85fc82017f5f67bc5d44e2305df08653fee7f Mon Sep 17 00:00:00 2001
From: syui
Date: Tue, 3 Jun 2025 01:51:24 +0900
Subject: [PATCH] add mode

---
 README.md                | 52 ++++++++++++++++++++++++++++
 src/aigpt/ai_provider.py |  2 +-
 src/aigpt/cli.py         | 73 +++++++++++++++++++++++++++++++++++++++-
 src/aigpt/persona.py     |  2 +-
 4 files changed, 126 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index 690e10f..3513526 100644
--- a/README.md
+++ b/README.md
@@ -130,7 +130,12 @@ aigpt config get providers.ollama.default_model
 ### Chat
 
 ```bash
+# Normal chat (detailed display)
 aigpt chat "did:plc:xxxxx" "Hello, how are you feeling today?"
+
+# Continuous conversation mode (simple display)
+aigpt conversation syui --provider ollama --model qwen3:latest
+aigpt conv syui --provider ollama --model qwen3:latest  # short form
 ```
 
 ### Check status
@@ -166,6 +171,53 @@ aigpt maintenance
 aigpt relationships
 ```
 
+### Conversation mode details
+
+#### Normal chat command
+```bash
+# Detailed display mode (also shows relationship score, transmission status, etc.)
+aigpt chat syui "message" --provider ollama --model qwen3:latest
+```
+
+Example output:
+```
+╭────────────────────── AI Response ──────────────────────╮
+│ The AI's reply is displayed here                         │
+╰──────────────────────────────────────────────────────────╯
+
+Relationship Status: stranger
+Score: 28.00 / 100.0
+Transmission: ✗ Disabled
+```
+
+#### Continuous conversation mode
+```bash
+# Simple conversation screen (no relationship info)
+aigpt conversation syui --provider ollama --model qwen3:latest
+aigpt conv syui  # short form, uses default settings
+```
+
+Conversation screen:
+```
+Using ollama with model qwen3:latest
+Conversation with AI started. Type 'exit' or 'quit' to end.
+
+syui> Hello
+AI> Hello! How was your day?
+
+syui> The weather was nice today
+AI> Nice weather really brightens the mood!
+
+syui> exit
+Conversation ended.
+```
+
+#### Conversation mode features
+- **Normal mode**: detailed relationship info with panel display
+- **Continuous mode**: simple `user> ` → `AI> ` format
+- **History saving**: both modes automatically save conversation history
+- **Command completion**: Tab completion and command history
+
 ### ChatGPT data import
 ```bash
 # Import ChatGPT conversation history
diff --git a/src/aigpt/ai_provider.py b/src/aigpt/ai_provider.py
index 780c4bf..6c86e3e 100644
--- a/src/aigpt/ai_provider.py
+++ b/src/aigpt/ai_provider.py
@@ -86,7 +86,7 @@ Recent memories:
             self.logger.error(f"Ollama generation failed: {e}")
             return self._fallback_response(persona_state)
 
-    def chat(self, prompt: str, max_tokens: int = 200) -> str:
+    def chat(self, prompt: str, max_tokens: int = 2000) -> str:
         """Simple chat interface"""
         try:
             response = self.client.chat(
diff --git a/src/aigpt/cli.py b/src/aigpt/cli.py
index cc1723b..7d70294 100644
--- a/src/aigpt/cli.py
+++ b/src/aigpt/cli.py
@@ -67,7 +67,7 @@ def chat(
     relationship = persona.relationships.get_or_create_relationship(user_id)
 
     # Display response
-    console.print(Panel(response, title="AI Response", border_style="cyan"))
+    console.print(Panel(response, title="AI Response", border_style="cyan", expand=True, width=None))
 
     # Show relationship status
     status_color = "green" if relationship.transmission_enabled else "yellow"
@@ -917,5 +917,76 @@ def import_chatgpt(
         raise typer.Exit(1)
 
 
+@app.command()
+def conversation(
+    user_id: str = typer.Argument(..., help="User ID (atproto DID)"),
+    data_dir: Optional[Path] = typer.Option(None, "--data-dir", "-d", help="Data directory"),
+    model: Optional[str] = typer.Option("qwen2.5", "--model", "-m", help="AI model to use"),
+    provider: Optional[str] = typer.Option("ollama", "--provider", help="AI provider (ollama/openai)")
+):
+    """Simple continuous conversation mode"""
+    persona = get_persona(data_dir)
+
+    # Create AI provider
+    ai_provider = None
+    if provider and model:
+        try:
+            ai_provider = create_ai_provider(provider=provider, model=model)
+            console.print(f"[dim]Using {provider} with model {model}[/dim]")
+        except Exception as e:
+            console.print(f"[yellow]Warning: Could not create AI provider: {e}[/yellow]")
+
+    # Welcome message
+    console.print(f"[cyan]Conversation with AI started. Type 'exit' or 'quit' to end.[/cyan]\n")
+
+    # History for conversation mode
+    actual_data_dir = data_dir if data_dir else DEFAULT_DATA_DIR
+    history_file = actual_data_dir / "conversation_history.txt"
+    history = FileHistory(str(history_file))
+
+    while True:
+        try:
+            # Simple prompt
+            user_input = ptk_prompt(
+                f"{user_id}> ",
+                history=history,
+                auto_suggest=AutoSuggestFromHistory()
+            ).strip()
+
+            if not user_input:
+                continue
+
+            # Exit commands
+            if user_input.lower() in ['exit', 'quit', 'bye']:
+                console.print("[cyan]Conversation ended.[/cyan]")
+                break
+
+            # Process interaction
+            response, relationship_delta = persona.process_interaction(user_id, user_input, ai_provider)
+
+            # Simple AI response display (no Panel, no extra info)
+            console.print(f"AI> {response}\n")
+
+        except KeyboardInterrupt:
+            console.print("\n[yellow]Use 'exit' or 'quit' to end conversation[/yellow]")
+        except EOFError:
+            console.print("\n[cyan]Conversation ended.[/cyan]")
+            break
+        except Exception as e:
+            console.print(f"[red]Error: {e}[/red]")
+
+
+# Alias for conversation command
+@app.command()
+def conv(
+    user_id: str = typer.Argument(..., help="User ID (atproto DID)"),
+    data_dir: Optional[Path] = typer.Option(None, "--data-dir", "-d", help="Data directory"),
+    model: Optional[str] = typer.Option("qwen2.5", "--model", "-m", help="AI model to use"),
+    provider: Optional[str] = typer.Option("ollama", "--provider", help="AI provider (ollama/openai)")
+):
+    """Alias for conversation command"""
+    conversation(user_id, data_dir, model, provider)
+
+
 if __name__ == "__main__":
     app()
\ No newline at end of file
diff --git a/src/aigpt/persona.py b/src/aigpt/persona.py
index 1000f40..0c18950 100644
--- a/src/aigpt/persona.py
+++ b/src/aigpt/persona.py
@@ -160,7 +160,7 @@ AI:"""
 
         # Generate response using AI with full context
         try:
-            response = ai_provider.chat(context_prompt, max_tokens=200)
+            response = ai_provider.chat(context_prompt, max_tokens=2000)
 
             # Clean up response if it includes the prompt echo
             if "AI:" in response:
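
For reviewers, a minimal standalone sketch of the prompt_toolkit read-eval loop the new `conversation` command builds on. This is illustrative only: `respond` is a hypothetical stand-in for `persona.process_interaction`, and the history path is an example rather than the package default; the real command additionally wires in the persona, Rich console, and AI provider as shown in the patch above.

```python
# Minimal sketch of the conversation loop (assumes prompt_toolkit is installed).
from prompt_toolkit import prompt
from prompt_toolkit.auto_suggest import AutoSuggestFromHistory
from prompt_toolkit.history import FileHistory


def respond(text: str) -> str:
    """Placeholder responder; the real command calls the persona/AI provider."""
    return f"echo: {text}"


def repl(user_id: str, history_path: str) -> None:
    # FileHistory persists input lines across sessions (up/down-arrow recall).
    history = FileHistory(history_path)
    while True:
        try:
            line = prompt(
                f"{user_id}> ",
                history=history,
                auto_suggest=AutoSuggestFromHistory(),  # inline grey suggestion from history
            ).strip()
        except KeyboardInterrupt:
            # Mirror the patch: Ctrl-C warns instead of exiting.
            print("\nUse 'exit' or 'quit' to end the conversation")
            continue
        except EOFError:
            break  # Ctrl-D ends the session
        if not line:
            continue
        if line.lower() in ("exit", "quit", "bye"):
            break
        print(f"AI> {respond(line)}")


if __name__ == "__main__":
    repl("syui", "conversation_history.txt")
```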