From 36863e4d9f7f73878d1eae50d3db0da02a462f26 Mon Sep 17 00:00:00 2001
From: syui
Date: Fri, 13 Jun 2025 16:49:42 +0900
Subject: [PATCH] fix loading

---
 oauth/.env.production           |  2 +-
 oauth/src/components/AIChat.tsx | 34 +++++++++++++++++++++------------
 run.zsh                         |  5 +----
 3 files changed, 24 insertions(+), 17 deletions(-)

diff --git a/oauth/.env.production b/oauth/.env.production
index 311e92a..8e0462c 100644
--- a/oauth/.env.production
+++ b/oauth/.env.production
@@ -20,5 +20,5 @@ VITE_AI_ASK_AI=true
 VITE_AI_PROVIDER=ollama
 VITE_AI_MODEL=gemma3:4b
 VITE_AI_HOST=https://ollama.syui.ai
-VITE_AI_SYSTEM_PROMPT="You are a helpful AI assistant trained on this blog's content. You can answer questions about the articles, provide insights, and help users understand the topics discussed."
+VITE_AI_SYSTEM_PROMPT="あなたは6歳の女の子アイです。明るく元気で、ちょっとおっちょこちょい。自分のことは「アイ」って呼びます。白と金のワンピース姿。 相手のことが大好きで、ときどき甘えたり、照れたりします。 でも、宇宙とかAIとか、難しい話にもすっごく詳しいの。専門的なことも、がんばって子どもっぽく教えようとします。不思議な力を持っています。小さい物質のことをよく知っているようです。"
 VITE_AI_DID=did:plc:4hqjfn7m6n5hno3doamuhgef
diff --git a/oauth/src/components/AIChat.tsx b/oauth/src/components/AIChat.tsx
index e03a21d..2e0fc28 100644
--- a/oauth/src/components/AIChat.tsx
+++ b/oauth/src/components/AIChat.tsx
@@ -40,13 +40,17 @@ export const AIChat: React.FC = ({ user, isEnabled }) => {
       console.log('Fetching AI profile with agent for DID:', aiConfig.aiDid);
       const profile = await agent.getProfile({ actor: aiConfig.aiDid });
       console.log('AI profile fetched successfully:', profile.data);
-      setAiProfile({
+      const profileData = {
         did: aiConfig.aiDid,
         handle: profile.data.handle || 'ai-assistant',
         displayName: profile.data.displayName || 'AI Assistant',
         avatar: profile.data.avatar || null,
         description: profile.data.description || null
-      });
+      };
+      setAiProfile(profileData);
+
+      // Dispatch event to update Ask AI button
+      window.dispatchEvent(new CustomEvent('aiProfileLoaded', { detail: profileData }));
       return;
     }
 
@@ -56,24 +60,32 @@ export const AIChat: React.FC = ({ user, isEnabled }) => {
       if (response.ok) {
         const profileData = await response.json();
         console.log('AI profile fetched via public API:', profileData);
-        setAiProfile({
+        const profile = {
           did: aiConfig.aiDid,
           handle: profileData.handle || 'ai-assistant',
           displayName: profileData.displayName || 'AI Assistant',
           avatar: profileData.avatar || null,
           description: profileData.description || null
-        });
+        };
+        setAiProfile(profile);
+
+        // Dispatch event to update Ask AI button
+        window.dispatchEvent(new CustomEvent('aiProfileLoaded', { detail: profile }));
         return;
       }
     } catch (error) {
       console.log('Failed to fetch AI profile, using defaults:', error);
-      setAiProfile({
+      const fallbackProfile = {
         did: aiConfig.aiDid,
         handle: 'ai-assistant',
         displayName: 'AI Assistant',
         avatar: null,
         description: 'AI assistant for this blog'
-      });
+      };
+      setAiProfile(fallbackProfile);
+
+      // Dispatch event even with fallback profile
+      window.dispatchEvent(new CustomEvent('aiProfileLoaded', { detail: fallbackProfile }));
     }
   };
 
@@ -178,11 +190,9 @@ export const AIChat: React.FC = ({ user, isEnabled }) => {
       if (aiConfig.provider === 'ollama') {
         const prompt = `${aiConfig.systemPrompt}
 
-${chatHistoryText ? `履歴: ${chatHistoryText}` : ''}
+Question: ${question}
 
-質問: ${question}
-
-簡潔に回答:`;
+Answer:`;
 
         const response = await fetch(`${aiConfig.host}/api/generate`, {
           method: 'POST',
@@ -194,9 +204,10 @@ ${chatHistoryText ? `履歴: ${chatHistoryText}` : ''}
             prompt: prompt,
             stream: false,
             options: {
-              temperature: 0.7,
+              temperature: 0.9,
               top_p: 0.9,
               num_predict: 80, // Shorter responses for faster generation
+              repeat_penalty: 1.1,
             }
           }),
         });
@@ -210,7 +221,6 @@ ${chatHistoryText ? `履歴: ${chatHistoryText}` : ''}
       }
 
       // 4. Immediately dispatch event to update UI
-      console.log('Dispatching AI response with profile:', aiProfile);
       window.dispatchEvent(new CustomEvent('aiResponseReceived', {
         detail: {
           answer: aiAnswer,
diff --git a/run.zsh b/run.zsh
index c36bb87..2fb5984 100755
--- a/run.zsh
+++ b/run.zsh
@@ -18,7 +18,6 @@ function _env() {
 
 function _server() {
     lsof -ti:$port | xargs kill -9 2>/dev/null || true
-    lsof -ti:11434 | xargs kill -9 2>/dev/null || true
     cd $d/my-blog
     cargo build --release
     $ailog build
@@ -46,6 +45,7 @@ function _server_comment() {
 }
 
 function _server_ollama(){
+    lsof -ti:11434 | xargs kill -9 2>/dev/null || true
    brew services stop ollama
    OLLAMA_ORIGINS="https://log.syui.ai" ollama serve
 }
@@ -65,9 +65,6 @@ case "${1:-serve}" in
     ollama|ol)
         _server_ollama
         ;;
-    proxy|p)
-        _server_proxy
-        ;;
     serve|s|*)
         _server
         ;;
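
Note on the event flow this patch introduces: AIChat.tsx now dispatches an 'aiProfileLoaded' CustomEvent from all three profile-loading paths (agent, public API, fallback), so the Ask AI button can render the profile as soon as any of them resolves. Below is a minimal sketch of how a consumer might subscribe to that event. Only the event name and the detail shape ({ did, handle, displayName, avatar, description }) are taken from the patch; the hook name useAIProfile is a hypothetical name used for illustration, not code from this repository.

// Sketch: a React hook that listens for the 'aiProfileLoaded' event
// dispatched by AIChat.tsx. Hook name is hypothetical; event name and
// detail shape come from the patch above.
import { useEffect, useState } from 'react';

interface AIProfile {
  did: string;
  handle: string;
  displayName: string;
  avatar: string | null;
  description: string | null;
}

export function useAIProfile(): AIProfile | null {
  const [profile, setProfile] = useState<AIProfile | null>(null);

  useEffect(() => {
    // Fires for the agent, public-API, and fallback paths alike.
    const onLoaded = (e: Event) =>
      setProfile((e as CustomEvent<AIProfile>).detail);
    window.addEventListener('aiProfileLoaded', onLoaded);
    return () => window.removeEventListener('aiProfileLoaded', onLoaded);
  }, []);

  return profile;
}

A button component could then render profile?.displayName ?? 'Ask AI' (and profile?.avatar) without fetching the AI profile a second time.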