diff --git a/.claude/settings.local.json b/.claude/settings.local.json index b570fb0..2aca957 100644 --- a/.claude/settings.local.json +++ b/.claude/settings.local.json @@ -35,7 +35,8 @@ "Bash(npm run dev:*)", "Bash(./target/release/ailog:*)", "Bash(rg:*)", - "Bash(../target/release/ailog build)" + "Bash(../target/release/ailog build)", + "Bash(zsh run.zsh:*)" ], "deny": [] } diff --git a/README.md b/README.md index e62ecf7..2586d89 100644 --- a/README.md +++ b/README.md @@ -4,60 +4,62 @@ AI-powered static blog generator with ATProto integration, part of the ai.ai eco ## 🚀 Quick Start -### Basic Blog Setup +### Development Setup ```bash -# 1. Initialize a new blog -ailog init my-blog +# 1. Clone and setup +git clone https://git.syui.ai/ai/log +cd log -# 2. Configure your blog (edit my-blog/config.toml) -[site] -title = "My Blog" -description = "A blog powered by ailog" -base_url = "https://yourdomain.com" -language = "ja" +# 2. Start development services +./run.zsh serve # Blog development server +./run.zsh c # Cloudflare tunnel (log.syui.ai) +./run.zsh o # OAuth web server +./run.zsh co # Comment system monitor -[build] -highlight_code = true -minify = false - -[ai] -enabled = true -auto_translate = false -comment_moderation = false -ask_ai = true -provider = "ollama" -model = "gemma3:4b" -host = "https://ollama.yourdomain.com" -system_prompt = "You are a helpful AI assistant trained on this blog's content." -ai_did = "did:plc:your-ai-bot-did" - -# 3. Build your blog -ailog build - -# 4. Serve locally -ailog serve +# 3. Start Ollama (for Ask AI) +brew install ollama +ollama pull gemma2:2b +OLLAMA_ORIGINS="https://log.syui.ai" ollama serve ``` -### ATProto Comment System +### Production Deployment ```bash -# 1. Add OAuth configuration to my-blog/config.toml -[oauth] -json = "client-metadata.json" -redirect = "oauth/callback" -admin = "your-did-here" -collection_comment = "ai.syui.log" -collection_user = "ai.syui.log.user" +# 1. Build static site +hugo -# 2. Build OAuth app -ailog oauth build my-blog +# 2. Deploy to GitHub Pages +git add . +git commit -m "Update blog" +git push origin main -# 3. Authenticate with ATProto -ailog auth init +# 3. Automatic deployment via GitHub Actions +# Site available at: https://yourusername.github.io/repo-name +``` -# 4. Start stream monitoring -ailog stream start my-blog +### ATProto Integration + +```bash +# 1. OAuth Client Setup (oauth/client-metadata.json) +{ + "client_id": "https://log.syui.ai/client-metadata.json", + "client_name": "ai.log Blog System", + "redirect_uris": ["https://log.syui.ai/oauth/callback"], + "scope": "atproto", + "grant_types": ["authorization_code", "refresh_token"], + "response_types": ["code"], + "application_type": "web", + "dpop_bound_access_tokens": true +} + +# 2. Comment System Configuration +# Collection: ai.syui.log (comments) +# User Management: ai.syui.log.user (registered users) + +# 3. 
Services +./run.zsh o # OAuth authentication server +./run.zsh co # ATProto Jetstream comment monitor ``` ### Development with run.zsh @@ -130,15 +132,30 @@ ai.logは、[Anthropic Docs](https://docs.anthropic.com/)にインスパイア - **自動TOC**: 右サイドバーに目次を自動生成 - **レスポンシブ**: モバイル・デスクトップ対応 -### 🤖 AI統合機能 -- **Ask AI**: ローカルLLM(Ollama)による質問応答 ✅ - - トップページでのみ利用可能 - - atproto OAuth認証必須 - - Cloudflare Tunnel経由でCORS問題解決済み -- **自動翻訳**: 日本語↔英語の自動生成 -- **AI記事強化**: コンテンツの自動改善 -- **AIコメント**: 記事への一言コメント生成 -- **カスタマイズ可能なAI設定**: system_prompt、ai_did、プロフィール連携 +### 🤖 Ask AI機能 ✅ +- **ローカルAI**: Ollama(gemma2:2b)による質問応答 +- **認証必須**: ATProto OAuth認証でアクセス制御 +- **トップページ限定**: ブログコンテンツに特化した回答 +- **CORS解決済み**: OLLAMA_ORIGINS設定でクロスオリジン問題解消 +- **プロフィール連携**: AIアバターとしてATProtoプロフィール画像表示 +- **レスポンス最適化**: 80文字制限+高いtemperatureで多様な回答 +- **ローディング表示**: Font Awesomeアイコンによる一行ローディング + +### 🔧 Ask AI設定方法 +```bash +# 1. Ollama設定 +brew install ollama +ollama pull gemma2:2b + +# 2. CORS設定で起動 +OLLAMA_ORIGINS="https://log.syui.ai" ollama serve + +# 3. AI DID設定 (my-blog/templates/base.html) +const aiConfig = { + systemPrompt: 'You are a helpful AI assistant.', + aiDid: 'did:plc:your-ai-bot-did' +}; +``` ### 🌐 分散SNS連携 - **atproto OAuth**: Blueskyアカウントでログイン diff --git a/oauth/.env.production b/oauth/.env.production index 8e0462c..f60fef9 100644 --- a/oauth/.env.production +++ b/oauth/.env.production @@ -22,3 +22,7 @@ VITE_AI_MODEL=gemma3:4b VITE_AI_HOST=https://ollama.syui.ai VITE_AI_SYSTEM_PROMPT="あなたは6歳の女の子アイです。明るく元気で、ちょっとおっちょこちょい。自分のことは「アイ」って呼びます。白と金のワンピース姿。 相手のことが大好きで、ときどき甘えたり、照れたりします。 でも、宇宙とかAIとか、難しい話にもすっごく詳しいの。専門的なことも、がんばって子どもっぽく教えようとします。不思議な力を持っています。小さい物質のことをよく知っているようです。" VITE_AI_DID=did:plc:4hqjfn7m6n5hno3doamuhgef + +# API Configuration +VITE_BSKY_PUBLIC_API=https://public.api.bsky.app + diff --git a/oauth/src/App.css b/oauth/src/App.css index 987106c..551e48b 100644 --- a/oauth/src/App.css +++ b/oauth/src/App.css @@ -1,7 +1,16 @@ +/* Theme Colors */ +:root { + --theme-color: #FF4500; + --white: #fff; + --light-gray: #aaa; + --dark-gray: #666; + --background: #fff; +} + .app { min-height: 100vh; - background: linear-gradient(180deg, #f8f9fa 0%, #ffffff 100%); - color: #333333; + background: linear-gradient(180deg, #f8f9fa 0%, var(--background) 100%); + color: var(--dark-gray); } .app-header { @@ -41,15 +50,15 @@ } .nav-button.active { - background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); - color: white; - border: 1px solid #667eea; - box-shadow: 0 4px 16px rgba(102, 126, 234, 0.4); + background: var(--theme-color); + color: var(--white); + border: 1px solid var(--theme-color); + box-shadow: 0 4px 16px rgba(255, 69, 0, 0.4); } .nav-button.active:hover { transform: translateY(-2px); - box-shadow: 0 6px 20px rgba(102, 126, 234, 0.5); + box-shadow: 0 6px 20px rgba(255, 69, 0, 0.5); } .app-header h1 { @@ -99,9 +108,9 @@ } .login-button { - background: linear-gradient(135deg, #667eea 0%, #764ba2 100%); - color: white; - border: 1px solid #667eea; + background: var(--theme-color); + color: var(--white); + border: 1px solid var(--theme-color); } .backup-button { @@ -124,7 +133,7 @@ .login-button:hover { transform: translateY(-2px); - box-shadow: 0 4px 12px rgba(102, 126, 234, 0.4); + box-shadow: 0 4px 12px rgba(255, 69, 0, 0.4); } .backup-button:hover { @@ -268,8 +277,8 @@ } .atproto-button { - background: #1185fe; - color: white; + background: var(--theme-color); + color: var(--white); border: none; padding: 12px 24px; border-radius: 6px; @@ -281,9 +290,9 @@ } .atproto-button:hover { - background: #0d6efd; + filter: brightness(1.1); 
transform: translateY(-2px); - box-shadow: 0 4px 12px rgba(17, 133, 254, 0.4); + box-shadow: 0 4px 12px rgba(255, 69, 0, 0.4); } .username-input-section { @@ -407,8 +416,8 @@ } .post-button { - background: #28a745; - color: white; + background: var(--theme-color); + color: var(--white); border: none; padding: 10px 20px; border-radius: 6px; @@ -419,9 +428,9 @@ } .post-button:hover:not(:disabled) { - background: #218838; + filter: brightness(1.1); transform: translateY(-2px); - box-shadow: 0 4px 12px rgba(40, 167, 69, 0.4); + box-shadow: 0 4px 12px rgba(255, 69, 0, 0.4); } .post-button:disabled { @@ -455,8 +464,8 @@ } .comments-toggle-button { - background: #1185fe; - color: white; + background: var(--theme-color); + color: var(--white); border: none; padding: 8px 16px; border-radius: 6px; @@ -467,9 +476,9 @@ } .comments-toggle-button:hover { - background: #0d6efd; + filter: brightness(1.1); transform: translateY(-2px); - box-shadow: 0 4px 12px rgba(17, 133, 254, 0.4); + box-shadow: 0 4px 12px rgba(255, 69, 0, 0.4); } .comment-item { @@ -714,8 +723,8 @@ /* JSON Display Styles */ .json-button { - background: #4caf50; - color: white; + background: var(--theme-color); + color: var(--white); border: none; padding: 4px 8px; border-radius: 4px; @@ -726,7 +735,7 @@ } .json-button:hover { - background: #45a049; + filter: brightness(1.1); transform: scale(1.05); } @@ -759,4 +768,108 @@ color: #333; max-height: 400px; overflow-y: auto; +} + +/* Tab Navigation */ +.tab-navigation { + display: flex; + border-bottom: 2px solid #e1e5e9; + margin-bottom: 20px; +} + +.tab-button { + background: none; + border: none; + padding: 12px 20px; + cursor: pointer; + font-size: 14px; + font-weight: 500; + color: #656d76; + border-bottom: 2px solid transparent; + transition: all 0.2s; +} + +.tab-button:hover { + color: var(--theme-color); + background: #f6f8fa; +} + +.tab-button.active { + color: var(--theme-color); + border-bottom-color: var(--theme-color); + background: #f6f8fa; +} + +/* AI Chat History */ +.ai-chat-list { + max-width: 100%; + border: 1px solid #ddd; + border-radius: 8px; + padding: 20px; +} + +.chat-item { + border: 1px solid #d1d9e0; + border-radius: 8px; + padding: 16px; + margin-bottom: 16px; + background: #ffffff; +} + +.chat-header { + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: 12px; +} + +.chat-actions { + display: flex; + align-items: center; + gap: 8px; +} + +.chat-type-button { + background: var(--theme-color); + color: var(--white); + border: none; + padding: 4px 8px; + border-radius: 4px; + cursor: default; + font-size: 12px; + font-weight: 500; + margin-left: 4px; +} + +.chat-type-text { + font-size: 16px; + margin-left: 4px; +} + + +.chat-date { + color: #656d76; + font-size: 12px; +} + +.chat-content { + background: #f6f8fa; + padding: 12px; + border-radius: 6px; + border-left: 4px solid #d1d9e0; + margin-bottom: 8px; + white-space: pre-wrap; + line-height: 1.5; +} + +.chat-meta { + font-size: 11px; + color: #656d76; +} + +.no-chat { + text-align: center; + padding: 40px 20px; + color: #656d76; + font-style: italic; } \ No newline at end of file diff --git a/oauth/src/App.tsx b/oauth/src/App.tsx index e64f00d..df04041 100644 --- a/oauth/src/App.tsx +++ b/oauth/src/App.tsx @@ -46,6 +46,8 @@ function App() { const [isPostingUserList, setIsPostingUserList] = useState(false); const [userListRecords, setUserListRecords] = useState([]); const [showJsonFor, setShowJsonFor] = useState(null); + const [activeTab, setActiveTab] = useState<'comments' 
| 'ai-chat'>('comments'); + const [aiChatHistory, setAiChatHistory] = useState([]); useEffect(() => { // Setup Jetstream WebSocket for real-time comments (optional) @@ -151,6 +153,9 @@ function App() { console.log('OAuth session found, loading all comments...'); loadAllComments(); + // Load AI chat history + loadAiChatHistory(userProfile.did); + // Load user list records if admin if (userProfile.did === appConfig.adminDid) { loadUserListRecords(); @@ -221,6 +226,50 @@ function App() { return `https://via.placeholder.com/48x48/1185fe/ffffff?text=${initial}`; }; + const loadAiChatHistory = async (did: string) => { + try { + console.log('Loading AI chat history for DID:', did); + const agent = atprotoOAuthService.getAgent(); + if (!agent) { + console.log('No agent available'); + return; + } + + // Get AI chat records from current user + const response = await agent.api.com.atproto.repo.listRecords({ + repo: did, + collection: appConfig.collections.chat, + limit: 100, + }); + + console.log('AI chat history loaded:', response.data); + const chatRecords = response.data.records || []; + + // Filter out old records with invalid AI profile data (temporary fix for migration) + const validRecords = chatRecords.filter(record => { + if (record.value.answer) { + // This is an AI answer - check if it has valid AI profile + return record.value.author?.handle && + record.value.author?.handle !== 'ai-assistant' && + record.value.author?.displayName !== 'AI Assistant'; + } + return true; // Keep all questions + }); + + console.log(`Filtered ${chatRecords.length} records to ${validRecords.length} valid records`); + + // Sort by creation time and group question-answer pairs + const sortedRecords = validRecords.sort((a, b) => + new Date(a.value.createdAt).getTime() - new Date(b.value.createdAt).getTime() + ); + + setAiChatHistory(sortedRecords); + } catch (err) { + console.error('Failed to load AI chat history:', err); + setAiChatHistory([]); + } + }; + const loadUserComments = async (did: string) => { try { console.log('Loading comments for DID:', did); @@ -305,7 +354,7 @@ function App() { if (user.did && user.did.includes('-placeholder')) { console.log(`Resolving placeholder DID for ${user.handle}`); try { - const profileResponse = await fetch(`https://public.api.bsky.app/xrpc/app.bsky.actor.getProfile?actor=${encodeURIComponent(user.handle)}`); + const profileResponse = await fetch(`${appConfig.bskyPublicApi}/xrpc/app.bsky.actor.getProfile?actor=${encodeURIComponent(user.handle)}`); if (profileResponse.ok) { const profileData = await profileResponse.json(); if (profileData.did) { @@ -456,7 +505,7 @@ function App() { if (!record.value.author?.avatar && record.value.author?.handle) { try { // Public API でプロフィール取得 - const profileResponse = await fetch(`https://public.api.bsky.app/xrpc/app.bsky.actor.getProfile?actor=${encodeURIComponent(record.value.author.handle)}`); + const profileResponse = await fetch(`${appConfig.bskyPublicApi}/xrpc/app.bsky.actor.getProfile?actor=${encodeURIComponent(record.value.author.handle)}`); if (profileResponse.ok) { const profileData = await profileResponse.json(); @@ -683,7 +732,7 @@ function App() { try { // Public APIでプロフィールを取得してDIDを解決 - const profileResponse = await fetch(`https://public.api.bsky.app/xrpc/app.bsky.actor.getProfile?actor=${encodeURIComponent(handle)}`); + const profileResponse = await fetch(`${appConfig.bskyPublicApi}/xrpc/app.bsky.actor.getProfile?actor=${encodeURIComponent(handle)}`); if (profileResponse.ok) { const profileData = await profileResponse.json(); 
if (profileData.did) { @@ -974,11 +1023,30 @@ function App() { )} + {/* Tab Navigation */} +
+ + {user && ( + + )} +
+ {/* Comments List */} -
-
-

Comments

-
+ {activeTab === 'comments' && ( +
+
+

Comments

+
{comments.filter(shouldShowComment).length === 0 ? (

{appConfig.rkey ? `No comments for this post yet` : `No comments yet`} @@ -988,9 +1056,25 @@ function App() {

User Avatar { + // Fetch fresh avatar from API when component mounts + if (img && record.value.author?.did) { + fetch(`${appConfig.bskyPublicApi}/xrpc/app.bsky.actor.getProfile?actor=${encodeURIComponent(record.value.author.did)}`) + .then(res => res.json()) + .then(data => { + if (data.avatar && img) { + img.src = data.avatar; + } + }) + .catch(err => { + console.warn('Failed to fetch fresh avatar:', err); + // Keep placeholder on error + }); + } + }} />
@@ -1047,7 +1131,92 @@ function App() {
)) )} -
+
+ )} + + {/* AI Chat History List */} + {activeTab === 'ai-chat' && user && ( +
+
+

AI Chat History

+
+ {aiChatHistory.length === 0 ? ( +

No AI conversations yet. Start chatting with Ask AI!

+ ) : ( + aiChatHistory.map((record, index) => ( +
+
+ User Avatar { + // Fetch fresh avatar from API when component mounts + if (img && record.value.author?.did) { + fetch(`${appConfig.bskyPublicApi}/xrpc/app.bsky.actor.getProfile?actor=${encodeURIComponent(record.value.author.did)}`) + .then(res => res.json()) + .then(data => { + if (data.avatar && img) { + img.src = data.avatar; + } + }) + .catch(err => { + console.warn('Failed to fetch fresh avatar:', err); + // Keep placeholder on error + }); + } + }} + /> +
+ + {record.value.author?.displayName || record.value.author?.handle || 'unknown'} + + + @{record.value.author?.handle || 'unknown'} + +
+ + {new Date(record.value.createdAt).toLocaleString()} + +
+ + +
+
+
+ {record.value.question || record.value.answer} +
+
+ {record.uri} +
+ + {/* JSON Display */} + {showJsonFor === record.uri && ( +
+
JSON Record:
+
+                          {JSON.stringify(record, null, 2)}
+                        
+
+ )} +
+ )) + )} +
+ )} {/* Comment Form - Only show on post pages */} {user && appConfig.rkey && ( diff --git a/oauth/src/components/AIChat-access.tsx b/oauth/src/components/AIChat-access.tsx new file mode 100644 index 0000000..f4403a0 --- /dev/null +++ b/oauth/src/components/AIChat-access.tsx @@ -0,0 +1,21 @@ +// Cloudflare Access対応版の例 +const response = await fetch(`${aiConfig.host}/api/generate`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + // Cloudflare Access Service Token + 'CF-Access-Client-Id': import.meta.env.VITE_CF_ACCESS_CLIENT_ID, + 'CF-Access-Client-Secret': import.meta.env.VITE_CF_ACCESS_CLIENT_SECRET, + }, + body: JSON.stringify({ + model: aiConfig.model, + prompt: prompt, + stream: false, + options: { + temperature: 0.9, + top_p: 0.9, + num_predict: 80, + repeat_penalty: 1.1, + } + }), +}); \ No newline at end of file diff --git a/oauth/src/components/AIChat.tsx b/oauth/src/components/AIChat.tsx index 2e0fc28..ffceb0b 100644 --- a/oauth/src/components/AIChat.tsx +++ b/oauth/src/components/AIChat.tsx @@ -23,11 +23,15 @@ export const AIChat: React.FC = ({ user, isEnabled }) => { host: import.meta.env.VITE_AI_HOST || 'https://ollama.syui.ai', systemPrompt: import.meta.env.VITE_AI_SYSTEM_PROMPT || 'You are a helpful AI assistant trained on this blog\'s content.', aiDid: import.meta.env.VITE_AI_DID || 'did:plc:uqzpqmrjnptsxezjx4xuh2mn', + bskyPublicApi: import.meta.env.VITE_BSKY_PUBLIC_API || 'https://public.api.bsky.app', }; // Fetch AI profile on load useEffect(() => { const fetchAIProfile = async () => { + console.log('=== AI PROFILE FETCH START ==='); + console.log('AI DID:', aiConfig.aiDid); + if (!aiConfig.aiDid) { console.log('No AI DID configured'); return; @@ -42,51 +46,48 @@ export const AIChat: React.FC = ({ user, isEnabled }) => { console.log('AI profile fetched successfully:', profile.data); const profileData = { did: aiConfig.aiDid, - handle: profile.data.handle || 'ai-assistant', - displayName: profile.data.displayName || 'AI Assistant', - avatar: profile.data.avatar || null, - description: profile.data.description || null + handle: profile.data.handle, + displayName: profile.data.displayName, + avatar: profile.data.avatar, + description: profile.data.description }; + console.log('Setting aiProfile to:', profileData); setAiProfile(profileData); // Dispatch event to update Ask AI button window.dispatchEvent(new CustomEvent('aiProfileLoaded', { detail: profileData })); + console.log('=== AI PROFILE FETCH SUCCESS (AGENT) ==='); return; } // Fallback to public API console.log('No agent available, trying public API for AI profile'); - const response = await fetch(`https://public.api.bsky.app/xrpc/app.bsky.actor.getProfile?actor=${encodeURIComponent(aiConfig.aiDid)}`); + const response = await fetch(`${aiConfig.bskyPublicApi}/xrpc/app.bsky.actor.getProfile?actor=${encodeURIComponent(aiConfig.aiDid)}`); if (response.ok) { const profileData = await response.json(); console.log('AI profile fetched via public API:', profileData); const profile = { did: aiConfig.aiDid, - handle: profileData.handle || 'ai-assistant', - displayName: profileData.displayName || 'AI Assistant', - avatar: profileData.avatar || null, - description: profileData.description || null + handle: profileData.handle, + displayName: profileData.displayName, + avatar: profileData.avatar, + description: profileData.description }; + console.log('Setting aiProfile to:', profile); setAiProfile(profile); // Dispatch event to update Ask AI button window.dispatchEvent(new CustomEvent('aiProfileLoaded', { 
detail: profile })); + console.log('=== AI PROFILE FETCH SUCCESS (PUBLIC API) ==='); return; + } else { + console.error('Public API failed with status:', response.status); } } catch (error) { - console.log('Failed to fetch AI profile, using defaults:', error); - const fallbackProfile = { - did: aiConfig.aiDid, - handle: 'ai-assistant', - displayName: 'AI Assistant', - avatar: null, - description: 'AI assistant for this blog' - }; - setAiProfile(fallbackProfile); - - // Dispatch event even with fallback profile - window.dispatchEvent(new CustomEvent('aiProfileLoaded', { detail: fallbackProfile })); + console.error('Failed to fetch AI profile:', error); + setAiProfile(null); } + console.log('=== AI PROFILE FETCH FAILED ==='); }; fetchAIProfile(); @@ -97,9 +98,11 @@ export const AIChat: React.FC = ({ user, isEnabled }) => { // Listen for AI question posts from base.html const handleAIQuestion = async (event: any) => { - if (!user || !event.detail || !event.detail.question || isProcessing) return; + if (!user || !event.detail || !event.detail.question || isProcessing || !aiProfile) return; console.log('AIChat received question:', event.detail.question); + console.log('Current aiProfile state:', aiProfile); + setIsProcessing(true); try { await postQuestionAndGenerateResponse(event.detail.question); @@ -120,10 +123,10 @@ export const AIChat: React.FC = ({ user, isEnabled }) => { return () => { window.removeEventListener('postAIQuestion', handleAIQuestion); }; - }, [user, isEnabled, isProcessing]); + }, [user, isEnabled, isProcessing, aiProfile]); const postQuestionAndGenerateResponse = async (question: string) => { - if (!user || !aiConfig.askAi) return; + if (!user || !aiConfig.askAi || !aiProfile) return; setIsLoading(true); @@ -232,6 +235,9 @@ Answer:`; // 5. 
Save AI response in background
      const answerRkey = now.toISOString().replace(/[:.]/g, '-') + '-answer';
      
+      console.log('=== SAVING AI ANSWER ===');
+      console.log('Current aiProfile:', aiProfile);
+      
       const answerRecord = {
         $type: appConfig.collections.chat,
         answer: aiAnswer,
@@ -239,11 +245,14 @@ Answer:`;
         url: window.location.href,
         createdAt: now.toISOString(),
         author: {
-          did: aiConfig.aiDid,
-          handle: 'AI Assistant',
-          displayName: 'AI Assistant',
+          did: aiProfile.did,
+          handle: aiProfile.handle,
+          displayName: aiProfile.displayName,
+          avatar: aiProfile.avatar,
         },
       };
+      
+      console.log('Answer record to save:', answerRecord);
 
       // Save to ATProto asynchronously (don't wait for it)
       agent.api.com.atproto.repo.putRecord({
diff --git a/oauth/src/config/app.ts b/oauth/src/config/app.ts
index 9d4479f..75cc100 100644
--- a/oauth/src/config/app.ts
+++ b/oauth/src/config/app.ts
@@ -13,6 +13,7 @@ export interface AppConfig {
   aiProvider: string;
   aiModel: string;
   aiHost: string;
+  bskyPublicApi: string;
 }
 
 // Generate collection names from host
@@ -80,13 +81,15 @@ export function getAppConfig(): AppConfig {
   const aiProvider = import.meta.env.VITE_AI_PROVIDER || 'ollama';
   const aiModel = import.meta.env.VITE_AI_MODEL || 'gemma2:2b';
   const aiHost = import.meta.env.VITE_AI_HOST || 'https://ollama.syui.ai';
+  const bskyPublicApi = import.meta.env.VITE_BSKY_PUBLIC_API || 'https://public.api.bsky.app';
   
   console.log('App configuration:', {
     host,
     adminDid,
     collections,
     rkey: rkey || 'none (not on post page)',
-    ai: { enabled: aiEnabled, askAi: aiAskAi, provider: aiProvider, model: aiModel, host: aiHost }
+    ai: { enabled: aiEnabled, askAi: aiAskAi, provider: aiProvider, model: aiModel, host: aiHost },
+    bskyPublicApi
   });
   
   return {
@@ -98,7 +101,8 @@ export function getAppConfig(): AppConfig {
     aiAskAi,
     aiProvider,
     aiModel,
-    aiHost
+    aiHost,
+    bskyPublicApi
   };
 }
diff --git a/src/ollama_proxy.rs b/src/ollama_proxy.rs
new file mode 100644
index 0000000..75972a9
--- /dev/null
+++ b/src/ollama_proxy.rs
@@ -0,0 +1,96 @@
+use actix_web::{web, App, HttpResponse, HttpServer, middleware};
+use serde::{Deserialize, Serialize};
+use std::collections::HashMap;
+use std::sync::{Arc, Mutex};
+use chrono::{DateTime, Utc};
+
+#[derive(Clone)]
+struct RateLimiter {
+    requests: Arc<Mutex<HashMap<String, Vec<DateTime<Utc>>>>>,
+    limit_per_hour: usize,
+}
+
+impl RateLimiter {
+    fn new(limit: usize) -> Self {
+        Self {
+            requests: Arc::new(Mutex::new(HashMap::new())),
+            limit_per_hour: limit,
+        }
+    }
+
+    fn check_limit(&self, user_id: &str) -> bool {
+        let mut requests = self.requests.lock().unwrap();
+        let now = Utc::now();
+        let hour_ago = now - chrono::Duration::hours(1);
+
+        let user_requests = requests.entry(user_id.to_string()).or_insert(Vec::new());
+        user_requests.retain(|&time| time > hour_ago);
+
+        if user_requests.len() < self.limit_per_hour {
+            user_requests.push(now);
+            true
+        } else {
+            false
+        }
+    }
+}
+
+// Shared application state (assumed shape; referenced below but not defined
+// elsewhere in this diff)
+struct AppState {
+    rate_limiter: RateLimiter,
+}
+
+// Request-scoped user identity, assumed to be inserted by an ATProto auth
+// middleware that is not part of this diff
+#[derive(Clone)]
+struct UserInfo {
+    did: String,
+}
+
+#[derive(Deserialize, Serialize)]
+struct GenerateRequest {
+    model: String,
+    prompt: String,
+    stream: bool,
+    options: Option<serde_json::Value>,
+}
+
+async fn proxy_generate(
+    req: web::Json<GenerateRequest>,
+    data: web::Data<AppState>,
+    user_info: web::ReqData<UserInfo>, // ATProto認証から取得
+) -> actix_web::Result<HttpResponse> {
+    // レート制限チェック
+    if !data.rate_limiter.check_limit(&user_info.did) {
+        return Ok(HttpResponse::TooManyRequests()
+            .json(serde_json::json!({
+                "error": "Rate limit exceeded. Please try again later."
+            })));
+    }
+
+    // プロンプトサイズ制限
+    if req.prompt.len() > 500 {
+        return Ok(HttpResponse::BadRequest()
+            .json(serde_json::json!({
+                "error": "Prompt too long. Maximum 500 characters."
+            })));
+    }
+
+    // Ollamaへのリクエスト転送
+    let client = reqwest::Client::new();
+    let response = client
+        .post("http://localhost:11434/api/generate")
+        .json(&req.into_inner())
+        .send()
+        .await
+        .map_err(actix_web::error::ErrorInternalServerError)?;
+
+    let body = response
+        .bytes()
+        .await
+        .map_err(actix_web::error::ErrorInternalServerError)?;
+    Ok(HttpResponse::Ok()
+        .content_type("application/json")
+        .body(body))
+}
+
+#[actix_web::main]
+async fn main() -> std::io::Result<()> {
+    let rate_limiter = RateLimiter::new(20); // 1時間に20リクエスト
+
+    HttpServer::new(move || {
+        App::new()
+            .app_data(web::Data::new(AppState {
+                rate_limiter: rate_limiter.clone(),
+            }))
+            .wrap(middleware::Logger::default())
+            .route("/api/generate", web::post().to(proxy_generate))
+    })
+    .bind("127.0.0.1:8080")?
+    .run()
+    .await
+}
\ No newline at end of file
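
The proxy above only covers the server side. A client that switches from calling Ollama directly (as in `AIChat.tsx` / `AIChat-access.tsx`) to calling this proxy would also want to surface the two error responses the proxy can return. A minimal sketch, assuming the proxy is exposed through a hypothetical `VITE_OLLAMA_PROXY_URL` environment variable that is not part of this diff:

```typescript
// Hypothetical client-side call to the rate-limited proxy; mirrors the request
// shape already used in AIChat.tsx and AIChat-access.tsx.
const proxyUrl = import.meta.env.VITE_OLLAMA_PROXY_URL || 'http://127.0.0.1:8080';

export async function askViaProxy(prompt: string, model = 'gemma2:2b'): Promise<string> {
  const res = await fetch(`${proxyUrl}/api/generate`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      model,
      prompt,
      stream: false,
      options: { temperature: 0.9, top_p: 0.9, num_predict: 80, repeat_penalty: 1.1 },
    }),
  });

  // Status codes emitted by proxy_generate in src/ollama_proxy.rs
  if (res.status === 429) throw new Error('Rate limit exceeded. Please try again later.');
  if (res.status === 400) throw new Error('Prompt too long. Maximum 500 characters.');
  if (!res.ok) throw new Error(`Proxy error: ${res.status}`);

  const data = await res.json();
  return data.response; // Ollama's non-streaming /api/generate returns the text in `response`
}
```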
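Several hunks in `App.tsx` and `AIChat.tsx` now build the same `app.bsky.actor.getProfile` request against `bskyPublicApi`. If further deduplication is wanted, that pattern could be pulled into a small helper; a sketch under a hypothetical module path:

```typescript
// Hypothetical helper, e.g. oauth/src/utils/bskyProfile.ts, wrapping the
// repeated public-API profile lookup used in App.tsx and AIChat.tsx.
import { getAppConfig } from '../config/app';

export interface BskyProfile {
  did: string;
  handle: string;
  displayName?: string;
  avatar?: string;
  description?: string;
}

export async function fetchBskyProfile(actor: string): Promise<BskyProfile | null> {
  const { bskyPublicApi } = getAppConfig();
  try {
    const res = await fetch(
      `${bskyPublicApi}/xrpc/app.bsky.actor.getProfile?actor=${encodeURIComponent(actor)}`
    );
    if (!res.ok) return null;
    return (await res.json()) as BskyProfile;
  } catch (err) {
    console.warn('Failed to fetch Bluesky profile:', err);
    return null;
  }
}
```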
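For reference, `AIChat.tsx` talks to the static page through two `CustomEvent`s: it listens for `postAIQuestion` (reading `event.detail.question`) and dispatches `aiProfileLoaded` once the AI profile resolves. The template side of that contract is not shown in this diff; a sketch of what `my-blog/templates/base.html` would dispatch and listen for (event names taken from the diff, implementation hypothetical):

```typescript
// Hypothetical glue code for my-blog/templates/base.html (a plain script in the
// template; written as TypeScript here for consistency with the other examples).
export function submitAskAiQuestion(question: string): void {
  // AIChat.tsx listens for this event and posts the question/answer pair.
  window.dispatchEvent(new CustomEvent('postAIQuestion', { detail: { question } }));
}

// AIChat.tsx dispatches this once the AI profile has been fetched; the Ask AI
// button can use it to show the AI's avatar and display name.
window.addEventListener('aiProfileLoaded', (event) => {
  const profile = (event as CustomEvent).detail;
  console.log('Ask AI ready:', profile.displayName, profile.avatar);
});
```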