add claude-code-mcp-server

2025-06-22 00:26:20 +09:00
parent 71acd44810
commit 13c05d97d2
2 changed files with 226 additions and 53 deletions

View File

@@ -3,18 +3,15 @@ import { atproto, collections } from '../api/atproto.js'
 import { env } from '../config/env.js'
 import { logger } from '../utils/logger.js'
 import { getErrorMessage, logError } from '../utils/errorHandler.js'
+import { AIProviderFactory } from '../services/aiProvider.js'
 export function useAskAI(adminData, userProfile, agent) {
   const [loading, setLoading] = useState(false)
   const [error, setError] = useState(null)
   const [chatHistory, setChatHistory] = useState([])
-  // Load the AI settings from environment variables
-  const aiConfig = {
-    host: import.meta.env.VITE_AI_HOST || 'https://ollama.syui.ai',
-    model: import.meta.env.VITE_AI_MODEL || 'gemma3:1b',
-    systemPrompt: import.meta.env.VITE_AI_SYSTEM_PROMPT || 'あなたは6歳の女の子アイです。明るく元気で、ちょっとおっちょこちょい。自分のことは「アイ」って呼びます。'
-  }
+  // Create the AI provider from environment variables
+  const aiProvider = AIProviderFactory.createFromEnv()
   const askQuestion = async (question) => {
     if (!question.trim()) return
@@ -23,51 +20,13 @@ export function useAskAI(adminData, userProfile, agent) {
     setError(null)
     try {
-      logger.log('Sending question to Ollama:', question)
+      logger.log('Sending question to AI provider:', question)
-      // Build a system prompt that includes the user's information
-      const userInfo = userProfile ? `相手の名前は${userProfile.displayName || userProfile.handle}です。` : ''
-      const enhancedSystemPrompt = `${aiConfig.systemPrompt} ${userInfo}`
-      // Send the request directly to Ollama (same approach as oauth_old)
-      const prompt = `${enhancedSystemPrompt}
-Question: ${question}
-Answer:`
-      // Add timeout to fetch request
-      const controller = new AbortController()
-      const timeoutId = setTimeout(() => controller.abort(), 30000) // 30 second timeout
-      const response = await fetch(`${aiConfig.host}/api/generate`, {
-        method: 'POST',
-        headers: {
-          'Content-Type': 'application/json',
-          'Origin': 'https://syui.ai',
-        },
-        body: JSON.stringify({
-          model: aiConfig.model,
-          prompt: prompt,
-          stream: false,
-          options: {
-            temperature: 0.9,
-            top_p: 0.9,
-            num_predict: 200, // Longer responses for better answers
-            repeat_penalty: 1.1,
-          }
-        }),
-        signal: controller.signal
+      // Send the question to the AI provider
+      const aiResponse = await aiProvider.ask(question, {
+        userProfile: userProfile
       })
-      clearTimeout(timeoutId)
-      if (!response.ok) {
-        throw new Error(`Ollama API error: ${response.status}`)
-      }
-      const data = await response.json()
-      const aiResponse = { answer: data.response || 'エラーが発生しました' }
       logger.log('Received AI response:', aiResponse)
       // Append the AI answer to the chat history
@@ -110,10 +69,10 @@ Answer:`
       logError(err, 'useAskAI.askQuestion')
       let errorMessage = 'AI応答の生成に失敗しました'
-      if (err.name === 'AbortError') {
-        errorMessage = 'AI応答がタイムアウトしました（30秒）'
-      } else if (err.message.includes('Ollama API error')) {
-        errorMessage = `Ollama API エラー: ${err.message}`
+      if (err.message.includes('Request timeout')) {
+        errorMessage = 'AI応答がタイムアウトしました'
+      } else if (err.message.includes('API error')) {
+        errorMessage = `API エラー: ${err.message}`
       } else if (err.message.includes('Failed to fetch')) {
         errorMessage = 'AI サーバーに接続できませんでした'
       }
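
A condensed sketch of the hook-side flow after this commit, using the names from the diff above (loading-state, chat-history, and error-message handling in the real hook are omitted):

const aiProvider = AIProviderFactory.createFromEnv()

const askQuestion = async (question) => {
  if (!question.trim()) return
  // Every provider resolves to the same shape: { answer: string }
  const aiResponse = await aiProvider.ask(question, { userProfile: userProfile })
  logger.log('Received AI response:', aiResponse)
  return aiResponse
}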

View File

@@ -0,0 +1,214 @@
+/**
+ * AI Provider Abstract Interface
+ * Supports multiple AI backends (Ollama, Claude, etc.)
+ */
+export class AIProvider {
+  constructor(config) {
+    this.config = config
+  }
+  /**
+   * Send a question to the AI and get a response
+   * @param {string} question - User's question
+   * @param {Object} context - Additional context (user info, etc.)
+   * @returns {Promise<{answer: string}>}
+   */
+  async ask(question, context = {}) {
+    throw new Error('ask() method must be implemented by subclass')
+  }
+  /**
+   * Check if the provider is available
+   * @returns {Promise<boolean>}
+   */
+  async healthCheck() {
+    throw new Error('healthCheck() method must be implemented by subclass')
+  }
+}
+/**
+ * Ollama Provider Implementation
+ */
+export class OllamaProvider extends AIProvider {
+  constructor(config) {
+    super(config)
+    this.host = config.host || 'https://ollama.syui.ai'
+    this.model = config.model || 'gemma3:1b'
+    this.systemPrompt = config.systemPrompt || ''
+  }
+  async ask(question, context = {}) {
+    // Build enhanced prompt with user context
+    const userInfo = context.userProfile
+      ? `相手の名前は${context.userProfile.displayName || context.userProfile.handle}です。`
+      : ''
+    const enhancedSystemPrompt = `${this.systemPrompt} ${userInfo}`
+    const prompt = `${enhancedSystemPrompt}
+Question: ${question}
+Answer:`
+    const controller = new AbortController()
+    const timeoutId = setTimeout(() => controller.abort(), 30000)
+    try {
+      const response = await fetch(`${this.host}/api/generate`, {
+        method: 'POST',
+        headers: {
+          'Content-Type': 'application/json',
+          'Origin': 'https://syui.ai',
+        },
+        body: JSON.stringify({
+          model: this.model,
+          prompt: prompt,
+          stream: false,
+          options: {
+            temperature: 0.9,
+            top_p: 0.9,
+            num_predict: 200,
+            repeat_penalty: 1.1,
+          }
+        }),
+        signal: controller.signal
+      })
+      clearTimeout(timeoutId)
+      if (!response.ok) {
+        throw new Error(`Ollama API error: ${response.status}`)
+      }
+      const data = await response.json()
+      return { answer: data.response || 'エラーが発生しました' }
+    } catch (error) {
+      clearTimeout(timeoutId)
+      if (error.name === 'AbortError') {
+        throw new Error('Request timeout')
+      }
+      throw error
+    }
+  }
+  async healthCheck() {
+    try {
+      const response = await fetch(`${this.host}/api/tags`, {
+        method: 'GET',
+        headers: {
+          'Origin': 'https://syui.ai',
+        }
+      })
+      return response.ok
+    } catch {
+      return false
+    }
+  }
+}
+/**
+ * Claude MCP Server Provider Implementation
+ */
+export class ClaudeMCPProvider extends AIProvider {
+  constructor(config) {
+    super(config)
+    this.endpoint = config.endpoint || 'https://your-server.com/api/claude-mcp'
+    this.apiKey = config.apiKey // Server-side auth token
+    this.systemPrompt = config.systemPrompt || ''
+  }
+  async ask(question, context = {}) {
+    const userInfo = context.userProfile
+      ? `相手の名前は${context.userProfile.displayName || context.userProfile.handle}です。`
+      : ''
+    const enhancedSystemPrompt = `${this.systemPrompt} ${userInfo}`
+    const controller = new AbortController()
+    const timeoutId = setTimeout(() => controller.abort(), 45000) // Longer timeout for Claude
+    try {
+      const response = await fetch(this.endpoint, {
+        method: 'POST',
+        headers: {
+          'Content-Type': 'application/json',
+          'Authorization': `Bearer ${this.apiKey}`,
+        },
+        body: JSON.stringify({
+          question: question,
+          systemPrompt: enhancedSystemPrompt,
+          context: context
+        }),
+        signal: controller.signal
+      })
+      clearTimeout(timeoutId)
+      if (!response.ok) {
+        throw new Error(`Claude MCP error: ${response.status}`)
+      }
+      const data = await response.json()
+      return { answer: data.answer || 'エラーが発生しました' }
+    } catch (error) {
+      clearTimeout(timeoutId)
+      if (error.name === 'AbortError') {
+        throw new Error('Request timeout')
+      }
+      throw error
+    }
+  }
+  async healthCheck() {
+    try {
+      const response = await fetch(`${this.endpoint}/health`, {
+        method: 'GET',
+        headers: {
+          'Authorization': `Bearer ${this.apiKey}`,
+        }
+      })
+      return response.ok
+    } catch {
+      return false
+    }
+  }
+}
+/**
+ * AI Provider Factory
+ */
+export class AIProviderFactory {
+  static create(provider, config) {
+    switch (provider) {
+      case 'ollama':
+        return new OllamaProvider(config)
+      case 'claude-mcp':
+        return new ClaudeMCPProvider(config)
+      default:
+        throw new Error(`Unknown AI provider: ${provider}`)
+    }
+  }
+  static createFromEnv() {
+    const provider = import.meta.env.VITE_AI_PROVIDER || 'ollama'
+    const config = {
+      systemPrompt: import.meta.env.VITE_AI_SYSTEM_PROMPT || '',
+    }
+    switch (provider) {
+      case 'ollama':
+        config.host = import.meta.env.VITE_AI_HOST
+        config.model = import.meta.env.VITE_AI_MODEL
+        break
+      case 'claude-mcp':
+        config.endpoint = import.meta.env.VITE_CLAUDE_MCP_ENDPOINT
+        config.apiKey = import.meta.env.VITE_CLAUDE_MCP_API_KEY
+        break
+    }
+    return AIProviderFactory.create(provider, config)
+  }
+}
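
For reference, a minimal usage sketch of the new provider module. The import path and the profile values are illustrative; the environment variable names are the ones createFromEnv reads above.

// Sketch: build a provider from Vite env vars and ask one question.
// VITE_AI_PROVIDER selects 'ollama' (default) or 'claude-mcp';
// 'ollama' reads VITE_AI_HOST / VITE_AI_MODEL, 'claude-mcp' reads
// VITE_CLAUDE_MCP_ENDPOINT / VITE_CLAUDE_MCP_API_KEY, and both read
// VITE_AI_SYSTEM_PROMPT.
import { AIProviderFactory } from './services/aiProvider.js' // illustrative path

const provider = AIProviderFactory.createFromEnv()

if (await provider.healthCheck()) {
  const { answer } = await provider.ask('Who are you?', {
    userProfile: { displayName: 'syui', handle: 'syui.ai' } // hypothetical profile
  })
  console.log(answer)
}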