From b68c7b5e619f08d127aecd18ad38f259323625d0 Mon Sep 17 00:00:00 2001 From: syui Date: Sun, 8 Jun 2025 13:21:59 +0900 Subject: [PATCH] fix --- Cargo.toml | 8 +- README.md | 564 +++++++++++++++++---- api-rs/Cargo.toml | 54 -- api-rs/README.md | 483 ------------------ api-rs/migrations/postgres/001_initial.sql | 134 ----- api-rs/migrations/sqlite/001_initial.sql | 130 ----- api-rs/src/auth.rs | 108 ---- api-rs/src/config.rs | 127 ----- api-rs/src/database.rs | 190 ------- api-rs/src/error.rs | 142 ------ api-rs/src/handlers/auth.rs | 161 ------ api-rs/src/handlers/cards.rs | 314 ------------ api-rs/src/handlers/mod.rs | 7 - api-rs/src/handlers/sync.rs | 68 --- api-rs/src/main.rs | 103 ---- api-rs/src/models.rs | 326 ------------ api-rs/src/services/atproto.rs | 232 --------- api-rs/src/services/card_master.rs | 219 -------- api-rs/src/services/gacha.rs | 541 -------------------- api-rs/src/services/mod.rs | 9 - api-rs/src/services/user.rs | 184 ------- src/config.rs | 23 +- src/handlers/cards.rs | 12 +- src/main.rs | 66 ++- src/models.rs | 6 +- start_server.sh | 4 +- 26 files changed, 531 insertions(+), 3684 deletions(-) delete mode 100644 api-rs/Cargo.toml delete mode 100644 api-rs/README.md delete mode 100644 api-rs/migrations/postgres/001_initial.sql delete mode 100644 api-rs/migrations/sqlite/001_initial.sql delete mode 100644 api-rs/src/auth.rs delete mode 100644 api-rs/src/config.rs delete mode 100644 api-rs/src/database.rs delete mode 100644 api-rs/src/error.rs delete mode 100644 api-rs/src/handlers/auth.rs delete mode 100644 api-rs/src/handlers/cards.rs delete mode 100644 api-rs/src/handlers/mod.rs delete mode 100644 api-rs/src/handlers/sync.rs delete mode 100644 api-rs/src/main.rs delete mode 100644 api-rs/src/models.rs delete mode 100644 api-rs/src/services/atproto.rs delete mode 100644 api-rs/src/services/card_master.rs delete mode 100644 api-rs/src/services/gacha.rs delete mode 100644 api-rs/src/services/mod.rs delete mode 100644 api-rs/src/services/user.rs diff --git a/Cargo.toml b/Cargo.toml index 5bbaefa..adc2111 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,13 +2,9 @@ name = "aicard" version = "0.1.0" edition = "2021" -description = "ai.card - Autonomous card collection system with atproto integration" +description = "ai.card API server - Rust implementation of autonomous card collection system" authors = ["syui"] -[[bin]] -name = "aicard" -path = "src/main.rs" - [dependencies] # Core Web Framework axum = { version = "0.7", features = ["macros", "multipart"] } @@ -55,4 +51,4 @@ tracing-subscriber = { version = "0.3", features = ["env-filter"] } # Development serde_yaml = "0.9" -dirs = "5.0" \ No newline at end of file +dirs = "5.0" diff --git a/README.md b/README.md index 5856b6e..2f880ba 100644 --- a/README.md +++ b/README.md @@ -1,143 +1,483 @@ -# ai.card プロジェクト固有情報 +# ai.card API Server (Rust Implementation) -## プロジェクト概要 -- **名前**: ai.card -- **パッケージ**: aicard -- **タイプ**: atproto基盤カードゲーム -- **役割**: ユーザーデータ主権カードゲームシステム +**高性能なRust実装によるatproto基盤カードゲームAPIサーバー** -## 実装状況 +## 📋 プロジェクト概要 -### 現在の状況 -- **ai.bot統合**: ai.botの機能として実装済み -- **カード取得**: atproto accountでmentionすると1日1回カード取得可能 -- **データ管理**: ai.api (MCP server) でユーザー管理 +ai.card API Serverは、分散型SNS「atproto」を基盤とした自律的カード収集システムのRust実装です。ユーザーデータ主権を重視し、高性能・高信頼性を実現します。 -### 独立MCPサーバー(ai.gpt連携) -- **場所**: `/Users/syui/ai/gpt/card/` -- **サーバー**: FastAPI + fastapi_mcp (port 8000) -- **統合**: ai.gptサーバーからHTTP連携 +### 🎯 主要機能 -## アーキテクチャ構成 +- **ガチャシステム**: 確率ベースのカード抽選(レアリティ別配分) +- **atproto連携**: 分散ID(DID)認証とデータ同期 +- **データベース**: 
PostgreSQL/SQLite対応 +- **API**: RESTful + JWT認証 +- **リアルタイム**: WebSocket対応準備済み -### 技術スタック -- **Backend**: FastAPI + MCP -- **Frontend**: React Web UI + SwiftUI iOS app -- **Data**: atproto collection record(ユーザー所有) -- **Auth**: OAuth 2.1 scope(実装待ち) +### 🏗️ アーキテクチャ特徴 -### データフロー -``` -ユーザー → ai.bot mention → カード生成 → atproto collection → ユーザー所有 - ↑ ↓ - ← iOS app表示 ← ai.card API ← +- **パフォーマンス**: Rustの安全性と高速性 +- **データ主権**: ユーザーがデータを完全所有 +- **分散型**: 中央集権に依存しない設計 +- **型安全**: コンパイル時エラー検出 +- **並行処理**: 非同期I/O最適化 + +## 🚀 クイックスタート + +### 前提条件 + +```bash +# Rust 1.70+ +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh + +# データベース(どちらか選択) +# SQLite(開発用・推奨) +sqlite3 --version + +# PostgreSQL(本番用) +psql --version ``` -## 移行計画 +### セットアップ -### Phase 1: 独立化 -- **iOS移植**: Claude担当予定 -- **Web UI**: React実装 -- **API独立**: ai.botからの分離 +```bash +# 1. プロジェクトクローン +cd /Users/syui/ai/ai/card/api-rs -### Phase 2: データ主権実装 -- **atproto collection**: カードデータをユーザー所有に -- **OAuth 2.1**: 不正防止機能実装 -- **画像ファイル**: Cloudflare Pages最適化 +# 2. 依存関係インストール +cargo build -### Phase 3: ゲーム機能拡張 -- **ガチャシステム**: 確率・レアリティ管理 -- **トレード機能**: ユーザー間カード交換 -- **デッキ構築**: カードゲーム戦略要素 +# 3. 環境設定 +cp .env.example .env +vim .env -## yui system適用 +# 4. データベース初期化 +cargo run -- migrate -### 唯一性担保 -- **カード効果**: アカウント固有の効果設定 -- **改ざん防止**: ハッシュ・署名による保証 -- **ゲームバランス**: 唯一性による公平性維持 - -### ai.verse連携 -- **ゲーム内アイテム**: ai.verseでのカード利用 -- **固有スキル**: カードとキャラクターの連動 -- **現実反映**: カード取得がゲーム内能力に影響 - -## ディレクトリ構成 - -``` -/Users/syui/ai/gpt/card/ -├── api/ # FastAPI + MCP server -├── web/ # React Web UI -├── ios/ # SwiftUI iOS app -└── docs/ # 開発ドキュメント +# 5. サーバー起動 +cargo run ``` -## MCPツール(ai.gpt連携) +### 環境変数設定 -### カード管理 -- **card_get_user_cards**: ユーザーカード取得 -- **card_draw_card**: ガチャ実行 -- **card_analyze_collection**: コレクション分析 -- **card_check_daily_limit**: 日次制限確認 -- **card_get_card_stats**: カード統計情報 -- **card_manage_deck**: デッキ管理 +```bash +# .env +DATABASE_URL=sqlite://~/.config/syui/ai/card/aicard.db +# DATABASE_URL=postgresql://user:pass@localhost/aicard -## 開発状況 +SECRET_KEY=your-secret-key-here +PORT=8000 +RUST_LOG=info +CARD_MASTER_URL=https://git.syui.ai/ai/ai/raw/branch/main/ai.json +``` -### 完成済み機能 -- ✅ **基本カード生成**: ai.bot統合での1日1回取得 -- ✅ **atproto連携**: mention機能 -- ✅ **MCP統合**: ai.gptからの操作 +## 📁 プロジェクト構造 -### 開発中機能 -- 🔧 **iOS app**: SwiftUI実装 -- 🔧 **Web UI**: React実装 -- 🔧 **独立API**: FastAPI server +``` +src/ +├── main.rs # エントリーポイント +├── config.rs # 設定管理 +├── error.rs # エラーハンドリング +├── database.rs # データベース抽象化 +├── models.rs # データモデル定義 +├── auth.rs # JWT認証システム +├── handlers/ # APIハンドラー +│ ├── mod.rs +│ ├── auth.rs # 認証API +│ ├── cards.rs # カードAPI +│ └── sync.rs # 同期API +└── services/ # ビジネスロジック + ├── mod.rs + ├── gacha.rs # ガチャシステム + ├── user.rs # ユーザー管理 + ├── card_master.rs # カードマスター + └── atproto.rs # atproto連携 -### 将来機能 -- 📋 **OAuth 2.1**: 不正防止強化 -- 📋 **画像最適化**: Cloudflare Pages -- 📋 **ゲーム拡張**: トレード・デッキ戦略 +migrations/ # データベースマイグレーション +├── postgres/ +└── sqlite/ -## ai.botからの移行詳細 +Cargo.toml # 依存関係定義 +``` -### 現在のai.bot実装 -- **Rust製**: seahorse CLI framework -- **atproto連携**: mention機能でカード配布 -- **日次制限**: 1アカウント1日1回取得 -- **自動生成**: AI絵画(Leonardo.AI + Stable Diffusion) +## 🗄️ データベース設計 -### 独立化の理由 -- **iOS展開**: モバイルアプリでの独立した体験 -- **ゲーム拡張**: デッキ構築・バトル機能の追加 -- **データ主権**: ユーザーによる完全なデータ所有 -- **スケーラビリティ**: サーバー負荷分散 +### 主要テーブル -## 技術的課題と解決策 +```sql +-- ユーザー管理 +users (did, handle, created_at, updated_at) -### データ改ざん防止 -- **短期**: MCP serverによる検証 -- **中期**: OAuth 2.1 scope実装待ち -- **長期**: ブロックチェーン的整合性チェック +-- カードマスターデータ +card_master (id, name, base_cp_min, 
base_cp_max, color, description) -### スケーラビリティ -- **画像配信**: Cloudflare Pages活用 -- **API負荷**: FastAPIによる高速処理 -- **データ保存**: atproto分散ストレージ +-- ユーザー保有カード +user_cards (id, user_did, card_id, cp, status, obtained_at, is_unique, unique_id) -### ユーザー体験 -- **直感的UI**: iOS/Webでの統一UX -- **リアルタイム更新**: WebSocketでの即座反映 -- **オフライン対応**: ローカルキャッシュ機能 +-- ユニークカード登録 +unique_card_registry (unique_id, card_id, owner_did, obtained_at) -## ai.game連携構想 +-- ガチャ履歴 +draw_history (id, user_did, card_id, status, cp, is_paid, drawn_at) -### Play-to-Work統合 -- **カードゲームプレイ → 業務成果変換**: ai.gameデバイスでの労働ゲーム化 -- **デッキ構築戦略 → 企業戦略思考**: カード組み合わせが戦略思考を鍛練 -- **トレード交渉 → ビジネススキル**: 他プレイヤーとの交渉が実務能力向上 +-- ガチャプール +gacha_pools (id, name, description, is_active, pickup_card_ids) +``` -### メタバース展開 -- **ai.verse統合**: 3D世界でのカードバトル -- **アバター連動**: 所有カードがキャラクター能力に影響 -- **配信コンテンツ**: カードゲームが配信可能なエンターテイメント \ No newline at end of file +### カードレアリティ + +| レアリティ | 確率 | 倍率 | +|------------|------|------| +| Normal | 60% | 1.0x | +| Rare | 25% | 1.5x | +| SuperRare | 10% | 2.0x | +| Kira | 4% | 3.0x | +| Unique | 1% | 5.0x | + +## 🔌 API エンドポイント + +### 認証API + +```http +POST /api/v1/auth/login +Content-Type: application/json + +{ + "identifier": "user.handle.or.did", + "password": "password" +} + +Response: +{ + "access_token": "jwt_token", + "token_type": "Bearer", + "expires_in": 3600, + "user": { + "did": "did:plc:...", + "handle": "user.handle" + } +} +``` + +### カードAPI + +```http +# カード一覧取得 +GET /api/v1/cards/collection?did=did:plc:xxx&limit=20&offset=0 + +# ガチャ実行 +POST /api/v1/cards/draw +{ + "user_did": "did:plc:xxx", + "is_paid": false, + "pool_id": null +} + +# カード詳細 +GET /api/v1/cards/details/{card_id} + +# ユニークカード登録状況 +GET /api/v1/cards/unique-registry +``` + +### 同期API + +```http +# atproto PDS同期 +POST /api/v1/sync/cards/export +POST /api/v1/sync/cards/import +POST /api/v1/sync/cards/bidirectional +``` + +## 🎮 ガチャシステム + +### 確率計算 + +```rust +// 基本確率 +let base_probabilities = [ + (CardRarity::Normal, 0.6), + (CardRarity::Rare, 0.25), + (CardRarity::SuperRare, 0.1), + (CardRarity::Kira, 0.04), + (CardRarity::Unique, 0.01), +]; + +// 有料ガチャボーナス +if is_paid { + probabilities[rare_index] *= 1.2; + probabilities[unique_index] *= 2.0; +} +``` + +### ユニーク性保証 + +```rust +// グローバルユニークID管理 +if rarity == CardRarity::Unique { + let unique_id = Uuid::new_v4(); + unique_card_registry.insert(unique_id, card_id, user_did); +} +``` + +## 🔐 セキュリティ + +### JWT認証 + +```rust +// トークン生成 +let claims = Claims { + did: user.did, + handle: user.handle, + exp: expiration_timestamp, +}; +let token = encode(&Header::default(), &claims, &encoding_key)?; +``` + +### atproto DID検証 + +```rust +// DID解決とPDS検出 +async fn resolve_pds_from_did(did: &str) -> AppResult { + match did { + did if did.starts_with("did:plc:") => resolve_plc_did(did).await, + did if did.starts_with("did:web:") => extract_web_domain(did), + _ => Ok("https://bsky.social".to_string()), // fallback + } +} +``` + +## 🧪 テスト + +### ユニットテスト + +```bash +# 全テスト実行 +cargo test + +# 特定モジュール +cargo test services::gacha + +# 統合テスト +cargo test --test integration +``` + +### APIテスト + +```bash +# ヘルスチェック +curl http://localhost:8000/health + +# ガチャ統計 +curl http://localhost:8000/api/v1/cards/gacha-stats + +# 認証テスト +curl -X POST http://localhost:8000/api/v1/auth/login \ + -H "Content-Type: application/json" \ + -d '{"identifier":"test.user","password":"password"}' +``` + +## 🚀 本番デプロイ + +### Docker + +```dockerfile +FROM rust:1.70 as builder +WORKDIR /app +COPY . . 
+RUN cargo build --release
+
+FROM debian:bookworm-slim
+RUN apt-get update && apt-get install -y ca-certificates
+COPY --from=builder /app/target/release/ai-card-api /usr/local/bin/
+CMD ["ai-card-api"]
+```
+
+### 起動コマンド
+
+```bash
+# 開発環境
+cargo run
+
+# 本番環境
+RUST_LOG=info DATABASE_URL=postgresql://... ./target/release/ai-card-api
+```
+
+## 📊 パフォーマンス
+
+### ベンチマーク結果
+
+| 項目 | Rust実装 | Python実装 | 改善率 |
+|------|----------|-------------|--------|
+| レスポンス時間 | 2ms | 15ms | 7.5x |
+| メモリ使用量 | 20MB | 150MB | 7.5x |
+| 同時接続数 | 10,000+ | 1,000 | 10x |
+| スループット | 50k req/s | 5k req/s | 10x |
+
+### システム要件
+
+| 環境 | CPU | メモリ | ストレージ |
+|------|-----|-------|------------|
+| 開発 | 1 core | 512MB | 1GB |
+| 本番 | 2 cores | 2GB | 20GB |
+| スケール | 4+ cores | 8GB+ | 100GB+ |
+
+## 🔧 開発ガイド
+
+### 依存関係
+
+```toml
+[dependencies]
+# Web Framework
+axum = { version = "0.7", features = ["macros", "multipart"] }
+tokio = { version = "1.0", features = ["full"] }
+
+# Database
+sqlx = { version = "0.7", features = ["runtime-tokio-rustls", "postgres", "sqlite", "uuid", "chrono", "migrate"] }
+
+# Serialization
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+
+# Authentication
+jsonwebtoken = "9.0"
+bcrypt = "0.15"
+
+# Other
+uuid = { version = "1.0", features = ["v4", "serde"] }
+chrono = { version = "0.4", features = ["serde"] }
+tracing = "0.1"
+```
+
+### コーディング規約
+
+```rust
+// エラーハンドリング
+type AppResult<T> = Result<T, AppError>;
+
+// 非同期関数
+async fn create_user(db: &Database, did: &str) -> AppResult<User> {
+    // implementation
+}
+
+// 構造体定義
+#[derive(Debug, Clone, FromRow, Serialize, Deserialize)]
+pub struct User {
+    pub id: i32,
+    pub did: String,
+    pub handle: String,
+    pub created_at: DateTime<Utc>,
+}
+```
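+
+上記の規約を補足する最小スケッチです(READMEの依存関係にある thiserror と axum を前提とした一例で、バリアント構成やメッセージは説明用の仮のものです。実際の `src/error.rs` の定義とは細部が異なる可能性があります):
+
+```rust
+use axum::{http::StatusCode, response::{IntoResponse, Response}, Json};
+use serde_json::json;
+use thiserror::Error;
+
+// エラー型の一例(バリアントは説明用の仮のもの)
+#[derive(Error, Debug)]
+pub enum AppError {
+    #[error("Not found: {0}")]
+    NotFound(String),
+    #[error("Validation error: {0}")]
+    Validation(String),
+    #[error("Database error: {0}")]
+    Database(#[from] sqlx::Error),
+}
+
+// ハンドラー共通のResult型エイリアス
+pub type AppResult<T> = Result<T, AppError>;
+
+// axumレスポンスへの変換: ステータスコードとJSONボディにマッピング
+impl IntoResponse for AppError {
+    fn into_response(self) -> Response {
+        let (status, message) = match &self {
+            AppError::NotFound(msg) => (StatusCode::NOT_FOUND, msg.clone()),
+            AppError::Validation(msg) => (StatusCode::BAD_REQUEST, msg.clone()),
+            AppError::Database(_) => (StatusCode::INTERNAL_SERVER_ERROR, "Database error".to_string()),
+        };
+        (status, Json(json!({ "error": message }))).into_response()
+    }
+}
+```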
+
+## 📈 ロードマップ
+
+### Phase 1: 基盤強化 ✅
+- [x] 基本API実装
+- [x] データベース設計
+- [x] ガチャシステム
+- [x] JWT認証
+
+### Phase 2: atproto統合
+- [ ] 実際のPDS連携
+- [ ] DID検証強化
+- [ ] データ同期機能
+- [ ] 分散ストレージ
+
+### Phase 3: スケーリング
+- [ ] Redis キャッシング
+- [ ] 水平スケーリング
+- [ ] CDN配信
+- [ ] 監視システム
+
+### Phase 4: 高度機能
+- [ ] WebSocket リアルタイム
+- [ ] GraphQL API
+- [ ] 機械学習統合
+- [ ] 国際化対応
+
+## 🤝 コントリビューション
+
+### 開発フロー
+
+```bash
+# 1. フォーク
+git clone https://git.syui.ai/ai/ai
+
+# 2. ブランチ作成
+git checkout -b feature/new-feature
+
+# 3. 開発・テスト
+cargo test
+cargo clippy
+cargo fmt
+
+# 4. プルリクエスト
+git push origin feature/new-feature
+```
+
+### コード品質
+
+```bash
+# 静的解析
+cargo clippy -- -D warnings
+
+# フォーマット
+cargo fmt --check
+
+# テストカバレッジ
+cargo tarpaulin --out Html
+```
+
+## 🐛 トラブルシューティング
+
+### よくある問題
+
+**Q: SQLXコンパイルエラー**
+```bash
+error: set `DATABASE_URL` to use query macros online
+```
+A: 環境変数設定またはオフラインモード使用
+```bash
+export DATABASE_URL=sqlite://test.db
+# または
+cargo sqlx prepare
+```
+
+**Q: データベース接続エラー**
+```
+Database connection failed
+```
+A: URL確認とパーミッション設定
+```bash
+# SQLite
+mkdir -p ~/.config/syui/ai/card/
+chmod 755 ~/.config/syui/ai/card/
+
+# PostgreSQL
+psql -h localhost -U user -d aicard -c "\l"
+```
+
+**Q: 認証失敗**
+```
+JWT validation error
+```
+A: シークレットキー確認
+```bash
+export SECRET_KEY=your-secret-key-here
+```
+
+## 📄 ライセンス
+
+MIT License - 詳細は[LICENSE](LICENSE)を参照
+
+## 🙏 謝辞
+
+- **atproto**: 分散型SNSプロトコル
+- **Rust Community**: 高品質なクレート提供
+- **sqlx**: 型安全なデータベースライブラリ
+- **axum**: 高性能Webフレームワーク
+
+---
+
+**syui** (2025) - ai.card エコシステム統合プロジェクト
\ No newline at end of file
diff --git a/api-rs/Cargo.toml b/api-rs/Cargo.toml
deleted file mode 100644
index 931807e..0000000
--- a/api-rs/Cargo.toml
+++ /dev/null
@@ -1,54 +0,0 @@
-[package]
-name = "ai-card-api"
-version = "0.1.0"
-edition = "2021"
-description = "ai.card API server - Rust implementation of autonomous card collection system"
-authors = ["syui"]
-
-[dependencies]
-# Core Web Framework
-axum = { version = "0.7", features = ["macros", "multipart"] }
-tokio = { version = "1.0", features = ["full"] }
-tower = { version = "0.4", features = ["full"] }
-tower-http = { version = "0.5", features = ["cors", "trace"] }
-
-# Database & ORM
-sqlx = { version = "0.7", features = ["runtime-tokio-rustls", "postgres", "sqlite", "uuid", "chrono", "migrate"] }
-uuid = { version = "1.0", features = ["v4", "serde"] }
-
-# Serialization & Validation
-serde = { version = "1.0", features = ["derive"] }
-serde_json = "1.0"
-validator = { version = "0.18", features = ["derive"] }
-
-# Date/Time
-chrono = { version = "0.4", features = ["serde"] }
-
-# Authentication & Security
-jsonwebtoken = "9.0"
-bcrypt = "0.15"
-
-# HTTP Client (for atproto integration)
-reqwest = { version = "0.11", features = ["json"] }
-
-# Configuration
-config = "0.13"
-dotenvy = "0.15"
-
-# CLI
-clap = { version = "4.0", features = ["derive"] }
-
-# Random (for gacha system)
-rand = "0.8"
-
-# Error Handling
-anyhow = "1.0"
-thiserror = "1.0"
-
-# Logging
-tracing = "0.1"
-tracing-subscriber = { version = "0.3", features = ["env-filter"] }
-
-# Development
-serde_yaml = "0.9"
-dirs = "5.0"
\ No newline at end of file
diff --git a/api-rs/README.md b/api-rs/README.md
deleted file mode 100644
index 2f880ba..0000000
--- a/api-rs/README.md
+++ /dev/null
@@ -1,483 +0,0 @@
-# ai.card API Server (Rust Implementation)
-
-**高性能なRust実装によるatproto基盤カードゲームAPIサーバー**
-
-## 📋 プロジェクト概要
-
-ai.card API Serverは、分散型SNS「atproto」を基盤とした自律的カード収集システムのRust実装です。ユーザーデータ主権を重視し、高性能・高信頼性を実現します。
-
-### 🎯 主要機能
-
-- **ガチャシステム**: 確率ベースのカード抽選(レアリティ別配分)
-- **atproto連携**: 分散ID(DID)認証とデータ同期
-- **データベース**: PostgreSQL/SQLite対応
-- **API**: RESTful + JWT認証
-- **リアルタイム**: WebSocket対応準備済み
-
-### 🏗️ アーキテクチャ特徴
-
-- **パフォーマンス**: Rustの安全性と高速性
-- **データ主権**: ユーザーがデータを完全所有
-- **分散型**: 中央集権に依存しない設計
-- **型安全**: コンパイル時エラー検出
-- **並行処理**: 非同期I/O最適化
-
-## 🚀 クイックスタート
-
-### 前提条件
-
-```bash
-# Rust 1.70+
-curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
-
-# データベース(どちらか選択)
-# SQLite(開発用・推奨)
-sqlite3 --version
-
-# PostgreSQL(本番用)
-psql --version
-```
-
-### セットアップ
-
-```bash
-# 1. 
プロジェクトクローン -cd /Users/syui/ai/ai/card/api-rs - -# 2. 依存関係インストール -cargo build - -# 3. 環境設定 -cp .env.example .env -vim .env - -# 4. データベース初期化 -cargo run -- migrate - -# 5. サーバー起動 -cargo run -``` - -### 環境変数設定 - -```bash -# .env -DATABASE_URL=sqlite://~/.config/syui/ai/card/aicard.db -# DATABASE_URL=postgresql://user:pass@localhost/aicard - -SECRET_KEY=your-secret-key-here -PORT=8000 -RUST_LOG=info -CARD_MASTER_URL=https://git.syui.ai/ai/ai/raw/branch/main/ai.json -``` - -## 📁 プロジェクト構造 - -``` -src/ -├── main.rs # エントリーポイント -├── config.rs # 設定管理 -├── error.rs # エラーハンドリング -├── database.rs # データベース抽象化 -├── models.rs # データモデル定義 -├── auth.rs # JWT認証システム -├── handlers/ # APIハンドラー -│ ├── mod.rs -│ ├── auth.rs # 認証API -│ ├── cards.rs # カードAPI -│ └── sync.rs # 同期API -└── services/ # ビジネスロジック - ├── mod.rs - ├── gacha.rs # ガチャシステム - ├── user.rs # ユーザー管理 - ├── card_master.rs # カードマスター - └── atproto.rs # atproto連携 - -migrations/ # データベースマイグレーション -├── postgres/ -└── sqlite/ - -Cargo.toml # 依存関係定義 -``` - -## 🗄️ データベース設計 - -### 主要テーブル - -```sql --- ユーザー管理 -users (did, handle, created_at, updated_at) - --- カードマスターデータ -card_master (id, name, base_cp_min, base_cp_max, color, description) - --- ユーザー保有カード -user_cards (id, user_did, card_id, cp, status, obtained_at, is_unique, unique_id) - --- ユニークカード登録 -unique_card_registry (unique_id, card_id, owner_did, obtained_at) - --- ガチャ履歴 -draw_history (id, user_did, card_id, status, cp, is_paid, drawn_at) - --- ガチャプール -gacha_pools (id, name, description, is_active, pickup_card_ids) -``` - -### カードレアリティ - -| レアリティ | 確率 | 倍率 | -|------------|------|------| -| Normal | 60% | 1.0x | -| Rare | 25% | 1.5x | -| SuperRare | 10% | 2.0x | -| Kira | 4% | 3.0x | -| Unique | 1% | 5.0x | - -## 🔌 API エンドポイント - -### 認証API - -```http -POST /api/v1/auth/login -Content-Type: application/json - -{ - "identifier": "user.handle.or.did", - "password": "password" -} - -Response: -{ - "access_token": "jwt_token", - "token_type": "Bearer", - "expires_in": 3600, - "user": { - "did": "did:plc:...", - "handle": "user.handle" - } -} -``` - -### カードAPI - -```http -# カード一覧取得 -GET /api/v1/cards/collection?did=did:plc:xxx&limit=20&offset=0 - -# ガチャ実行 -POST /api/v1/cards/draw -{ - "user_did": "did:plc:xxx", - "is_paid": false, - "pool_id": null -} - -# カード詳細 -GET /api/v1/cards/details/{card_id} - -# ユニークカード登録状況 -GET /api/v1/cards/unique-registry -``` - -### 同期API - -```http -# atproto PDS同期 -POST /api/v1/sync/cards/export -POST /api/v1/sync/cards/import -POST /api/v1/sync/cards/bidirectional -``` - -## 🎮 ガチャシステム - -### 確率計算 - -```rust -// 基本確率 -let base_probabilities = [ - (CardRarity::Normal, 0.6), - (CardRarity::Rare, 0.25), - (CardRarity::SuperRare, 0.1), - (CardRarity::Kira, 0.04), - (CardRarity::Unique, 0.01), -]; - -// 有料ガチャボーナス -if is_paid { - probabilities[rare_index] *= 1.2; - probabilities[unique_index] *= 2.0; -} -``` - -### ユニーク性保証 - -```rust -// グローバルユニークID管理 -if rarity == CardRarity::Unique { - let unique_id = Uuid::new_v4(); - unique_card_registry.insert(unique_id, card_id, user_did); -} -``` - -## 🔐 セキュリティ - -### JWT認証 - -```rust -// トークン生成 -let claims = Claims { - did: user.did, - handle: user.handle, - exp: expiration_timestamp, -}; -let token = encode(&Header::default(), &claims, &encoding_key)?; -``` - -### atproto DID検証 - -```rust -// DID解決とPDS検出 -async fn resolve_pds_from_did(did: &str) -> AppResult { - match did { - did if did.starts_with("did:plc:") => resolve_plc_did(did).await, - did if did.starts_with("did:web:") => extract_web_domain(did), - _ => 
Ok("https://bsky.social".to_string()), // fallback - } -} -``` - -## 🧪 テスト - -### ユニットテスト - -```bash -# 全テスト実行 -cargo test - -# 特定モジュール -cargo test services::gacha - -# 統合テスト -cargo test --test integration -``` - -### APIテスト - -```bash -# ヘルスチェック -curl http://localhost:8000/health - -# ガチャ統計 -curl http://localhost:8000/api/v1/cards/gacha-stats - -# 認証テスト -curl -X POST http://localhost:8000/api/v1/auth/login \ - -H "Content-Type: application/json" \ - -d '{"identifier":"test.user","password":"password"}' -``` - -## 🚀 本番デプロイ - -### Docker - -```dockerfile -FROM rust:1.70 as builder -WORKDIR /app -COPY . . -RUN cargo build --release - -FROM debian:bookworm-slim -RUN apt-get update && apt-get install -y ca-certificates -COPY --from=builder /app/target/release/ai-card-api /usr/local/bin/ -CMD ["ai-card-api"] -``` - -### 起動コマンド - -```bash -# 開発環境 -cargo run - -# 本番環境 -RUST_LOG=info DATABASE_URL=postgresql://... ./target/release/ai-card-api -``` - -## 📊 パフォーマンス - -### ベンチマーク結果 - -| 項目 | Rust実装 | Python実装 | 改善率 | -|------|----------|-------------|--------| -| レスポンス時間 | 2ms | 15ms | 7.5x | -| メモリ使用量 | 20MB | 150MB | 7.5x | -| 同時接続数 | 10,000+ | 1,000 | 10x | -| スループット | 50k req/s | 5k req/s | 10x | - -### システム要件 - -| 環境 | CPU | メモリ | ストレージ | -|------|-----|-------|------------| -| 開発 | 1 core | 512MB | 1GB | -| 本番 | 2 cores | 2GB | 20GB | -| スケール | 4+ cores | 8GB+ | 100GB+ | - -## 🔧 開発ガイド - -### 依存関係 - -```toml -[dependencies] -# Web Framework -axum = { version = "0.7", features = ["macros", "multipart"] } -tokio = { version = "1.0", features = ["full"] } - -# Database -sqlx = { version = "0.7", features = ["runtime-tokio-rustls", "postgres", "sqlite", "uuid", "chrono", "migrate"] } - -# Serialization -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" - -# Authentication -jsonwebtoken = "9.0" -bcrypt = "0.15" - -# Other -uuid = { version = "1.0", features = ["v4", "serde"] } -chrono = { version = "0.4", features = ["serde"] } -tracing = "0.1" -``` - -### コーディング規約 - -```rust -// エラーハンドリング -type AppResult = Result; - -// 非同期関数 -async fn create_user(db: &Database, did: &str) -> AppResult { - // implementation -} - -// 構造体定義 -#[derive(Debug, Clone, FromRow, Serialize, Deserialize)] -pub struct User { - pub id: i32, - pub did: String, - pub handle: String, - pub created_at: DateTime, -} -``` - -## 📈 ロードマップ - -### Phase 1: 基盤強化 ✅ -- [x] 基本API実装 -- [x] データベース設計 -- [x] ガチャシステム -- [x] JWT認証 - -### Phase 2: atproto統合 -- [ ] 実際のPDS連携 -- [ ] DID検証強化 -- [ ] データ同期機能 -- [ ] 分散ストレージ - -### Phase 3: スケーリング -- [ ] Redis キャッシング -- [ ] 水平スケーリング -- [ ] CDN配信 -- [ ] 監視システム - -### Phase 4: 高度機能 -- [ ] WebSocket リアルタイム -- [ ] GraphQL API -- [ ] 機械学習統合 -- [ ] 国際化対応 - -## 🤝 コントリビューション - -### 開発フロー - -```bash -# 1. フォーク -git clone https://git.syui.ai/ai/ai - -# 2. ブランチ作成 -git checkout -b feature/new-feature - -# 3. 開発・テスト -cargo test -cargo clippy -cargo fmt - -# 4. 
プルリクエスト -git push origin feature/new-feature -``` - -### コード品質 - -```bash -# 静的解析 -cargo clippy -- -D warnings - -# フォーマット -cargo fmt --check - -# テストカバレッジ -cargo tarpaulin --out Html -``` - -## 🐛 トラブルシューティング - -### よくある問題 - -**Q: SQLXコンパイルエラー** -```bash -error: set `DATABASE_URL` to use query macros online -``` -A: 環境変数設定またはオフラインモード使用 -```bash -export DATABASE_URL=sqlite://test.db -# または -cargo sqlx prepare -``` - -**Q: データベース接続エラー** -``` -Database connection failed -``` -A: URL確認とパーミッション設定 -```bash -# SQLite -mkdir -p ~/.config/syui/ai/card/ -chmod 755 ~/.config/syui/ai/card/ - -# PostgreSQL -psql -h localhost -U user -d aicard -c "\l" -``` - -**Q: 認証失敗** -``` -JWT validation error -``` -A: シークレットキー確認 -```bash -export SECRET_KEY=your-secret-key-here -``` - -## 📄 ライセンス - -MIT License - 詳細は[LICENSE](LICENSE)を参照 - -## 🙏 謝辞 - -- **atproto**: 分散型SNSプロトコル -- **Rust Community**: 高品質なクレート提供 -- **sqlx**: 型安全なデータベースライブラリ -- **axum**: 高性能Webフレームワーク - ---- - -**syui** (2025) - ai.card エコシステム統合プロジェクト \ No newline at end of file diff --git a/api-rs/migrations/postgres/001_initial.sql b/api-rs/migrations/postgres/001_initial.sql deleted file mode 100644 index 3ad9f55..0000000 --- a/api-rs/migrations/postgres/001_initial.sql +++ /dev/null @@ -1,134 +0,0 @@ --- PostgreSQL migration for ai.card database schema - --- Create custom types -CREATE TYPE card_rarity AS ENUM ('normal', 'rare', 'super_rare', 'kira', 'unique'); - --- Enable UUID extension -CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; - --- Users table - stores atproto DID-based user information -CREATE TABLE IF NOT EXISTS users ( - id SERIAL PRIMARY KEY, - did TEXT NOT NULL UNIQUE, -- atproto Decentralized Identifier - handle TEXT NOT NULL, -- atproto handle (e.g., alice.bsky.social) - created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), - updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW() -); - -CREATE INDEX IF NOT EXISTS idx_users_did ON users(did); -CREATE INDEX IF NOT EXISTS idx_users_handle ON users(handle); - --- Card master data - template definitions for all card types -CREATE TABLE IF NOT EXISTS card_master ( - id INTEGER PRIMARY KEY, -- Card ID (0-15 in current system) - name TEXT NOT NULL, -- Card name (e.g., "ai", "dream", "radiance") - base_cp_min INTEGER NOT NULL, -- Minimum base CP for this card - base_cp_max INTEGER NOT NULL, -- Maximum base CP for this card - color TEXT NOT NULL, -- Card color theme - description TEXT NOT NULL -- Card description/lore -); - --- User cards - actual card instances owned by users -CREATE TABLE IF NOT EXISTS user_cards ( - id SERIAL PRIMARY KEY, - user_id INTEGER NOT NULL, - card_id INTEGER NOT NULL, -- References card_master.id - cp INTEGER NOT NULL, -- Calculated CP (base_cp * rarity_multiplier) - status card_rarity NOT NULL, -- Card rarity - skill TEXT, -- Optional skill description - obtained_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), - is_unique BOOLEAN NOT NULL DEFAULT FALSE, - unique_id UUID, -- UUID for unique cards - - FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE, - FOREIGN KEY (card_id) REFERENCES card_master(id) -); - -CREATE INDEX IF NOT EXISTS idx_user_cards_user_id ON user_cards(user_id); -CREATE INDEX IF NOT EXISTS idx_user_cards_card_id ON user_cards(card_id); -CREATE INDEX IF NOT EXISTS idx_user_cards_status ON user_cards(status); -CREATE INDEX IF NOT EXISTS idx_user_cards_unique_id ON user_cards(unique_id); - --- Global unique card registry - tracks ownership of unique cards -CREATE TABLE IF NOT EXISTS unique_card_registry ( - id SERIAL PRIMARY 
KEY, - unique_id UUID NOT NULL UNIQUE, -- UUID from user_cards.unique_id - card_id INTEGER NOT NULL, -- Which card type is unique - owner_did TEXT NOT NULL, -- Current owner's atproto DID - obtained_at TIMESTAMP WITH TIME ZONE NOT NULL, - verse_skill_id TEXT, -- Optional verse skill reference - - FOREIGN KEY (card_id) REFERENCES card_master(id), - UNIQUE(card_id) -- Only one unique per card_id allowed -); - -CREATE INDEX IF NOT EXISTS idx_unique_registry_card_id ON unique_card_registry(card_id); -CREATE INDEX IF NOT EXISTS idx_unique_registry_owner_did ON unique_card_registry(owner_did); - --- Draw history - tracks all gacha draws for statistics -CREATE TABLE IF NOT EXISTS draw_history ( - id SERIAL PRIMARY KEY, - user_id INTEGER NOT NULL, - card_id INTEGER NOT NULL, - status card_rarity NOT NULL, - cp INTEGER NOT NULL, - is_paid BOOLEAN NOT NULL DEFAULT FALSE, -- Paid vs free gacha - drawn_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), - - FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE, - FOREIGN KEY (card_id) REFERENCES card_master(id) -); - -CREATE INDEX IF NOT EXISTS idx_draw_history_user_id ON draw_history(user_id); -CREATE INDEX IF NOT EXISTS idx_draw_history_drawn_at ON draw_history(drawn_at); -CREATE INDEX IF NOT EXISTS idx_draw_history_status ON draw_history(status); - --- Gacha pools - special event pools with rate-ups -CREATE TABLE IF NOT EXISTS gacha_pools ( - id SERIAL PRIMARY KEY, - name TEXT NOT NULL, - description TEXT NOT NULL, - is_active BOOLEAN NOT NULL DEFAULT TRUE, - start_at TIMESTAMP WITH TIME ZONE, - end_at TIMESTAMP WITH TIME ZONE, - pickup_card_ids INTEGER[], -- Array of card IDs - rate_up_multiplier DECIMAL(4,2) NOT NULL DEFAULT 1.0 -); - -CREATE INDEX IF NOT EXISTS idx_gacha_pools_active ON gacha_pools(is_active); -CREATE INDEX IF NOT EXISTS idx_gacha_pools_dates ON gacha_pools(start_at, end_at); - --- Insert default card master data (0-15 cards from ai.json) -INSERT INTO card_master (id, name, base_cp_min, base_cp_max, color, description) VALUES - (0, 'ai', 100, 200, '#4A90E2', 'The core essence of existence'), - (1, 'dream', 90, 180, '#9B59B6', 'Visions of possibility'), - (2, 'radiance', 110, 220, '#F39C12', 'Brilliant light energy'), - (3, 'neutron', 120, 240, '#34495E', 'Dense stellar core'), - (4, 'sun', 130, 260, '#E74C3C', 'Solar radiance'), - (5, 'night', 80, 160, '#2C3E50', 'Darkness and mystery'), - (6, 'snow', 70, 140, '#ECF0F1', 'Pure frozen crystalline'), - (7, 'thunder', 140, 280, '#F1C40F', 'Electric storm energy'), - (8, 'ultimate', 150, 300, '#8E44AD', 'The highest form'), - (9, 'sword', 160, 320, '#95A5A6', 'Blade of cutting truth'), - (10, 'destruction', 170, 340, '#C0392B', 'Force of entropy'), - (11, 'earth', 90, 180, '#27AE60', 'Grounding foundation'), - (12, 'galaxy', 180, 360, '#3498DB', 'Cosmic expanse'), - (13, 'create', 100, 200, '#16A085', 'Power of generation'), - (14, 'supernova', 200, 400, '#E67E22', 'Stellar explosion'), - (15, 'world', 250, 500, '#9B59B6', 'Reality itself') -ON CONFLICT (id) DO NOTHING; - --- Create function for updating updated_at timestamp -CREATE OR REPLACE FUNCTION update_updated_at_column() -RETURNS TRIGGER AS $$ -BEGIN - NEW.updated_at = NOW(); - RETURN NEW; -END; -$$ language 'plpgsql'; - --- Create trigger for updating users.updated_at -CREATE TRIGGER trigger_users_updated_at - BEFORE UPDATE ON users - FOR EACH ROW - EXECUTE FUNCTION update_updated_at_column(); \ No newline at end of file diff --git a/api-rs/migrations/sqlite/001_initial.sql b/api-rs/migrations/sqlite/001_initial.sql 
deleted file mode 100644 index 617a242..0000000 --- a/api-rs/migrations/sqlite/001_initial.sql +++ /dev/null @@ -1,130 +0,0 @@ --- SQLite migration for ai.card database schema - --- Create custom types (SQLite uses CHECK constraints instead of ENUMs) --- Card rarity levels -CREATE TABLE IF NOT EXISTS card_rarity_enum ( - value TEXT PRIMARY KEY CHECK (value IN ('normal', 'rare', 'super_rare', 'kira', 'unique')) -); - -INSERT OR IGNORE INTO card_rarity_enum (value) VALUES - ('normal'), ('rare'), ('super_rare'), ('kira'), ('unique'); - --- Users table - stores atproto DID-based user information -CREATE TABLE IF NOT EXISTS users ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - did TEXT NOT NULL UNIQUE, -- atproto Decentralized Identifier - handle TEXT NOT NULL, -- atproto handle (e.g., alice.bsky.social) - created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP -); - -CREATE INDEX IF NOT EXISTS idx_users_did ON users(did); -CREATE INDEX IF NOT EXISTS idx_users_handle ON users(handle); - --- Card master data - template definitions for all card types -CREATE TABLE IF NOT EXISTS card_master ( - id INTEGER PRIMARY KEY, -- Card ID (0-15 in current system) - name TEXT NOT NULL, -- Card name (e.g., "ai", "dream", "radiance") - base_cp_min INTEGER NOT NULL, -- Minimum base CP for this card - base_cp_max INTEGER NOT NULL, -- Maximum base CP for this card - color TEXT NOT NULL, -- Card color theme - description TEXT NOT NULL -- Card description/lore -); - --- User cards - actual card instances owned by users -CREATE TABLE IF NOT EXISTS user_cards ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - user_id INTEGER NOT NULL, - card_id INTEGER NOT NULL, -- References card_master.id - cp INTEGER NOT NULL, -- Calculated CP (base_cp * rarity_multiplier) - status TEXT NOT NULL -- Card rarity - CHECK (status IN ('normal', 'rare', 'super_rare', 'kira', 'unique')), - skill TEXT, -- Optional skill description - obtained_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, - is_unique BOOLEAN NOT NULL DEFAULT FALSE, - unique_id TEXT, -- UUID for unique cards - - FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE, - FOREIGN KEY (card_id) REFERENCES card_master(id) -); - -CREATE INDEX IF NOT EXISTS idx_user_cards_user_id ON user_cards(user_id); -CREATE INDEX IF NOT EXISTS idx_user_cards_card_id ON user_cards(card_id); -CREATE INDEX IF NOT EXISTS idx_user_cards_status ON user_cards(status); -CREATE INDEX IF NOT EXISTS idx_user_cards_unique_id ON user_cards(unique_id); - --- Global unique card registry - tracks ownership of unique cards -CREATE TABLE IF NOT EXISTS unique_card_registry ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - unique_id TEXT NOT NULL UNIQUE, -- UUID from user_cards.unique_id - card_id INTEGER NOT NULL, -- Which card type is unique - owner_did TEXT NOT NULL, -- Current owner's atproto DID - obtained_at DATETIME NOT NULL, - verse_skill_id TEXT, -- Optional verse skill reference - - FOREIGN KEY (card_id) REFERENCES card_master(id), - UNIQUE(card_id) -- Only one unique per card_id allowed -); - -CREATE INDEX IF NOT EXISTS idx_unique_registry_card_id ON unique_card_registry(card_id); -CREATE INDEX IF NOT EXISTS idx_unique_registry_owner_did ON unique_card_registry(owner_did); - --- Draw history - tracks all gacha draws for statistics -CREATE TABLE IF NOT EXISTS draw_history ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - user_id INTEGER NOT NULL, - card_id INTEGER NOT NULL, - status TEXT NOT NULL - CHECK (status IN ('normal', 'rare', 'super_rare', 'kira', 
'unique')), - cp INTEGER NOT NULL, - is_paid BOOLEAN NOT NULL DEFAULT FALSE, -- Paid vs free gacha - drawn_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, - - FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE, - FOREIGN KEY (card_id) REFERENCES card_master(id) -); - -CREATE INDEX IF NOT EXISTS idx_draw_history_user_id ON draw_history(user_id); -CREATE INDEX IF NOT EXISTS idx_draw_history_drawn_at ON draw_history(drawn_at); -CREATE INDEX IF NOT EXISTS idx_draw_history_status ON draw_history(status); - --- Gacha pools - special event pools with rate-ups -CREATE TABLE IF NOT EXISTS gacha_pools ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - name TEXT NOT NULL, - description TEXT NOT NULL, - is_active BOOLEAN NOT NULL DEFAULT TRUE, - start_at DATETIME, - end_at DATETIME, - pickup_card_ids TEXT, -- JSON array of card IDs - rate_up_multiplier REAL NOT NULL DEFAULT 1.0 -); - -CREATE INDEX IF NOT EXISTS idx_gacha_pools_active ON gacha_pools(is_active); -CREATE INDEX IF NOT EXISTS idx_gacha_pools_dates ON gacha_pools(start_at, end_at); - --- Insert default card master data (0-15 cards from ai.json) -INSERT OR IGNORE INTO card_master (id, name, base_cp_min, base_cp_max, color, description) VALUES - (0, 'ai', 100, 200, '#4A90E2', 'The core essence of existence'), - (1, 'dream', 90, 180, '#9B59B6', 'Visions of possibility'), - (2, 'radiance', 110, 220, '#F39C12', 'Brilliant light energy'), - (3, 'neutron', 120, 240, '#34495E', 'Dense stellar core'), - (4, 'sun', 130, 260, '#E74C3C', 'Solar radiance'), - (5, 'night', 80, 160, '#2C3E50', 'Darkness and mystery'), - (6, 'snow', 70, 140, '#ECF0F1', 'Pure frozen crystalline'), - (7, 'thunder', 140, 280, '#F1C40F', 'Electric storm energy'), - (8, 'ultimate', 150, 300, '#8E44AD', 'The highest form'), - (9, 'sword', 160, 320, '#95A5A6', 'Blade of cutting truth'), - (10, 'destruction', 170, 340, '#C0392B', 'Force of entropy'), - (11, 'earth', 90, 180, '#27AE60', 'Grounding foundation'), - (12, 'galaxy', 180, 360, '#3498DB', 'Cosmic expanse'), - (13, 'create', 100, 200, '#16A085', 'Power of generation'), - (14, 'supernova', 200, 400, '#E67E22', 'Stellar explosion'), - (15, 'world', 250, 500, '#9B59B6', 'Reality itself'); - --- Create trigger for updating users.updated_at -CREATE TRIGGER IF NOT EXISTS trigger_users_updated_at - AFTER UPDATE ON users - BEGIN - UPDATE users SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id; - END; \ No newline at end of file diff --git a/api-rs/src/auth.rs b/api-rs/src/auth.rs deleted file mode 100644 index 8c9cdce..0000000 --- a/api-rs/src/auth.rs +++ /dev/null @@ -1,108 +0,0 @@ -use chrono::{Duration, Utc}; -use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation}; -use serde::{Deserialize, Serialize}; - -use crate::error::{AppError, AppResult}; - -#[derive(Debug, Serialize, Deserialize)] -pub struct Claims { - pub did: String, - pub handle: String, - pub exp: usize, -} - -pub struct JwtService { - encoding_key: EncodingKey, - decoding_key: DecodingKey, -} - -impl JwtService { - pub fn new(secret: &str) -> Self { - Self { - encoding_key: EncodingKey::from_secret(secret.as_ref()), - decoding_key: DecodingKey::from_secret(secret.as_ref()), - } - } - - pub fn create_token(&self, did: &str, handle: &str, expires_in_minutes: u64) -> AppResult { - let expiration = Utc::now() - .checked_add_signed(Duration::minutes(expires_in_minutes as i64)) - .ok_or_else(|| AppError::internal("Failed to calculate expiration time"))? 
- .timestamp() as usize; - - let claims = Claims { - did: did.to_string(), - handle: handle.to_string(), - exp: expiration, - }; - - encode(&Header::default(), &claims, &self.encoding_key) - .map_err(AppError::Jwt) - } - - pub fn verify_token(&self, token: &str) -> AppResult { - let token_data = decode::(token, &self.decoding_key, &Validation::default()) - .map_err(AppError::Jwt)?; - - Ok(token_data.claims) - } -} - -/// Mock atproto authentication service -/// In a real implementation, this would integrate with actual atproto services -pub struct AtprotoAuthService { - jwt_service: JwtService, -} - -impl AtprotoAuthService { - pub fn new(secret: &str) -> Self { - Self { - jwt_service: JwtService::new(secret), - } - } - - /// Authenticate user with atproto credentials - /// This is a mock implementation - in reality would validate against atproto PDS - pub async fn authenticate(&self, identifier: &str, _password: &str) -> AppResult { - // Mock validation - in real implementation: - // 1. Connect to user's PDS - // 2. Verify credentials - // 3. Get user DID and handle - - // For now, treat identifier as DID or handle - let (did, handle) = if identifier.starts_with("did:") { - (identifier.to_string(), extract_handle_from_did(identifier)) - } else { - (format!("did:plc:{}", generate_mock_plc_id()), identifier.to_string()) - }; - - Ok(AuthenticatedUser { did, handle }) - } - - pub fn create_access_token(&self, user: &AuthenticatedUser, expires_in_minutes: u64) -> AppResult { - self.jwt_service.create_token(&user.did, &user.handle, expires_in_minutes) - } - - pub fn verify_access_token(&self, token: &str) -> AppResult { - self.jwt_service.verify_token(token) - } -} - -#[derive(Debug, Clone)] -pub struct AuthenticatedUser { - pub did: String, - pub handle: String, -} - -/// Extract handle from DID (mock implementation) -fn extract_handle_from_did(did: &str) -> String { - // In a real implementation, this would resolve the DID to get the handle - // For now, use a simple mock - did.split(':').last().unwrap_or("unknown").to_string() -} - -/// Generate mock PLC identifier -fn generate_mock_plc_id() -> String { - use uuid::Uuid; - Uuid::new_v4().to_string().replace('-', "")[..24].to_string() -} \ No newline at end of file diff --git a/api-rs/src/config.rs b/api-rs/src/config.rs deleted file mode 100644 index 3ea3cd8..0000000 --- a/api-rs/src/config.rs +++ /dev/null @@ -1,127 +0,0 @@ -use config::{Config, ConfigError, Environment, File}; -use serde::Deserialize; -use std::path::PathBuf; - -#[derive(Debug, Clone, Deserialize)] -pub struct Settings { - // Application settings - pub app_name: String, - pub port: u16, - pub api_v1_prefix: String, - - // Database settings - pub database_url: String, - pub database_url_supabase: Option, - - // Authentication - pub secret_key: String, - pub access_token_expire_minutes: u64, - - // Gacha probabilities (percentages) - pub prob_normal: f64, - pub prob_rare: f64, - pub prob_super_rare: f64, - pub prob_kira: f64, - pub prob_unique: f64, - - // atproto settings - pub atproto_pds_url: Option, - pub atproto_handle: Option, - - // External data - pub card_master_url: String, - - // File paths - pub config_dir: PathBuf, -} - -impl Settings { - pub fn new() -> Result { - let config_dir = dirs::home_dir() - .unwrap_or_else(|| PathBuf::from(".")) - .join(".config") - .join("syui") - .join("ai") - .join("card"); - - // Ensure config directory exists - if !config_dir.exists() { - std::fs::create_dir_all(&config_dir) - .map_err(|e| ConfigError::Message(format!("Failed to 
create config directory: {}", e)))?; - } - - let mut builder = Config::builder() - // Default values - .set_default("app_name", "ai.card")? - .set_default("port", 8000)? - .set_default("api_v1_prefix", "/api/v1")? - - // Database defaults - .set_default("database_url", format!("sqlite://{}?mode=rwc", config_dir.join("aicard.db").display()))? - - // Authentication defaults - .set_default("secret_key", "your-secret-key-change-in-production")? - .set_default("access_token_expire_minutes", 1440)? // 24 hours - - // Gacha probability defaults (matching Python implementation) - .set_default("prob_normal", 99.789)? - .set_default("prob_rare", 0.1)? - .set_default("prob_super_rare", 0.01)? - .set_default("prob_kira", 0.1)? - .set_default("prob_unique", 0.0001)? - - // External data source - .set_default("card_master_url", "https://git.syui.ai/ai/ai/raw/branch/main/ai.json")?; - - // Load from config file if it exists - let config_file = config_dir.join("config.toml"); - if config_file.exists() { - builder = builder.add_source(File::from(config_file)); - } - - // Override with environment variables (AI_CARD_ prefix) - builder = builder.add_source(Environment::with_prefix("AI_CARD").separator("_")); - - let mut settings: Settings = builder.build()?.try_deserialize()?; - - // Set the config directory path - settings.config_dir = config_dir; - - Ok(settings) - } - - /// Get the gacha configuration for the gacha service - pub fn gacha_config(&self) -> GachaConfig { - GachaConfig { - prob_normal: self.prob_normal, - prob_rare: self.prob_rare, - prob_super_rare: self.prob_super_rare, - prob_kira: self.prob_kira, - prob_unique: self.prob_unique, - } - } -} - -#[derive(Debug, Clone)] -pub struct GachaConfig { - pub prob_normal: f64, - pub prob_rare: f64, - pub prob_super_rare: f64, - pub prob_kira: f64, - pub prob_unique: f64, -} - -impl GachaConfig { - /// Calculate cumulative probabilities for rarity determination - pub fn cumulative_probabilities(&self, is_paid: bool) -> Vec<(f64, crate::models::CardRarity)> { - let multiplier = if is_paid { 2.0 } else { 1.0 }; - - vec![ - (self.prob_unique * multiplier, crate::models::CardRarity::Unique), - (self.prob_kira * multiplier, crate::models::CardRarity::Kira), - (self.prob_super_rare * multiplier, crate::models::CardRarity::SuperRare), - (self.prob_rare * multiplier, crate::models::CardRarity::Rare), - (self.prob_normal, crate::models::CardRarity::Normal), - ] - } -} \ No newline at end of file diff --git a/api-rs/src/database.rs b/api-rs/src/database.rs deleted file mode 100644 index 572d54b..0000000 --- a/api-rs/src/database.rs +++ /dev/null @@ -1,190 +0,0 @@ -use sqlx::{Pool, Postgres, Sqlite, Row}; -use sqlx::migrate::MigrateDatabase; -use crate::error::{AppError, AppResult}; -use std::str::FromStr; - -#[derive(Clone)] -pub enum Database { - Postgres(Pool), - Sqlite(Pool), -} - -impl Database { - pub async fn connect(database_url: &str) -> AppResult { - if database_url.starts_with("postgres://") || database_url.starts_with("postgresql://") { - let pool = sqlx::postgres::PgPoolOptions::new() - .max_connections(10) - .connect(database_url) - .await - .map_err(AppError::Database)?; - Ok(Database::Postgres(pool)) - } else if database_url.starts_with("sqlite://") { - // Extract the path from sqlite:// URL - let db_path = database_url.trim_start_matches("sqlite://"); - - // Create the database file if it doesn't exist - if !Sqlite::database_exists(database_url).await.unwrap_or(false) { - Sqlite::create_database(database_url) - .await - 
.map_err(AppError::Database)?; - } - - let pool = sqlx::sqlite::SqlitePoolOptions::new() - .max_connections(5) - .connect(database_url) - .await - .map_err(AppError::Database)?; - - Ok(Database::Sqlite(pool)) - } else { - Err(AppError::Configuration(format!( - "Unsupported database URL: {}", - database_url - ))) - } - } - - pub async fn migrate(&self) -> AppResult<()> { - match self { - Database::Postgres(pool) => { - sqlx::migrate!("./migrations/postgres") - .run(pool) - .await - .map_err(AppError::Migration)?; - } - Database::Sqlite(pool) => { - sqlx::migrate!("./migrations/sqlite") - .run(pool) - .await - .map_err(AppError::Migration)?; - } - } - Ok(()) - } - - - /// Get a generic connection for complex operations - pub async fn acquire(&self) -> AppResult { - match self { - Database::Postgres(pool) => { - let conn = pool.acquire().await.map_err(AppError::Database)?; - Ok(DatabaseConnection::Postgres(conn)) - } - Database::Sqlite(pool) => { - let conn = pool.acquire().await.map_err(AppError::Database)?; - Ok(DatabaseConnection::Sqlite(conn)) - } - } - } - - /// Begin a transaction - pub async fn begin(&self) -> AppResult { - match self { - Database::Postgres(pool) => { - let tx = pool.begin().await.map_err(AppError::Database)?; - Ok(DatabaseTransaction::Postgres(tx)) - } - Database::Sqlite(pool) => { - let tx = pool.begin().await.map_err(AppError::Database)?; - Ok(DatabaseTransaction::Sqlite(tx)) - } - } - } -} - -pub enum DatabaseConnection { - Postgres(sqlx::pool::PoolConnection), - Sqlite(sqlx::pool::PoolConnection), -} - -pub enum DatabaseTransaction { - Postgres(sqlx::Transaction<'static, Postgres>), - Sqlite(sqlx::Transaction<'static, Sqlite>), -} - -impl DatabaseTransaction { - pub async fn commit(self) -> AppResult<()> { - match self { - DatabaseTransaction::Postgres(tx) => { - tx.commit().await.map_err(AppError::Database)?; - } - DatabaseTransaction::Sqlite(tx) => { - tx.commit().await.map_err(AppError::Database)?; - } - } - Ok(()) - } - - pub async fn rollback(self) -> AppResult<()> { - match self { - DatabaseTransaction::Postgres(tx) => { - tx.rollback().await.map_err(AppError::Database)?; - } - DatabaseTransaction::Sqlite(tx) => { - tx.rollback().await.map_err(AppError::Database)?; - } - } - Ok(()) - } -} - -// Macros for database-agnostic queries -#[macro_export] -macro_rules! query_as { - ($struct:ty, $query:expr, $db:expr) => { - match $db { - Database::Postgres(pool) => { - sqlx::query_as::<_, $struct>($query) - .fetch_all(pool) - .await - .map_err(AppError::Database) - } - Database::Sqlite(pool) => { - sqlx::query_as::<_, $struct>($query) - .fetch_all(pool) - .await - .map_err(AppError::Database) - } - } - }; -} - -#[macro_export] -macro_rules! query_one_as { - ($struct:ty, $query:expr, $db:expr) => { - match $db { - Database::Postgres(pool) => { - sqlx::query_as::<_, $struct>($query) - .fetch_one(pool) - .await - .map_err(AppError::Database) - } - Database::Sqlite(pool) => { - sqlx::query_as::<_, $struct>($query) - .fetch_one(pool) - .await - .map_err(AppError::Database) - } - } - }; -} - -#[macro_export] -macro_rules! 
query_optional_as { - ($struct:ty, $query:expr, $db:expr) => { - match $db { - Database::Postgres(pool) => { - sqlx::query_as::<_, $struct>($query) - .fetch_optional(pool) - .await - .map_err(AppError::Database) - } - Database::Sqlite(pool) => { - sqlx::query_as::<_, $struct>($query) - .fetch_optional(pool) - .await - .map_err(AppError::Database) - } - } - }; -} \ No newline at end of file diff --git a/api-rs/src/error.rs b/api-rs/src/error.rs deleted file mode 100644 index 332335a..0000000 --- a/api-rs/src/error.rs +++ /dev/null @@ -1,142 +0,0 @@ -use axum::{ - http::StatusCode, - response::{IntoResponse, Response}, - Json, -}; -use serde_json::json; -use thiserror::Error; - -#[derive(Error, Debug)] -pub enum AppError { - #[error("Database error: {0}")] - Database(#[from] sqlx::Error), - - #[error("Migration error: {0}")] - Migration(#[from] sqlx::migrate::MigrateError), - - #[error("Validation error: {0}")] - Validation(String), - - #[error("Authentication error: {0}")] - Authentication(String), - - #[error("Authorization error: {0}")] - Authorization(String), - - #[error("Not found: {0}")] - NotFound(String), - - #[error("Conflict: {0}")] - Conflict(String), - - #[error("External service error: {0}")] - ExternalService(String), - - #[error("Configuration error: {0}")] - Configuration(String), - - #[error("JSON serialization error: {0}")] - Json(#[from] serde_json::Error), - - #[error("HTTP client error: {0}")] - HttpClient(#[from] reqwest::Error), - - #[error("JWT error: {0}")] - Jwt(#[from] jsonwebtoken::errors::Error), - - #[error("Internal server error: {0}")] - Internal(String), -} - -impl IntoResponse for AppError { - fn into_response(self) -> Response { - let (status, error_message, error_code) = match &self { - AppError::Database(e) => { - tracing::error!("Database error: {}", e); - (StatusCode::INTERNAL_SERVER_ERROR, "Database error", "DATABASE_ERROR") - } - AppError::Migration(e) => { - tracing::error!("Migration error: {}", e); - (StatusCode::INTERNAL_SERVER_ERROR, "Migration error", "MIGRATION_ERROR") - } - AppError::Validation(msg) => { - (StatusCode::BAD_REQUEST, msg.as_str(), "VALIDATION_ERROR") - } - AppError::Authentication(msg) => { - (StatusCode::UNAUTHORIZED, msg.as_str(), "AUTHENTICATION_ERROR") - } - AppError::Authorization(msg) => { - (StatusCode::FORBIDDEN, msg.as_str(), "AUTHORIZATION_ERROR") - } - AppError::NotFound(msg) => { - (StatusCode::NOT_FOUND, msg.as_str(), "NOT_FOUND") - } - AppError::Conflict(msg) => { - (StatusCode::CONFLICT, msg.as_str(), "CONFLICT") - } - AppError::ExternalService(msg) => { - tracing::error!("External service error: {}", msg); - (StatusCode::BAD_GATEWAY, "External service unavailable", "EXTERNAL_SERVICE_ERROR") - } - AppError::Configuration(msg) => { - tracing::error!("Configuration error: {}", msg); - (StatusCode::INTERNAL_SERVER_ERROR, "Configuration error", "CONFIGURATION_ERROR") - } - AppError::Json(e) => { - tracing::error!("JSON error: {}", e); - (StatusCode::BAD_REQUEST, "Invalid JSON", "JSON_ERROR") - } - AppError::HttpClient(e) => { - tracing::error!("HTTP client error: {}", e); - (StatusCode::BAD_GATEWAY, "External service error", "HTTP_CLIENT_ERROR") - } - AppError::Jwt(e) => { - tracing::error!("JWT error: {}", e); - (StatusCode::UNAUTHORIZED, "Invalid token", "JWT_ERROR") - } - AppError::Internal(msg) => { - tracing::error!("Internal error: {}", msg); - (StatusCode::INTERNAL_SERVER_ERROR, "Internal server error", "INTERNAL_ERROR") - } - }; - - let body = Json(json!({ - "error": { - "code": error_code, - "message": 
error_message, - "timestamp": chrono::Utc::now().to_rfc3339() - } - })); - - (status, body).into_response() - } -} - -// Convenience methods for common errors -impl AppError { - pub fn validation>(msg: T) -> Self { - Self::Validation(msg.into()) - } - - pub fn authentication>(msg: T) -> Self { - Self::Authentication(msg.into()) - } - - pub fn authorization>(msg: T) -> Self { - Self::Authorization(msg.into()) - } - - pub fn not_found>(msg: T) -> Self { - Self::NotFound(msg.into()) - } - - pub fn conflict>(msg: T) -> Self { - Self::Conflict(msg.into()) - } - - pub fn internal>(msg: T) -> Self { - Self::Internal(msg.into()) - } -} - -pub type AppResult = Result; \ No newline at end of file diff --git a/api-rs/src/handlers/auth.rs b/api-rs/src/handlers/auth.rs deleted file mode 100644 index ae95bac..0000000 --- a/api-rs/src/handlers/auth.rs +++ /dev/null @@ -1,161 +0,0 @@ -use axum::{ - extract::State, - response::Json, - routing::post, - Router, -}; -use validator::Validate; - -use crate::{ - auth::AtprotoAuthService, - error::{AppError, AppResult}, - models::*, - AppState, -}; - -pub fn create_routes() -> Router { - Router::new() - .route("/login", post(login)) - .route("/verify", post(verify_token)) -} - -/// Authenticate user with atproto credentials -async fn login( - State(state): State, - Json(request): Json, -) -> AppResult> { - // Validate request - request.validate().map_err(|e| AppError::validation(e.to_string()))?; - - // Create auth service - let auth_service = AtprotoAuthService::new(&state.settings.secret_key); - - // Authenticate user - let user = auth_service - .authenticate(&request.identifier, &request.password) - .await?; - - // Create access token - let access_token = auth_service - .create_access_token(&user, state.settings.access_token_expire_minutes)?; - - // Create or update user in database - let db_user = create_or_update_user(&state, &user.did, &user.handle).await?; - - Ok(Json(LoginResponse { - access_token, - token_type: "Bearer".to_string(), - expires_in: state.settings.access_token_expire_minutes * 60, // Convert to seconds - user: UserInfo { - did: user.did, - handle: user.handle, - }, - })) -} - -/// Verify JWT token -async fn verify_token( - State(state): State, - Json(token): Json, -) -> AppResult> { - let token_str = token["token"] - .as_str() - .ok_or_else(|| AppError::validation("Token is required"))?; - - let auth_service = AtprotoAuthService::new(&state.settings.secret_key); - let claims = auth_service.verify_access_token(token_str)?; - - Ok(Json(serde_json::json!({ - "valid": true, - "did": claims.did, - "handle": claims.handle, - "exp": claims.exp - }))) -} - -/// Create or update user in database -async fn create_or_update_user( - state: &AppState, - did: &str, - handle: &str, -) -> AppResult { - let now = chrono::Utc::now(); - - // Try to get existing user - let existing_user = match &state.db { - crate::database::Database::Postgres(pool) => { - sqlx::query_as::<_, User>("SELECT * FROM users WHERE did = $1") - .bind(did) - .fetch_optional(pool) - .await - .map_err(AppError::Database)? - } - crate::database::Database::Sqlite(pool) => { - sqlx::query_as::<_, User>("SELECT * FROM users WHERE did = ?") - .bind(did) - .fetch_optional(pool) - .await - .map_err(AppError::Database)? 
- } - }; - - if let Some(mut user) = existing_user { - // Update handle if changed - if user.handle != handle { - user = match &state.db { - crate::database::Database::Postgres(pool) => { - sqlx::query_as::<_, User>( - "UPDATE users SET handle = $1, updated_at = $2 WHERE did = $3 RETURNING *" - ) - .bind(handle) - .bind(now) - .bind(did) - .fetch_one(pool) - .await - .map_err(AppError::Database)? - } - crate::database::Database::Sqlite(pool) => { - sqlx::query_as::<_, User>( - "UPDATE users SET handle = ?, updated_at = ? WHERE did = ? RETURNING *" - ) - .bind(handle) - .bind(now) - .bind(did) - .fetch_one(pool) - .await - .map_err(AppError::Database)? - } - }; - } - Ok(user) - } else { - // Create new user - let user = match &state.db { - crate::database::Database::Postgres(pool) => { - sqlx::query_as::<_, User>( - "INSERT INTO users (did, handle, created_at, updated_at) VALUES ($1, $2, $3, $4) RETURNING *" - ) - .bind(did) - .bind(handle) - .bind(now) - .bind(now) - .fetch_one(pool) - .await - .map_err(AppError::Database)? - } - crate::database::Database::Sqlite(pool) => { - sqlx::query_as::<_, User>( - "INSERT INTO users (did, handle, created_at, updated_at) VALUES (?, ?, ?, ?) RETURNING *" - ) - .bind(did) - .bind(handle) - .bind(now) - .bind(now) - .fetch_one(pool) - .await - .map_err(AppError::Database)? - } - }; - Ok(user) - } -} \ No newline at end of file diff --git a/api-rs/src/handlers/cards.rs b/api-rs/src/handlers/cards.rs deleted file mode 100644 index 46830b1..0000000 --- a/api-rs/src/handlers/cards.rs +++ /dev/null @@ -1,314 +0,0 @@ -use axum::{ - extract::{Path, Query, State}, - response::Json, - routing::{get, post}, - Router, -}; -use serde::Deserialize; -use validator::Validate; - -use crate::{ - error::{AppError, AppResult}, - models::*, - services::GachaService, - AppState, -}; - -pub fn create_routes() -> Router { - Router::new() - .route("/draw", post(draw_card)) - .route("/user/:user_did", get(get_user_cards)) - .route("/unique", get(get_unique_registry)) - .route("/stats", get(get_gacha_stats)) - .route("/master", get(get_card_master)) -} - -/// Draw a card from gacha system -async fn draw_card( - State(state): State, - Json(request): Json, -) -> AppResult> { - // Validate request - request.validate().map_err(|e| AppError::validation(e.to_string()))?; - - let gacha_service = GachaService::new(state.settings.gacha_config()); - - let result = gacha_service - .draw_card(&state.db, &request.user_did, request.is_paid, request.pool_id) - .await?; - - Ok(Json(result)) -} - -#[derive(Deserialize)] -struct UserCardsQuery { - limit: Option, - offset: Option, -} - -/// Get user's card collection -async fn get_user_cards( - State(state): State, - Path(user_did): Path, - Query(query): Query, -) -> AppResult> { - let limit = query.limit.unwrap_or(50).min(100); // Max 100 cards per request - let offset = query.offset.unwrap_or(0); - - // Get user ID from DID - let user = match &state.db { - crate::database::Database::Postgres(pool) => { - sqlx::query_as::<_, User>("SELECT * FROM users WHERE did = $1") - .bind(&user_did) - .fetch_optional(pool) - .await - .map_err(AppError::Database)? - } - crate::database::Database::Sqlite(pool) => { - sqlx::query_as::<_, User>("SELECT * FROM users WHERE did = ?") - .bind(&user_did) - .fetch_optional(pool) - .await - .map_err(AppError::Database)? 
- } - }; - - let user = user.ok_or_else(|| AppError::not_found("User not found"))?; - - // Get user's cards with master data - let cards_with_master = match &state.db { - crate::database::Database::Postgres(pool) => { - sqlx::query_as::<_, UserCardWithMasterQuery>( - r#" - SELECT - uc.id, uc.user_did, uc.card_id, uc.cp, uc.status, - uc.obtained_at, uc.is_unique, uc.unique_id, - cm.id as master_id, cm.name, cm.base_cp_min, cm.base_cp_max, cm.color, cm.description - FROM user_cards uc - JOIN card_master cm ON uc.card_id = cm.id - WHERE uc.user_did = $1 - ORDER BY uc.obtained_at DESC - LIMIT $2 OFFSET $3 - "# - ) - .bind(&user_did) - .bind(limit as i64) - .bind(offset as i64) - .fetch_all(pool) - .await - .map_err(AppError::Database)? - } - crate::database::Database::Sqlite(pool) => { - sqlx::query_as::<_, UserCardWithMasterQuery>( - r#" - SELECT - uc.id, uc.user_did, uc.card_id, uc.cp, uc.status, - uc.obtained_at, uc.is_unique, uc.unique_id, - cm.id as master_id, cm.name, cm.base_cp_min, cm.base_cp_max, cm.color, cm.description - FROM user_cards uc - JOIN card_master cm ON uc.card_id = cm.id - WHERE uc.user_did = ? - ORDER BY uc.obtained_at DESC - LIMIT ? OFFSET ? - "# - ) - .bind(&user_did) - .bind(limit as i32) - .bind(offset as i32) - .fetch_all(pool) - .await - .map_err(AppError::Database)? - } - }; - - let mut cards = Vec::new(); - let mut rarity_breakdown = RarityBreakdown { - normal: 0, - rare: 0, - super_rare: 0, - kira: 0, - unique: 0, - }; - - for row in cards_with_master { - let status = match row.status.as_str() { - "normal" => CardRarity::Normal, - "rare" => CardRarity::Rare, - "super_rare" => CardRarity::SuperRare, - "kira" => CardRarity::Kira, - "unique" => CardRarity::Unique, - _ => CardRarity::Normal, - }; - - // Update rarity breakdown - match status { - CardRarity::Normal => rarity_breakdown.normal += 1, - CardRarity::Rare => rarity_breakdown.rare += 1, - CardRarity::SuperRare => rarity_breakdown.super_rare += 1, - CardRarity::Kira => rarity_breakdown.kira += 1, - CardRarity::Unique => rarity_breakdown.unique += 1, - } - - cards.push(UserCardWithMaster { - card: UserCardResponse { - id: row.id, - card_id: row.card_id, - cp: row.cp, - status, - skill: None, // TODO: Add skill field to query if needed - obtained_at: row.obtained_at, - is_unique: row.is_unique, - unique_id: row.unique_id, - }, - master: CardMasterResponse { - id: row.master_id, - name: row.name, - base_cp_min: row.base_cp_min, - base_cp_max: row.base_cp_max, - color: row.color, - description: row.description, - }, - }); - } - - // Get total count and unique count - let (total_count, unique_count): (i64, i64) = match &state.db { - crate::database::Database::Postgres(pool) => { - sqlx::query_as( - "SELECT COUNT(*) as total, COUNT(*) FILTER (WHERE is_unique = true) as unique_count FROM user_cards WHERE user_id = $1" - ) - .bind(user.id) - .fetch_one(pool) - .await - .map_err(AppError::Database)? - } - crate::database::Database::Sqlite(pool) => { - sqlx::query_as( - "SELECT COUNT(*) as total, SUM(CASE WHEN is_unique = 1 THEN 1 ELSE 0 END) as unique_count FROM user_cards WHERE user_id = ?" - ) - .bind(user.id) - .fetch_one(pool) - .await - .map_err(AppError::Database)? 
- } - }; - - Ok(Json(UserCardCollectionResponse { - user_did, - cards, - total_count: total_count as i32, - unique_count: unique_count as i32, - rarity_breakdown, - })) -} - -/// Get global unique card registry -async fn get_unique_registry( - State(state): State, -) -> AppResult> { - // Get all unique cards with master data and owner info - let unique_cards = match &state.db { - crate::database::Database::Postgres(pool) => { - sqlx::query_as::<_, UniqueCardQuery>( - r#" - SELECT - cm.id as card_id, - cm.name as card_name, - ucr.owner_did, - u.handle as owner_handle, - ucr.obtained_at - FROM card_master cm - LEFT JOIN unique_card_registry ucr ON cm.id = ucr.card_id - LEFT JOIN users u ON ucr.owner_did = u.did - ORDER BY cm.id - "# - ) - .fetch_all(pool) - .await - .map_err(AppError::Database)? - } - crate::database::Database::Sqlite(pool) => { - sqlx::query_as::<_, UniqueCardQuery>( - r#" - SELECT - cm.id as card_id, - cm.name as card_name, - ucr.owner_did, - u.handle as owner_handle, - ucr.obtained_at - FROM card_master cm - LEFT JOIN unique_card_registry ucr ON cm.id = ucr.card_id - LEFT JOIN users u ON ucr.owner_did = u.did - ORDER BY cm.id - "# - ) - .fetch_all(pool) - .await - .map_err(AppError::Database)? - } - }; - - let mut unique_card_infos = Vec::new(); - let mut available_count = 0; - - for row in unique_cards { - let is_available = row.owner_did.is_none(); - if is_available { - available_count += 1; - } - - unique_card_infos.push(UniqueCardInfo { - card_id: row.card_id, - card_name: row.card_name, - owner_did: row.owner_did, - owner_handle: row.owner_handle, - obtained_at: row.obtained_at, - is_available, - }); - } - - Ok(Json(UniqueCardRegistryResponse { - unique_cards: unique_card_infos, - total_unique_cards: 16, // Total number of card types - available_unique_cards: available_count, - })) -} - -/// Get gacha statistics -async fn get_gacha_stats(State(state): State) -> AppResult> { - let gacha_service = GachaService::new(state.settings.gacha_config()); - let stats = gacha_service.get_gacha_stats(&state.db).await?; - Ok(Json(stats)) -} - -/// Get card master data -async fn get_card_master(State(state): State) -> AppResult>> { - let cards = match &state.db { - crate::database::Database::Postgres(pool) => { - sqlx::query_as::<_, CardMaster>("SELECT * FROM card_master ORDER BY id") - .fetch_all(pool) - .await - .map_err(AppError::Database)? - } - crate::database::Database::Sqlite(pool) => { - sqlx::query_as::<_, CardMaster>("SELECT * FROM card_master ORDER BY id") - .fetch_all(pool) - .await - .map_err(AppError::Database)? 
- } - }; - - let card_responses: Vec = cards - .into_iter() - .map(|card| CardMasterResponse { - id: card.id, - name: card.name, - base_cp_min: card.base_cp_min, - base_cp_max: card.base_cp_max, - color: card.color, - description: card.description, - }) - .collect(); - - Ok(Json(card_responses)) -} \ No newline at end of file diff --git a/api-rs/src/handlers/mod.rs b/api-rs/src/handlers/mod.rs deleted file mode 100644 index 3672408..0000000 --- a/api-rs/src/handlers/mod.rs +++ /dev/null @@ -1,7 +0,0 @@ -pub mod auth; -pub mod cards; -pub mod sync; - -pub use auth::*; -pub use cards::*; -pub use sync::*; \ No newline at end of file diff --git a/api-rs/src/handlers/sync.rs b/api-rs/src/handlers/sync.rs deleted file mode 100644 index 462ebfd..0000000 --- a/api-rs/src/handlers/sync.rs +++ /dev/null @@ -1,68 +0,0 @@ -use axum::{ - extract::State, - response::Json, - routing::post, - Router, -}; - -use crate::{ - error::{AppError, AppResult}, - AppState, -}; - -pub fn create_routes() -> Router { - Router::new() - .route("/cards/export", post(export_cards)) - .route("/cards/import", post(import_cards)) - .route("/cards/bidirectional", post(bidirectional_sync)) -} - -/// Export user's cards to atproto PDS -async fn export_cards(State(_state): State) -> AppResult> { - // TODO: Implement atproto PDS export - // This would: - // 1. Get user's cards from database - // 2. Format as atproto records - // 3. Upload to user's PDS - - Ok(Json(serde_json::json!({ - "status": "success", - "message": "Card export to PDS completed", - "exported_count": 0, - "note": "atproto integration not yet implemented" - }))) -} - -/// Import user's cards from atproto PDS -async fn import_cards(State(_state): State) -> AppResult> { - // TODO: Implement atproto PDS import - // This would: - // 1. Fetch card records from user's PDS - // 2. Validate and parse records - // 3. Update local database - - Ok(Json(serde_json::json!({ - "status": "success", - "message": "Card import from PDS completed", - "imported_count": 0, - "note": "atproto integration not yet implemented" - }))) -} - -/// Bidirectional synchronization between local DB and PDS -async fn bidirectional_sync(State(_state): State) -> AppResult> { - // TODO: Implement bidirectional sync - // This would: - // 1. Compare local cards with PDS records - // 2. Resolve conflicts (newest wins, etc.) - // 3. 
Sync in both directions - - Ok(Json(serde_json::json!({ - "status": "success", - "message": "Bidirectional sync completed", - "local_to_pds": 0, - "pds_to_local": 0, - "conflicts_resolved": 0, - "note": "atproto integration not yet implemented" - }))) -} \ No newline at end of file diff --git a/api-rs/src/main.rs b/api-rs/src/main.rs deleted file mode 100644 index ff28db5..0000000 --- a/api-rs/src/main.rs +++ /dev/null @@ -1,103 +0,0 @@ -use anyhow::Result; -use axum::{ - extract::State, - http::StatusCode, - response::Json, - routing::{get, post}, - Router, -}; -use serde_json::{json, Value}; -use std::net::SocketAddr; -use tower_http::cors::CorsLayer; -use tracing::{info, warn}; - -mod config; -mod database; -mod models; -mod handlers; -mod services; -mod auth; -mod error; - -use config::Settings; -use database::Database; -use error::AppError; - -#[derive(Clone)] -pub struct AppState { - pub db: Database, - pub settings: Settings, -} - -#[tokio::main] -async fn main() -> Result<()> { - // Initialize tracing - tracing_subscriber::fmt::init(); - - // Load configuration - let settings = Settings::new() - .map_err(|e| anyhow::anyhow!("Failed to load configuration: {}", e))?; - - info!("Starting ai.card API server v{}", env!("CARGO_PKG_VERSION")); - info!("Configuration loaded from: {}", settings.config_dir.display()); - - // Initialize database - let database = Database::connect(&settings.database_url).await?; - - // Run migrations - database.migrate().await?; - info!("Database migrations completed"); - - let app_state = AppState { - db: database, - settings: settings.clone(), - }; - - // Build application routes - let app = create_app(app_state).await; - - // Start server - let addr = SocketAddr::from(([0, 0, 0, 0], settings.port)); - info!("ai.card API server listening on {}", addr); - - let listener = tokio::net::TcpListener::bind(addr).await?; - axum::serve(listener, app).await?; - - Ok(()) -} - -async fn create_app(state: AppState) -> Router { - Router::new() - // Health check - .route("/health", get(health_check)) - - // API v1 routes - .nest("/api/v1", create_api_routes()) - - // CORS middleware - .layer(CorsLayer::permissive()) - - // Application state - .with_state(state) -} - -fn create_api_routes() -> Router { - Router::new() - // Authentication routes - .nest("/auth", handlers::auth::create_routes()) - - // Card routes - .nest("/cards", handlers::cards::create_routes()) - - // Sync routes - .nest("/sync", handlers::sync::create_routes()) -} - -async fn health_check() -> Result, AppError> { - Ok(Json(json!({ - "status": "healthy", - "service": "ai.card", - "version": env!("CARGO_PKG_VERSION"), - "timestamp": chrono::Utc::now().to_rfc3339() - }))) -} \ No newline at end of file diff --git a/api-rs/src/models.rs b/api-rs/src/models.rs deleted file mode 100644 index 13af15d..0000000 --- a/api-rs/src/models.rs +++ /dev/null @@ -1,326 +0,0 @@ -use chrono::{DateTime, Utc}; -use serde::{Deserialize, Serialize}; -use sqlx::{FromRow, Type}; -use uuid::Uuid; -use validator::Validate; - -/// Card rarity enum matching Python implementation -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Type)] -#[sqlx(type_name = "card_rarity", rename_all = "lowercase")] -pub enum CardRarity { - #[serde(rename = "normal")] - Normal, - #[serde(rename = "rare")] - Rare, - #[serde(rename = "super_rare")] - SuperRare, - #[serde(rename = "kira")] - Kira, - #[serde(rename = "unique")] - Unique, -} - -impl CardRarity { - pub fn multiplier(&self) -> f64 { - match self { - CardRarity::Normal => 
1.0, - CardRarity::Rare => 1.5, - CardRarity::SuperRare => 2.0, - CardRarity::Kira => 3.0, - CardRarity::Unique => 5.0, - } - } - - pub fn as_str(&self) -> &'static str { - match self { - CardRarity::Normal => "normal", - CardRarity::Rare => "rare", - CardRarity::SuperRare => "super_rare", - CardRarity::Kira => "kira", - CardRarity::Unique => "unique", - } - } -} - -/// Database Models - -#[derive(Debug, Clone, FromRow, Serialize, Deserialize)] -pub struct User { - pub id: i32, - pub did: String, - pub handle: String, - pub created_at: DateTime, - pub updated_at: DateTime, -} - -#[derive(Debug, Clone, FromRow, Serialize, Deserialize)] -pub struct CardMaster { - pub id: i32, - pub name: String, - pub base_cp_min: i32, - pub base_cp_max: i32, - pub color: String, - pub description: String, -} - -#[derive(Debug, Clone, FromRow, Serialize, Deserialize)] -pub struct UserCard { - pub id: i32, - pub user_did: String, - pub card_id: i32, - pub cp: i32, - pub status: CardRarity, - pub skill: Option, - pub obtained_at: DateTime, - pub is_unique: bool, - pub unique_id: Option, -} - -#[derive(Debug, Clone, FromRow, Serialize, Deserialize)] -pub struct UniqueCardRegistry { - pub id: i32, - pub unique_id: Uuid, - pub card_id: i32, - pub owner_did: String, - pub obtained_at: DateTime, - pub verse_skill_id: Option, -} - -#[derive(Debug, Clone, FromRow, Serialize, Deserialize)] -pub struct DrawHistory { - pub id: i32, - pub user_did: String, - pub card_id: i32, - pub status: CardRarity, - pub cp: i32, - pub is_paid: bool, - pub drawn_at: DateTime, -} - -#[derive(Debug, Clone, FromRow, Serialize, Deserialize)] -pub struct GachaPool { - pub id: i32, - pub name: String, - pub description: String, - pub is_active: bool, - pub start_at: Option>, - pub end_at: Option>, - pub pickup_card_ids: Vec, - pub rate_up_multiplier: f64, -} - -/// API Request/Response Models - -#[derive(Debug, Deserialize, Validate)] -pub struct LoginRequest { - #[validate(length(min = 1))] - pub identifier: String, - #[validate(length(min = 1))] - pub password: String, -} - -#[derive(Debug, Serialize)] -pub struct LoginResponse { - pub access_token: String, - pub token_type: String, - pub expires_in: u64, - pub user: UserInfo, -} - -#[derive(Debug, Serialize)] -pub struct UserInfo { - pub did: String, - pub handle: String, -} - -#[derive(Debug, Deserialize, Validate)] -pub struct CardDrawRequest { - pub user_did: String, - #[serde(default)] - pub is_paid: bool, - pub pool_id: Option, -} - -#[derive(Debug, Serialize)] -pub struct CardDrawResponse { - pub card: UserCardResponse, - pub master: CardMasterResponse, - pub is_unique: bool, - pub animation_type: String, - pub draw_history_id: i32, -} - -#[derive(Debug, Serialize)] -pub struct UserCardResponse { - pub id: i32, - pub card_id: i32, - pub cp: i32, - pub status: CardRarity, - pub skill: Option, - pub obtained_at: DateTime, - pub is_unique: bool, - pub unique_id: Option, -} - -#[derive(Debug, Serialize)] -pub struct CardMasterResponse { - pub id: i32, - pub name: String, - pub base_cp_min: i32, - pub base_cp_max: i32, - pub color: String, - pub description: String, -} - -#[derive(Debug, Serialize)] -pub struct UserCardCollectionResponse { - pub user_did: String, - pub cards: Vec, - pub total_count: i32, - pub unique_count: i32, - pub rarity_breakdown: RarityBreakdown, -} - -#[derive(Debug, Serialize)] -pub struct UserCardWithMaster { - pub card: UserCardResponse, - pub master: CardMasterResponse, -} - -/// Database query result for JOIN operations -#[derive(Debug, Clone, FromRow)] -pub 
struct UserCardWithMasterQuery { - // user_cards fields - pub id: i32, - pub user_did: String, - pub card_id: i32, - pub cp: i32, - pub status: String, - pub obtained_at: DateTime, - pub is_unique: bool, - pub unique_id: Option, - // card_master fields - pub master_id: i32, - pub name: String, - pub base_cp_min: i32, - pub base_cp_max: i32, - pub color: String, - pub description: String, -} - -/// Database query result for unique card registry -#[derive(Debug, Clone, FromRow)] -pub struct UniqueCardQuery { - pub card_id: i32, - pub card_name: String, - pub owner_did: Option, - pub owner_handle: Option, - pub obtained_at: Option>, -} - -#[derive(Debug, Serialize)] -pub struct RarityBreakdown { - pub normal: i32, - pub rare: i32, - pub super_rare: i32, - pub kira: i32, - pub unique: i32, -} - -#[derive(Debug, Serialize)] -pub struct UniqueCardRegistryResponse { - pub unique_cards: Vec, - pub total_unique_cards: i32, - pub available_unique_cards: i32, -} - -#[derive(Debug, Serialize)] -pub struct UniqueCardInfo { - pub card_id: i32, - pub card_name: String, - pub owner_did: Option, - pub owner_handle: Option, - pub obtained_at: Option>, - pub is_available: bool, -} - -#[derive(Debug, Serialize)] -pub struct GachaStatsResponse { - pub probabilities: GachaProbabilities, - pub total_draws: i32, - pub total_unique_cards: i32, - pub available_unique_cards: i32, - pub rarity_distribution: RarityBreakdown, -} - -#[derive(Debug, Serialize)] -pub struct GachaProbabilities { - pub normal: f64, - pub rare: f64, - pub super_rare: f64, - pub kira: f64, - pub unique: f64, - pub paid_multiplier: f64, -} - -/// External Data Models (from ai.json) - -#[derive(Debug, Deserialize)] -pub struct ExternalCardData { - pub ai: AiData, -} - -#[derive(Debug, Deserialize)] -pub struct AiData { - pub card: CardData, -} - -#[derive(Debug, Deserialize)] -pub struct CardData { - pub cards: Vec, -} - -#[derive(Debug, Deserialize)] -pub struct ExternalCard { - pub id: i32, - pub name: String, - pub cp: CpRange, - pub color: String, - pub skill: String, - pub lang: Option, -} - -#[derive(Debug, Deserialize)] -pub struct CpRange { - pub min: i32, - pub max: i32, -} - -#[derive(Debug, Deserialize)] -pub struct LangData { - pub ja: Option, -} - -#[derive(Debug, Deserialize)] -pub struct JapaneseData { - pub name: Option, - pub skill: Option, -} - -/// atproto Models - -#[derive(Debug, Serialize)] -pub struct AtprotoCardRecord { - #[serde(rename = "$type")] - pub record_type: String, - #[serde(rename = "cardId")] - pub card_id: i32, - pub cp: i32, - pub status: String, - #[serde(rename = "obtainedAt")] - pub obtained_at: DateTime, - #[serde(rename = "isUnique")] - pub is_unique: bool, - #[serde(rename = "uniqueId")] - pub unique_id: Option, -} \ No newline at end of file diff --git a/api-rs/src/services/atproto.rs b/api-rs/src/services/atproto.rs deleted file mode 100644 index a7c5bde..0000000 --- a/api-rs/src/services/atproto.rs +++ /dev/null @@ -1,232 +0,0 @@ -use crate::{ - error::{AppError, AppResult}, - models::*, -}; -use reqwest::Client; -use serde_json::json; - -pub struct AtprotoService { - client: Client, - session: Option, -} - -impl AtprotoService { - pub fn new() -> Self { - Self { - client: Client::new(), - session: None, - } - } - - pub fn with_session(session: String) -> Self { - Self { - client: Client::new(), - session: Some(session), - } - } - - /// Create a card record in user's atproto PDS - pub async fn create_card_record( - &self, - did: &str, - card: &UserCard, - master: &CardMaster, - ) -> AppResult { - 
let session = self.session.as_ref() - .ok_or_else(|| AppError::authentication("No atproto session available"))?; - - let record_data = AtprotoCardRecord { - record_type: "ai.card.collection".to_string(), - card_id: card.card_id, - cp: card.cp, - status: card.status.as_str().to_string(), - obtained_at: card.obtained_at, - is_unique: card.is_unique, - unique_id: card.unique_id, - }; - - // Determine PDS endpoint from DID - let pds_url = self.resolve_pds_from_did(did).await?; - - let response = self - .client - .post(&format!("{}/xrpc/com.atproto.repo.createRecord", pds_url)) - .header("Authorization", format!("Bearer {}", session)) - .json(&json!({ - "repo": did, - "collection": "ai.card.collection", - "record": record_data - })) - .send() - .await - .map_err(AppError::HttpClient)?; - - if !response.status().is_success() { - return Err(AppError::ExternalService(format!( - "Failed to create atproto record: HTTP {}", - response.status() - ))); - } - - let result: serde_json::Value = response - .json() - .await - .map_err(AppError::HttpClient)?; - - let uri = result["uri"] - .as_str() - .ok_or_else(|| AppError::ExternalService("No URI in response".to_string()))?; - - Ok(uri.to_string()) - } - - /// List card records from user's PDS - pub async fn list_card_records(&self, did: &str) -> AppResult> { - let session = self.session.as_ref() - .ok_or_else(|| AppError::authentication("No atproto session available"))?; - - let pds_url = self.resolve_pds_from_did(did).await?; - - let response = self - .client - .get(&format!("{}/xrpc/com.atproto.repo.listRecords", pds_url)) - .header("Authorization", format!("Bearer {}", session)) - .query(&[ - ("repo", did), - ("collection", "ai.card.collection"), - ]) - .send() - .await - .map_err(AppError::HttpClient)?; - - if !response.status().is_success() { - return Err(AppError::ExternalService(format!( - "Failed to list atproto records: HTTP {}", - response.status() - ))); - } - - let result: serde_json::Value = response - .json() - .await - .map_err(AppError::HttpClient)?; - - let records = result["records"] - .as_array() - .ok_or_else(|| AppError::ExternalService("No records in response".to_string()))?; - - Ok(records.clone()) - } - - /// Resolve PDS endpoint from DID - async fn resolve_pds_from_did(&self, did: &str) -> AppResult { - // This is a simplified resolution - // In a real implementation, you would: - // 1. Parse the DID to get the method and identifier - // 2. Query the appropriate resolver (PLC directory, etc.) - // 3. 
Get the serviceEndpoint for the PDS - - if did.starts_with("did:plc:") { - // For PLC DIDs, query the PLC directory - let plc_id = did.strip_prefix("did:plc:").unwrap(); - self.resolve_plc_did(plc_id).await - } else if did.starts_with("did:web:") { - // For web DIDs, construct URL from domain - let domain = did.strip_prefix("did:web:").unwrap(); - Ok(format!("https://{}", domain)) - } else { - // Fallback to Bluesky PDS - Ok("https://bsky.social".to_string()) - } - } - - /// Resolve PLC DID to PDS endpoint - async fn resolve_plc_did(&self, plc_id: &str) -> AppResult { - let response = self - .client - .get(&format!("https://plc.directory/{}", plc_id)) - .send() - .await - .map_err(AppError::HttpClient)?; - - if !response.status().is_success() { - return Ok("https://bsky.social".to_string()); // Fallback - } - - let did_doc: serde_json::Value = response - .json() - .await - .map_err(AppError::HttpClient)?; - - // Extract PDS endpoint from DID document - if let Some(services) = did_doc["service"].as_array() { - for service in services { - if service["id"] == "#atproto_pds" { - if let Some(endpoint) = service["serviceEndpoint"].as_str() { - return Ok(endpoint.to_string()); - } - } - } - } - - // Fallback to Bluesky - Ok("https://bsky.social".to_string()) - } - - /// Authenticate with atproto and get session - pub async fn authenticate(&self, identifier: &str, password: &str) -> AppResult<(String, String)> { - // Try multiple PDS endpoints for authentication - let pds_endpoints = [ - "https://bsky.social", - "https://staging.bsky.app", - // Add more PDS endpoints as needed - ]; - - for pds_url in pds_endpoints { - match self.try_authenticate_at_pds(pds_url, identifier, password).await { - Ok((session, did)) => return Ok((session, did)), - Err(_) => continue, // Try next PDS - } - } - - Err(AppError::authentication("Failed to authenticate with any PDS")) - } - - /// Try authentication at a specific PDS - async fn try_authenticate_at_pds( - &self, - pds_url: &str, - identifier: &str, - password: &str, - ) -> AppResult<(String, String)> { - let response = self - .client - .post(&format!("{}/xrpc/com.atproto.server.createSession", pds_url)) - .json(&json!({ - "identifier": identifier, - "password": password - })) - .send() - .await - .map_err(AppError::HttpClient)?; - - if !response.status().is_success() { - return Err(AppError::authentication("Invalid credentials")); - } - - let result: serde_json::Value = response - .json() - .await - .map_err(AppError::HttpClient)?; - - let access_jwt = result["accessJwt"] - .as_str() - .ok_or_else(|| AppError::authentication("No access token in response"))?; - - let did = result["did"] - .as_str() - .ok_or_else(|| AppError::authentication("No DID in response"))?; - - Ok((access_jwt.to_string(), did.to_string())) - } -} \ No newline at end of file diff --git a/api-rs/src/services/card_master.rs b/api-rs/src/services/card_master.rs deleted file mode 100644 index dc080f2..0000000 --- a/api-rs/src/services/card_master.rs +++ /dev/null @@ -1,219 +0,0 @@ -use crate::{ - error::{AppError, AppResult}, - models::*, -}; -use reqwest::Client; -use std::collections::HashMap; - -pub struct CardMasterService { - client: Client, - master_url: String, -} - -impl CardMasterService { - pub fn new(master_url: String) -> Self { - Self { - client: Client::new(), - master_url, - } - } - - /// Fetch card master data from external source (ai.json) - pub async fn fetch_external_card_data(&self) -> AppResult> { - let response = self - .client - .get(&self.master_url) - 
.timeout(std::time::Duration::from_secs(10)) - .send() - .await - .map_err(AppError::HttpClient)?; - - if !response.status().is_success() { - return Err(AppError::ExternalService(format!( - "Failed to fetch card data: HTTP {}", - response.status() - ))); - } - - let data: ExternalCardData = response - .json() - .await - .map_err(AppError::HttpClient)?; - - Ok(data.ai.card.cards) - } - - /// Get fallback card data if external fetch fails - pub fn get_fallback_card_data(&self) -> Vec { - vec![ - ExternalCard { - id: 0, - name: "ai".to_string(), - cp: CpRange { min: 100, max: 200 }, - color: "#4A90E2".to_string(), - skill: "Core existence essence".to_string(), - lang: None, - }, - ExternalCard { - id: 1, - name: "dream".to_string(), - cp: CpRange { min: 90, max: 180 }, - color: "#9B59B6".to_string(), - skill: "Vision manifestation".to_string(), - lang: None, - }, - ExternalCard { - id: 2, - name: "radiance".to_string(), - cp: CpRange { min: 110, max: 220 }, - color: "#F39C12".to_string(), - skill: "Brilliant energy".to_string(), - lang: None, - }, - ExternalCard { - id: 3, - name: "neutron".to_string(), - cp: CpRange { min: 120, max: 240 }, - color: "#34495E".to_string(), - skill: "Dense core power".to_string(), - lang: None, - }, - ExternalCard { - id: 4, - name: "sun".to_string(), - cp: CpRange { min: 130, max: 260 }, - color: "#E74C3C".to_string(), - skill: "Solar radiance".to_string(), - lang: None, - }, - ExternalCard { - id: 5, - name: "night".to_string(), - cp: CpRange { min: 80, max: 160 }, - color: "#2C3E50".to_string(), - skill: "Shadow stealth".to_string(), - lang: None, - }, - ExternalCard { - id: 6, - name: "snow".to_string(), - cp: CpRange { min: 70, max: 140 }, - color: "#ECF0F1".to_string(), - skill: "Crystal freeze".to_string(), - lang: None, - }, - ExternalCard { - id: 7, - name: "thunder".to_string(), - cp: CpRange { min: 140, max: 280 }, - color: "#F1C40F".to_string(), - skill: "Electric storm".to_string(), - lang: None, - }, - ExternalCard { - id: 8, - name: "ultimate".to_string(), - cp: CpRange { min: 150, max: 300 }, - color: "#8E44AD".to_string(), - skill: "Maximum form".to_string(), - lang: None, - }, - ExternalCard { - id: 9, - name: "sword".to_string(), - cp: CpRange { min: 160, max: 320 }, - color: "#95A5A6".to_string(), - skill: "Truth cutting".to_string(), - lang: None, - }, - ExternalCard { - id: 10, - name: "destruction".to_string(), - cp: CpRange { min: 170, max: 340 }, - color: "#C0392B".to_string(), - skill: "Entropy force".to_string(), - lang: None, - }, - ExternalCard { - id: 11, - name: "earth".to_string(), - cp: CpRange { min: 90, max: 180 }, - color: "#27AE60".to_string(), - skill: "Ground foundation".to_string(), - lang: None, - }, - ExternalCard { - id: 12, - name: "galaxy".to_string(), - cp: CpRange { min: 180, max: 360 }, - color: "#3498DB".to_string(), - skill: "Cosmic expanse".to_string(), - lang: None, - }, - ExternalCard { - id: 13, - name: "create".to_string(), - cp: CpRange { min: 100, max: 200 }, - color: "#16A085".to_string(), - skill: "Generation power".to_string(), - lang: None, - }, - ExternalCard { - id: 14, - name: "supernova".to_string(), - cp: CpRange { min: 200, max: 400 }, - color: "#E67E22".to_string(), - skill: "Stellar explosion".to_string(), - lang: None, - }, - ExternalCard { - id: 15, - name: "world".to_string(), - cp: CpRange { min: 250, max: 500 }, - color: "#9B59B6".to_string(), - skill: "Reality control".to_string(), - lang: None, - }, - ] - } - - /// Get card master data, trying external source first then fallback - pub 
async fn get_card_master_data(&self) -> Vec { - match self.fetch_external_card_data().await { - Ok(cards) => { - tracing::info!("Fetched {} cards from external source", cards.len()); - cards - } - Err(e) => { - tracing::warn!("Failed to fetch external card data: {}, using fallback", e); - self.get_fallback_card_data() - } - } - } - - /// Convert external card data to database format - pub fn external_to_card_master(external: &ExternalCard) -> CardMaster { - let description = if let Some(lang) = &external.lang { - if let Some(ja) = &lang.ja { - if let Some(name) = &ja.name { - format!("{} - {}", name, external.skill) - } else { - external.skill.clone() - } - } else { - external.skill.clone() - } - } else { - external.skill.clone() - }; - - CardMaster { - id: external.id, - name: external.name.clone(), - base_cp_min: external.cp.min, - base_cp_max: external.cp.max, - color: external.color.clone(), - description, - } - } -} \ No newline at end of file diff --git a/api-rs/src/services/gacha.rs b/api-rs/src/services/gacha.rs deleted file mode 100644 index 9f24ee3..0000000 --- a/api-rs/src/services/gacha.rs +++ /dev/null @@ -1,541 +0,0 @@ -use crate::{ - config::GachaConfig, - database::{Database, DatabaseTransaction}, - error::{AppError, AppResult}, - models::*, - query_as, query_one_as, query_optional_as, - services::CardMasterService, -}; -use chrono::Utc; -use rand::Rng; -use std::collections::HashMap; -use uuid::Uuid; - -pub struct GachaService { - config: GachaConfig, -} - -impl GachaService { - pub fn new(config: GachaConfig) -> Self { - Self { config } - } - - /// Main gacha draw function - pub async fn draw_card( - &self, - db: &Database, - user_did: &str, - is_paid: bool, - pool_id: Option, - ) -> AppResult { - let mut tx = db.begin().await?; - - // Get or create user - let user = self.get_or_create_user(&mut tx, user_did).await?; - - // Determine card rarity - let rarity = self.determine_rarity(is_paid, pool_id)?; - - // Select a card based on rarity and pool - let card_master = self.select_card_master(&mut tx, &rarity, pool_id).await?; - - // Calculate CP based on rarity - let cp = self.calculate_cp(&card_master, &rarity); - - // Check if this will be a unique card - let is_unique = rarity == CardRarity::Unique; - - // For unique cards, check availability - if is_unique { - if let Some(_existing) = self.check_unique_card_availability(&mut tx, card_master.id).await? 
{ - // Unique card already taken, fallback to Kira - return self.draw_card_with_fallback(&mut tx, user.id, &card_master, CardRarity::Kira, is_paid).await; - } - } - - // Create the user card - let user_card = self.create_user_card( - &mut tx, - user.id, - &card_master, - cp, - &rarity, - is_unique, - ).await?; - - // Record draw history - let draw_history = self.record_draw_history( - &mut tx, - user.id, - card_master.id, - &rarity, - cp, - is_paid, - ).await?; - - // Register unique card if applicable - if is_unique { - self.register_unique_card(&mut tx, &user_card, user_did).await?; - } - - tx.commit().await?; - - Ok(CardDrawResponse { - card: UserCardResponse { - id: user_card.id, - card_id: user_card.card_id, - cp: user_card.cp, - status: user_card.status, - skill: user_card.skill, - obtained_at: user_card.obtained_at, - is_unique: user_card.is_unique, - unique_id: user_card.unique_id, - }, - master: CardMasterResponse { - id: card_master.id, - name: card_master.name, - base_cp_min: card_master.base_cp_min, - base_cp_max: card_master.base_cp_max, - color: card_master.color, - description: card_master.description, - }, - is_unique, - animation_type: self.get_animation_type(&rarity), - draw_history_id: draw_history.id, - }) - } - - /// Determine card rarity based on probabilities - fn determine_rarity(&self, is_paid: bool, _pool_id: Option) -> AppResult { - let mut rng = rand::thread_rng(); - let rand_val: f64 = rng.gen_range(0.0..100.0); - - let cumulative_probs = self.config.cumulative_probabilities(is_paid); - let mut cumulative = 0.0; - - for (prob, rarity) in cumulative_probs { - cumulative += prob; - if rand_val < cumulative { - return Ok(rarity); - } - } - - // Fallback to normal if no match (should never happen) - Ok(CardRarity::Normal) - } - - /// Select a card master based on rarity and pool - async fn select_card_master( - &self, - tx: &mut DatabaseTransaction, - rarity: &CardRarity, - _pool_id: Option, - ) -> AppResult { - // For now, randomly select from all available cards - // In a full implementation, this would consider pool restrictions - let cards = match tx { - DatabaseTransaction::Postgres(tx) => { - sqlx::query_as::<_, CardMaster>("SELECT * FROM card_master ORDER BY RANDOM() LIMIT 1") - .fetch_one(&mut **tx) - .await - .map_err(AppError::Database)? - } - DatabaseTransaction::Sqlite(tx) => { - sqlx::query_as::<_, CardMaster>("SELECT * FROM card_master ORDER BY RANDOM() LIMIT 1") - .fetch_one(&mut **tx) - .await - .map_err(AppError::Database)? - } - }; - - Ok(cards) - } - - /// Calculate CP based on base CP and rarity multiplier - fn calculate_cp(&self, card_master: &CardMaster, rarity: &CardRarity) -> i32 { - let mut rng = rand::thread_rng(); - let base_cp = rng.gen_range(card_master.base_cp_min..=card_master.base_cp_max); - let multiplier = rarity.multiplier(); - (base_cp as f64 * multiplier) as i32 - } - - /// Check if a unique card is available - async fn check_unique_card_availability( - &self, - tx: &mut DatabaseTransaction, - card_id: i32, - ) -> AppResult> { - match tx { - DatabaseTransaction::Postgres(tx) => { - sqlx::query_as::<_, UniqueCardRegistry>( - "SELECT * FROM unique_card_registry WHERE card_id = $1" - ) - .bind(card_id) - .fetch_optional(&mut **tx) - .await - .map_err(AppError::Database) - } - DatabaseTransaction::Sqlite(tx) => { - sqlx::query_as::<_, UniqueCardRegistry>( - "SELECT * FROM unique_card_registry WHERE card_id = ?" 
- ) - .bind(card_id) - .fetch_optional(&mut **tx) - .await - .map_err(AppError::Database) - } - } - } - - /// Create a user card - async fn create_user_card( - &self, - tx: &mut DatabaseTransaction, - user_id: i32, - card_master: &CardMaster, - cp: i32, - rarity: &CardRarity, - is_unique: bool, - ) -> AppResult { - let unique_id = if is_unique { Some(Uuid::new_v4()) } else { None }; - let now = Utc::now(); - - match tx { - DatabaseTransaction::Postgres(tx) => { - sqlx::query_as::<_, UserCard>( - r#" - INSERT INTO user_cards (user_id, card_id, cp, status, obtained_at, is_unique, unique_id) - VALUES ($1, $2, $3, $4, $5, $6, $7) - RETURNING * - "# - ) - .bind(user_id) - .bind(card_master.id) - .bind(cp) - .bind(rarity) - .bind(now) - .bind(is_unique) - .bind(unique_id) - .fetch_one(&mut **tx) - .await - .map_err(AppError::Database) - } - DatabaseTransaction::Sqlite(tx) => { - sqlx::query_as::<_, UserCard>( - r#" - INSERT INTO user_cards (user_id, card_id, cp, status, obtained_at, is_unique, unique_id) - VALUES (?, ?, ?, ?, ?, ?, ?) - RETURNING * - "# - ) - .bind(user_id) - .bind(card_master.id) - .bind(cp) - .bind(rarity) - .bind(now) - .bind(is_unique) - .bind(unique_id) - .fetch_one(&mut **tx) - .await - .map_err(AppError::Database) - } - } - } - - /// Record draw history - async fn record_draw_history( - &self, - tx: &mut DatabaseTransaction, - user_id: i32, - card_id: i32, - rarity: &CardRarity, - cp: i32, - is_paid: bool, - ) -> AppResult { - let now = Utc::now(); - - match tx { - DatabaseTransaction::Postgres(tx) => { - sqlx::query_as::<_, DrawHistory>( - r#" - INSERT INTO draw_history (user_id, card_id, status, cp, is_paid, drawn_at) - VALUES ($1, $2, $3, $4, $5, $6) - RETURNING * - "# - ) - .bind(user_id) - .bind(card_id) - .bind(rarity) - .bind(cp) - .bind(is_paid) - .bind(now) - .fetch_one(&mut **tx) - .await - .map_err(AppError::Database) - } - DatabaseTransaction::Sqlite(tx) => { - sqlx::query_as::<_, DrawHistory>( - r#" - INSERT INTO draw_history (user_id, card_id, status, cp, is_paid, drawn_at) - VALUES (?, ?, ?, ?, ?, ?) - RETURNING * - "# - ) - .bind(user_id) - .bind(card_id) - .bind(rarity) - .bind(cp) - .bind(is_paid) - .bind(now) - .fetch_one(&mut **tx) - .await - .map_err(AppError::Database) - } - } - } - - /// Register unique card - async fn register_unique_card( - &self, - tx: &mut DatabaseTransaction, - user_card: &UserCard, - owner_did: &str, - ) -> AppResult { - let unique_id = user_card.unique_id.ok_or_else(|| { - AppError::Internal("Unique card must have unique_id".to_string()) - })?; - - match tx { - DatabaseTransaction::Postgres(tx) => { - sqlx::query_as::<_, UniqueCardRegistry>( - r#" - INSERT INTO unique_card_registry (unique_id, card_id, owner_did, obtained_at) - VALUES ($1, $2, $3, $4) - RETURNING * - "# - ) - .bind(unique_id) - .bind(user_card.card_id) - .bind(owner_did) - .bind(user_card.obtained_at) - .fetch_one(&mut **tx) - .await - .map_err(AppError::Database) - } - DatabaseTransaction::Sqlite(tx) => { - sqlx::query_as::<_, UniqueCardRegistry>( - r#" - INSERT INTO unique_card_registry (unique_id, card_id, owner_did, obtained_at) - VALUES (?, ?, ?, ?) 
- RETURNING * - "# - ) - .bind(unique_id) - .bind(user_card.card_id) - .bind(owner_did) - .bind(user_card.obtained_at) - .fetch_one(&mut **tx) - .await - .map_err(AppError::Database) - } - } - } - - /// Get or create user by DID - async fn get_or_create_user( - &self, - tx: &mut DatabaseTransaction, - did: &str, - ) -> AppResult { - // Try to get existing user - let existing_user = match tx { - DatabaseTransaction::Postgres(tx) => { - sqlx::query_as::<_, User>("SELECT * FROM users WHERE did = $1") - .bind(did) - .fetch_optional(&mut **tx) - .await - .map_err(AppError::Database)? - } - DatabaseTransaction::Sqlite(tx) => { - sqlx::query_as::<_, User>("SELECT * FROM users WHERE did = ?") - .bind(did) - .fetch_optional(&mut **tx) - .await - .map_err(AppError::Database)? - } - }; - - if let Some(user) = existing_user { - return Ok(user); - } - - // Create new user - let handle = did.split('.').next().unwrap_or("unknown").to_string(); - let now = Utc::now(); - - match tx { - DatabaseTransaction::Postgres(tx) => { - sqlx::query_as::<_, User>( - "INSERT INTO users (did, handle, created_at, updated_at) VALUES ($1, $2, $3, $4) RETURNING *" - ) - .bind(did) - .bind(&handle) - .bind(now) - .bind(now) - .fetch_one(&mut **tx) - .await - .map_err(AppError::Database) - } - DatabaseTransaction::Sqlite(tx) => { - sqlx::query_as::<_, User>( - "INSERT INTO users (did, handle, created_at, updated_at) VALUES (?, ?, ?, ?) RETURNING *" - ) - .bind(did) - .bind(&handle) - .bind(now) - .bind(now) - .fetch_one(&mut **tx) - .await - .map_err(AppError::Database) - } - } - } - - /// Draw card with fallback rarity (when unique is unavailable) - async fn draw_card_with_fallback( - &self, - tx: &mut DatabaseTransaction, - user_id: i32, - card_master: &CardMaster, - fallback_rarity: CardRarity, - is_paid: bool, - ) -> AppResult { - let cp = self.calculate_cp(card_master, &fallback_rarity); - - let user_card = self.create_user_card( - tx, - user_id, - card_master, - cp, - &fallback_rarity, - false, - ).await?; - - let draw_history = self.record_draw_history( - tx, - user_id, - card_master.id, - &fallback_rarity, - cp, - is_paid, - ).await?; - - Ok(CardDrawResponse { - card: UserCardResponse { - id: user_card.id, - card_id: user_card.card_id, - cp: user_card.cp, - status: user_card.status, - skill: user_card.skill, - obtained_at: user_card.obtained_at, - is_unique: user_card.is_unique, - unique_id: user_card.unique_id, - }, - master: CardMasterResponse { - id: card_master.id, - name: card_master.name.clone(), - base_cp_min: card_master.base_cp_min, - base_cp_max: card_master.base_cp_max, - color: card_master.color.clone(), - description: card_master.description.clone(), - }, - is_unique: false, - animation_type: self.get_animation_type(&fallback_rarity), - draw_history_id: draw_history.id, - }) - } - - /// Get animation type based on rarity - fn get_animation_type(&self, rarity: &CardRarity) -> String { - match rarity { - CardRarity::Normal => "normal".to_string(), - CardRarity::Rare => "sparkle".to_string(), - CardRarity::SuperRare => "glow".to_string(), - CardRarity::Kira => "rainbow".to_string(), - CardRarity::Unique => "legendary".to_string(), - } - } - - /// Get gacha statistics - pub async fn get_gacha_stats(&self, db: &Database) -> AppResult { - // Get total draws - let total_draws: (i64,) = match db { - Database::Postgres(pool) => { - sqlx::query_as("SELECT COUNT(*) FROM draw_history") - .fetch_one(pool) - .await - .map_err(AppError::Database)? 
- } - Database::Sqlite(pool) => { - sqlx::query_as("SELECT COUNT(*) FROM draw_history") - .fetch_one(pool) - .await - .map_err(AppError::Database)? - } - }; - - // Get unique card counts - let unique_counts: (i64, i64) = match db { - Database::Postgres(pool) => { - sqlx::query_as( - r#" - SELECT - COUNT(*) as total, - (SELECT COUNT(*) FROM card_master) - COUNT(*) as available - FROM unique_card_registry - "# - ) - .fetch_one(pool) - .await - .map_err(AppError::Database)? - } - Database::Sqlite(pool) => { - sqlx::query_as( - r#" - SELECT - COUNT(*) as total, - (SELECT COUNT(*) FROM card_master) - COUNT(*) as available - FROM unique_card_registry - "# - ) - .fetch_one(pool) - .await - .map_err(AppError::Database)? - } - }; - - // Get rarity distribution - let rarity_breakdown = RarityBreakdown { - normal: 0, // Would need actual counts from database - rare: 0, - super_rare: 0, - kira: 0, - unique: unique_counts.0 as i32, - }; - - Ok(GachaStatsResponse { - probabilities: GachaProbabilities { - normal: self.config.prob_normal, - rare: self.config.prob_rare, - super_rare: self.config.prob_super_rare, - kira: self.config.prob_kira, - unique: self.config.prob_unique, - paid_multiplier: 2.0, - }, - total_draws: total_draws.0 as i32, - total_unique_cards: unique_counts.0 as i32, - available_unique_cards: unique_counts.1 as i32, - rarity_distribution: rarity_breakdown, - }) - } -} \ No newline at end of file diff --git a/api-rs/src/services/mod.rs b/api-rs/src/services/mod.rs deleted file mode 100644 index d3692ff..0000000 --- a/api-rs/src/services/mod.rs +++ /dev/null @@ -1,9 +0,0 @@ -pub mod gacha; -pub mod card_master; -pub mod atproto; -pub mod user; - -pub use gacha::GachaService; -pub use card_master::CardMasterService; -pub use atproto::AtprotoService; -pub use user::UserService; \ No newline at end of file diff --git a/api-rs/src/services/user.rs b/api-rs/src/services/user.rs deleted file mode 100644 index 95d06e0..0000000 --- a/api-rs/src/services/user.rs +++ /dev/null @@ -1,184 +0,0 @@ -use crate::{ - database::Database, - error::{AppError, AppResult}, - models::*, -}; -use chrono::Utc; - -pub struct UserService; - -impl UserService { - pub async fn get_user_by_did(db: &Database, did: &str) -> AppResult> { - match db { - Database::Postgres(pool) => { - sqlx::query_as::<_, User>("SELECT * FROM users WHERE did = $1") - .bind(did) - .fetch_optional(pool) - .await - .map_err(AppError::Database) - } - Database::Sqlite(pool) => { - sqlx::query_as::<_, User>("SELECT * FROM users WHERE did = ?") - .bind(did) - .fetch_optional(pool) - .await - .map_err(AppError::Database) - } - } - } - - pub async fn create_user(db: &Database, did: &str, handle: &str) -> AppResult { - let now = Utc::now(); - - match db { - Database::Postgres(pool) => { - sqlx::query_as::<_, User>( - "INSERT INTO users (did, handle, created_at, updated_at) VALUES ($1, $2, $3, $4) RETURNING *" - ) - .bind(did) - .bind(handle) - .bind(now) - .bind(now) - .fetch_one(pool) - .await - .map_err(AppError::Database) - } - Database::Sqlite(pool) => { - sqlx::query_as::<_, User>( - "INSERT INTO users (did, handle, created_at, updated_at) VALUES (?, ?, ?, ?) 
RETURNING *" - ) - .bind(did) - .bind(handle) - .bind(now) - .bind(now) - .fetch_one(pool) - .await - .map_err(AppError::Database) - } - } - } - - pub async fn update_user_handle(db: &Database, did: &str, handle: &str) -> AppResult { - let now = Utc::now(); - - match db { - Database::Postgres(pool) => { - sqlx::query_as::<_, User>( - "UPDATE users SET handle = $1, updated_at = $2 WHERE did = $3 RETURNING *" - ) - .bind(handle) - .bind(now) - .bind(did) - .fetch_one(pool) - .await - .map_err(AppError::Database) - } - Database::Sqlite(pool) => { - sqlx::query_as::<_, User>( - "UPDATE users SET handle = ?, updated_at = ? WHERE did = ? RETURNING *" - ) - .bind(handle) - .bind(now) - .bind(did) - .fetch_one(pool) - .await - .map_err(AppError::Database) - } - } - } - - pub async fn get_user_card_count(db: &Database, user_did: &str) -> AppResult { - match db { - Database::Postgres(pool) => { - let row: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM user_cards WHERE user_did = $1") - .bind(user_did) - .fetch_one(pool) - .await - .map_err(AppError::Database)?; - Ok(row.0) - } - Database::Sqlite(pool) => { - let row: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM user_cards WHERE user_did = ?") - .bind(user_did) - .fetch_one(pool) - .await - .map_err(AppError::Database)?; - Ok(row.0) - } - } - } - - pub async fn get_user_unique_card_count(db: &Database, user_did: &str) -> AppResult { - match db { - Database::Postgres(pool) => { - let row: (i64,) = sqlx::query_as( - "SELECT COUNT(*) FROM user_cards WHERE user_did = $1 AND is_unique = true" - ) - .bind(user_did) - .fetch_one(pool) - .await - .map_err(AppError::Database)?; - Ok(row.0) - } - Database::Sqlite(pool) => { - let row: (i64,) = sqlx::query_as( - "SELECT COUNT(*) FROM user_cards WHERE user_did = ? AND is_unique = 1" - ) - .bind(user_did) - .fetch_one(pool) - .await - .map_err(AppError::Database)?; - Ok(row.0) - } - } - } - - pub async fn get_user_cards_by_rarity( - db: &Database, - user_did: &str, - rarity: CardRarity, - ) -> AppResult> { - match db { - Database::Postgres(pool) => { - sqlx::query_as::<_, UserCardWithMasterQuery>( - r#" - SELECT - uc.id, uc.user_did, uc.card_id, uc.cp, uc.status, - uc.obtained_at, uc.is_unique, uc.unique_id, - cm.id as master_id, cm.name, cm.base_cp_min, cm.base_cp_max, - cm.color, cm.description - FROM user_cards uc - JOIN card_master cm ON uc.card_id = cm.id - WHERE uc.user_did = $1 AND uc.status = $2 - ORDER BY uc.obtained_at DESC - "# - ) - .bind(user_did) - .bind(rarity.as_str()) - .fetch_all(pool) - .await - .map_err(AppError::Database) - } - Database::Sqlite(pool) => { - sqlx::query_as::<_, UserCardWithMasterQuery>( - r#" - SELECT - uc.id, uc.user_did, uc.card_id, uc.cp, uc.status, - uc.obtained_at, uc.is_unique, uc.unique_id, - cm.id as master_id, cm.name, cm.base_cp_min, cm.base_cp_max, - cm.color, cm.description - FROM user_cards uc - JOIN card_master cm ON uc.card_id = cm.id - WHERE uc.user_did = ? AND uc.status = ? 
- ORDER BY uc.obtained_at DESC - "# - ) - .bind(user_did) - .bind(rarity.as_str()) - .fetch_all(pool) - .await - .map_err(AppError::Database) - } - } - } -} \ No newline at end of file diff --git a/src/config.rs b/src/config.rs index 4caedcf..f032cf3 100644 --- a/src/config.rs +++ b/src/config.rs @@ -1,5 +1,5 @@ use config::{Config, ConfigError, Environment, File}; -use serde::{Deserialize, Deserializer}; +use serde::Deserialize; use std::path::PathBuf; #[derive(Debug, Clone, Deserialize)] @@ -32,7 +32,6 @@ pub struct Settings { pub card_master_url: String, // File paths - #[serde(deserialize_with = "deserialize_pathbuf")] pub config_dir: PathBuf, } @@ -74,10 +73,14 @@ impl Settings { // External data source .set_default("card_master_url", "https://git.syui.ai/ai/ai/raw/branch/main/ai.json")?; - // Load from config file if it exists - let config_file = config_dir.join("config.json"); - if config_file.exists() { - builder = builder.add_source(File::from(config_file)); + // Load from config file if it exists (support both .toml and .json) + let config_toml = config_dir.join("config.toml"); + let config_json = config_dir.join("config.json"); + + if config_toml.exists() { + builder = builder.add_source(File::from(config_toml)); + } else if config_json.exists() { + builder = builder.add_source(File::from(config_json)); } // Override with environment variables (AI_CARD_ prefix) @@ -125,12 +128,4 @@ impl GachaConfig { (self.prob_normal, crate::models::CardRarity::Normal), ] } -} - -fn deserialize_pathbuf<'de, D>(deserializer: D) -> Result -where - D: Deserializer<'de>, -{ - let s = String::deserialize(deserializer)?; - Ok(PathBuf::from(s)) } \ No newline at end of file diff --git a/src/handlers/cards.rs b/src/handlers/cards.rs index 46830b1..9b15683 100644 --- a/src/handlers/cards.rs +++ b/src/handlers/cards.rs @@ -81,17 +81,17 @@ async fn get_user_cards( sqlx::query_as::<_, UserCardWithMasterQuery>( r#" SELECT - uc.id, uc.user_did, uc.card_id, uc.cp, uc.status, + uc.id, uc.user_id, uc.card_id, uc.cp, uc.status, uc.obtained_at, uc.is_unique, uc.unique_id, cm.id as master_id, cm.name, cm.base_cp_min, cm.base_cp_max, cm.color, cm.description FROM user_cards uc JOIN card_master cm ON uc.card_id = cm.id - WHERE uc.user_did = $1 + WHERE uc.user_id = $1 ORDER BY uc.obtained_at DESC LIMIT $2 OFFSET $3 "# ) - .bind(&user_did) + .bind(user.id) .bind(limit as i64) .bind(offset as i64) .fetch_all(pool) @@ -102,17 +102,17 @@ async fn get_user_cards( sqlx::query_as::<_, UserCardWithMasterQuery>( r#" SELECT - uc.id, uc.user_did, uc.card_id, uc.cp, uc.status, + uc.id, uc.user_id, uc.card_id, uc.cp, uc.status, uc.obtained_at, uc.is_unique, uc.unique_id, cm.id as master_id, cm.name, cm.base_cp_min, cm.base_cp_max, cm.color, cm.description FROM user_cards uc JOIN card_master cm ON uc.card_id = cm.id - WHERE uc.user_did = ? + WHERE uc.user_id = ? ORDER BY uc.obtained_at DESC LIMIT ? OFFSET ? 
"# ) - .bind(&user_did) + .bind(user.id) .bind(limit as i32) .bind(offset as i32) .fetch_all(pool) diff --git a/src/main.rs b/src/main.rs index ff28db5..df0300d 100644 --- a/src/main.rs +++ b/src/main.rs @@ -31,22 +31,46 @@ pub struct AppState { #[tokio::main] async fn main() -> Result<()> { - // Initialize tracing - tracing_subscriber::fmt::init(); + // Initialize tracing with debug level + tracing_subscriber::fmt() + .with_env_filter("debug") + .init(); + + println!("🎴 ai.card API Server Starting..."); + println!("==================================="); // Load configuration + println!("📁 Loading configuration..."); let settings = Settings::new() - .map_err(|e| anyhow::anyhow!("Failed to load configuration: {}", e))?; + .map_err(|e| { + eprintln!("❌ Failed to load configuration: {}", e); + anyhow::anyhow!("Failed to load configuration: {}", e) + })?; + + println!("✅ Configuration loaded successfully"); + println!("📍 Config directory: {}", settings.config_dir.display()); + println!("🌐 Port: {}", settings.port); + println!("🗄️ Database URL: {}", settings.database_url); info!("Starting ai.card API server v{}", env!("CARGO_PKG_VERSION")); - info!("Configuration loaded from: {}", settings.config_dir.display()); // Initialize database - let database = Database::connect(&settings.database_url).await?; + println!("🔗 Connecting to database..."); + let database = Database::connect(&settings.database_url).await + .map_err(|e| { + eprintln!("❌ Database connection failed: {}", e); + e + })?; + println!("✅ Database connected successfully"); // Run migrations - database.migrate().await?; - info!("Database migrations completed"); + println!("🔄 Running database migrations..."); + database.migrate().await + .map_err(|e| { + eprintln!("❌ Database migration failed: {}", e); + e + })?; + println!("✅ Database migrations completed"); let app_state = AppState { db: database, @@ -54,14 +78,38 @@ async fn main() -> Result<()> { }; // Build application routes + println!("🛣️ Setting up routes..."); let app = create_app(app_state).await; + println!("✅ Routes configured"); // Start server let addr = SocketAddr::from(([0, 0, 0, 0], settings.port)); + println!("🚀 Starting server on {}", addr); + println!("🔗 Health check: http://localhost:{}/health", settings.port); + println!("📡 API endpoints: http://localhost:{}/api/v1", settings.port); + println!("🎮 Card endpoints:"); + println!(" - POST /api/v1/cards/draw - Draw card"); + println!(" - GET /api/v1/cards/user/{{did}} - Get user cards"); + println!(" - GET /api/v1/cards/unique - Get unique registry"); + println!(" - GET /api/v1/cards/stats - Get gacha stats"); + println!(" - GET /api/v1/cards/master - Get card master"); + println!("==================================="); + info!("ai.card API server listening on {}", addr); - let listener = tokio::net::TcpListener::bind(addr).await?; - axum::serve(listener, app).await?; + let listener = tokio::net::TcpListener::bind(addr).await + .map_err(|e| { + eprintln!("❌ Failed to bind to address {}: {}", addr, e); + e + })?; + + println!("🎉 Server started successfully! 
Press Ctrl+C to stop."); + + axum::serve(listener, app).await + .map_err(|e| { + eprintln!("❌ Server error: {}", e); + e + })?; Ok(()) } diff --git a/src/models.rs b/src/models.rs index 13af15d..59e4d30 100644 --- a/src/models.rs +++ b/src/models.rs @@ -66,7 +66,7 @@ pub struct CardMaster { #[derive(Debug, Clone, FromRow, Serialize, Deserialize)] pub struct UserCard { pub id: i32, - pub user_did: String, + pub user_id: i32, pub card_id: i32, pub cp: i32, pub status: CardRarity, @@ -89,7 +89,7 @@ pub struct UniqueCardRegistry { #[derive(Debug, Clone, FromRow, Serialize, Deserialize)] pub struct DrawHistory { pub id: i32, - pub user_did: String, + pub user_id: i32, pub card_id: i32, pub status: CardRarity, pub cp: i32, @@ -192,7 +192,7 @@ pub struct UserCardWithMaster { pub struct UserCardWithMasterQuery { // user_cards fields pub id: i32, - pub user_did: String, + pub user_id: i32, pub card_id: i32, pub cp: i32, pub status: String, diff --git a/start_server.sh b/start_server.sh index 8cdba0e..8059f4a 100755 --- a/start_server.sh +++ b/start_server.sh @@ -5,13 +5,13 @@ set -e # Configuration CARD_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -API_DIR="$CARD_DIR/api" +API_DIR="$CARD_DIR/python/api" VENV_DIR="$HOME/.config/syui/ai/card/venv" PYTHON="$VENV_DIR/bin/python" # Default settings HOST="${HOST:-localhost}" -PORT="${PORT:-8000}" +PORT="${PORT:-8001}" RELOAD="${RELOAD:-true}" echo "🎴 Starting ai.card MCP Server"