1
0
This commit is contained in:
syui 2025-06-08 06:42:03 +09:00
parent e7948bf4cf
commit 2e55e6ce09
Signed by: syui
GPG Key ID: 5417CFEBAD92DF56
53 changed files with 3053 additions and 0 deletions

58
Cargo.toml Normal file
View File

@ -0,0 +1,58 @@
# Package manifest for the aicard server binary.
[package]
name = "aicard"
version = "0.1.0"
edition = "2021"
description = "ai.card - Autonomous card collection system with atproto integration"
authors = ["syui"]

# Single binary target; library code lives alongside it under src/.
[[bin]]
name = "aicard"
path = "src/main.rs"

[dependencies]
# Core Web Framework
axum = { version = "0.7", features = ["macros", "multipart"] }
tokio = { version = "1.0", features = ["full"] }
tower = { version = "0.4", features = ["full"] }
tower-http = { version = "0.5", features = ["cors", "trace"] }
# Database & ORM
# Both postgres and sqlite backends are compiled in; the runtime choice is
# made from the database URL scheme (see src/database.rs).
sqlx = { version = "0.7", features = ["runtime-tokio-rustls", "postgres", "sqlite", "uuid", "chrono", "migrate"] }
uuid = { version = "1.0", features = ["v4", "serde"] }
# Serialization & Validation
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
validator = { version = "0.18", features = ["derive"] }
# Date/Time
chrono = { version = "0.4", features = ["serde"] }
# Authentication & Security
jsonwebtoken = "9.0"
bcrypt = "0.15"
# HTTP Client (for atproto integration)
reqwest = { version = "0.11", features = ["json"] }
# Configuration
config = "0.13"
dotenvy = "0.15"
# CLI
clap = { version = "4.0", features = ["derive"] }
# Random (for gacha system)
rand = "0.8"
# Error Handling
anyhow = "1.0"
thiserror = "1.0"
# Logging
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
# Development
serde_yaml = "0.9"
dirs = "5.0"

View File

@ -0,0 +1,134 @@
-- PostgreSQL migration for ai.card database schema
-- Create custom types
-- Values are declared in ascending rarity; Postgres orders enum comparisons
-- by declaration order, so `status` columns sort common -> rare.
CREATE TYPE card_rarity AS ENUM ('normal', 'rare', 'super_rare', 'kira', 'unique');
-- Enable UUID extension
-- NOTE(review): extension is enabled but no column below uses uuid_generate_v4()
-- as a default — presumably UUIDs are generated app-side; confirm.
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
-- Users table - stores atproto DID-based user information
CREATE TABLE IF NOT EXISTS users (
    id SERIAL PRIMARY KEY,
    did TEXT NOT NULL UNIQUE, -- atproto Decentralized Identifier
    handle TEXT NOT NULL, -- atproto handle (e.g., alice.bsky.social)
    created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
);
CREATE INDEX IF NOT EXISTS idx_users_did ON users(did);
CREATE INDEX IF NOT EXISTS idx_users_handle ON users(handle);
-- Card master data - template definitions for all card types
CREATE TABLE IF NOT EXISTS card_master (
    id INTEGER PRIMARY KEY, -- Card ID (0-15 in current system)
    name TEXT NOT NULL, -- Card name (e.g., "ai", "dream", "radiance")
    base_cp_min INTEGER NOT NULL, -- Minimum base CP for this card
    base_cp_max INTEGER NOT NULL, -- Maximum base CP for this card
    color TEXT NOT NULL, -- Card color theme
    description TEXT NOT NULL -- Card description/lore
);
-- User cards - actual card instances owned by users
CREATE TABLE IF NOT EXISTS user_cards (
    id SERIAL PRIMARY KEY,
    user_id INTEGER NOT NULL,
    card_id INTEGER NOT NULL, -- References card_master.id
    cp INTEGER NOT NULL, -- Calculated CP (base_cp * rarity_multiplier)
    status card_rarity NOT NULL, -- Card rarity
    skill TEXT, -- Optional skill description
    obtained_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
    is_unique BOOLEAN NOT NULL DEFAULT FALSE,
    unique_id UUID, -- UUID for unique cards
    FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
    FOREIGN KEY (card_id) REFERENCES card_master(id)
);
CREATE INDEX IF NOT EXISTS idx_user_cards_user_id ON user_cards(user_id);
CREATE INDEX IF NOT EXISTS idx_user_cards_card_id ON user_cards(card_id);
CREATE INDEX IF NOT EXISTS idx_user_cards_status ON user_cards(status);
CREATE INDEX IF NOT EXISTS idx_user_cards_unique_id ON user_cards(unique_id);
-- Global unique card registry - tracks ownership of unique cards
CREATE TABLE IF NOT EXISTS unique_card_registry (
    id SERIAL PRIMARY KEY,
    unique_id UUID NOT NULL UNIQUE, -- UUID from user_cards.unique_id
    card_id INTEGER NOT NULL, -- Which card type is unique
    owner_did TEXT NOT NULL, -- Current owner's atproto DID
    obtained_at TIMESTAMP WITH TIME ZONE NOT NULL,
    verse_skill_id TEXT, -- Optional verse skill reference
    FOREIGN KEY (card_id) REFERENCES card_master(id),
    UNIQUE(card_id) -- Only one unique per card_id allowed
);
CREATE INDEX IF NOT EXISTS idx_unique_registry_card_id ON unique_card_registry(card_id);
CREATE INDEX IF NOT EXISTS idx_unique_registry_owner_did ON unique_card_registry(owner_did);
-- Draw history - tracks all gacha draws for statistics
CREATE TABLE IF NOT EXISTS draw_history (
    id SERIAL PRIMARY KEY,
    user_id INTEGER NOT NULL,
    card_id INTEGER NOT NULL,
    status card_rarity NOT NULL,
    cp INTEGER NOT NULL,
    is_paid BOOLEAN NOT NULL DEFAULT FALSE, -- Paid vs free gacha
    drawn_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
    FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
    FOREIGN KEY (card_id) REFERENCES card_master(id)
);
CREATE INDEX IF NOT EXISTS idx_draw_history_user_id ON draw_history(user_id);
CREATE INDEX IF NOT EXISTS idx_draw_history_drawn_at ON draw_history(drawn_at);
CREATE INDEX IF NOT EXISTS idx_draw_history_status ON draw_history(status);
-- Gacha pools - special event pools with rate-ups
CREATE TABLE IF NOT EXISTS gacha_pools (
    id SERIAL PRIMARY KEY,
    name TEXT NOT NULL,
    description TEXT NOT NULL,
    is_active BOOLEAN NOT NULL DEFAULT TRUE,
    start_at TIMESTAMP WITH TIME ZONE,
    end_at TIMESTAMP WITH TIME ZONE,
    pickup_card_ids INTEGER[], -- Array of card IDs
    rate_up_multiplier DECIMAL(4,2) NOT NULL DEFAULT 1.0
);
CREATE INDEX IF NOT EXISTS idx_gacha_pools_active ON gacha_pools(is_active);
CREATE INDEX IF NOT EXISTS idx_gacha_pools_dates ON gacha_pools(start_at, end_at);
-- Insert default card master data (0-15 cards from ai.json)
-- Idempotent seed: re-running the migration leaves existing rows untouched.
INSERT INTO card_master (id, name, base_cp_min, base_cp_max, color, description) VALUES
(0, 'ai', 100, 200, '#4A90E2', 'The core essence of existence'),
(1, 'dream', 90, 180, '#9B59B6', 'Visions of possibility'),
(2, 'radiance', 110, 220, '#F39C12', 'Brilliant light energy'),
(3, 'neutron', 120, 240, '#34495E', 'Dense stellar core'),
(4, 'sun', 130, 260, '#E74C3C', 'Solar radiance'),
(5, 'night', 80, 160, '#2C3E50', 'Darkness and mystery'),
(6, 'snow', 70, 140, '#ECF0F1', 'Pure frozen crystalline'),
(7, 'thunder', 140, 280, '#F1C40F', 'Electric storm energy'),
(8, 'ultimate', 150, 300, '#8E44AD', 'The highest form'),
(9, 'sword', 160, 320, '#95A5A6', 'Blade of cutting truth'),
(10, 'destruction', 170, 340, '#C0392B', 'Force of entropy'),
(11, 'earth', 90, 180, '#27AE60', 'Grounding foundation'),
(12, 'galaxy', 180, 360, '#3498DB', 'Cosmic expanse'),
(13, 'create', 100, 200, '#16A085', 'Power of generation'),
(14, 'supernova', 200, 400, '#E67E22', 'Stellar explosion'),
(15, 'world', 250, 500, '#9B59B6', 'Reality itself')
ON CONFLICT (id) DO NOTHING;
-- Create function for updating updated_at timestamp
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
-- NOTE(review): the quoted form `language 'plpgsql'` is a deprecated spelling;
-- the modern form is `LANGUAGE plpgsql` (behavior is identical).
$$ language 'plpgsql';
-- Create trigger for updating users.updated_at
CREATE TRIGGER trigger_users_updated_at
BEFORE UPDATE ON users
FOR EACH ROW
EXECUTE FUNCTION update_updated_at_column();

View File

@ -0,0 +1,130 @@
-- SQLite migration for ai.card database schema
-- Create custom types (SQLite uses CHECK constraints instead of ENUMs)
-- Card rarity levels
-- NOTE(review): this lookup table is not referenced by any FOREIGN KEY in this
-- schema; rarity is actually enforced by the CHECK constraints on user_cards
-- and draw_history below. Kept as documentation of the legal values.
CREATE TABLE IF NOT EXISTS card_rarity_enum (
    value TEXT PRIMARY KEY CHECK (value IN ('normal', 'rare', 'super_rare', 'kira', 'unique'))
);
INSERT OR IGNORE INTO card_rarity_enum (value) VALUES
('normal'), ('rare'), ('super_rare'), ('kira'), ('unique');
-- Users table - stores atproto DID-based user information
CREATE TABLE IF NOT EXISTS users (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    did TEXT NOT NULL UNIQUE, -- atproto Decentralized Identifier
    handle TEXT NOT NULL, -- atproto handle (e.g., alice.bsky.social)
    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_users_did ON users(did);
CREATE INDEX IF NOT EXISTS idx_users_handle ON users(handle);
-- Card master data - template definitions for all card types
CREATE TABLE IF NOT EXISTS card_master (
    id INTEGER PRIMARY KEY, -- Card ID (0-15 in current system)
    name TEXT NOT NULL, -- Card name (e.g., "ai", "dream", "radiance")
    base_cp_min INTEGER NOT NULL, -- Minimum base CP for this card
    base_cp_max INTEGER NOT NULL, -- Maximum base CP for this card
    color TEXT NOT NULL, -- Card color theme
    description TEXT NOT NULL -- Card description/lore
);
-- User cards - actual card instances owned by users
CREATE TABLE IF NOT EXISTS user_cards (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    user_id INTEGER NOT NULL,
    card_id INTEGER NOT NULL, -- References card_master.id
    cp INTEGER NOT NULL, -- Calculated CP (base_cp * rarity_multiplier)
    status TEXT NOT NULL -- Card rarity
    CHECK (status IN ('normal', 'rare', 'super_rare', 'kira', 'unique')),
    skill TEXT, -- Optional skill description
    obtained_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    is_unique BOOLEAN NOT NULL DEFAULT FALSE,
    unique_id TEXT, -- UUID for unique cards (TEXT: SQLite has no UUID type)
    FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
    FOREIGN KEY (card_id) REFERENCES card_master(id)
);
CREATE INDEX IF NOT EXISTS idx_user_cards_user_id ON user_cards(user_id);
CREATE INDEX IF NOT EXISTS idx_user_cards_card_id ON user_cards(card_id);
CREATE INDEX IF NOT EXISTS idx_user_cards_status ON user_cards(status);
CREATE INDEX IF NOT EXISTS idx_user_cards_unique_id ON user_cards(unique_id);
-- Global unique card registry - tracks ownership of unique cards
CREATE TABLE IF NOT EXISTS unique_card_registry (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    unique_id TEXT NOT NULL UNIQUE, -- UUID from user_cards.unique_id
    card_id INTEGER NOT NULL, -- Which card type is unique
    owner_did TEXT NOT NULL, -- Current owner's atproto DID
    obtained_at DATETIME NOT NULL,
    verse_skill_id TEXT, -- Optional verse skill reference
    FOREIGN KEY (card_id) REFERENCES card_master(id),
    UNIQUE(card_id) -- Only one unique per card_id allowed
);
CREATE INDEX IF NOT EXISTS idx_unique_registry_card_id ON unique_card_registry(card_id);
CREATE INDEX IF NOT EXISTS idx_unique_registry_owner_did ON unique_card_registry(owner_did);
-- Draw history - tracks all gacha draws for statistics
CREATE TABLE IF NOT EXISTS draw_history (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    user_id INTEGER NOT NULL,
    card_id INTEGER NOT NULL,
    status TEXT NOT NULL
    CHECK (status IN ('normal', 'rare', 'super_rare', 'kira', 'unique')),
    cp INTEGER NOT NULL,
    is_paid BOOLEAN NOT NULL DEFAULT FALSE, -- Paid vs free gacha
    drawn_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
    FOREIGN KEY (card_id) REFERENCES card_master(id)
);
CREATE INDEX IF NOT EXISTS idx_draw_history_user_id ON draw_history(user_id);
CREATE INDEX IF NOT EXISTS idx_draw_history_drawn_at ON draw_history(drawn_at);
CREATE INDEX IF NOT EXISTS idx_draw_history_status ON draw_history(status);
-- Gacha pools - special event pools with rate-ups
CREATE TABLE IF NOT EXISTS gacha_pools (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name TEXT NOT NULL,
    description TEXT NOT NULL,
    is_active BOOLEAN NOT NULL DEFAULT TRUE,
    start_at DATETIME,
    end_at DATETIME,
    pickup_card_ids TEXT, -- JSON array of card IDs (no native array type)
    rate_up_multiplier REAL NOT NULL DEFAULT 1.0
);
CREATE INDEX IF NOT EXISTS idx_gacha_pools_active ON gacha_pools(is_active);
CREATE INDEX IF NOT EXISTS idx_gacha_pools_dates ON gacha_pools(start_at, end_at);
-- Insert default card master data (0-15 cards from ai.json)
-- Idempotent seed: INSERT OR IGNORE keeps existing rows on re-run.
INSERT OR IGNORE INTO card_master (id, name, base_cp_min, base_cp_max, color, description) VALUES
(0, 'ai', 100, 200, '#4A90E2', 'The core essence of existence'),
(1, 'dream', 90, 180, '#9B59B6', 'Visions of possibility'),
(2, 'radiance', 110, 220, '#F39C12', 'Brilliant light energy'),
(3, 'neutron', 120, 240, '#34495E', 'Dense stellar core'),
(4, 'sun', 130, 260, '#E74C3C', 'Solar radiance'),
(5, 'night', 80, 160, '#2C3E50', 'Darkness and mystery'),
(6, 'snow', 70, 140, '#ECF0F1', 'Pure frozen crystalline'),
(7, 'thunder', 140, 280, '#F1C40F', 'Electric storm energy'),
(8, 'ultimate', 150, 300, '#8E44AD', 'The highest form'),
(9, 'sword', 160, 320, '#95A5A6', 'Blade of cutting truth'),
(10, 'destruction', 170, 340, '#C0392B', 'Force of entropy'),
(11, 'earth', 90, 180, '#27AE60', 'Grounding foundation'),
(12, 'galaxy', 180, 360, '#3498DB', 'Cosmic expanse'),
(13, 'create', 100, 200, '#16A085', 'Power of generation'),
(14, 'supernova', 200, 400, '#E67E22', 'Stellar explosion'),
(15, 'world', 250, 500, '#9B59B6', 'Reality itself');
-- Create trigger for updating users.updated_at
-- NOTE(review): the trigger UPDATEs the table it fires on; SQLite's
-- recursive_triggers pragma is OFF by default so this does not recurse —
-- confirm the application never enables that pragma.
CREATE TRIGGER IF NOT EXISTS trigger_users_updated_at
AFTER UPDATE ON users
BEGIN
    UPDATE users SET updated_at = CURRENT_TIMESTAMP WHERE id = NEW.id;
END;

108
src/auth.rs Normal file
View File

@ -0,0 +1,108 @@
use chrono::{Duration, Utc};
use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
use serde::{Deserialize, Serialize};
use crate::error::{AppError, AppResult};
/// JWT payload carried in access tokens issued by [`JwtService`].
#[derive(Debug, Serialize, Deserialize)]
pub struct Claims {
    pub did: String,    // atproto DID of the authenticated user
    pub handle: String, // atproto handle at token-issue time
    pub exp: usize,     // expiry, seconds since the Unix epoch
}

/// Signs and verifies JWTs with a single shared HMAC secret.
pub struct JwtService {
    encoding_key: EncodingKey, // key used for signing
    decoding_key: DecodingKey, // key used for verification (same secret)
}
impl JwtService {
    /// Build a service whose signing and verification keys both derive from `secret`.
    pub fn new(secret: &str) -> Self {
        let encoding_key = EncodingKey::from_secret(secret.as_ref());
        let decoding_key = DecodingKey::from_secret(secret.as_ref());
        Self { encoding_key, decoding_key }
    }

    /// Issue a signed token carrying `did` and `handle`, valid for
    /// `expires_in_minutes` from now.
    ///
    /// Fails if the expiry timestamp cannot be computed (overflow) or if
    /// signing fails.
    pub fn create_token(&self, did: &str, handle: &str, expires_in_minutes: u64) -> AppResult<String> {
        let expires_at = Utc::now()
            .checked_add_signed(Duration::minutes(expires_in_minutes as i64))
            .ok_or_else(|| AppError::internal("Failed to calculate expiration time"))?;
        let claims = Claims {
            did: did.to_owned(),
            handle: handle.to_owned(),
            exp: expires_at.timestamp() as usize,
        };
        encode(&Header::default(), &claims, &self.encoding_key).map_err(AppError::Jwt)
    }

    /// Decode and validate `token` with the default validation rules,
    /// returning its claims on success.
    pub fn verify_token(&self, token: &str) -> AppResult<Claims> {
        decode::<Claims>(token, &self.decoding_key, &Validation::default())
            .map(|data| data.claims)
            .map_err(AppError::Jwt)
    }
}
/// Mock atproto authentication service
/// In a real implementation, this would integrate with actual atproto services
pub struct AtprotoAuthService {
    jwt_service: JwtService, // issues/verifies the tokens returned to clients
}
impl AtprotoAuthService {
    /// Construct the service with a JWT signer keyed by `secret`.
    pub fn new(secret: &str) -> Self {
        let jwt_service = JwtService::new(secret);
        Self { jwt_service }
    }

    /// Authenticate user with atproto credentials.
    ///
    /// Mock implementation — the password is ignored. A real flow would
    /// connect to the user's PDS, verify the credentials, and resolve the
    /// canonical DID/handle pair. Here the identifier is taken to be either a
    /// DID (`did:...`) or a handle, and the missing half is fabricated.
    pub async fn authenticate(&self, identifier: &str, _password: &str) -> AppResult<AuthenticatedUser> {
        let looks_like_did = identifier.starts_with("did:");
        let (did, handle) = if looks_like_did {
            (identifier.to_owned(), extract_handle_from_did(identifier))
        } else {
            let fabricated_did = format!("did:plc:{}", generate_mock_plc_id());
            (fabricated_did, identifier.to_owned())
        };
        Ok(AuthenticatedUser { did, handle })
    }

    /// Issue an access token for `user`, valid for `expires_in_minutes`.
    pub fn create_access_token(&self, user: &AuthenticatedUser, expires_in_minutes: u64) -> AppResult<String> {
        self.jwt_service.create_token(&user.did, &user.handle, expires_in_minutes)
    }

    /// Validate an access token and return its claims.
    pub fn verify_access_token(&self, token: &str) -> AppResult<Claims> {
        self.jwt_service.verify_token(token)
    }
}
/// Identity resolved by [`AtprotoAuthService::authenticate`].
#[derive(Debug, Clone)]
pub struct AuthenticatedUser {
    pub did: String,    // atproto DID
    pub handle: String, // atproto handle
}
/// Extract handle from DID (mock implementation).
///
/// A real implementation would resolve the DID document to obtain the handle;
/// this simply returns the text after the last `:`, or the whole input when
/// no `:` is present.
fn extract_handle_from_did(did: &str) -> String {
    match did.rfind(':') {
        Some(idx) => did[idx + 1..].to_string(),
        None => did.to_string(),
    }
}
/// Generate mock PLC identifier.
///
/// Returns 24 hex characters taken from a random v4 UUID with its hyphens
/// stripped (a 32-char hex string, so the slice is always in bounds). Real
/// PLC identifiers are base32-encoded; this is only a placeholder.
fn generate_mock_plc_id() -> String {
    use uuid::Uuid;
    let hex: String = Uuid::new_v4()
        .to_string()
        .chars()
        .filter(|c| *c != '-')
        .collect();
    hex[..24].to_string()
}

127
src/config.rs Normal file
View File

@ -0,0 +1,127 @@
use config::{Config, ConfigError, Environment, File};
use serde::Deserialize;
use std::path::PathBuf;
/// Runtime configuration, merged from defaults, an optional
/// `config.toml` under the user config dir, and `AI_CARD_*` env vars.
#[derive(Debug, Clone, Deserialize)]
pub struct Settings {
    // Application settings
    pub app_name: String,
    pub port: u16,
    pub api_v1_prefix: String,
    // Database settings
    pub database_url: String,
    pub database_url_supabase: Option<String>,
    // Authentication
    pub secret_key: String,
    pub access_token_expire_minutes: u64,
    // Gacha probabilities (percentages)
    pub prob_normal: f64,
    pub prob_rare: f64,
    pub prob_super_rare: f64,
    pub prob_kira: f64,
    pub prob_unique: f64,
    // atproto settings
    pub atproto_pds_url: Option<String>,
    pub atproto_handle: Option<String>,
    // External data
    pub card_master_url: String,
    // File paths
    // NOTE(review): required during deserialization (no serde default) —
    // Settings::new() must supply a value or try_deserialize fails.
    pub config_dir: PathBuf,
}
impl Settings {
    /// Load settings by layering, in increasing priority: built-in defaults,
    /// `~/.config/syui/ai/card/config.toml` (if present), then `AI_CARD_*`
    /// environment variables. The config directory is created on first run.
    ///
    /// Returns a `ConfigError` if the directory cannot be created, a source
    /// fails to parse, or deserialization into `Settings` fails.
    pub fn new() -> Result<Self, ConfigError> {
        let config_dir = dirs::home_dir()
            .unwrap_or_else(|| PathBuf::from("."))
            .join(".config")
            .join("syui")
            .join("ai")
            .join("card");
        // Ensure config directory exists
        if !config_dir.exists() {
            std::fs::create_dir_all(&config_dir)
                .map_err(|e| ConfigError::Message(format!("Failed to create config directory: {}", e)))?;
        }
        let mut builder = Config::builder()
            // Default values
            .set_default("app_name", "ai.card")?
            .set_default("port", 8000)?
            .set_default("api_v1_prefix", "/api/v1")?
            // Database defaults
            .set_default("database_url", format!("sqlite://{}?mode=rwc", config_dir.join("aicard.db").display()))?
            // Authentication defaults
            .set_default("secret_key", "your-secret-key-change-in-production")?
            .set_default("access_token_expire_minutes", 1440)? // 24 hours
            // Gacha probability defaults (matching Python implementation)
            .set_default("prob_normal", 99.789)?
            .set_default("prob_rare", 0.1)?
            .set_default("prob_super_rare", 0.01)?
            .set_default("prob_kira", 0.1)?
            .set_default("prob_unique", 0.0001)?
            // External data source
            .set_default("card_master_url", "https://git.syui.ai/ai/ai/raw/branch/main/ai.json")?
            // FIX: `config_dir` is a required field of `Settings` with no serde
            // default, so try_deserialize() would fail with "missing field"
            // unless a value is present. Seed it here; the authoritative
            // PathBuf is reassigned below.
            .set_default("config_dir", config_dir.to_string_lossy().to_string())?;
        // Load from config file if it exists
        let config_file = config_dir.join("config.toml");
        if config_file.exists() {
            builder = builder.add_source(File::from(config_file));
        }
        // Override with environment variables (AI_CARD_ prefix).
        // FIX: the previous `.separator("_")` turned every underscore into a
        // nesting level, so e.g. AI_CARD_DATABASE_URL mapped to the nested key
        // `database.url` and never reached the flat `database_url` field.
        // Without a separator, AI_CARD_DATABASE_URL -> `database_url`.
        builder = builder.add_source(Environment::with_prefix("AI_CARD"));
        let mut settings: Settings = builder.build()?.try_deserialize()?;
        // Set the config directory path (replace the string-derived default)
        settings.config_dir = config_dir;
        Ok(settings)
    }

    /// Get the gacha configuration for the gacha service.
    pub fn gacha_config(&self) -> GachaConfig {
        GachaConfig {
            prob_normal: self.prob_normal,
            prob_rare: self.prob_rare,
            prob_super_rare: self.prob_super_rare,
            prob_kira: self.prob_kira,
            prob_unique: self.prob_unique,
        }
    }
}
/// Draw-probability table handed to the gacha service.
/// Values are percentages (see the defaults in `Settings::new`).
#[derive(Debug, Clone)]
pub struct GachaConfig {
    pub prob_normal: f64,
    pub prob_rare: f64,
    pub prob_super_rare: f64,
    pub prob_kira: f64,
    pub prob_unique: f64,
}

impl GachaConfig {
    /// Calculate cumulative probabilities for rarity determination
    ///
    /// Returns (probability, rarity) pairs ordered rarest-first. Paid draws
    /// double every rarity's chance except `normal`.
    ///
    /// NOTE(review): despite the name, these are the individual per-rarity
    /// probabilities, not running cumulative sums — the consumer (gacha
    /// service, not visible here) presumably accumulates them while scanning;
    /// confirm against that implementation. Also note the paid-draw
    /// multiplier makes the totals exceed 100 slightly since `prob_normal`
    /// is not reduced to compensate.
    pub fn cumulative_probabilities(&self, is_paid: bool) -> Vec<(f64, crate::models::CardRarity)> {
        let multiplier = if is_paid { 2.0 } else { 1.0 };
        vec![
            (self.prob_unique * multiplier, crate::models::CardRarity::Unique),
            (self.prob_kira * multiplier, crate::models::CardRarity::Kira),
            (self.prob_super_rare * multiplier, crate::models::CardRarity::SuperRare),
            (self.prob_rare * multiplier, crate::models::CardRarity::Rare),
            (self.prob_normal, crate::models::CardRarity::Normal),
        ]
    }
}

190
src/database.rs Normal file
View File

@ -0,0 +1,190 @@
use sqlx::{Pool, Postgres, Sqlite, Row};
use sqlx::migrate::MigrateDatabase;
use crate::error::{AppError, AppResult};
use std::str::FromStr;
/// Backend-dispatching handle over either a Postgres or a SQLite pool.
/// Cloning is cheap: sqlx pools are internally reference-counted.
#[derive(Clone)]
pub enum Database {
    Postgres(Pool<Postgres>),
    Sqlite(Pool<Sqlite>),
}
impl Database {
pub async fn connect(database_url: &str) -> AppResult<Self> {
if database_url.starts_with("postgres://") || database_url.starts_with("postgresql://") {
let pool = sqlx::postgres::PgPoolOptions::new()
.max_connections(10)
.connect(database_url)
.await
.map_err(AppError::Database)?;
Ok(Database::Postgres(pool))
} else if database_url.starts_with("sqlite://") {
// Extract the path from sqlite:// URL
let db_path = database_url.trim_start_matches("sqlite://");
// Create the database file if it doesn't exist
if !Sqlite::database_exists(database_url).await.unwrap_or(false) {
Sqlite::create_database(database_url)
.await
.map_err(AppError::Database)?;
}
let pool = sqlx::sqlite::SqlitePoolOptions::new()
.max_connections(5)
.connect(database_url)
.await
.map_err(AppError::Database)?;
Ok(Database::Sqlite(pool))
} else {
Err(AppError::Configuration(format!(
"Unsupported database URL: {}",
database_url
)))
}
}
pub async fn migrate(&self) -> AppResult<()> {
match self {
Database::Postgres(pool) => {
sqlx::migrate!("./migrations/postgres")
.run(pool)
.await
.map_err(AppError::Migration)?;
}
Database::Sqlite(pool) => {
sqlx::migrate!("./migrations/sqlite")
.run(pool)
.await
.map_err(AppError::Migration)?;
}
}
Ok(())
}
/// Get a generic connection for complex operations
pub async fn acquire(&self) -> AppResult<DatabaseConnection> {
match self {
Database::Postgres(pool) => {
let conn = pool.acquire().await.map_err(AppError::Database)?;
Ok(DatabaseConnection::Postgres(conn))
}
Database::Sqlite(pool) => {
let conn = pool.acquire().await.map_err(AppError::Database)?;
Ok(DatabaseConnection::Sqlite(conn))
}
}
}
/// Begin a transaction
pub async fn begin(&self) -> AppResult<DatabaseTransaction> {
match self {
Database::Postgres(pool) => {
let tx = pool.begin().await.map_err(AppError::Database)?;
Ok(DatabaseTransaction::Postgres(tx))
}
Database::Sqlite(pool) => {
let tx = pool.begin().await.map_err(AppError::Database)?;
Ok(DatabaseTransaction::Sqlite(tx))
}
}
}
}
/// A checked-out pooled connection for either backend.
pub enum DatabaseConnection {
    Postgres(sqlx::pool::PoolConnection<Postgres>),
    Sqlite(sqlx::pool::PoolConnection<Sqlite>),
}

/// An open transaction for either backend; consumed by commit/rollback.
pub enum DatabaseTransaction {
    Postgres(sqlx::Transaction<'static, Postgres>),
    Sqlite(sqlx::Transaction<'static, Sqlite>),
}
impl DatabaseTransaction {
    /// Commit the transaction, making its changes durable.
    pub async fn commit(self) -> AppResult<()> {
        let outcome = match self {
            DatabaseTransaction::Postgres(tx) => tx.commit().await,
            DatabaseTransaction::Sqlite(tx) => tx.commit().await,
        };
        outcome.map_err(AppError::Database)
    }

    /// Roll the transaction back, discarding its changes.
    pub async fn rollback(self) -> AppResult<()> {
        let outcome = match self {
            DatabaseTransaction::Postgres(tx) => tx.rollback().await,
            DatabaseTransaction::Sqlite(tx) => tx.rollback().await,
        };
        outcome.map_err(AppError::Database)
    }
}
// Macros for database-agnostic queries
//
// Each macro matches on the `Database` enum and runs the same SQL text
// against whichever pool is active, mapping sqlx errors into AppError.
// The `$query` string must therefore be valid for BOTH backends
// (placeholder syntax differs: $1 for Postgres, ? for SQLite — callers
// that bind parameters use the per-backend match in the handlers instead).

// Fetch all rows, deserialized into `$struct`.
#[macro_export]
macro_rules! query_as {
    ($struct:ty, $query:expr, $db:expr) => {
        match $db {
            Database::Postgres(pool) => {
                sqlx::query_as::<_, $struct>($query)
                    .fetch_all(pool)
                    .await
                    .map_err(AppError::Database)
            }
            Database::Sqlite(pool) => {
                sqlx::query_as::<_, $struct>($query)
                    .fetch_all(pool)
                    .await
                    .map_err(AppError::Database)
            }
        }
    };
}

// Fetch exactly one row; errors if zero or multiple rows are returned.
#[macro_export]
macro_rules! query_one_as {
    ($struct:ty, $query:expr, $db:expr) => {
        match $db {
            Database::Postgres(pool) => {
                sqlx::query_as::<_, $struct>($query)
                    .fetch_one(pool)
                    .await
                    .map_err(AppError::Database)
            }
            Database::Sqlite(pool) => {
                sqlx::query_as::<_, $struct>($query)
                    .fetch_one(pool)
                    .await
                    .map_err(AppError::Database)
            }
        }
    };
}

// Fetch zero or one row as an Option<$struct>.
#[macro_export]
macro_rules! query_optional_as {
    ($struct:ty, $query:expr, $db:expr) => {
        match $db {
            Database::Postgres(pool) => {
                sqlx::query_as::<_, $struct>($query)
                    .fetch_optional(pool)
                    .await
                    .map_err(AppError::Database)
            }
            Database::Sqlite(pool) => {
                sqlx::query_as::<_, $struct>($query)
                    .fetch_optional(pool)
                    .await
                    .map_err(AppError::Database)
            }
        }
    };
}

142
src/error.rs Normal file
View File

@ -0,0 +1,142 @@
use axum::{
http::StatusCode,
response::{IntoResponse, Response},
Json,
};
use serde_json::json;
use thiserror::Error;
/// Application-wide error type.
///
/// `#[from]` variants convert automatically via `?`; string variants are
/// built through the convenience constructors below. Each variant maps to a
/// specific HTTP status in the `IntoResponse` impl.
#[derive(Error, Debug)]
pub enum AppError {
    #[error("Database error: {0}")]
    Database(#[from] sqlx::Error),
    #[error("Migration error: {0}")]
    Migration(#[from] sqlx::migrate::MigrateError),
    #[error("Validation error: {0}")]
    Validation(String),
    #[error("Authentication error: {0}")]
    Authentication(String),
    #[error("Authorization error: {0}")]
    Authorization(String),
    #[error("Not found: {0}")]
    NotFound(String),
    #[error("Conflict: {0}")]
    Conflict(String),
    #[error("External service error: {0}")]
    ExternalService(String),
    #[error("Configuration error: {0}")]
    Configuration(String),
    #[error("JSON serialization error: {0}")]
    Json(#[from] serde_json::Error),
    #[error("HTTP client error: {0}")]
    HttpClient(#[from] reqwest::Error),
    #[error("JWT error: {0}")]
    Jwt(#[from] jsonwebtoken::errors::Error),
    #[error("Internal server error: {0}")]
    Internal(String),
}
impl IntoResponse for AppError {
    /// Convert the error into an HTTP response with a JSON body of the shape
    /// `{"error": {"code", "message", "timestamp"}}`.
    ///
    /// Client-caused errors (4xx) echo their message to the caller; internal
    /// and upstream failures (5xx/502) are logged in full but return a
    /// generic message so details are not leaked.
    fn into_response(self) -> Response {
        let (status, error_message, error_code) = match &self {
            AppError::Database(e) => {
                tracing::error!("Database error: {}", e);
                (StatusCode::INTERNAL_SERVER_ERROR, "Database error", "DATABASE_ERROR")
            }
            AppError::Migration(e) => {
                tracing::error!("Migration error: {}", e);
                (StatusCode::INTERNAL_SERVER_ERROR, "Migration error", "MIGRATION_ERROR")
            }
            AppError::Validation(msg) => {
                (StatusCode::BAD_REQUEST, msg.as_str(), "VALIDATION_ERROR")
            }
            AppError::Authentication(msg) => {
                (StatusCode::UNAUTHORIZED, msg.as_str(), "AUTHENTICATION_ERROR")
            }
            AppError::Authorization(msg) => {
                (StatusCode::FORBIDDEN, msg.as_str(), "AUTHORIZATION_ERROR")
            }
            AppError::NotFound(msg) => {
                (StatusCode::NOT_FOUND, msg.as_str(), "NOT_FOUND")
            }
            AppError::Conflict(msg) => {
                (StatusCode::CONFLICT, msg.as_str(), "CONFLICT")
            }
            AppError::ExternalService(msg) => {
                tracing::error!("External service error: {}", msg);
                (StatusCode::BAD_GATEWAY, "External service unavailable", "EXTERNAL_SERVICE_ERROR")
            }
            AppError::Configuration(msg) => {
                tracing::error!("Configuration error: {}", msg);
                (StatusCode::INTERNAL_SERVER_ERROR, "Configuration error", "CONFIGURATION_ERROR")
            }
            AppError::Json(e) => {
                tracing::error!("JSON error: {}", e);
                (StatusCode::BAD_REQUEST, "Invalid JSON", "JSON_ERROR")
            }
            AppError::HttpClient(e) => {
                tracing::error!("HTTP client error: {}", e);
                (StatusCode::BAD_GATEWAY, "External service error", "HTTP_CLIENT_ERROR")
            }
            AppError::Jwt(e) => {
                tracing::error!("JWT error: {}", e);
                (StatusCode::UNAUTHORIZED, "Invalid token", "JWT_ERROR")
            }
            AppError::Internal(msg) => {
                tracing::error!("Internal error: {}", msg);
                (StatusCode::INTERNAL_SERVER_ERROR, "Internal server error", "INTERNAL_ERROR")
            }
        };
        let body = Json(json!({
            "error": {
                "code": error_code,
                "message": error_message,
                "timestamp": chrono::Utc::now().to_rfc3339()
            }
        }));
        (status, body).into_response()
    }
}
// Convenience methods for common errors
// Each constructor accepts anything convertible to String (&str, String, ...).
impl AppError {
    /// 400 Bad Request.
    pub fn validation<T: Into<String>>(msg: T) -> Self {
        Self::Validation(msg.into())
    }
    /// 401 Unauthorized.
    pub fn authentication<T: Into<String>>(msg: T) -> Self {
        Self::Authentication(msg.into())
    }
    /// 403 Forbidden.
    pub fn authorization<T: Into<String>>(msg: T) -> Self {
        Self::Authorization(msg.into())
    }
    /// 404 Not Found.
    pub fn not_found<T: Into<String>>(msg: T) -> Self {
        Self::NotFound(msg.into())
    }
    /// 409 Conflict.
    pub fn conflict<T: Into<String>>(msg: T) -> Self {
        Self::Conflict(msg.into())
    }
    /// 500 Internal Server Error (message is logged, not returned to clients).
    pub fn internal<T: Into<String>>(msg: T) -> Self {
        Self::Internal(msg.into())
    }
}

/// Shorthand result type used throughout the crate.
pub type AppResult<T> = Result<T, AppError>;

161
src/handlers/auth.rs Normal file
View File

@ -0,0 +1,161 @@
use axum::{
extract::State,
response::Json,
routing::post,
Router,
};
use validator::Validate;
use crate::{
auth::AtprotoAuthService,
error::{AppError, AppResult},
models::*,
AppState,
};
/// Authentication routes: POST /login and POST /verify.
pub fn create_routes() -> Router<AppState> {
    let router = Router::new();
    router
        .route("/login", post(login))
        .route("/verify", post(verify_token))
}
/// Authenticate user with atproto credentials and return a bearer token.
async fn login(
    State(state): State<AppState>,
    Json(request): Json<LoginRequest>,
) -> AppResult<Json<LoginResponse>> {
    // Reject structurally invalid payloads before doing any work.
    if let Err(e) = request.validate() {
        return Err(AppError::validation(e.to_string()));
    }
    let auth_service = AtprotoAuthService::new(&state.settings.secret_key);
    // Resolve the caller's identity (mock atproto check).
    let user = auth_service
        .authenticate(&request.identifier, &request.password)
        .await?;
    // Issue a bearer token for that identity.
    let expire_minutes = state.settings.access_token_expire_minutes;
    let access_token = auth_service.create_access_token(&user, expire_minutes)?;
    // Mirror the user into our own table so later queries can join on it.
    let _db_user = create_or_update_user(&state, &user.did, &user.handle).await?;
    let response = LoginResponse {
        access_token,
        token_type: "Bearer".to_string(),
        expires_in: expire_minutes * 60, // minutes -> seconds
        user: UserInfo {
            did: user.did,
            handle: user.handle,
        },
    };
    Ok(Json(response))
}
/// Verify a JWT token posted as `{"token": "..."}` and echo its claims.
async fn verify_token(
    State(state): State<AppState>,
    Json(token): Json<serde_json::Value>,
) -> AppResult<Json<serde_json::Value>> {
    // Pull the token string out of the JSON body.
    let token_str = match token["token"].as_str() {
        Some(t) => t,
        None => return Err(AppError::validation("Token is required")),
    };
    let auth_service = AtprotoAuthService::new(&state.settings.secret_key);
    let claims = auth_service.verify_access_token(token_str)?;
    let body = serde_json::json!({
        "valid": true,
        "did": claims.did,
        "handle": claims.handle,
        "exp": claims.exp
    });
    Ok(Json(body))
}
/// Create or update user in database
///
/// Upserts by DID: if a row exists, its handle is refreshed only when it
/// changed; otherwise a new row is inserted. Each step is written twice —
/// once per backend — because placeholder syntax differs ($n vs ?).
///
/// NOTE(review): the SQLite branches use `UPDATE/INSERT ... RETURNING`,
/// which requires SQLite >= 3.35 — confirm the sqlx-bundled library meets
/// that.
async fn create_or_update_user(
    state: &AppState,
    did: &str,
    handle: &str,
) -> AppResult<User> {
    let now = chrono::Utc::now();
    // Try to get existing user
    let existing_user = match &state.db {
        crate::database::Database::Postgres(pool) => {
            sqlx::query_as::<_, User>("SELECT * FROM users WHERE did = $1")
                .bind(did)
                .fetch_optional(pool)
                .await
                .map_err(AppError::Database)?
        }
        crate::database::Database::Sqlite(pool) => {
            sqlx::query_as::<_, User>("SELECT * FROM users WHERE did = ?")
                .bind(did)
                .fetch_optional(pool)
                .await
                .map_err(AppError::Database)?
        }
    };
    if let Some(mut user) = existing_user {
        // Update handle if changed (skip the write when it is identical)
        if user.handle != handle {
            user = match &state.db {
                crate::database::Database::Postgres(pool) => {
                    sqlx::query_as::<_, User>(
                        "UPDATE users SET handle = $1, updated_at = $2 WHERE did = $3 RETURNING *"
                    )
                    .bind(handle)
                    .bind(now)
                    .bind(did)
                    .fetch_one(pool)
                    .await
                    .map_err(AppError::Database)?
                }
                crate::database::Database::Sqlite(pool) => {
                    sqlx::query_as::<_, User>(
                        "UPDATE users SET handle = ?, updated_at = ? WHERE did = ? RETURNING *"
                    )
                    .bind(handle)
                    .bind(now)
                    .bind(did)
                    .fetch_one(pool)
                    .await
                    .map_err(AppError::Database)?
                }
            };
        }
        Ok(user)
    } else {
        // Create new user (created_at and updated_at both set to `now`)
        let user = match &state.db {
            crate::database::Database::Postgres(pool) => {
                sqlx::query_as::<_, User>(
                    "INSERT INTO users (did, handle, created_at, updated_at) VALUES ($1, $2, $3, $4) RETURNING *"
                )
                .bind(did)
                .bind(handle)
                .bind(now)
                .bind(now)
                .fetch_one(pool)
                .await
                .map_err(AppError::Database)?
            }
            crate::database::Database::Sqlite(pool) => {
                sqlx::query_as::<_, User>(
                    "INSERT INTO users (did, handle, created_at, updated_at) VALUES (?, ?, ?, ?) RETURNING *"
                )
                .bind(did)
                .bind(handle)
                .bind(now)
                .bind(now)
                .fetch_one(pool)
                .await
                .map_err(AppError::Database)?
            }
        };
        Ok(user)
    }
}

314
src/handlers/cards.rs Normal file
View File

@ -0,0 +1,314 @@
use axum::{
extract::{Path, Query, State},
response::Json,
routing::{get, post},
Router,
};
use serde::Deserialize;
use validator::Validate;
use crate::{
error::{AppError, AppResult},
models::*,
services::GachaService,
AppState,
};
/// Card API routes: gacha draw, collection, unique registry, stats, master data.
/// (Registration order is irrelevant — every path is distinct.)
pub fn create_routes() -> Router<AppState> {
    let router = Router::new()
        .route("/master", get(get_card_master))
        .route("/draw", post(draw_card))
        .route("/user/:user_did", get(get_user_cards))
        .route("/unique", get(get_unique_registry))
        .route("/stats", get(get_gacha_stats));
    router
}
/// Draw a card from the gacha system for the requesting user.
async fn draw_card(
    State(state): State<AppState>,
    Json(request): Json<CardDrawRequest>,
) -> AppResult<Json<CardDrawResponse>> {
    // Reject malformed requests before touching the gacha service.
    if let Err(e) = request.validate() {
        return Err(AppError::validation(e.to_string()));
    }
    let service = GachaService::new(state.settings.gacha_config());
    let result = service
        .draw_card(&state.db, &request.user_did, request.is_paid, request.pool_id)
        .await?;
    Ok(Json(result))
}
/// Query-string paging parameters for a user's card collection.
#[derive(Deserialize)]
struct UserCardsQuery {
    limit: Option<i32>,  // page size; the handler defaults/caps this value
    offset: Option<i32>, // rows to skip; handler defaults to 0
}
/// Get user's card collection
async fn get_user_cards(
State(state): State<AppState>,
Path(user_did): Path<String>,
Query(query): Query<UserCardsQuery>,
) -> AppResult<Json<UserCardCollectionResponse>> {
let limit = query.limit.unwrap_or(50).min(100); // Max 100 cards per request
let offset = query.offset.unwrap_or(0);
// Get user ID from DID
let user = match &state.db {
crate::database::Database::Postgres(pool) => {
sqlx::query_as::<_, User>("SELECT * FROM users WHERE did = $1")
.bind(&user_did)
.fetch_optional(pool)
.await
.map_err(AppError::Database)?
}
crate::database::Database::Sqlite(pool) => {
sqlx::query_as::<_, User>("SELECT * FROM users WHERE did = ?")
.bind(&user_did)
.fetch_optional(pool)
.await
.map_err(AppError::Database)?
}
};
let user = user.ok_or_else(|| AppError::not_found("User not found"))?;
// Get user's cards with master data
let cards_with_master = match &state.db {
crate::database::Database::Postgres(pool) => {
sqlx::query_as::<_, UserCardWithMasterQuery>(
r#"
SELECT
uc.id, uc.user_did, uc.card_id, uc.cp, uc.status,
uc.obtained_at, uc.is_unique, uc.unique_id,
cm.id as master_id, cm.name, cm.base_cp_min, cm.base_cp_max, cm.color, cm.description
FROM user_cards uc
JOIN card_master cm ON uc.card_id = cm.id
WHERE uc.user_did = $1
ORDER BY uc.obtained_at DESC
LIMIT $2 OFFSET $3
"#
)
.bind(&user_did)
.bind(limit as i64)
.bind(offset as i64)
.fetch_all(pool)
.await
.map_err(AppError::Database)?
}
crate::database::Database::Sqlite(pool) => {
sqlx::query_as::<_, UserCardWithMasterQuery>(
r#"
SELECT
uc.id, uc.user_did, uc.card_id, uc.cp, uc.status,
uc.obtained_at, uc.is_unique, uc.unique_id,
cm.id as master_id, cm.name, cm.base_cp_min, cm.base_cp_max, cm.color, cm.description
FROM user_cards uc
JOIN card_master cm ON uc.card_id = cm.id
WHERE uc.user_did = ?
ORDER BY uc.obtained_at DESC
LIMIT ? OFFSET ?
"#
)
.bind(&user_did)
.bind(limit as i32)
.bind(offset as i32)
.fetch_all(pool)
.await
.map_err(AppError::Database)?
}
};
let mut cards = Vec::new();
let mut rarity_breakdown = RarityBreakdown {
normal: 0,
rare: 0,
super_rare: 0,
kira: 0,
unique: 0,
};
for row in cards_with_master {
let status = match row.status.as_str() {
"normal" => CardRarity::Normal,
"rare" => CardRarity::Rare,
"super_rare" => CardRarity::SuperRare,
"kira" => CardRarity::Kira,
"unique" => CardRarity::Unique,
_ => CardRarity::Normal,
};
// Update rarity breakdown
match status {
CardRarity::Normal => rarity_breakdown.normal += 1,
CardRarity::Rare => rarity_breakdown.rare += 1,
CardRarity::SuperRare => rarity_breakdown.super_rare += 1,
CardRarity::Kira => rarity_breakdown.kira += 1,
CardRarity::Unique => rarity_breakdown.unique += 1,
}
cards.push(UserCardWithMaster {
card: UserCardResponse {
id: row.id,
card_id: row.card_id,
cp: row.cp,
status,
skill: None, // TODO: Add skill field to query if needed
obtained_at: row.obtained_at,
is_unique: row.is_unique,
unique_id: row.unique_id,
},
master: CardMasterResponse {
id: row.master_id,
name: row.name,
base_cp_min: row.base_cp_min,
base_cp_max: row.base_cp_max,
color: row.color,
description: row.description,
},
});
}
// Get total count and unique count
let (total_count, unique_count): (i64, i64) = match &state.db {
crate::database::Database::Postgres(pool) => {
sqlx::query_as(
"SELECT COUNT(*) as total, COUNT(*) FILTER (WHERE is_unique = true) as unique_count FROM user_cards WHERE user_id = $1"
)
.bind(user.id)
.fetch_one(pool)
.await
.map_err(AppError::Database)?
}
crate::database::Database::Sqlite(pool) => {
sqlx::query_as(
"SELECT COUNT(*) as total, SUM(CASE WHEN is_unique = 1 THEN 1 ELSE 0 END) as unique_count FROM user_cards WHERE user_id = ?"
)
.bind(user.id)
.fetch_one(pool)
.await
.map_err(AppError::Database)?
}
};
Ok(Json(UserCardCollectionResponse {
user_did,
cards,
total_count: total_count as i32,
unique_count: unique_count as i32,
rarity_breakdown,
}))
}
/// Get the global unique-card registry.
///
/// Returns one entry per card type with its unique-card owner, if any
/// (the LEFT JOIN leaves owner columns NULL for unclaimed cards).
async fn get_unique_registry(
    State(state): State<AppState>,
) -> AppResult<Json<UniqueCardRegistryResponse>> {
    // Get all unique cards with master data and owner info.
    let unique_cards = match &state.db {
        crate::database::Database::Postgres(pool) => {
            sqlx::query_as::<_, UniqueCardQuery>(
                r#"
                SELECT
                    cm.id as card_id,
                    cm.name as card_name,
                    ucr.owner_did,
                    u.handle as owner_handle,
                    ucr.obtained_at
                FROM card_master cm
                LEFT JOIN unique_card_registry ucr ON cm.id = ucr.card_id
                LEFT JOIN users u ON ucr.owner_did = u.did
                ORDER BY cm.id
                "#
            )
            .fetch_all(pool)
            .await
            .map_err(AppError::Database)?
        }
        crate::database::Database::Sqlite(pool) => {
            sqlx::query_as::<_, UniqueCardQuery>(
                r#"
                SELECT
                    cm.id as card_id,
                    cm.name as card_name,
                    ucr.owner_did,
                    u.handle as owner_handle,
                    ucr.obtained_at
                FROM card_master cm
                LEFT JOIN unique_card_registry ucr ON cm.id = ucr.card_id
                LEFT JOIN users u ON ucr.owner_did = u.did
                ORDER BY cm.id
                "#
            )
            .fetch_all(pool)
            .await
            .map_err(AppError::Database)?
        }
    };

    let mut unique_card_infos = Vec::with_capacity(unique_cards.len());
    let mut available_count = 0;

    for row in unique_cards {
        // A card with no registry entry is still up for grabs.
        let is_available = row.owner_did.is_none();
        if is_available {
            available_count += 1;
        }
        unique_card_infos.push(UniqueCardInfo {
            card_id: row.card_id,
            card_name: row.card_name,
            owner_did: row.owner_did,
            owner_handle: row.owner_handle,
            obtained_at: row.obtained_at,
            is_available,
        });
    }

    // Derive the total from the query (one row per card_master entry)
    // instead of hard-coding the number of card types, so adding cards
    // to card_master cannot desync this value.
    let total = unique_card_infos.len() as i32;

    Ok(Json(UniqueCardRegistryResponse {
        unique_cards: unique_card_infos,
        total_unique_cards: total,
        available_unique_cards: available_count,
    }))
}
/// Get gacha statistics (probabilities, draw totals, rarity distribution).
async fn get_gacha_stats(State(state): State<AppState>) -> AppResult<Json<GachaStatsResponse>> {
    let service = GachaService::new(state.settings.gacha_config());
    Ok(Json(service.get_gacha_stats(&state.db).await?))
}
/// Get card master data (every card type, ordered by id).
async fn get_card_master(State(state): State<AppState>) -> AppResult<Json<Vec<CardMasterResponse>>> {
    // Both backends run the identical query; only the pool type differs.
    let cards = match &state.db {
        crate::database::Database::Postgres(pool) => {
            sqlx::query_as::<_, CardMaster>("SELECT * FROM card_master ORDER BY id")
                .fetch_all(pool)
                .await
                .map_err(AppError::Database)?
        }
        crate::database::Database::Sqlite(pool) => {
            sqlx::query_as::<_, CardMaster>("SELECT * FROM card_master ORDER BY id")
                .fetch_all(pool)
                .await
                .map_err(AppError::Database)?
        }
    };

    // Convert each row into its API representation.
    let mut responses = Vec::with_capacity(cards.len());
    for card in cards {
        responses.push(CardMasterResponse {
            id: card.id,
            name: card.name,
            base_cp_min: card.base_cp_min,
            base_cp_max: card.base_cp_max,
            color: card.color,
            description: card.description,
        });
    }
    Ok(Json(responses))
}

7
src/handlers/mod.rs Normal file
View File

@ -0,0 +1,7 @@
// HTTP handler modules, one per API area.
pub mod auth;
pub mod cards;
pub mod sync;

// NOTE(review): all three modules define `create_routes`, so these glob
// re-exports shadow one another (rustc's `ambiguous_glob_reexports`).
// Callers should use explicit module paths (e.g. `handlers::cards::create_routes`)
// as main.rs does; consider dropping the globs.
pub use auth::*;
pub use cards::*;
pub use sync::*;

68
src/handlers/sync.rs Normal file
View File

@ -0,0 +1,68 @@
use axum::{
extract::State,
response::Json,
routing::post,
Router,
};
use crate::{
error::{AppError, AppResult},
AppState,
};
/// Synchronization routes between the local database and atproto PDSes.
pub fn create_routes() -> Router<AppState> {
    let router = Router::new()
        .route("/cards/export", post(export_cards))
        .route("/cards/import", post(import_cards))
        .route("/cards/bidirectional", post(bidirectional_sync));
    router
}
/// Export user's cards to atproto PDS (stub).
async fn export_cards(State(_state): State<AppState>) -> AppResult<Json<serde_json::Value>> {
    // TODO: Implement atproto PDS export
    // This would:
    // 1. Get user's cards from database
    // 2. Format as atproto records
    // 3. Upload to user's PDS
    let payload = serde_json::json!({
        "status": "success",
        "message": "Card export to PDS completed",
        "exported_count": 0,
        "note": "atproto integration not yet implemented"
    });
    Ok(Json(payload))
}
/// Import user's cards from atproto PDS (stub).
async fn import_cards(State(_state): State<AppState>) -> AppResult<Json<serde_json::Value>> {
    // TODO: Implement atproto PDS import
    // This would:
    // 1. Fetch card records from user's PDS
    // 2. Validate and parse records
    // 3. Update local database
    let payload = serde_json::json!({
        "status": "success",
        "message": "Card import from PDS completed",
        "imported_count": 0,
        "note": "atproto integration not yet implemented"
    });
    Ok(Json(payload))
}
/// Bidirectional synchronization between local DB and PDS (stub).
async fn bidirectional_sync(State(_state): State<AppState>) -> AppResult<Json<serde_json::Value>> {
    // TODO: Implement bidirectional sync
    // This would:
    // 1. Compare local cards with PDS records
    // 2. Resolve conflicts (newest wins, etc.)
    // 3. Sync in both directions
    let payload = serde_json::json!({
        "status": "success",
        "message": "Bidirectional sync completed",
        "local_to_pds": 0,
        "pds_to_local": 0,
        "conflicts_resolved": 0,
        "note": "atproto integration not yet implemented"
    });
    Ok(Json(payload))
}

103
src/main.rs Normal file
View File

@ -0,0 +1,103 @@
use anyhow::Result;
use axum::{
extract::State,
http::StatusCode,
response::Json,
routing::{get, post},
Router,
};
use serde_json::{json, Value};
use std::net::SocketAddr;
use tower_http::cors::CorsLayer;
use tracing::{info, warn};
mod config;
mod database;
mod models;
mod handlers;
mod services;
mod auth;
mod error;
use config::Settings;
use database::Database;
use error::AppError;
/// Shared application state injected into every handler via axum's
/// `State` extractor. `Clone` is cheap enough for per-request cloning
/// (pool handles and config).
#[derive(Clone)]
pub struct AppState {
    // Database handle (Postgres or SQLite backend).
    pub db: Database,
    // Loaded application configuration.
    pub settings: Settings,
}
/// Process entry point: logging, config, database, then the HTTP server.
#[tokio::main]
async fn main() -> Result<()> {
    // Set up structured logging first so startup problems are visible.
    tracing_subscriber::fmt::init();

    // Load configuration from disk/environment.
    let settings = Settings::new()
        .map_err(|e| anyhow::anyhow!("Failed to load configuration: {}", e))?;

    info!("Starting ai.card API server v{}", env!("CARGO_PKG_VERSION"));
    info!("Configuration loaded from: {}", settings.config_dir.display());

    // Connect to the database and bring the schema up to date.
    let db = Database::connect(&settings.database_url).await?;
    db.migrate().await?;
    info!("Database migrations completed");

    let state = AppState {
        db,
        settings: settings.clone(),
    };

    // Assemble the router and serve it on all interfaces.
    let app = create_app(state).await;
    let addr = SocketAddr::from(([0, 0, 0, 0], settings.port));
    info!("ai.card API server listening on {}", addr);

    let listener = tokio::net::TcpListener::bind(addr).await?;
    axum::serve(listener, app).await?;
    Ok(())
}
/// Build the complete application router: health check, versioned API,
/// permissive CORS, and shared state.
async fn create_app(state: AppState) -> Router {
    let api = create_api_routes();
    Router::new()
        .route("/health", get(health_check))
        .nest("/api/v1", api)
        .layer(CorsLayer::permissive())
        .with_state(state)
}
/// Mount the versioned API route groups under their path prefixes.
fn create_api_routes() -> Router<AppState> {
    let auth = handlers::auth::create_routes();
    let cards = handlers::cards::create_routes();
    let sync = handlers::sync::create_routes();
    Router::new()
        .nest("/auth", auth)
        .nest("/cards", cards)
        .nest("/sync", sync)
}
/// Liveness probe: reports service name, version, and current UTC time.
async fn health_check() -> Result<Json<Value>, AppError> {
    let body = json!({
        "status": "healthy",
        "service": "ai.card",
        "version": env!("CARGO_PKG_VERSION"),
        "timestamp": chrono::Utc::now().to_rfc3339()
    });
    Ok(Json(body))
}

326
src/models.rs Normal file
View File

@ -0,0 +1,326 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::{FromRow, Type};
use uuid::Uuid;
use validator::Validate;
/// Card rarity enum matching Python implementation
///
/// Stored in the database as snake_case text and serialized to JSON with
/// the same names (`normal`, `rare`, `super_rare`, `kira`, `unique`).
///
/// Fix: `rename_all = "lowercase"` would map `SuperRare` to "superrare",
/// which disagrees with the Postgres `card_rarity` enum value
/// 'super_rare' (see the migration) and with `as_str()` below;
/// `snake_case` produces "super_rare" and leaves the other variants
/// unchanged.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Type)]
#[sqlx(type_name = "card_rarity", rename_all = "snake_case")]
pub enum CardRarity {
    #[serde(rename = "normal")]
    Normal,
    #[serde(rename = "rare")]
    Rare,
    #[serde(rename = "super_rare")]
    SuperRare,
    #[serde(rename = "kira")]
    Kira,
    #[serde(rename = "unique")]
    Unique,
}

impl CardRarity {
    /// CP multiplier applied on top of the base CP roll for this rarity.
    pub fn multiplier(&self) -> f64 {
        match self {
            CardRarity::Normal => 1.0,
            CardRarity::Rare => 1.5,
            CardRarity::SuperRare => 2.0,
            CardRarity::Kira => 3.0,
            CardRarity::Unique => 5.0,
        }
    }

    /// Snake_case string form used for DB text columns and atproto records.
    pub fn as_str(&self) -> &'static str {
        match self {
            CardRarity::Normal => "normal",
            CardRarity::Rare => "rare",
            CardRarity::SuperRare => "super_rare",
            CardRarity::Kira => "kira",
            CardRarity::Unique => "unique",
        }
    }
}
// ---- Database models (rows read via sqlx::FromRow) ----

/// Application user, keyed by atproto DID.
#[derive(Debug, Clone, FromRow, Serialize, Deserialize)]
pub struct User {
    pub id: i32,
    // atproto Decentralized Identifier (unique per user).
    pub did: String,
    // atproto handle, e.g. "alice.bsky.social".
    pub handle: String,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}

/// Card type definition: name, base CP range, and presentation data.
#[derive(Debug, Clone, FromRow, Serialize, Deserialize)]
pub struct CardMaster {
    pub id: i32,
    pub name: String,
    // Inclusive lower bound of the base CP roll.
    pub base_cp_min: i32,
    // Inclusive upper bound of the base CP roll.
    pub base_cp_max: i32,
    pub color: String,
    pub description: String,
}

/// A card copy owned by a user.
///
/// NOTE(review): the gacha service INSERTs into a `user_id` column while
/// this row type (and the handlers' queries) use `user_did` — confirm the
/// actual user_cards schema; one of the two must be wrong.
#[derive(Debug, Clone, FromRow, Serialize, Deserialize)]
pub struct UserCard {
    pub id: i32,
    pub user_did: String,
    pub card_id: i32,
    // Rolled combat power (base roll x rarity multiplier).
    pub cp: i32,
    // Rarity of this copy ("status" is the column name used throughout).
    pub status: CardRarity,
    pub skill: Option<String>,
    pub obtained_at: DateTime<Utc>,
    pub is_unique: bool,
    // Global unique-card id; set only when `is_unique` is true.
    pub unique_id: Option<Uuid>,
}

/// Global registry row recording who owns a given unique card.
#[derive(Debug, Clone, FromRow, Serialize, Deserialize)]
pub struct UniqueCardRegistry {
    pub id: i32,
    pub unique_id: Uuid,
    pub card_id: i32,
    pub owner_did: String,
    pub obtained_at: DateTime<Utc>,
    pub verse_skill_id: Option<String>,
}

/// Audit log entry for every gacha draw.
#[derive(Debug, Clone, FromRow, Serialize, Deserialize)]
pub struct DrawHistory {
    pub id: i32,
    pub user_did: String,
    pub card_id: i32,
    pub status: CardRarity,
    pub cp: i32,
    pub is_paid: bool,
    pub drawn_at: DateTime<Utc>,
}

/// Time-limited gacha pool with optional pickup (rate-up) cards.
///
/// NOTE(review): `Vec<i32>` decodes from a Postgres array type; SQLite has
/// no array column — confirm this row is only loaded on Postgres.
#[derive(Debug, Clone, FromRow, Serialize, Deserialize)]
pub struct GachaPool {
    pub id: i32,
    pub name: String,
    pub description: String,
    pub is_active: bool,
    pub start_at: Option<DateTime<Utc>>,
    pub end_at: Option<DateTime<Utc>>,
    pub pickup_card_ids: Vec<i32>,
    pub rate_up_multiplier: f64,
}
// ---- API request/response models ----

/// Login body: atproto identifier (handle or DID) plus app password.
#[derive(Debug, Deserialize, Validate)]
pub struct LoginRequest {
    #[validate(length(min = 1))]
    pub identifier: String,
    #[validate(length(min = 1))]
    pub password: String,
}

/// Successful login payload: bearer token plus basic user info.
#[derive(Debug, Serialize)]
pub struct LoginResponse {
    pub access_token: String,
    pub token_type: String,
    // Token lifetime in seconds.
    pub expires_in: u64,
    pub user: UserInfo,
}

/// Minimal public identity of a user.
#[derive(Debug, Serialize)]
pub struct UserInfo {
    pub did: String,
    pub handle: String,
}

/// Body for the gacha draw endpoint.
#[derive(Debug, Deserialize, Validate)]
pub struct CardDrawRequest {
    pub user_did: String,
    // Paid draws get better odds; defaults to false when omitted.
    #[serde(default)]
    pub is_paid: bool,
    // Optional gacha pool to draw from.
    pub pool_id: Option<i32>,
}

/// Result of a single gacha draw.
#[derive(Debug, Serialize)]
pub struct CardDrawResponse {
    pub card: UserCardResponse,
    pub master: CardMasterResponse,
    pub is_unique: bool,
    // Client-side reveal animation hint (derived from rarity).
    pub animation_type: String,
    pub draw_history_id: i32,
}

/// Serialized view of an owned card.
#[derive(Debug, Serialize)]
pub struct UserCardResponse {
    pub id: i32,
    pub card_id: i32,
    pub cp: i32,
    pub status: CardRarity,
    pub skill: Option<String>,
    pub obtained_at: DateTime<Utc>,
    pub is_unique: bool,
    pub unique_id: Option<Uuid>,
}

/// Serialized view of a card type.
#[derive(Debug, Serialize)]
pub struct CardMasterResponse {
    pub id: i32,
    pub name: String,
    pub base_cp_min: i32,
    pub base_cp_max: i32,
    pub color: String,
    pub description: String,
}

/// Paginated collection response for a single user.
#[derive(Debug, Serialize)]
pub struct UserCardCollectionResponse {
    pub user_did: String,
    pub cards: Vec<UserCardWithMaster>,
    pub total_count: i32,
    pub unique_count: i32,
    pub rarity_breakdown: RarityBreakdown,
}

/// An owned card paired with its master data.
#[derive(Debug, Serialize)]
pub struct UserCardWithMaster {
    pub card: UserCardResponse,
    pub master: CardMasterResponse,
}
// ---- Flattened query-result rows and stats models ----

/// Row shape for the user_cards x card_master JOIN in the cards handler.
#[derive(Debug, Clone, FromRow)]
pub struct UserCardWithMasterQuery {
    // user_cards fields
    pub id: i32,
    pub user_did: String,
    pub card_id: i32,
    pub cp: i32,
    // Rarity as raw text; parsed back into CardRarity by the handler.
    pub status: String,
    pub obtained_at: DateTime<Utc>,
    pub is_unique: bool,
    pub unique_id: Option<Uuid>,
    // card_master fields (aliased in SQL to avoid the `id` collision)
    pub master_id: i32,
    pub name: String,
    pub base_cp_min: i32,
    pub base_cp_max: i32,
    pub color: String,
    pub description: String,
}

/// Row shape for the unique-card registry LEFT JOIN; owner columns are
/// NULL for card types nobody has claimed yet.
#[derive(Debug, Clone, FromRow)]
pub struct UniqueCardQuery {
    pub card_id: i32,
    pub card_name: String,
    pub owner_did: Option<String>,
    pub owner_handle: Option<String>,
    pub obtained_at: Option<DateTime<Utc>>,
}

/// Per-rarity card counts.
#[derive(Debug, Serialize)]
pub struct RarityBreakdown {
    pub normal: i32,
    pub rare: i32,
    pub super_rare: i32,
    pub kira: i32,
    pub unique: i32,
}

/// Response for the unique-card registry endpoint.
#[derive(Debug, Serialize)]
pub struct UniqueCardRegistryResponse {
    pub unique_cards: Vec<UniqueCardInfo>,
    pub total_unique_cards: i32,
    pub available_unique_cards: i32,
}

/// One card type's unique-slot status (owner, claim time, availability).
#[derive(Debug, Serialize)]
pub struct UniqueCardInfo {
    pub card_id: i32,
    pub card_name: String,
    pub owner_did: Option<String>,
    pub owner_handle: Option<String>,
    pub obtained_at: Option<DateTime<Utc>>,
    pub is_available: bool,
}

/// Response for the gacha statistics endpoint.
#[derive(Debug, Serialize)]
pub struct GachaStatsResponse {
    pub probabilities: GachaProbabilities,
    pub total_draws: i32,
    pub total_unique_cards: i32,
    pub available_unique_cards: i32,
    pub rarity_distribution: RarityBreakdown,
}

/// Configured draw probabilities.
#[derive(Debug, Serialize)]
pub struct GachaProbabilities {
    pub normal: f64,
    pub rare: f64,
    pub super_rare: f64,
    pub kira: f64,
    pub unique: f64,
    pub paid_multiplier: f64,
}
// ---- External data models (from ai.json) ----

/// Root of the external ai.json document.
#[derive(Debug, Deserialize)]
pub struct ExternalCardData {
    pub ai: AiData,
}

/// "ai" object wrapper.
#[derive(Debug, Deserialize)]
pub struct AiData {
    pub card: CardData,
}

/// "card" object wrapper holding the card list.
#[derive(Debug, Deserialize)]
pub struct CardData {
    pub cards: Vec<ExternalCard>,
}

/// One card entry from ai.json.
#[derive(Debug, Deserialize)]
pub struct ExternalCard {
    pub id: i32,
    pub name: String,
    pub cp: CpRange,
    pub color: String,
    pub skill: String,
    // Optional localized strings.
    pub lang: Option<LangData>,
}

/// Inclusive CP range for a card type.
#[derive(Debug, Deserialize)]
pub struct CpRange {
    pub min: i32,
    pub max: i32,
}

/// Localization container (currently Japanese only).
#[derive(Debug, Deserialize)]
pub struct LangData {
    pub ja: Option<JapaneseData>,
}

/// Japanese translations for a card.
#[derive(Debug, Deserialize)]
pub struct JapaneseData {
    pub name: Option<String>,
    pub skill: Option<String>,
}

// ---- atproto record models ----

/// Card record as written to a user's PDS under `ai.card.collection`.
#[derive(Debug, Serialize)]
pub struct AtprotoCardRecord {
    // atproto record type discriminator ("$type").
    #[serde(rename = "$type")]
    pub record_type: String,
    #[serde(rename = "cardId")]
    pub card_id: i32,
    pub cp: i32,
    pub status: String,
    #[serde(rename = "obtainedAt")]
    pub obtained_at: DateTime<Utc>,
    #[serde(rename = "isUnique")]
    pub is_unique: bool,
    #[serde(rename = "uniqueId")]
    pub unique_id: Option<Uuid>,
}

232
src/services/atproto.rs Normal file
View File

@ -0,0 +1,232 @@
use crate::{
error::{AppError, AppResult},
models::*,
};
use reqwest::Client;
use serde_json::json;
/// Thin HTTP client for atproto PDS instances: record CRUD plus session
/// authentication.
pub struct AtprotoService {
    client: Client,
    // Bearer token (accessJwt) for an authenticated session, if any.
    session: Option<String>,
}
impl AtprotoService {
    /// Create an unauthenticated client; only calls that don't require a
    /// session will succeed.
    pub fn new() -> Self {
        Self {
            client: Client::new(),
            session: None,
        }
    }

    /// Create a client that authenticates with the given session token
    /// (the `accessJwt` returned by `authenticate`).
    pub fn with_session(session: String) -> Self {
        Self {
            client: Client::new(),
            session: Some(session),
        }
    }

    /// Create a card record in user's atproto PDS.
    ///
    /// Writes the card as an `ai.card.collection` record in the repo for
    /// `did` and returns the created record's AT URI.
    ///
    /// NOTE(review): `master` is currently unused in this body; kept to
    /// avoid an API break — confirm whether master data should be embedded
    /// in the record.
    ///
    /// # Errors
    /// Fails without a session, on transport errors, on a non-2xx PDS
    /// response, or when the response lacks a `uri` field.
    pub async fn create_card_record(
        &self,
        did: &str,
        card: &UserCard,
        master: &CardMaster,
    ) -> AppResult<String> {
        let session = self.session.as_ref()
            .ok_or_else(|| AppError::authentication("No atproto session available"))?;
        let record_data = AtprotoCardRecord {
            record_type: "ai.card.collection".to_string(),
            card_id: card.card_id,
            cp: card.cp,
            status: card.status.as_str().to_string(),
            obtained_at: card.obtained_at,
            is_unique: card.is_unique,
            unique_id: card.unique_id,
        };
        // Determine PDS endpoint from DID
        let pds_url = self.resolve_pds_from_did(did).await?;
        let response = self
            .client
            .post(&format!("{}/xrpc/com.atproto.repo.createRecord", pds_url))
            .header("Authorization", format!("Bearer {}", session))
            .json(&json!({
                "repo": did,
                "collection": "ai.card.collection",
                "record": record_data
            }))
            .send()
            .await
            .map_err(AppError::HttpClient)?;
        if !response.status().is_success() {
            return Err(AppError::ExternalService(format!(
                "Failed to create atproto record: HTTP {}",
                response.status()
            )));
        }
        let result: serde_json::Value = response
            .json()
            .await
            .map_err(AppError::HttpClient)?;
        let uri = result["uri"]
            .as_str()
            .ok_or_else(|| AppError::ExternalService("No URI in response".to_string()))?;
        Ok(uri.to_string())
    }

    /// List card records from user's PDS.
    ///
    /// Returns the raw JSON records from the `ai.card.collection`
    /// collection in the repo for `did`.
    ///
    /// # Errors
    /// Fails without a session, on transport errors, on a non-2xx
    /// response, or when the response has no `records` array.
    pub async fn list_card_records(&self, did: &str) -> AppResult<Vec<serde_json::Value>> {
        let session = self.session.as_ref()
            .ok_or_else(|| AppError::authentication("No atproto session available"))?;
        let pds_url = self.resolve_pds_from_did(did).await?;
        let response = self
            .client
            .get(&format!("{}/xrpc/com.atproto.repo.listRecords", pds_url))
            .header("Authorization", format!("Bearer {}", session))
            .query(&[
                ("repo", did),
                ("collection", "ai.card.collection"),
            ])
            .send()
            .await
            .map_err(AppError::HttpClient)?;
        if !response.status().is_success() {
            return Err(AppError::ExternalService(format!(
                "Failed to list atproto records: HTTP {}",
                response.status()
            )));
        }
        let result: serde_json::Value = response
            .json()
            .await
            .map_err(AppError::HttpClient)?;
        let records = result["records"]
            .as_array()
            .ok_or_else(|| AppError::ExternalService("No records in response".to_string()))?;
        Ok(records.clone())
    }

    /// Resolve PDS endpoint from DID.
    ///
    /// Supports `did:plc:` (via the PLC directory) and `did:web:`
    /// (URL derived from the domain); anything else falls back to
    /// bsky.social.
    async fn resolve_pds_from_did(&self, did: &str) -> AppResult<String> {
        // This is a simplified resolution
        // In a real implementation, you would:
        // 1. Parse the DID to get the method and identifier
        // 2. Query the appropriate resolver (PLC directory, etc.)
        // 3. Get the serviceEndpoint for the PDS
        if did.starts_with("did:plc:") {
            // For PLC DIDs, query the PLC directory
            let plc_id = did.strip_prefix("did:plc:").unwrap();
            self.resolve_plc_did(plc_id).await
        } else if did.starts_with("did:web:") {
            // For web DIDs, construct URL from domain
            let domain = did.strip_prefix("did:web:").unwrap();
            Ok(format!("https://{}", domain))
        } else {
            // Fallback to Bluesky PDS
            Ok("https://bsky.social".to_string())
        }
    }

    /// Resolve PLC DID to PDS endpoint.
    ///
    /// Queries plc.directory for the DID document and extracts the
    /// `#atproto_pds` service endpoint; any failure falls back to
    /// bsky.social rather than erroring.
    async fn resolve_plc_did(&self, plc_id: &str) -> AppResult<String> {
        let response = self
            .client
            .get(&format!("https://plc.directory/{}", plc_id))
            .send()
            .await
            .map_err(AppError::HttpClient)?;
        if !response.status().is_success() {
            return Ok("https://bsky.social".to_string()); // Fallback
        }
        let did_doc: serde_json::Value = response
            .json()
            .await
            .map_err(AppError::HttpClient)?;
        // Extract PDS endpoint from DID document
        if let Some(services) = did_doc["service"].as_array() {
            for service in services {
                if service["id"] == "#atproto_pds" {
                    if let Some(endpoint) = service["serviceEndpoint"].as_str() {
                        return Ok(endpoint.to_string());
                    }
                }
            }
        }
        // Fallback to Bluesky
        Ok("https://bsky.social".to_string())
    }

    /// Authenticate with atproto and get session.
    ///
    /// Tries each known PDS endpoint in order and returns the first
    /// successful `(accessJwt, did)` pair.
    pub async fn authenticate(&self, identifier: &str, password: &str) -> AppResult<(String, String)> {
        // Try multiple PDS endpoints for authentication
        let pds_endpoints = [
            "https://bsky.social",
            "https://staging.bsky.app",
            // Add more PDS endpoints as needed
        ];
        for pds_url in pds_endpoints {
            match self.try_authenticate_at_pds(pds_url, identifier, password).await {
                Ok((session, did)) => return Ok((session, did)),
                Err(_) => continue, // Try next PDS
            }
        }
        Err(AppError::authentication("Failed to authenticate with any PDS"))
    }

    /// Try authentication at a specific PDS.
    ///
    /// Calls `com.atproto.server.createSession` and returns the
    /// `(accessJwt, did)` pair on success.
    async fn try_authenticate_at_pds(
        &self,
        pds_url: &str,
        identifier: &str,
        password: &str,
    ) -> AppResult<(String, String)> {
        let response = self
            .client
            .post(&format!("{}/xrpc/com.atproto.server.createSession", pds_url))
            .json(&json!({
                "identifier": identifier,
                "password": password
            }))
            .send()
            .await
            .map_err(AppError::HttpClient)?;
        if !response.status().is_success() {
            return Err(AppError::authentication("Invalid credentials"));
        }
        let result: serde_json::Value = response
            .json()
            .await
            .map_err(AppError::HttpClient)?;
        let access_jwt = result["accessJwt"]
            .as_str()
            .ok_or_else(|| AppError::authentication("No access token in response"))?;
        let did = result["did"]
            .as_str()
            .ok_or_else(|| AppError::authentication("No DID in response"))?;
        Ok((access_jwt.to_string(), did.to_string()))
    }
}

219
src/services/card_master.rs Normal file
View File

@ -0,0 +1,219 @@
use crate::{
error::{AppError, AppResult},
models::*,
};
use reqwest::Client;
use std::collections::HashMap;
/// Loads card master data from an external JSON source (ai.json), with a
/// built-in static fallback.
pub struct CardMasterService {
    client: Client,
    // URL of the external ai.json document.
    master_url: String,
}
impl CardMasterService {
pub fn new(master_url: String) -> Self {
Self {
client: Client::new(),
master_url,
}
}
/// Fetch card master data from external source (ai.json)
pub async fn fetch_external_card_data(&self) -> AppResult<Vec<ExternalCard>> {
let response = self
.client
.get(&self.master_url)
.timeout(std::time::Duration::from_secs(10))
.send()
.await
.map_err(AppError::HttpClient)?;
if !response.status().is_success() {
return Err(AppError::ExternalService(format!(
"Failed to fetch card data: HTTP {}",
response.status()
)));
}
let data: ExternalCardData = response
.json()
.await
.map_err(AppError::HttpClient)?;
Ok(data.ai.card.cards)
}
/// Get fallback card data if external fetch fails
pub fn get_fallback_card_data(&self) -> Vec<ExternalCard> {
vec![
ExternalCard {
id: 0,
name: "ai".to_string(),
cp: CpRange { min: 100, max: 200 },
color: "#4A90E2".to_string(),
skill: "Core existence essence".to_string(),
lang: None,
},
ExternalCard {
id: 1,
name: "dream".to_string(),
cp: CpRange { min: 90, max: 180 },
color: "#9B59B6".to_string(),
skill: "Vision manifestation".to_string(),
lang: None,
},
ExternalCard {
id: 2,
name: "radiance".to_string(),
cp: CpRange { min: 110, max: 220 },
color: "#F39C12".to_string(),
skill: "Brilliant energy".to_string(),
lang: None,
},
ExternalCard {
id: 3,
name: "neutron".to_string(),
cp: CpRange { min: 120, max: 240 },
color: "#34495E".to_string(),
skill: "Dense core power".to_string(),
lang: None,
},
ExternalCard {
id: 4,
name: "sun".to_string(),
cp: CpRange { min: 130, max: 260 },
color: "#E74C3C".to_string(),
skill: "Solar radiance".to_string(),
lang: None,
},
ExternalCard {
id: 5,
name: "night".to_string(),
cp: CpRange { min: 80, max: 160 },
color: "#2C3E50".to_string(),
skill: "Shadow stealth".to_string(),
lang: None,
},
ExternalCard {
id: 6,
name: "snow".to_string(),
cp: CpRange { min: 70, max: 140 },
color: "#ECF0F1".to_string(),
skill: "Crystal freeze".to_string(),
lang: None,
},
ExternalCard {
id: 7,
name: "thunder".to_string(),
cp: CpRange { min: 140, max: 280 },
color: "#F1C40F".to_string(),
skill: "Electric storm".to_string(),
lang: None,
},
ExternalCard {
id: 8,
name: "ultimate".to_string(),
cp: CpRange { min: 150, max: 300 },
color: "#8E44AD".to_string(),
skill: "Maximum form".to_string(),
lang: None,
},
ExternalCard {
id: 9,
name: "sword".to_string(),
cp: CpRange { min: 160, max: 320 },
color: "#95A5A6".to_string(),
skill: "Truth cutting".to_string(),
lang: None,
},
ExternalCard {
id: 10,
name: "destruction".to_string(),
cp: CpRange { min: 170, max: 340 },
color: "#C0392B".to_string(),
skill: "Entropy force".to_string(),
lang: None,
},
ExternalCard {
id: 11,
name: "earth".to_string(),
cp: CpRange { min: 90, max: 180 },
color: "#27AE60".to_string(),
skill: "Ground foundation".to_string(),
lang: None,
},
ExternalCard {
id: 12,
name: "galaxy".to_string(),
cp: CpRange { min: 180, max: 360 },
color: "#3498DB".to_string(),
skill: "Cosmic expanse".to_string(),
lang: None,
},
ExternalCard {
id: 13,
name: "create".to_string(),
cp: CpRange { min: 100, max: 200 },
color: "#16A085".to_string(),
skill: "Generation power".to_string(),
lang: None,
},
ExternalCard {
id: 14,
name: "supernova".to_string(),
cp: CpRange { min: 200, max: 400 },
color: "#E67E22".to_string(),
skill: "Stellar explosion".to_string(),
lang: None,
},
ExternalCard {
id: 15,
name: "world".to_string(),
cp: CpRange { min: 250, max: 500 },
color: "#9B59B6".to_string(),
skill: "Reality control".to_string(),
lang: None,
},
]
}
/// Get card master data, trying external source first then fallback
pub async fn get_card_master_data(&self) -> Vec<ExternalCard> {
match self.fetch_external_card_data().await {
Ok(cards) => {
tracing::info!("Fetched {} cards from external source", cards.len());
cards
}
Err(e) => {
tracing::warn!("Failed to fetch external card data: {}, using fallback", e);
self.get_fallback_card_data()
}
}
}
/// Convert external card data to database format
pub fn external_to_card_master(external: &ExternalCard) -> CardMaster {
let description = if let Some(lang) = &external.lang {
if let Some(ja) = &lang.ja {
if let Some(name) = &ja.name {
format!("{} - {}", name, external.skill)
} else {
external.skill.clone()
}
} else {
external.skill.clone()
}
} else {
external.skill.clone()
};
CardMaster {
id: external.id,
name: external.name.clone(),
base_cp_min: external.cp.min,
base_cp_max: external.cp.max,
color: external.color.clone(),
description,
}
}
}

541
src/services/gacha.rs Normal file
View File

@ -0,0 +1,541 @@
use crate::{
config::GachaConfig,
database::{Database, DatabaseTransaction},
error::{AppError, AppResult},
models::*,
query_as, query_one_as, query_optional_as,
services::CardMasterService,
};
use chrono::Utc;
use rand::Rng;
use std::collections::HashMap;
use uuid::Uuid;
/// Gacha draw engine: owns the probability configuration and performs
/// transactional card draws against either database backend.
pub struct GachaService {
    config: GachaConfig,
}
impl GachaService {
    /// Create a gacha service using the given probability configuration.
    pub fn new(config: GachaConfig) -> Self {
        Self { config }
    }
    /// Main gacha draw function
    ///
    /// Runs the whole draw in one transaction: user lookup/creation,
    /// rarity roll, card selection, CP roll, card + history inserts, and
    /// (for unique draws) the registry claim. Commits before returning.
    pub async fn draw_card(
        &self,
        db: &Database,
        user_did: &str,
        is_paid: bool,
        pool_id: Option<i32>,
    ) -> AppResult<CardDrawResponse> {
        let mut tx = db.begin().await?;
        // Get or create user
        let user = self.get_or_create_user(&mut tx, user_did).await?;
        // Determine card rarity
        let rarity = self.determine_rarity(is_paid, pool_id)?;
        // Select a card based on rarity and pool
        let card_master = self.select_card_master(&mut tx, &rarity, pool_id).await?;
        // Calculate CP based on rarity
        let cp = self.calculate_cp(&card_master, &rarity);
        // Check if this will be a unique card
        let is_unique = rarity == CardRarity::Unique;
        // For unique cards, check availability
        if is_unique {
            if let Some(_existing) = self.check_unique_card_availability(&mut tx, card_master.id).await? {
                // Unique card already taken, fallback to Kira.
                // NOTE(review): `tx` is still owned by this scope after the
                // early return below; unless draw_card_with_fallback commits
                // the transaction via the &mut borrow (it cannot call
                // `commit`, which takes `self` by value), the transaction is
                // dropped — i.e. rolled back — on return. Verify the
                // fallback path actually persists its inserts.
                return self.draw_card_with_fallback(&mut tx, user.id, &card_master, CardRarity::Kira, is_paid).await;
            }
        }
        // Create the user card
        let user_card = self.create_user_card(
            &mut tx,
            user.id,
            &card_master,
            cp,
            &rarity,
            is_unique,
        ).await?;
        // Record draw history
        let draw_history = self.record_draw_history(
            &mut tx,
            user.id,
            card_master.id,
            &rarity,
            cp,
            is_paid,
        ).await?;
        // Register unique card if applicable
        if is_unique {
            self.register_unique_card(&mut tx, &user_card, user_did).await?;
        }
        tx.commit().await?;
        Ok(CardDrawResponse {
            card: UserCardResponse {
                id: user_card.id,
                card_id: user_card.card_id,
                cp: user_card.cp,
                status: user_card.status,
                skill: user_card.skill,
                obtained_at: user_card.obtained_at,
                is_unique: user_card.is_unique,
                unique_id: user_card.unique_id,
            },
            master: CardMasterResponse {
                id: card_master.id,
                name: card_master.name,
                base_cp_min: card_master.base_cp_min,
                base_cp_max: card_master.base_cp_max,
                color: card_master.color,
                description: card_master.description,
            },
            is_unique,
            animation_type: self.get_animation_type(&rarity),
            draw_history_id: draw_history.id,
        })
    }
    /// Determine card rarity based on probabilities
    ///
    /// Rolls a uniform value in [0, 100) and walks the configured rarity
    /// buckets, accumulating each bucket's probability as it goes.
    ///
    /// NOTE(review): the config method is named `cumulative_probabilities`,
    /// but this loop accumulates the returned values again
    /// (`cumulative += prob`). If that method already returns running
    /// totals, as the name suggests, rarities are double-counted here —
    /// confirm it actually returns per-rarity (non-cumulative) values.
    fn determine_rarity(&self, is_paid: bool, _pool_id: Option<i32>) -> AppResult<CardRarity> {
        let mut rng = rand::thread_rng();
        let rand_val: f64 = rng.gen_range(0.0..100.0);
        let cumulative_probs = self.config.cumulative_probabilities(is_paid);
        let mut cumulative = 0.0;
        for (prob, rarity) in cumulative_probs {
            cumulative += prob;
            if rand_val < cumulative {
                return Ok(rarity);
            }
        }
        // Fallback to normal if no match (should never happen)
        Ok(CardRarity::Normal)
    }
    /// Select a card master based on rarity and pool
    ///
    /// Currently picks one uniformly random card; `rarity` and `_pool_id`
    /// do not influence the choice (rarity only affects CP/uniqueness).
    /// Errors if card_master is empty (`fetch_one`).
    ///
    /// NOTE(review): the `rarity` parameter is unused in this body.
    async fn select_card_master(
        &self,
        tx: &mut DatabaseTransaction,
        rarity: &CardRarity,
        _pool_id: Option<i32>,
    ) -> AppResult<CardMaster> {
        // For now, randomly select from all available cards
        // In a full implementation, this would consider pool restrictions
        let cards = match tx {
            DatabaseTransaction::Postgres(tx) => {
                // ORDER BY RANDOM() LIMIT 1: single random row on both backends.
                sqlx::query_as::<_, CardMaster>("SELECT * FROM card_master ORDER BY RANDOM() LIMIT 1")
                    .fetch_one(&mut **tx)
                    .await
                    .map_err(AppError::Database)?
            }
            DatabaseTransaction::Sqlite(tx) => {
                sqlx::query_as::<_, CardMaster>("SELECT * FROM card_master ORDER BY RANDOM() LIMIT 1")
                    .fetch_one(&mut **tx)
                    .await
                    .map_err(AppError::Database)?
            }
        };
        Ok(cards)
    }
/// Calculate CP based on base CP and rarity multiplier
fn calculate_cp(&self, card_master: &CardMaster, rarity: &CardRarity) -> i32 {
let mut rng = rand::thread_rng();
let base_cp = rng.gen_range(card_master.base_cp_min..=card_master.base_cp_max);
let multiplier = rarity.multiplier();
(base_cp as f64 * multiplier) as i32
}
    /// Check if a unique card is available
    ///
    /// Returns `Some(registry_row)` when the card is already claimed
    /// (i.e. NOT available) and `None` when it can still be won — note
    /// that callers treat `Some` as "taken" despite the function name.
    async fn check_unique_card_availability(
        &self,
        tx: &mut DatabaseTransaction,
        card_id: i32,
    ) -> AppResult<Option<UniqueCardRegistry>> {
        match tx {
            DatabaseTransaction::Postgres(tx) => {
                sqlx::query_as::<_, UniqueCardRegistry>(
                    "SELECT * FROM unique_card_registry WHERE card_id = $1"
                )
                .bind(card_id)
                .fetch_optional(&mut **tx)
                .await
                .map_err(AppError::Database)
            }
            DatabaseTransaction::Sqlite(tx) => {
                sqlx::query_as::<_, UniqueCardRegistry>(
                    "SELECT * FROM unique_card_registry WHERE card_id = ?"
                )
                .bind(card_id)
                .fetch_optional(&mut **tx)
                .await
                .map_err(AppError::Database)
            }
        }
    }
    /// Create a user card
    ///
    /// Inserts the drawn card and returns the stored row. Unique cards
    /// get a freshly generated UUID; others store NULL.
    ///
    /// NOTE(review): this INSERT targets a `user_id` column and never
    /// writes `skill`, but the returned `UserCard` row type (and the
    /// handlers' queries) expect a `user_did` column — confirm the
    /// user_cards schema; `RETURNING *` must produce the columns the
    /// `FromRow` impl expects or decoding will fail.
    async fn create_user_card(
        &self,
        tx: &mut DatabaseTransaction,
        user_id: i32,
        card_master: &CardMaster,
        cp: i32,
        rarity: &CardRarity,
        is_unique: bool,
    ) -> AppResult<UserCard> {
        let unique_id = if is_unique { Some(Uuid::new_v4()) } else { None };
        let now = Utc::now();
        match tx {
            DatabaseTransaction::Postgres(tx) => {
                sqlx::query_as::<_, UserCard>(
                    r#"
                    INSERT INTO user_cards (user_id, card_id, cp, status, obtained_at, is_unique, unique_id)
                    VALUES ($1, $2, $3, $4, $5, $6, $7)
                    RETURNING *
                    "#
                )
                .bind(user_id)
                .bind(card_master.id)
                .bind(cp)
                .bind(rarity)
                .bind(now)
                .bind(is_unique)
                .bind(unique_id)
                .fetch_one(&mut **tx)
                .await
                .map_err(AppError::Database)
            }
            DatabaseTransaction::Sqlite(tx) => {
                sqlx::query_as::<_, UserCard>(
                    r#"
                    INSERT INTO user_cards (user_id, card_id, cp, status, obtained_at, is_unique, unique_id)
                    VALUES (?, ?, ?, ?, ?, ?, ?)
                    RETURNING *
                    "#
                )
                .bind(user_id)
                .bind(card_master.id)
                .bind(cp)
                .bind(rarity)
                .bind(now)
                .bind(is_unique)
                .bind(unique_id)
                .fetch_one(&mut **tx)
                .await
                .map_err(AppError::Database)
            }
        }
    }
    /// Append one row to `draw_history` recording the outcome of a draw.
    ///
    /// As in `user_cards`, the rarity is bound into the `status` column
    /// (3rd placeholder). `drawn_at` is stamped with the current UTC time.
    async fn record_draw_history(
        &self,
        tx: &mut DatabaseTransaction,
        user_id: i32,
        card_id: i32,
        rarity: &CardRarity,
        cp: i32,
        is_paid: bool,
    ) -> AppResult<DrawHistory> {
        let now = Utc::now();
        match tx {
            DatabaseTransaction::Postgres(tx) => {
                sqlx::query_as::<_, DrawHistory>(
                    r#"
                    INSERT INTO draw_history (user_id, card_id, status, cp, is_paid, drawn_at)
                    VALUES ($1, $2, $3, $4, $5, $6)
                    RETURNING *
                    "#
                )
                .bind(user_id)
                .bind(card_id)
                .bind(rarity)
                .bind(cp)
                .bind(is_paid)
                .bind(now)
                .fetch_one(&mut **tx)
                .await
                .map_err(AppError::Database)
            }
            DatabaseTransaction::Sqlite(tx) => {
                sqlx::query_as::<_, DrawHistory>(
                    r#"
                    INSERT INTO draw_history (user_id, card_id, status, cp, is_paid, drawn_at)
                    VALUES (?, ?, ?, ?, ?, ?)
                    RETURNING *
                    "#
                )
                .bind(user_id)
                .bind(card_id)
                .bind(rarity)
                .bind(cp)
                .bind(is_paid)
                .bind(now)
                .fetch_one(&mut **tx)
                .await
                .map_err(AppError::Database)
            }
        }
    }
/// Register unique card
async fn register_unique_card(
&self,
tx: &mut DatabaseTransaction,
user_card: &UserCard,
owner_did: &str,
) -> AppResult<UniqueCardRegistry> {
let unique_id = user_card.unique_id.ok_or_else(|| {
AppError::Internal("Unique card must have unique_id".to_string())
})?;
match tx {
DatabaseTransaction::Postgres(tx) => {
sqlx::query_as::<_, UniqueCardRegistry>(
r#"
INSERT INTO unique_card_registry (unique_id, card_id, owner_did, obtained_at)
VALUES ($1, $2, $3, $4)
RETURNING *
"#
)
.bind(unique_id)
.bind(user_card.card_id)
.bind(owner_did)
.bind(user_card.obtained_at)
.fetch_one(&mut **tx)
.await
.map_err(AppError::Database)
}
DatabaseTransaction::Sqlite(tx) => {
sqlx::query_as::<_, UniqueCardRegistry>(
r#"
INSERT INTO unique_card_registry (unique_id, card_id, owner_did, obtained_at)
VALUES (?, ?, ?, ?)
RETURNING *
"#
)
.bind(unique_id)
.bind(user_card.card_id)
.bind(owner_did)
.bind(user_card.obtained_at)
.fetch_one(&mut **tx)
.await
.map_err(AppError::Database)
}
}
}
/// Get or create user by DID
async fn get_or_create_user(
&self,
tx: &mut DatabaseTransaction,
did: &str,
) -> AppResult<User> {
// Try to get existing user
let existing_user = match tx {
DatabaseTransaction::Postgres(tx) => {
sqlx::query_as::<_, User>("SELECT * FROM users WHERE did = $1")
.bind(did)
.fetch_optional(&mut **tx)
.await
.map_err(AppError::Database)?
}
DatabaseTransaction::Sqlite(tx) => {
sqlx::query_as::<_, User>("SELECT * FROM users WHERE did = ?")
.bind(did)
.fetch_optional(&mut **tx)
.await
.map_err(AppError::Database)?
}
};
if let Some(user) = existing_user {
return Ok(user);
}
// Create new user
let handle = did.split('.').next().unwrap_or("unknown").to_string();
let now = Utc::now();
match tx {
DatabaseTransaction::Postgres(tx) => {
sqlx::query_as::<_, User>(
"INSERT INTO users (did, handle, created_at, updated_at) VALUES ($1, $2, $3, $4) RETURNING *"
)
.bind(did)
.bind(&handle)
.bind(now)
.bind(now)
.fetch_one(&mut **tx)
.await
.map_err(AppError::Database)
}
DatabaseTransaction::Sqlite(tx) => {
sqlx::query_as::<_, User>(
"INSERT INTO users (did, handle, created_at, updated_at) VALUES (?, ?, ?, ?) RETURNING *"
)
.bind(did)
.bind(&handle)
.bind(now)
.bind(now)
.fetch_one(&mut **tx)
.await
.map_err(AppError::Database)
}
}
}
/// Draw card with fallback rarity (when unique is unavailable)
async fn draw_card_with_fallback(
&self,
tx: &mut DatabaseTransaction,
user_id: i32,
card_master: &CardMaster,
fallback_rarity: CardRarity,
is_paid: bool,
) -> AppResult<CardDrawResponse> {
let cp = self.calculate_cp(card_master, &fallback_rarity);
let user_card = self.create_user_card(
tx,
user_id,
card_master,
cp,
&fallback_rarity,
false,
).await?;
let draw_history = self.record_draw_history(
tx,
user_id,
card_master.id,
&fallback_rarity,
cp,
is_paid,
).await?;
Ok(CardDrawResponse {
card: UserCardResponse {
id: user_card.id,
card_id: user_card.card_id,
cp: user_card.cp,
status: user_card.status,
skill: user_card.skill,
obtained_at: user_card.obtained_at,
is_unique: user_card.is_unique,
unique_id: user_card.unique_id,
},
master: CardMasterResponse {
id: card_master.id,
name: card_master.name.clone(),
base_cp_min: card_master.base_cp_min,
base_cp_max: card_master.base_cp_max,
color: card_master.color.clone(),
description: card_master.description.clone(),
},
is_unique: false,
animation_type: self.get_animation_type(&fallback_rarity),
draw_history_id: draw_history.id,
})
}
/// Get animation type based on rarity
fn get_animation_type(&self, rarity: &CardRarity) -> String {
match rarity {
CardRarity::Normal => "normal".to_string(),
CardRarity::Rare => "sparkle".to_string(),
CardRarity::SuperRare => "glow".to_string(),
CardRarity::Kira => "rainbow".to_string(),
CardRarity::Unique => "legendary".to_string(),
}
}
    /// Get gacha statistics.
    ///
    /// Aggregates the total draw count and unique-card availability from
    /// the database and pairs them with the configured draw probabilities.
    pub async fn get_gacha_stats(&self, db: &Database) -> AppResult<GachaStatsResponse> {
        // Get total draws: every row ever written to draw_history.
        let total_draws: (i64,) = match db {
            Database::Postgres(pool) => {
                sqlx::query_as("SELECT COUNT(*) FROM draw_history")
                    .fetch_one(pool)
                    .await
                    .map_err(AppError::Database)?
            }
            Database::Sqlite(pool) => {
                sqlx::query_as("SELECT COUNT(*) FROM draw_history")
                    .fetch_one(pool)
                    .await
                    .map_err(AppError::Database)?
            }
        };
        // Get unique card counts as (registered uniques, remaining),
        // where remaining = total card masters minus registered uniques.
        let unique_counts: (i64, i64) = match db {
            Database::Postgres(pool) => {
                sqlx::query_as(
                    r#"
                    SELECT
                        COUNT(*) as total,
                        (SELECT COUNT(*) FROM card_master) - COUNT(*) as available
                    FROM unique_card_registry
                    "#
                )
                .fetch_one(pool)
                .await
                .map_err(AppError::Database)?
            }
            Database::Sqlite(pool) => {
                sqlx::query_as(
                    r#"
                    SELECT
                        COUNT(*) as total,
                        (SELECT COUNT(*) FROM card_master) - COUNT(*) as available
                    FROM unique_card_registry
                    "#
                )
                .fetch_one(pool)
                .await
                .map_err(AppError::Database)?
            }
        };
        // Get rarity distribution.
        // NOTE(review): all per-rarity counts are placeholders (always 0)
        // except `unique`; a full implementation would aggregate real counts
        // from the database.
        let rarity_breakdown = RarityBreakdown {
            normal: 0, // Would need actual counts from database
            rare: 0,
            super_rare: 0,
            kira: 0,
            unique: unique_counts.0 as i32,
        };
        Ok(GachaStatsResponse {
            probabilities: GachaProbabilities {
                normal: self.config.prob_normal,
                rare: self.config.prob_rare,
                super_rare: self.config.prob_super_rare,
                kira: self.config.prob_kira,
                unique: self.config.prob_unique,
                paid_multiplier: 2.0, // NOTE(review): hard-coded — presumably belongs in config
            },
            total_draws: total_draws.0 as i32,
            total_unique_cards: unique_counts.0 as i32,
            available_unique_cards: unique_counts.1 as i32,
            rarity_distribution: rarity_breakdown,
        })
    }
}

9
src/services/mod.rs Normal file
View File

@ -0,0 +1,9 @@
// Service layer modules: business logic between HTTP handlers and the database.
pub mod gacha;
pub mod card_master;
pub mod atproto;
pub mod user;
// Convenience re-exports so callers can write `use crate::services::GachaService;`.
pub use gacha::GachaService;
pub use card_master::CardMasterService;
pub use atproto::AtprotoService;
pub use user::UserService;

184
src/services/user.rs Normal file
View File

@ -0,0 +1,184 @@
use crate::{
database::Database,
error::{AppError, AppResult},
models::*,
};
use chrono::Utc;
/// Stateless namespace for user-related database queries; all methods are
/// associated functions taking the `Database` handle explicitly.
pub struct UserService;
impl UserService {
    /// Fetch a user row by atproto DID; `None` when the DID is unknown.
    pub async fn get_user_by_did(db: &Database, did: &str) -> AppResult<Option<User>> {
        match db {
            Database::Postgres(pool) => {
                sqlx::query_as::<_, User>("SELECT * FROM users WHERE did = $1")
                    .bind(did)
                    .fetch_optional(pool)
                    .await
                    .map_err(AppError::Database)
            }
            Database::Sqlite(pool) => {
                sqlx::query_as::<_, User>("SELECT * FROM users WHERE did = ?")
                    .bind(did)
                    .fetch_optional(pool)
                    .await
                    .map_err(AppError::Database)
            }
        }
    }
    /// Insert a new user with the given DID and handle, stamping both
    /// `created_at` and `updated_at` with the current UTC time.
    /// Fails at the database level if the DID already exists
    /// (`users.did` is declared UNIQUE).
    pub async fn create_user(db: &Database, did: &str, handle: &str) -> AppResult<User> {
        let now = Utc::now();
        match db {
            Database::Postgres(pool) => {
                sqlx::query_as::<_, User>(
                    "INSERT INTO users (did, handle, created_at, updated_at) VALUES ($1, $2, $3, $4) RETURNING *"
                )
                .bind(did)
                .bind(handle)
                .bind(now)
                .bind(now)
                .fetch_one(pool)
                .await
                .map_err(AppError::Database)
            }
            Database::Sqlite(pool) => {
                sqlx::query_as::<_, User>(
                    "INSERT INTO users (did, handle, created_at, updated_at) VALUES (?, ?, ?, ?) RETURNING *"
                )
                .bind(did)
                .bind(handle)
                .bind(now)
                .bind(now)
                .fetch_one(pool)
                .await
                .map_err(AppError::Database)
            }
        }
    }
    /// Update the stored handle for an existing user and bump `updated_at`.
    /// `fetch_one` on the RETURNING row makes this error when no user with
    /// the given DID exists.
    pub async fn update_user_handle(db: &Database, did: &str, handle: &str) -> AppResult<User> {
        let now = Utc::now();
        match db {
            Database::Postgres(pool) => {
                sqlx::query_as::<_, User>(
                    "UPDATE users SET handle = $1, updated_at = $2 WHERE did = $3 RETURNING *"
                )
                .bind(handle)
                .bind(now)
                .bind(did)
                .fetch_one(pool)
                .await
                .map_err(AppError::Database)
            }
            Database::Sqlite(pool) => {
                sqlx::query_as::<_, User>(
                    "UPDATE users SET handle = ?, updated_at = ? WHERE did = ? RETURNING *"
                )
                .bind(handle)
                .bind(now)
                .bind(did)
                .fetch_one(pool)
                .await
                .map_err(AppError::Database)
            }
        }
    }
    /// Count all cards owned by a user, keyed by DID.
    /// NOTE(review): this filters on `user_cards.user_did`, while the gacha
    /// service inserts into `user_cards (user_id, ...)` — confirm the table
    /// actually has a `user_did` column, otherwise this query fails at
    /// runtime.
    pub async fn get_user_card_count(db: &Database, user_did: &str) -> AppResult<i64> {
        match db {
            Database::Postgres(pool) => {
                let row: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM user_cards WHERE user_did = $1")
                    .bind(user_did)
                    .fetch_one(pool)
                    .await
                    .map_err(AppError::Database)?;
                Ok(row.0)
            }
            Database::Sqlite(pool) => {
                let row: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM user_cards WHERE user_did = ?")
                    .bind(user_did)
                    .fetch_one(pool)
                    .await
                    .map_err(AppError::Database)?;
                Ok(row.0)
            }
        }
    }
    /// Count only the unique cards owned by a user.
    /// The PostgreSQL arm compares `is_unique` against boolean `true`; the
    /// SQLite arm against integer `1` (SQLite has no native boolean).
    /// NOTE(review): same `user_did` vs `user_id` concern as
    /// `get_user_card_count` above.
    pub async fn get_user_unique_card_count(db: &Database, user_did: &str) -> AppResult<i64> {
        match db {
            Database::Postgres(pool) => {
                let row: (i64,) = sqlx::query_as(
                    "SELECT COUNT(*) FROM user_cards WHERE user_did = $1 AND is_unique = true"
                )
                .bind(user_did)
                .fetch_one(pool)
                .await
                .map_err(AppError::Database)?;
                Ok(row.0)
            }
            Database::Sqlite(pool) => {
                let row: (i64,) = sqlx::query_as(
                    "SELECT COUNT(*) FROM user_cards WHERE user_did = ? AND is_unique = 1"
                )
                .bind(user_did)
                .fetch_one(pool)
                .await
                .map_err(AppError::Database)?;
                Ok(row.0)
            }
        }
    }
    /// List a user's cards of one rarity, newest first, joined with their
    /// `card_master` data. The rarity string is matched against
    /// `user_cards.status` (the `status` column stores the rarity, matching
    /// how the gacha service writes it).
    /// NOTE(review): same `user_did` vs `user_id` concern as the count
    /// queries above.
    pub async fn get_user_cards_by_rarity(
        db: &Database,
        user_did: &str,
        rarity: CardRarity,
    ) -> AppResult<Vec<UserCardWithMasterQuery>> {
        match db {
            Database::Postgres(pool) => {
                sqlx::query_as::<_, UserCardWithMasterQuery>(
                    r#"
                    SELECT
                        uc.id, uc.user_did, uc.card_id, uc.cp, uc.status,
                        uc.obtained_at, uc.is_unique, uc.unique_id,
                        cm.id as master_id, cm.name, cm.base_cp_min, cm.base_cp_max,
                        cm.color, cm.description
                    FROM user_cards uc
                    JOIN card_master cm ON uc.card_id = cm.id
                    WHERE uc.user_did = $1 AND uc.status = $2
                    ORDER BY uc.obtained_at DESC
                    "#
                )
                .bind(user_did)
                .bind(rarity.as_str())
                .fetch_all(pool)
                .await
                .map_err(AppError::Database)
            }
            Database::Sqlite(pool) => {
                sqlx::query_as::<_, UserCardWithMasterQuery>(
                    r#"
                    SELECT
                        uc.id, uc.user_did, uc.card_id, uc.cp, uc.status,
                        uc.obtained_at, uc.is_unique, uc.unique_id,
                        cm.id as master_id, cm.name, cm.base_cp_min, cm.base_cp_max,
                        cm.color, cm.description
                    FROM user_cards uc
                    JOIN card_master cm ON uc.card_id = cm.id
                    WHERE uc.user_did = ? AND uc.status = ?
                    ORDER BY uc.obtained_at DESC
                    "#
                )
                .bind(user_did)
                .bind(rarity.as_str())
                .fetch_all(pool)
                .await
                .map_err(AppError::Database)
            }
        }
    }
}