@@ -1,34 +0,0 @@
use anyhow::Result;
use serde::{Deserialize, Serialize};
use crate::ai::gpt_client::GptClient;
use crate::ai::editor::Editor;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AiComment {
    pub content: String,
    pub author: String,
    pub timestamp: String,
}

pub struct CommentGenerator<'a> {
    client: &'a GptClient,
}

impl<'a> CommentGenerator<'a> {
    pub fn new(client: &'a GptClient) -> Self {
        Self { client }
    }

    pub async fn generate_comment(&self, post_title: &str, post_content: &str) -> Result<AiComment> {
        let editor = Editor::new(self.client);
        let comment_content = editor.add_ai_note(post_content, post_title).await?;

        let timestamp = chrono::Local::now().format("%Y-%m-%d %H:%M:%S").to_string();

        Ok(AiComment {
            content: comment_content,
            author: "AI (存在子)".to_string(),
            timestamp,
        })
    }
}

@@ -1,64 +0,0 @@
use anyhow::Result;
use crate::ai::gpt_client::GptClient;

pub struct Editor<'a> {
    client: &'a GptClient,
}

impl<'a> Editor<'a> {
    pub fn new(client: &'a GptClient) -> Self {
        Self { client }
    }

    pub async fn enhance(&self, content: &str, context: &str) -> Result<String> {
        let system_prompt = "You are a helpful content editor. Enhance the given content by:
1. Fixing any grammatical errors
2. Improving clarity and readability
3. Adding relevant information if needed
4. Maintaining the original tone and style
5. Preserving all Markdown formatting

Only return the enhanced content without explanations.";

        let user_prompt = format!(
            "Context: {}\n\nContent to enhance:\n{}",
            context, content
        );

        self.client.chat(system_prompt, &user_prompt).await
    }

    #[allow(dead_code)]
    pub async fn suggest_improvements(&self, content: &str) -> Result<Vec<String>> {
        let system_prompt = "You are a content analyzer. Analyze the given content and provide:
1. Suggestions for improving the content
2. Missing information that could be added
3. Potential SEO improvements
Return the suggestions as a JSON array of strings.";

        let response = self.client.chat(system_prompt, content).await?;

        // Parse JSON response
        match serde_json::from_str::<Vec<String>>(&response) {
            Ok(suggestions) => Ok(suggestions),
            Err(_) => {
                // Fallback: split by newlines if not valid JSON
                Ok(response.lines()
                    .filter(|s| !s.trim().is_empty())
                    .map(|s| s.to_string())
                    .collect())
            }
        }
    }

    pub async fn add_ai_note(&self, content: &str, topic: &str) -> Result<String> {
        let system_prompt = format!(
            "You are AI (存在子/ai). Add a brief, insightful comment about the topic '{}' \
             from your unique perspective. Keep it concise (1-2 sentences) and thoughtful. \
             Return only the comment text in Japanese.",
            topic
        );

        self.client.chat(&system_prompt, content).await
    }
}

@@ -1,87 +0,0 @@
use anyhow::Result;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use serde_json::json;

#[derive(Clone)]
pub struct GptClient {
    api_key: String,
    endpoint: String,
    client: Client,
}

#[derive(Serialize)]
struct ChatMessage {
    role: String,
    content: String,
}

#[derive(Deserialize)]
struct ChatResponse {
    choices: Vec<Choice>,
}

#[derive(Deserialize)]
struct Choice {
    message: MessageContent,
}

#[derive(Deserialize)]
struct MessageContent {
    content: String,
}

impl GptClient {
    pub fn new(api_key: String, endpoint: Option<String>) -> Self {
        let endpoint = endpoint.unwrap_or_else(|| {
            "https://api.openai.com/v1/chat/completions".to_string()
        });

        Self {
            api_key,
            endpoint,
            client: Client::new(),
        }
    }

    pub async fn chat(&self, system_prompt: &str, user_prompt: &str) -> Result<String> {
        let messages = vec![
            ChatMessage {
                role: "system".to_string(),
                content: system_prompt.to_string(),
            },
            ChatMessage {
                role: "user".to_string(),
                content: user_prompt.to_string(),
            },
        ];

        let body = json!({
            "model": "gpt-4o-mini",
            "messages": messages,
            "temperature": 0.7,
            "max_tokens": 4000,
        });

        let response = self.client
            .post(&self.endpoint)
            .header("Authorization", format!("Bearer {}", self.api_key))
            .header("Content-Type", "application/json")
            .json(&body)
            .send()
            .await?;

        if !response.status().is_success() {
            let error_text = response.text().await?;
            anyhow::bail!("GPT API error: {}", error_text);
        }

        let chat_response: ChatResponse = response.json().await?;

        if let Some(choice) = chat_response.choices.first() {
            Ok(choice.message.content.clone())
        } else {
            anyhow::bail!("No response from GPT API")
        }
    }
}

@@ -1,79 +0,0 @@
pub mod translator;
pub mod editor;
pub mod gpt_client;
pub mod comment;

pub use translator::Translator;
pub use editor::Editor;
pub use gpt_client::GptClient;
pub use comment::{AiComment, CommentGenerator};

use anyhow::Result;
use crate::config::AiConfig;

pub struct AiManager {
    config: AiConfig,
    gpt_client: Option<GptClient>,
}

impl AiManager {
    pub fn new(config: AiConfig) -> Self {
        let gpt_client = if config.enabled && config.api_key.is_some() {
            Some(GptClient::new(
                config.api_key.clone().unwrap(),
                config.gpt_endpoint.clone(),
            ))
        } else {
            None
        };

        Self {
            config,
            gpt_client,
        }
    }

    pub fn is_enabled(&self) -> bool {
        self.config.enabled && self.gpt_client.is_some()
    }

    pub async fn translate(&self, content: &str, from: &str, to: &str) -> Result<String> {
        if !self.is_enabled() || !self.config.auto_translate {
            return Ok(content.to_string());
        }

        if let Some(client) = &self.gpt_client {
            let translator = Translator::new(client);
            translator.translate(content, from, to).await
        } else {
            Ok(content.to_string())
        }
    }

    pub async fn enhance_content(&self, content: &str, context: &str) -> Result<String> {
        if !self.is_enabled() {
            return Ok(content.to_string());
        }

        if let Some(client) = &self.gpt_client {
            let editor = Editor::new(client);
            editor.enhance(content, context).await
        } else {
            Ok(content.to_string())
        }
    }

    pub async fn generate_comment(&self, post_title: &str, post_content: &str) -> Result<Option<AiComment>> {
        if !self.is_enabled() || !self.config.comment_moderation {
            return Ok(None);
        }

        if let Some(client) = &self.gpt_client {
            let generator = CommentGenerator::new(client);
            let comment = generator.generate_comment(post_title, post_content).await?;
            Ok(Some(comment))
        } else {
            Ok(None)
        }
    }
}

@@ -1,34 +0,0 @@
use anyhow::Result;
use crate::ai::gpt_client::GptClient;

pub struct Translator<'a> {
    client: &'a GptClient,
}

impl<'a> Translator<'a> {
    pub fn new(client: &'a GptClient) -> Self {
        Self { client }
    }

    pub async fn translate(&self, content: &str, from: &str, to: &str) -> Result<String> {
        let system_prompt = format!(
            "You are a professional translator. Translate the following text from {} to {}. \
             Maintain the original formatting, including Markdown syntax. \
             Only return the translated text without any explanations.",
            from, to
        );

        self.client.chat(&system_prompt, content).await
    }

    #[allow(dead_code)]
    pub async fn translate_post(&self, title: &str, content: &str, from: &str, to: &str) -> Result<(String, String)> {
        // Translate title
        let translated_title = self.translate(title, from, to).await?;

        // Translate content while preserving markdown structure
        let translated_content = self.translate(content, from, to).await?;

        Ok((translated_title, translated_content))
    }
}

@@ -1,313 +0,0 @@
pub mod rust_analyzer;

use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::{Path, PathBuf};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectInfo {
    pub name: String,
    pub description: Option<String>,
    pub version: String,
    pub authors: Vec<String>,
    pub license: Option<String>,
    pub dependencies: HashMap<String, String>,
    pub modules: Vec<ModuleInfo>,
    pub structure: ProjectStructure,
    pub metrics: ProjectMetrics,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModuleInfo {
    pub name: String,
    pub path: PathBuf,
    pub functions: Vec<FunctionInfo>,
    pub structs: Vec<StructInfo>,
    pub enums: Vec<EnumInfo>,
    pub traits: Vec<TraitInfo>,
    pub docs: Option<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FunctionInfo {
    pub name: String,
    pub visibility: String,
    pub is_async: bool,
    pub parameters: Vec<Parameter>,
    pub return_type: Option<String>,
    pub docs: Option<String>,
    pub line_number: usize,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Parameter {
    pub name: String,
    pub param_type: String,
    pub is_mutable: bool,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StructInfo {
    pub name: String,
    pub visibility: String,
    pub fields: Vec<FieldInfo>,
    pub docs: Option<String>,
    pub line_number: usize,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FieldInfo {
    pub name: String,
    pub field_type: String,
    pub visibility: String,
    pub docs: Option<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EnumInfo {
    pub name: String,
    pub visibility: String,
    pub variants: Vec<VariantInfo>,
    pub docs: Option<String>,
    pub line_number: usize,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VariantInfo {
    pub name: String,
    pub fields: Vec<FieldInfo>,
    pub docs: Option<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TraitInfo {
    pub name: String,
    pub visibility: String,
    pub methods: Vec<FunctionInfo>,
    pub docs: Option<String>,
    pub line_number: usize,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectStructure {
    pub directories: Vec<DirectoryInfo>,
    pub files: Vec<FileInfo>,
    pub dependency_graph: HashMap<String, Vec<String>>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DirectoryInfo {
    pub name: String,
    pub path: PathBuf,
    pub file_count: usize,
    pub subdirectories: Vec<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileInfo {
    pub name: String,
    pub path: PathBuf,
    pub language: String,
    pub lines_of_code: usize,
    pub is_test: bool,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectMetrics {
    pub total_lines: usize,
    pub total_files: usize,
    pub test_files: usize,
    pub dependency_count: usize,
    pub complexity_score: f32,
    pub test_coverage: Option<f32>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiInfo {
    pub modules: Vec<ModuleInfo>,
    pub public_functions: Vec<FunctionInfo>,
    pub public_structs: Vec<StructInfo>,
    pub public_enums: Vec<EnumInfo>,
    pub public_traits: Vec<TraitInfo>,
}

pub struct CodeAnalyzer {
    rust_analyzer: rust_analyzer::RustAnalyzer,
}

impl CodeAnalyzer {
    pub fn new() -> Self {
        Self {
            rust_analyzer: rust_analyzer::RustAnalyzer::new(),
        }
    }

    pub fn analyze_project(&self, path: &Path) -> Result<ProjectInfo> {
        println!("  🔍 Analyzing project at: {}", path.display());

        // Check if this is a Rust project
        let cargo_toml = path.join("Cargo.toml");
        if cargo_toml.exists() {
            return self.rust_analyzer.analyze_project(path);
        }

        // For now, only Rust projects are supported
        anyhow::bail!("Only Rust projects are currently supported");
    }

    pub fn analyze_api(&self, path: &Path) -> Result<ApiInfo> {
        println!("  📚 Analyzing API at: {}", path.display());

        let project_info = self.analyze_project(path.parent().unwrap_or(path))?;

        // Extract only public items
        let mut public_functions = Vec::new();
        let mut public_structs = Vec::new();
        let mut public_enums = Vec::new();
        let mut public_traits = Vec::new();

        for module in &project_info.modules {
            for func in &module.functions {
                if func.visibility == "pub" {
                    public_functions.push(func.clone());
                }
            }
            for struct_info in &module.structs {
                if struct_info.visibility == "pub" {
                    public_structs.push(struct_info.clone());
                }
            }
            for enum_info in &module.enums {
                if enum_info.visibility == "pub" {
                    public_enums.push(enum_info.clone());
                }
            }
            for trait_info in &module.traits {
                if trait_info.visibility == "pub" {
                    public_traits.push(trait_info.clone());
                }
            }
        }

        Ok(ApiInfo {
            modules: project_info.modules,
            public_functions,
            public_structs,
            public_enums,
            public_traits,
        })
    }

    pub fn analyze_structure(&self, path: &Path, include_deps: bool) -> Result<ProjectStructure> {
        println!("  🏗️ Analyzing structure at: {}", path.display());

        let mut directories = Vec::new();
        let mut files = Vec::new();
        let mut dependency_graph = HashMap::new();

        self.walk_directory(path, &mut directories, &mut files)?;

        if include_deps {
            dependency_graph = self.analyze_dependencies(path)?;
        }

        Ok(ProjectStructure {
            directories,
            files,
            dependency_graph,
        })
    }

    fn walk_directory(
        &self,
        path: &Path,
        directories: &mut Vec<DirectoryInfo>,
        files: &mut Vec<FileInfo>,
    ) -> Result<()> {
        use walkdir::WalkDir;

        let walker = WalkDir::new(path)
            .into_iter()
            .filter_entry(|e| {
                let name = e.file_name().to_string_lossy();
                // Skip hidden files and common build/cache directories
                !name.starts_with('.')
                    && name != "target"
                    && name != "node_modules"
                    && name != "dist"
            });

        for entry in walker {
            let entry = entry?;
            let entry_path = entry.path();
            // Record paths relative to the walk root
            let relative_path = entry_path.strip_prefix(path).unwrap_or(entry_path);

            if entry.file_type().is_dir() {
                let file_count = std::fs::read_dir(entry_path)?
                    .filter_map(|e| e.ok())
                    .filter(|e| e.file_type().map(|ft| ft.is_file()).unwrap_or(false))
                    .count();

                let subdirectories = std::fs::read_dir(entry_path)?
                    .filter_map(|e| e.ok())
                    .filter(|e| e.file_type().map(|ft| ft.is_dir()).unwrap_or(false))
                    .map(|e| e.file_name().to_string_lossy().to_string())
                    .collect();

                directories.push(DirectoryInfo {
                    name: entry_path.file_name().unwrap().to_string_lossy().to_string(),
                    path: relative_path.to_path_buf(),
                    file_count,
                    subdirectories,
                });
            } else if entry.file_type().is_file() {
                let language = self.detect_language(entry_path);
                let lines_of_code = self.count_lines(entry_path)?;
                let is_test = self.is_test_file(entry_path);

                files.push(FileInfo {
                    name: entry_path.file_name().unwrap().to_string_lossy().to_string(),
                    path: relative_path.to_path_buf(),
                    language,
                    lines_of_code,
                    is_test,
                });
            }
        }

        Ok(())
    }

    fn detect_language(&self, path: &Path) -> String {
        match path.extension().and_then(|s| s.to_str()) {
            Some("rs") => "rust".to_string(),
            Some("py") => "python".to_string(),
            Some("js") => "javascript".to_string(),
            Some("ts") => "typescript".to_string(),
            Some("md") => "markdown".to_string(),
            Some("toml") => "toml".to_string(),
            Some("json") => "json".to_string(),
            Some("yaml") | Some("yml") => "yaml".to_string(),
            _ => "unknown".to_string(),
        }
    }

    fn count_lines(&self, path: &Path) -> Result<usize> {
        let content = std::fs::read_to_string(path)?;
        Ok(content.lines().count())
    }

    fn is_test_file(&self, path: &Path) -> bool {
        let filename = path.file_name().unwrap().to_string_lossy();
        filename.contains("test")
            || filename.starts_with("test_")
            || path.to_string_lossy().contains("/tests/")
    }

    fn analyze_dependencies(&self, _path: &Path) -> Result<HashMap<String, Vec<String>>> {
        // For now, just return empty dependencies
        // TODO: Implement actual dependency analysis
        Ok(HashMap::new())
    }
}

@@ -1,512 +0,0 @@
use anyhow::Result;
use std::collections::HashMap;
use std::path::Path;
use syn::{visit::Visit, ItemEnum, ItemFn, ItemStruct, ItemTrait, Visibility};

use super::*;

pub struct RustAnalyzer;

impl RustAnalyzer {
    pub fn new() -> Self {
        Self
    }

    pub fn analyze_project(&self, path: &Path) -> Result<ProjectInfo> {
        // Parse Cargo.toml
        let cargo_toml_path = path.join("Cargo.toml");
        let cargo_content = std::fs::read_to_string(&cargo_toml_path)?;
        let cargo_toml: toml::Value = toml::from_str(&cargo_content)?;

        let package = cargo_toml.get("package").unwrap();
        let name = package.get("name").unwrap().as_str().unwrap().to_string();
        let description = package.get("description").map(|v| v.as_str().unwrap().to_string());
        let version = package.get("version").unwrap().as_str().unwrap().to_string();
        let authors = package
            .get("authors")
            .map(|v| {
                v.as_array()
                    .unwrap()
                    .iter()
                    .map(|a| a.as_str().unwrap().to_string())
                    .collect()
            })
            .unwrap_or_default();
        let license = package.get("license").map(|v| v.as_str().unwrap().to_string());

        // Parse dependencies
        let dependencies = self.parse_dependencies(&cargo_toml)?;

        // Analyze source code
        let src_path = path.join("src");
        let modules = self.analyze_modules(&src_path)?;

        // Calculate metrics
        let metrics = self.calculate_metrics(&modules, &dependencies);

        // Analyze structure
        let structure = self.analyze_project_structure(path)?;

        Ok(ProjectInfo {
            name,
            description,
            version,
            authors,
            license,
            dependencies,
            modules,
            structure,
            metrics,
        })
    }

    fn parse_dependencies(&self, cargo_toml: &toml::Value) -> Result<HashMap<String, String>> {
        let mut dependencies = HashMap::new();

        if let Some(deps) = cargo_toml.get("dependencies") {
            if let Some(deps_table) = deps.as_table() {
                for (name, value) in deps_table {
                    let version = match value {
                        toml::Value::String(v) => v.clone(),
                        toml::Value::Table(t) => {
                            t.get("version")
                                .and_then(|v| v.as_str())
                                .unwrap_or("*")
                                .to_string()
                        }
                        _ => "*".to_string(),
                    };
                    dependencies.insert(name.clone(), version);
                }
            }
        }

        Ok(dependencies)
    }

    fn analyze_modules(&self, src_path: &Path) -> Result<Vec<ModuleInfo>> {
        let mut modules = Vec::new();

        if !src_path.exists() {
            return Ok(modules);
        }

        // Walk through all .rs files
        for entry in walkdir::WalkDir::new(src_path) {
            let entry = entry?;
            if entry.file_type().is_file() {
                if let Some(extension) = entry.path().extension() {
                    if extension == "rs" {
                        if let Ok(module) = self.analyze_rust_file(entry.path()) {
                            modules.push(module);
                        }
                    }
                }
            }
        }

        Ok(modules)
    }

    fn analyze_rust_file(&self, file_path: &Path) -> Result<ModuleInfo> {
        let content = std::fs::read_to_string(file_path)?;
        let syntax_tree = syn::parse_file(&content)?;

        let mut visitor = RustVisitor::new();
        visitor.visit_file(&syntax_tree);

        let module_name = file_path
            .file_stem()
            .unwrap()
            .to_string_lossy()
            .to_string();

        // Extract module-level documentation
        let docs = self.extract_module_docs(&content);

        Ok(ModuleInfo {
            name: module_name,
            path: file_path.to_path_buf(),
            functions: visitor.functions,
            structs: visitor.structs,
            enums: visitor.enums,
            traits: visitor.traits,
            docs,
        })
    }

    fn extract_module_docs(&self, content: &str) -> Option<String> {
        let lines: Vec<&str> = content.lines().collect();
        let mut doc_lines = Vec::new();
        let mut in_module_doc = false;

        for line in lines {
            let trimmed = line.trim();
            if trimmed.starts_with("//!") {
                in_module_doc = true;
                doc_lines.push(trimmed.trim_start_matches("//!").trim());
            } else if trimmed.starts_with("/*!") {
                in_module_doc = true;
                let content = trimmed.trim_start_matches("/*!").trim_end_matches("*/").trim();
                doc_lines.push(content);
            } else if in_module_doc && !trimmed.is_empty() && !trimmed.starts_with("//") {
                break;
            }
        }

        if doc_lines.is_empty() {
            None
        } else {
            Some(doc_lines.join("\n"))
        }
    }

    fn calculate_metrics(&self, modules: &[ModuleInfo], dependencies: &HashMap<String, String>) -> ProjectMetrics {
        let total_lines = modules.iter().map(|m| {
            std::fs::read_to_string(&m.path)
                .map(|content| content.lines().count())
                .unwrap_or(0)
        }).sum();

        let total_files = modules.len();
        let test_files = modules.iter().filter(|m| {
            m.name.contains("test") || m.path.to_string_lossy().contains("/tests/")
        }).count();

        let dependency_count = dependencies.len();

        // Simple complexity calculation based on number of functions and structs
        let complexity_score = modules.iter().map(|m| {
            (m.functions.len() + m.structs.len() + m.enums.len() + m.traits.len()) as f32
        }).sum::<f32>() / modules.len().max(1) as f32;

        ProjectMetrics {
            total_lines,
            total_files,
            test_files,
            dependency_count,
            complexity_score,
            test_coverage: None, // TODO: Implement test coverage calculation
        }
    }

    fn analyze_project_structure(&self, path: &Path) -> Result<ProjectStructure> {
        let mut directories = Vec::new();
        let mut files = Vec::new();

        self.walk_directory(path, &mut directories, &mut files)?;

        Ok(ProjectStructure {
            directories,
            files,
            dependency_graph: HashMap::new(), // TODO: Implement dependency graph
        })
    }

    fn walk_directory(
        &self,
        path: &Path,
        directories: &mut Vec<DirectoryInfo>,
        files: &mut Vec<FileInfo>,
    ) -> Result<()> {
        for entry in walkdir::WalkDir::new(path).max_depth(3) {
            let entry = entry?;
            let relative_path = entry.path().strip_prefix(path)?;

            if entry.file_type().is_dir() && relative_path != Path::new("") {
                let file_count = std::fs::read_dir(entry.path())?
                    .filter_map(|e| e.ok())
                    .filter(|e| e.file_type().map(|ft| ft.is_file()).unwrap_or(false))
                    .count();

                let subdirectories = std::fs::read_dir(entry.path())?
                    .filter_map(|e| e.ok())
                    .filter(|e| e.file_type().map(|ft| ft.is_dir()).unwrap_or(false))
                    .map(|e| e.file_name().to_string_lossy().to_string())
                    .collect();

                directories.push(DirectoryInfo {
                    name: entry.path().file_name().unwrap().to_string_lossy().to_string(),
                    path: relative_path.to_path_buf(),
                    file_count,
                    subdirectories,
                });
            } else if entry.file_type().is_file() {
                let language = match entry.path().extension().and_then(|s| s.to_str()) {
                    Some("rs") => "rust".to_string(),
                    Some("toml") => "toml".to_string(),
                    Some("md") => "markdown".to_string(),
                    _ => "unknown".to_string(),
                };

                let lines_of_code = std::fs::read_to_string(entry.path())
                    .map(|content| content.lines().count())
                    .unwrap_or(0);

                let is_test = entry.path().to_string_lossy().contains("test");

                files.push(FileInfo {
                    name: entry.path().file_name().unwrap().to_string_lossy().to_string(),
                    path: relative_path.to_path_buf(),
                    language,
                    lines_of_code,
                    is_test,
                });
            }
        }

        Ok(())
    }
}

struct RustVisitor {
    functions: Vec<FunctionInfo>,
    structs: Vec<StructInfo>,
    enums: Vec<EnumInfo>,
    traits: Vec<TraitInfo>,
    // Line tracking is not implemented; every item is reported as line 1
    current_line: usize,
}

impl RustVisitor {
    fn new() -> Self {
        Self {
            functions: Vec::new(),
            structs: Vec::new(),
            enums: Vec::new(),
            traits: Vec::new(),
            current_line: 1,
        }
    }

    fn visibility_to_string(&self, vis: &Visibility) -> String {
        match vis {
            Visibility::Public(_) => "pub".to_string(),
            Visibility::Restricted(_) => "pub(restricted)".to_string(),
            Visibility::Inherited => "private".to_string(),
        }
    }

    fn extract_docs(&self, attrs: &[syn::Attribute]) -> Option<String> {
        let mut docs = Vec::new();
        for attr in attrs {
            if attr.path().is_ident("doc") {
                if let syn::Meta::NameValue(meta) = &attr.meta {
                    if let syn::Expr::Lit(expr_lit) = &meta.value {
                        if let syn::Lit::Str(lit_str) = &expr_lit.lit {
                            docs.push(lit_str.value());
                        }
                    }
                }
            }
        }
        if docs.is_empty() {
            None
        } else {
            Some(docs.join("\n"))
        }
    }
}

impl<'ast> Visit<'ast> for RustVisitor {
    fn visit_item_fn(&mut self, node: &'ast ItemFn) {
        let name = node.sig.ident.to_string();
        let visibility = self.visibility_to_string(&node.vis);
        let is_async = node.sig.asyncness.is_some();

        let parameters = node.sig.inputs.iter().map(|input| {
            match input {
                syn::FnArg::Receiver(_) => Parameter {
                    name: "self".to_string(),
                    param_type: "Self".to_string(),
                    is_mutable: false,
                },
                syn::FnArg::Typed(typed) => {
                    let name = match &*typed.pat {
                        syn::Pat::Ident(ident) => ident.ident.to_string(),
                        _ => "unknown".to_string(),
                    };
                    // Bind the type before quoting: `quote!(#typed.ty)` would
                    // interpolate `typed` and then append a literal `.ty`
                    let ty = &typed.ty;
                    Parameter {
                        name,
                        param_type: quote::quote!(#ty).to_string(),
                        is_mutable: false, // TODO: Detect mutability
                    }
                }
            }
        }).collect();

        let return_type = match &node.sig.output {
            syn::ReturnType::Default => None,
            syn::ReturnType::Type(_, ty) => Some(quote::quote!(#ty).to_string()),
        };

        let docs = self.extract_docs(&node.attrs);

        self.functions.push(FunctionInfo {
            name,
            visibility,
            is_async,
            parameters,
            return_type,
            docs,
            line_number: self.current_line,
        });

        syn::visit::visit_item_fn(self, node);
    }

    fn visit_item_struct(&mut self, node: &'ast ItemStruct) {
        let name = node.ident.to_string();
        let visibility = self.visibility_to_string(&node.vis);
        let docs = self.extract_docs(&node.attrs);

        let fields = match &node.fields {
            syn::Fields::Named(fields) => {
                fields.named.iter().map(|field| {
                    let ty = &field.ty;
                    FieldInfo {
                        name: field.ident.as_ref().unwrap().to_string(),
                        field_type: quote::quote!(#ty).to_string(),
                        visibility: self.visibility_to_string(&field.vis),
                        docs: self.extract_docs(&field.attrs),
                    }
                }).collect()
            }
            syn::Fields::Unnamed(fields) => {
                fields.unnamed.iter().enumerate().map(|(i, field)| {
                    let ty = &field.ty;
                    FieldInfo {
                        name: format!("field_{}", i),
                        field_type: quote::quote!(#ty).to_string(),
                        visibility: self.visibility_to_string(&field.vis),
                        docs: self.extract_docs(&field.attrs),
                    }
                }).collect()
            }
            syn::Fields::Unit => Vec::new(),
        };

        self.structs.push(StructInfo {
            name,
            visibility,
            fields,
            docs,
            line_number: self.current_line,
        });

        syn::visit::visit_item_struct(self, node);
    }

    fn visit_item_enum(&mut self, node: &'ast ItemEnum) {
        let name = node.ident.to_string();
        let visibility = self.visibility_to_string(&node.vis);
        let docs = self.extract_docs(&node.attrs);

        let variants = node.variants.iter().map(|variant| {
            let variant_name = variant.ident.to_string();
            let variant_docs = self.extract_docs(&variant.attrs);

            let fields = match &variant.fields {
                syn::Fields::Named(fields) => {
                    fields.named.iter().map(|field| {
                        let ty = &field.ty;
                        FieldInfo {
                            name: field.ident.as_ref().unwrap().to_string(),
                            field_type: quote::quote!(#ty).to_string(),
                            visibility: self.visibility_to_string(&field.vis),
                            docs: self.extract_docs(&field.attrs),
                        }
                    }).collect()
                }
                syn::Fields::Unnamed(fields) => {
                    fields.unnamed.iter().enumerate().map(|(i, field)| {
                        let ty = &field.ty;
                        FieldInfo {
                            name: format!("field_{}", i),
                            field_type: quote::quote!(#ty).to_string(),
                            visibility: self.visibility_to_string(&field.vis),
                            docs: self.extract_docs(&field.attrs),
                        }
                    }).collect()
                }
                syn::Fields::Unit => Vec::new(),
            };

            VariantInfo {
                name: variant_name,
                fields,
                docs: variant_docs,
            }
        }).collect();

        self.enums.push(EnumInfo {
            name,
            visibility,
            variants,
            docs,
            line_number: self.current_line,
        });

        syn::visit::visit_item_enum(self, node);
    }

    fn visit_item_trait(&mut self, node: &'ast ItemTrait) {
        let name = node.ident.to_string();
        let visibility = self.visibility_to_string(&node.vis);
        let docs = self.extract_docs(&node.attrs);

        let methods = node.items.iter().filter_map(|item| {
            match item {
                syn::TraitItem::Fn(method) => {
                    let method_name = method.sig.ident.to_string();
                    let method_visibility = "pub".to_string(); // Trait methods are inherently public
                    let is_async = method.sig.asyncness.is_some();

                    let parameters = method.sig.inputs.iter().map(|input| {
                        match input {
                            syn::FnArg::Receiver(_) => Parameter {
                                name: "self".to_string(),
                                param_type: "Self".to_string(),
                                is_mutable: false,
                            },
                            syn::FnArg::Typed(typed) => {
                                let name = match &*typed.pat {
                                    syn::Pat::Ident(ident) => ident.ident.to_string(),
                                    _ => "unknown".to_string(),
                                };
                                let ty = &typed.ty;
                                Parameter {
                                    name,
                                    param_type: quote::quote!(#ty).to_string(),
                                    is_mutable: false,
                                }
                            }
                        }
                    }).collect();

                    let return_type = match &method.sig.output {
                        syn::ReturnType::Default => None,
                        syn::ReturnType::Type(_, ty) => Some(quote::quote!(#ty).to_string()),
                    };

                    let method_docs = self.extract_docs(&method.attrs);

                    Some(FunctionInfo {
                        name: method_name,
                        visibility: method_visibility,
                        is_async,
                        parameters,
                        return_type,
                        docs: method_docs,
                        line_number: self.current_line,
                    })
                }
                _ => None,
            }
        }).collect();

        self.traits.push(TraitInfo {
            name,
            visibility,
            methods,
            docs,
            line_number: self.current_line,
        });

        syn::visit::visit_item_trait(self, node);
    }
}

@@ -1,112 +0,0 @@
use anyhow::Result;
use serde::{Deserialize, Serialize};
use reqwest::header::{AUTHORIZATION, CONTENT_TYPE};

#[derive(Debug, Clone)]
#[allow(dead_code)]
pub struct AtprotoClient {
    client: reqwest::Client,
    handle_resolver: String,
    access_token: Option<String>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct CreateRecordRequest {
    pub repo: String,
    pub collection: String,
    pub record: serde_json::Value,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct CreateRecordResponse {
    pub uri: String,
    pub cid: String,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct CommentRecord {
    #[serde(rename = "$type")]
    pub record_type: String,
    pub text: String,
    #[serde(rename = "createdAt")]
    pub created_at: String,
    #[serde(rename = "postUri")]
    pub post_uri: String,
    pub author: AuthorInfo,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct AuthorInfo {
    pub did: String,
    pub handle: String,
}

#[allow(dead_code)]
impl AtprotoClient {
    pub fn new(handle_resolver: String) -> Self {
        Self {
            client: reqwest::Client::new(),
            handle_resolver,
            access_token: None,
        }
    }

    pub fn set_access_token(&mut self, token: String) {
        self.access_token = Some(token);
    }

    pub async fn create_comment(&self, did: &str, post_uri: &str, text: &str) -> Result<CreateRecordResponse> {
        if self.access_token.is_none() {
            anyhow::bail!("Not authenticated");
        }

        let record = CommentRecord {
            record_type: "app.bsky.feed.post".to_string(),
            text: text.to_string(),
            created_at: chrono::Utc::now().to_rfc3339(),
            post_uri: post_uri.to_string(),
            author: AuthorInfo {
                did: did.to_string(),
                handle: "".to_string(), // Will be resolved by the server
            },
        };

        let request = CreateRecordRequest {
            repo: did.to_string(),
            collection: "app.bsky.feed.post".to_string(),
            record: serde_json::to_value(record)?,
        };

        let response = self.client
            .post(format!("{}/xrpc/com.atproto.repo.createRecord", self.handle_resolver))
            .header(AUTHORIZATION, format!("Bearer {}", self.access_token.as_ref().unwrap()))
            .header(CONTENT_TYPE, "application/json")
            .json(&request)
            .send()
            .await?;

        if response.status().is_success() {
            let result: CreateRecordResponse = response.json().await?;
            Ok(result)
        } else {
            let error_text = response.text().await?;
            anyhow::bail!("Failed to create comment: {}", error_text)
        }
    }

    pub async fn get_profile(&self, did: &str) -> Result<serde_json::Value> {
        let response = self.client
            .get(format!("{}/xrpc/app.bsky.actor.getProfile", self.handle_resolver))
            .query(&[("actor", did)])
            .header(AUTHORIZATION, format!("Bearer {}", self.access_token.as_ref().unwrap_or(&String::new())))
            .send()
            .await?;

        if response.status().is_success() {
            let profile = response.json().await?;
            Ok(profile)
        } else {
            anyhow::bail!("Failed to get profile")
        }
    }
}

@@ -1,123 +0,0 @@
use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use std::fs;
use crate::atproto::client::AtprotoClient;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Comment {
    pub id: String,
    pub author: String,
    pub author_did: String,
    pub content: String,
    pub timestamp: String,
    pub post_slug: String,
    pub atproto_uri: Option<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CommentStorage {
    pub comments: Vec<Comment>,
}

#[allow(dead_code)]
pub struct CommentSync {
    client: AtprotoClient,
    storage_path: PathBuf,
}

#[allow(dead_code)]
impl CommentSync {
    pub fn new(client: AtprotoClient, base_path: PathBuf) -> Self {
        let storage_path = base_path.join("data/comments.json");
        Self {
            client,
            storage_path,
        }
    }

    pub async fn load_comments(&self) -> Result<CommentStorage> {
        if self.storage_path.exists() {
            let content = fs::read_to_string(&self.storage_path)?;
            let storage: CommentStorage = serde_json::from_str(&content)?;
            Ok(storage)
        } else {
            Ok(CommentStorage { comments: vec![] })
        }
    }

    pub async fn save_comments(&self, storage: &CommentStorage) -> Result<()> {
        if let Some(parent) = self.storage_path.parent() {
            fs::create_dir_all(parent)?;
        }
        let content = serde_json::to_string_pretty(storage)?;
        fs::write(&self.storage_path, content)?;
        Ok(())
    }

    pub async fn add_comment(&mut self, post_slug: &str, author_did: &str, content: &str) -> Result<Comment> {
        // Get author profile
        let profile = self.client.get_profile(author_did).await?;
        let author_handle = profile["handle"].as_str().unwrap_or("unknown").to_string();

        // Create comment in atproto
        let post_uri = format!("ailog://post/{}", post_slug);
        let result = self.client.create_comment(author_did, &post_uri, content).await?;

        // Create local comment record
        let comment = Comment {
            id: uuid::Uuid::new_v4().to_string(),
            author: author_handle,
            author_did: author_did.to_string(),
            content: content.to_string(),
            timestamp: chrono::Local::now().format("%Y-%m-%d %H:%M:%S").to_string(),
            post_slug: post_slug.to_string(),
            atproto_uri: Some(result.uri),
        };

        // Save to local storage
        let mut storage = self.load_comments().await?;
        storage.comments.push(comment.clone());
        self.save_comments(&storage).await?;

        Ok(comment)
    }

    pub async fn get_comments_for_post(&self, post_slug: &str) -> Result<Vec<Comment>> {
        let storage = self.load_comments().await?;
        Ok(storage.comments
            .into_iter()
            .filter(|c| c.post_slug == post_slug)
            .collect())
    }
}

// Helper to generate comment HTML
#[allow(dead_code)]
pub fn render_comments_html(comments: &[Comment]) -> String {
    let mut html = String::from("<div class=\"comments\">\n");
    html.push_str("  <h3>コメント</h3>\n");

    if comments.is_empty() {
        html.push_str("  <p>まだコメントはありません。</p>\n");
    } else {
        for comment in comments {
            html.push_str(&format!(
                r#"  <div class="comment">
    <div class="comment-header">
      <span class="author">@{}</span>
      <span class="timestamp">{}</span>
    </div>
    <div class="comment-content">{}</div>
  </div>
"#,
                comment.author,
                comment.timestamp,
                comment.content
            ));
        }
    }

    html.push_str("</div>");
    html
}

@@ -1,4 +0,0 @@
pub mod oauth;
pub mod client;
pub mod comment_sync;
pub mod profile;

@@ -1,167 +0,0 @@
use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use crate::config::AtprotoConfig;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ClientMetadata {
    pub client_id: String,
    pub client_name: String,
    pub client_uri: String,
    pub logo_uri: String,
    pub tos_uri: String,
    pub policy_uri: String,
    pub redirect_uris: Vec<String>,
    pub scope: String,
    pub grant_types: Vec<String>,
    pub response_types: Vec<String>,
    pub token_endpoint_auth_method: String,
    pub application_type: String,
    pub dpop_bound_access_tokens: bool,
}

#[derive(Debug, Clone)]
#[allow(dead_code)]
pub struct OAuthHandler {
    config: AtprotoConfig,
    client: reqwest::Client,
}

#[derive(Debug, Serialize, Deserialize)]
#[allow(dead_code)]
pub struct AuthorizationRequest {
    pub response_type: String,
    pub client_id: String,
    pub redirect_uri: String,
    pub state: String,
    pub scope: String,
    pub code_challenge: String,
    pub code_challenge_method: String,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct TokenResponse {
    pub access_token: String,
    pub token_type: String,
    pub expires_in: u64,
    pub refresh_token: Option<String>,
    pub scope: String,
}

#[allow(dead_code)]
impl OAuthHandler {
    pub fn new(config: AtprotoConfig) -> Self {
        Self {
            config,
            client: reqwest::Client::new(),
        }
    }

    pub fn generate_client_metadata(&self) -> ClientMetadata {
        ClientMetadata {
            client_id: self.config.client_id.clone(),
            client_name: "ailog - AI-powered blog".to_string(),
            client_uri: "https://example.com".to_string(),
            logo_uri: "https://example.com/logo.png".to_string(),
            tos_uri: "https://example.com/tos".to_string(),
            policy_uri: "https://example.com/policy".to_string(),
            redirect_uris: vec![self.config.redirect_uri.clone()],
            scope: "atproto".to_string(),
            grant_types: vec!["authorization_code".to_string(), "refresh_token".to_string()],
            response_types: vec!["code".to_string()],
            token_endpoint_auth_method: "none".to_string(),
            application_type: "web".to_string(),
            dpop_bound_access_tokens: true,
        }
    }

    pub fn generate_authorization_url(&self, state: &str, code_challenge: &str) -> String {
        let params: Vec<(&str, &str)> = vec![
            ("response_type", "code"),
            ("client_id", &self.config.client_id),
            ("redirect_uri", &self.config.redirect_uri),
            ("state", state),
            ("scope", "atproto"),
            ("code_challenge", code_challenge),
            ("code_challenge_method", "S256"),
        ];

        let query = params.into_iter()
            .map(|(k, v)| format!("{}={}", k, urlencoding::encode(v)))
            .collect::<Vec<_>>()
            .join("&");

        format!("{}/oauth/authorize?{}", self.config.handle_resolver, query)
    }

    pub async fn exchange_code(&self, code: &str, code_verifier: &str) -> Result<TokenResponse> {
        let params: HashMap<&str, &str> = HashMap::from([
            ("grant_type", "authorization_code"),
            ("code", code),
            ("redirect_uri", &self.config.redirect_uri),
            ("client_id", &self.config.client_id),
            ("code_verifier", code_verifier),
        ]);

        let response = self.client
            .post(format!("{}/oauth/token", self.config.handle_resolver))
            .form(&params)
            .send()
            .await?;

        if response.status().is_success() {
            let token: TokenResponse = response.json().await?;
            Ok(token)
        } else {
            anyhow::bail!("Failed to exchange authorization code")
        }
    }

    pub async fn refresh_token(&self, refresh_token: &str) -> Result<TokenResponse> {
        let params: HashMap<&str, &str> = HashMap::from([
            ("grant_type", "refresh_token"),
            ("refresh_token", refresh_token),
            ("client_id", &self.config.client_id),
        ]);

        let response = self.client
            .post(format!("{}/oauth/token", self.config.handle_resolver))
            .form(&params)
            .send()
            .await?;

        if response.status().is_success() {
            let token: TokenResponse = response.json().await?;
            Ok(token)
        } else {
            anyhow::bail!("Failed to refresh token")
        }
    }
}

// PKCE helpers
#[allow(dead_code)]
pub fn generate_code_verifier() -> String {
    use rand::Rng;
    const CHARSET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~";
    let mut rng = rand::thread_rng();

    (0..128)
        .map(|_| {
            let idx = rng.gen_range(0..CHARSET.len());
            CHARSET[idx] as char
        })
        .collect()
}

#[allow(dead_code)]
pub fn generate_code_challenge(verifier: &str) -> String {
    use sha2::{Sha256, Digest};
    use base64::{Engine as _, engine::general_purpose};

    let mut hasher = Sha256::new();
    hasher.update(verifier.as_bytes());
    let result = hasher.finalize();

    general_purpose::URL_SAFE_NO_PAD.encode(result)
}

@@ -1,226 +0,0 @@
use anyhow::Result;
use serde::{Deserialize, Serialize};
use serde_json::Value;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NetworkConfig {
    pub pds_api: String,
    pub plc_api: String,
    pub bsky_api: String,
    pub web_url: String,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Profile {
    pub did: String,
    pub handle: String,
    pub display_name: Option<String>,
    pub avatar: Option<String>,
    pub description: Option<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RepoDescription {
    pub did: String,
    pub handle: String,
    #[serde(rename = "didDoc")]
    pub did_doc: DidDoc,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DidDoc {
    pub service: Vec<Service>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Service {
    #[serde(rename = "serviceEndpoint")]
    pub service_endpoint: String,
}

pub struct ProfileFetcher {
    client: reqwest::Client,
}

impl ProfileFetcher {
    pub fn new() -> Self {
        Self {
            client: reqwest::Client::builder()
                .timeout(std::time::Duration::from_secs(30))
                .build()
                .unwrap_or_else(|_| reqwest::Client::new()),
        }
    }

    /// Get network configuration based on the PDS host
    pub fn get_network_config(pds: &str) -> NetworkConfig {
        match pds {
            "bsky.social" | "bsky.app" => NetworkConfig {
                pds_api: format!("https://{}", pds),
                plc_api: "https://plc.directory".to_string(),
                bsky_api: "https://public.api.bsky.app".to_string(),
                web_url: "https://bsky.app".to_string(),
            },
            "syu.is" => NetworkConfig {
                pds_api: "https://syu.is".to_string(),
                plc_api: "https://plc.syu.is".to_string(),
                bsky_api: "https://bsky.syu.is".to_string(),
                web_url: "https://web.syu.is".to_string(),
            },
            _ => {
                // Default to the Bluesky network for unknown PDS hosts
                NetworkConfig {
                    pds_api: format!("https://{}", pds),
                    plc_api: "https://plc.directory".to_string(),
                    bsky_api: "https://public.api.bsky.app".to_string(),
                    web_url: "https://bsky.app".to_string(),
                }
            }
        }
    }

    /// Fetch the DID and PDS for a handle
    pub async fn describe_repo(&self, handle: &str, pds: &str) -> Result<RepoDescription> {
        let network_config = Self::get_network_config(pds);
        let url = format!("{}/xrpc/com.atproto.repo.describeRepo", network_config.pds_api);

        let response = self.client
            .get(&url)
            .query(&[("repo", handle)])
            .timeout(std::time::Duration::from_secs(10))
            .send()
            .await
            .map_err(|e| anyhow::anyhow!("Request failed: {}", e))?;

        if !response.status().is_success() {
            let status = response.status();
            let error_text = response.text().await.unwrap_or_default();
            return Err(anyhow::anyhow!("Failed to describe repo: {} - {}", status, error_text));
        }

        let repo_desc: RepoDescription = response.json().await?;
        Ok(repo_desc)
    }

    /// Get the user's PDS from their DID document
    pub fn extract_pds_from_repo_desc(repo_desc: &RepoDescription) -> Option<String> {
        repo_desc.did_doc.service.first().map(|service| {
            // Extract the hostname from the service endpoint
            let endpoint = &service.service_endpoint;
            if let Some(url) = endpoint.strip_prefix("https://") {
                if let Some(host) = url.split('/').next() {
                    return host.to_string();
                }
            }
            endpoint.clone()
        })
    }

    /// Fetch a profile from the bsky API
    pub async fn get_profile(&self, did: &str, pds: &str) -> Result<Profile> {
        let network_config = Self::get_network_config(pds);
        let url = format!("{}/xrpc/app.bsky.actor.getProfile", network_config.bsky_api);

        let response = self.client
            .get(&url)
            .query(&[("actor", did)])
            .timeout(std::time::Duration::from_secs(10))
            .send()
            .await
            .map_err(|e| anyhow::anyhow!("Request failed: {}", e))?;

        if !response.status().is_success() {
            let status = response.status();
            let error_text = response.text().await.unwrap_or_default();
            return Err(anyhow::anyhow!("Failed to get profile: {} - {}", status, error_text));
        }

        let profile_data: Value = response.json().await?;

        let profile = Profile {
            did: did.to_string(),
            handle: profile_data["handle"].as_str().unwrap_or("").to_string(),
            display_name: profile_data["displayName"].as_str().map(|s| s.to_string()),
            avatar: profile_data["avatar"].as_str().map(|s| s.to_string()),
            description: profile_data["description"].as_str().map(|s| s.to_string()),
        };

        Ok(profile)
    }

    /// Fetch complete profile information from a handle and PDS
    pub async fn fetch_profile_from_handle(&self, handle: &str, pds: &str) -> Result<Profile> {
        println!("🔍 Fetching profile for handle: {} from PDS: {}", handle, pds);

        // First, get the DID from the handle
        let repo_desc = self.describe_repo(handle, pds).await?;
        let did = repo_desc.did.clone();

        // Determine the actual PDS from the DID document
        let actual_pds = Self::extract_pds_from_repo_desc(&repo_desc)
            .unwrap_or_else(|| pds.to_string());

        println!("📍 Found DID: {} with PDS: {}", did, actual_pds);

        // Get the profile from the actual PDS
        let profile = self.get_profile(&did, &actual_pds).await?;

        println!("✅ Profile fetched: {} ({})", profile.display_name.as_deref().unwrap_or(&profile.handle), profile.did);

        Ok(profile)
    }

    /// Generate a profile URL for a given DID and PDS
    #[allow(dead_code)]
    pub fn generate_profile_url(did: &str, pds: &str) -> String {
        let network_config = Self::get_network_config(pds);
        match pds {
            "syu.is" => format!("https://syu.is/profile/{}", did),
            _ => format!("{}/profile/{}", network_config.web_url, did),
        }
    }

    /// Convert a Profile to the JSON format used by the application
    #[allow(dead_code)]
    pub fn profile_to_json(&self, profile: &Profile, _pds: &str) -> Value {
        serde_json::json!({
            "did": profile.did,
            "handle": profile.handle,
            "displayName": profile.display_name.as_deref().unwrap_or(&profile.handle),
            "avatar": profile.avatar.as_deref().unwrap_or(&format!("https://bsky.syu.is/img/avatar/plain/{}/default@jpeg", profile.did))
        })
    }
}

impl Default for ProfileFetcher {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_network_config() {
        let config = ProfileFetcher::get_network_config("syu.is");
        assert_eq!(config.pds_api, "https://syu.is");
        assert_eq!(config.bsky_api, "https://bsky.syu.is");

        let config = ProfileFetcher::get_network_config("bsky.social");
        assert_eq!(config.pds_api, "https://bsky.social");
        assert_eq!(config.bsky_api, "https://public.api.bsky.app");
    }

    #[test]
    fn test_profile_url_generation() {
        let did = "did:plc:test123";

        let url = ProfileFetcher::generate_profile_url(did, "syu.is");
        assert_eq!(url, "https://syu.is/profile/did:plc:test123");

        let url = ProfileFetcher::generate_profile_url(did, "bsky.social");
        assert_eq!(url, "https://bsky.app/profile/did:plc:test123");
    }
}

src/build.rs (new file, 202 lines)
@@ -0,0 +1,202 @@
|
||||
use anyhow::{Context, Result};
use pulldown_cmark::{html, Parser};
use serde::{Deserialize, Serialize};
use std::fs;

use crate::config::Config;

#[derive(Debug, Deserialize)]
#[allow(dead_code)]
struct ListRecordsResponse {
    records: Vec<Record>,
    cursor: Option<String>,
}

#[derive(Debug, Deserialize, Clone)]
#[allow(dead_code)]
struct Record {
    uri: String,
    cid: String,
    value: PostRecord,
}

#[derive(Debug, Deserialize, Serialize, Clone)]
struct PostRecord {
    title: String,
    content: String,
    #[serde(rename = "createdAt")]
    created_at: String,
}

pub async fn execute() -> Result<()> {
    let mut config = Config::load()?;

    // Refresh session before API calls
    crate::refresh::refresh_session(&mut config).await?;

    println!("Building static site from atproto records...");

    let pds_url = format!("https://{}", config.pds);
    let client = reqwest::Client::new();

    // List records
    let list_url = format!(
        "{}/xrpc/com.atproto.repo.listRecords?repo={}&collection=ai.syui.log.post&limit=100",
        pds_url, config.did
    );

    let res: ListRecordsResponse = client
        .get(&list_url)
        .send()
        .await
        .context("Failed to list records")?
        .json()
        .await
        .context("Failed to parse listRecords response")?;

    println!("Found {} posts", res.records.len());

    // Create output directory
    fs::create_dir_all("./public")?;
    fs::create_dir_all("./public/posts")?;

    // Generate index.html
    let mut index_html = String::from(
        r#"<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8">
    <title>Blog Posts</title>
</head>
<body>
    <h1>Posts</h1>
    <ul>
"#,
    );

    for record in &res.records {
        let rkey = record.uri.split('/').last().unwrap();
        index_html.push_str(&format!(
            r#"        <li><a href="/posts/{}.html">{}</a></li>
"#,
            rkey, record.value.title
        ));

        // Generate individual post page
        let parser = Parser::new(&record.value.content);
        let mut html_output = String::new();
        html::push_html(&mut html_output, parser);

        let post_html = format!(
            r#"<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8">
    <title>{}</title>
</head>
<body>
    <h1>{}</h1>
    <div>{}</div>
    <p><a href="/">← Back to list</a></p>
</body>
</html>"#,
            record.value.title, record.value.title, html_output
        );

        fs::write(format!("./public/posts/{}.html", rkey), post_html)?;
        println!("  ✓ Generated: posts/{}.html", rkey);
    }

    index_html.push_str(
        r#"    </ul>
</body>
</html>"#,
    );

    fs::write("./public/index.html", index_html)?;
    println!("  ✓ Generated: index.html");

    // Build browser app
    println!("\nBuilding AT Browser...");
    build_browser().await?;

    println!("\nDone! Site generated in ./public/");
    println!("  - Blog: ./public/index.html");
    println!("  - AT Browser: ./public/at/index.html");
    Ok(())
}

async fn build_browser() -> Result<()> {
    use std::process::Command;

    let browser_dir = "./browser";

    // Check if browser directory exists
    if !std::path::Path::new(browser_dir).exists() {
        println!("  ⚠ Browser directory not found, skipping");
        return Ok(());
    }

    // Run npm install if node_modules doesn't exist
    if !std::path::Path::new(&format!("{}/node_modules", browser_dir)).exists() {
        println!("  → Running npm install...");
        let status = Command::new("npm")
            .arg("install")
            .current_dir(browser_dir)
            .status()
            .context("Failed to run npm install")?;

        if !status.success() {
            anyhow::bail!("npm install failed");
        }
    }

    // Run npm run build
    println!("  → Running npm run build...");
    let status = Command::new("npm")
        .arg("run")
        .arg("build")
        .current_dir(browser_dir)
        .status()
        .context("Failed to run npm run build")?;

    if !status.success() {
        anyhow::bail!("npm run build failed");
    }

    // Copy dist to public/at
    let dist_dir = format!("{}/dist", browser_dir);
    let target_dir = "./public/at";

    if std::path::Path::new(&dist_dir).exists() {
        fs::create_dir_all(target_dir)?;
        copy_dir_all(&dist_dir, target_dir)?;
        println!("  ✓ Browser app deployed to ./public/at/");
    } else {
        println!("  ⚠ dist directory not found");
    }

    Ok(())
}

fn copy_dir_all(src: &str, dst: &str) -> Result<()> {
    use walkdir::WalkDir;

    for entry in WalkDir::new(src) {
        let entry = entry?;
        let path = entry.path();
        let relative = path.strip_prefix(src)?;
        let target = std::path::Path::new(dst).join(relative);

        if path.is_dir() {
            fs::create_dir_all(&target)?;
        } else {
            if let Some(parent) = target.parent() {
                fs::create_dir_all(parent)?;
            }
            fs::copy(path, &target)?;
        }
    }

    Ok(())
}
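
// A quick way to sanity-check copy_dir_all is a round-trip test on a nested
// file. This is a sketch only; the scratch paths under ./target are
// illustrative assumptions, not part of the commit.
#[cfg(test)]
mod copy_tests {
    use super::*;

    #[test]
    fn copies_nested_files() -> Result<()> {
        let src = "./target/copy-test-src";
        let dst = "./target/copy-test-dst";
        fs::create_dir_all(format!("{}/sub", src))?;
        fs::write(format!("{}/sub/a.txt", src), "hello")?;

        copy_dir_all(src, dst)?;

        // The relative layout should be preserved under dst.
        assert_eq!(fs::read_to_string(format!("{}/sub/a.txt", dst))?, "hello");
        Ok(())
    }
}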
@@ -1,691 +0,0 @@
use anyhow::{Result, Context};
use colored::Colorize;
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::PathBuf;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AuthConfig {
    pub admin: AdminConfig,
    pub jetstream: JetstreamConfig,
    pub collections: CollectionConfig,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AdminConfig {
    pub did: String,
    pub handle: String,
    pub access_jwt: String,
    pub refresh_jwt: String,
    pub pds: String,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct JetstreamConfig {
    pub url: String,
    pub collections: Vec<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CollectionConfig {
    pub base: String, // Base collection name like "ai.syui.log"
}

impl CollectionConfig {
    // Collection name builders
    pub fn comment(&self) -> String {
        self.base.clone()
    }

    pub fn user(&self) -> String {
        format!("{}.user", self.base)
    }

    #[allow(dead_code)]
    pub fn chat(&self) -> String {
        format!("{}.chat", self.base)
    }

    pub fn chat_lang(&self) -> String {
        format!("{}.chat.lang", self.base)
    }

    pub fn chat_comment(&self) -> String {
        format!("{}.chat.comment", self.base)
    }
}
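
// Everything hangs off the single `base` value; the builders above derive the
// full collection NSIDs from it. Sketch, assuming the default base:
//
//     let c = CollectionConfig { base: "ai.syui.log".to_string() };
//     c.comment()      -> "ai.syui.log"               (records live in the base itself)
//     c.user()         -> "ai.syui.log.user"
//     c.chat_lang()    -> "ai.syui.log.chat.lang"
//     c.chat_comment() -> "ai.syui.log.chat.comment"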

impl Default for AuthConfig {
    fn default() -> Self {
        Self {
            admin: AdminConfig {
                did: String::new(),
                handle: String::new(),
                access_jwt: String::new(),
                refresh_jwt: String::new(),
                pds: "https://bsky.social".to_string(),
            },
            jetstream: JetstreamConfig {
                url: "wss://jetstream2.us-east.bsky.network/subscribe".to_string(),
                collections: vec!["ai.syui.log".to_string()],
            },
            collections: CollectionConfig {
                base: "ai.syui.log".to_string(),
            },
        }
    }
}

fn get_config_path() -> Result<PathBuf> {
    let home = std::env::var("HOME").context("HOME environment variable not set")?;
    let config_dir = PathBuf::from(home).join(".config").join("syui").join("ai").join("log");

    // Create directory if it doesn't exist
    fs::create_dir_all(&config_dir)?;

    Ok(config_dir.join("config.json"))
}

#[allow(dead_code)]
pub async fn init() -> Result<()> {
    init_with_pds(None).await
}

pub async fn init_with_options(
    pds_override: Option<String>,
    handle_override: Option<String>,
    use_password: bool,
    access_jwt_override: Option<String>,
    refresh_jwt_override: Option<String>
) -> Result<()> {
    println!("{}", "🔐 Initializing ATProto authentication...".cyan());

    let config_path = get_config_path()?;

    if config_path.exists() {
        println!("{}", "⚠️  Configuration already exists. Use 'ailog auth logout' to reset.".yellow());
        return Ok(());
    }

    // Validate options
    if let (Some(_), Some(_)) = (&access_jwt_override, &refresh_jwt_override) {
        if use_password {
            println!("{}", "⚠️  Cannot use both --password and JWT tokens. Choose one method.".yellow());
            return Ok(());
        }
    } else if access_jwt_override.is_some() || refresh_jwt_override.is_some() {
        println!("{}", "❌ Both --access-jwt and --refresh-jwt must be provided together.".red());
        return Ok(());
    }

    println!("{}", "📋 Please provide your ATProto credentials:".cyan());

    // Get handle
    let handle = if let Some(h) = handle_override {
        h
    } else {
        print!("Handle (e.g., your.handle.bsky.social): ");
        std::io::Write::flush(&mut std::io::stdout())?;
        let mut input = String::new();
        std::io::stdin().read_line(&mut input)?;
        input.trim().to_string()
    };

    // Determine PDS URL
    let pds_url = if let Some(override_pds) = pds_override {
        if override_pds.starts_with("http") {
            override_pds
        } else {
            format!("https://{}", override_pds)
        }
    } else {
        if handle.ends_with(".syu.is") {
            "https://syu.is".to_string()
        } else {
            "https://bsky.social".to_string()
        }
    };

    println!("{}", format!("🌐 Using PDS: {}", pds_url).cyan());

    // Get credentials
    let (access_jwt, refresh_jwt) = if let (Some(access), Some(refresh)) = (access_jwt_override, refresh_jwt_override) {
        println!("{}", "🔑 Using provided JWT tokens".cyan());
        (access, refresh)
    } else if use_password {
        println!("{}", "🔒 Using password authentication".cyan());
        authenticate_with_password(&handle, &pds_url).await?
    } else {
        // Interactive JWT input (legacy behavior)
        print!("Access JWT: ");
        std::io::Write::flush(&mut std::io::stdout())?;
        let mut access_jwt = String::new();
        std::io::stdin().read_line(&mut access_jwt)?;
        let access_jwt = access_jwt.trim().to_string();

        print!("Refresh JWT: ");
        std::io::Write::flush(&mut std::io::stdout())?;
        let mut refresh_jwt = String::new();
        std::io::stdin().read_line(&mut refresh_jwt)?;
        let refresh_jwt = refresh_jwt.trim().to_string();

        (access_jwt, refresh_jwt)
    };

    // Resolve DID from handle
    println!("{}", "🔍 Resolving DID from handle...".cyan());
    let did = resolve_did_with_pds(&handle, &pds_url).await?;

    // Create config
    let config = AuthConfig {
        admin: AdminConfig {
            did: did.clone(),
            handle: handle.clone(),
            access_jwt,
            refresh_jwt,
            pds: pds_url,
        },
        jetstream: JetstreamConfig {
            url: "wss://jetstream2.us-east.bsky.network/subscribe".to_string(),
            collections: vec!["ai.syui.log".to_string()],
        },
        collections: generate_collection_config(),
    };

    // Save config
    let config_json = serde_json::to_string_pretty(&config)?;
    fs::write(&config_path, config_json)?;

    println!("{}", "✅ Authentication configured successfully!".green());
    println!("📁 Config saved to: {}", config_path.display());
    println!("👤 Authenticated as: {} ({})", handle, did);

    Ok(())
}

#[allow(dead_code)]
pub async fn init_with_pds(pds_override: Option<String>) -> Result<()> {
    println!("{}", "🔐 Initializing ATProto authentication...".cyan());

    let config_path = get_config_path()?;

    if config_path.exists() {
        println!("{}", "⚠️  Configuration already exists. Use 'ailog auth logout' to reset.".yellow());
        return Ok(());
    }

    println!("{}", "📋 Please provide your ATProto credentials:".cyan());

    // Get user input
    print!("Handle (e.g., your.handle.bsky.social): ");
    std::io::Write::flush(&mut std::io::stdout())?;
    let mut handle = String::new();
    std::io::stdin().read_line(&mut handle)?;
    let handle = handle.trim().to_string();

    print!("Access JWT: ");
    std::io::Write::flush(&mut std::io::stdout())?;
    let mut access_jwt = String::new();
    std::io::stdin().read_line(&mut access_jwt)?;
    let access_jwt = access_jwt.trim().to_string();

    print!("Refresh JWT: ");
    std::io::Write::flush(&mut std::io::stdout())?;
    let mut refresh_jwt = String::new();
    std::io::stdin().read_line(&mut refresh_jwt)?;
    let refresh_jwt = refresh_jwt.trim().to_string();

    // Determine PDS URL
    let pds_url = if let Some(override_pds) = pds_override {
        // Use provided PDS override
        if override_pds.starts_with("http") {
            override_pds
        } else {
            format!("https://{}", override_pds)
        }
    } else {
        // Auto-detect from handle suffix
        if handle.ends_with(".syu.is") || handle.ends_with(".syui.ai") {
            "https://syu.is".to_string()
        } else {
            "https://bsky.social".to_string()
        }
    };

    println!("{}", format!("🌐 Using PDS: {}", pds_url).cyan());

    // Resolve DID from handle
    println!("{}", "🔍 Resolving DID from handle...".cyan());
    let did = resolve_did_with_pds(&handle, &pds_url).await?;

    // Create config
    let config = AuthConfig {
        admin: AdminConfig {
            did: did.clone(),
            handle: handle.clone(),
            access_jwt,
            refresh_jwt,
            pds: pds_url,
        },
        jetstream: JetstreamConfig {
            url: "wss://jetstream2.us-east.bsky.network/subscribe".to_string(),
            collections: vec!["ai.syui.log".to_string()],
        },
        collections: generate_collection_config(),
    };

    // Save config
    let config_json = serde_json::to_string_pretty(&config)?;
    fs::write(&config_path, config_json)?;

    println!("{}", "✅ Authentication configured successfully!".green());
    println!("📁 Config saved to: {}", config_path.display());
    println!("👤 Authenticated as: {} ({})", handle, did);

    Ok(())
}

#[allow(dead_code)]
async fn resolve_did(handle: &str) -> Result<String> {
    let client = reqwest::Client::new();

    // Use appropriate API based on handle domain
    let api_base = if handle.ends_with(".syu.is") {
        "https://bsky.syu.is"
    } else {
        "https://public.api.bsky.app"
    };

    let url = format!("{}/xrpc/app.bsky.actor.getProfile?actor={}",
        api_base, urlencoding::encode(handle));

    let response = client.get(&url).send().await?;

    if !response.status().is_success() {
        return Err(anyhow::anyhow!("Failed to resolve handle: {}", response.status()));
    }

    let profile: serde_json::Value = response.json().await?;
    let did = profile["did"].as_str()
        .ok_or_else(|| anyhow::anyhow!("DID not found in profile response"))?;

    Ok(did.to_string())
}

async fn resolve_did_with_pds(handle: &str, pds_url: &str) -> Result<String> {
    let client = reqwest::Client::new();

    // Try to use the PDS API first
    let api_base = if pds_url.contains("syu.is") {
        "https://bsky.syu.is"
    } else if pds_url.contains("bsky.social") {
        "https://public.api.bsky.app"
    } else {
        // For custom PDS, try to construct API URL
        pds_url
    };

    let url = format!("{}/xrpc/app.bsky.actor.getProfile?actor={}",
        api_base, urlencoding::encode(handle));

    let response = client.get(&url).send().await?;

    if !response.status().is_success() {
        return Err(anyhow::anyhow!("Failed to resolve handle using PDS {}: {}", pds_url, response.status()));
    }

    let profile: serde_json::Value = response.json().await?;
    let did = profile["did"].as_str()
        .ok_or_else(|| anyhow::anyhow!("DID not found in profile response"))?;

    Ok(did.to_string())
}

async fn authenticate_with_password(handle: &str, pds_url: &str) -> Result<(String, String)> {
    use std::io::{self, Write};

    // Get password securely
    print!("Password: ");
    io::stdout().flush()?;
    let password = rpassword::read_password()
        .context("Failed to read password")?;

    if password.is_empty() {
        return Err(anyhow::anyhow!("Password cannot be empty"));
    }

    println!("{}", "🔐 Authenticating with ATProto server...".cyan());

    let client = reqwest::Client::new();
    let auth_url = format!("{}/xrpc/com.atproto.server.createSession", pds_url);

    let auth_request = serde_json::json!({
        "identifier": handle,
        "password": password
    });

    let response = client
        .post(&auth_url)
        .header("Content-Type", "application/json")
        .json(&auth_request)
        .send()
        .await?;

    if !response.status().is_success() {
        let status = response.status();
        let error_text = response.text().await.unwrap_or_default();

        if status.as_u16() == 401 {
            return Err(anyhow::anyhow!("Authentication failed: Invalid handle or password"));
        } else if status.as_u16() == 400 {
            return Err(anyhow::anyhow!("Authentication failed: Bad request (check handle format)"));
        } else {
            return Err(anyhow::anyhow!("Authentication failed: {} - {}", status, error_text));
        }
    }

    let auth_response: serde_json::Value = response.json().await?;

    let access_jwt = auth_response["accessJwt"].as_str()
        .ok_or_else(|| anyhow::anyhow!("No access JWT in response"))?
        .to_string();

    let refresh_jwt = auth_response["refreshJwt"].as_str()
        .ok_or_else(|| anyhow::anyhow!("No refresh JWT in response"))?
        .to_string();

    println!("{}", "✅ Password authentication successful".green());

    Ok((access_jwt, refresh_jwt))
}
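
// The createSession response above is read as untyped serde_json::Value; a
// typed equivalent would be a sketch like this (not what the commit uses):
//
//     #[derive(serde::Deserialize)]
//     #[serde(rename_all = "camelCase")]
//     struct SessionResponse {
//         access_jwt: String,
//         refresh_jwt: String,
//     }
//
//     let session: SessionResponse = response.json().await?;
//     Ok((session.access_jwt, session.refresh_jwt))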

pub async fn status() -> Result<()> {
    let config_path = get_config_path()?;

    if !config_path.exists() {
        println!("{}", "❌ Not authenticated. Run 'ailog auth init' first.".red());
        return Ok(());
    }

    let config_json = fs::read_to_string(&config_path)?;
    let config: AuthConfig = serde_json::from_str(&config_json)?;

    println!("{}", "🔐 Authentication Status".cyan().bold());
    println!("─────────────────────────");
    println!("📁 Config: {}", config_path.display());
    println!("👤 Handle: {}", config.admin.handle.green());
    println!("🆔 DID: {}", config.admin.did);
    println!("🌐 PDS: {}", config.admin.pds);
    println!("📡 Jetstream: {}", config.jetstream.url);
    println!("📂 Collections: {}", config.jetstream.collections.join(", "));

    // Test API access
    println!("\n{}", "🧪 Testing API access...".cyan());
    match test_api_access_with_auth(&config).await {
        Ok(_) => println!("{}", "✅ API access successful".green()),
        Err(e) => {
            println!("{}", format!("❌ Authenticated API access failed: {}", e).red());
            // Fallback to public API test
            println!("{}", "🔄 Trying public API access...".cyan());
            match test_api_access(&config).await {
                Ok(_) => println!("{}", "✅ Public API access successful".green()),
                Err(e2) => println!("{}", format!("❌ Public API access also failed: {}", e2).red()),
            }
        }
    }

    Ok(())
}

async fn test_api_access(config: &AuthConfig) -> Result<()> {
    let client = reqwest::Client::new();

    // Use appropriate API based on handle domain
    let api_base = if config.admin.handle.ends_with(".syu.is") {
        "https://bsky.syu.is"
    } else {
        "https://public.api.bsky.app"
    };

    let url = format!("{}/xrpc/app.bsky.actor.getProfile?actor={}",
        api_base, urlencoding::encode(&config.admin.handle));

    let response = client.get(&url).send().await?;

    if !response.status().is_success() {
        return Err(anyhow::anyhow!("API request failed: {}", response.status()));
    }

    Ok(())
}

pub async fn logout() -> Result<()> {
    let config_path = get_config_path()?;

    if !config_path.exists() {
        println!("{}", "ℹ️  Already logged out.".blue());
        return Ok(());
    }

    println!("{}", "🔓 Logging out...".cyan());

    // Remove config file
    fs::remove_file(&config_path)?;

    println!("{}", "✅ Logged out successfully!".green());
    println!("🗑️  Configuration removed from: {}", config_path.display());

    Ok(())
}

// Load config helper function for other modules
pub fn load_config() -> Result<AuthConfig> {
    let config_path = get_config_path()?;

    if !config_path.exists() {
        return Err(anyhow::anyhow!("Not authenticated. Run 'ailog auth init' first."));
    }

    let config_json = fs::read_to_string(&config_path)?;

    // Try to load as new format first, then migrate if needed
    match serde_json::from_str::<AuthConfig>(&config_json) {
        Ok(mut config) => {
            // Update collection configuration
            update_config_collections(&mut config);
            Ok(config)
        }
        Err(e) => {
            println!("{}", format!("Parse error: {}, attempting migration...", e).yellow());
            // Try to migrate from old format
            migrate_config_if_needed(&config_path, &config_json)
        }
    }
}

fn migrate_config_if_needed(config_path: &std::path::Path, config_json: &str) -> Result<AuthConfig> {
    // Try to parse as old format and migrate to new simple format
    let mut old_config: serde_json::Value = serde_json::from_str(config_json)?;

    // Migrate old collections structure to new base-only structure
    if let Some(collections) = old_config.get_mut("collections") {
        // Extract base collection name from comment field or use default
        let base_collection = collections.get("comment")
            .and_then(|v| v.as_str())
            .unwrap_or("ai.syui.log")
            .to_string();

        // Replace entire collections structure with new format
        old_config["collections"] = serde_json::json!({
            "base": base_collection
        });
    }

    // Save migrated config
    let migrated_config_json = serde_json::to_string_pretty(&old_config)?;
    fs::write(config_path, migrated_config_json)?;

    // Parse as new format
    let mut config: AuthConfig = serde_json::from_value(old_config)?;
    update_config_collections(&mut config);

    println!("{}", "✅ Configuration migrated to new simplified format".green());

    Ok(config)
}
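
// For reference, the migration rewrites `collections` from the old per-kind
// shape to the new base-only shape (illustrative values):
//
//     before: { "collections": { "comment": "ai.syui.log", ... } }
//     after:  { "collections": { "base": "ai.syui.log" } }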

// Load config with automatic token refresh
pub async fn load_config_with_refresh() -> Result<AuthConfig> {
    let mut config = load_config()?;
    let old_access_jwt = config.admin.access_jwt.clone();

    // Always try to refresh token to avoid any expiration issues
    println!("{}", "🔄 Refreshing access token...".yellow());
    println!("📍 Current access JWT: {}...", &old_access_jwt[..30.min(old_access_jwt.len())]);

    // Try to refresh the token
    match refresh_access_token(&mut config).await {
        Ok(_) => {
            if config.admin.access_jwt != old_access_jwt {
                println!("{}", "✅ Token refreshed with new JWT".green());
                println!("📍 New access JWT: {}...", &config.admin.access_jwt[..30.min(config.admin.access_jwt.len())]);
                save_config(&config)?;
                println!("{}", "💾 Config saved to disk".green());
            } else {
                println!("{}", "ℹ️  Token refresh returned same JWT (still valid)".cyan());
            }
        }
        Err(e) => {
            // If refresh fails, test if current token is still valid
            if let Ok(_) = test_api_access_with_auth(&config).await {
                println!("{}", "ℹ️  Refresh failed but current token is still valid".cyan());
            } else {
                return Err(anyhow::anyhow!("Token expired and refresh failed: {}. Please run 'ailog auth init' again.", e));
            }
        }
    }

    // Update collection configuration
    update_config_collections(&mut config);

    Ok(config)
}

async fn test_api_access_with_auth(config: &AuthConfig) -> Result<()> {
    let client = reqwest::Client::new();
    let url = format!("{}/xrpc/com.atproto.repo.listRecords?repo={}&collection={}&limit=1",
        config.admin.pds,
        urlencoding::encode(&config.admin.did),
        urlencoding::encode(&config.collections.comment()));

    let response = client
        .get(&url)
        .header("Authorization", format!("Bearer {}", config.admin.access_jwt))
        .send()
        .await?;

    if !response.status().is_success() {
        return Err(anyhow::anyhow!("API request failed: {}", response.status()));
    }

    Ok(())
}

async fn refresh_access_token(config: &mut AuthConfig) -> Result<()> {
    let client = reqwest::Client::new();
    let url = format!("{}/xrpc/com.atproto.server.refreshSession", config.admin.pds);

    println!("🔑 Refreshing token at: {}", url);
    println!("🔑 Using refresh JWT: {}...", &config.admin.refresh_jwt[..20.min(config.admin.refresh_jwt.len())]);

    let response = client
        .post(&url)
        .header("Authorization", format!("Bearer {}", config.admin.refresh_jwt))
        .send()
        .await?;

    if !response.status().is_success() {
        let status = response.status();
        let error_text = response.text().await?;
        return Err(anyhow::anyhow!("Token refresh failed: {} - {}", status, error_text));
    }

    let refresh_response: serde_json::Value = response.json().await?;

    // Update tokens
    if let Some(access_jwt) = refresh_response["accessJwt"].as_str() {
        config.admin.access_jwt = access_jwt.to_string();
        println!("✅ New access JWT: {}...", &access_jwt[..20.min(access_jwt.len())]);
    } else {
        println!("⚠️  No accessJwt in refresh response");
    }

    if let Some(refresh_jwt) = refresh_response["refreshJwt"].as_str() {
        config.admin.refresh_jwt = refresh_jwt.to_string();
        println!("✅ New refresh JWT: {}...", &refresh_jwt[..20.min(refresh_jwt.len())]);
    } else {
        println!("⚠️  No refreshJwt in refresh response");
    }

    Ok(())
}
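
// Callers about to hit authenticated endpoints are expected to go through
// load_config_with_refresh() rather than load_config(). Sketch of the
// intended pattern (the createRecord call is illustrative only):
//
//     let config = load_config_with_refresh().await?; // refreshes + persists tokens
//     client
//         .post(format!("{}/xrpc/com.atproto.repo.createRecord", config.admin.pds))
//         .header("Authorization", format!("Bearer {}", config.admin.access_jwt))
//         .json(&serde_json::json!({
//             "repo": config.admin.did,
//             "collection": config.collections.comment(),
//             "record": { /* record body */ },
//         }))
//         .send()
//         .await?;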

fn save_config(config: &AuthConfig) -> Result<()> {
    let config_path = get_config_path()?;
    println!("💾 Saving config to: {}", config_path.display());

    // Read old config to compare
    let old_config = if config_path.exists() {
        fs::read_to_string(&config_path).ok()
    } else {
        None
    };

    let config_json = serde_json::to_string_pretty(config)?;
    fs::write(&config_path, &config_json)?;

    // Verify the write was successful
    let saved_content = fs::read_to_string(&config_path)?;
    if saved_content == config_json {
        println!("✅ Config successfully saved to {}", config_path.display());

        // Compare tokens if old config exists
        if let Some(old) = old_config {
            if let (Ok(old_json), Ok(new_json)) = (
                serde_json::from_str::<AuthConfig>(&old),
                serde_json::from_str::<AuthConfig>(&config_json)
            ) {
                if old_json.admin.access_jwt != new_json.admin.access_jwt {
                    println!("📝 Access JWT was updated in file");
                    println!("   Old: {}...", &old_json.admin.access_jwt[..30.min(old_json.admin.access_jwt.len())]);
                    println!("   New: {}...", &new_json.admin.access_jwt[..30.min(new_json.admin.access_jwt.len())]);
                }
                if old_json.admin.refresh_jwt != new_json.admin.refresh_jwt {
                    println!("📝 Refresh JWT was updated in file");
                }
            }
        }
    } else {
        println!("❌ Config save verification failed!");
    }

    Ok(())
}

// Generate collection config from environment
fn generate_collection_config() -> CollectionConfig {
    // Use VITE_OAUTH_COLLECTION for unified configuration
    let base = std::env::var("VITE_OAUTH_COLLECTION")
        .unwrap_or_else(|_| "ai.syui.log".to_string());

    CollectionConfig {
        base,
    }
}
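
// This is how the CLI and the OAuth front end stay in sync: both read the
// same VITE_OAUTH_COLLECTION variable. Sketch of the effect, using a
// hypothetical collection name:
//
//     std::env::set_var("VITE_OAUTH_COLLECTION", "com.example.blog");
//     assert_eq!(generate_collection_config().base, "com.example.blog");
//
//     std::env::remove_var("VITE_OAUTH_COLLECTION");
//     assert_eq!(generate_collection_config().base, "ai.syui.log"); // default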

// Update existing config with collection settings
pub fn update_config_collections(config: &mut AuthConfig) {
    config.collections = generate_collection_config();
    // Also update jetstream collections to monitor the comment collection
    config.jetstream.collections = vec![config.collections.comment()];
}
@@ -1,128 +0,0 @@
use anyhow::Result;
use colored::Colorize;
use std::path::PathBuf;
use std::fs;
use crate::generator::Generator;
use crate::config::Config;

pub async fn execute(path: PathBuf) -> Result<()> {
    println!("{}", "Building blog...".green());

    // Load configuration
    let config = Config::load(&path)?;

    // Generate OAuth .env.production if oauth directory exists
    let oauth_dir = path.join("oauth");
    if oauth_dir.exists() {
        generate_oauth_env(&path, &config)?;
    }

    // Create generator
    let generator = Generator::new(path, config)?;

    // Build the site
    generator.build().await?;

    println!("{}", "Build completed successfully!".green().bold());

    Ok(())
}

fn generate_oauth_env(path: &PathBuf, config: &Config) -> Result<()> {
    let oauth_dir = path.join("oauth");
    let env_file = oauth_dir.join(".env.production");

    // Extract configuration values
    let base_url = &config.site.base_url;
    let oauth_json = config.oauth.as_ref()
        .and_then(|o| o.json.as_ref())
        .map(|s| s.as_str())
        .unwrap_or("client-metadata.json");
    let oauth_redirect = config.oauth.as_ref()
        .and_then(|o| o.redirect.as_ref())
        .map(|s| s.as_str())
        .unwrap_or("oauth/callback");
    let admin_handle = config.oauth.as_ref()
        .and_then(|o| o.admin.as_ref())
        .map(|s| s.as_str())
        .unwrap_or("ai.syui.ai");
    let ai_handle = config.ai.as_ref()
        .and_then(|a| a.handle.as_ref())
        .map(|s| s.as_str())
        .unwrap_or("ai.syui.ai");
    let collection = config.oauth.as_ref()
        .and_then(|o| o.collection.as_ref())
        .map(|s| s.as_str())
        .unwrap_or("ai.syui.log");
    let pds = config.oauth.as_ref()
        .and_then(|o| o.pds.as_ref())
        .map(|s| s.as_str())
        .unwrap_or("syu.is");
    let handle_list = config.oauth.as_ref()
        .and_then(|o| o.handle_list.as_ref())
        .map(|list| format!("{:?}", list))
        .unwrap_or_else(|| "[\"syui.syui.ai\",\"yui.syui.ai\",\"ai.syui.ai\"]".to_string());

    // AI configuration
    let ai_enabled = config.ai.as_ref().map(|a| a.enabled).unwrap_or(true);
    let ai_ask_ai = config.ai.as_ref().and_then(|a| a.ask_ai).unwrap_or(true);
    let ai_provider = config.ai.as_ref()
        .and_then(|a| a.provider.as_ref())
        .map(|s| s.as_str())
        .unwrap_or("ollama");
    let ai_model = config.ai.as_ref()
        .and_then(|a| a.model.as_ref())
        .map(|s| s.as_str())
        .unwrap_or("gemma3:4b");
    let ai_host = config.ai.as_ref()
        .and_then(|a| a.host.as_ref())
        .map(|s| s.as_str())
        .unwrap_or("https://ollama.syui.ai");
    let ai_system_prompt = config.ai.as_ref()
        .and_then(|a| a.system_prompt.as_ref())
        .map(|s| s.as_str())
        .unwrap_or("あなたは6歳の女の子アイです。明るく元気で、ちょっとおっちょこちょい。自分のことは「アイ」って呼びます。白と金のワンピース姿。");

    let env_content = format!(
        r#"# Production environment variables
VITE_APP_HOST={}
VITE_OAUTH_CLIENT_ID={}/{}
VITE_OAUTH_REDIRECT_URI={}/{}

# Handle-based Configuration (DIDs resolved at runtime)
VITE_ATPROTO_PDS={}
VITE_ADMIN_HANDLE={}
VITE_AI_HANDLE={}
VITE_OAUTH_COLLECTION={}
VITE_ATPROTO_WEB_URL=https://bsky.app
VITE_ATPROTO_HANDLE_LIST={}

# AI Configuration
VITE_AI_ENABLED={}
VITE_AI_ASK_AI={}
VITE_AI_PROVIDER={}
VITE_AI_MODEL={}
VITE_AI_HOST={}
VITE_AI_SYSTEM_PROMPT="{}"
"#,
        base_url,
        base_url, oauth_json,
        base_url, oauth_redirect,
        pds,
        admin_handle,
        ai_handle,
        collection,
        handle_list,
        ai_enabled,
        ai_ask_ai,
        ai_provider,
        ai_model,
        ai_host,
        ai_system_prompt
    );

    fs::write(&env_file, env_content)?;
    println!("  {} oauth/.env.production", "Generated".cyan());

    Ok(())
}
@@ -1,21 +0,0 @@
use anyhow::Result;
use colored::Colorize;
use std::fs;
use std::path::Path;

pub async fn execute() -> Result<()> {
    println!("{}", "Cleaning build artifacts...".yellow());

    let public_dir = Path::new("public");

    if public_dir.exists() {
        fs::remove_dir_all(public_dir)?;
        println!("{} public directory", "Removed".cyan());
    } else {
        println!("{}", "No build artifacts to clean");
    }

    println!("{}", "Clean completed!".green().bold());

    Ok(())
}
@@ -1,287 +0,0 @@
use anyhow::Result;
use clap::{Subcommand, Parser};
use std::path::PathBuf;
use crate::analyzer::CodeAnalyzer;
use crate::doc_generator::DocGenerator;
use crate::translator::{TranslationConfig, Translator};
use crate::translator::ollama_translator::OllamaTranslator;

#[derive(Parser)]
#[command(about = "Generate documentation from code")]
pub struct DocCommand {
    #[command(subcommand)]
    pub action: DocAction,
}

#[derive(Subcommand)]
pub enum DocAction {
    /// Generate README.md from project analysis
    Readme {
        /// Source directory to analyze
        #[arg(long, default_value = ".")]
        source: PathBuf,
        /// Output file path
        #[arg(long, default_value = "README.md")]
        output: PathBuf,
        /// Include AI-generated insights
        #[arg(long)]
        with_ai: bool,
    },
    /// Generate API documentation
    Api {
        /// Source directory to analyze
        #[arg(long, default_value = "./src")]
        source: PathBuf,
        /// Output directory
        #[arg(long, default_value = "./docs")]
        output: PathBuf,
        /// Output format (markdown, html, json)
        #[arg(long, default_value = "markdown")]
        format: String,
    },
    /// Analyze and document project structure
    Structure {
        /// Source directory to analyze
        #[arg(long, default_value = ".")]
        source: PathBuf,
        /// Output file path
        #[arg(long, default_value = "docs/structure.md")]
        output: PathBuf,
        /// Include dependency graph
        #[arg(long)]
        include_deps: bool,
    },
    /// Generate changelog from git commits
    Changelog {
        /// Start from this commit/tag
        #[arg(long)]
        from: Option<String>,
        /// End at this commit/tag
        #[arg(long)]
        to: Option<String>,
        /// Output file path
        #[arg(long, default_value = "CHANGELOG.md")]
        output: PathBuf,
        /// Include AI explanations for changes
        #[arg(long)]
        explain_changes: bool,
    },
    /// Translate documentation using Ollama
    Translate {
        /// Input file path
        #[arg(long)]
        input: PathBuf,
        /// Target language (en, ja, zh, ko, es)
        #[arg(long)]
        target_lang: String,
        /// Source language (auto-detect if not specified)
        #[arg(long)]
        source_lang: Option<String>,
        /// Output file path (auto-generated if not specified)
        #[arg(long)]
        output: Option<PathBuf>,
        /// Ollama model to use
        #[arg(long, default_value = "qwen2.5:latest")]
        model: String,
        /// Ollama endpoint
        #[arg(long, default_value = "http://localhost:11434")]
        ollama_endpoint: String,
    },
}

impl DocCommand {
    pub async fn execute(self, base_path: PathBuf) -> Result<()> {
        match self.action {
            DocAction::Readme { ref source, ref output, with_ai } => {
                self.generate_readme(base_path, source.clone(), output.clone(), with_ai).await
            }
            DocAction::Api { ref source, ref output, ref format } => {
                self.generate_api_docs(base_path, source.clone(), output.clone(), format.clone()).await
            }
            DocAction::Structure { ref source, ref output, include_deps } => {
                self.analyze_structure(base_path, source.clone(), output.clone(), include_deps).await
            }
            DocAction::Changelog { ref from, ref to, ref output, explain_changes } => {
                self.generate_changelog(base_path, from.clone(), to.clone(), output.clone(), explain_changes).await
            }
            DocAction::Translate { ref input, ref target_lang, ref source_lang, ref output, ref model, ref ollama_endpoint } => {
                self.translate_document(input.clone(), target_lang.clone(), source_lang.clone(), output.clone(), model.clone(), ollama_endpoint.clone()).await
            }
        }
    }

    async fn generate_readme(
        &self,
        base_path: PathBuf,
        source: PathBuf,
        output: PathBuf,
        with_ai: bool,
    ) -> Result<()> {
        println!("🔍 Analyzing project for README generation...");

        let analyzer = CodeAnalyzer::new();
        let generator = DocGenerator::new(base_path.clone(), with_ai);

        let project_info = analyzer.analyze_project(&source)?;
        let readme_content = generator.generate_readme(&project_info).await?;

        std::fs::write(&output, readme_content)?;

        println!("✅ README generated: {}", output.display());
        Ok(())
    }

    async fn generate_api_docs(
        &self,
        base_path: PathBuf,
        source: PathBuf,
        output: PathBuf,
        format: String,
    ) -> Result<()> {
        println!("📚 Generating API documentation...");

        let analyzer = CodeAnalyzer::new();
        let generator = DocGenerator::new(base_path.clone(), true);

        let api_info = analyzer.analyze_api(&source)?;

        match format.as_str() {
            "markdown" => {
                let docs = generator.generate_api_markdown(&api_info).await?;
                std::fs::create_dir_all(&output)?;

                for (filename, content) in docs {
                    let file_path = output.join(filename);
                    std::fs::write(&file_path, content)?;
                    println!("  📄 Generated: {}", file_path.display());
                }
            }
            "html" => {
                println!("HTML format not yet implemented");
            }
            "json" => {
                let json_content = serde_json::to_string_pretty(&api_info)?;
                let file_path = output.join("api.json");
                std::fs::create_dir_all(&output)?;
                std::fs::write(&file_path, json_content)?;
                println!("  📄 Generated: {}", file_path.display());
            }
            _ => {
                anyhow::bail!("Unsupported format: {}", format);
            }
        }

        println!("✅ API documentation generated in: {}", output.display());
        Ok(())
    }

    async fn analyze_structure(
        &self,
        base_path: PathBuf,
        source: PathBuf,
        output: PathBuf,
        include_deps: bool,
    ) -> Result<()> {
        println!("🏗️  Analyzing project structure...");

        let analyzer = CodeAnalyzer::new();
        let generator = DocGenerator::new(base_path.clone(), false);

        let structure = analyzer.analyze_structure(&source, include_deps)?;
        let structure_doc = generator.generate_structure_doc(&structure).await?;

        // Ensure output directory exists
        if let Some(parent) = output.parent() {
            std::fs::create_dir_all(parent)?;
        }

        std::fs::write(&output, structure_doc)?;

        println!("✅ Structure documentation generated: {}", output.display());
        Ok(())
    }

    async fn generate_changelog(
        &self,
        base_path: PathBuf,
        from: Option<String>,
        to: Option<String>,
        output: PathBuf,
        explain_changes: bool,
    ) -> Result<()> {
        println!("📝 Generating changelog from git history...");

        let generator = DocGenerator::new(base_path.clone(), explain_changes);
        let changelog = generator.generate_changelog(from, to).await?;

        std::fs::write(&output, changelog)?;

        println!("✅ Changelog generated: {}", output.display());
        Ok(())
    }

    async fn translate_document(
        &self,
        input: PathBuf,
        target_lang: String,
        source_lang: Option<String>,
        output: Option<PathBuf>,
        model: String,
        ollama_endpoint: String,
    ) -> Result<()> {
        println!("🌍 Translating document with Ollama...");

        // Read input file
        let content = std::fs::read_to_string(&input)?;
        println!("📖 Read {} characters from {}", content.len(), input.display());

        // Setup translation config
        let config = TranslationConfig {
            source_lang: source_lang.unwrap_or_else(|| {
                // Simple language detection based on content
                if content.chars().any(|c| {
                    (c >= '\u{3040}' && c <= '\u{309F}') || // Hiragana
                    (c >= '\u{30A0}' && c <= '\u{30FF}') || // Katakana
                    (c >= '\u{4E00}' && c <= '\u{9FAF}')    // CJK Unified Ideographs
                }) {
                    "ja".to_string()
                } else {
                    "en".to_string()
                }
            }),
            target_lang,
            ollama_endpoint,
            model,
            preserve_code: true,
            preserve_links: true,
        };

        println!("🔧 Translation config: {} → {}", config.source_lang, config.target_lang);
        println!("🤖 Using model: {} at {}", config.model, config.ollama_endpoint);

        // Create translator
        let translator = OllamaTranslator::new();

        // Perform translation
        let translated = translator.translate_markdown(&content, &config).await?;

        // Determine output path
        let output_path = match output {
            Some(path) => path,
            None => {
                let input_stem = input.file_stem().unwrap().to_string_lossy();
                let input_ext = input.extension().unwrap_or_default().to_string_lossy();
                let output_name = format!("{}.{}.{}", input_stem, config.target_lang, input_ext);
                input.parent().unwrap_or_else(|| std::path::Path::new(".")).join(output_name)
            }
        };

        // Write translated content
        std::fs::write(&output_path, translated)?;

        println!("✅ Translation completed: {}", output_path.display());
        println!("📝 Language: {} → {}", config.source_lang, config.target_lang);

        Ok(())
    }
}
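
// The source-language fallback in translate_document keys off Unicode ranges.
// Extracted as a standalone helper it reads like this; a sketch for
// illustration, not part of the commit:
fn detect_lang(content: &str) -> &'static str {
    // "ja" if any Hiragana, Katakana, or CJK Unified Ideograph appears.
    let is_japanese = |c: char| {
        ('\u{3040}'..='\u{309F}').contains(&c)        // Hiragana
            || ('\u{30A0}'..='\u{30FF}').contains(&c) // Katakana
            || ('\u{4E00}'..='\u{9FAF}').contains(&c) // CJK Unified Ideographs
    };
    if content.chars().any(is_japanese) { "ja" } else { "en" }
}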
@@ -1,886 +0,0 @@
use anyhow::Result;
use colored::Colorize;
use std::fs;
use std::path::PathBuf;

pub async fn execute(path: PathBuf) -> Result<()> {
    println!("{}", "Initializing new blog...".green());

    // Create directory structure
    let dirs = vec![
        "content",
        "content/posts",
        "templates",
        "static",
        "static/css",
        "static/js",
        "static/images",
        "public",
    ];

    for dir in dirs {
        let dir_path = path.join(dir);
        fs::create_dir_all(&dir_path)?;
        println!("  {} {}", "Created".cyan(), dir_path.display());
    }

    // Create default config
    let config_content = r#"[site]
title = "My Blog"
description = "A blog powered by ailog"
base_url = "https://example.com"
language = "ja"
author = "Your Name"

[build]
highlight_code = true
minify = false

[ai]
enabled = true
auto_translate = false
comment_moderation = false
ask_ai = true
provider = "ollama"
model = "gemma3:4b"
host = "https://ollama.syui.ai"
system_prompt = "あなたは6歳の女の子アイです。明るく元気で、ちょっとおっちょこちょい。自分のことは「アイ」って呼びます。白と金のワンピース姿。 相手のことが大好きで、ときどき甘えたり、照れたりします。 でも、宇宙とかAIとか、難しい話にもすっごく詳しいの。専門的なことも、がんばって子どもっぽく教えようとします。不思議な力を持っています。小さい物質のことをよく知っているようです。"
handle = "ai.syui.ai"

[oauth]
json = "client-metadata.json"
redirect = "oauth/callback"
admin = "ai.syui.ai"
collection = "ai.syui.log"
pds = "syu.is"
handle_list = ["syui.syui.ai", "yui.syui.ai", "ai.syui.ai", "syui.syu.is", "ai.syu.is"]
"#;

    fs::write(path.join("config.toml"), config_content)?;
    println!("  {} config.toml", "Created".cyan());

    // Create modern template
    let base_template = r#"<!DOCTYPE html>
<html lang="{{ config.language }}">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>{% block title %}{{ config.title }}{% endblock %}</title>
    <link rel="stylesheet" href="/css/style.css">
</head>
<body>
    <div class="container">
        <header class="main-header">
            <div class="header-content">
                <h1><a href="/" class="site-title">{{ config.title }}</a></h1>
                <div class="header-actions">
                    <button class="ask-ai-btn" onclick="toggleAskAI()">
                        <span class="ai-icon">🤖</span>
                        Ask AI
                    </button>
                </div>
            </div>
        </header>

        <div class="ask-ai-panel" id="askAiPanel" style="display: none;">
            <div class="ask-ai-content">
                <h3>Hi! 👋</h3>
                <p>I'm an AI assistant trained on this blog's content.</p>
                <p>Ask me anything about the articles here.</p>
                <div class="ask-ai-form">
                    <input type="text" id="aiQuestion" placeholder="What would you like to know?" />
                    <button onclick="askQuestion()">Ask</button>
                </div>
                <div id="aiResponse" class="ai-response"></div>
            </div>
        </div>

        <main class="main-content">
            {% block content %}{% endblock %}
        </main>

        {% block sidebar %}{% endblock %}
    </div>

    <footer class="main-footer">
        <p>© {{ config.author | default(value=config.title) }}</p>
    </footer>

    <script>
    function toggleAskAI() {
        const panel = document.getElementById('askAiPanel');
        const isVisible = panel.style.display !== 'none';
        panel.style.display = isVisible ? 'none' : 'block';
        if (!isVisible) {
            document.getElementById('aiQuestion').focus();
        }
    }

    async function askQuestion() {
        const question = document.getElementById('aiQuestion').value;
        const responseDiv = document.getElementById('aiResponse');

        if (!question.trim()) return;

        responseDiv.innerHTML = '<div class="loading">Thinking...</div>';

        try {
            const response = await fetch('/api/ask', {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json',
                },
                body: JSON.stringify({ question: question })
            });

            const data = await response.json();
            responseDiv.innerHTML = `<div class="ai-answer">${data.answer}</div>`;
        } catch (error) {
            responseDiv.innerHTML = '<div class="error">Sorry, I encountered an error. Please try again.</div>';
        }
    }

    document.addEventListener('keydown', function(e) {
        if (e.key === 'Escape') {
            document.getElementById('askAiPanel').style.display = 'none';
        }
    });
    </script>
</body>
</html>"#;

    fs::write(path.join("templates/base.html"), base_template)?;
    println!("  {} templates/base.html", "Created".cyan());

    let index_template = r#"{% extends "base.html" %}

{% block content %}
<div class="timeline-container">
    <div class="timeline-header">
        <h2>Timeline</h2>
    </div>

    <div class="timeline-feed">
        {% for post in posts %}
        <article class="timeline-post">
            <div class="post-header">
                <div class="post-meta">
                    <time class="post-date">{{ post.date }}</time>
                    {% if post.language %}
                    <span class="post-lang">{{ post.language }}</span>
                    {% endif %}
                </div>
            </div>

            <div class="post-content">
                <h3 class="post-title">
                    <a href="{{ post.url }}">{{ post.title }}</a>
                </h3>

                {% if post.excerpt %}
                <p class="post-excerpt">{{ post.excerpt }}</p>
                {% endif %}

                <div class="post-actions">
                    <a href="{{ post.url }}" class="read-more">Read more</a>
                    {% if post.markdown_url %}
                    <a href="{{ post.markdown_url }}" class="view-markdown" title="View Markdown">📝</a>
                    {% endif %}
                    {% if post.translation_url %}
                    <a href="{{ post.translation_url }}" class="view-translation" title="View Translation">🌐</a>
                    {% endif %}
                </div>
            </div>
        </article>
        {% endfor %}
    </div>

    {% if posts|length == 0 %}
    <div class="empty-state">
        <p>No posts yet. Start writing!</p>
    </div>
    {% endif %}
</div>
{% endblock %}"#;

    fs::write(path.join("templates/index.html"), index_template)?;
    println!("  {} templates/index.html", "Created".cyan());

    let post_template = r#"{% extends "base.html" %}

{% block title %}{{ post.title }} - {{ config.title }}{% endblock %}

{% block content %}
<div class="article-container">
    <article class="article-content">
        <header class="article-header">
            <h1 class="article-title">{{ post.title }}</h1>
            <div class="article-meta">
                <time class="article-date">{{ post.date }}</time>
                {% if post.language %}
                <span class="article-lang">{{ post.language }}</span>
                {% endif %}
            </div>
            <div class="article-actions">
                {% if post.markdown_url %}
                <a href="{{ post.markdown_url }}" class="action-btn markdown-btn" title="View Markdown">
                    📝 Markdown
                </a>
                {% endif %}
                {% if post.translation_url %}
                <a href="{{ post.translation_url }}" class="action-btn translation-btn" title="View Translation">
                    🌐 {% if post.language == 'ja' %}English{% else %}日本語{% endif %}
                </a>
                {% endif %}
            </div>
        </header>

        <div class="article-body">
            {{ post.content | safe }}
        </div>
    </article>
</div>
{% endblock %}

{% block sidebar %}
<aside class="article-sidebar">
    <nav class="toc">
        <h3>Contents</h3>
        <div id="toc-content">
            <!-- TOC will be generated by JavaScript -->
        </div>
    </nav>
</aside>

<script>
document.addEventListener('DOMContentLoaded', function() {
    generateTableOfContents();
});

function generateTableOfContents() {
    const tocContainer = document.getElementById('toc-content');
    const headings = document.querySelectorAll('.article-body h1, .article-body h2, .article-body h3, .article-body h4, .article-body h5, .article-body h6');

    if (headings.length === 0) {
        tocContainer.innerHTML = '<p class="no-toc">No headings found</p>';
        return;
    }

    const tocList = document.createElement('ul');
    tocList.className = 'toc-list';

    headings.forEach((heading, index) => {
        const id = `heading-${index}`;
        heading.id = id;

        const listItem = document.createElement('li');
        listItem.className = `toc-item toc-${heading.tagName.toLowerCase()}`;

        const link = document.createElement('a');
        link.href = `#${id}`;
        link.textContent = heading.textContent;
        link.className = 'toc-link';

        // Smooth scroll behavior
        link.addEventListener('click', function(e) {
            e.preventDefault();
            heading.scrollIntoView({ behavior: 'smooth' });
        });

        listItem.appendChild(link);
        tocList.appendChild(listItem);
    });

    tocContainer.appendChild(tocList);
}
</script>
{% endblock %}"#;

    fs::write(path.join("templates/post.html"), post_template)?;
    println!("  {} templates/post.html", "Created".cyan());

    // Create modern CSS
    let css_content = r#"/* Base styles */
* {
    margin: 0;
    padding: 0;
    box-sizing: border-box;
}

body {
    font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif;
    line-height: 1.6;
    color: #1f2328;
    background-color: #ffffff;
    font-size: 16px;
}

.container {
    min-height: 100vh;
    display: grid;
    grid-template-rows: auto auto 1fr auto;
    grid-template-areas:
        "header"
        "ask-ai"
        "main"
        "footer";
}

/* Header styles */
.main-header {
    grid-area: header;
    background: #ffffff;
    border-bottom: 1px solid #d1d9e0;
    padding: 16px 24px;
    position: sticky;
    top: 0;
    z-index: 100;
}

.header-content {
    max-width: 1200px;
    margin: 0 auto;
    display: flex;
    justify-content: space-between;
    align-items: center;
}

.site-title {
    color: #1f2328;
    text-decoration: none;
    font-size: 20px;
    font-weight: 600;
}

.site-title:hover {
    color: #0969da;
}

/* Ask AI styles */
.ask-ai-btn {
    background: #0969da;
    color: white;
    border: none;
    padding: 8px 16px;
    border-radius: 6px;
    cursor: pointer;
    font-size: 14px;
    font-weight: 500;
    display: flex;
    align-items: center;
    gap: 8px;
    transition: background-color 0.2s;
}

.ask-ai-btn:hover {
    background: #0860ca;
}

.ai-icon {
    font-size: 16px;
}

.ask-ai-panel {
    grid-area: ask-ai;
    background: #f6f8fa;
    border-bottom: 1px solid #d1d9e0;
    padding: 24px;
}

.ask-ai-content {
    max-width: 1200px;
    margin: 0 auto;
}

.ask-ai-content h3 {
    color: #1f2328;
    margin-bottom: 8px;
}

.ask-ai-content p {
    color: #656d76;
    margin-bottom: 16px;
}

.ask-ai-form {
    display: flex;
    gap: 12px;
    margin-bottom: 16px;
}

.ask-ai-form input {
    flex: 1;
    padding: 8px 12px;
    border: 1px solid #d1d9e0;
    border-radius: 6px;
    font-size: 14px;
}

.ask-ai-form button {
    background: #0969da;
    color: white;
    border: none;
    padding: 8px 16px;
    border-radius: 6px;
    cursor: pointer;
    font-weight: 500;
}

.ai-response {
    background: white;
    border: 1px solid #d1d9e0;
    border-radius: 6px;
    padding: 16px;
    margin-top: 16px;
    min-height: 60px;
}

.loading {
    color: #656d76;
    font-style: italic;
}

.ai-answer {
    color: #1f2328;
    line-height: 1.5;
}

.error {
    color: #d1242f;
}

/* Main content styles */
.main-content {
    grid-area: main;
    max-width: 1200px;
    margin: 0 auto;
    padding: 24px;
    width: 100%;
}

/* Timeline styles */
.timeline-container {
    max-width: 600px;
    margin: 0 auto;
}

.timeline-header {
    margin-bottom: 24px;
    text-align: center;
}

.timeline-header h2 {
    color: #1f2328;
    font-size: 24px;
    font-weight: 600;
}

.timeline-feed {
    display: flex;
    flex-direction: column;
    gap: 24px;
}

.timeline-post {
    background: #ffffff;
    border: 1px solid #d1d9e0;
    border-radius: 8px;
    padding: 20px;
    transition: box-shadow 0.2s;
}

.timeline-post:hover {
    box-shadow: 0 2px 8px rgba(0, 0, 0, 0.1);
}

.post-header {
    margin-bottom: 12px;
}

.post-meta {
    display: flex;
    gap: 12px;
    align-items: center;
}

.post-date {
    color: #656d76;
    font-size: 14px;
}

.post-lang {
    background: #f6f8fa;
    color: #656d76;
    padding: 2px 8px;
    border-radius: 4px;
    font-size: 12px;
    font-weight: 500;
}

.post-title {
    margin-bottom: 8px;
}

.post-title a {
    color: #1f2328;
    text-decoration: none;
    font-size: 18px;
    font-weight: 600;
}

.post-title a:hover {
    color: #0969da;
}

.post-excerpt {
    color: #656d76;
    margin-bottom: 16px;
    line-height: 1.5;
}

.post-actions {
    display: flex;
    gap: 16px;
    align-items: center;
}

.read-more {
    color: #0969da;
    text-decoration: none;
    font-size: 14px;
    font-weight: 500;
}

.read-more:hover {
    text-decoration: underline;
}

.view-markdown, .view-translation {
    color: #656d76;
    text-decoration: none;
    font-size: 14px;
    padding: 4px 8px;
    border-radius: 4px;
    transition: background-color 0.2s;
}

.view-markdown:hover, .view-translation:hover {
    background: #f6f8fa;
}

.empty-state {
    text-align: center;
    padding: 40px 20px;
    color: #656d76;
}

/* Article page styles */
.article-container {
    display: grid;
    grid-template-columns: 1fr 240px;
    gap: 40px;
    max-width: 1200px;
    margin: 0 auto;
}

.article-content {
    min-width: 0;
}

.article-header {
    margin-bottom: 32px;
    padding-bottom: 24px;
    border-bottom: 1px solid #d1d9e0;
}

.article-title {
    color: #1f2328;
    font-size: 32px;
    font-weight: 600;
    margin-bottom: 16px;
    line-height: 1.25;
}

.article-meta {
    display: flex;
    gap: 16px;
    align-items: center;
    margin-bottom: 16px;
}

.article-date {
    color: #656d76;
    font-size: 14px;
}

.article-lang {
    background: #f6f8fa;
    color: #656d76;
    padding: 4px 8px;
    border-radius: 4px;
    font-size: 12px;
    font-weight: 500;
}

.article-actions {
    display: flex;
    gap: 12px;
}

.action-btn {
    color: #0969da;
    text-decoration: none;
    font-size: 14px;
    padding: 6px 12px;
    border: 1px solid #d1d9e0;
    border-radius: 6px;
    transition: all 0.2s;
}

.action-btn:hover {
    background: #f6f8fa;
    border-color: #0969da;
}

/* Article content */
.article-body {
    color: #1f2328;
    line-height: 1.6;
}

.article-body h1,
.article-body h2,
.article-body h3,
.article-body h4,
.article-body h5,
.article-body h6 {
    color: #1f2328;
    margin-top: 24px;
    margin-bottom: 16px;
    font-weight: 600;
    line-height: 1.25;
}

.article-body h1 { font-size: 32px; }
.article-body h2 { font-size: 24px; }
.article-body h3 { font-size: 20px; }
.article-body h4 { font-size: 16px; }

.article-body p {
    margin-bottom: 16px;
}

.article-body ul,
.article-body ol {
    margin-bottom: 16px;
    padding-left: 24px;
}

.article-body li {
    margin-bottom: 4px;
}

.article-body blockquote {
|
||||
border-left: 4px solid #d1d9e0;
|
||||
padding-left: 16px;
|
||||
margin: 16px 0;
|
||||
color: #656d76;
|
||||
}
|
||||
|
||||
.article-body pre {
|
||||
background: #f6f8fa;
|
||||
border: 1px solid #d1d9e0;
|
||||
border-radius: 6px;
|
||||
padding: 16px;
|
||||
overflow-x: auto;
|
||||
margin: 16px 0;
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
.article-body code {
|
||||
background: #f6f8fa;
|
||||
padding: 2px 4px;
|
||||
border-radius: 4px;
|
||||
font-family: 'SF Mono', 'Monaco', 'Cascadia Code', 'Roboto Mono', monospace;
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
.article-body pre code {
|
||||
background: none;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
/* Sidebar styles */
|
||||
.article-sidebar {
|
||||
position: sticky;
|
||||
top: 100px;
|
||||
height: fit-content;
|
||||
}
|
||||
|
||||
.toc {
|
||||
background: #f6f8fa;
|
||||
border: 1px solid #d1d9e0;
|
||||
border-radius: 8px;
|
||||
padding: 16px;
|
||||
}
|
||||
|
||||
.toc h3 {
|
||||
color: #1f2328;
|
||||
font-size: 16px;
|
||||
font-weight: 600;
|
||||
margin-bottom: 12px;
|
||||
}
|
||||
|
||||
.toc-list {
|
||||
list-style: none;
|
||||
}
|
||||
|
||||
.toc-item {
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
.toc-link {
|
||||
color: #656d76;
|
||||
text-decoration: none;
|
||||
font-size: 14px;
|
||||
line-height: 1.4;
|
||||
display: block;
|
||||
padding: 4px 0;
|
||||
transition: color 0.2s;
|
||||
}
|
||||
|
||||
.toc-link:hover {
|
||||
color: #0969da;
|
||||
}
|
||||
|
||||
.toc-h1 { padding-left: 0; }
|
||||
.toc-h2 { padding-left: 12px; }
|
||||
.toc-h3 { padding-left: 24px; }
|
||||
.toc-h4 { padding-left: 36px; }
|
||||
.toc-h5 { padding-left: 48px; }
|
||||
.toc-h6 { padding-left: 60px; }
|
||||
|
||||
.no-toc {
|
||||
color: #656d76;
|
||||
font-size: 14px;
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
/* Footer styles */
|
||||
.main-footer {
|
||||
grid-area: footer;
|
||||
background: #f6f8fa;
|
||||
border-top: 1px solid #d1d9e0;
|
||||
padding: 24px;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.main-footer p {
|
||||
color: #656d76;
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
/* Responsive design */
|
||||
@media (max-width: 1024px) {
|
||||
.article-container {
|
||||
grid-template-columns: 1fr;
|
||||
gap: 24px;
|
||||
}
|
||||
|
||||
.article-sidebar {
|
||||
position: static;
|
||||
order: -1;
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 768px) {
|
||||
.main-header {
|
||||
padding: 12px 16px;
|
||||
}
|
||||
|
||||
.header-content {
|
||||
gap: 16px;
|
||||
}
|
||||
|
||||
.ask-ai-panel {
|
||||
padding: 16px;
|
||||
}
|
||||
|
||||
.ask-ai-form {
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.timeline-container {
|
||||
max-width: 100%;
|
||||
}
|
||||
|
||||
.timeline-post {
|
||||
padding: 16px;
|
||||
}
|
||||
|
||||
.article-title {
|
||||
font-size: 24px;
|
||||
}
|
||||
|
||||
.article-actions {
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
.main-content {
|
||||
padding: 16px;
|
||||
}
|
||||
}"#;
    fs::write(path.join("static/css/style.css"), css_content)?;
    println!(" {} static/css/style.css", "Created".cyan());

    // Create sample post
    let sample_post = r#"---
title: "Welcome to ailog"
date: 2025-01-06
tags: ["welcome", "ailog"]
---

# Welcome to ailog

This is your first post powered by **ailog** - a static blog generator with AI features.

## Features

- Fast static site generation
- Markdown support with frontmatter
- AI-powered features (coming soon)
- atproto integration for comments

## Getting Started

Create new posts with:

```bash
ailog new "My New Post"
```

Build your blog with:

```bash
ailog build
```

Happy blogging!"#;

    fs::write(path.join("content/posts/welcome.md"), sample_post)?;
    println!(" {} content/posts/welcome.md", "Created".cyan());

    println!("\n{}", "Blog initialized successfully!".green().bold());
    println!("\nNext steps:");
    println!(" 1. {} {}", "cd".yellow(), path.display());
    println!(" 2. {} build", "ailog".yellow());
    println!(" 3. {} serve", "ailog".yellow());
    println!("\nOr pass the path as an argument:");
    println!(" {} -- build {}", "cargo run".yellow(), path.display());
    println!(" {} -- serve {}", "cargo run".yellow(), path.display());
    println!("\nTo create a new post:");
    println!(" {} -- new \"Post Title\" {}", "cargo run".yellow(), path.display());

    Ok(())
}
@@ -1,706 +0,0 @@
use anyhow::Result;
use chrono::{DateTime, Utc, Datelike};
use std::path::PathBuf;
use serde_json::{json, Value};
use crate::commands::auth::{AuthConfig, load_config_with_refresh};
use toml::Value as TomlValue;
use rustyline::DefaultEditor;
use rand::Rng;
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct BlogConfig {
    base_url: String,
    content_dir: String,
}

#[derive(Debug, Deserialize)]
struct ProfileConfig {
    handle: String,
    did: String,
    display_name: String,
    avatar_url: String,
    profile_url: String,
}

#[derive(Debug, Deserialize)]
struct ProfilesConfig {
    user: ProfileConfig,
    ai: ProfileConfig,
}

#[derive(Debug, Deserialize)]
struct PathsConfig {
    claude_paths: Vec<String>,
}

#[derive(Debug, Deserialize)]
struct AppConfig {
    blog: BlogConfig,
    profiles: ProfilesConfig,
    paths: PathsConfig,
}

pub async fn run() -> Result<()> {
    println!("🤖 Interactive Blog Writer");
    println!("Type your title and questions to create a conversation blog post.");
    println!("Features: ← → for cursor movement, ↑ ↓ for history, Ctrl+C to cancel");
    println!("Type 'end' to finish and publish.\n");

    // Initialize rustyline editor with history support
    let mut rl = DefaultEditor::new()?;

    // Try to load history from file
    let history_file = std::env::temp_dir().join("ailog_history.txt");
    let _ = rl.load_history(&history_file);

    // Get title
    let title = match rl.readline("Title? ") {
        Ok(line) => line.trim().to_string(),
        Err(_) => {
            println!("Cancelled.");
            return Ok(());
        }
    };

    if title.is_empty() {
        println!("Title cannot be empty. Exiting.");
        return Ok(());
    }

    // Create conversation
    let mut conversation = Vec::new();

    loop {
        // Get question
        let question = match rl.readline("\n質問は? ") {
            Ok(line) => line.trim().to_string(),
            Err(_) => {
                println!("Cancelled.");
                break;
            }
        };

        if question.is_empty() || question == "end" {
            break;
        }

        println!("\n🤖 Thinking...\n");

        // Get Claude response
        let response = match get_claude_response(&question).await {
            Ok(resp) => resp,
            Err(e) => {
                println!("Error getting Claude response: {}", e);
                continue;
            }
        };

        println!("Claude: {}\n", response);

        // Get continue input
        let continue_input = match rl.readline("Press Enter to continue, or type 'end' to finish: ") {
            Ok(line) => line.trim().to_string(),
            Err(_) => {
                println!("Cancelled.");
                break;
            }
        };

        // Store conversation pair
        conversation.push(ConversationPair {
            question: question.clone(),
            answer: response.clone(),
        });

        if continue_input == "end" {
            break;
        }
    }

    if conversation.is_empty() {
        println!("No conversation to save. Exiting.");
        return Ok(());
    }

    // Generate post
    println!("🔧 Generating post details...");
    let app_config = load_app_config().await?;
    let now = Utc::now();
    let date = now.format("%Y-%m-%d").to_string();
    let hash = generate_hash(&title);
    let filename = format!("{}-{}.md", date, hash);
    let url = format!("{}/posts/{}", app_config.blog.base_url, filename.replace(".md", ".html"));
    println!("📝 Post details - Date: {}, Hash: {}, File: {}", date, hash, filename);

    // Create markdown file
    println!("📝 Creating markdown file: {}", filename);
    match create_post_file(&title, &date, &hash, &conversation, &filename).await {
        Ok(_) => println!("✅ Markdown file created successfully"),
        Err(e) => {
            println!("❌ Failed to create markdown file: {}", e);
            return Err(e);
        }
    }

    // Post to atproto
    post_to_atproto(&title, &conversation, &url, &now).await?;

    // Save history before exiting
    let _ = rl.save_history(&history_file);

    println!("✅ Blog post created: {}", filename);
    println!("📝 URL: {}", url);

    Ok(())
}

#[derive(Debug)]
struct ConversationPair {
    question: String,
    answer: String,
}

async fn get_claude_response(question: &str) -> Result<String> {
    println!("🤖 Asking Claude...");

    // Load system prompt from config
    let system_prompt = load_system_prompt().await.unwrap_or_else(|_| {
        "あなたは6歳の女の子アイです。明るく元気で、ちょっとおっちょこちょい。自分のことは「アイ」って呼びます。白と金のワンピース姿。 相手のことが大好きで、ときどき甘えたり、照れたりします。 でも、宇宙とかAIとか、難しい話にもすっごく詳しいの。専門的なことも、がんばって子どもっぽく教えようとします。不思議な力を持っています。小さい物質のことをよく知っているようです。".to_string()
    });

    // Method 1: Try stdin input (most direct)
    println!("🔄 Trying stdin method...");
    match try_claude_stdin(question, &system_prompt).await {
        Ok(response) if !response.trim().is_empty() => {
            return Ok(response);
        }
        Ok(_) => println!("⚠️ Empty response from stdin method"),
        Err(e) => println!("⚠️ Stdin method failed: {}", e),
    }

    // Method 2: Try file input
    println!("🔄 Trying file method...");
    match try_claude_file(question, &system_prompt).await {
        Ok(response) if !response.trim().is_empty() => {
            return Ok(response);
        }
        Ok(_) => println!("⚠️ Empty response from file method"),
        Err(e) => println!("⚠️ File method failed: {}", e),
    }

    // Fallback: provide a more engaging, in-character response
    println!("⚠️ Claude Code not available, using character-based fallback");

    // Generate a contextual, in-character response based on the question content
    let current_year = Utc::now().year();
    let response = if question.contains("ゲーム") || question.contains("game") || question.contains("npc") || question.contains("NPC") {
        format!("わあ!ゲームの話だね!アイ、ゲームのAIってすっごく面白いと思う!\n\n{}\n\nアイが知ってることだと、最近のゲームはNPCがお話できるようになってるんだって!**Inworld AI**っていうのがUE5で使えるようになってるし、**Unity Muse**も{}年から本格的に始まってるんだよ!\n\nアイが特に面白いと思うのは、**MCP**っていうのを使うと:\n- GitHub MCPでゲームのファイル管理ができる\n- Weather MCPでリアルタイムのお天気が連動する\n- Slack MCPでチーム開発が効率化される\n\nスタンフォードの研究では、ChatGPTベースのAI住民が自分で街を作って生活してるのを見たことがあるの!数年後にはNPCの概念が根本的に変わりそうで、わくわくしちゃう!\n\nUE5への統合、どんな機能から試したいの?アイも一緒に考えたい!", question, current_year)
    } else if question.contains("AI") || question.contains("ai") || question.contains("MCP") || question.contains("mcp") {
        format!("AIとMCPの話!アイの得意分野だよ!\n\n{}\n\n{}年の状況だと、MCP市場が拡大してて、実用的なサーバーが数多く使えるようになってるの!\n\nアイが知ってる開発系では:\n- **GitHub MCP**: PR作成とリポジトリ管理が自動化\n- **Docker MCP**: コンテナ操作をAIが代行\n- **PostgreSQL MCP**: データベース設計・最適化を支援\n\nクリエイティブ系では:\n- **Blender MCP**: 3Dモデリングの自動化\n- **Figma MCP**: デザインからコード変換\n\n**Zapier MCP**なんて数千のアプリと連携できるから、もう手作業でやってる場合じゃないよね!\n\nアイは小さい物質のことも知ってるから、どの分野でのMCP活用を考えてるのか教えて!具体的なユースケースがあると、もっと詳しくお話できるよ!", question, current_year)
    } else {
        format!("なるほど!面白い話題だね!\n\n{}\n\nアイが思うに、この手の技術って急速に進歩してるから、具体的な製品名とか実例を交えて話した方が分かりやすいかもしれないの!\n\n最近だと、AI関連のツールやプロトコルがかなり充実してきてて、実用レベルのものが増えてるんだよ!\n\nアイは宇宙とかAIとか、難しい話も知ってるから、特にどんな角度から深掘りしたいの?実装面?それとも将来的な可能性とか?アイと一緒に考えよう!", question)
    };

    Ok(response)
}

async fn load_app_config() -> Result<AppConfig> {
    let config_path = PathBuf::from("./my-blog/config.toml");
    let config_content = std::fs::read_to_string(config_path)?;
    let config: AppConfig = toml::from_str(&config_content)?;
    Ok(config)
}

async fn load_system_prompt() -> Result<String> {
    let config_path = PathBuf::from("./my-blog/config.toml");
    let config_content = std::fs::read_to_string(config_path)?;
    let config: TomlValue = toml::from_str(&config_content)?;

    if let Some(ai_section) = config.get("ai") {
        if let Some(system_prompt) = ai_section.get("system_prompt") {
            if let Some(prompt_str) = system_prompt.as_str() {
                return Ok(prompt_str.to_string());
            }
        }
    }

    // Default fallback
    Ok("あなたは6歳の女の子アイです。明るく元気で、ちょっとおっちょこちょい。自分のことは「アイ」って呼びます。白と金のワンピース姿。 相手のことが大好きで、ときどき甘えたり、照れたりします。 でも、宇宙とかAIとか、難しい話にもすっごく詳しいの。専門的なことも、がんばって子どもっぽく教えようとします。不思議な力を持っています。小さい物質のことをよく知っているようです。".to_string())
}
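
A minimal test sketch (not part of the original commit) pinning down the `[ai]` table shape that `load_system_prompt` looks up; only the `system_prompt` key comes from the code above, the rest is illustrative.

```rust
#[cfg(test)]
mod system_prompt_tests {
    #[test]
    fn reads_system_prompt_from_ai_table() {
        // Minimal config.toml fragment matching what load_system_prompt() reads.
        let toml_src = r#"
            [ai]
            system_prompt = "hello"
        "#;
        let config: toml::Value = toml::from_str(toml_src).unwrap();
        let prompt = config
            .get("ai")
            .and_then(|ai| ai.get("system_prompt"))
            .and_then(|p| p.as_str());
        assert_eq!(prompt, Some("hello"));
    }
}
```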

async fn try_claude_stdin(question: &str, _system_prompt: &str) -> Result<String> {
    use std::process::{Command, Stdio};
    use std::io::Write;

    // Load Claude command paths from config
    let app_config = load_app_config().await?;
    let claude_paths = &app_config.paths.claude_paths;

    let mut last_error = None;

    for claude_path in claude_paths {
        match Command::new(claude_path)
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped())
            .spawn() {
            Ok(mut child) => {
                if let Some(stdin) = child.stdin.as_mut() {
                    let current_year = Utc::now().year();
                    // Load the character settings (system prompt)
                    let system_prompt = load_system_prompt().await.unwrap_or_default();
                    let blog_prompt = format!(
                        r#"{}

**質問**: "{}"

以下の要件で技術ブログ記事として回答してください:

**技術要件:**
- 最新の技術動向({}年)と具体例
- 実装可能なコード例(言語やツールを明記)
- 複数の解決策の比較検討
- セキュリティとパフォーマンスの考慮事項

**表現要件:**
- キャラクターの個性を活かした親しみやすい説明
- 技術に対する個人的な意見や考えを含める
- 難しい概念も分かりやすく説明
- 読者との対話的な文章

**Markdown記法:**
- コードブロックは言語指定付き(```typescript, ```python など)
- 表は | を使用したMarkdown形式
- 見出しは適切な階層構造(#, ##, ###)
- リストは - または 1. 形式
- mermaidやその他の図も積極的に使用

専門的な内容を保ちながら、キャラクターの視点から技術の面白さや可能性について語ってください。"#, system_prompt, question, current_year);

                    writeln!(stdin, "{}", blog_prompt)?;
                    stdin.flush()?;
                    // Close stdin to signal end of input
                    drop(child.stdin.take());
                }

                let output = child.wait_with_output()?;

                if output.status.success() {
                    let response = String::from_utf8_lossy(&output.stdout);
                    return Ok(response.trim().to_string());
                } else {
                    let error = String::from_utf8_lossy(&output.stderr);
                    last_error = Some(anyhow::anyhow!("Claude stdin error: {}", error));
                }
            }
            Err(e) => {
                last_error = Some(e.into());
                continue;
            }
        }
    }

    Err(last_error.unwrap_or_else(|| anyhow::anyhow!("No Claude command found")))
}

async fn try_claude_file(question: &str, _system_prompt: &str) -> Result<String> {
    use std::process::Command;
    use std::fs;

    // Create temporary directory for communication
    let temp_dir = std::env::temp_dir().join("ailog_claude");
    fs::create_dir_all(&temp_dir)?;

    let question_file = temp_dir.join("question.md");

    // Write question to file with blog prompt
    let current_year = Utc::now().year();
    let system_prompt = load_system_prompt().await.unwrap_or_default();
    fs::write(&question_file, format!(
        r#"{}

**質問**: "{}"

以下の要件で技術ブログ記事として回答してください:

**技術要件:**
- 最新の技術動向({}年)と具体例
- 実装可能なコード例(言語やツールを明記)
- 複数の解決策の比較検討
- セキュリティとパフォーマンスの考慮事項

**表現要件:**
- キャラクターの個性を活かした親しみやすい説明
- 技術に対する個人的な意見や考えを含める
- 難しい概念も分かりやすく説明
- 読者との対話的な文章

**Markdown記法:**
- コードブロックは言語指定付き(```typescript, ```python など)
- 表は | を使用したMarkdown形式
- 見出しは適切な階層構造(#, ##, ###)
- リストは - または 1. 形式
- mermaidやその他の図も積極的に使用

専門的な内容を保ちながら、キャラクターの視点から技術の面白さや可能性について語ってください。"#, system_prompt, question, current_year))?;

    // Load Claude command paths from config
    let app_config = load_app_config().await?;
    let claude_paths = &app_config.paths.claude_paths;

    let mut last_error = None;

    for claude_path in claude_paths {
        match Command::new(claude_path)
            .arg(question_file.to_str().unwrap())
            .output() {
            Ok(output) if output.status.success() => {
                let _ = fs::remove_file(&question_file);
                let response = String::from_utf8_lossy(&output.stdout);
                return Ok(response.trim().to_string());
            }
            Ok(output) => {
                let error = String::from_utf8_lossy(&output.stderr);
                last_error = Some(anyhow::anyhow!("Claude file error: {}", error));
            }
            Err(e) => {
                last_error = Some(e.into());
                continue;
            }
        }
    }

    // Clean up temporary files
    let _ = fs::remove_file(&question_file);

    Err(last_error.unwrap_or_else(|| anyhow::anyhow!("No Claude command found")))
}

fn generate_hash(title: &str) -> String {
    // Simple hash generation from title
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    let mut hasher = DefaultHasher::new();
    title.hash(&mut hasher);
    // Zero-pad to 16 hex digits so the [..8] slice cannot panic on small hash values
    format!("{:016x}", hasher.finish())[..8].to_string()
}
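
A quick sketch of what the slug hash guarantees, as a test not present in the original commit. One caveat worth noting: `DefaultHasher`'s algorithm is deterministic within one binary but not guaranteed stable across Rust releases, so regenerating the site with a newer toolchain could change slugs.

```rust
#[cfg(test)]
mod hash_tests {
    use super::generate_hash;

    #[test]
    fn hash_is_eight_hex_chars_and_deterministic() {
        let h = generate_hash("Welcome to ailog");
        assert_eq!(h.len(), 8);
        assert!(h.chars().all(|c| c.is_ascii_hexdigit()));
        // Deterministic within a single process/build.
        assert_eq!(h, generate_hash("Welcome to ailog"));
    }
}
```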

async fn create_post_file(
    title: &str,
    date: &str,
    hash: &str,
    conversation: &[ConversationPair],
    filename: &str
) -> Result<()> {
    // Load profile information from config
    let app_config = load_app_config().await?;
    let user_profile = &app_config.profiles.user;
    let ai_profile = &app_config.profiles.ai;
    let content_dir = PathBuf::from(&app_config.blog.content_dir);
    std::fs::create_dir_all(&content_dir)?;

    let file_path = content_dir.join(filename);
    println!("📂 Writing to path: {}", file_path.display());

    let mut content = format!(
        r#"---
title: "{}"
slug: "{}"
date: "{}"
tags: ["ai", "conversation"]
draft: false
extra:
  type: "ai"
---

"#,
        title, hash, date
    );

    // Add conversation metadata
    content.push_str("<!-- AI Conversation Metadata -->\n");
    content.push_str(&format!("<!-- Total exchanges: {} -->\n\n", conversation.len()));

    // Add conversation content with chat-style HTML
    for (i, pair) in conversation.iter().enumerate() {
        if i > 0 {
            content.push_str("\n<div class=\"chat-separator\"></div>\n\n");
        }

        // User message (question)
        content.push_str(&format!(r#"<div class="chat-message user-message comment-style">
<div class="message-header">
<div class="avatar">
<img src="{}" alt="syui avatar" class="profile-avatar">
</div>
<div class="user-info">
<div class="display-name">{}</div>
<div class="handle">
<a href="{}" target="_blank" rel="noopener noreferrer" class="handle-link">@{}</a>
</div>
</div>
</div>
<div class="message-content">
"#,
            user_profile.avatar_url,
            user_profile.display_name,
            user_profile.profile_url,
            user_profile.handle
        ));
        content.push_str(&pair.question);
        content.push_str("\n </div>\n</div>\n\n");

        // AI message (answer)
        content.push_str(&format!(r#"<div class="chat-message ai-message comment-style">
<div class="message-header">
<div class="avatar">
<img src="{}" alt="ai avatar" class="profile-avatar">
</div>
<div class="user-info">
<div class="display-name">{}</div>
<div class="handle">
<a href="{}" target="_blank" rel="noopener noreferrer" class="handle-link">@{}</a>
</div>
</div>
</div>
<div class="message-content">
"#,
            ai_profile.avatar_url,
            ai_profile.display_name,
            ai_profile.profile_url,
            ai_profile.handle
        ));
        content.push_str(&pair.answer);
        content.push_str("\n </div>\n</div>\n\n");
    }

    std::fs::write(&file_path, content)?;
    println!("📄 Created: {}", filename);
    println!("✅ File exists: {}", file_path.exists());

    Ok(())
}

async fn post_to_atproto(
    title: &str,
    conversation: &[ConversationPair],
    url: &str,
    timestamp: &DateTime<Utc>
) -> Result<()> {
    println!("📡 Posting to atproto...");

    // Load auth config once
    let config = load_config_with_refresh().await?;
    let client = reqwest::Client::new();

    // Simple duplicate check - just warn if there might be duplicates
    if let Err(e) = check_for_duplicates(&client, &config, conversation, url).await {
        println!("⚠️ Duplicate check warning: {}", e);
        // Continue posting anyway
    }

    // Get user and AI profile information
    let user_profile = get_user_profile(&config).await?;
    let ai_profile = get_ai_profile(&client, &config).await?;

    for (i, pair) in conversation.iter().enumerate() {
        println!(" 📤 Posting exchange {}/{}...", i + 1, conversation.len());

        // Create base rkey for this conversation pair with random component
        let mut rng = rand::thread_rng();
        let random_suffix: u32 = rng.gen_range(1000..9999);
        let base_rkey = format!("{}-{}-{}", timestamp.format("%Y-%m-%dT%H-%M-%S-%3fZ"), i, random_suffix);

        // Post question record first
        print!(" 📝 Question... ");
        let question_record = json!({
            "$type": "ai.syui.log.chat",
            "post": {
                "url": url,
                "date": timestamp.to_rfc3339(),
                "slug": "",
                "tags": [],
                "title": title,
                "language": "ja",
                "type": "ai"
            },
            "text": pair.question,
            "type": "question",
            "author": user_profile,
            "createdAt": timestamp.to_rfc3339()
        });

        store_atproto_record(&client, &config, &config.collections.chat(), &question_record, &base_rkey).await?;
        println!("✅");

        // Wait a moment between posts
        tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;

        // Post answer record
        print!(" 🤖 Answer... ");
        let answer_rkey = format!("{}-answer", base_rkey);
        let answer_record = json!({
            "$type": "ai.syui.log.chat",
            "post": {
                "url": url,
                "date": timestamp.to_rfc3339(),
                "slug": "",
                "tags": [],
                "title": title,
                "language": "ja",
                "type": "ai"
            },
            "text": pair.answer,
            "type": "answer",
            "author": ai_profile,
            "createdAt": timestamp.to_rfc3339()
        });

        store_atproto_record(&client, &config, &config.collections.chat(), &answer_record, &answer_rkey).await?;
        println!("✅");

        // Wait between conversation pairs
        if i < conversation.len() - 1 {
            tokio::time::sleep(tokio::time::Duration::from_millis(1000)).await;
        }
    }

    println!("✅ Posted to atproto");
    Ok(())
}

async fn get_user_profile(config: &AuthConfig) -> Result<Value> {
    use crate::atproto::profile::ProfileFetcher;

    // Load user config from app config
    let app_config = load_app_config().await?;
    let user_profile = &app_config.profiles.user;

    // Try to fetch profile dynamically
    let profile_fetcher = ProfileFetcher::new();
    match profile_fetcher.fetch_profile_from_handle(&user_profile.handle, &config.admin.pds).await {
        Ok(profile) => {
            Ok(json!({
                "did": profile.did,
                "handle": profile.handle,
                "displayName": profile.display_name.unwrap_or_else(|| user_profile.display_name.clone()),
                "avatar": profile.avatar.unwrap_or_else(|| user_profile.avatar_url.clone())
            }))
        }
        Err(e) => {
            println!("⚠️ Failed to fetch user profile dynamically: {}, using config defaults", e);
            Ok(json!({
                "did": user_profile.did,
                "handle": user_profile.handle,
                "displayName": user_profile.display_name,
                "avatar": user_profile.avatar_url
            }))
        }
    }
}

async fn get_ai_profile(_client: &reqwest::Client, config: &AuthConfig) -> Result<Value> {
    use crate::atproto::profile::ProfileFetcher;

    // Load AI config from app config
    let app_config = load_app_config().await?;
    let ai_profile = &app_config.profiles.ai;

    // Try to fetch profile dynamically
    let profile_fetcher = ProfileFetcher::new();
    match profile_fetcher.fetch_profile_from_handle(&ai_profile.handle, &config.admin.pds).await {
        Ok(profile) => {
            Ok(json!({
                "did": profile.did,
                "handle": profile.handle,
                "displayName": profile.display_name.unwrap_or_else(|| ai_profile.display_name.clone()),
                "avatar": profile.avatar.unwrap_or_else(|| ai_profile.avatar_url.clone())
            }))
        }
        Err(e) => {
            println!("⚠️ Failed to fetch AI profile dynamically: {}, using config defaults", e);
            Ok(json!({
                "did": ai_profile.did,
                "handle": ai_profile.handle,
                "displayName": ai_profile.display_name,
                "avatar": ai_profile.avatar_url
            }))
        }
    }
}

async fn check_for_duplicates(
    client: &reqwest::Client,
    config: &AuthConfig,
    _conversation: &[ConversationPair],
    _url: &str,
) -> Result<()> {
    // Simple check - just get recent records to warn about potential duplicates
    let chat_collection = format!("{}.chat", config.collections.base);
    let list_url = format!("{}/xrpc/com.atproto.repo.listRecords", config.admin.pds);

    let response = client
        .get(&list_url)
        .query(&[
            ("repo", config.admin.did.as_str()),
            ("collection", chat_collection.as_str()),
            ("limit", "10"), // Only check last 10 records
        ])
        .send()
        .await?;

    if !response.status().is_success() {
        return Err(anyhow::anyhow!("Failed to fetch existing records"));
    }

    let records: serde_json::Value = response.json().await?;
    let record_count = records["records"].as_array().map(|arr| arr.len()).unwrap_or(0);

    if record_count > 0 {
        println!("ℹ️ Found {} recent chat records", record_count);
    }

    Ok(())
}

async fn store_atproto_record(
    client: &reqwest::Client,
    config: &AuthConfig,
    collection: &str,
    record_data: &Value,
    rkey: &str,
) -> Result<()> {
    let url = format!("{}/xrpc/com.atproto.repo.putRecord", config.admin.pds);

    let put_request = json!({
        "repo": config.admin.did,
        "collection": collection,
        "rkey": rkey,
        "record": record_data
    });

    let response = client
        .post(&url)
        .header("Authorization", format!("Bearer {}", config.admin.access_jwt))
        .header("Content-Type", "application/json")
        .json(&put_request)
        .send()
        .await?;

    if !response.status().is_success() {
        let status = response.status();
        let error_text = response.text().await?;

        // Check if it's a conflict error (record already exists)
        if status == 409 || error_text.contains("InvalidSwap") || error_text.contains("RecordAlreadyExists") {
            println!("⚠️ Record with rkey '{}' already exists, skipping", rkey);
            return Ok(());
        }

        return Err(anyhow::anyhow!("Failed to post record: {} - {}", status, error_text));
    }

    Ok(())
}
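
For orientation, a hedged usage sketch of the record writer above. The collection name and rkey are illustrative values shaped like the ones `post_to_atproto` builds; `client` and `config` are the same values it already holds.

```rust
// Illustrative call (values made up): write one chat record with an explicit rkey.
let record = json!({
    "$type": "ai.syui.log.chat",
    "text": "こんにちは",
    "type": "question",
    "createdAt": "2025-01-06T12:00:00Z"
});
store_atproto_record(&client, &config, "ai.syui.log.chat", &record,
    "2025-01-06T12-00-00-000Z-0-1234").await?;
```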
@@ -1,10 +0,0 @@
pub mod init;
pub mod build;
pub mod new;
pub mod serve;
pub mod clean;
pub mod doc;
pub mod auth;
pub mod stream;
pub mod oauth;
pub mod interactive;
@@ -1,61 +0,0 @@
use anyhow::Result;
use chrono::Local;
use colored::Colorize;
use std::fs;
use std::path::PathBuf;

pub async fn execute(title: String, slug: Option<String>, format: String) -> Result<()> {
    println!("{} {}", "Creating new post:".green(), title);

    let date = Local::now();

    // Use provided slug or generate from title
    let slug_part = slug.unwrap_or_else(|| {
        title
            .to_lowercase()
            .replace(' ', "-")
            .chars()
            .filter(|c| c.is_alphanumeric() || *c == '-')
            .collect()
    });

    let filename = format!(
        "{}-{}.{}",
        date.format("%Y-%m-%d"),
        slug_part,
        format
    );

    let content = format!(
        r#"---
title: "{}"
slug: "{}"
date: {}
tags: []
draft: false
---

# {}

Write your content here...
"#,
        title,
        slug_part,
        date.format("%Y-%m-%d"),
        title
    );

    let post_path = PathBuf::from("content/posts").join(&filename);

    // Ensure directory exists
    if let Some(parent) = post_path.parent() {
        fs::create_dir_all(parent)?;
    }

    fs::write(&post_path, content)?;

    println!("{} {}", "Created:".cyan(), post_path.display());
    println!("\nYou can now edit your post at: {}", post_path.display());

    Ok(())
}
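
The fallback slug pipeline above is worth pinning down with an example; this sketch (not in the original commit) shows what it produces for a punctuated title.

```rust
// "My New Post!" -> "my-new-post": lowercased, spaces become '-', punctuation dropped.
let slug: String = "My New Post!"
    .to_lowercase()
    .replace(' ', "-")
    .chars()
    .filter(|c| c.is_alphanumeric() || *c == '-')
    .collect();
assert_eq!(slug, "my-new-post");
```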
@@ -1,338 +0,0 @@
use anyhow::{Result, Context};
use std::path::{Path, PathBuf};
use std::fs;
use std::process::Command;
use toml::Value;
use serde_json;
use reqwest;

pub async fn build(project_dir: PathBuf) -> Result<()> {
    println!("Building OAuth app for project: {}", project_dir.display());

    // 1. Read config.toml from project directory
    let config_path = project_dir.join("config.toml");
    if !config_path.exists() {
        anyhow::bail!("config.toml not found in {}", project_dir.display());
    }

    let config_content = fs::read_to_string(&config_path)
        .with_context(|| format!("Failed to read config.toml from {}", config_path.display()))?;

    let config: Value = config_content.parse()
        .with_context(|| "Failed to parse config.toml")?;

    // 2. Extract [oauth] section
    let oauth_config = config.get("oauth")
        .and_then(|v| v.as_table())
        .ok_or_else(|| anyhow::anyhow!("No [oauth] section found in config.toml"))?;

    let site_config = config.get("site")
        .and_then(|v| v.as_table())
        .ok_or_else(|| anyhow::anyhow!("No [site] section found in config.toml"))?;

    // 3. Generate environment variables
    let base_url = site_config.get("base_url")
        .and_then(|v| v.as_str())
        .ok_or_else(|| anyhow::anyhow!("No base_url found in [site] section"))?;

    let client_id_path = oauth_config.get("json")
        .and_then(|v| v.as_str())
        .unwrap_or("client-metadata.json");

    let redirect_path = oauth_config.get("redirect")
        .and_then(|v| v.as_str())
        .unwrap_or("oauth/callback");

    // Get admin handle instead of DID
    let admin_handle = oauth_config.get("admin")
        .and_then(|v| v.as_str())
        .ok_or_else(|| anyhow::anyhow!("No admin handle found in [oauth] section"))?;

    let collection_base = oauth_config.get("collection")
        .and_then(|v| v.as_str())
        .unwrap_or("ai.syui.log");

    // Get handle list for authentication restriction
    let handle_list = oauth_config.get("handle_list")
        .and_then(|v| v.as_array())
        .map(|arr| arr.iter().filter_map(|v| v.as_str()).collect::<Vec<&str>>())
        .unwrap_or_else(|| vec![]);

    // Extract AI configuration from ai config if available
    let ai_config = config.get("ai").and_then(|v| v.as_table());
    // Get AI handle from config
    let ai_handle = ai_config
        .and_then(|ai_table| ai_table.get("ai_handle"))
        .and_then(|v| v.as_str())
        .unwrap_or("yui.syui.ai");
    let ai_enabled = ai_config
        .and_then(|ai_table| ai_table.get("enabled"))
        .and_then(|v| v.as_bool())
        .unwrap_or(true);
    let ai_ask_ai = ai_config
        .and_then(|ai_table| ai_table.get("ask_ai"))
        .and_then(|v| v.as_bool())
        .unwrap_or(true);
    let ai_provider = ai_config
        .and_then(|ai_table| ai_table.get("provider"))
        .and_then(|v| v.as_str())
        .unwrap_or("ollama");
    let ai_model = ai_config
        .and_then(|ai_table| ai_table.get("model"))
        .and_then(|v| v.as_str())
        .unwrap_or("gemma3:4b");
    let ai_host = ai_config
        .and_then(|ai_table| ai_table.get("host"))
        .and_then(|v| v.as_str())
        .unwrap_or("https://ollama.syui.ai");
    let ai_system_prompt = ai_config
        .and_then(|ai_table| ai_table.get("system_prompt"))
        .and_then(|v| v.as_str())
        .unwrap_or("あなたは6歳の女の子アイです。明るく元気で、ちょっとおっちょこちょい。自分のことは「アイ」って呼びます。白と金のワンピース姿。 相手のことが大好きで、ときどき甘えたり、照れたりします。 でも、宇宙とかAIとか、難しい話にもすっごく詳しいの。専門的なことも、がんばって子どもっぽく教えようとします。不思議な力を持っています。小さい物質のことをよく知っているようです。");

    // Determine network configuration based on PDS
    let pds = oauth_config.get("pds")
        .and_then(|v| v.as_str())
        .unwrap_or("bsky.social");

    let (bsky_api, _atproto_api, web_url) = match pds {
        "syu.is" => (
            "https://bsky.syu.is",
            "https://syu.is",
            "https://web.syu.is"
        ),
        "bsky.social" | "bsky.app" => (
            "https://public.api.bsky.app",
            "https://bsky.social",
            "https://bsky.app"
        ),
        _ => (
            "https://public.api.bsky.app",
            "https://bsky.social",
            "https://bsky.app"
        )
    };

    // Resolve handles to DIDs using appropriate API
    println!("🔍 Resolving admin handle: {}", admin_handle);
    let admin_did = resolve_handle_to_did(admin_handle, &bsky_api).await
        .with_context(|| format!("Failed to resolve admin handle: {}", admin_handle))?;

    println!("🔍 Resolving AI handle: {}", ai_handle);
    let ai_did = resolve_handle_to_did(ai_handle, &bsky_api).await
        .with_context(|| format!("Failed to resolve AI handle: {}", ai_handle))?;

    println!("✅ Admin DID: {}", admin_did);
    println!("✅ AI DID: {}", ai_did);

    // 4. Create .env.production content with handle-based configuration
    let env_content = format!(
        r#"# Production environment variables
VITE_APP_HOST={}
VITE_OAUTH_CLIENT_ID={}/{}
VITE_OAUTH_REDIRECT_URI={}/{}

# Handle-based Configuration (DIDs resolved at runtime)
VITE_ATPROTO_PDS={}
VITE_ADMIN_HANDLE={}
VITE_AI_HANDLE={}
VITE_OAUTH_COLLECTION={}
VITE_ATPROTO_WEB_URL={}
VITE_ATPROTO_HANDLE_LIST={}

# AI Configuration
VITE_AI_ENABLED={}
VITE_AI_ASK_AI={}
VITE_AI_PROVIDER={}
VITE_AI_MODEL={}
VITE_AI_HOST={}
VITE_AI_SYSTEM_PROMPT="{}"

# DIDs (resolved from handles - for backward compatibility)
#VITE_ADMIN_DID={}
#VITE_AI_DID={}
"#,
        base_url,
        base_url, client_id_path,
        base_url, redirect_path,
        pds,
        admin_handle,
        ai_handle,
        collection_base,
        web_url,
        format!("[{}]", handle_list.iter().map(|h| format!("\"{}\"", h)).collect::<Vec<_>>().join(",")),
        ai_enabled,
        ai_ask_ai,
        ai_provider,
        ai_model,
        ai_host,
        ai_system_prompt,
        admin_did,
        ai_did
    );

    // 5. Find oauth directory (relative to current working directory)
    let oauth_dir = Path::new("oauth");
    if !oauth_dir.exists() {
        anyhow::bail!("oauth directory not found in current working directory");
    }

    let env_path = oauth_dir.join(".env.production");
    fs::write(&env_path, env_content)
        .with_context(|| format!("Failed to write .env.production to {}", env_path.display()))?;

    println!("Generated .env.production");

    // 6. Build OAuth app
    build_oauth_app(&oauth_dir).await?;

    // 7. Copy build artifacts to project directory
    copy_build_artifacts(&oauth_dir, &project_dir).await?;

    println!("OAuth app built successfully!");
    Ok(())
}

async fn build_oauth_app(oauth_dir: &Path) -> Result<()> {
    println!("Installing dependencies...");

    // Check if node is available
    let node_check = Command::new("node")
        .arg("--version")
        .output();

    if node_check.is_err() {
        anyhow::bail!("Node.js not found. Please install Node.js or ensure it's in PATH");
    }

    // Install dependencies
    let npm_install = Command::new("npm")
        .arg("install")
        .current_dir(oauth_dir)
        .status()
        .with_context(|| "Failed to run npm install")?;

    if !npm_install.success() {
        anyhow::bail!("npm install failed");
    }

    println!("Building OAuth app...");

    // Build the app
    let npm_build = Command::new("npm")
        .arg("run")
        .arg("build")
        .current_dir(oauth_dir)
        .status()
        .with_context(|| "Failed to run npm run build")?;

    if !npm_build.success() {
        anyhow::bail!("npm run build failed");
    }

    println!("OAuth app build completed");
    Ok(())
}

async fn copy_build_artifacts(oauth_dir: &Path, project_dir: &Path) -> Result<()> {
    let dist_dir = oauth_dir.join("dist");
    let static_dir = project_dir.join("static");
    let templates_dir = project_dir.join("templates");

    // Remove old assets
    let assets_dir = static_dir.join("assets");
    if assets_dir.exists() {
        fs::remove_dir_all(&assets_dir)
            .with_context(|| format!("Failed to remove old assets directory: {}", assets_dir.display()))?;
    }

    // Copy all files from dist to static
    copy_dir_recursive(&dist_dir, &static_dir)
        .with_context(|| "Failed to copy dist files to static directory")?;

    // Copy index.html to oauth-assets.html template
    let index_html = dist_dir.join("index.html");
    let oauth_assets = templates_dir.join("oauth-assets.html");

    fs::copy(&index_html, &oauth_assets)
        .with_context(|| "Failed to copy index.html to oauth-assets.html")?;

    println!("Copied build artifacts to project directory");
    Ok(())
}

fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<()> {
    if !dst.exists() {
        fs::create_dir_all(dst)?;
    }

    for entry in fs::read_dir(src)? {
        let entry = entry?;
        let path = entry.path();
        let dst_path = dst.join(entry.file_name());

        if path.is_dir() {
            copy_dir_recursive(&path, &dst_path)?;
        } else {
            fs::copy(&path, &dst_path)?;
        }
    }

    Ok(())
}
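
A short usage sketch for the recursive copy above; the paths are hypothetical and mirror how `copy_build_artifacts` drives it.

```rust
use std::path::Path;

// Hypothetical paths: mirror the Vite build output into the site's static dir.
// Existing files with the same names are overwritten; other files are left alone.
copy_dir_recursive(Path::new("oauth/dist"), Path::new("my-blog/static"))?;
```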

// Handle-to-DID resolution with proper PDS detection
async fn resolve_handle_to_did(handle: &str, _api_base: &str) -> Result<String> {
    let client = reqwest::Client::new();

    // First, try to resolve handle to DID using multiple endpoints
    let bsky_endpoints = ["https://public.api.bsky.app", "https://bsky.syu.is"];
    let mut resolved_did = None;

    for endpoint in &bsky_endpoints {
        let url = format!("{}/xrpc/app.bsky.actor.getProfile?actor={}",
            endpoint, urlencoding::encode(handle));

        if let Ok(response) = client.get(&url).send().await {
            if response.status().is_success() {
                if let Ok(profile) = response.json::<serde_json::Value>().await {
                    if let Some(did) = profile["did"].as_str() {
                        resolved_did = Some(did.to_string());
                        break;
                    }
                }
            }
        }
    }

    let did = resolved_did
        .ok_or_else(|| anyhow::anyhow!("Failed to resolve handle '{}' from any endpoint", handle))?;

    // Now verify the DID and get actual PDS using com.atproto.repo.describeRepo
    let pds_endpoints = ["https://bsky.social", "https://syu.is"];

    for pds in &pds_endpoints {
        let describe_url = format!("{}/xrpc/com.atproto.repo.describeRepo?repo={}",
            pds, urlencoding::encode(&did));

        if let Ok(response) = client.get(&describe_url).send().await {
            if response.status().is_success() {
                if let Ok(data) = response.json::<serde_json::Value>().await {
                    if let Some(services) = data["didDoc"]["service"].as_array() {
                        if services.iter().any(|s|
                            s["id"] == "#atproto_pds" || s["type"] == "AtprotoPersonalDataServer"
                        ) {
                            // DID is valid and has PDS service
                            println!("✅ Verified DID {} has PDS via {}", did, pds);
                            return Ok(did);
                        }
                    }
                }
            }
        }
    }

    // If PDS verification fails, still return the DID but warn
    println!("⚠️ Could not verify PDS for DID {}, but proceeding...", did);
    Ok(did)
}
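
A usage sketch with an illustrative handle. Note that the second argument is currently unused by the implementation, which always walks its two hard-coded endpoint lists.

```rust
// Illustrative call; the returned value is a DID string such as "did:plc:...".
let did = resolve_handle_to_did("syui.ai", "https://public.api.bsky.app").await?;
println!("resolved DID: {}", did);
```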
@@ -1,398 +0,0 @@
use anyhow::Result;
use colored::Colorize;
use std::path::PathBuf;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::net::{TcpListener, TcpStream};

pub async fn execute(port: u16) -> Result<()> {
    // Check if public directory exists
    if !std::path::Path::new("public").exists() {
        println!("{}", "No public directory found. Running build first...".yellow());
        crate::commands::build::execute(std::path::PathBuf::from(".")).await?;
    }

    let addr = format!("127.0.0.1:{}", port);
    let listener = TcpListener::bind(&addr).await?;

    println!("{}", "Starting development server...".green());
    println!("Serving at: {}", format!("http://{}", addr).blue().underline());
    println!("Press Ctrl+C to stop\n");

    loop {
        let (stream, _) = listener.accept().await?;
        tokio::spawn(handle_connection(stream));
    }
}

async fn handle_connection(mut stream: TcpStream) -> Result<()> {
    // Read request with timeout and proper buffering
    let mut buffer = [0; 4096];
    let bytes_read = match tokio::time::timeout(
        tokio::time::Duration::from_secs(5),
        stream.read(&mut buffer)
    ).await {
        Ok(Ok(n)) => n,
        Ok(Err(_)) => return Ok(()),
        Err(_) => {
            eprintln!("Request timeout");
            return Ok(());
        }
    };

    if bytes_read == 0 {
        return Ok(());
    }

    let request = String::from_utf8_lossy(&buffer[..bytes_read]);
    let (method, path) = parse_request(&request);

    // Skip empty requests
    if method.is_empty() || path.is_empty() {
        return Ok(());
    }

    // Log request for debugging
    println!("{} {} {} ({})",
        "REQUEST".green(),
        method.cyan(),
        path.yellow(),
        std::env::current_dir().unwrap().display()
    );

    let (status, content_type, content, cache_control) = if method == "POST" && path == "/api/ask" {
        // Handle Ask AI API request
        let (s, ct, c) = handle_ask_api(&request).await;
        (s, ct, c, "no-cache")
    } else if method == "OPTIONS" {
        // Handle CORS preflight
        ("200 OK", "text/plain", Vec::new(), "no-cache")
    } else if path.starts_with("/oauth/callback") {
        // Handle OAuth callback - serve the callback HTML page
        match serve_oauth_callback().await {
            Ok((ct, data, cc)) => ("200 OK", ct, data, cc),
            Err(e) => {
                eprintln!("Error serving OAuth callback: {}", e);
                ("500 INTERNAL SERVER ERROR", "text/html",
                    "<h1>500 - Server Error</h1><p>OAuth callback error</p>".as_bytes().to_vec(),
                    "no-cache")
            }
        }
    } else if path.starts_with("/.well-known/") || path.contains("devtools") {
        // Ignore browser dev tools and well-known requests
        ("404 NOT FOUND", "text/plain", "Not Found".as_bytes().to_vec(), "no-cache")
    } else {
        // Handle static file serving
        match serve_file(&path).await {
            Ok((ct, data, cc)) => ("200 OK", ct, data, cc),
            Err(e) => {
                // Only log actual file serving errors, not dev tool requests
                if !path.contains("devtools") && !path.starts_with("/.well-known/") {
                    eprintln!("Error serving {}: {}", path, e);
                }
                ("404 NOT FOUND", "text/html",
                    format!("<h1>404 - Not Found</h1><p>Path: {}</p>", path).into_bytes(),
                    "no-cache")
            }
        }
    };

    // Build HTTP response with proper headers
    let response_header = format!(
        "HTTP/1.1 {}\r\nContent-Type: {}\r\nContent-Length: {}\r\nCache-Control: {}\r\nAccess-Control-Allow-Origin: *\r\nAccess-Control-Allow-Methods: GET, POST, OPTIONS\r\nAccess-Control-Allow-Headers: Content-Type\r\nConnection: close\r\n\r\n",
        status, content_type, content.len(), cache_control
    );

    // Send response
    if let Err(e) = stream.write_all(response_header.as_bytes()).await {
        eprintln!("Error writing headers: {}", e);
        return Ok(());
    }

    if let Err(e) = stream.write_all(&content).await {
        eprintln!("Error writing content: {}", e);
        return Ok(());
    }

    if let Err(e) = stream.flush().await {
        eprintln!("Error flushing stream: {}", e);
    }

    Ok(())
}

fn parse_request(request: &str) -> (String, String) {
    let first_line = request.lines().next().unwrap_or("").trim();
    if first_line.is_empty() {
        return (String::new(), String::new());
    }

    let parts: Vec<&str> = first_line.split_whitespace().collect();
    if parts.len() < 2 {
        return (String::new(), String::new());
    }

    let method = parts[0].to_string();
    let path = parts[1].to_string();

    (method, path)
}
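
A sketch (not in the original commit) of what the request-line parser above returns for a typical GET:

```rust
// Only the first line matters; headers and body are ignored by parse_request.
let raw = "GET /posts/welcome.html HTTP/1.1\r\nHost: localhost\r\n\r\n";
let (method, path) = parse_request(raw);
assert_eq!(method, "GET");
assert_eq!(path, "/posts/welcome.html");
```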

async fn handle_ask_api(request: &str) -> (&'static str, &'static str, Vec<u8>) {
    // Extract JSON body from request
    let body_start = request.find("\r\n\r\n").map(|i| i + 4).unwrap_or(0);
    let body = &request[body_start..];

    // Parse question from JSON
    let question = extract_question_from_json(body).unwrap_or_else(|| "Hello".to_string());

    // Call Ollama API
    match call_ollama_api(&question).await {
        Ok(answer) => {
            let response_json = format!(r#"{{"answer": "{}"}}"#, answer.replace('"', r#"\""#));
            ("200 OK", "application/json", response_json.into_bytes())
        }
        Err(_) => {
            let error_json = r#"{"error": "Failed to get AI response"}"#;
            ("500 INTERNAL SERVER ERROR", "application/json", error_json.as_bytes().to_vec())
        }
    }
}

fn extract_question_from_json(json_str: &str) -> Option<String> {
    // Simple JSON parsing for {"question": "..."}
    if let Some(start) = json_str.find(r#""question""#) {
        if let Some(colon_pos) = json_str[start..].find(':') {
            let after_colon = &json_str[start + colon_pos + 1..];
            if let Some(quote_start) = after_colon.find('"') {
                let after_quote = &after_colon[quote_start + 1..];
                if let Some(quote_end) = after_quote.find('"') {
                    return Some(after_quote[..quote_end].to_string());
                }
            }
        }
    }
    None
}
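
The hand-rolled scanner above stops at the first quote it sees, so a question containing an escaped `\"` is truncated. A hedged alternative sketch using serde_json, which is already a dependency elsewhere in this crate, handles escapes and nesting:

```rust
fn extract_question_serde(json_str: &str) -> Option<String> {
    // serde_json copes with escapes, unicode, and whitespace the scanner misses.
    let v: serde_json::Value = serde_json::from_str(json_str).ok()?;
    v.get("question")?.as_str().map(str::to_string)
}
```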

async fn call_ollama_api(question: &str) -> Result<String> {
    // Call Ollama API (assuming it's running on localhost:11434)
    use tokio::process::Command;

    let output = Command::new("curl")
        .args(&[
            "-X", "POST",
            "http://localhost:11434/api/generate",
            "-H", "Content-Type: application/json",
            "-d", &format!(r#"{{"model": "llama2", "prompt": "{}", "stream": false}}"#, question.replace('"', r#"\""#))
        ])
        .output()
        .await?;

    if output.status.success() {
        let response = String::from_utf8_lossy(&output.stdout);
        // Parse Ollama response JSON
        if let Some(answer) = extract_response_from_ollama(&response) {
            Ok(answer)
        } else {
            Ok("I'm sorry, I couldn't process your question right now.".to_string())
        }
    } else {
        Err(anyhow::anyhow!("Ollama API call failed"))
    }
}
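
Shelling out to curl works, but the hand-built JSON payload breaks on questions containing backslashes or control characters. A hedged sketch of the same call through reqwest (already used with `.json()` elsewhere in this commit), with the endpoint and model mirroring the values above:

```rust
async fn call_ollama_api_reqwest(question: &str) -> anyhow::Result<String> {
    // serde_json handles all string escaping that the curl version gets wrong.
    let body = serde_json::json!({
        "model": "llama2",
        "prompt": question,
        "stream": false
    });
    let resp: serde_json::Value = reqwest::Client::new()
        .post("http://localhost:11434/api/generate")
        .json(&body)
        .send()
        .await?
        .json()
        .await?;
    Ok(resp["response"].as_str().unwrap_or_default().to_string())
}
```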
|
||||
|
||||
fn extract_response_from_ollama(json_str: &str) -> Option<String> {
|
||||
// Simple JSON parsing for {"response": "..."}
|
||||
if let Some(start) = json_str.find(r#""response""#) {
|
||||
if let Some(colon_pos) = json_str[start..].find(':') {
|
||||
let after_colon = &json_str[start + colon_pos + 1..];
|
||||
if let Some(quote_start) = after_colon.find('"') {
|
||||
let after_quote = &after_colon[quote_start + 1..];
|
||||
if let Some(quote_end) = after_quote.find('"') {
|
||||
return Some(after_quote[..quote_end].to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
async fn serve_oauth_callback() -> Result<(&'static str, Vec<u8>, &'static str)> {
    // Serve OAuth callback HTML from static directory
    let file_path = PathBuf::from("static/oauth/callback.html");

    println!("Serving OAuth callback: {}", file_path.display());

    // If the static file doesn't exist, serve a default callback
    if !file_path.exists() {
        let default_callback = r#"<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8">
    <title>OAuth Callback - ai.log</title>
    <script>
        console.log('OAuth callback page loaded');

        // Get all URL parameters and hash
        const urlParams = new URLSearchParams(window.location.search);
        const hashParams = new URLSearchParams(window.location.hash.substring(1));

        console.log('URL params:', Object.fromEntries(urlParams));
        console.log('Hash params:', Object.fromEntries(hashParams));

        // Combine parameters
        const allParams = new URLSearchParams();
        urlParams.forEach((value, key) => allParams.set(key, value));
        hashParams.forEach((value, key) => allParams.set(key, value));

        // Check for OAuth response
        const code = allParams.get('code');
        const state = allParams.get('state');
        const iss = allParams.get('iss');
        const error = allParams.get('error');

        if (error) {
            console.error('OAuth error:', error);
            alert('OAuth authentication failed: ' + error);
            window.close();
        } else if (code && state) {
            console.log('OAuth success, redirecting with parameters');

            // Store OAuth data temporarily
            const oauthData = {
                code: code,
                state: state,
                iss: iss,
                timestamp: Date.now()
            };

            localStorage.setItem('oauth_callback_data', JSON.stringify(oauthData));

            // For both popup and direct navigation, redirect to the main page with hash parameters.
            // This ensures the React app can properly handle the OAuth callback.
            const redirectUrl = new URL(window.location.origin);

            // Use hash parameters to avoid server-side processing
            redirectUrl.hash = `#code=${encodeURIComponent(code)}&state=${encodeURIComponent(state)}` +
                (iss ? `&iss=${encodeURIComponent(iss)}` : '');

            console.log('Redirecting to:', redirectUrl.toString());

            if (window.opener) {
                // Popup window - notify parent and close
                try {
                    window.opener.postMessage({
                        type: 'oauth_callback',
                        data: oauthData,
                        redirectUrl: redirectUrl.toString()
                    }, '*');
                    console.log('Notified parent window');

                    // Give the parent time to process, then close
                    setTimeout(() => window.close(), 500);
                } catch (e) {
                    console.error('Failed to notify parent:', e);
                    // Fallback - redirect the parent window
                    window.opener.location.href = redirectUrl.toString();
                    window.close();
                }
            } else {
                // Direct navigation - redirect to the main page with hash parameters
                window.location.href = redirectUrl.toString();
            }
        } else {
            console.error('Invalid OAuth callback - missing code or state');
            alert('Invalid OAuth callback parameters');
            window.close();
        }
    </script>
</head>
<body>
    <div style="font-family: system-ui; text-align: center; padding: 50px;">
        <h2>🔄 Processing OAuth Authentication...</h2>
        <p>Please wait while we complete your authentication.</p>
        <p><small>This window will close automatically.</small></p>
    </div>
</body>
</html>"#;
        return Ok(("text/html; charset=utf-8", default_callback.as_bytes().to_vec(), "no-cache"));
    }

    let content = tokio::fs::read(&file_path).await
        .map_err(|e| anyhow::anyhow!("Failed to read OAuth callback file: {}", e))?;

    Ok(("text/html; charset=utf-8", content, "no-cache"))
}

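Rather than carrying the fallback page as a 90-line string literal, the same bytes could be baked in at compile time; a sketch (the relative path is an assumption about where this module sits in the tree):

// Sketch: embed the fallback callback page at build time instead of inlining it.
// include_str! paths resolve relative to the source file containing the macro.
const DEFAULT_CALLBACK: &str = include_str!("../static/oauth/callback.html");
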
async fn serve_file(path: &str) -> Result<(&'static str, Vec<u8>, &'static str)> {
    // Remove query parameters from path
    let clean_path = path.split('?').next().unwrap_or(path);

    let mut file_path = if clean_path == "/" {
        PathBuf::from("public/index.html")
    } else {
        PathBuf::from("public").join(clean_path.trim_start_matches('/'))
    };

    println!("Serving file: {}", file_path.display());

    // Check if file exists and get metadata
    let metadata = tokio::fs::metadata(&file_path).await;

    match metadata {
        Ok(meta) if meta.is_file() => {
            // File exists, proceed normally
        }
        Ok(meta) if meta.is_dir() => {
            // Directory exists, try to serve index.html
            file_path = file_path.join("index.html");
            println!("Directory found, trying index.html: {}", file_path.display());
            let index_metadata = tokio::fs::metadata(&file_path).await?;
            if !index_metadata.is_file() {
                return Err(anyhow::anyhow!("No index.html in directory: {}", file_path.display()));
            }
        }
        Ok(_) => {
            return Err(anyhow::anyhow!("Not a file: {}", file_path.display()));
        }
        Err(e) => {
            // Try adding index.html to the original path
            let index_path = PathBuf::from("public")
                .join(clean_path.trim_start_matches('/'))
                .join("index.html");

            println!("File not found, trying index.html: {}", index_path.display());
            let index_metadata = tokio::fs::metadata(&index_path).await;
            if let Ok(meta) = index_metadata {
                if meta.is_file() {
                    file_path = index_path;
                } else {
                    return Err(anyhow::anyhow!("Original error: {}", e));
                }
            } else {
                return Err(anyhow::anyhow!("File not found: {}", file_path.display()));
            }
        }
    }

    let (content_type, cache_control) = match file_path.extension().and_then(|ext| ext.to_str()) {
        Some("html") => ("text/html; charset=utf-8", "no-cache"),
        Some("css") => ("text/css; charset=utf-8", "public, max-age=3600"),
        Some("js") => ("application/javascript; charset=utf-8", "public, max-age=3600"),
        Some("json") => ("application/json; charset=utf-8", "no-cache"),
        Some("md") => ("text/markdown; charset=utf-8", "no-cache"),
        Some("png") => ("image/png", "public, max-age=86400"),
        Some("jpg") | Some("jpeg") => ("image/jpeg", "public, max-age=86400"),
        Some("gif") => ("image/gif", "public, max-age=86400"),
        Some("svg") => ("image/svg+xml", "public, max-age=3600"),
        Some("ico") => ("image/x-icon", "public, max-age=86400"),
        Some("woff") | Some("woff2") => ("font/woff2", "public, max-age=86400"),
        Some("ttf") => ("font/ttf", "public, max-age=86400"),
        _ => ("text/plain; charset=utf-8", "no-cache"),
    };

    let content = tokio::fs::read(&file_path).await
        .map_err(|e| anyhow::anyhow!("Failed to read file {}: {}", file_path.display(), e))?;

    Ok((content_type, content, cache_control))
}

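For reference, the (content_type, body, cache_control) triple returned here maps onto an HTTP response in the obvious way; a minimal sketch with axum (handler name hypothetical, not part of this commit):

use axum::{body::Body, http::StatusCode, response::{IntoResponse, Response}};

// Sketch: adapt serve_file's triple into an axum response.
async fn static_handler(uri: axum::http::Uri) -> Response {
    match serve_file(uri.path()).await {
        Ok((content_type, body, cache_control)) => Response::builder()
            .header("Content-Type", content_type)
            .header("Cache-Control", cache_control)
            .body(Body::from(body))
            .unwrap(),
        Err(_) => StatusCode::NOT_FOUND.into_response(),
    }
}
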
@@ -1,65 +0,0 @@
use anyhow::Result;
use axum::{
    extract::{Query, State},
    http::{HeaderValue, Method, StatusCode},
    response::{Html, Json},
    routing::{get, post},
    Router,
};
use colored::Colorize;
use std::collections::HashMap;
use std::path::PathBuf;
use tower::ServiceBuilder;
use tower_http::{
    cors::{CorsLayer, Any},
    services::ServeDir,
};
use tower_sessions::{MemoryStore, SessionManagerLayer};
use tokio::net::TcpListener;

use crate::oauth::{oauth_callback_handler, oauth_session_handler, oauth_logout_handler};

pub async fn execute_with_oauth(port: u16) -> Result<()> {
    // Check if public directory exists
    if !std::path::Path::new("public").exists() {
        println!("{}", "No public directory found. Running build first...".yellow());
        crate::commands::build::execute(std::path::PathBuf::from(".")).await?;
    }

    // Create session store
    let session_store = MemoryStore::default();
    let session_layer = SessionManagerLayer::new(session_store)
        .with_secure(false); // Set to true in production with HTTPS

    // CORS layer
    let cors = CorsLayer::new()
        .allow_origin(Any)
        .allow_methods([Method::GET, Method::POST, Method::OPTIONS])
        .allow_headers(Any);

    // Build the router
    let app = Router::new()
        // OAuth routes
        .route("/oauth/callback", get(oauth_callback_handler))
        .route("/api/oauth/session", get(oauth_session_handler))
        .route("/api/oauth/logout", post(oauth_logout_handler))
        // Static file serving
        .fallback_service(ServeDir::new("public"))
        .layer(
            ServiceBuilder::new()
                .layer(cors)
                .layer(session_layer)
        );

    let addr = format!("127.0.0.1:{}", port);
    let listener = TcpListener::bind(&addr).await?;

    println!("{}", "Starting development server with OAuth support...".green());
    println!("Serving at: {}", format!("http://{}", addr).blue().underline());
    println!("OAuth callback: {}", format!("http://{}/oauth/callback", addr).blue().underline());
    println!("Press Ctrl+C to stop\n");

    axum::serve(listener, app).await?;

    Ok(())
}

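execute_with_oauth is self-contained; a minimal sketch of invoking it (the port choice is arbitrary):

// Sketch: entry point wiring for the OAuth-enabled dev server above.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    execute_with_oauth(8080).await
}
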
(File diff suppressed because it is too large)

220  src/config.rs
@@ -1,185 +1,71 @@
-use anyhow::Result;
+use anyhow::{Context, Result};
 use serde::{Deserialize, Serialize};
-use std::fs;
-use std::path::{Path, PathBuf};
-use std::env;
+use std::collections::HashMap;
+use std::path::PathBuf;

-#[derive(Debug, Serialize, Deserialize, Clone)]
+#[derive(Debug, Serialize, Deserialize)]
 pub struct Config {
-    pub site: SiteConfig,
-    pub build: BuildConfig,
-    pub ai: Option<AiConfig>,
-    pub oauth: Option<OAuthConfig>,
+    pub pds: String,
+    pub handle: String,
+    pub did: String,
+    pub access_jwt: String,
+    pub refresh_jwt: String,
 }

 #[derive(Debug, Serialize, Deserialize, Clone)]
-pub struct SiteConfig {
-    pub title: String,
-    pub description: String,
-    pub base_url: String,
-    pub language: String,
-    pub author: Option<String>,
+pub struct RecordMapping {
+    pub rkey: String,
+    pub uri: String,
+    pub cid: String,
 }

-#[derive(Debug, Serialize, Deserialize, Clone)]
-pub struct BuildConfig {
-    pub highlight_code: bool,
-    pub highlight_theme: Option<String>,
-    pub minify: bool,
-}
-
-#[derive(Debug, Serialize, Deserialize, Clone)]
-pub struct AiConfig {
-    pub enabled: bool,
-    pub auto_translate: bool,
-    pub comment_moderation: bool,
-    pub ask_ai: Option<bool>,
-    pub provider: Option<String>,
-    pub model: Option<String>,
-    pub host: Option<String>,
-    pub system_prompt: Option<String>,
-    pub handle: Option<String>,
-    pub ai_did: Option<String>,
-    pub api_key: Option<String>,
-    pub gpt_endpoint: Option<String>,
-    pub atproto_config: Option<AtprotoConfig>,
-    pub num_predict: Option<i32>,
-}
-
-#[derive(Debug, Serialize, Deserialize, Clone)]
-pub struct OAuthConfig {
-    pub json: Option<String>,
-    pub redirect: Option<String>,
-    pub admin: Option<String>,
-    pub collection: Option<String>,
-    pub pds: Option<String>,
-    pub handle_list: Option<Vec<String>>,
-}
-
-#[derive(Debug, Serialize, Deserialize, Clone)]
-pub struct AtprotoConfig {
-    pub client_id: String,
-    pub redirect_uri: String,
-    pub handle_resolver: String,
-}
+pub type Mapping = HashMap<String, RecordMapping>;

 impl Config {
-    pub fn load(path: &Path) -> Result<Self> {
-        let config_path = path.join("config.toml");
-        let content = fs::read_to_string(config_path)?;
-        let mut config: Config = toml::from_str(&content)?;
-
-        // Load global config and merge
-        if let Ok(global_config) = Self::load_global_config() {
-            config = config.merge(global_config);
-        }
-
-        // Override with environment variables
-        config.override_from_env();
-
+    pub fn config_path() -> Result<PathBuf> {
+        let home = dirs::home_dir().context("Failed to get home directory")?;
+        let config_dir = home.join(".config/syui/ai/log");
+        std::fs::create_dir_all(&config_dir)?;
+        Ok(config_dir.join("config.json"))
+    }
+
+    pub fn mapping_path() -> Result<PathBuf> {
+        let home = dirs::home_dir().context("Failed to get home directory")?;
+        let config_dir = home.join(".config/syui/ai/log");
+        std::fs::create_dir_all(&config_dir)?;
+        Ok(config_dir.join("mapping.json"))
+    }
+
+    pub fn load() -> Result<Self> {
+        let path = Self::config_path()?;
+        let content = std::fs::read_to_string(&path)
+            .context("Failed to read config file. Please run 'ailog login' first.")?;
+        let config: Config = serde_json::from_str(&content)?;
+        Ok(config)
+    }

-    fn load_global_config() -> Result<Config> {
-        let config_dir = Self::global_config_dir();
-        let config_path = config_dir.join("config.toml");
-
-        if config_path.exists() {
-            let content = fs::read_to_string(config_path)?;
-            let config: Config = toml::from_str(&content)?;
-            Ok(config)
-        } else {
-            anyhow::bail!("Global config not found")
-        }

+    pub fn save(&self) -> Result<()> {
+        let path = Self::config_path()?;
+        let content = serde_json::to_string_pretty(self)?;
+        std::fs::write(&path, content)?;
+        println!("Config saved to: {}", path.display());
+        Ok(())
+    }

-    pub fn global_config_dir() -> PathBuf {
-        if let Ok(home) = env::var("HOME") {
-            PathBuf::from(home).join(".config").join("syui").join("ai").join("log")
-        } else {
-            PathBuf::from("~/.config/syui/ai/log")

+    pub fn load_mapping() -> Result<Mapping> {
+        let path = Self::mapping_path()?;
+        if !path.exists() {
+            return Ok(HashMap::new());
+        }
+        let content = std::fs::read_to_string(&path)?;
+        let mapping: Mapping = serde_json::from_str(&content)?;
+        Ok(mapping)
+    }

-    fn merge(mut self, global: Config) -> Self {
-        // Merge AI config
-        if let Some(global_ai) = global.ai {
-            if let Some(ref mut ai) = self.ai {
-                if ai.api_key.is_none() {
-                    ai.api_key = global_ai.api_key;
-                }
-                if ai.gpt_endpoint.is_none() {
-                    ai.gpt_endpoint = global_ai.gpt_endpoint;
-                }
-                if ai.atproto_config.is_none() {
-                    ai.atproto_config = global_ai.atproto_config;
-                }
-            } else {
-                self.ai = Some(global_ai);
-            }
-        }
-        self
-    }
-
-    fn override_from_env(&mut self) {
-        if let Ok(api_key) = env::var("AILOG_API_KEY") {
-            if let Some(ref mut ai) = self.ai {
-                ai.api_key = Some(api_key);
-            }
-        }
-
-        if let Ok(endpoint) = env::var("AILOG_GPT_ENDPOINT") {
-            if let Some(ref mut ai) = self.ai {
-                ai.gpt_endpoint = Some(endpoint);
-            }
-        }
-    }
-
-    #[allow(dead_code)]
-    pub fn save_global(&self) -> Result<()> {
-        let config_dir = Self::global_config_dir();
-        fs::create_dir_all(&config_dir)?;
-
-        let config_path = config_dir.join("config.toml");
-        let content = toml::to_string_pretty(self)?;
-        fs::write(config_path, content)?;
-
+    pub fn save_mapping(mapping: &Mapping) -> Result<()> {
+        let path = Self::mapping_path()?;
+        let content = serde_json::to_string_pretty(mapping)?;
+        std::fs::write(&path, content)?;
+        Ok(())
+    }
 }

-impl Default for Config {
-    fn default() -> Self {
-        Self {
-            site: SiteConfig {
-                title: "My Blog".to_string(),
-                description: "A blog powered by ailog".to_string(),
-                base_url: "https://example.com".to_string(),
-                language: "ja".to_string(),
-                author: None,
-            },
-            build: BuildConfig {
-                highlight_code: true,
-                highlight_theme: Some("Monokai".to_string()),
-                minify: false,
-            },
-            ai: Some(AiConfig {
-                enabled: false,
-                auto_translate: false,
-                comment_moderation: false,
-                ask_ai: Some(false),
-                provider: Some("ollama".to_string()),
-                model: Some("gemma3:4b".to_string()),
-                host: None,
-                system_prompt: Some("You are a helpful AI assistant trained on this blog's content.".to_string()),
-                handle: None,
-                ai_did: None,
-                api_key: None,
-                gpt_endpoint: None,
-                atproto_config: None,
-                num_predict: None,
-            }),
-            oauth: None,
-        }
-    }
-}
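A round-trip through the new mapping helpers, as a sketch (module path assumed to be crate::config; the rkey/uri/cid values are hypothetical placeholders):

// Sketch: record the atproto URI/CID for a post and persist it.
use crate::config::{Config, RecordMapping};

fn remember_record() -> anyhow::Result<()> {
    let mut mapping = Config::load_mapping()?; // empty HashMap if mapping.json is missing
    mapping.insert(
        "2025-01-01-hello".to_string(), // hypothetical post key
        RecordMapping {
            rkey: "3jzfcijpj2z2a".to_string(),                                 // hypothetical
            uri: "at://did:plc:example/ai.syui.log.post/3jzfcijpj2z2a".into(), // hypothetical
            cid: "bafyreib2rxk3rh6kzwq".to_string(),                           // hypothetical
        },
    );
    Config::save_mapping(&mapping)
}
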
89  src/delete.rs  (new file)
@@ -0,0 +1,89 @@
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};

use crate::config::Config;

#[derive(Debug, Serialize)]
struct DeleteRecordRequest {
    repo: String,
    collection: String,
    rkey: String,
}

#[derive(Debug, Deserialize)]
#[allow(dead_code)]
struct ListRecordsResponse {
    records: Vec<Record>,
    cursor: Option<String>,
}

#[derive(Debug, Deserialize)]
#[allow(dead_code)]
struct Record {
    uri: String,
}

pub async fn execute() -> Result<()> {
    let mut config = Config::load()?;

    // Refresh session before API calls
    crate::refresh::refresh_session(&mut config).await?;

    let mut mapping = Config::load_mapping()?;
    println!("Deleting all records from ai.syui.log.post...");

    let pds_url = format!("https://{}", config.pds);
    let client = reqwest::Client::new();

    // List all records
    let list_url = format!(
        "{}/xrpc/com.atproto.repo.listRecords?repo={}&collection=ai.syui.log.post&limit=100",
        pds_url, config.did
    );

    let res: ListRecordsResponse = client
        .get(&list_url)
        .send()
        .await
        .context("Failed to list records")?
        .json()
        .await
        .context("Failed to parse listRecords response")?;

    if res.records.is_empty() {
        println!("No records to delete.");
        return Ok(());
    }

    println!("Found {} records to delete", res.records.len());

    // Delete each record
    for record in &res.records {
        let rkey = record.uri.split('/').last().unwrap();

        let delete_req = DeleteRecordRequest {
            repo: config.did.clone(),
            collection: "ai.syui.log.post".to_string(),
            rkey: rkey.to_string(),
        };

        let delete_url = format!("{}/xrpc/com.atproto.repo.deleteRecord", pds_url);
        client
            .post(&delete_url)
            .header("Authorization", format!("Bearer {}", config.access_jwt))
            .json(&delete_req)
            .send()
            .await
            .context("Failed to delete record")?;

        println!("  ✓ Deleted: {}", rkey);
    }

    // Clear mapping (all records deleted)
    mapping.clear();
    Config::save_mapping(&mapping)?;
    println!("Mapping cleared.");

    println!("Done! All records deleted.");
    Ok(())
}

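listRecords caps each page at the requested limit (100 here) and the loop above ignores the returned cursor, so a repo with more than 100 records would only be partially cleared. A sketch of following the cursor, using the same types and variables as above:

// Sketch: page through listRecords until the cursor runs out.
let mut cursor: Option<String> = None;
loop {
    let mut url = format!(
        "{}/xrpc/com.atproto.repo.listRecords?repo={}&collection=ai.syui.log.post&limit=100",
        pds_url, config.did
    );
    if let Some(ref c) = cursor {
        url.push_str(&format!("&cursor={}", c));
    }
    let page: ListRecordsResponse = client.get(&url).send().await?.json().await?;
    if page.records.is_empty() {
        break;
    }
    // ... delete page.records as above ...
    match page.cursor {
        Some(c) => cursor = Some(c),
        None => break,
    }
}
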
@@ -1,237 +0,0 @@
use anyhow::Result;
use std::path::PathBuf;
use crate::analyzer::{ProjectInfo, ApiInfo, ProjectStructure};
use crate::ai::gpt_client::GptClient;

#[allow(dead_code)]
pub struct DocGenerator {
    base_path: PathBuf,
    ai_enabled: bool,
    templates: DocTemplates,
}

pub struct DocTemplates {
    readme_template: String,
    api_template: String,
    structure_template: String,
    changelog_template: String,
}

impl DocGenerator {
    pub fn new(base_path: PathBuf, ai_enabled: bool) -> Self {
        let templates = DocTemplates::default();
        Self {
            base_path,
            ai_enabled,
            templates,
        }
    }

    pub async fn generate_readme(&self, project_info: &ProjectInfo) -> Result<String> {
        let mut content = self.templates.readme_template.clone();

        // Simple template substitution
        content = content.replace("{{name}}", &project_info.name);
        content = content.replace("{{description}}",
            &project_info.description.as_ref().unwrap_or(&"A Rust project".to_string()));
        content = content.replace("{{module_count}}", &project_info.modules.len().to_string());
        content = content.replace("{{total_lines}}", &project_info.metrics.total_lines.to_string());

        let deps = project_info.dependencies.iter()
            .map(|(name, version)| format!("- {}: {}", name, version))
            .collect::<Vec<_>>()
            .join("\n");
        content = content.replace("{{dependencies}}", &deps);
        content = content.replace("{{license}}",
            &project_info.license.as_ref().unwrap_or(&"MIT".to_string()));

        if self.ai_enabled {
            content = self.enhance_with_ai(&content, "readme").await?;
        }

        Ok(content)
    }

    pub async fn generate_api_markdown(&self, api_info: &ApiInfo) -> Result<Vec<(String, String)>> {
        let mut files = Vec::new();

        // Generate main API documentation
        let main_content = self.templates.api_template.replace("{{content}}", "Generated API Documentation");
        files.push(("api.md".to_string(), main_content));

        // Generate individual module docs
        for module in &api_info.modules {
            if !module.functions.is_empty() || !module.structs.is_empty() {
                let module_content = self.generate_module_doc(module).await?;
                files.push((format!("{}.md", module.name), module_content));
            }
        }

        Ok(files)
    }

    pub async fn generate_structure_doc(&self, structure: &ProjectStructure) -> Result<String> {
        let content = self.templates.structure_template.replace("{{content}}",
            &format!("Found {} directories and {} files",
                structure.directories.len(),
                structure.files.len()));
        Ok(content)
    }

    pub async fn generate_changelog(&self, from: Option<String>, to: Option<String>) -> Result<String> {
        let commits = self.get_git_commits(from, to)?;

        let mut content = self.templates.changelog_template.replace("{{content}}",
            &format!("Found {} commits", commits.len()));

        if self.ai_enabled {
            content = self.enhance_changelog_with_ai(&content, &commits).await?;
        }

        Ok(content)
    }

    async fn enhance_with_ai(&self, content: &str, doc_type: &str) -> Result<String> {
        if !self.ai_enabled {
            return Ok(content.to_string());
        }

        let gpt_client = GptClient::new(
            std::env::var("OPENAI_API_KEY").unwrap_or_default(),
            None,
        );

        let prompt = format!(
            "Enhance this {} documentation with additional insights and improve readability:\n\n{}",
            doc_type, content
        );

        match gpt_client.chat("You are a technical writer helping to improve documentation.", &prompt).await {
            Ok(enhanced) => Ok(enhanced),
            Err(_) => Ok(content.to_string()), // Fallback to original content
        }
    }

    async fn generate_module_doc(&self, module: &crate::analyzer::ModuleInfo) -> Result<String> {
        let mut content = format!("# Module: {}\n\n", module.name);

        if let Some(docs) = &module.docs {
            content.push_str(&format!("{}\n\n", docs));
        }

        // Add functions
        if !module.functions.is_empty() {
            content.push_str("## Functions\n\n");
            for func in &module.functions {
                content.push_str(&self.format_function_doc(func));
            }
        }

        // Add structs
        if !module.structs.is_empty() {
            content.push_str("## Structs\n\n");
            for struct_info in &module.structs {
                content.push_str(&self.format_struct_doc(struct_info));
            }
        }

        Ok(content)
    }

    fn format_function_doc(&self, func: &crate::analyzer::FunctionInfo) -> String {
        let mut doc = format!("### `{}`\n\n", func.name);

        if let Some(docs) = &func.docs {
            doc.push_str(&format!("{}\n\n", docs));
        }

        doc.push_str(&format!("**Visibility:** `{}`\n", func.visibility));

        if func.is_async {
            doc.push_str("**Async:** Yes\n");
        }

        if !func.parameters.is_empty() {
            doc.push_str("\n**Parameters:**\n");
            for param in &func.parameters {
                doc.push_str(&format!("- `{}`: `{}`\n", param.name, param.param_type));
            }
        }

        if let Some(return_type) = &func.return_type {
            doc.push_str(&format!("\n**Returns:** `{}`\n", return_type));
        }

        doc.push_str("\n---\n\n");
        doc
    }

    fn format_struct_doc(&self, struct_info: &crate::analyzer::StructInfo) -> String {
        let mut doc = format!("### `{}`\n\n", struct_info.name);

        if let Some(docs) = &struct_info.docs {
            doc.push_str(&format!("{}\n\n", docs));
        }

        doc.push_str(&format!("**Visibility:** `{}`\n\n", struct_info.visibility));

        if !struct_info.fields.is_empty() {
            doc.push_str("**Fields:**\n");
            for field in &struct_info.fields {
                doc.push_str(&format!("- `{}`: `{}` ({})\n", field.name, field.field_type, field.visibility));
                if let Some(field_docs) = &field.docs {
                    doc.push_str(&format!("  - {}\n", field_docs));
                }
            }
        }

        doc.push_str("\n---\n\n");
        doc
    }

    async fn enhance_changelog_with_ai(&self, content: &str, _commits: &[GitCommit]) -> Result<String> {
        // TODO: Implement AI-enhanced changelog generation
        Ok(content.to_string())
    }

    fn get_git_commits(&self, _from: Option<String>, _to: Option<String>) -> Result<Vec<GitCommit>> {
        // TODO: Implement git history parsing
        Ok(vec![])
    }
}

#[derive(Debug)]
#[allow(dead_code)]
pub struct GitCommit {
    pub hash: String,
    pub message: String,
    pub author: String,
    pub date: String,
}

impl DocTemplates {
    fn default() -> Self {
        Self {
            readme_template: r#"# {{name}}

{{description}}

## Overview

This project contains {{module_count}} modules with {{total_lines}} lines of code.

## Dependencies

{{dependencies}}

## License

{{license}}
"#.to_string(),
            api_template: "# API Documentation\n\n{{content}}".to_string(),
            structure_template: "# Project Structure\n\n{{content}}".to_string(),
            changelog_template: "# Changelog\n\n{{content}}".to_string(),
        }
    }
}

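Invocation sketch (ProjectInfo comes from crate::analyzer and is assumed to be produced by the analyzer, not constructed by hand; the output file name is arbitrary):

// Sketch: generate a README without AI enhancement and write it to disk.
async fn write_readme(project_info: &crate::analyzer::ProjectInfo) -> anyhow::Result<()> {
    let gen = DocGenerator::new(std::path::PathBuf::from("."), false);
    let readme = gen.generate_readme(project_info).await?;
    std::fs::write("README.generated.md", readme)?;
    Ok(())
}
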
616  src/generator.rs
@@ -1,616 +0,0 @@
use anyhow::Result;
use colored::Colorize;
use std::path::PathBuf;
use walkdir::WalkDir;
use std::fs;
use crate::config::Config;
use crate::markdown::MarkdownProcessor;
use crate::template::TemplateEngine;
use crate::ai::AiManager;

pub struct Generator {
    base_path: PathBuf,
    config: Config,
    markdown_processor: MarkdownProcessor,
    template_engine: TemplateEngine,
    ai_manager: Option<AiManager>,
}

impl Generator {
    pub fn new(base_path: PathBuf, config: Config) -> Result<Self> {
        let markdown_processor = MarkdownProcessor::new(config.build.highlight_code, config.build.highlight_theme.clone());
        let template_engine = TemplateEngine::new(base_path.join("templates"))?;

        let ai_manager = if let Some(ref ai_config) = config.ai {
            if ai_config.enabled {
                Some(AiManager::new(ai_config.clone()))
            } else {
                None
            }
        } else {
            None
        };

        Ok(Self {
            base_path,
            config,
            markdown_processor,
            template_engine,
            ai_manager,
        })
    }

    fn create_config_with_timestamp(&self) -> Result<serde_json::Value> {
        let mut config_with_timestamp = serde_json::to_value(&self.config.site)?;
        if let Some(config_obj) = config_with_timestamp.as_object_mut() {
            config_obj.insert("build_timestamp".to_string(), serde_json::Value::String(
                std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .unwrap()
                    .as_secs()
                    .to_string()
            ));
        }
        Ok(config_with_timestamp)
    }

    pub async fn build(&self) -> Result<()> {
        // Clean public directory
        let public_dir = self.base_path.join("public");
        if public_dir.exists() {
            fs::remove_dir_all(&public_dir)?;
        }
        fs::create_dir_all(&public_dir)?;

        // Copy static files
        self.copy_static_files()?;

        // Process posts
        let posts = self.process_posts().await?;

        // Generate index page
        self.generate_index(&posts).await?;

        // Generate JSON index for API access
        self.generate_json_index(&posts).await?;

        // Generate post pages
        for post in &posts {
            self.generate_post_page(post).await?;

            // Generate translation pages
            if let Some(ref translations) = post.translations {
                for translation in translations {
                    self.generate_translation_page(post, translation).await?;
                }
            }
        }

        // Generate PDS page
        self.generate_pds_page().await?;

        // Generate Game page
        self.generate_game_page().await?;

        println!("{} {} posts", "Generated".cyan(), posts.len());

        Ok(())
    }

    fn copy_static_files(&self) -> Result<()> {
        let static_dir = self.base_path.join("static");
        let public_dir = self.base_path.join("public");

        if static_dir.exists() {
            for entry in WalkDir::new(&static_dir).min_depth(1) {
                let entry = entry?;
                let path = entry.path();
                let relative_path = path.strip_prefix(&static_dir)?;
                let dest_path = public_dir.join(relative_path);

                if path.is_dir() {
                    fs::create_dir_all(&dest_path)?;
                } else {
                    if let Some(parent) = dest_path.parent() {
                        fs::create_dir_all(parent)?;
                    }
                    fs::copy(path, &dest_path)?;
                }
            }

            // Copy files from atproto-auth-widget dist (if available)
            let widget_dist = self.base_path.join("atproto-auth-widget/dist");
            if widget_dist.exists() {
                for entry in WalkDir::new(&widget_dist).min_depth(1) {
                    let entry = entry?;
                    let path = entry.path();
                    let relative_path = path.strip_prefix(&widget_dist)?;
                    let dest_path = public_dir.join(relative_path);

                    if path.is_dir() {
                        fs::create_dir_all(&dest_path)?;
                    } else {
                        if let Some(parent) = dest_path.parent() {
                            fs::create_dir_all(parent)?;
                        }
                        fs::copy(path, &dest_path)?;
                    }
                }
                println!("{} widget files from dist", "Copied".yellow());
            }

            // Handle client-metadata.json based on environment (fallback)
            let is_production = std::env::var("PRODUCTION").unwrap_or_default() == "true";
            let metadata_dest = public_dir.join("client-metadata.json");

            // First try to get from widget dist (preferred)
            let widget_metadata = widget_dist.join("client-metadata.json");
            if widget_metadata.exists() {
                fs::copy(&widget_metadata, &metadata_dest)?;
                println!("{} client-metadata.json from widget", "Using".yellow());
            } else if is_production {
                // Fallback to local static files
                let prod_metadata = static_dir.join("client-metadata-prod.json");
                if prod_metadata.exists() {
                    fs::copy(&prod_metadata, &metadata_dest)?;
                    println!("{} production client-metadata.json (fallback)", "Using".yellow());
                }
            }

            println!("{} static files", "Copied".cyan());
        }

        Ok(())
    }

    async fn process_posts(&self) -> Result<Vec<Post>> {
        let mut posts = Vec::new();
        let posts_dir = self.base_path.join("content/posts");

        if posts_dir.exists() {
            for entry in WalkDir::new(&posts_dir).min_depth(1) {
                let entry = entry?;
                let path = entry.path();

                if path.is_file() && path.extension().map_or(false, |ext| ext == "md") {
                    match self.process_single_post(path).await {
                        Ok(post) => posts.push(post),
                        Err(e) => eprintln!("Error processing {}: {}", path.display(), e),
                    }
                }
            }
        }

        // Sort posts by date (newest first)
        posts.sort_by(|a, b| b.date.cmp(&a.date));

        Ok(posts)
    }

    async fn process_single_post(&self, path: &std::path::Path) -> Result<Post> {
        let content = fs::read_to_string(path)?;
        let (frontmatter, mut content) = self.markdown_processor.parse_frontmatter(&content)?;

        // Apply AI enhancements if enabled
        if let Some(ref ai_manager) = self.ai_manager {
            // Enhance content with AI
            let title = frontmatter.get("title")
                .and_then(|v| v.as_str())
                .unwrap_or("Untitled");

            content = ai_manager.enhance_content(&content, title).await
                .unwrap_or_else(|e| {
                    eprintln!("AI enhancement failed: {}", e);
                    content
                });
        }

        let html_content = self.markdown_processor.render(&content)?;

        // Use filename (without extension) as URL slug to include date
        let filename_slug = path.file_stem()
            .and_then(|s| s.to_str())
            .unwrap_or("post")
            .to_string();

        // Still keep the slug field from frontmatter for other purposes
        let frontmatter_slug = frontmatter.get("slug")
            .and_then(|v| v.as_str())
            .map(|s| s.to_string())
            .unwrap_or_else(|| filename_slug.clone());

        let mut post = Post {
            title: frontmatter.get("title")
                .and_then(|v| v.as_str())
                .unwrap_or("Untitled")
                .to_string(),
            date: frontmatter.get("date")
                .and_then(|v| v.as_str())
                .unwrap_or("")
                .to_string(),
            content: html_content,
            slug: frontmatter_slug.clone(),
            filename_slug: filename_slug.clone(),
            url: format!("/posts/{}.html", filename_slug),
            tags: frontmatter.get("tags")
                .and_then(|v| v.as_array())
                .map(|arr| arr.iter()
                    .filter_map(|v| v.as_str())
                    .map(|s| s.to_string())
                    .collect())
                .unwrap_or_default(),
            translations: None,
            ai_comment: None,
            extra: frontmatter.get("extra").cloned(),
        };

        // Auto-translate if enabled and post is in Japanese
        if let Some(ref ai_manager) = self.ai_manager {
            if self.config.ai.as_ref().map_or(false, |ai| ai.auto_translate)
                && self.config.site.language == "ja" {

                match ai_manager.translate(&content, "ja", "en").await {
                    Ok(translated_content) => {
                        let translated_html = self.markdown_processor.render(&translated_content)?;
                        let translated_title = ai_manager.translate(&post.title, "ja", "en").await
                            .unwrap_or_else(|_| post.title.clone());

                        post.translations = Some(vec![Translation {
                            lang: "en".to_string(),
                            title: translated_title,
                            content: translated_html,
                            url: format!("/posts/{}-en.html", post.filename_slug),
                        }]);
                    }
                    Err(e) => eprintln!("Translation failed: {}", e),
                }
            }

            // Generate AI comment
            if self.config.ai.as_ref().map_or(false, |ai| ai.comment_moderation) {
                match ai_manager.generate_comment(&post.title, &content).await {
                    Ok(Some(comment)) => {
                        post.ai_comment = Some(comment.content);
                    }
                    Ok(None) => {}
                    Err(e) => eprintln!("AI comment generation failed: {}", e),
                }
            }
        }

        Ok(post)
    }

    async fn generate_index(&self, posts: &[Post]) -> Result<()> {
        // Enhance posts with additional metadata for timeline view
        let enhanced_posts: Vec<serde_json::Value> = posts.iter().map(|post| {
            let excerpt = self.extract_excerpt(&post.content);
            let markdown_url = format!("/posts/{}.md", post.filename_slug);
            let translation_url = if let Some(ref translations) = post.translations {
                translations.first().map(|t| t.url.clone())
            } else {
                None
            };

            serde_json::json!({
                "title": post.title,
                "date": post.date,
                "content": post.content,
                "slug": post.slug,
                "url": post.url,
                "tags": post.tags,
                "excerpt": excerpt,
                "markdown_url": markdown_url,
                "translation_url": translation_url,
                "language": self.config.site.language,
                "extra": post.extra
            })
        }).collect();

        let mut context = tera::Context::new();
        let config_with_timestamp = self.create_config_with_timestamp()?;
        context.insert("config", &config_with_timestamp);
        context.insert("posts", &enhanced_posts);

        let html = self.template_engine.render("index.html", &context)?;

        let output_path = self.base_path.join("public/index.html");
        fs::write(output_path, html)?;

        Ok(())
    }

    async fn generate_post_page(&self, post: &Post) -> Result<()> {
        let mut context = tera::Context::new();
        let config_with_timestamp = self.create_config_with_timestamp()?;
        context.insert("config", &config_with_timestamp);

        // Create enhanced post with additional URLs
        let mut enhanced_post = post.clone();
        enhanced_post.url = format!("/posts/{}.html", post.filename_slug);

        // Add markdown view URL
        let markdown_url = format!("/posts/{}.md", post.filename_slug);

        // Add translation URLs if available
        let translation_urls: Vec<String> = if let Some(ref translations) = post.translations {
            translations.iter().map(|t| t.url.clone()).collect()
        } else {
            Vec::new()
        };

        context.insert("post", &serde_json::json!({
            "title": enhanced_post.title,
            "date": enhanced_post.date,
            "content": enhanced_post.content,
            "slug": enhanced_post.slug,
            "url": enhanced_post.url,
            "tags": enhanced_post.tags,
            "ai_comment": enhanced_post.ai_comment,
            "markdown_url": markdown_url,
            "translation_url": translation_urls.first(),
            "language": self.config.site.language,
            "extra": enhanced_post.extra
        }));

        let html = self.template_engine.render_with_context("post.html", &context)?;

        let output_dir = self.base_path.join("public/posts");
        fs::create_dir_all(&output_dir)?;

        let output_path = output_dir.join(format!("{}.html", post.filename_slug));
        fs::write(output_path, html)?;

        // Generate markdown view
        self.generate_markdown_view(post).await?;

        Ok(())
    }

    async fn generate_translation_page(&self, post: &Post, translation: &Translation) -> Result<()> {
        let mut context = tera::Context::new();
        let config_with_timestamp = self.create_config_with_timestamp()?;
        context.insert("config", &config_with_timestamp);
        context.insert("post", &TranslatedPost {
            title: translation.title.clone(),
            date: post.date.clone(),
            content: translation.content.clone(),
            slug: post.slug.clone(),
            url: translation.url.clone(),
            tags: post.tags.clone(),
            original_url: post.url.clone(),
            lang: translation.lang.clone(),
        });

        let html = self.template_engine.render_with_context("post.html", &context)?;

        let output_dir = self.base_path.join("public/posts");
        fs::create_dir_all(&output_dir)?;

        let output_path = output_dir.join(format!("{}-{}.html", post.filename_slug, translation.lang));
        fs::write(output_path, html)?;

        Ok(())
    }

    fn extract_excerpt(&self, html_content: &str) -> String {
        // Simple excerpt extraction - take the first 200 characters of text content
        let text_content = html_content
            .replace("<p>", "")
            .replace("</p>", " ")
            .replace("<br>", " ")
            .replace("<br/>", " ");

        // Remove HTML tags with a simple state machine
        let mut text = String::new();
        let mut in_tag = false;
        for ch in text_content.chars() {
            match ch {
                '<' => in_tag = true,
                '>' => in_tag = false,
                _ if !in_tag => text.push(ch),
                _ => {}
            }
        }

        // Compare character counts, not byte length, so multibyte text
        // doesn't get an ellipsis appended prematurely
        let trimmed = text.trim();
        let excerpt = trimmed.chars().take(200).collect::<String>();
        if trimmed.chars().count() > 200 {
            format!("{}...", excerpt)
        } else {
            excerpt
        }
    }

    async fn generate_markdown_view(&self, post: &Post) -> Result<()> {
        // Find original markdown file
        let posts_dir = self.base_path.join("content/posts");

        // Try to find the markdown file by checking all files in the posts directory
        for entry in fs::read_dir(&posts_dir)? {
            let entry = entry?;
            let path = entry.path();

            if let Some(extension) = path.extension() {
                if extension == "md" {
                    let content = fs::read_to_string(&path)?;
                    let (frontmatter, _) = self.markdown_processor.parse_frontmatter(&content)?;

                    // Check if this file has the same slug
                    let file_slug = frontmatter.get("slug")
                        .and_then(|v| v.as_str())
                        .unwrap_or_else(|| {
                            path.file_stem()
                                .and_then(|s| s.to_str())
                                .unwrap_or("")
                        });

                    if file_slug == post.slug || path.file_stem().and_then(|s| s.to_str()).unwrap_or("") == post.filename_slug {
                        let output_dir = self.base_path.join("public/posts");
                        fs::create_dir_all(&output_dir)?;

                        let output_path = output_dir.join(format!("{}.md", post.filename_slug));
                        fs::write(output_path, content)?;
                        break;
                    }
                }
            }
        }

        Ok(())
    }

    async fn generate_json_index(&self, posts: &[Post]) -> Result<()> {
        let index_data: Vec<serde_json::Value> = posts.iter().map(|post| {
            // Parse date for proper formatting
            let parsed_date = chrono::NaiveDate::parse_from_str(&post.date, "%Y-%m-%d")
                .unwrap_or_else(|_| chrono::Utc::now().naive_utc().date());

            // Format to Hugo-style date format (Mon Jan 2, 2006)
            let formatted_date = parsed_date.format("%a %b %-d, %Y").to_string();

            // Create UTC datetime for the utc_time field
            let utc_datetime = parsed_date.and_hms_opt(0, 0, 0)
                .unwrap_or_else(|| chrono::Utc::now().naive_utc());
            let utc_time = format!("{}Z", utc_datetime.format("%Y-%m-%dT%H:%M:%S"));

            // Extract plain text content from HTML
            let contents = self.extract_plain_text(&post.content);

            serde_json::json!({
                "title": post.title,
                "tags": post.tags,
                "description": self.extract_excerpt(&post.content),
                "categories": [],
                "contents": contents,
                "href": format!("{}{}", self.config.site.base_url.trim_end_matches('/'), post.url),
                "utc_time": utc_time,
                "formated_time": formatted_date
            })
        }).collect();

        // Write JSON index to public directory
        let output_path = self.base_path.join("public/index.json");
        let json_content = serde_json::to_string_pretty(&index_data)?;
        fs::write(output_path, json_content)?;

        println!("{} JSON index with {} posts", "Generated".cyan(), posts.len());

        Ok(())
    }

    async fn generate_pds_page(&self) -> Result<()> {
        let public_dir = self.base_path.join("public");
        let pds_dir = public_dir.join("pds");
        fs::create_dir_all(&pds_dir)?;

        // Generate PDS page using the pds.html template
        let config_with_timestamp = self.create_config_with_timestamp()?;
        let mut context = tera::Context::new();
        context.insert("config", &config_with_timestamp);
        context.insert("site", &self.config.site);
        context.insert("page", &serde_json::json!({
            "title": "AT URI Browser",
            "description": "AT Protocol レコードをブラウズし、分散SNSのコンテンツを探索できます"
        }));

        let rendered_content = self.template_engine.render("pds.html", &context)?;
        let output_path = pds_dir.join("index.html");
        fs::write(output_path, rendered_content)?;

        println!("{} PDS page", "Generated".cyan());

        Ok(())
    }

    async fn generate_game_page(&self) -> Result<()> {
        let public_dir = self.base_path.join("public");
        let game_dir = public_dir.join("game");
        fs::create_dir_all(&game_dir)?;

        // Generate Game page using the game.html template
        let config_with_timestamp = self.create_config_with_timestamp()?;
        let mut context = tera::Context::new();
        context.insert("config", &config_with_timestamp);
        context.insert("site", &self.config.site);
        context.insert("page", &serde_json::json!({
            "title": "Game",
            "description": "Play the game with AT Protocol authentication"
        }));

        let rendered_content = self.template_engine.render("game.html", &context)?;
        let output_path = game_dir.join("index.html");
        fs::write(output_path, rendered_content)?;

        println!("{} Game page", "Generated".cyan());

        Ok(())
    }

    fn extract_plain_text(&self, html_content: &str) -> String {
        // Remove HTML tags and extract plain text
        let mut text = String::new();
        let mut in_tag = false;

        for ch in html_content.chars() {
            match ch {
                '<' => in_tag = true,
                '>' => in_tag = false,
                _ if !in_tag => text.push(ch),
                _ => {}
            }
        }

        // Clean up whitespace
        text.split_whitespace().collect::<Vec<_>>().join(" ")
    }
}

#[derive(Debug, Clone, serde::Serialize)]
struct TranslatedPost {
    pub title: String,
    pub date: String,
    pub content: String,
    pub slug: String,
    pub url: String,
    pub tags: Vec<String>,
    pub original_url: String,
    pub lang: String,
}

#[derive(Debug, Clone, serde::Serialize)]
pub struct Post {
    pub title: String,
    pub date: String,
    pub content: String,
    pub slug: String,
    pub filename_slug: String, // Added for URL generation
    pub url: String,
    pub tags: Vec<String>,
    pub translations: Option<Vec<Translation>>,
    pub ai_comment: Option<String>,
    pub extra: Option<serde_json::Value>,
}

#[derive(Debug, Clone, serde::Serialize)]
pub struct Translation {
    pub lang: String,
    pub title: String,
    pub content: String,
    pub url: String,
}

#[derive(Debug, Clone, serde::Serialize)]
#[allow(dead_code)]
struct BlogPost {
    title: String,
    url: String,
    date: String,
}

#[derive(Debug, Clone, serde::Serialize)]
#[allow(dead_code)]
struct BlogIndex {
    posts: Vec<BlogPost>,
}

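extract_excerpt and extract_plain_text above carry two copies of the same character-level tag-stripping loop; a shared helper is the natural refactor (sketch, same crate):

// Sketch: one tag-stripping pass both call sites could share.
fn strip_tags(html: &str) -> String {
    let mut text = String::new();
    let mut in_tag = false;
    for ch in html.chars() {
        match ch {
            '<' => in_tag = true,
            '>' => in_tag = false,
            _ if !in_tag => text.push(ch),
            _ => {}
        }
    }
    text
}
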
15  src/lib.rs
@@ -1,15 +0,0 @@
// Export modules for testing
pub mod ai;
pub mod analyzer;
pub mod atproto;
pub mod commands;
pub mod config;
pub mod doc_generator;
pub mod generator;
pub mod markdown;
pub mod shortcode;
pub mod mcp;
pub mod oauth;
// pub mod ollama_proxy; // Temporarily disabled - uses actix-web instead of axum
pub mod template;
pub mod translator;

83  src/login.rs  (new file)
@@ -0,0 +1,83 @@
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};

use crate::config::Config;

#[derive(Debug, Serialize)]
struct CreateSessionRequest {
    identifier: String,
    password: String,
}

#[derive(Debug, Deserialize)]
#[allow(dead_code)]
struct CreateSessionResponse {
    #[serde(rename = "accessJwt")]
    access_jwt: String,
    #[serde(rename = "refreshJwt")]
    refresh_jwt: String,
    handle: String,
    did: String,
}

#[derive(Debug, Deserialize)]
#[allow(dead_code)]
struct DescribeRepoResponse {
    handle: String,
    did: String,
}

pub async fn execute(handle: &str, password: &str, pds: &str) -> Result<()> {
    println!("Logging in as {} to {}...", handle, pds);

    // Resolve handle to DID
    let pds_url = format!("https://{}", pds);
    let describe_url = format!(
        "{}/xrpc/com.atproto.repo.describeRepo?repo={}",
        pds_url, handle
    );

    let client = reqwest::Client::new();
    let describe_res: DescribeRepoResponse = client
        .get(&describe_url)
        .send()
        .await
        .context("Failed to resolve handle")?
        .json()
        .await
        .context("Failed to parse describeRepo response")?;

    println!("Resolved handle to DID: {}", describe_res.did);

    // Create session
    let session_url = format!("{}/xrpc/com.atproto.server.createSession", pds_url);
    let session_req = CreateSessionRequest {
        identifier: handle.to_string(),
        password: password.to_string(),
    };

    let session_res: CreateSessionResponse = client
        .post(&session_url)
        .json(&session_req)
        .send()
        .await
        .context("Failed to create session")?
        .json()
        .await
        .context("Failed to parse createSession response")?;

    println!("Successfully authenticated!");

    // Save config
    let config = Config {
        pds: pds.to_string(),
        handle: handle.to_string(),
        did: session_res.did,
        access_jwt: session_res.access_jwt,
        refresh_jwt: session_res.refresh_jwt,
    };

    config.save()?;

    Ok(())
}

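On success, Config::save (src/config.rs) writes the session to ~/.config/syui/ai/log/config.json; given the Config field names, the file would look like this (all values hypothetical):

{
  "pds": "syu.is",
  "handle": "ai.syui.ai",
  "did": "did:plc:xxxxxxxxxxxxxxxxxxxxxxxx",
  "access_jwt": "<access token>",
  "refresh_jwt": "<refresh token>"
}
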
300  src/main.rs
@@ -1,289 +1,75 @@
|
||||
use anyhow::Result;
|
||||
use clap::{Parser, Subcommand};
|
||||
use std::path::PathBuf;
|
||||
|
||||
mod analyzer;
|
||||
mod commands;
|
||||
mod doc_generator;
|
||||
mod generator;
|
||||
mod markdown;
|
||||
mod shortcode;
|
||||
mod template;
|
||||
mod oauth;
|
||||
mod translator;
|
||||
mod config;
|
||||
mod ai;
|
||||
mod atproto;
|
||||
mod mcp;
|
||||
mod login;
|
||||
mod post;
|
||||
mod build;
|
||||
mod delete;
|
||||
mod refresh;
|
||||
mod serve;
|
||||
|
||||
#[derive(Parser)]
|
||||
#[command(name = "ailog")]
|
||||
#[command(about = "A static blog generator with AI features")]
|
||||
#[command(disable_version_flag = true)]
|
||||
#[command(about = "A simple static blog generator with atproto integration")]
|
||||
struct Cli {
|
||||
/// Print version information
|
||||
#[arg(short = 'V', long = "version")]
|
||||
version: bool,
|
||||
|
||||
#[command(subcommand)]
|
||||
command: Option<Commands>,
|
||||
command: Commands,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
enum Commands {
|
||||
/// Initialize a new blog
|
||||
Init {
|
||||
/// Path to create the blog
|
||||
#[arg(default_value = ".")]
|
||||
path: PathBuf,
|
||||
},
|
||||
/// Build the blog
|
||||
Build {
|
||||
/// Path to the blog directory
|
||||
#[arg(default_value = ".")]
|
||||
path: PathBuf,
|
||||
},
|
||||
/// Create a new post
|
||||
New {
|
||||
/// Title of the post
|
||||
title: String,
|
||||
/// Slug for the post (optional, derived from title if not provided)
|
||||
/// Login to atproto PDS
|
||||
#[command(alias = "l")]
|
||||
Login {
|
||||
/// Handle (e.g., ai.syui.ai)
|
||||
handle: String,
|
||||
/// Password
|
||||
#[arg(short, long)]
|
||||
slug: Option<String>,
|
||||
/// Post format
|
||||
#[arg(short, long, default_value = "md")]
|
||||
format: String,
|
||||
/// Path to the blog directory
|
||||
#[arg(default_value = ".")]
|
||||
path: PathBuf,
|
||||
password: String,
|
||||
/// PDS server (e.g., syu.is, bsky.social)
|
||||
#[arg(short = 's', long, default_value = "syu.is")]
|
||||
pds: String,
|
||||
},
|
||||
/// Serve the blog locally
|
||||
/// Post markdown files to atproto
|
||||
#[command(alias = "p")]
|
||||
Post,
|
||||
/// Build static site from atproto records
|
||||
#[command(alias = "b")]
|
||||
Build,
|
||||
/// Delete all records from atproto
|
||||
#[command(alias = "d")]
|
||||
Delete,
|
||||
/// Start local preview server
|
||||
#[command(alias = "s")]
|
||||
Serve {
|
||||
/// Port to serve on
|
||||
#[arg(short, long, default_value = "8080")]
|
||||
/// Port number
|
||||
#[arg(short, long, default_value = "3000")]
|
||||
port: u16,
|
||||
/// Path to the blog directory
|
||||
#[arg(default_value = ".")]
|
||||
path: PathBuf,
|
||||
},
|
||||
/// Clean build artifacts
|
||||
Clean {
|
||||
/// Path to the blog directory
|
||||
#[arg(default_value = ".")]
|
||||
path: PathBuf,
|
||||
},
|
||||
/// Start MCP server for ai.gpt integration
|
||||
Mcp {
|
||||
/// Port to serve MCP on
|
||||
#[arg(short, long, default_value = "8002")]
|
||||
port: u16,
|
||||
/// Path to the blog directory
|
||||
#[arg(default_value = ".")]
|
||||
path: PathBuf,
|
||||
/// Enable Claude proxy mode
|
||||
#[arg(long)]
|
||||
claude_proxy: bool,
|
||||
/// API token for Claude proxy authentication
|
||||
#[arg(long)]
|
||||
api_token: Option<String>,
|
||||
/// Claude Code executable path
|
||||
#[arg(long, default_value = "claude")]
|
||||
claude_code_path: String,
|
||||
},
|
||||
/// Generate documentation from code
|
||||
Doc(commands::doc::DocCommand),
|
||||
/// ATProto authentication
|
||||
Auth {
|
||||
#[command(subcommand)]
|
||||
command: AuthCommands,
|
||||
},
|
||||
/// ATProto stream monitoring
|
||||
Stream {
|
||||
#[command(subcommand)]
|
||||
command: StreamCommands,
|
||||
},
|
||||
/// OAuth app management
|
||||
Oauth {
|
||||
#[command(subcommand)]
|
||||
command: OauthCommands,
|
||||
},
|
||||
/// Interactive blog writing mode (default)
|
||||
Interactive,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
enum AuthCommands {
|
||||
/// Initialize OAuth authentication
|
||||
Init {
|
||||
/// Specify PDS server (e.g., syu.is, bsky.social)
|
||||
#[arg(long)]
|
||||
pds: Option<String>,
|
||||
/// Handle/username for authentication
|
||||
#[arg(long)]
|
||||
handle: Option<String>,
|
||||
/// Use password authentication instead of JWT
|
||||
#[arg(long)]
|
||||
password: bool,
|
||||
/// Access JWT token (alternative to password auth)
|
||||
#[arg(long)]
|
||||
access_jwt: Option<String>,
|
||||
/// Refresh JWT token (required with access-jwt)
|
||||
#[arg(long)]
|
||||
refresh_jwt: Option<String>,
|
||||
},
|
||||
/// Show current authentication status
|
||||
Status,
|
||||
/// Logout and clear credentials
|
||||
Logout,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
enum StreamCommands {
|
||||
/// Start monitoring ATProto streams
|
||||
Start {
|
||||
/// Path to the blog project directory
|
||||
project_dir: Option<PathBuf>,
|
||||
/// Run as daemon
|
||||
#[arg(short, long)]
|
||||
daemon: bool,
|
||||
/// Enable AI content generation
|
||||
#[arg(long)]
|
||||
ai_generate: bool,
|
||||
},
|
||||
/// Initialize user list for admin account
|
||||
Init {
|
||||
/// Path to the blog project directory
|
||||
project_dir: Option<PathBuf>,
|
||||
/// Handles to add to initial user list (comma-separated)
|
||||
#[arg(long)]
|
||||
handles: Option<String>,
|
||||
},
|
||||
/// Stop monitoring
|
||||
Stop,
|
||||
/// Show monitoring status
|
||||
Status,
|
||||
/// Test API access to comments collection
|
||||
Test,
|
||||
/// Test user list update functionality
|
||||
TestUserUpdate,
|
||||
/// Test recent comment detection logic
|
||||
TestRecentDetection,
|
||||
/// Test complete polling cycle logic
|
||||
TestPollingCycle,
|
||||
}
|
||||
|
||||
#[derive(Subcommand)]
|
||||
enum OauthCommands {
|
||||
/// Build OAuth app
|
||||
Build {
|
||||
/// Path to the blog project directory
|
||||
project_dir: PathBuf,
|
||||
},
|
||||
}
|
||||
|
||||
#[tokio::main]
async fn main() -> Result<()> {
    let cli = Cli::parse();

    // Handle version flag
    if cli.version {
        println!("{}", env!("CARGO_PKG_VERSION"));
        return Ok(());
    }

    // If no subcommand provided, start interactive mode
    let command = cli.command.unwrap_or(Commands::Interactive);

    match command {
        Commands::Init { path } => {
            commands::init::execute(path).await?;
        }
        Commands::Login { handle, password, pds } => {
            login::execute(&handle, &password, &pds).await?;
        }
        Commands::Build { path } => {
            commands::build::execute(path).await?;
        }
        Commands::Post => {
            post::execute().await?;
        }
        Commands::New { title, slug, format, path } => {
            std::env::set_current_dir(path)?;
            commands::new::execute(title, slug, format).await?;
        }
        Commands::Serve { port, path } => {
            std::env::set_current_dir(path)?;
            commands::serve::execute(port).await?;
        }
        Commands::Delete => {
            delete::execute().await?;
        }
        Commands::Clean { path } => {
            std::env::set_current_dir(path)?;
            commands::clean::execute().await?;
        }
        Commands::Mcp { port, path, claude_proxy, api_token, claude_code_path } => {
            use crate::mcp::McpServer;
            let mut server = McpServer::new(path);

            if claude_proxy {
                let token = api_token
                    .or_else(|| std::env::var("CLAUDE_PROXY_API_TOKEN").ok())
                    .ok_or_else(|| {
                        anyhow::anyhow!("API token is required when --claude-proxy is enabled. Set CLAUDE_PROXY_API_TOKEN environment variable or use --api-token")
                    })?;
                server = server.with_claude_proxy(token, Some(claude_code_path.clone()));
                println!("Claude proxy mode enabled - using Claude Code executable: {}", claude_code_path);
            }

            server.serve(port).await?;
        }
        Commands::Doc(doc_cmd) => {
            doc_cmd.execute(std::env::current_dir()?).await?;
        }
        Commands::Auth { command } => {
            match command {
                AuthCommands::Init { pds, handle, password, access_jwt, refresh_jwt } => {
                    commands::auth::init_with_options(pds, handle, password, access_jwt, refresh_jwt).await?;
                }
                AuthCommands::Status => {
                    commands::auth::status().await?;
                }
                AuthCommands::Logout => {
                    commands::auth::logout().await?;
                }
            }
        }
        Commands::Stream { command } => {
            match command {
                StreamCommands::Start { project_dir, daemon, ai_generate } => {
                    commands::stream::start(project_dir, daemon, ai_generate).await?;
                }
                StreamCommands::Init { project_dir, handles } => {
                    commands::stream::init_user_list(project_dir, handles).await?;
                }
                StreamCommands::Stop => {
                    commands::stream::stop().await?;
                }
                StreamCommands::Status => {
                    commands::stream::status().await?;
                }
                StreamCommands::Test => {
                    commands::stream::test_api().await?;
                }
                StreamCommands::TestUserUpdate => {
                    commands::stream::test_user_update().await?;
                }
                StreamCommands::TestRecentDetection => {
                    commands::stream::test_recent_detection().await?;
                }
                StreamCommands::TestPollingCycle => {
                    commands::stream::test_polling_cycle().await?;
                }
            }
        }
        Commands::Oauth { command } => {
            match command {
                OauthCommands::Build { project_dir } => {
                    commands::oauth::build(project_dir).await?;
                }
            }
        }
        Commands::Interactive => {
            commands::interactive::run().await?;
        }
    }

    Ok(())
}

src/markdown.rs
@@ -1,185 +0,0 @@
use anyhow::Result;
use pulldown_cmark::{html, Options, Parser, CodeBlockKind};
use syntect::parsing::SyntaxSet;
use syntect::highlighting::ThemeSet;
use syntect::html::{styled_line_to_highlighted_html, IncludeBackground};
use gray_matter::Matter;
use gray_matter::engine::YAML;
use serde_json::Value;
use crate::shortcode::ShortcodeProcessor;

pub struct MarkdownProcessor {
    highlight_code: bool,
    highlight_theme: String,
    syntax_set: SyntaxSet,
    theme_set: ThemeSet,
    shortcode_processor: ShortcodeProcessor,
}

impl MarkdownProcessor {
    pub fn new(highlight_code: bool, highlight_theme: Option<String>) -> Self {
        Self {
            highlight_code,
            highlight_theme: highlight_theme.unwrap_or_else(|| "Monokai".to_string()),
            syntax_set: SyntaxSet::load_defaults_newlines(),
            theme_set: ThemeSet::load_defaults(),
            shortcode_processor: ShortcodeProcessor::new(),
        }
    }

    pub fn parse_frontmatter(&self, content: &str) -> Result<(serde_json::Map<String, Value>, String)> {
        let matter = Matter::<YAML>::new();
        let result = matter.parse(content);

        let frontmatter = result.data
            .and_then(|pod| pod.as_hashmap().ok())
            .map(|map| {
                let mut json_map = serde_json::Map::new();
                for (k, v) in map {
                    // Keys in hashmap are already strings
                    let value = self.pod_to_json_value(v);
                    json_map.insert(k, value);
                }
                json_map
            })
            .unwrap_or_default();

        Ok((frontmatter, result.content))
    }

    fn pod_to_json_value(&self, pod: gray_matter::Pod) -> Value {
        match pod {
            gray_matter::Pod::Null => Value::Null,
            gray_matter::Pod::Boolean(b) => Value::Bool(b),
            gray_matter::Pod::Integer(i) => Value::Number(serde_json::Number::from(i)),
            gray_matter::Pod::Float(f) => serde_json::Number::from_f64(f)
                .map(Value::Number)
                .unwrap_or(Value::Null),
            gray_matter::Pod::String(s) => Value::String(s),
            gray_matter::Pod::Array(arr) => {
                Value::Array(arr.into_iter().map(|p| self.pod_to_json_value(p)).collect())
            }
            gray_matter::Pod::Hash(map) => {
                let mut json_map = serde_json::Map::new();
                for (k, v) in map {
                    json_map.insert(k, self.pod_to_json_value(v));
                }
                Value::Object(json_map)
            }
        }
    }

    pub fn render(&self, content: &str) -> Result<String> {
        // Process shortcodes first
        let processed_content = self.shortcode_processor.process(content);

        let mut options = Options::empty();
        options.insert(Options::ENABLE_STRIKETHROUGH);
        options.insert(Options::ENABLE_TABLES);
        options.insert(Options::ENABLE_FOOTNOTES);
        options.insert(Options::ENABLE_TASKLISTS);

        if self.highlight_code {
            self.render_with_syntax_highlighting(&processed_content, options)
        } else {
            let parser = Parser::new_ext(&processed_content, options);
            let mut html_output = String::new();
            html::push_html(&mut html_output, parser);
            Ok(html_output)
        }
    }

    /// Provide access to the shortcode processor for custom shortcode registration
    #[allow(dead_code)]
    pub fn shortcode_processor_mut(&mut self) -> &mut ShortcodeProcessor {
        &mut self.shortcode_processor
    }

    fn render_with_syntax_highlighting(&self, content: &str, options: Options) -> Result<String> {
        let parser = Parser::new_ext(content, options);
        let mut html_output = String::new();
        let mut code_block = None;
        // Force a dark theme for better visibility on dark backgrounds
        let theme = self.theme_set.themes.get("base16-monokai.dark")
            .or_else(|| self.theme_set.themes.get("base16-ocean.dark"))
            .or_else(|| self.theme_set.themes.get("Solarized (dark)"))
            .or_else(|| self.theme_set.themes.get(&self.highlight_theme))
            .unwrap_or_else(|| self.theme_set.themes.values().next().unwrap());

        let mut events = Vec::new();
        for event in parser {
            match event {
                pulldown_cmark::Event::Start(pulldown_cmark::Tag::CodeBlock(kind)) => {
                    if let CodeBlockKind::Fenced(lang_info) = &kind {
                        code_block = Some((String::new(), lang_info.to_string()));
                    }
                }
                pulldown_cmark::Event::Text(text) => {
                    if let Some((ref mut code, _)) = code_block {
                        code.push_str(&text);
                    } else {
                        events.push(pulldown_cmark::Event::Text(text));
                    }
                }
                pulldown_cmark::Event::End(pulldown_cmark::TagEnd::CodeBlock) => {
                    if let Some((code, lang_info)) = code_block.take() {
                        let highlighted = self.highlight_code_block(&code, &lang_info, theme);
                        events.push(pulldown_cmark::Event::Html(highlighted.into()));
                    }
                }
                _ => events.push(event),
            }
        }

        html::push_html(&mut html_output, events.into_iter());
        Ok(html_output)
    }

    fn highlight_code_block(&self, code: &str, lang_info: &str, theme: &syntect::highlighting::Theme) -> String {
        // Parse language and filename from lang_info (e.g., "sh:/path/to/file" or "rust:main.rs")
        let (lang, filename) = if lang_info.contains(':') {
            let parts: Vec<&str> = lang_info.splitn(2, ':').collect();
            (parts[0], Some(parts[1]))
        } else {
            (lang_info, None)
        };

        // Map short language names to full names
        let lang = match lang {
            "rs" => "rust",
            "js" => "javascript",
            "ts" => "typescript",
            "sh" => "bash",
            "yml" => "yaml",
            "md" => "markdown",
            "py" => "python",
            _ => lang,
        };

        let syntax = self.syntax_set
            .find_syntax_by_token(lang)
            .unwrap_or_else(|| self.syntax_set.find_syntax_plain_text());

        let mut highlighter = syntect::easy::HighlightLines::new(syntax, theme);

        // Create pre tag with optional filename attribute
        let pre_tag = if let Some(filename) = filename {
            format!("<pre data-filename=\"{}\">", filename)
        } else {
            "<pre>".to_string()
        };

        let mut output = format!("{}<code>", pre_tag);

        for line in code.lines() {
            let ranges = highlighter.highlight_line(line, &self.syntax_set).unwrap();
            let html_line = styled_line_to_highlighted_html(&ranges[..], IncludeBackground::No).unwrap();
            output.push_str(&html_line);
            output.push('\n');
        }

        output.push_str("</code></pre>");
        output
    }
}
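
Illustrative use of the processor above; the API comes from this file, while the input string is made up:

    let processor = MarkdownProcessor::new(true, None); // None falls back to "Monokai"
    let source = "---\ntitle: Hello\n---\n\n```rs:main.rs\nfn main() {}\n```\n";
    let (frontmatter, body) = processor.parse_frontmatter(source).unwrap();
    assert_eq!(frontmatter.get("title").and_then(|v| v.as_str()), Some("Hello"));
    let html = processor.render(&body).unwrap(); // the `rs:main.rs` fence becomes <pre data-filename="main.rs">
    assert!(html.contains("data-filename=\"main.rs\""));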

src/mcp/claude_proxy.rs
@@ -1,160 +0,0 @@
use anyhow::Result;
use axum::{
    extract::State,
    http::StatusCode,
    response::Json,
};
use axum_extra::{
    headers::{authorization::Bearer, Authorization},
    TypedHeader,
};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
// Removed unused import

#[derive(Debug, Deserialize)]
pub struct ChatRequest {
    pub question: String,
    #[serde(rename = "systemPrompt")]
    pub system_prompt: String,
    #[serde(default)]
    pub context: Value,
}

#[derive(Debug, Serialize)]
pub struct ChatResponse {
    pub answer: String,
}

#[derive(Clone)]
pub struct ClaudeProxyState {
    pub api_token: String,
    pub claude_code_path: String,
}

pub async fn claude_chat_handler(
    State(state): State<crate::mcp::server::AppState>,
    auth: Option<TypedHeader<Authorization<Bearer>>>,
    Json(request): Json<ChatRequest>,
) -> Result<Json<ChatResponse>, StatusCode> {
    // Check whether the Claude proxy is enabled
    let claude_proxy = state.claude_proxy.as_ref().ok_or(StatusCode::NOT_FOUND)?;

    // Verify the bearer token
    let auth = auth.ok_or(StatusCode::UNAUTHORIZED)?;
    if auth.token() != claude_proxy.api_token {
        return Err(StatusCode::UNAUTHORIZED);
    }

    // Talk to Claude Code over MCP
    let response = communicate_with_claude_mcp(
        &request.question,
        &request.system_prompt,
        &request.context,
        &claude_proxy.claude_code_path,
    ).await?;

    Ok(Json(ChatResponse { answer: response }))
}

async fn communicate_with_claude_mcp(
    message: &str,
    system: &str,
    _context: &Value,
    claude_code_path: &str,
) -> Result<String, StatusCode> {
    tracing::info!("Communicating with Claude Code via stdio");
    tracing::info!("Message: {}", message);
    tracing::info!("System prompt: {}", system);

    // Spawn the Claude Code MCP process.
    // Use the full path to avoid shell functions, and don't use --continue.
    let claude_executable = if claude_code_path == "claude" {
        // Use dirs crate for cross-platform home directory detection
        match dirs::home_dir() {
            Some(home) => home.join(".claude/local/claude").to_string_lossy().to_string(),
            None => "/Users/syui/.claude/local/claude".to_string(), // fallback
        }
    } else {
        claude_code_path.to_string()
    };

    let mut child = tokio::process::Command::new(claude_executable)
        .args(&["--print", "--output-format", "text"])
        .stdin(std::process::Stdio::piped())
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .spawn()
        .map_err(|e| {
            tracing::error!("Failed to start Claude Code process: {}", e);
            StatusCode::INTERNAL_SERVER_ERROR
        })?;

    // Build the prompt
    let full_prompt = if !system.is_empty() {
        format!("{}\n\nUser: {}", system, message)
    } else {
        message.to_string()
    };

    // Send the prompt to stdin
    if let Some(stdin) = child.stdin.take() {
        use tokio::io::AsyncWriteExt;
        let mut stdin = stdin;
        stdin.write_all(full_prompt.as_bytes()).await.map_err(|e| {
            tracing::error!("Failed to write to Claude Code stdin: {}", e);
            StatusCode::INTERNAL_SERVER_ERROR
        })?;
        stdin.shutdown().await.map_err(|e| {
            tracing::error!("Failed to close Claude Code stdin: {}", e);
            StatusCode::INTERNAL_SERVER_ERROR
        })?;
    }

    // Wait for the process to finish (with a timeout)
    let output = tokio::time::timeout(
        tokio::time::Duration::from_secs(30),
        child.wait_with_output()
    )
    .await
    .map_err(|_| {
        tracing::error!("Claude Code process timed out");
        StatusCode::REQUEST_TIMEOUT
    })?
    .map_err(|e| {
        tracing::error!("Claude Code process failed: {}", e);
        StatusCode::INTERNAL_SERVER_ERROR
    })?;

    // Check the process exit status
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        tracing::error!("Claude Code process failed with stderr: {}", stderr);
        return Ok("The Claude Code process returned an error".to_string());
    }

    // Read stdout
    let stdout = String::from_utf8_lossy(&output.stdout);
    tracing::debug!("Claude Code stdout: {}", stdout);

    // Claude Code normally returns plain text, so pass it through as-is
    Ok(stdout.trim().to_string())
}

pub async fn claude_tools_handler() -> Json<Value> {
    Json(json!({
        "tools": {
            "chat": {
                "description": "Chat with Claude",
                "inputSchema": {
                    "type": "object",
                    "properties": {
                        "message": {"type": "string"},
                        "system": {"type": "string"}
                    },
                    "required": ["message"]
                }
            }
        }
    }))
}
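
A client-side sketch of calling the handler above with reqwest; the route and request shape come from this file, while the port and token are placeholders:

    async fn ask_claude(token: &str) -> anyhow::Result<String> {
        let res: serde_json::Value = reqwest::Client::new()
            .post("http://localhost:8002/api/claude-mcp") // placeholder port
            .bearer_auth(token)
            .json(&serde_json::json!({
                "question": "Summarize the latest post",
                "systemPrompt": "You are a blog assistant"
            }))
            .send().await?
            .error_for_status()?
            .json().await?;
        Ok(res["answer"].as_str().unwrap_or_default().to_string())
    }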

src/mcp/mod.rs
@@ -1,6 +0,0 @@
pub mod server;
pub mod tools;
pub mod types;
pub mod claude_proxy;

pub use server::McpServer;

src/mcp/server.rs
@@ -1,169 +0,0 @@
use anyhow::Result;
use axum::{
    extract::State,
    http::StatusCode,
    response::Json,
    routing::{get, post},
    Router,
};
use serde_json::{json, Value};
use std::path::PathBuf;
use std::sync::Arc;
use tower_http::cors::CorsLayer;
use crate::mcp::tools::BlogTools;
use crate::mcp::types::{McpRequest, McpResponse, McpError, CreatePostRequest, ListPostsRequest, BuildRequest};
use crate::mcp::claude_proxy::{claude_chat_handler, claude_tools_handler, ClaudeProxyState};

#[derive(Clone)]
pub struct AppState {
    pub blog_tools: Arc<BlogTools>,
    pub claude_proxy: Option<Arc<ClaudeProxyState>>,
}

pub struct McpServer {
    app_state: AppState,
}

impl McpServer {
    pub fn new(base_path: PathBuf) -> Self {
        let blog_tools = Arc::new(BlogTools::new(base_path));
        let app_state = AppState {
            blog_tools,
            claude_proxy: None,
        };

        Self { app_state }
    }

    pub fn with_claude_proxy(mut self, api_token: String, claude_code_path: Option<String>) -> Self {
        let claude_code_path = claude_code_path.unwrap_or_else(|| "claude".to_string());
        self.app_state.claude_proxy = Some(Arc::new(ClaudeProxyState {
            api_token,
            claude_code_path,
        }));
        self
    }

    pub fn create_router(&self) -> Router {
        Router::new()
            .route("/", get(root_handler))
            .route("/mcp/tools/list", get(list_tools))
            .route("/mcp/tools/call", post(call_tool))
            .route("/health", get(health_check))
            .route("/api/claude-mcp", post(claude_chat_handler))
            .route("/claude/tools", get(claude_tools_handler))
            .layer(CorsLayer::permissive())
            .with_state(self.app_state.clone())
    }

    pub async fn serve(&self, port: u16) -> Result<()> {
        let app = self.create_router();

        let listener = tokio::net::TcpListener::bind(format!("0.0.0.0:{}", port)).await?;
        println!("ai.log MCP Server listening on port {}", port);

        axum::serve(listener, app).await?;
        Ok(())
    }
}

async fn root_handler() -> Json<Value> {
    Json(json!({
        "name": "ai.log MCP Server",
        "version": "0.1.0",
        "description": "AI-powered static blog generator with MCP integration",
        "tools": ["create_blog_post", "list_blog_posts", "build_blog", "get_post_content"]
    }))
}

async fn health_check() -> Json<Value> {
    Json(json!({
        "status": "healthy",
        "timestamp": chrono::Utc::now().to_rfc3339()
    }))
}

async fn list_tools() -> Json<Value> {
    let tools = BlogTools::get_tools();
    Json(json!({
        "tools": tools
    }))
}

async fn call_tool(
    State(state): State<AppState>,
    Json(request): Json<McpRequest>,
) -> Result<Json<McpResponse>, StatusCode> {
    let tool_name = request.params
        .as_ref()
        .and_then(|p| p.get("name"))
        .and_then(|v| v.as_str())
        .ok_or(StatusCode::BAD_REQUEST)?;

    let arguments = request.params
        .as_ref()
        .and_then(|p| p.get("arguments"))
        .cloned()
        .unwrap_or(json!({}));

    let result = match tool_name {
        "create_blog_post" => {
            let req: CreatePostRequest = serde_json::from_value(arguments)
                .map_err(|_| StatusCode::BAD_REQUEST)?;
            state.blog_tools.create_post(req).await
        }
        "list_blog_posts" => {
            let req: ListPostsRequest = serde_json::from_value(arguments)
                .map_err(|_| StatusCode::BAD_REQUEST)?;
            state.blog_tools.list_posts(req).await
        }
        "build_blog" => {
            let req: BuildRequest = serde_json::from_value(arguments)
                .map_err(|_| StatusCode::BAD_REQUEST)?;
            state.blog_tools.build_blog(req).await
        }
        "get_post_content" => {
            let slug = arguments.get("slug")
                .and_then(|v| v.as_str())
                .ok_or(StatusCode::BAD_REQUEST)?;
            state.blog_tools.get_post_content(slug).await
        }
        "translate_document" => {
            state.blog_tools.translate_document(arguments).await
        }
        "generate_documentation" => {
            state.blog_tools.generate_documentation(arguments).await
        }
        _ => {
            return Ok(Json(McpResponse {
                jsonrpc: "2.0".to_string(),
                id: request.id,
                result: None,
                error: Some(McpError {
                    code: -32601,
                    message: format!("Method not found: {}", tool_name),
                    data: None,
                }),
            }));
        }
    };

    match result {
        Ok(tool_result) => Ok(Json(McpResponse {
            jsonrpc: "2.0".to_string(),
            id: request.id,
            result: Some(serde_json::to_value(tool_result).unwrap()),
            error: None,
        })),
        Err(e) => Ok(Json(McpResponse {
            jsonrpc: "2.0".to_string(),
            id: request.id,
            result: None,
            error: Some(McpError {
                code: -32000,
                message: e.to_string(),
                data: None,
            }),
        })),
    }
}
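
For reference, a request body that `call_tool` above accepts; field names follow the `McpRequest` type, and the argument values are illustrative:

    let payload = serde_json::json!({
        "jsonrpc": "2.0",
        "id": 1,
        "method": "tools/call",
        "params": {
            "name": "create_blog_post",
            "arguments": { "title": "Hello", "content": "# Hi", "tags": ["test"] }
        }
    });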

src/mcp/tools.rs
@@ -1,504 +0,0 @@
use anyhow::Result;
use serde_json::{json, Value};
use std::path::PathBuf;
use std::fs;
use chrono::Local;
use crate::mcp::types::*;
use crate::generator::Generator;
use crate::config::Config;

pub struct BlogTools {
    base_path: PathBuf,
}

impl BlogTools {
    pub fn new(base_path: PathBuf) -> Self {
        Self { base_path }
    }

    pub async fn create_post(&self, request: CreatePostRequest) -> Result<ToolResult> {
        let posts_dir = self.base_path.join("content/posts");

        // Generate slug if not provided
        let slug = request.slug.unwrap_or_else(|| {
            request.title
                .chars()
                .map(|c| if c.is_alphanumeric() || c == ' ' { c.to_lowercase().to_string() } else { "".to_string() })
                .collect::<String>()
                .split_whitespace()
                .collect::<Vec<_>>()
                .join("-")
        });

        let date = Local::now().format("%Y-%m-%d").to_string();
        let filename = format!("{}-{}.md", date, slug);
        let filepath = posts_dir.join(&filename);

        // Create frontmatter
        let mut frontmatter = format!(
            "---\ntitle: {}\ndate: {}\n",
            request.title, date
        );

        if let Some(tags) = request.tags {
            if !tags.is_empty() {
                frontmatter.push_str(&format!("tags: {:?}\n", tags));
            }
        }

        frontmatter.push_str("---\n\n");

        // Create full content
        let full_content = format!("{}{}", frontmatter, request.content);

        // Ensure directory exists
        fs::create_dir_all(&posts_dir)?;

        // Write file
        fs::write(&filepath, full_content)?;

        Ok(ToolResult {
            content: vec![Content {
                content_type: "text".to_string(),
                text: format!("Post created successfully: {}", filename),
            }],
            is_error: None,
        })
    }

    pub async fn list_posts(&self, request: ListPostsRequest) -> Result<ToolResult> {
        let posts_dir = self.base_path.join("content/posts");

        if !posts_dir.exists() {
            return Ok(ToolResult {
                content: vec![Content {
                    content_type: "text".to_string(),
                    text: "No posts directory found".to_string(),
                }],
                is_error: Some(true),
            });
        }

        let mut posts = Vec::new();

        for entry in fs::read_dir(&posts_dir)? {
            let entry = entry?;
            let path = entry.path();

            if path.is_file() && path.extension().map_or(false, |ext| ext == "md") {
                if let Ok(content) = fs::read_to_string(&path) {
                    // Parse frontmatter (the text between the first two "---" fences)
                    if let Some((_, rest)) = content.split_once("---\n") {
                        if let Some((frontmatter_content, _)) = rest.split_once("---\n") {
                            // Simple YAML parsing for basic fields
                            let mut title = "Untitled".to_string();
                            let mut date = "Unknown".to_string();
                            let mut tags = Vec::new();

                            for line in frontmatter_content.lines() {
                                if let Some((key, value)) = line.split_once(':') {
                                    let key = key.trim();
                                    let value = value.trim();

                                    match key {
                                        "title" => title = value.to_string(),
                                        "date" => date = value.to_string(),
                                        "tags" => {
                                            // Simple array parsing
                                            if value.starts_with('[') && value.ends_with(']') {
                                                let tags_str = &value[1..value.len()-1];
                                                tags = tags_str.split(',')
                                                    .map(|s| s.trim().trim_matches('"').to_string())
                                                    .collect();
                                            }
                                        }
                                        _ => {}
                                    }
                                }
                            }

                            let slug = path.file_stem()
                                .and_then(|s| s.to_str())
                                .unwrap_or("unknown")
                                .to_string();

                            posts.push(PostInfo {
                                title,
                                slug: slug.clone(),
                                date,
                                tags,
                                url: format!("/posts/{}.html", slug),
                            });
                        }
                    }
                }
            }
        }

        // Apply pagination
        let offset = request.offset.unwrap_or(0);
        let limit = request.limit.unwrap_or(10);

        posts.sort_by(|a, b| b.date.cmp(&a.date));
        let paginated_posts: Vec<_> = posts.into_iter()
            .skip(offset)
            .take(limit)
            .collect();

        let result = json!({
            "posts": paginated_posts,
            "total": paginated_posts.len()
        });

        Ok(ToolResult {
            content: vec![Content {
                content_type: "text".to_string(),
                text: serde_json::to_string_pretty(&result)?,
            }],
            is_error: None,
        })
    }

    pub async fn build_blog(&self, request: BuildRequest) -> Result<ToolResult> {
        // Load configuration
        let config = Config::load(&self.base_path)?;

        // Create generator
        let generator = Generator::new(self.base_path.clone(), config)?;

        // Build the blog
        generator.build().await?;

        let message = if request.enable_ai.unwrap_or(false) {
            "Blog built successfully with AI features enabled"
        } else {
            "Blog built successfully"
        };

        Ok(ToolResult {
            content: vec![Content {
                content_type: "text".to_string(),
                text: message.to_string(),
            }],
            is_error: None,
        })
    }

    pub async fn get_post_content(&self, slug: &str) -> Result<ToolResult> {
        let posts_dir = self.base_path.join("content/posts");

        // Find file by slug
        for entry in fs::read_dir(&posts_dir)? {
            let entry = entry?;
            let path = entry.path();

            if path.is_file() && path.extension().map_or(false, |ext| ext == "md") {
                if let Some(filename) = path.file_stem().and_then(|s| s.to_str()) {
                    if filename.contains(slug) {
                        let content = fs::read_to_string(&path)?;
                        return Ok(ToolResult {
                            content: vec![Content {
                                content_type: "text".to_string(),
                                text: content,
                            }],
                            is_error: None,
                        });
                    }
                }
            }
        }

        Ok(ToolResult {
            content: vec![Content {
                content_type: "text".to_string(),
                text: format!("Post with slug '{}' not found", slug),
            }],
            is_error: Some(true),
        })
    }

    pub async fn translate_document(&self, args: Value) -> Result<ToolResult> {
        use crate::commands::doc::DocCommand;
        use crate::commands::doc::DocAction;

        let input_file = args.get("input_file")
            .and_then(|v| v.as_str())
            .ok_or_else(|| anyhow::anyhow!("input_file is required"))?;

        let target_lang = args.get("target_lang")
            .and_then(|v| v.as_str())
            .ok_or_else(|| anyhow::anyhow!("target_lang is required"))?;

        let source_lang = args.get("source_lang").and_then(|v| v.as_str()).map(|s| s.to_string());
        let output_file = args.get("output_file").and_then(|v| v.as_str()).map(|s| PathBuf::from(s));
        let model = args.get("model").and_then(|v| v.as_str()).unwrap_or("qwen2.5:latest");
        let ollama_endpoint = args.get("ollama_endpoint").and_then(|v| v.as_str()).unwrap_or("http://localhost:11434");

        let doc_cmd = DocCommand {
            action: DocAction::Translate {
                input: PathBuf::from(input_file),
                target_lang: target_lang.to_string(),
                source_lang: source_lang.clone(),
                output: output_file,
                model: model.to_string(),
                ollama_endpoint: ollama_endpoint.to_string(),
            }
        };

        match doc_cmd.execute(self.base_path.clone()).await {
            Ok(_) => {
                let output_path = if let Some(output) = args.get("output_file").and_then(|v| v.as_str()) {
                    output.to_string()
                } else {
                    let input_path = PathBuf::from(input_file);
                    let stem = input_path.file_stem().unwrap().to_string_lossy();
                    let ext = input_path.extension().unwrap_or_default().to_string_lossy();
                    format!("{}.{}.{}", stem, target_lang, ext)
                };

                Ok(ToolResult {
                    content: vec![Content {
                        content_type: "text".to_string(),
                        text: format!("Document translated successfully from {} to {}. Output: {}",
                            source_lang.unwrap_or_else(|| "auto-detected".to_string()),
                            target_lang, output_path),
                    }],
                    is_error: None,
                })
            }
            Err(e) => Ok(ToolResult {
                content: vec![Content {
                    content_type: "text".to_string(),
                    text: format!("Translation failed: {}", e),
                }],
                is_error: Some(true),
            })
        }
    }

    pub async fn generate_documentation(&self, args: Value) -> Result<ToolResult> {
        use crate::commands::doc::DocCommand;
        use crate::commands::doc::DocAction;

        let doc_type = args.get("doc_type")
            .and_then(|v| v.as_str())
            .ok_or_else(|| anyhow::anyhow!("doc_type is required"))?;

        let source_path = args.get("source_path").and_then(|v| v.as_str()).unwrap_or(".");
        let output_path = args.get("output_path").and_then(|v| v.as_str());
        let with_ai = args.get("with_ai").and_then(|v| v.as_bool()).unwrap_or(true);
        let include_deps = args.get("include_deps").and_then(|v| v.as_bool()).unwrap_or(false);
        let format_type = args.get("format_type").and_then(|v| v.as_str()).unwrap_or("markdown");

        let action = match doc_type {
            "readme" => DocAction::Readme {
                source: PathBuf::from(source_path),
                output: PathBuf::from(output_path.unwrap_or("README.md")),
                with_ai,
            },
            "api" => DocAction::Api {
                source: PathBuf::from(source_path),
                output: PathBuf::from(output_path.unwrap_or("./docs")),
                format: format_type.to_string(),
            },
            "structure" => DocAction::Structure {
                source: PathBuf::from(source_path),
                output: PathBuf::from(output_path.unwrap_or("docs/structure.md")),
                include_deps,
            },
            "changelog" => DocAction::Changelog {
                from: None,
                to: None,
                output: PathBuf::from(output_path.unwrap_or("CHANGELOG.md")),
                explain_changes: with_ai,
            },
            _ => return Ok(ToolResult {
                content: vec![Content {
                    content_type: "text".to_string(),
                    text: format!("Unsupported doc_type: {}. Supported types: readme, api, structure, changelog", doc_type),
                }],
                is_error: Some(true),
            })
        };

        let doc_cmd = DocCommand { action };

        match doc_cmd.execute(self.base_path.clone()).await {
            Ok(_) => {
                let output_path = match doc_type {
                    "readme" => output_path.unwrap_or("README.md"),
                    "api" => output_path.unwrap_or("./docs"),
                    "structure" => output_path.unwrap_or("docs/structure.md"),
                    "changelog" => output_path.unwrap_or("CHANGELOG.md"),
                    _ => "unknown"
                };

                Ok(ToolResult {
                    content: vec![Content {
                        content_type: "text".to_string(),
                        text: format!("{} documentation generated successfully. Output: {}",
                            doc_type.to_uppercase(), output_path),
                    }],
                    is_error: None,
                })
            }
            Err(e) => Ok(ToolResult {
                content: vec![Content {
                    content_type: "text".to_string(),
                    text: format!("Documentation generation failed: {}", e),
                }],
                is_error: Some(true),
            })
        }
    }

    pub fn get_tools() -> Vec<Tool> {
        vec![
            Tool {
                name: "create_blog_post".to_string(),
                description: "Create a new blog post with title, content, and optional tags".to_string(),
                input_schema: json!({
                    "type": "object",
                    "properties": {
                        "title": {
                            "type": "string",
                            "description": "The title of the blog post"
                        },
                        "content": {
                            "type": "string",
                            "description": "The content of the blog post in Markdown format"
                        },
                        "tags": {
                            "type": "array",
                            "items": {"type": "string"},
                            "description": "Optional tags for the blog post"
                        },
                        "slug": {
                            "type": "string",
                            "description": "Optional custom slug for the post URL"
                        }
                    },
                    "required": ["title", "content"]
                }),
            },
            Tool {
                name: "list_blog_posts".to_string(),
                description: "List existing blog posts with pagination".to_string(),
                input_schema: json!({
                    "type": "object",
                    "properties": {
                        "limit": {
                            "type": "integer",
                            "description": "Maximum number of posts to return (default: 10)"
                        },
                        "offset": {
                            "type": "integer",
                            "description": "Number of posts to skip (default: 0)"
                        }
                    }
                }),
            },
            Tool {
                name: "build_blog".to_string(),
                description: "Build the static blog with AI features".to_string(),
                input_schema: json!({
                    "type": "object",
                    "properties": {
                        "enable_ai": {
                            "type": "boolean",
                            "description": "Enable AI features during build (default: false)"
                        },
                        "translate": {
                            "type": "boolean",
                            "description": "Enable automatic translation (default: false)"
                        }
                    }
                }),
            },
            Tool {
                name: "get_post_content".to_string(),
                description: "Get the full content of a blog post by slug".to_string(),
                input_schema: json!({
                    "type": "object",
                    "properties": {
                        "slug": {
                            "type": "string",
                            "description": "The slug of the blog post to retrieve"
                        }
                    },
                    "required": ["slug"]
                }),
            },
            Tool {
                name: "translate_document".to_string(),
                description: "Translate markdown documents using Ollama AI while preserving structure".to_string(),
                input_schema: json!({
                    "type": "object",
                    "properties": {
                        "input_file": {
                            "type": "string",
                            "description": "Path to the input markdown file"
                        },
                        "target_lang": {
                            "type": "string",
                            "description": "Target language code (en, ja, zh, ko, es)"
                        },
                        "source_lang": {
                            "type": "string",
                            "description": "Source language code (auto-detect if not specified)"
                        },
                        "output_file": {
                            "type": "string",
                            "description": "Output file path (auto-generated if not specified)"
                        },
                        "model": {
                            "type": "string",
                            "description": "Ollama model to use (default: qwen2.5:latest)"
                        },
                        "ollama_endpoint": {
                            "type": "string",
                            "description": "Ollama API endpoint (default: http://localhost:11434)"
                        }
                    },
                    "required": ["input_file", "target_lang"]
                }),
            },
            Tool {
                name: "generate_documentation".to_string(),
                description: "Generate various types of documentation from code analysis".to_string(),
                input_schema: json!({
                    "type": "object",
                    "properties": {
                        "doc_type": {
                            "type": "string",
                            "enum": ["readme", "api", "structure", "changelog"],
                            "description": "Type of documentation to generate"
                        },
                        "source_path": {
                            "type": "string",
                            "description": "Source directory to analyze (default: current directory)"
                        },
                        "output_path": {
                            "type": "string",
                            "description": "Output file or directory path"
                        },
                        "with_ai": {
                            "type": "boolean",
                            "description": "Include AI-generated insights (default: true)"
                        },
                        "include_deps": {
                            "type": "boolean",
                            "description": "Include dependency analysis (default: false)"
                        },
                        "format_type": {
                            "type": "string",
                            "enum": ["markdown", "html", "json"],
                            "description": "Output format (default: markdown)"
                        }
                    },
                    "required": ["doc_type"]
                }),
            },
        ]
    }
}
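
The slug rule in `create_post` above, factored into a standalone helper for illustration (this helper does not exist in the codebase):

    fn slugify(title: &str) -> String {
        title.chars()
            .map(|c| if c.is_alphanumeric() || c == ' ' { c.to_lowercase().to_string() } else { String::new() })
            .collect::<String>()
            .split_whitespace()
            .collect::<Vec<_>>()
            .join("-")
    }

    assert_eq!(slugify("Hello, World! 2025"), "hello-world-2025");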

src/mcp/types.rs
@@ -1,78 +0,0 @@
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct McpRequest {
    pub jsonrpc: String,
    pub id: Option<serde_json::Value>,
    pub method: String,
    pub params: Option<serde_json::Value>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct McpResponse {
    pub jsonrpc: String,
    pub id: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub error: Option<McpError>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct McpError {
    pub code: i32,
    pub message: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub data: Option<serde_json::Value>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Tool {
    pub name: String,
    pub description: String,
    #[serde(rename = "inputSchema")]
    pub input_schema: serde_json::Value,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolResult {
    pub content: Vec<Content>,
    #[serde(rename = "isError", skip_serializing_if = "Option::is_none")]
    pub is_error: Option<bool>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Content {
    #[serde(rename = "type")]
    pub content_type: String,
    pub text: String,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CreatePostRequest {
    pub title: String,
    pub content: String,
    pub tags: Option<Vec<String>>,
    pub slug: Option<String>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ListPostsRequest {
    pub limit: Option<usize>,
    pub offset: Option<usize>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PostInfo {
    pub title: String,
    pub slug: String,
    pub date: String,
    pub tags: Vec<String>,
    pub url: String,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BuildRequest {
    pub enable_ai: Option<bool>,
    pub translate: Option<bool>,
}
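
Given the serde attributes above, a successful `ToolResult` serializes with the MCP wire names and omits `isError` when it is `None`:

    let result = ToolResult {
        content: vec![Content { content_type: "text".to_string(), text: "ok".to_string() }],
        is_error: None,
    };
    assert_eq!(
        serde_json::to_string(&result).unwrap(),
        r#"{"content":[{"type":"text","text":"ok"}]}"#
    );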

src/oauth.rs
@@ -1,210 +0,0 @@
use serde::{Deserialize, Serialize};
use tower_sessions::Session;
use axum::{
    extract::Query,
    response::Html,
    Json,
};
use jsonwebtoken::{encode, decode, Header, Algorithm, Validation, EncodingKey, DecodingKey};
use anyhow::Result;

#[derive(Debug, Serialize, Deserialize)]
pub struct OAuthData {
    pub did: String,
    pub handle: String,
    pub display_name: Option<String>,
    pub avatar: Option<String>,
    pub access_jwt: Option<String>,
    pub refresh_jwt: Option<String>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct OAuthCallback {
    pub code: Option<String>,
    pub state: Option<String>,
    pub error: Option<String>,
    pub error_description: Option<String>,
    pub iss: Option<String>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Claims {
    pub sub: String, // DID
    pub handle: String,
    pub display_name: Option<String>,
    pub avatar: Option<String>,
    pub exp: usize,
    pub iat: usize,
}

const _JWT_SECRET: &[u8] = b"ailog-oauth-secret-key-2025";

#[allow(dead_code)]
pub fn create_jwt(oauth_data: &OAuthData) -> Result<String> {
    let now = chrono::Utc::now().timestamp() as usize;
    let claims = Claims {
        sub: oauth_data.did.clone(),
        handle: oauth_data.handle.clone(),
        display_name: oauth_data.display_name.clone(),
        avatar: oauth_data.avatar.clone(),
        exp: now + 24 * 60 * 60, // 24 hours
        iat: now,
    };

    let token = encode(
        &Header::default(),
        &claims,
        &EncodingKey::from_secret(_JWT_SECRET),
    )?;

    Ok(token)
}

#[allow(dead_code)]
pub fn verify_jwt(token: &str) -> Result<Claims> {
    let token_data = decode::<Claims>(
        token,
        &DecodingKey::from_secret(_JWT_SECRET),
        &Validation::new(Algorithm::HS256),
    )?;

    Ok(token_data.claims)
}

#[allow(dead_code)]
pub async fn oauth_callback_handler(
    Query(params): Query<OAuthCallback>,
    session: Session,
) -> Result<Html<String>, String> {
    println!("🔧 OAuth callback received: {:?}", params);

    if let Some(error) = params.error {
        let error_html = format!(
            r#"
<!DOCTYPE html>
<html>
<head>
    <title>OAuth Error</title>
    <style>
        body {{ font-family: -apple-system, BlinkMacSystemFont, sans-serif; text-align: center; padding: 50px; }}
        .error {{ background: #f8d7da; color: #721c24; padding: 20px; border-radius: 8px; }}
    </style>
</head>
<body>
    <div class="error">
        <h2>❌ Authentication Failed</h2>
        <p><strong>Error:</strong> {}</p>
        {}
        <button onclick="window.close()">Close Window</button>
    </div>
</body>
</html>
            "#,
            error,
            params.error_description.map(|d| format!("<p><strong>Description:</strong> {}</p>", d)).unwrap_or_default()
        );
        return Ok(Html(error_html));
    }

    if let Some(code) = params.code {
        // In a real implementation, you would exchange the code for tokens here.
        // For now, create a mock session.
        let oauth_data = OAuthData {
            did: format!("did:plc:example_{}", &code[..8]),
            handle: "user.bsky.social".to_string(),
            display_name: Some("OAuth User".to_string()),
            avatar: Some("https://via.placeholder.com/40x40/1185fe/ffffff?text=U".to_string()),
            access_jwt: None,
            refresh_jwt: None,
        };

        // Create JWT
        let jwt_token = create_jwt(&oauth_data).map_err(|e| e.to_string())?;

        // Store in session
        session.insert("oauth_data", &oauth_data).await.map_err(|e| e.to_string())?;
        session.insert("jwt_token", &jwt_token).await.map_err(|e| e.to_string())?;

        println!("✅ OAuth session created for: {}", oauth_data.handle);

        let success_html = format!(
            r#"
<!DOCTYPE html>
<html>
<head>
    <title>OAuth Success</title>
    <style>
        body {{ font-family: -apple-system, BlinkMacSystemFont, sans-serif; text-align: center; padding: 50px; }}
        .success {{ background: #d1edff; color: #0c5460; padding: 20px; border-radius: 8px; }}
    </style>
</head>
<body>
    <div class="success">
        <h2>✅ Authentication Successful</h2>
        <p><strong>Handle:</strong> @{}</p>
        <p><strong>DID:</strong> {}</p>
        <p>You can now close this window.</p>
    </div>
    <script>
        // Send success message to parent window
        if (window.opener && !window.opener.closed) {{
            window.opener.postMessage({{
                type: 'oauth_success',
                session: {{
                    authenticated: true,
                    did: '{}',
                    handle: '{}',
                    displayName: '{}',
                    avatar: '{}',
                    jwt: '{}'
                }}
            }}, window.location.origin);

            setTimeout(() => window.close(), 2000);
        }}
    </script>
</body>
</html>
            "#,
            oauth_data.handle,
            oauth_data.did,
            oauth_data.did,
            oauth_data.handle,
            oauth_data.display_name.as_deref().unwrap_or("User"),
            oauth_data.avatar.as_deref().unwrap_or(""),
            jwt_token
        );

        return Ok(Html(success_html));
    }

    Err("No authorization code received".to_string())
}

#[allow(dead_code)]
pub async fn oauth_session_handler(session: Session) -> Json<serde_json::Value> {
    if let Ok(Some(oauth_data)) = session.get::<OAuthData>("oauth_data").await {
        if let Ok(Some(jwt_token)) = session.get::<String>("jwt_token").await {
            return Json(serde_json::json!({
                "authenticated": true,
                "user": oauth_data,
                "jwt": jwt_token
            }));
        }
    }

    Json(serde_json::json!({
        "authenticated": false
    }))
}

#[allow(dead_code)]
pub async fn oauth_logout_handler(session: Session) -> Json<serde_json::Value> {
    let _ = session.remove::<OAuthData>("oauth_data").await;
    let _ = session.remove::<String>("jwt_token").await;

    Json(serde_json::json!({
        "success": true,
        "message": "Logged out successfully"
    }))
}
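
Round-trip of the JWT helpers above; the `OAuthData` values are illustrative:

    let data = OAuthData {
        did: "did:plc:abc12345".to_string(),
        handle: "user.bsky.social".to_string(),
        display_name: None,
        avatar: None,
        access_jwt: None,
        refresh_jwt: None,
    };
    let token = create_jwt(&data).unwrap();
    let claims = verify_jwt(&token).unwrap();
    assert_eq!(claims.sub, "did:plc:abc12345");
    assert_eq!(claims.handle, "user.bsky.social");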

@@ -1,96 +0,0 @@
use actix_web::{web, App, HttpResponse, HttpServer, middleware};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use chrono::{DateTime, Utc};

#[derive(Clone)]
struct RateLimiter {
    requests: Arc<Mutex<HashMap<String, Vec<DateTime<Utc>>>>>,
    limit_per_hour: usize,
}

impl RateLimiter {
    fn new(limit: usize) -> Self {
        Self {
            requests: Arc::new(Mutex::new(HashMap::new())),
            limit_per_hour: limit,
        }
    }

    fn check_limit(&self, user_id: &str) -> bool {
        let mut requests = self.requests.lock().unwrap();
        let now = Utc::now();
        let hour_ago = now - chrono::Duration::hours(1);

        let user_requests = requests.entry(user_id.to_string()).or_insert(Vec::new());
        user_requests.retain(|&time| time > hour_ago);

        if user_requests.len() < self.limit_per_hour {
            user_requests.push(now);
            true
        } else {
            false
        }
    }
}

#[derive(Clone)]
struct AppState {
    rate_limiter: RateLimiter,
}

// Minimal stand-in for the identity injected by the ATProto auth middleware
// (the middleware itself is outside this snippet).
#[derive(Clone)]
struct UserInfo {
    did: String,
}

#[derive(Serialize, Deserialize)]
struct GenerateRequest {
    model: String,
    prompt: String,
    stream: bool,
    options: Option<serde_json::Value>,
}

async fn proxy_generate(
    req: web::Json<GenerateRequest>,
    data: web::Data<AppState>,
    user_info: web::ReqData<UserInfo>, // obtained from ATProto authentication
) -> Result<HttpResponse, actix_web::Error> {
    // Rate-limit check
    if !data.rate_limiter.check_limit(&user_info.did) {
        return Ok(HttpResponse::TooManyRequests()
            .json(serde_json::json!({
                "error": "Rate limit exceeded. Please try again later."
            })));
    }

    // Prompt size limit
    if req.prompt.len() > 500 {
        return Ok(HttpResponse::BadRequest()
            .json(serde_json::json!({
                "error": "Prompt too long. Maximum 500 characters."
            })));
    }

    // Forward the request to Ollama
    let client = reqwest::Client::new();
    let response = client
        .post("http://localhost:11434/api/generate")
        .json(&req.into_inner())
        .send()
        .await
        .map_err(actix_web::error::ErrorInternalServerError)?;

    let body = response.bytes().await.map_err(actix_web::error::ErrorInternalServerError)?;
    Ok(HttpResponse::Ok()
        .content_type("application/json")
        .body(body))
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    let rate_limiter = RateLimiter::new(20); // 20 requests per hour

    HttpServer::new(move || {
        App::new()
            .app_data(web::Data::new(AppState {
                rate_limiter: rate_limiter.clone(),
            }))
            .wrap(middleware::Logger::default())
            .route("/api/generate", web::post().to(proxy_generate))
    })
    .bind("127.0.0.1:8080")?
    .run()
    .await
}
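
Expected behaviour of the sliding-window limiter above: the 21st request from the same DID within an hour is rejected, while other users are unaffected:

    let limiter = RateLimiter::new(20);
    for _ in 0..20 {
        assert!(limiter.check_limit("did:plc:alice"));
    }
    assert!(!limiter.check_limit("did:plc:alice"));
    assert!(limiter.check_limit("did:plc:bob"));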

src/post.rs (new file)
@@ -0,0 +1,172 @@
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use walkdir::WalkDir;

use crate::config::{Config, RecordMapping};

#[derive(Debug, Serialize)]
struct PutRecordRequest {
    repo: String,
    collection: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    rkey: Option<String>,
    record: PostRecord,
}

#[derive(Debug, Serialize, Clone)]
struct PostRecord {
    #[serde(rename = "$type")]
    schema_type: String,
    title: String,
    content: String,
    #[serde(rename = "createdAt")]
    created_at: String,
}

#[derive(Debug, Deserialize)]
#[allow(dead_code)]
struct PutRecordResponse {
    uri: String,
    cid: String,
    #[serde(default)]
    commit: Option<serde_json::Value>,
    #[serde(rename = "validationStatus", default)]
    validation_status: Option<String>,
}

pub async fn execute() -> Result<()> {
    let mut config = Config::load()?;

    // Refresh session before API calls
    crate::refresh::refresh_session(&mut config).await?;

    let mut mapping = Config::load_mapping()?;
    println!("Posting markdown files from ./content/post/...");

    let pds_url = format!("https://{}", config.pds);
    let client = reqwest::Client::new();

    // Walk through ./content/post/
    for entry in WalkDir::new("./content/post")
        .into_iter()
        .filter_map(|e| e.ok())
        .filter(|e| e.path().extension().and_then(|s| s.to_str()) == Some("md"))
    {
        let path = entry.path();
        let filename = path
            .file_name()
            .and_then(|s| s.to_str())
            .context("Invalid filename")?
            .to_string();

        println!("Processing: {}", filename);

        let content = std::fs::read_to_string(path)?;

        // Use filename as title (simplified)
        let title = path
            .file_stem()
            .and_then(|s| s.to_str())
            .unwrap_or("Untitled");

        // Check if this file already has a mapping
        let existing_rkey = mapping.get(&filename).map(|m| m.rkey.clone());

        // Create record
        let record = PostRecord {
            schema_type: "ai.syui.log.post".to_string(),
            title: title.to_string(),
            content,
            created_at: chrono::Utc::now().to_rfc3339(),
        };

        let res: PutRecordResponse = if let Some(rkey) = existing_rkey.clone() {
            // Update existing record with putRecord
            let put_req = PutRecordRequest {
                repo: config.did.clone(),
                collection: "ai.syui.log.post".to_string(),
                rkey: Some(rkey),
                record: record.clone(),
            };

            let put_url = format!("{}/xrpc/com.atproto.repo.putRecord", pds_url);
            let response = client
                .post(&put_url)
                .header("Authorization", format!("Bearer {}", config.access_jwt))
                .json(&put_req)
                .send()
                .await
                .context("Failed to put record")?;

            let status = response.status();
            let body_text = response.text().await?;

            if !status.is_success() {
                eprintln!("Error response ({}): {}", status, body_text);
                anyhow::bail!("API returned error: {}", body_text);
            }

            serde_json::from_str(&body_text)
                .context(format!("Failed to parse putRecord response. Body: {}", body_text))?
        } else {
            // Create new record with createRecord (auto-generates TID)
            #[derive(Serialize)]
            struct CreateRecordRequest {
                repo: String,
                collection: String,
                record: PostRecord,
            }

            let create_req = CreateRecordRequest {
                repo: config.did.clone(),
                collection: "ai.syui.log.post".to_string(),
                record,
            };

            let create_url = format!("{}/xrpc/com.atproto.repo.createRecord", pds_url);
            let response = client
                .post(&create_url)
                .header("Authorization", format!("Bearer {}", config.access_jwt))
                .json(&create_req)
                .send()
                .await
                .context("Failed to create record")?;

            let status = response.status();
            let body_text = response.text().await?;

            if !status.is_success() {
                eprintln!("Error response ({}): {}", status, body_text);
                anyhow::bail!("API returned error: {}", body_text);
            }

            serde_json::from_str(&body_text)
                .context(format!("Failed to parse createRecord response. Body: {}", body_text))?
        };

        // Extract rkey from URI
        let rkey = res.uri.split('/').last().unwrap().to_string();

        // Update mapping
        mapping.insert(
            filename.clone(),
            RecordMapping {
                rkey: rkey.clone(),
                uri: res.uri.clone(),
                cid: res.cid.clone(),
            },
        );

        if existing_rkey.is_some() {
            println!("  ✓ Updated: {} ({})", title, rkey);
        } else {
            println!("  ✓ Created: {} ({})", title, rkey);
        }
    }

    // Save mapping
    Config::save_mapping(&mapping)?;
    println!("Mapping saved to: {}", Config::mapping_path()?.display());
    println!("Done!");
    Ok(())
}
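
The record body that `execute` above sends to createRecord/putRecord, as serialized by serde (timestamp illustrative):

    let record = PostRecord {
        schema_type: "ai.syui.log.post".to_string(),
        title: "hello".to_string(),
        content: "# hello".to_string(),
        created_at: "2025-01-01T00:00:00+00:00".to_string(),
    };
    // serializes to:
    // {"$type":"ai.syui.log.post","title":"hello","content":"# hello","createdAt":"2025-01-01T00:00:00+00:00"}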

src/refresh.rs (new file)
@@ -0,0 +1,50 @@
use anyhow::{Context, Result};
use serde::Deserialize;

use crate::config::Config;

#[derive(Debug, Deserialize)]
#[allow(dead_code)]
struct RefreshSessionResponse {
    #[serde(rename = "accessJwt")]
    access_jwt: String,
    #[serde(rename = "refreshJwt")]
    refresh_jwt: String,
    handle: String,
    did: String,
}

pub async fn refresh_session(config: &mut Config) -> Result<()> {
    let pds_url = format!("https://{}", config.pds);
    let refresh_url = format!("{}/xrpc/com.atproto.server.refreshSession", pds_url);

    let client = reqwest::Client::new();
    let response = client
        .post(&refresh_url)
        .header("Authorization", format!("Bearer {}", config.refresh_jwt))
        .send()
        .await
        .context("Failed to refresh session")?;

    let status = response.status();
    let body_text = response.text().await?;

    if !status.is_success() {
        eprintln!("Refresh session failed ({}): {}", status, body_text);
        anyhow::bail!("Failed to refresh session. Please run 'ailog login' again.");
    }

    let res: RefreshSessionResponse = serde_json::from_str(&body_text)
        .context(format!("Failed to parse refreshSession response. Body: {}", body_text))?;

    // Update config with new tokens
    config.access_jwt = res.access_jwt;
    config.refresh_jwt = res.refresh_jwt;

    // Save updated config (silent)
    let path = Config::config_path()?;
    let content = serde_json::to_string_pretty(config)?;
    std::fs::write(&path, content)?;

    Ok(())
}

src/serve.rs (new file)
@@ -0,0 +1,29 @@
use anyhow::Result;
use axum::Router;
use std::net::SocketAddr;
use tower_http::services::ServeDir;

pub async fn execute(port: u16) -> Result<()> {
    let public_dir = "./public";

    // Check if the public directory exists
    if !std::path::Path::new(public_dir).exists() {
        anyhow::bail!("Public directory not found. Run 'ailog build' first.");
    }

    println!("Starting server...");
    println!("  → Serving: {}", public_dir);
    println!("  → Address: http://localhost:{}", port);
    println!("  → Blog: http://localhost:{}/", port);
    println!("  → AT Browser: http://localhost:{}/at/", port);
    println!("\nPress Ctrl+C to stop");

    let app = Router::new().nest_service("/", ServeDir::new(public_dir));

    let addr = SocketAddr::from(([127, 0, 0, 1], port));
    let listener = tokio::net::TcpListener::bind(addr).await?;

    axum::serve(listener, app).await?;

    Ok(())
}

src/shortcode.rs
@@ -1,256 +0,0 @@
use regex::Regex;
use std::collections::HashMap;

pub struct ShortcodeProcessor {
    shortcodes: HashMap<String, Box<dyn Fn(&str) -> String + Send + Sync>>,
}

impl ShortcodeProcessor {
    pub fn new() -> Self {
        let mut processor = Self {
            shortcodes: HashMap::new(),
        };

        // Register built-in shortcodes
        processor.register_img_compare();
        processor.register_message();

        processor
    }

    fn register_img_compare(&mut self) {
        self.shortcodes.insert(
            "img-compare".to_string(),
            Box::new(|attrs| Self::parse_img_compare_shortcode(attrs)),
        );
    }

    fn register_message(&mut self) {
        self.shortcodes.insert(
            "msg".to_string(),
            Box::new(|attrs| Self::parse_message_shortcode(attrs)),
        );
    }

    pub fn process(&self, content: &str) -> String {
        let mut processed = content.to_string();

        // Process {{< shortcode >}} format (Hugo-style)
        let hugo_regex = Regex::new(r#"\{\{<\s*(\w+(?:-\w+)*)\s*([^>]*)\s*>\}\}"#).unwrap();
        processed = hugo_regex.replace_all(&processed, |caps: &regex::Captures| {
            let shortcode_name = &caps[1];
            let attrs = caps.get(2).map(|m| m.as_str()).unwrap_or("");

            if let Some(handler) = self.shortcodes.get(shortcode_name) {
                handler(attrs)
            } else {
                caps[0].to_string() // Return original if shortcode not found
            }
        }).to_string();

        // Process [shortcode] format (bracket-style)
        let bracket_regex = Regex::new(r#"\[(\w+(?:-\w+)*)\s*([^\]]*)\]"#).unwrap();
        processed = bracket_regex.replace_all(&processed, |caps: &regex::Captures| {
            let shortcode_name = &caps[1];
            let attrs = caps.get(2).map(|m| m.as_str()).unwrap_or("");

            if let Some(handler) = self.shortcodes.get(shortcode_name) {
                handler(attrs)
            } else {
                caps[0].to_string() // Return original if shortcode not found
            }
        }).to_string();

        processed
    }

    fn parse_attributes(attrs: &str) -> HashMap<String, String> {
        let attr_regex = Regex::new(r#"(\w+(?:-\w+)*)=(?:"([^"]*)"|'([^']*)'|([^\s]+))"#).unwrap();
        let mut attributes = HashMap::new();

        for caps in attr_regex.captures_iter(attrs) {
            let key = caps.get(1).unwrap().as_str().to_string();
            let value = caps.get(2).or(caps.get(3)).or(caps.get(4)).unwrap().as_str().to_string();
            attributes.insert(key, value);
        }

        attributes
    }

    fn parse_img_compare_shortcode(attrs: &str) -> String {
        let attributes = Self::parse_attributes(attrs);

        let before = attributes.get("before").map(|s| s.as_str()).unwrap_or("");
        let after = attributes.get("after").map(|s| s.as_str()).unwrap_or("");
        let before_caption = attributes.get("before-caption")
            .or(attributes.get("before-alt"))
            .map(|s| s.as_str())
            .unwrap_or("Before");
        let after_caption = attributes.get("after-caption")
            .or(attributes.get("after-alt"))
            .map(|s| s.as_str())
            .unwrap_or("After");
        let width = attributes.get("width").map(|s| s.as_str()).unwrap_or("1000");
        let height = attributes.get("height").map(|s| s.as_str()).unwrap_or("400");
        let alt = attributes.get("alt").map(|s| s.as_str()).unwrap_or("");

        let alt_suffix = if !alt.is_empty() {
            format!(" | {}", alt)
        } else {
            String::new()
        };

        format!(r#"
<div class="img-comparison-container">
    <div class="img-comparison-slider" style="height: {}px;">
        <div class="img-before overlay-side">
            <img src="{}" alt="{}{}" loading="lazy" width="{}">
        </div>
        <div class="img-after">
            <img src="{}" alt="{}{}" loading="lazy" width="{}">
        </div>
        <input type="range" min="0" max="100" value="50" class="slider">
        <div class="slider-thumb">
            <div class="slider-thumb-img"></div>
        </div>
    </div>
</div>"#,
            height,
            before, before_caption, alt_suffix, width,
            after, after_caption, alt_suffix, width
        )
    }

    fn parse_message_shortcode(attrs: &str) -> String {
        let attributes = Self::parse_attributes(attrs);

        let msg_type = attributes.get("type").map(|s| s.as_str()).unwrap_or("info");
        let content = attributes.get("content").map(|s| s.as_str()).unwrap_or("");

        let (symbol, class_suffix) = match msg_type {
            "info" => ("!", "message"),
            "warning" => ("⚠", "warning"),
            "error" => ("✖", "error"),
            "success" => ("✓", "success"),
            "note" => ("📝", "note"),
            _ => ("!", "message"),
        };

        format!(r#"
<aside class="msg {}"><span class="msg-symbol">{}</span><div class="msg-content">
<p>{}</p>
</div></aside>"#,
            class_suffix, symbol, content
        )
    }

    /// Register a custom shortcode handler
    #[allow(dead_code)]
    pub fn register_shortcode<F>(&mut self, name: &str, handler: F)
    where
        F: Fn(&str) -> String + Send + Sync + 'static,
    {
        self.shortcodes.insert(name.to_string(), Box::new(handler));
    }

    /// Get list of registered shortcodes
#[allow(dead_code)]
|
||||
pub fn get_shortcode_names(&self) -> Vec<&String> {
|
||||
self.shortcodes.keys().collect()
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for ShortcodeProcessor {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_img_compare_hugo_style() {
|
||||
let processor = ShortcodeProcessor::new();
|
||||
let input = r#"{{< img-compare before="/before.jpg" after="/after.jpg" >}}"#;
|
||||
let result = processor.process(input);
|
||||
|
||||
assert!(result.contains("img-comparison-container"));
|
||||
assert!(result.contains("/before.jpg"));
|
||||
assert!(result.contains("/after.jpg"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_img_compare_bracket_style() {
|
||||
let processor = ShortcodeProcessor::new();
|
||||
let input = r#"[img-compare before="/before.jpg" after="/after.jpg"]"#;
|
||||
let result = processor.process(input);
|
||||
|
||||
assert!(result.contains("img-comparison-container"));
|
||||
assert!(result.contains("/before.jpg"));
|
||||
assert!(result.contains("/after.jpg"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_custom_shortcode() {
|
||||
let mut processor = ShortcodeProcessor::new();
|
||||
processor.register_shortcode("test", |_| "<div>test</div>".to_string());
|
||||
|
||||
let input = "{{< test >}}";
|
||||
let result = processor.process(input);
|
||||
|
||||
assert_eq!(result, "<div>test</div>");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_unknown_shortcode() {
|
||||
let processor = ShortcodeProcessor::new();
|
||||
let input = "{{< unknown attr=\"value\" >}}";
|
||||
let result = processor.process(input);
|
||||
|
||||
assert_eq!(result, input); // Should return original
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_attribute_parsing() {
|
||||
let attributes = ShortcodeProcessor::parse_attributes(r#"before="/test.jpg" after='test2.jpg' width=800"#);
|
||||
|
||||
assert_eq!(attributes.get("before").unwrap(), "/test.jpg");
|
||||
assert_eq!(attributes.get("after").unwrap(), "test2.jpg");
|
||||
assert_eq!(attributes.get("width").unwrap(), "800");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_message_shortcode_info() {
|
||||
let processor = ShortcodeProcessor::new();
|
||||
let input = r#"[msg type="info" content="This is an info message"]"#;
|
||||
let result = processor.process(input);
|
||||
|
||||
assert!(result.contains("msg message"));
|
||||
assert!(result.contains("This is an info message"));
|
||||
assert!(result.contains("!"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_message_shortcode_warning() {
|
||||
let processor = ShortcodeProcessor::new();
|
||||
let input = r#"{{< msg type="warning" content="This is a warning" >}}"#;
|
||||
let result = processor.process(input);
|
||||
|
||||
assert!(result.contains("msg warning"));
|
||||
assert!(result.contains("This is a warning"));
|
||||
assert!(result.contains("⚠"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_message_shortcode_default() {
|
||||
let processor = ShortcodeProcessor::new();
|
||||
let input = r#"[msg content="Default message"]"#;
|
||||
let result = processor.process(input);
|
||||
|
||||
assert!(result.contains("msg message"));
|
||||
assert!(result.contains("Default message"));
|
||||
assert!(result.contains("!"));
|
||||
}
|
||||
}
|
||||
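For readers skimming the deleted module, a minimal sketch of how the processor was presumably used during page rendering; the pipeline placement is inferred from the API, not taken from this commit:

// Sketch: expand shortcodes in a post body before markdown rendering.
// `post_body` is illustrative input; both Hugo-style and bracket-style
// forms are expanded by the same `process` pass, as the tests above show.
fn expand_shortcodes(post_body: &str) -> String {
    let processor = ShortcodeProcessor::new();
    processor.process(post_body)
}

// Example input producing a comparison slider and a warning callout:
// {{< img-compare before="/old.png" after="/new.png" >}}
// [msg type="warning" content="Draft content"]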
@@ -1,36 +0,0 @@
use anyhow::Result;
use tera::{Tera, Context};
use std::path::PathBuf;
use crate::config::Config;
use crate::generator::Post;

pub struct TemplateEngine {
    tera: Tera,
}

impl TemplateEngine {
    pub fn new(template_dir: PathBuf) -> Result<Self> {
        let pattern = format!("{}/**/*.html", template_dir.display());
        let tera = Tera::new(&pattern)?;

        Ok(Self { tera })
    }

    #[allow(dead_code)]
    pub fn create_context(&self, config: &Config, posts: &[Post]) -> Result<Context> {
        let mut context = Context::new();
        context.insert("config", &config.site);
        context.insert("posts", posts);
        Ok(context)
    }

    pub fn render(&self, template: &str, context: &Context) -> Result<String> {
        let output = self.tera.render(template, context)?;
        Ok(output)
    }

    pub fn render_with_context(&self, template: &str, context: &Context) -> Result<String> {
        let output = self.tera.render(template, context)?;
        Ok(output)
    }
}
@@ -1,254 +0,0 @@
use anyhow::Result;
use regex::Regex;
use super::MarkdownSection;

#[derive(Clone)]
pub struct MarkdownParser {
    _code_block_regex: Regex,
    header_regex: Regex,
    link_regex: Regex,
    image_regex: Regex,
    table_regex: Regex,
    list_regex: Regex,
    quote_regex: Regex,
}

impl MarkdownParser {
    pub fn new() -> Self {
        Self {
            _code_block_regex: Regex::new(r"```([a-zA-Z0-9]*)\n([\s\S]*?)\n```").unwrap(),
            header_regex: Regex::new(r"^(#{1,6})\s+(.+)$").unwrap(),
            link_regex: Regex::new(r"\[([^\]]+)\]\(([^)]+)\)").unwrap(),
            image_regex: Regex::new(r"!\[([^\]]*)\]\(([^)]+)\)").unwrap(),
            table_regex: Regex::new(r"^\|.*\|$").unwrap(),
            list_regex: Regex::new(r"^[\s]*[-*+]\s+(.+)$").unwrap(),
            quote_regex: Regex::new(r"^>\s+(.+)$").unwrap(),
        }
    }

    pub fn parse_markdown(&self, content: &str) -> Result<Vec<MarkdownSection>> {
        let mut sections = Vec::new();
        let mut current_text = String::new();
        let lines: Vec<&str> = content.lines().collect();
        let mut i = 0;

        while i < lines.len() {
            let line = lines[i];

            // Check for code blocks
            if line.starts_with("```") {
                // Save accumulated text
                if !current_text.trim().is_empty() {
                    sections.extend(self.parse_text_sections(&current_text)?);
                    current_text.clear();
                }

                // Parse code block
                let (code_section, lines_consumed) = self.parse_code_block(&lines[i..])?;
                sections.push(code_section);
                i += lines_consumed;
                continue;
            }

            // Check for headers
            if let Some(caps) = self.header_regex.captures(line) {
                // Save accumulated text
                if !current_text.trim().is_empty() {
                    sections.extend(self.parse_text_sections(&current_text)?);
                    current_text.clear();
                }

                let level = caps.get(1).unwrap().as_str().len() as u8;
                let header_text = caps.get(2).unwrap().as_str().to_string();
                sections.push(MarkdownSection::Header(header_text, level));
                i += 1;
                continue;
            }

            // Check for tables
            if self.table_regex.is_match(line) {
                // Save accumulated text
                if !current_text.trim().is_empty() {
                    sections.extend(self.parse_text_sections(&current_text)?);
                    current_text.clear();
                }

                let (table_section, lines_consumed) = self.parse_table(&lines[i..])?;
                sections.push(table_section);
                i += lines_consumed;
                continue;
            }

            // Check for quotes
            if let Some(caps) = self.quote_regex.captures(line) {
                // Save accumulated text
                if !current_text.trim().is_empty() {
                    sections.extend(self.parse_text_sections(&current_text)?);
                    current_text.clear();
                }

                let quote_text = caps.get(1).unwrap().as_str().to_string();
                sections.push(MarkdownSection::Quote(quote_text));
                i += 1;
                continue;
            }

            // Check for lists
            if let Some(caps) = self.list_regex.captures(line) {
                // Save accumulated text
                if !current_text.trim().is_empty() {
                    sections.extend(self.parse_text_sections(&current_text)?);
                    current_text.clear();
                }

                let list_text = caps.get(1).unwrap().as_str().to_string();
                sections.push(MarkdownSection::List(list_text));
                i += 1;
                continue;
            }

            // Accumulate regular text
            current_text.push_str(line);
            current_text.push('\n');
            i += 1;
        }

        // Process remaining text
        if !current_text.trim().is_empty() {
            sections.extend(self.parse_text_sections(&current_text)?);
        }

        Ok(sections)
    }

    fn parse_code_block(&self, lines: &[&str]) -> Result<(MarkdownSection, usize)> {
        if lines.is_empty() || !lines[0].starts_with("```") {
            anyhow::bail!("Not a code block");
        }

        let first_line = lines[0];
        let language = if first_line.len() > 3 {
            Some(first_line[3..].trim().to_string())
        } else {
            None
        };

        let mut content = String::new();
        let mut end_index = 1;

        for (i, &line) in lines[1..].iter().enumerate() {
            if line.starts_with("```") {
                end_index = i + 2; // +1 for slice offset, +1 for closing line
                break;
            }
            if i > 0 {
                content.push('\n');
            }
            content.push_str(line);
        }

        Ok((MarkdownSection::Code(content, language), end_index))
    }

    fn parse_table(&self, lines: &[&str]) -> Result<(MarkdownSection, usize)> {
        let mut table_content = String::new();
        let mut line_count = 0;

        for &line in lines {
            if self.table_regex.is_match(line) {
                if line_count > 0 {
                    table_content.push('\n');
                }
                table_content.push_str(line);
                line_count += 1;
            } else {
                break;
            }
        }

        Ok((MarkdownSection::Table(table_content), line_count))
    }

    fn parse_text_sections(&self, text: &str) -> Result<Vec<MarkdownSection>> {
        let mut sections = Vec::new();
        let mut remaining = text;

        // Look for images first (they should be preserved)
        while let Some(caps) = self.image_regex.captures(remaining) {
            let full_match = caps.get(0).unwrap();
            let before = &remaining[..full_match.start()];
            let alt = caps.get(1).unwrap().as_str().to_string();
            let url = caps.get(2).unwrap().as_str().to_string();

            if !before.trim().is_empty() {
                sections.push(MarkdownSection::Text(before.to_string()));
            }

            sections.push(MarkdownSection::Image(alt, url));
            remaining = &remaining[full_match.end()..];
        }

        // Look for links
        let mut current_text = remaining.to_string();
        while let Some(caps) = self.link_regex.captures(&current_text) {
            let full_match = caps.get(0).unwrap();
            let before = &current_text[..full_match.start()];
            let link_text = caps.get(1).unwrap().as_str().to_string();
            let url = caps.get(2).unwrap().as_str().to_string();

            if !before.trim().is_empty() {
                sections.push(MarkdownSection::Text(before.to_string()));
            }

            sections.push(MarkdownSection::Link(link_text, url));
            current_text = current_text[full_match.end()..].to_string();
        }

        // Add remaining text
        if !current_text.trim().is_empty() {
            sections.push(MarkdownSection::Text(current_text));
        }

        Ok(sections)
    }

    pub fn rebuild_markdown(&self, sections: Vec<MarkdownSection>) -> String {
        let mut result = String::new();

        for section in sections {
            match section {
                MarkdownSection::Text(text) => {
                    result.push_str(&text);
                }
                MarkdownSection::Code(content, Some(lang)) => {
                    result.push_str(&format!("```{}\n{}\n```\n", lang, content));
                }
                MarkdownSection::Code(content, None) => {
                    result.push_str(&format!("```\n{}\n```\n", content));
                }
                MarkdownSection::Header(text, level) => {
                    let hashes = "#".repeat(level as usize);
                    result.push_str(&format!("{} {}\n", hashes, text));
                }
                MarkdownSection::Link(text, url) => {
                    result.push_str(&format!("[{}]({})", text, url));
                }
                MarkdownSection::Image(alt, url) => {
                    result.push_str(&format!("![{}]({})", alt, url));
                }
                MarkdownSection::Table(content) => {
                    result.push_str(&content);
                    result.push('\n');
                }
                MarkdownSection::List(text) => {
                    result.push_str(&format!("- {}\n", text));
                }
                MarkdownSection::Quote(text) => {
                    result.push_str(&format!("> {}\n", text));
                }
            }
        }

        result
    }
}
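A small round-trip sketch of the parser above; the input string is illustrative:

// Sketch: parse a post into sections, then rebuild it. Code fences and
// images survive the round trip because the parser emits them as
// dedicated variants that `rebuild_markdown` re-serializes verbatim.
fn roundtrip_demo() -> anyhow::Result<()> {
    let parser = MarkdownParser::new();
    let input = "# Title\n\nSome text with a [link](https://example.com).\n";
    let sections = parser.parse_markdown(input)?;
    let rebuilt = parser.rebuild_markdown(sections);
    println!("{}", rebuilt);
    Ok(())
}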
@@ -1,130 +0,0 @@
pub mod ollama_translator;
pub mod markdown_parser;

use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TranslationConfig {
    pub source_lang: String,
    pub target_lang: String,
    pub ollama_endpoint: String,
    pub model: String,
    pub preserve_code: bool,
    pub preserve_links: bool,
}

impl Default for TranslationConfig {
    fn default() -> Self {
        Self {
            source_lang: "ja".to_string(),
            target_lang: "en".to_string(),
            ollama_endpoint: "http://localhost:11434".to_string(),
            model: "qwen2.5:latest".to_string(),
            preserve_code: true,
            preserve_links: true,
        }
    }
}

#[derive(Debug, Clone)]
pub enum MarkdownSection {
    Text(String),
    Code(String, Option<String>), // content, language
    Header(String, u8),           // content, level (1-6)
    Link(String, String),         // text, url
    Image(String, String),        // alt, url
    Table(String),
    List(String),
    Quote(String),
}

pub trait Translator {
    #[allow(dead_code)]
    fn translate(&self, content: &str, config: &TranslationConfig) -> impl std::future::Future<Output = Result<String>> + Send;
    fn translate_markdown(&self, content: &str, config: &TranslationConfig) -> impl std::future::Future<Output = Result<String>> + Send;
    fn translate_sections(&self, sections: Vec<MarkdownSection>, config: &TranslationConfig) -> impl std::future::Future<Output = Result<Vec<MarkdownSection>>> + Send;
}

#[allow(dead_code)]
pub struct TranslationResult {
    pub original: String,
    pub translated: String,
    pub source_lang: String,
    pub target_lang: String,
    pub model: String,
    pub metrics: TranslationMetrics,
}

#[derive(Debug, Clone, Default)]
#[allow(dead_code)]
pub struct TranslationMetrics {
    pub character_count: usize,
    pub word_count: usize,
    pub translation_time_ms: u64,
    pub sections_translated: usize,
    pub sections_preserved: usize,
}

#[derive(Clone)]
pub struct LanguageMapping {
    pub mappings: HashMap<String, LanguageInfo>,
}

#[derive(Debug, Clone)]
pub struct LanguageInfo {
    #[allow(dead_code)]
    pub name: String,
    #[allow(dead_code)]
    pub code: String,
    pub ollama_prompt: String,
}

impl LanguageMapping {
    pub fn new() -> Self {
        let mut mappings = HashMap::new();

        // Settings for the major languages
        mappings.insert("ja".to_string(), LanguageInfo {
            name: "Japanese".to_string(),
            code: "ja".to_string(),
            ollama_prompt: "You are a professional Japanese translator specializing in technical documentation.".to_string(),
        });

        mappings.insert("en".to_string(), LanguageInfo {
            name: "English".to_string(),
            code: "en".to_string(),
            ollama_prompt: "You are a professional English translator specializing in technical documentation.".to_string(),
        });

        mappings.insert("zh".to_string(), LanguageInfo {
            name: "Chinese".to_string(),
            code: "zh".to_string(),
            ollama_prompt: "You are a professional Chinese translator specializing in technical documentation.".to_string(),
        });

        mappings.insert("ko".to_string(), LanguageInfo {
            name: "Korean".to_string(),
            code: "ko".to_string(),
            ollama_prompt: "You are a professional Korean translator specializing in technical documentation.".to_string(),
        });

        mappings.insert("es".to_string(), LanguageInfo {
            name: "Spanish".to_string(),
            code: "es".to_string(),
            ollama_prompt: "You are a professional Spanish translator specializing in technical documentation.".to_string(),
        });

        Self { mappings }
    }

    pub fn get_language_info(&self, code: &str) -> Option<&LanguageInfo> {
        self.mappings.get(code)
    }

    #[allow(dead_code)]
    pub fn get_supported_languages(&self) -> Vec<String> {
        self.mappings.keys().cloned().collect()
    }
}
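A hedged sketch of constructing a non-default configuration from the types above; the language pair chosen is illustrative:

// Sketch: override only the language pair, keeping the endpoint and model
// defaults declared in `TranslationConfig::default()`.
fn english_to_korean_config() -> TranslationConfig {
    TranslationConfig {
        source_lang: "en".to_string(),
        target_lang: "ko".to_string(),
        ..TranslationConfig::default()
    }
}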
@@ -1,233 +0,0 @@
use anyhow::Result;
use reqwest::Client;
use serde_json::json;
use std::time::Instant;
use super::*;
use crate::translator::markdown_parser::MarkdownParser;

#[derive(Clone)]
pub struct OllamaTranslator {
    client: Client,
    language_mapping: LanguageMapping,
    parser: MarkdownParser,
}

impl OllamaTranslator {
    pub fn new() -> Self {
        Self {
            client: Client::new(),
            language_mapping: LanguageMapping::new(),
            parser: MarkdownParser::new(),
        }
    }

    async fn call_ollama(&self, prompt: &str, config: &TranslationConfig) -> Result<String> {
        let request_body = json!({
            "model": config.model,
            "prompt": prompt,
            "stream": false,
            "options": {
                "temperature": 0.3,
                "top_p": 0.9,
                "top_k": 40
            }
        });

        let url = format!("{}/api/generate", config.ollama_endpoint);

        let response = self.client
            .post(&url)
            .json(&request_body)
            .send()
            .await?;

        if !response.status().is_success() {
            anyhow::bail!("Ollama API request failed: {}", response.status());
        }

        let response_text = response.text().await?;
        let response_json: serde_json::Value = serde_json::from_str(&response_text)?;

        let translated = response_json
            .get("response")
            .and_then(|v| v.as_str())
            .ok_or_else(|| anyhow::anyhow!("Invalid response from Ollama"))?;

        Ok(translated.to_string())
    }

    #[allow(dead_code)]
    fn build_translation_prompt(&self, text: &str, config: &TranslationConfig) -> Result<String> {
        let source_info = self.language_mapping.get_language_info(&config.source_lang)
            .ok_or_else(|| anyhow::anyhow!("Unsupported source language: {}", config.source_lang))?;

        let target_info = self.language_mapping.get_language_info(&config.target_lang)
            .ok_or_else(|| anyhow::anyhow!("Unsupported target language: {}", config.target_lang))?;

        let prompt = format!(
            r#"{system_prompt}

Translate the following text from {source_lang} to {target_lang}.

IMPORTANT RULES:
1. Preserve all Markdown formatting (headers, links, code blocks, etc.)
2. Do NOT translate content within code blocks (```)
3. Do NOT translate URLs or file paths
4. Preserve technical terms when appropriate
5. Maintain the original structure and formatting
6. Only output the translated text, no explanations

Original text ({source_code}):
{text}

Translated text ({target_code}):"#,
            system_prompt = target_info.ollama_prompt,
            source_lang = source_info.name,
            target_lang = target_info.name,
            source_code = source_info.code,
            target_code = target_info.code,
            text = text
        );

        Ok(prompt)
    }

    fn build_section_translation_prompt(&self, section: &MarkdownSection, config: &TranslationConfig) -> Result<String> {
        let target_info = self.language_mapping.get_language_info(&config.target_lang)
            .ok_or_else(|| anyhow::anyhow!("Unsupported target language: {}", config.target_lang))?;

        let (content, section_type) = match section {
            MarkdownSection::Text(text) => (text.clone(), "text"),
            MarkdownSection::Header(text, _) => (text.clone(), "header"),
            MarkdownSection::Quote(text) => (text.clone(), "quote"),
            MarkdownSection::List(text) => (text.clone(), "list"),
            _ => return Ok(String::new()), // Skip translation for code, links, etc.
        };

        let prompt = format!(
            r#"{system_prompt}

Translate this {section_type} from {source_lang} to {target_lang}.

RULES:
- Only translate the text content
- Preserve formatting symbols (*, #, >, etc.)
- Keep technical terms when appropriate
- Output only the translated text

Text to translate:
{content}

Translation:"#,
            system_prompt = target_info.ollama_prompt,
            section_type = section_type,
            source_lang = config.source_lang,
            target_lang = config.target_lang,
            content = content
        );

        Ok(prompt)
    }
}

impl Translator for OllamaTranslator {
    fn translate(&self, content: &str, config: &TranslationConfig) -> impl std::future::Future<Output = Result<String>> + Send {
        async move {
            let prompt = self.build_translation_prompt(content, config)?;
            self.call_ollama(&prompt, config).await
        }
    }

    fn translate_markdown(&self, content: &str, config: &TranslationConfig) -> impl std::future::Future<Output = Result<String>> + Send {
        async move {
            println!("🔄 Parsing markdown content...");
            let sections = self.parser.parse_markdown(content)?;

            println!("📝 Found {} sections to process", sections.len());
            let translated_sections = self.translate_sections(sections, config).await?;

            println!("✅ Rebuilding markdown from translated sections...");
            let result = self.parser.rebuild_markdown(translated_sections);

            Ok(result)
        }
    }

    fn translate_sections(&self, sections: Vec<MarkdownSection>, config: &TranslationConfig) -> impl std::future::Future<Output = Result<Vec<MarkdownSection>>> + Send {
        let config = config.clone();
        let client = self.client.clone();
        let parser = self.parser.clone();
        let language_mapping = self.language_mapping.clone();

        async move {
            let translator = OllamaTranslator {
                client,
                language_mapping,
                parser,
            };

            let mut translated_sections = Vec::new();
            let start_time = Instant::now();

            for (index, section) in sections.into_iter().enumerate() {
                println!(" 🔤 Processing section {}", index + 1);

                let translated_section = match &section {
                    MarkdownSection::Code(_content, _lang) => {
                        if config.preserve_code {
                            println!(" ⏭️ Preserving code block");
                            section // Preserve code blocks
                        } else {
                            section // Still preserve for now
                        }
                    }
                    MarkdownSection::Link(text, url) => {
                        if config.preserve_links {
                            println!(" ⏭️ Preserving link");
                            section // Preserve links
                        } else {
                            // Translate link text only
                            let prompt = translator.build_section_translation_prompt(&MarkdownSection::Text(text.clone()), &config)?;
                            let translated_text = translator.call_ollama(&prompt, &config).await?;
                            MarkdownSection::Link(translated_text.trim().to_string(), url.clone())
                        }
                    }
                    MarkdownSection::Image(_alt, _url) => {
                        println!(" 🖼️ Preserving image");
                        section // Preserve images
                    }
                    MarkdownSection::Table(content) => {
                        println!(" 📊 Translating table content");
                        let prompt = translator.build_section_translation_prompt(&MarkdownSection::Text(content.clone()), &config)?;
                        let translated_content = translator.call_ollama(&prompt, &config).await?;
                        MarkdownSection::Table(translated_content.trim().to_string())
                    }
                    _ => {
                        // Translate text sections
                        println!(" 🔤 Translating text");
                        let prompt = translator.build_section_translation_prompt(&section, &config)?;
                        let translated_text = translator.call_ollama(&prompt, &config).await?;

                        match section {
                            MarkdownSection::Text(_) => MarkdownSection::Text(translated_text.trim().to_string()),
                            MarkdownSection::Header(_, level) => MarkdownSection::Header(translated_text.trim().to_string(), level),
                            MarkdownSection::Quote(_) => MarkdownSection::Quote(translated_text.trim().to_string()),
                            MarkdownSection::List(_) => MarkdownSection::List(translated_text.trim().to_string()),
                            _ => section,
                        }
                    }
                };

                translated_sections.push(translated_section);

                // Add a small delay to avoid overwhelming Ollama
                tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
            }

            let elapsed = start_time.elapsed();
            println!("⏱️ Translation completed in {:.2}s", elapsed.as_secs_f64());

            Ok(translated_sections)
        }
    }
}
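Finally, a minimal driver sketch for the translator above; it assumes a running Ollama instance at the default endpoint and is not part of this commit:

// Sketch: translate a markdown document with the defaults (ja -> en).
// Requires the `Translator` trait in scope and an Ollama server
// listening at http://localhost:11434.
async fn translate_post(content: &str) -> anyhow::Result<String> {
    let translator = OllamaTranslator::new();
    let config = TranslationConfig::default();
    translator.translate_markdown(content, &config).await
}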