update gpt

2025-06-06 03:18:20 +09:00
parent a9dca2fe38
commit c0e4dc63ea
18 changed files with 2827 additions and 51 deletions

src/analyzer/mod.rs (new file, 313 lines added)

@@ -0,0 +1,313 @@
pub mod rust_analyzer;
use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectInfo {
pub name: String,
pub description: Option<String>,
pub version: String,
pub authors: Vec<String>,
pub license: Option<String>,
pub dependencies: HashMap<String, String>,
pub modules: Vec<ModuleInfo>,
pub structure: ProjectStructure,
pub metrics: ProjectMetrics,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModuleInfo {
pub name: String,
pub path: PathBuf,
pub functions: Vec<FunctionInfo>,
pub structs: Vec<StructInfo>,
pub enums: Vec<EnumInfo>,
pub traits: Vec<TraitInfo>,
pub docs: Option<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FunctionInfo {
pub name: String,
pub visibility: String,
pub is_async: bool,
pub parameters: Vec<Parameter>,
pub return_type: Option<String>,
pub docs: Option<String>,
pub line_number: usize,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Parameter {
pub name: String,
pub param_type: String,
pub is_mutable: bool,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StructInfo {
pub name: String,
pub visibility: String,
pub fields: Vec<FieldInfo>,
pub docs: Option<String>,
pub line_number: usize,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FieldInfo {
pub name: String,
pub field_type: String,
pub visibility: String,
pub docs: Option<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EnumInfo {
pub name: String,
pub visibility: String,
pub variants: Vec<VariantInfo>,
pub docs: Option<String>,
pub line_number: usize,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VariantInfo {
pub name: String,
pub fields: Vec<FieldInfo>,
pub docs: Option<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TraitInfo {
pub name: String,
pub visibility: String,
pub methods: Vec<FunctionInfo>,
pub docs: Option<String>,
pub line_number: usize,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectStructure {
pub directories: Vec<DirectoryInfo>,
pub files: Vec<FileInfo>,
pub dependency_graph: HashMap<String, Vec<String>>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DirectoryInfo {
pub name: String,
pub path: PathBuf,
pub file_count: usize,
pub subdirectories: Vec<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileInfo {
pub name: String,
pub path: PathBuf,
pub language: String,
pub lines_of_code: usize,
pub is_test: bool,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProjectMetrics {
pub total_lines: usize,
pub total_files: usize,
pub test_files: usize,
pub dependency_count: usize,
pub complexity_score: f32,
pub test_coverage: Option<f32>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ApiInfo {
pub modules: Vec<ModuleInfo>,
pub public_functions: Vec<FunctionInfo>,
pub public_structs: Vec<StructInfo>,
pub public_enums: Vec<EnumInfo>,
pub public_traits: Vec<TraitInfo>,
}
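/// Language-agnostic entry point; currently it only recognizes Cargo projects and
/// delegates them to the Rust-specific analyzer.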
pub struct CodeAnalyzer {
rust_analyzer: rust_analyzer::RustAnalyzer,
}
impl CodeAnalyzer {
pub fn new() -> Self {
Self {
rust_analyzer: rust_analyzer::RustAnalyzer::new(),
}
}
pub fn analyze_project(&self, path: &Path) -> Result<ProjectInfo> {
println!(" 🔍 Analyzing project at: {}", path.display());
// Check if this is a Rust project
let cargo_toml = path.join("Cargo.toml");
if cargo_toml.exists() {
return self.rust_analyzer.analyze_project(path);
}
// For now, only support Rust projects
anyhow::bail!("Only Rust projects are currently supported");
}
pub fn analyze_api(&self, path: &Path) -> Result<ApiInfo> {
println!(" 📚 Analyzing API at: {}", path.display());
let project_info = self.analyze_project(path.parent().unwrap_or(path))?;
// Extract only public items
let mut public_functions = Vec::new();
let mut public_structs = Vec::new();
let mut public_enums = Vec::new();
let mut public_traits = Vec::new();
for module in &project_info.modules {
for func in &module.functions {
if func.visibility == "pub" {
public_functions.push(func.clone());
}
}
for struct_info in &module.structs {
if struct_info.visibility == "pub" {
public_structs.push(struct_info.clone());
}
}
for enum_info in &module.enums {
if enum_info.visibility == "pub" {
public_enums.push(enum_info.clone());
}
}
for trait_info in &module.traits {
if trait_info.visibility == "pub" {
public_traits.push(trait_info.clone());
}
}
}
Ok(ApiInfo {
modules: project_info.modules,
public_functions,
public_structs,
public_enums,
public_traits,
})
}
pub fn analyze_structure(&self, path: &Path, include_deps: bool) -> Result<ProjectStructure> {
println!(" 🏗️ Analyzing structure at: {}", path.display());
let mut directories = Vec::new();
let mut files = Vec::new();
let mut dependency_graph = HashMap::new();
self.walk_directory(path, &mut directories, &mut files)?;
if include_deps {
dependency_graph = self.analyze_dependencies(path)?;
}
Ok(ProjectStructure {
directories,
files,
dependency_graph,
})
}
fn walk_directory(
&self,
path: &Path,
directories: &mut Vec<DirectoryInfo>,
files: &mut Vec<FileInfo>,
) -> Result<()> {
use walkdir::WalkDir;
let walker = WalkDir::new(path)
.into_iter()
.filter_entry(|e| {
    let name = e.file_name().to_string_lossy();
    // Always keep the walk root (otherwise passing "." filters out everything);
    // skip hidden files and common build/cache directories.
    e.depth() == 0
        || (!name.starts_with('.')
            && name != "target"
            && name != "node_modules"
            && name != "dist")
});
for entry in walker {
let entry = entry?;
// Skip the root entry itself; everything else is recorded relative to the walked root.
if entry.depth() == 0 { continue; }
let relative_path = entry.path().strip_prefix(path)?.to_path_buf();
let path = entry.path();
if entry.file_type().is_dir() {
let file_count = std::fs::read_dir(path)?
.filter_map(|e| e.ok())
.filter(|e| e.file_type().map(|ft| ft.is_file()).unwrap_or(false))
.count();
let subdirectories = std::fs::read_dir(path)?
.filter_map(|e| e.ok())
.filter(|e| e.file_type().map(|ft| ft.is_dir()).unwrap_or(false))
.map(|e| e.file_name().to_string_lossy().to_string())
.collect();
directories.push(DirectoryInfo {
name: path.file_name().unwrap().to_string_lossy().to_string(),
path: relative_path.to_path_buf(),
file_count,
subdirectories,
});
} else if entry.file_type().is_file() {
let language = self.detect_language(path);
let lines_of_code = self.count_lines(path)?;
let is_test = self.is_test_file(path);
files.push(FileInfo {
name: path.file_name().unwrap().to_string_lossy().to_string(),
path: relative_path.to_path_buf(),
language,
lines_of_code,
is_test,
});
}
}
Ok(())
}
fn detect_language(&self, path: &Path) -> String {
match path.extension().and_then(|s| s.to_str()) {
Some("rs") => "rust".to_string(),
Some("py") => "python".to_string(),
Some("js") => "javascript".to_string(),
Some("ts") => "typescript".to_string(),
Some("md") => "markdown".to_string(),
Some("toml") => "toml".to_string(),
Some("json") => "json".to_string(),
Some("yaml") | Some("yml") => "yaml".to_string(),
_ => "unknown".to_string(),
}
}
fn count_lines(&self, path: &Path) -> Result<usize> {
let content = std::fs::read_to_string(path)?;
Ok(content.lines().count())
}
fn is_test_file(&self, path: &Path) -> bool {
    let filename = path
        .file_name()
        .map(|n| n.to_string_lossy().to_string())
        .unwrap_or_default();
    // `contains("test")` already covers the `test_` prefix; the component check
    // matches a `tests` directory regardless of path separator.
    filename.contains("test") || path.components().any(|c| c.as_os_str() == "tests")
}
fn analyze_dependencies(&self, _path: &Path) -> Result<HashMap<String, Vec<String>>> {
// For now, just return empty dependencies
// TODO: Implement actual dependency analysis
Ok(HashMap::new())
}
}
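For orientation, a minimal sketch of how this module might be driven. The `main` below is illustrative only and is not part of this commit; it just exercises the types defined above.

use std::path::Path;

fn main() -> anyhow::Result<()> {
    // Analyze the current directory and print a one-line summary.
    let analyzer = CodeAnalyzer::new();
    let info = analyzer.analyze_project(Path::new("."))?;
    println!(
        "{} v{}: {} modules, {} total lines, {} dependencies",
        info.name,
        info.version,
        info.modules.len(),
        info.metrics.total_lines,
        info.metrics.dependency_count
    );
    Ok(())
}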

src/analyzer/rust_analyzer.rs (new file, 512 lines added)

@@ -0,0 +1,512 @@
use anyhow::Result;
use std::collections::HashMap;
use std::path::Path;
use syn::{visit::Visit, ItemEnum, ItemFn, ItemStruct, ItemTrait, Visibility};
use super::*;
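/// Rust-specific backend: reads Cargo.toml via `toml` and parses source files with `syn`.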
pub struct RustAnalyzer;
impl RustAnalyzer {
pub fn new() -> Self {
Self
}
pub fn analyze_project(&self, path: &Path) -> Result<ProjectInfo> {
// Parse Cargo.toml
let cargo_toml_path = path.join("Cargo.toml");
let cargo_content = std::fs::read_to_string(&cargo_toml_path)?;
let cargo_toml: toml::Value = toml::from_str(&cargo_content)?;
let package = cargo_toml.get("package").unwrap();
let name = package.get("name").unwrap().as_str().unwrap().to_string();
let description = package.get("description").map(|v| v.as_str().unwrap().to_string());
let version = package.get("version").unwrap().as_str().unwrap().to_string();
let authors = package
.get("authors")
.map(|v| {
v.as_array()
.unwrap()
.iter()
.map(|a| a.as_str().unwrap().to_string())
.collect()
})
.unwrap_or_default();
let license = package.get("license").map(|v| v.as_str().unwrap().to_string());
// Parse dependencies
let dependencies = self.parse_dependencies(&cargo_toml)?;
// Analyze source code
let src_path = path.join("src");
let modules = self.analyze_modules(&src_path)?;
// Calculate metrics
let metrics = self.calculate_metrics(&modules, &dependencies);
// Analyze structure
let structure = self.analyze_project_structure(path)?;
Ok(ProjectInfo {
name,
description,
version,
authors,
license,
dependencies,
modules,
structure,
metrics,
})
}
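// Handles both `dep = "1.0"` and `dep = { version = "1.0", ... }` forms; anything else
// (e.g. pure path or git dependencies without a version key) falls back to "*".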
fn parse_dependencies(&self, cargo_toml: &toml::Value) -> Result<HashMap<String, String>> {
let mut dependencies = HashMap::new();
if let Some(deps) = cargo_toml.get("dependencies") {
if let Some(deps_table) = deps.as_table() {
for (name, value) in deps_table {
let version = match value {
toml::Value::String(v) => v.clone(),
toml::Value::Table(t) => {
t.get("version")
.and_then(|v| v.as_str())
.unwrap_or("*")
.to_string()
}
_ => "*".to_string(),
};
dependencies.insert(name.clone(), version);
}
}
}
Ok(dependencies)
}
fn analyze_modules(&self, src_path: &Path) -> Result<Vec<ModuleInfo>> {
let mut modules = Vec::new();
if !src_path.exists() {
return Ok(modules);
}
// Walk through all .rs files
for entry in walkdir::WalkDir::new(src_path) {
let entry = entry?;
if entry.file_type().is_file() {
if let Some(extension) = entry.path().extension() {
if extension == "rs" {
if let Ok(module) = self.analyze_rust_file(entry.path()) {
modules.push(module);
}
}
}
}
}
Ok(modules)
}
fn analyze_rust_file(&self, file_path: &Path) -> Result<ModuleInfo> {
let content = std::fs::read_to_string(file_path)?;
let syntax_tree = syn::parse_file(&content)?;
let mut visitor = RustVisitor::new();
visitor.visit_file(&syntax_tree);
let module_name = file_path
.file_stem()
.unwrap()
.to_string_lossy()
.to_string();
// Extract module-level documentation
let docs = self.extract_module_docs(&content);
Ok(ModuleInfo {
name: module_name,
path: file_path.to_path_buf(),
functions: visitor.functions,
structs: visitor.structs,
enums: visitor.enums,
traits: visitor.traits,
docs,
})
}
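// Module-level docs are the leading `//!` / `/*! ... */` lines at the top of the file.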
fn extract_module_docs(&self, content: &str) -> Option<String> {
let lines: Vec<&str> = content.lines().collect();
let mut doc_lines = Vec::new();
let mut in_module_doc = false;
for line in lines {
let trimmed = line.trim();
if trimmed.starts_with("//!") {
in_module_doc = true;
doc_lines.push(trimmed.trim_start_matches("//!").trim());
} else if trimmed.starts_with("/*!") {
in_module_doc = true;
let content = trimmed.trim_start_matches("/*!").trim_end_matches("*/").trim();
doc_lines.push(content);
} else if in_module_doc && !trimmed.is_empty() && !trimmed.starts_with("//") {
break;
}
}
if doc_lines.is_empty() {
None
} else {
Some(doc_lines.join("\n"))
}
}
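// "Complexity" here is simply the average number of items (functions, structs,
// enums, traits) per module; a rough heuristic, not cyclomatic complexity.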
fn calculate_metrics(&self, modules: &[ModuleInfo], dependencies: &HashMap<String, String>) -> ProjectMetrics {
let total_lines = modules.iter().map(|m| {
std::fs::read_to_string(&m.path)
.map(|content| content.lines().count())
.unwrap_or(0)
}).sum();
let total_files = modules.len();
let test_files = modules.iter().filter(|m| {
    // Component check instead of "/tests/" so Windows-style paths are matched too.
    m.name.contains("test") || m.path.components().any(|c| c.as_os_str() == "tests")
}).count();
let dependency_count = dependencies.len();
// Simple complexity calculation based on number of functions and structs
let complexity_score = modules.iter().map(|m| {
(m.functions.len() + m.structs.len() + m.enums.len() + m.traits.len()) as f32
}).sum::<f32>() / modules.len().max(1) as f32;
ProjectMetrics {
total_lines,
total_files,
test_files,
dependency_count,
complexity_score,
test_coverage: None, // TODO: Implement test coverage calculation
}
}
fn analyze_project_structure(&self, path: &Path) -> Result<ProjectStructure> {
let mut directories = Vec::new();
let mut files = Vec::new();
self.walk_directory(path, &mut directories, &mut files)?;
Ok(ProjectStructure {
directories,
files,
dependency_graph: HashMap::new(), // TODO: Implement dependency graph
})
}
fn walk_directory(
&self,
path: &Path,
directories: &mut Vec<DirectoryInfo>,
files: &mut Vec<FileInfo>,
) -> Result<()> {
for entry in walkdir::WalkDir::new(path).max_depth(3) {
let entry = entry?;
let relative_path = entry.path().strip_prefix(path)?;
if entry.file_type().is_dir() && relative_path != Path::new("") {
let file_count = std::fs::read_dir(entry.path())?
.filter_map(|e| e.ok())
.filter(|e| e.file_type().map(|ft| ft.is_file()).unwrap_or(false))
.count();
let subdirectories = std::fs::read_dir(entry.path())?
.filter_map(|e| e.ok())
.filter(|e| e.file_type().map(|ft| ft.is_dir()).unwrap_or(false))
.map(|e| e.file_name().to_string_lossy().to_string())
.collect();
directories.push(DirectoryInfo {
name: entry.path().file_name().unwrap().to_string_lossy().to_string(),
path: relative_path.to_path_buf(),
file_count,
subdirectories,
});
} else if entry.file_type().is_file() {
let language = match entry.path().extension().and_then(|s| s.to_str()) {
Some("rs") => "rust".to_string(),
Some("toml") => "toml".to_string(),
Some("md") => "markdown".to_string(),
_ => "unknown".to_string(),
};
let lines_of_code = std::fs::read_to_string(entry.path())
.map(|content| content.lines().count())
.unwrap_or(0);
let is_test = entry.path().to_string_lossy().contains("test");
files.push(FileInfo {
name: entry.path().file_name().unwrap().to_string_lossy().to_string(),
path: relative_path.to_path_buf(),
language,
lines_of_code,
is_test,
});
}
}
Ok(())
}
}
struct RustVisitor {
functions: Vec<FunctionInfo>,
structs: Vec<StructInfo>,
enums: Vec<EnumInfo>,
traits: Vec<TraitInfo>,
current_line: usize,
}
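// Note: `current_line` is never advanced while visiting, so every recorded
// line_number stays at 1. If proc-macro2's "span-locations" feature is enabled
// (an assumption, not verified from this diff), something like
// `node.sig.ident.span().start().line` would yield the real source line.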
impl RustVisitor {
fn new() -> Self {
Self {
functions: Vec::new(),
structs: Vec::new(),
enums: Vec::new(),
traits: Vec::new(),
current_line: 1,
}
}
fn visibility_to_string(&self, vis: &Visibility) -> String {
match vis {
Visibility::Public(_) => "pub".to_string(),
Visibility::Restricted(_) => "pub(restricted)".to_string(),
Visibility::Inherited => "private".to_string(),
}
}
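// `///` doc comments are parsed into `#[doc = "..."]` attributes, so collecting
// the string literals of `doc` attributes recovers the documentation text.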
fn extract_docs(&self, attrs: &[syn::Attribute]) -> Option<String> {
let mut docs = Vec::new();
for attr in attrs {
if attr.path().is_ident("doc") {
if let syn::Meta::NameValue(meta) = &attr.meta {
if let syn::Expr::Lit(expr_lit) = &meta.value {
if let syn::Lit::Str(lit_str) = &expr_lit.lit {
docs.push(lit_str.value());
}
}
}
}
}
if docs.is_empty() {
None
} else {
Some(docs.join("\n"))
}
}
}
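// Walks the syntax tree and records every free function, struct, enum, and trait.
// Methods inside `impl` blocks are not captured by `visit_item_fn`.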
impl<'ast> Visit<'ast> for RustVisitor {
fn visit_item_fn(&mut self, node: &'ast ItemFn) {
let name = node.sig.ident.to_string();
let visibility = self.visibility_to_string(&node.vis);
let is_async = node.sig.asyncness.is_some();
let parameters = node.sig.inputs.iter().map(|input| {
match input {
syn::FnArg::Receiver(_) => Parameter {
name: "self".to_string(),
param_type: "Self".to_string(),
is_mutable: false,
},
syn::FnArg::Typed(typed) => {
    let (name, is_mutable) = match &*typed.pat {
        syn::Pat::Ident(ident) => (ident.ident.to_string(), ident.mutability.is_some()),
        _ => ("unknown".to_string(), false),
    };
    // Bind the type first: `#typed.ty` would interpolate the whole argument
    // and then append literal `. ty` tokens.
    let ty = &typed.ty;
    Parameter {
        name,
        param_type: quote::quote!(#ty).to_string(),
        is_mutable,
    }
}
}
}).collect();
let return_type = match &node.sig.output {
syn::ReturnType::Default => None,
syn::ReturnType::Type(_, ty) => Some(quote::quote!(#ty).to_string()),
};
let docs = self.extract_docs(&node.attrs);
self.functions.push(FunctionInfo {
name,
visibility,
is_async,
parameters,
return_type,
docs,
line_number: self.current_line,
});
syn::visit::visit_item_fn(self, node);
}
fn visit_item_struct(&mut self, node: &'ast ItemStruct) {
let name = node.ident.to_string();
let visibility = self.visibility_to_string(&node.vis);
let docs = self.extract_docs(&node.attrs);
let fields = match &node.fields {
    syn::Fields::Named(fields) => {
        fields.named.iter().map(|field| {
            let ty = &field.ty;
            FieldInfo {
                name: field.ident.as_ref().unwrap().to_string(),
                field_type: quote::quote!(#ty).to_string(),
                visibility: self.visibility_to_string(&field.vis),
                docs: self.extract_docs(&field.attrs),
            }
        }).collect()
    }
    syn::Fields::Unnamed(fields) => {
        fields.unnamed.iter().enumerate().map(|(i, field)| {
            let ty = &field.ty;
            FieldInfo {
                name: format!("field_{}", i),
                field_type: quote::quote!(#ty).to_string(),
                visibility: self.visibility_to_string(&field.vis),
                docs: self.extract_docs(&field.attrs),
            }
        }).collect()
    }
    syn::Fields::Unit => Vec::new(),
};
self.structs.push(StructInfo {
name,
visibility,
fields,
docs,
line_number: self.current_line,
});
syn::visit::visit_item_struct(self, node);
}
fn visit_item_enum(&mut self, node: &'ast ItemEnum) {
let name = node.ident.to_string();
let visibility = self.visibility_to_string(&node.vis);
let docs = self.extract_docs(&node.attrs);
let variants = node.variants.iter().map(|variant| {
let variant_name = variant.ident.to_string();
let variant_docs = self.extract_docs(&variant.attrs);
let fields = match &variant.fields {
    syn::Fields::Named(fields) => {
        fields.named.iter().map(|field| {
            let ty = &field.ty;
            FieldInfo {
                name: field.ident.as_ref().unwrap().to_string(),
                field_type: quote::quote!(#ty).to_string(),
                visibility: self.visibility_to_string(&field.vis),
                docs: self.extract_docs(&field.attrs),
            }
        }).collect()
    }
    syn::Fields::Unnamed(fields) => {
        fields.unnamed.iter().enumerate().map(|(i, field)| {
            let ty = &field.ty;
            FieldInfo {
                name: format!("field_{}", i),
                field_type: quote::quote!(#ty).to_string(),
                visibility: self.visibility_to_string(&field.vis),
                docs: self.extract_docs(&field.attrs),
            }
        }).collect()
    }
    syn::Fields::Unit => Vec::new(),
};
VariantInfo {
name: variant_name,
fields,
docs: variant_docs,
}
}).collect();
self.enums.push(EnumInfo {
name,
visibility,
variants,
docs,
line_number: self.current_line,
});
syn::visit::visit_item_enum(self, node);
}
fn visit_item_trait(&mut self, node: &'ast ItemTrait) {
let name = node.ident.to_string();
let visibility = self.visibility_to_string(&node.vis);
let docs = self.extract_docs(&node.attrs);
let methods = node.items.iter().filter_map(|item| {
match item {
syn::TraitItem::Fn(method) => {
let method_name = method.sig.ident.to_string();
let method_visibility = "pub".to_string(); // Trait methods are inherently public
let is_async = method.sig.asyncness.is_some();
let parameters = method.sig.inputs.iter().map(|input| {
match input {
syn::FnArg::Receiver(_) => Parameter {
name: "self".to_string(),
param_type: "Self".to_string(),
is_mutable: false,
},
syn::FnArg::Typed(typed) => {
    let (name, is_mutable) = match &*typed.pat {
        syn::Pat::Ident(ident) => (ident.ident.to_string(), ident.mutability.is_some()),
        _ => ("unknown".to_string(), false),
    };
    let ty = &typed.ty;
    Parameter {
        name,
        param_type: quote::quote!(#ty).to_string(),
        is_mutable,
    }
}
}
}).collect();
let return_type = match &method.sig.output {
syn::ReturnType::Default => None,
syn::ReturnType::Type(_, ty) => Some(quote::quote!(#ty).to_string()),
};
let method_docs = self.extract_docs(&method.attrs);
Some(FunctionInfo {
name: method_name,
visibility: method_visibility,
is_async,
parameters,
return_type,
docs: method_docs,
line_number: self.current_line,
})
}
_ => None,
}
}).collect();
self.traits.push(TraitInfo {
name,
visibility,
methods,
docs,
line_number: self.current_line,
});
syn::visit::visit_item_trait(self, node);
}
}