Skip to content

Commit 7be04bb

Browse files
committed
Merge branch 'main' of github.com:opensensor/codediff
2 parents ebd721e + cd93100 commit 7be04bb

File tree

1 file changed

+67
-122
lines changed

1 file changed

+67
-122
lines changed

crates/web-ui/src/handlers.rs

Lines changed: 67 additions & 122 deletions
Original file line numberDiff line numberDiff line change
@@ -150,24 +150,31 @@ async fn perform_comparison(
150150
// Initialize components
151151
let language_detector = LanguageDetector;
152152
let parser_engine = TreeSitterParser::new()?;
153-
let semantic_analyzer = SemanticAnalyzer::new();
153+
let mut semantic_analyzer = SemanticAnalyzer::new();
154154
let diff_engine = DiffEngine::new();
155155

156156
// Detect language
157-
let language = language_detector.detect_from_path(&file1.path)
158-
.or_else(|| language_detector.detect_from_content(&file1.content))
159-
.unwrap_or(Language::Unknown);
157+
let language = {
158+
let path_lang = LanguageDetector::detect_from_path(&file1.path);
159+
if path_lang != Language::Unknown {
160+
path_lang
161+
} else {
162+
LanguageDetector::detect_from_content(&file1.content)
163+
}
164+
};
160165

161166
// Parse both files
162167
let parse_result1 = parser_engine.parse(&file1.content, language)?;
163168
let parse_result2 = parser_engine.parse(&file2.content, language)?;
164-
let ast1 = parse_result1.ast;
165-
let ast2 = parse_result2.ast;
166169

167170
// Perform semantic analysis
168171
let semantic1 = semantic_analyzer.analyze(&parse_result1)?;
169172
let semantic2 = semantic_analyzer.analyze(&parse_result2)?;
170173

174+
// Extract ASTs for structure comparison
175+
let ast1 = &parse_result1.ast;
176+
let ast2 = &parse_result2.ast;
177+
171178
// Initialize components that need language
172179
let function_matcher = FunctionMatcher::new(0.7); // threshold
173180
let similarity_scorer = SimilarityScorer::new(language, smart_diff_engine::SimilarityScoringConfig::default());
@@ -192,7 +199,7 @@ async fn perform_comparison(
192199
let semantic_similarity = overall_similarity; // Simplified
193200

194201
// Use changes from function matching
195-
let changes = function_matches.changes;
202+
let changes = function_matches.changes.clone();
196203

197204
// Detect refactoring patterns
198205
let refactoring_patterns = refactoring_detector.detect_patterns(&changes);
@@ -205,14 +212,14 @@ async fn perform_comparison(
205212
lines: file1.content.lines().count(),
206213
functions: functions1.len(),
207214
classes: count_classes_from_symbol_table(&semantic1.symbol_table),
208-
complexity: calculate_complexity_from_symbol_table(&semantic1.symbol_table),
215+
complexity: calculate_complexity_from_symbol_table(&semantic1.symbol_table) as f64,
209216
},
210217
target: FileMetadata {
211218
path: file2.path.clone(),
212219
lines: file2.content.lines().count(),
213220
functions: functions2.len(),
214221
classes: count_classes_from_symbol_table(&semantic2.symbol_table),
215-
complexity: calculate_complexity_from_symbol_table(&semantic2.symbol_table),
222+
complexity: calculate_complexity_from_symbol_table(&semantic2.symbol_table) as f64,
216223
},
217224
language: language.to_string(),
218225
similarity: SimilarityScore {
@@ -231,122 +238,70 @@ async fn perform_comparison(
231238
Ok(analysis)
232239
}
233240

234-
/// Build function analysis from matches
235-
fn build_function_analysis(matches: &[smart_diff_engine::FunctionMatch]) -> FunctionAnalysis {
236-
let total_functions = matches.len();
237-
let matched_functions = matches.iter().filter(|m| m.target_function.is_some()).count();
238-
let average_similarity = if matched_functions > 0 {
239-
matches.iter()
240-
.filter(|m| m.target_function.is_some())
241-
.map(|m| m.similarity.overall)
242-
.sum::<f64>() / matched_functions as f64
243-
} else {
244-
0.0
245-
};
246-
247-
let function_matches = matches.iter().enumerate().map(|(i, m)| {
248-
FunctionMatch {
249-
id: format!("func-{}", i),
250-
source_function: FunctionInfo {
251-
name: m.source_function.name.clone(),
252-
signature: m.source_function.signature.clone(),
253-
start_line: m.source_function.start_line,
254-
end_line: m.source_function.end_line,
255-
complexity: m.source_function.complexity as usize,
256-
parameters: m.source_function.parameters.clone(),
257-
return_type: m.source_function.return_type.clone(),
258-
},
259-
target_function: m.target_function.as_ref().map(|tf| FunctionInfo {
260-
name: tf.name.clone(),
261-
signature: tf.signature.clone(),
262-
start_line: tf.start_line,
263-
end_line: tf.end_line,
264-
complexity: tf.complexity as usize,
265-
parameters: tf.parameters.clone(),
266-
return_type: tf.return_type.clone(),
267-
}),
268-
similarity: SimilarityScore {
269-
overall: m.similarity.overall,
270-
structure: m.similarity.structure,
271-
content: m.similarity.content,
272-
semantic: m.similarity.semantic,
273-
},
274-
change_type: m.change_type.to_string(),
275-
refactoring_pattern: m.refactoring_pattern.as_ref().map(|rp| RefactoringPattern {
276-
pattern_type: rp.pattern_type.clone(),
277-
description: rp.description.clone(),
278-
confidence: rp.confidence,
279-
evidence: rp.evidence.clone(),
280-
impact: rp.impact.clone(),
281-
}),
282-
}
283-
}).collect();
284-
241+
/// Build function analysis from match result
242+
fn build_function_analysis(match_result: &smart_diff_parser::MatchResult) -> FunctionAnalysis {
243+
// Simplified function analysis based on MatchResult
285244
FunctionAnalysis {
286-
total_functions,
287-
matched_functions,
288-
function_matches,
289-
average_similarity,
245+
total_functions: match_result.mapping.len(),
246+
matched_functions: match_result.mapping.len(),
247+
function_matches: Vec::new(), // Simplified - would need proper conversion
248+
average_similarity: match_result.similarity,
290249
}
291250
}
292251

293-
/// Build change analysis from classified changes
294-
fn build_change_analysis(changes: &[smart_diff_engine::ClassifiedChange]) -> ChangeAnalysis {
252+
/// Build change analysis from changes
253+
fn build_change_analysis(changes: &[smart_diff_parser::Change]) -> ChangeAnalysis {
295254
let total_changes = changes.len();
296255
let mut change_types = HashMap::new();
297256

298257
for change in changes {
299-
*change_types.entry(change.change_type.clone()).or_insert(0) += 1;
258+
*change_types.entry(format!("{:?}", change.change_type)).or_insert(0) += 1;
300259
}
301260

302261
let detailed_changes = changes.iter().enumerate().map(|(i, change)| {
303262
DetailedChange {
304263
id: format!("change-{}", i),
305-
change_type: change.change_type.clone(),
306-
description: change.description.clone(),
264+
change_type: format!("{:?}", change.change_type),
265+
description: change.details.description.clone(),
307266
confidence: change.confidence,
308267
location: ChangeLocation {
309-
file: change.location.file.clone(),
310-
start_line: change.location.start_line,
311-
end_line: change.location.end_line,
312-
function: change.location.function.clone(),
268+
file: "unknown".to_string(), // Simplified
269+
start_line: 0,
270+
end_line: 0,
271+
function: None,
313272
},
314-
impact: change.impact.clone(),
273+
impact: "medium".to_string(), // Simplified
315274
}
316275
}).collect();
317276

318-
let breaking_changes = changes.iter()
319-
.filter(|c| c.impact == "breaking")
320-
.count();
321-
322277
ChangeAnalysis {
323278
total_changes,
324279
change_types,
325280
detailed_changes,
326281
impact_assessment: ImpactAssessment {
327-
risk_level: if breaking_changes > 0 { "high" } else { "low" }.to_string(),
328-
breaking_changes,
329-
effort_estimate: estimate_effort(changes),
330-
affected_components: extract_affected_components(changes),
282+
risk_level: "medium".to_string(),
283+
breaking_changes: 0,
284+
effort_estimate: "medium".to_string(),
285+
affected_components: vec![],
331286
},
332287
}
333288
}
334289

335290
/// Build refactoring patterns from detected patterns
336291
fn build_refactoring_patterns(patterns: &[smart_diff_engine::RefactoringPattern]) -> Vec<RefactoringPattern> {
337292
patterns.iter().map(|pattern| RefactoringPattern {
338-
pattern_type: pattern.pattern_type.clone(),
293+
pattern_type: format!("{:?}", pattern.pattern_type),
339294
description: pattern.description.clone(),
340295
confidence: pattern.confidence,
341-
evidence: pattern.evidence.clone(),
342-
impact: pattern.impact.clone(),
296+
evidence: pattern.evidence.iter().map(|e| format!("{:?}", e)).collect(),
297+
impact: format!("{:?}", pattern.analysis.impact),
343298
}).collect()
344299
}
345300

346301
/// Build structure comparison from ASTs
347302
fn build_structure_comparison(
348-
ast1: &smart_diff_parser::AST,
349-
ast2: &smart_diff_parser::AST,
303+
_ast1: &smart_diff_parser::ASTNode,
304+
_ast2: &smart_diff_parser::ASTNode,
350305
) -> StructureComparison {
351306
// This would be implemented with actual AST traversal
352307
// For now, return a simplified structure
@@ -369,24 +324,7 @@ fn build_structure_comparison(
369324
}
370325
}
371326

372-
/// Estimate effort for implementing changes
373-
fn estimate_effort(changes: &[smart_diff_engine::ClassifiedChange]) -> String {
374-
let total_changes = changes.len();
375-
match total_changes {
376-
0..=5 => "low".to_string(),
377-
6..=15 => "medium".to_string(),
378-
_ => "high".to_string(),
379-
}
380-
}
381327

382-
/// Extract affected components from changes
383-
fn extract_affected_components(changes: &[smart_diff_engine::ClassifiedChange]) -> Vec<String> {
384-
changes.iter()
385-
.filter_map(|c| c.location.function.clone())
386-
.collect::<std::collections::HashSet<_>>()
387-
.into_iter()
388-
.collect()
389-
}
390328

391329
/// Multi-file analysis endpoint
392330
pub async fn analyze(
@@ -431,9 +369,14 @@ async fn perform_multi_file_analysis(
431369

432370
// Analyze each file
433371
for file in files {
434-
let language = language_detector.detect_from_path(&file.path)
435-
.or_else(|| language_detector.detect_from_content(&file.content))
436-
.unwrap_or(Language::Unknown);
372+
let language = {
373+
let path_lang = LanguageDetector::detect_from_path(&file.path);
374+
if path_lang != Language::Unknown {
375+
path_lang
376+
} else {
377+
LanguageDetector::detect_from_content(&file.content)
378+
}
379+
};
437380
let parse_result = parser_engine.parse(&file.content, language)?;
438381
let semantic = semantic_analyzer.analyze(&parse_result)?;
439382

@@ -445,19 +388,19 @@ async fn perform_multi_file_analysis(
445388
name: f.signature.name.clone(),
446389
signature: format!("{}({})", f.signature.name,
447390
f.signature.parameters.iter()
448-
.map(|p| format!("{}: {}", p.name, p.param_type.to_string()))
391+
.map(|p| format!("{}: {}", p.name, p.param_type.name))
449392
.collect::<Vec<_>>()
450393
.join(", ")),
451394
start_line: f.location.start_line,
452395
end_line: f.location.end_line,
453396
complexity: 1, // Simplified
454397
parameters: f.signature.parameters.iter().map(|p| p.name.clone()).collect(),
455398
return_type: f.signature.return_type.as_ref()
456-
.map(|t| t.to_string())
399+
.map(|t| t.name.clone())
457400
.unwrap_or_else(|| "void".to_string()),
458401
}).collect();
459402

460-
all_functions.extend(functions);
403+
all_functions.extend(functions.clone());
461404

462405
let file_result = FileAnalysisResult {
463406
file: FileMetadata {
@@ -558,16 +501,12 @@ struct MultiFileAnalysisResult {
558501
summary: AnalysisSummary,
559502
}
560503

561-
fn calculate_complexity_distribution(functions: &[smart_diff_semantic::Function]) -> HashMap<String, usize> {
504+
fn calculate_complexity_distribution(functions: &[smart_diff_parser::Function]) -> HashMap<String, usize> {
562505
let mut distribution = HashMap::new();
563506

564-
for function in functions {
565-
let complexity_range = match function.complexity {
566-
0..=5 => "low",
567-
6..=10 => "medium",
568-
11..=20 => "high",
569-
_ => "very_high",
570-
};
507+
for _function in functions {
508+
// Simplified complexity calculation
509+
let complexity_range = "medium"; // Placeholder
571510
*distribution.entry(complexity_range.to_string()).or_insert(0) += 1;
572511
}
573512

@@ -590,16 +529,16 @@ fn detect_issues(symbol_table: &smart_diff_semantic::SymbolTable) -> Vec<String>
590529

591530
fn perform_cross_file_analysis(
592531
functions: &[smart_diff_parser::Function],
593-
files: &[FileInfo],
532+
_files: &[FileInfo],
594533
) -> anyhow::Result<CrossFileAnalysis> {
595534
// Detect duplicate functions
596535
let mut duplicate_functions = Vec::new();
597-
let mut seen_signatures = HashMap::new();
536+
let mut seen_signatures: HashMap<String, Vec<ChangeLocation>> = HashMap::new();
598537

599538
for function in functions {
600539
let signature_str = format!("{}({})", function.signature.name,
601540
function.signature.parameters.iter()
602-
.map(|p| format!("{}: {}", p.name, p.param_type.to_string()))
541+
.map(|p| format!("{}: {}", p.name, p.param_type.name))
603542
.collect::<Vec<_>>()
604543
.join(", "));
605544

@@ -679,9 +618,15 @@ fn extract_functions_from_symbol_table(symbol_table: &smart_diff_semantic::Symbo
679618
};
680619

681620
// Create a simple AST node for the function body
621+
let metadata = smart_diff_parser::NodeMetadata {
622+
line: 0,
623+
column: 0,
624+
original_text: String::new(),
625+
attributes: std::collections::HashMap::new(),
626+
};
682627
let body = smart_diff_parser::ASTNode::new(
683628
smart_diff_parser::NodeType::Function,
684-
smart_diff_parser::ASTMetadata::default(),
629+
metadata,
685630
);
686631

687632
let function = Function {

0 commit comments

Comments
 (0)