diff --git a/src/ai-crawler/.env b/src/ai-crawler/.env new file mode 100644 index 0000000..3315a42 --- /dev/null +++ b/src/ai-crawler/.env @@ -0,0 +1,6 @@ +API_BASE_URL=http://localhost:3000 +WEBHOOK_SECRET=your_secret_key_here + +# Optional: Proxy settings nếu cần +# HTTP_PROXY=http://proxy:port +# HTTPS_PROXY=https://proxy:port \ No newline at end of file diff --git a/src/ai-crawler/crawler.py b/src/ai-crawler/crawler.py new file mode 100644 index 0000000..5acdcd1 --- /dev/null +++ b/src/ai-crawler/crawler.py @@ -0,0 +1,204 @@ +import requests +import time +import os +from datetime import datetime +from dotenv import load_dotenv + +# Load environment variables +load_dotenv() + +class ScamCrawler: + def __init__(self, api_base_url=None): + self.api_base_url = api_base_url or os.getenv('API_BASE_URL', 'http://localhost:3000') + print(f"✅ Crawler initialized with API: {self.api_base_url}") + + def start_crawl(self, source_id): + """Bắt đầu crawl và tạo log""" + print(f"📡 Starting crawl for source {source_id}...") + + try: + response = requests.post( + f"{self.api_base_url}/api/ai/crawl-start", + json={"source_id": source_id}, + timeout=10 + ) + response.raise_for_status() + data = response.json() + + if data.get("success"): + print(f"✅ Crawl log created: {data['data']['crawl_log_id']}") + return data["data"]["crawl_log_id"] + else: + raise Exception(f"API returned error: {data.get('error')}") + + except requests.exceptions.RequestException as e: + print(f"❌ Network error: {str(e)}") + raise + except Exception as e: + print(f"❌ Error: {str(e)}") + raise + + def save_phone_number(self, phone_number, source_id, confidence_score, scam_type=None, raw_data=None): + """Lưu số điện thoại đã crawl được""" + try: + response = requests.post( + f"{self.api_base_url}/api/ai/save-numbers", + json={ + "phone_number": phone_number, + "source_id": source_id, + "confidence_score": confidence_score, + "scam_type": scam_type, + "raw_data": raw_data + }, + timeout=10 + ) + 
response.raise_for_status() + return response.json() + except Exception as e: + print(f"❌ Error saving phone {phone_number}: {str(e)}") + return {"success": False, "error": str(e)} + + def complete_crawl(self, crawl_log_id, status, numbers_found, numbers_added, + numbers_updated, crawl_duration, error_message=None): + """Hoàn thành crawl và cập nhật log""" + try: + response = requests.post( + f"{self.api_base_url}/api/ai/crawl-complete", + json={ + "crawl_log_id": crawl_log_id, + "status": status, + "numbers_found": numbers_found, + "numbers_added": numbers_added, + "numbers_updated": numbers_updated, + "crawl_duration": crawl_duration, + "error_message": error_message + }, + timeout=10 + ) + response.raise_for_status() + return response.json() + except Exception as e: + print(f"❌ Error completing crawl: {str(e)}") + return {"success": False, "error": str(e)} + + def crawl_source(self, source_id, source_url): + """Main crawl function""" + print(f"\n{'='*60}") + print(f"🚀 Starting crawl for source {source_id}: {source_url}") + print(f"{'='*60}") + + start_time = time.time() + crawl_log_id = None + numbers_found = 0 + numbers_added = 0 + numbers_updated = 0 + + try: + # 1. Bắt đầu crawl + crawl_log_id = self.start_crawl(source_id) + + # 2. Crawl website + print(f"🔍 Extracting phone numbers from {source_url}...") + phone_numbers = self.extract_phone_numbers(source_url) + numbers_found = len(phone_numbers) + print(f"📊 Found {numbers_found} phone numbers") + + # 3. 
Lưu từng số điện thoại + for i, phone_data in enumerate(phone_numbers, 1): + print(f"💾 Saving phone {i}/{numbers_found}: {phone_data['phone']}") + + result = self.save_phone_number( + phone_number=phone_data["phone"], + source_id=source_id, + confidence_score=phone_data["confidence"], + scam_type=phone_data.get("scam_type"), + raw_data=phone_data.get("raw_data") + ) + + if result.get("success"): + if result["data"]["is_new"]: + numbers_added += 1 + print(f" ✅ New number added") + else: + numbers_updated += 1 + print(f" ♻️ Existing number updated") + else: + print(f" ❌ Failed to save") + + # 4. Hoàn thành crawl + crawl_duration = int(time.time() - start_time) + self.complete_crawl( + crawl_log_id=crawl_log_id, + status="success", + numbers_found=numbers_found, + numbers_added=numbers_added, + numbers_updated=numbers_updated, + crawl_duration=crawl_duration + ) + + print(f"\n{'='*60}") + print(f"🎉 Crawl completed successfully!") + print(f"📊 Stats:") + print(f" - Found: {numbers_found}") + print(f" - Added: {numbers_added}") + print(f" - Updated: {numbers_updated}") + print(f" - Duration: {crawl_duration}s") + print(f"{'='*60}\n") + + except Exception as e: + print(f"\n{'='*60}") + print(f"❌ Error during crawl: {str(e)}") + print(f"{'='*60}\n") + + if crawl_log_id: + crawl_duration = int(time.time() - start_time) + self.complete_crawl( + crawl_log_id=crawl_log_id, + status="failed", + numbers_found=numbers_found, + numbers_added=numbers_added, + numbers_updated=numbers_updated, + crawl_duration=crawl_duration, + error_message=str(e) + ) + + def extract_phone_numbers(self, url): + """ + Crawl website và extract số điện thoại + TODO: Implement actual web scraping logic + """ + # Placeholder - trả về dữ liệu mẫu + print("⚠️ Using mock data (implement actual crawling)") + + import random + + mock_phones = [ + f"09{random.randint(10000000, 99999999)}", + f"03{random.randint(10000000, 99999999)}", + f"07{random.randint(10000000, 99999999)}", + ] + + return [ + { + "phone": 
phone, + "confidence": round(random.uniform(70, 99), 1), + "scam_type": random.choice(["loan_scam", "investment_fraud", "impersonation"]), + "raw_data": {"source_text": f"Mock data for {phone}"} + } + for phone in mock_phones + ] + +# Usage example +if __name__ == "__main__": + print("🤖 ScamCrawler Test") + print(f"⏰ Started at: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n") + + crawler = ScamCrawler() + + # Test crawl với source_id = 1 + crawler.crawl_source( + source_id=1, + source_url="https://example-forum.com/scam-reports" + ) + + print(f"\n⏰ Finished at: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}") \ No newline at end of file diff --git a/src/ai-crawler/requirements.txt b/src/ai-crawler/requirements.txt new file mode 100644 index 0000000..06eda72 --- /dev/null +++ b/src/ai-crawler/requirements.txt @@ -0,0 +1,5 @@ +requests==2.31.0 +schedule==1.2.0 +beautifulsoup4==4.12.2 +selenium==4.15.2 +python-dotenv==1.0.0 \ No newline at end of file diff --git a/src/ai-crawler/scheduler.py b/src/ai-crawler/scheduler.py new file mode 100644 index 0000000..5a82028 --- /dev/null +++ b/src/ai-crawler/scheduler.py @@ -0,0 +1,74 @@ +import schedule +import time +import requests +from crawler import ScamCrawler + +class CrawlScheduler: + def __init__(self, api_base_url="http://localhost:3000"): + self.api_base_url = api_base_url + self.crawler = ScamCrawler(api_base_url) + + def get_active_sources(self): + """Lấy danh sách web sources đang hoạt động""" + response = requests.get(f"{self.api_base_url}/api/admin/web-sources") + data = response.json() + + if data["success"]: + # Chỉ lấy sources đang active + return [s for s in data["data"] if s["is_active"]] + return [] + + def crawl_all_sources(self): + """Crawl tất cả sources""" + sources = self.get_active_sources() + print(f"Found {len(sources)} active sources") + + for source in sources: + try: + self.crawler.crawl_source( + source_id=source["id"], + source_url=source["source_url"] + ) + time.sleep(5) # Delay giữa các 
requests + except Exception as e: + print(f"Error crawling source {source['id']}: {str(e)}") + + def crawl_by_frequency(self, frequency): + """Crawl sources theo tần suất""" + sources = self.get_active_sources() + sources_to_crawl = [s for s in sources if s["crawl_frequency"] == frequency] + + print(f"Crawling {len(sources_to_crawl)} sources with frequency: {frequency}") + + for source in sources_to_crawl: + try: + self.crawler.crawl_source( + source_id=source["id"], + source_url=source["source_url"] + ) + time.sleep(5) + except Exception as e: + print(f"Error: {str(e)}") + + def start(self): + """Khởi động scheduler""" + print("Starting crawler scheduler...") + + # Schedule hourly crawls + schedule.every().hour.do(lambda: self.crawl_by_frequency("hourly")) + + # Schedule daily crawls (at 2 AM) + schedule.every().day.at("02:00").do(lambda: self.crawl_by_frequency("daily")) + + # Schedule weekly crawls (Monday at 3 AM) + schedule.every().monday.at("03:00").do(lambda: self.crawl_by_frequency("weekly")) + + print("Scheduler started. 
Running...") + + while True: + schedule.run_pending() + time.sleep(60) # Check every minute + +if __name__ == "__main__": + scheduler = CrawlScheduler() + scheduler.start() \ No newline at end of file diff --git a/src/app/api/admin/ai-logs/route.ts b/src/app/api/admin/ai-logs/route.ts new file mode 100644 index 0000000..70eb064 --- /dev/null +++ b/src/app/api/admin/ai-logs/route.ts @@ -0,0 +1,68 @@ +import { NextResponse } from 'next/server'; +import pool from '@/app/lib/db'; + +export async function GET(request: Request) { + try { + const { searchParams } = new URL(request.url); + const limit = parseInt(searchParams.get('limit') || '50'); + const source_id = searchParams.get('source_id'); + const status = searchParams.get('status'); + + let query = ` + SELECT + acl.*, + ws.source_name, + ws.source_url, + ws.priority + FROM ai_crawl_logs acl + LEFT JOIN web_sources ws ON acl.source_id = ws.id + WHERE 1=1 + `; + + const params: any[] = []; + let paramIndex = 1; + + if (source_id) { + query += ` AND acl.source_id = $${paramIndex}`; + params.push(source_id); + paramIndex++; + } + + if (status) { + query += ` AND acl.status = $${paramIndex}`; + params.push(status); + paramIndex++; + } + + query += ` ORDER BY acl.started_at DESC LIMIT $${paramIndex}`; + params.push(limit); + + const result = await pool.query(query, params); + + const statsQuery = ` + SELECT + COUNT(*) as total_crawls, + COUNT(*) FILTER (WHERE status = 'success') as successful, + COUNT(*) FILTER (WHERE status = 'failed') as failed, + SUM(numbers_found) as total_numbers_found, + SUM(numbers_added) as total_numbers_added, + AVG(crawl_duration) as avg_duration + FROM ai_crawl_logs + WHERE started_at >= CURRENT_DATE - INTERVAL '30 days'; + `; + + const statsResult = await pool.query(statsQuery); + + return NextResponse.json({ + success: true, + data: result.rows, + stats: statsResult.rows[0] + }); + } catch (error) { + console.error('Database error:', error); + return NextResponse.json( + { success: false, 
error: 'Failed to fetch AI logs' }, + { status: 500 } + ); + } +} \ No newline at end of file diff --git a/src/app/api/chat/admin/phone-numbers/route.ts b/src/app/api/admin/phone-numbers/route.ts similarity index 100% rename from src/app/api/chat/admin/phone-numbers/route.ts rename to src/app/api/admin/phone-numbers/route.ts diff --git a/src/app/api/chat/admin/reports/route.ts b/src/app/api/admin/reports/route.ts similarity index 100% rename from src/app/api/chat/admin/reports/route.ts rename to src/app/api/admin/reports/route.ts diff --git a/src/app/api/chat/admin/stats/route.ts b/src/app/api/admin/stats/route.ts similarity index 100% rename from src/app/api/chat/admin/stats/route.ts rename to src/app/api/admin/stats/route.ts diff --git a/src/app/api/admin/trigger-crawl/route.ts b/src/app/api/admin/trigger-crawl/route.ts new file mode 100644 index 0000000..597eb20 --- /dev/null +++ b/src/app/api/admin/trigger-crawl/route.ts @@ -0,0 +1,50 @@ +import { NextResponse } from 'next/server'; +import pool from '@/app/lib/db'; + +export async function POST(request: Request) { + try { + const { source_id } = await request.json(); + + if (!source_id) { + return NextResponse.json( + { success: false, error: 'source_id is required' }, + { status: 400 } + ); + } + + const checkQuery = `SELECT * FROM web_sources WHERE id = $1 AND is_active = true`; + const checkResult = await pool.query(checkQuery, [source_id]); + + if (checkResult.rows.length === 0) { + return NextResponse.json( + { success: false, error: 'Web source not found or inactive' }, + { status: 404 } + ); + } + + const source = checkResult.rows[0]; + + const logQuery = ` + INSERT INTO ai_crawl_logs (source_id, status, started_at) + VALUES ($1, 'pending', CURRENT_TIMESTAMP) + RETURNING id; + `; + const logResult = await pool.query(logQuery, [source_id]); + + return NextResponse.json({ + success: true, + message: `Crawl triggered for ${source.source_name}`, + data: { + crawl_log_id: logResult.rows[0].id, + source_name: 
source.source_name, + source_url: source.source_url + } + }); + } catch (error) { + console.error('Error triggering crawl:', error); + return NextResponse.json( + { success: false, error: 'Failed to trigger crawl' }, + { status: 500 } + ); + } +} \ No newline at end of file diff --git a/src/app/api/chat/admin/users/route.ts b/src/app/api/admin/users/route.ts similarity index 100% rename from src/app/api/chat/admin/users/route.ts rename to src/app/api/admin/users/route.ts diff --git a/src/app/api/admin/web-sources/route.ts b/src/app/api/admin/web-sources/route.ts new file mode 100644 index 0000000..bd116a5 --- /dev/null +++ b/src/app/api/admin/web-sources/route.ts @@ -0,0 +1,162 @@ +import { NextResponse } from 'next/server'; +import pool from '@/app/lib/db'; + +export async function GET() { + try { + const query = ` + SELECT + ws.*, + COUNT(DISTINCT acl.id) as total_crawls, + SUM(acl.numbers_found) as total_numbers_found, + AVG(acl.crawl_duration) as avg_duration, + MAX(acl.completed_at) as last_crawl, + COUNT(acl.id) FILTER (WHERE acl.status = 'success') as successful_crawls, + COUNT(acl.id) FILTER (WHERE acl.status = 'failed') as failed_crawls + FROM web_sources ws + LEFT JOIN ai_crawl_logs acl ON ws.id = acl.source_id + GROUP BY ws.id + ORDER BY ws.priority DESC, ws.created_at DESC; + `; + + const result = await pool.query(query); + + return NextResponse.json({ + success: true, + data: result.rows + }); + } catch (error) { + console.error('Database error:', error); + return NextResponse.json( + { success: false, error: 'Failed to fetch web sources' }, + { status: 500 } + ); + } +} + +export async function POST(request: Request) { + try { + const { source_name, source_url, crawl_frequency, priority } = await request.json(); + + if (!source_name || !source_url) { + return NextResponse.json( + { success: false, error: 'source_name and source_url are required' }, + { status: 400 } + ); + } + + const query = ` + INSERT INTO web_sources ( + source_name, + source_url, 
+ crawl_frequency, + priority, + is_active + ) + VALUES ($1, $2, $3, $4, true) + RETURNING *; + `; + + const result = await pool.query(query, [ + source_name, + source_url, + crawl_frequency || 'daily', + priority || 3 + ]); + + return NextResponse.json({ + success: true, + data: result.rows[0], + message: 'Web source added successfully' + }); + } catch (error) { + console.error('Database error:', error); + return NextResponse.json( + { success: false, error: 'Failed to add web source' }, + { status: 500 } + ); + } +} + +export async function PATCH(request: Request) { + try { + const { id, is_active, priority, crawl_frequency } = await request.json(); + + if (!id) { + return NextResponse.json( + { success: false, error: 'id is required' }, + { status: 400 } + ); + } + + const query = ` + UPDATE web_sources + SET + is_active = COALESCE($2, is_active), + priority = COALESCE($3, priority), + crawl_frequency = COALESCE($4, crawl_frequency) + WHERE id = $1 + RETURNING *; + `; + + const result = await pool.query(query, [ + id, + is_active, + priority, + crawl_frequency + ]); + + if (result.rows.length === 0) { + return NextResponse.json( + { success: false, error: 'Web source not found' }, + { status: 404 } + ); + } + + return NextResponse.json({ + success: true, + data: result.rows[0], + message: 'Web source updated successfully' + }); + } catch (error) { + console.error('Database error:', error); + return NextResponse.json( + { success: false, error: 'Failed to update web source' }, + { status: 500 } + ); + } +} + +export async function DELETE(request: Request) { + try { + const { searchParams } = new URL(request.url); + const id = searchParams.get('id'); + + if (!id) { + return NextResponse.json( + { success: false, error: 'id is required' }, + { status: 400 } + ); + } + + const query = `DELETE FROM web_sources WHERE id = $1 RETURNING *`; + const result = await pool.query(query, [id]); + + if (result.rows.length === 0) { + return NextResponse.json( + { success: false, 
error: 'Web source not found' }, + { status: 404 } + ); + } + + return NextResponse.json({ + success: true, + message: 'Web source deleted successfully' + }); + } catch (error) { + console.error('Database error:', error); + return NextResponse.json( + { success: false, error: 'Failed to delete web source' }, + { status: 500 } + ); + } +} \ No newline at end of file diff --git a/src/app/api/ai/crawl-complete/route.ts b/src/app/api/ai/crawl-complete/route.ts new file mode 100644 index 0000000..29b87d2 --- /dev/null +++ b/src/app/api/ai/crawl-complete/route.ts @@ -0,0 +1,80 @@ +import { NextResponse } from 'next/server'; +import pool from '@/app/lib/db'; + +export async function POST(request: Request) { + try { + const { + crawl_log_id, + status, + numbers_found, + numbers_added, + numbers_updated, + error_message, + crawl_duration + } = await request.json(); + + if (!crawl_log_id) { + return NextResponse.json( + { success: false, error: 'crawl_log_id is required' }, + { status: 400 } + ); + } + + const query = ` + UPDATE ai_crawl_logs + SET + status = $1, + numbers_found = $2, + numbers_added = $3, + numbers_updated = $4, + error_message = $5, + crawl_duration = $6, + completed_at = CURRENT_TIMESTAMP + WHERE id = $7 + RETURNING *; + `; + + const result = await pool.query(query, [ + status, + numbers_found || 0, + numbers_added || 0, + numbers_updated || 0, + error_message || null, + crawl_duration || 0, + crawl_log_id + ]); + + if (result.rows.length === 0) { + return NextResponse.json( + { success: false, error: 'Crawl log not found' }, + { status: 404 } + ); + } + + if (status === 'success') { + await pool.query(` + UPDATE web_sources + SET + total_numbers_found = total_numbers_found + $1, + success_rate = ( + SELECT + (COUNT(*) FILTER (WHERE status = 'success')::DECIMAL / COUNT(*)) * 100 + FROM ai_crawl_logs + WHERE source_id = (SELECT source_id FROM ai_crawl_logs WHERE id = $2) + ) + WHERE id = (SELECT source_id FROM ai_crawl_logs WHERE id = $2) + `, 
[numbers_found, crawl_log_id]); + } + + return NextResponse.json({ + success: true, + data: result.rows[0] + }); + } catch (error) { + console.error('Error completing crawl:', error); + return NextResponse.json( + { success: false, error: 'Failed to complete crawl' }, + { status: 500 } + ); + } +} \ No newline at end of file diff --git a/src/app/api/ai/crawl-start/route.ts b/src/app/api/ai/crawl-start/route.ts new file mode 100644 index 0000000..9f922dc --- /dev/null +++ b/src/app/api/ai/crawl-start/route.ts @@ -0,0 +1,47 @@ +import { NextResponse } from 'next/server'; +import pool from '@/app/lib/db'; + +export async function POST(request: Request) { + try { + const { source_id } = await request.json(); + + if (!source_id) { + return NextResponse.json( + { success: false, error: 'source_id is required' }, + { status: 400 } + ); + } + + const query = ` + INSERT INTO ai_crawl_logs ( + source_id, + status, + started_at + ) + VALUES ($1, 'running', CURRENT_TIMESTAMP) + RETURNING id, started_at; + `; + + const result = await pool.query(query, [source_id]); + const crawlLog = result.rows[0]; + + await pool.query( + `UPDATE web_sources SET last_crawled_at = CURRENT_TIMESTAMP WHERE id = $1`, + [source_id] + ); + + return NextResponse.json({ + success: true, + data: { + crawl_log_id: crawlLog.id, + started_at: crawlLog.started_at + } + }); + } catch (error) { + console.error('Error starting crawl:', error); + return NextResponse.json( + { success: false, error: 'Failed to start crawl' }, + { status: 500 } + ); + } +} \ No newline at end of file diff --git a/src/app/api/ai/save-numbers/route.ts b/src/app/api/ai/save-numbers/route.ts new file mode 100644 index 0000000..5eeed23 --- /dev/null +++ b/src/app/api/ai/save-numbers/route.ts @@ -0,0 +1,127 @@ +import { NextResponse } from 'next/server'; +import pool from '@/app/lib/db'; + +export async function POST(request: Request) { + try { + const { + phone_number, + source_id, + scam_type, + confidence_score, + raw_data + } = 
await request.json(); + + if (!phone_number || !source_id) { + return NextResponse.json( + { success: false, error: 'phone_number and source_id are required' }, + { status: 400 } + ); + } + + const client = await pool.connect(); + + try { + await client.query('BEGIN'); + + const scamDataQuery = ` + INSERT INTO scam_data ( + phone_number, + source_id, + scam_type, + confidence_score, + raw_data + ) + VALUES ($1, $2, $3, $4, $5) + RETURNING id; + `; + + await client.query(scamDataQuery, [ + phone_number, + source_id, + scam_type || null, + confidence_score || 0, + raw_data || {} + ]); + + const checkQuery = ` + SELECT id, total_reports FROM phone_numbers WHERE phone_number = $1 + `; + const checkResult = await client.query(checkQuery, [phone_number]); + + let phoneNumberId; + let isNew = false; + + if (checkResult.rows.length === 0) { + let riskLevel = 'low'; + if (confidence_score >= 90) riskLevel = 'critical'; + else if (confidence_score >= 70) riskLevel = 'high'; + else if (confidence_score >= 50) riskLevel = 'medium'; + + const insertQuery = ` + INSERT INTO phone_numbers ( + phone_number, + is_scam, + risk_level, + total_reports, + first_reported_at + ) + VALUES ($1, $2, $3, 1, CURRENT_TIMESTAMP) + RETURNING id; + `; + + const insertResult = await client.query(insertQuery, [ + phone_number, + confidence_score >= 50, + riskLevel + ]); + + phoneNumberId = insertResult.rows[0].id; + isNew = true; + } else { + phoneNumberId = checkResult.rows[0].id; + + const updateQuery = ` + UPDATE phone_numbers + SET + total_reports = total_reports + 1, + last_updated = CURRENT_TIMESTAMP, + is_scam = CASE + WHEN $2 >= 50 THEN true + ELSE is_scam + END, + risk_level = CASE + WHEN $2 >= 90 THEN 'critical' + WHEN $2 >= 70 THEN 'high' + WHEN $2 >= 50 THEN 'medium' + ELSE risk_level + END + WHERE id = $1; + `; + + await client.query(updateQuery, [phoneNumberId, confidence_score]); + } + + await client.query('COMMIT'); + + return NextResponse.json({ + success: true, + data: { + 
phone_number_id: phoneNumberId, + is_new: isNew, + action: isNew ? 'created' : 'updated' + } + }); + } catch (error) { + await client.query('ROLLBACK'); + throw error; + } finally { + client.release(); + } + } catch (error) { + console.error('Error saving phone number:', error); + return NextResponse.json( + { success: false, error: 'Failed to save phone number' }, + { status: 500 } + ); + } +} \ No newline at end of file diff --git a/src/app/api/chat/admin/ai-logs/route.ts b/src/app/api/chat/admin/ai-logs/route.ts deleted file mode 100644 index b6782f4..0000000 --- a/src/app/api/chat/admin/ai-logs/route.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { NextResponse } from 'next/server'; -import pool from '@/app/lib/db'; - -export async function GET() { - try { - const query = ` - SELECT - acl.*, - ws.source_name, - ws.source_url - FROM ai_crawl_logs acl - LEFT JOIN web_sources ws ON acl.source_id = ws.id - ORDER BY acl.started_at DESC - LIMIT 50; - `; - - const result = await pool.query(query); - - return NextResponse.json({ - success: true, - data: result.rows - }); - } catch (error) { - console.error('Database error:', error); - return NextResponse.json( - { success: false, error: 'Failed to fetch AI logs' }, - { status: 500 } - ); - } -} \ No newline at end of file diff --git a/src/app/api/chat/admin/web-sources/route.ts b/src/app/api/chat/admin/web-sources/route.ts deleted file mode 100644 index bd23790..0000000 --- a/src/app/api/chat/admin/web-sources/route.ts +++ /dev/null @@ -1,60 +0,0 @@ -import { NextResponse } from 'next/server'; -import pool from '@/app/lib/db'; - -export async function GET() { - try { - const query = ` - SELECT - ws.*, - COUNT(acl.id) as total_crawls, - SUM(acl.numbers_found) as total_numbers_found - FROM web_sources ws - LEFT JOIN ai_crawl_logs acl ON ws.id = acl.source_id - GROUP BY ws.id - ORDER BY ws.priority DESC; - `; - - const result = await pool.query(query); - - return NextResponse.json({ - success: true, - data: result.rows - }); - 
} catch (error) { - console.error('Database error:', error); - return NextResponse.json( - { success: false, error: 'Failed to fetch web sources' }, - { status: 500 } - ); - } -} - -export async function POST(request: Request) { - try { - const { source_name, source_url, crawl_frequency, priority } = await request.json(); - - const query = ` - INSERT INTO web_sources (source_name, source_url, crawl_frequency, priority) - VALUES ($1, $2, $3, $4) - RETURNING *; - `; - - const result = await pool.query(query, [ - source_name, - source_url, - crawl_frequency, - priority - ]); - - return NextResponse.json({ - success: true, - data: result.rows[0] - }); - } catch (error) { - console.error('Database error:', error); - return NextResponse.json( - { success: false, error: 'Failed to add web source' }, - { status: 500 } - ); - } -} \ No newline at end of file