-
Notifications
You must be signed in to change notification settings - Fork 36
⚡ Bolt: Optimize issue endpoints with response caching #475
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change | ||||
|---|---|---|---|---|---|---|
|
|
@@ -9,8 +9,10 @@ | |||||
| import os | ||||||
| import logging | ||||||
| import hashlib | ||||||
| import json | ||||||
| from datetime import datetime, timezone | ||||||
|
|
||||||
| from fastapi import Response | ||||||
| from backend.database import get_db | ||||||
| from backend.models import Issue, PushSubscription | ||||||
| from backend.schemas import ( | ||||||
|
|
@@ -297,10 +299,11 @@ def get_nearby_issues( | |||||
| """ | ||||||
| try: | ||||||
| # Check cache first | ||||||
| cache_key = f"{latitude:.5f}_{longitude:.5f}_{radius}_{limit}" | ||||||
| cached_data = nearby_issues_cache.get(cache_key) | ||||||
| if cached_data: | ||||||
| return cached_data | ||||||
| # v2: cache JSON string to avoid conflicts with v1 list data and ensure safe typing | ||||||
| cache_key = f"v2_nearby_{latitude:.5f}_{longitude:.5f}_{radius}_{limit}" | ||||||
| cached_json = nearby_issues_cache.get(cache_key) | ||||||
| if cached_json: | ||||||
| return Response(content=cached_json, media_type="application/json") | ||||||
|
|
||||||
| # Query open issues with coordinates | ||||||
| # Optimization: Use bounding box to filter candidates in SQL | ||||||
|
|
@@ -345,9 +348,12 @@ def get_nearby_issues( | |||||
| ] | ||||||
|
|
||||||
| # Update cache | ||||||
| nearby_issues_cache.set(nearby_responses, cache_key) | ||||||
| # Optimize: Cache serialized JSON string to avoid serialization overhead on subsequent requests | ||||||
| data_dicts = [m.model_dump(mode='json') for m in nearby_responses] | ||||||
| json_content = json.dumps(data_dicts, default=str) | ||||||
| nearby_issues_cache.set(json_content, cache_key) | ||||||
|
|
||||||
RohanExploit marked this conversation as resolved.
Show resolved
Hide resolved
|
||||||
| return nearby_responses | ||||||
| return Response(content=json_content, media_type="application/json") | ||||||
|
|
||||||
| except Exception as e: | ||||||
| logger.error(f"Error getting nearby issues: {e}", exc_info=True) | ||||||
|
|
@@ -659,10 +665,11 @@ def get_recent_issues( | |||||
| offset: int = Query(0, ge=0, description="Number of issues to skip"), | ||||||
| db: Session = Depends(get_db) | ||||||
| ): | ||||||
| cache_key = f"recent_issues_{limit}_{offset}" | ||||||
| cached_data = recent_issues_cache.get(cache_key) | ||||||
| if cached_data: | ||||||
| return JSONResponse(content=cached_data) | ||||||
| # v2: cache JSON string to avoid conflicts with v1 list data and ensure safe typing | ||||||
| cache_key = f"v2_recent_issues_{limit}_{offset}" | ||||||
| cached_json = recent_issues_cache.get(cache_key) | ||||||
| if cached_json: | ||||||
| return Response(content=cached_json, media_type="application/json") | ||||||
|
|
||||||
| # Fetch issues with pagination | ||||||
| # Optimized: Use column projection to fetch only needed fields | ||||||
|
|
@@ -700,5 +707,7 @@ def get_recent_issues( | |||||
| }) | ||||||
|
|
||||||
| # Thread-safe cache update | ||||||
| recent_issues_cache.set(data, cache_key) | ||||||
| return data | ||||||
| # Optimize: Cache serialized JSON string to avoid serialization overhead on subsequent requests | ||||||
| json_content = json.dumps(data, default=str) | ||||||
|
||||||
| json_content = json.dumps(data, default=str) | |
| json_content = json.dumps(data) |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,144 @@ | ||
| import pytest | ||
| import json | ||
| import sys | ||
| import os | ||
| from datetime import datetime, timezone | ||
| from unittest.mock import MagicMock, patch | ||
|
|
||
| # Mock heavy/missing dependencies before importing app | ||
| sys.modules['magic'] = MagicMock() | ||
| sys.modules['telegram'] = MagicMock() | ||
| sys.modules['telegram.ext'] = MagicMock() | ||
| sys.modules['google'] = MagicMock() | ||
| sys.modules['google.generativeai'] = MagicMock() | ||
| sys.modules['google.cloud'] = MagicMock() | ||
| sys.modules['firebase_admin'] = MagicMock() | ||
| sys.modules['firebase_functions'] = MagicMock() | ||
| sys.modules['speech_recognition'] = MagicMock() | ||
| sys.modules['transformers'] = MagicMock() | ||
| sys.modules['ultralytics'] = MagicMock() | ||
| sys.modules['cv2'] = MagicMock() | ||
| sys.modules['pywebpush'] = MagicMock() | ||
| sys.modules['langdetect'] = MagicMock() | ||
| sys.modules['googletrans'] = MagicMock() | ||
| sys.modules['a2wsgi'] = MagicMock() | ||
|
|
||
| # Mock torch properly for scipy/sklearn checks | ||
| mock_torch = MagicMock() | ||
| class MockTensor: pass | ||
RohanExploit marked this conversation as resolved.
Show resolved
Hide resolved
|
||
| mock_torch.Tensor = MockTensor | ||
| sys.modules['torch'] = mock_torch | ||
|
|
||
RohanExploit marked this conversation as resolved.
Show resolved
Hide resolved
|
||
| # Now import app | ||
| from fastapi.testclient import TestClient | ||
| from backend.main import app | ||
| from backend.database import get_db | ||
|
|
||
| client = TestClient(app) | ||
|
|
||
| @pytest.fixture | ||
| def mock_db_session(): | ||
| mock_session = MagicMock() | ||
| app.dependency_overrides[get_db] = lambda: mock_session | ||
| yield mock_session | ||
| app.dependency_overrides = {} | ||
|
|
||
| @pytest.fixture | ||
| def mock_caches(): | ||
| with patch("backend.routers.issues.recent_issues_cache") as mock_recent, \ | ||
| patch("backend.routers.issues.nearby_issues_cache") as mock_nearby: | ||
|
|
||
| # Default behavior: cache miss | ||
| mock_recent.get.return_value = None | ||
| mock_nearby.get.return_value = None | ||
|
|
||
| yield mock_recent, mock_nearby | ||
|
|
||
| def test_get_recent_issues_cache_miss_then_hit(mock_db_session, mock_caches): | ||
| mock_recent, _ = mock_caches | ||
|
|
||
| # Setup DB return | ||
| mock_issue = MagicMock() | ||
| mock_issue.id = 1 | ||
| mock_issue.category = "Road" | ||
| mock_issue.description = "Test Issue" | ||
| mock_issue.created_at = datetime.now(timezone.utc) | ||
| mock_issue.image_path = None | ||
| mock_issue.status = "open" | ||
| mock_issue.upvotes = 0 | ||
| mock_issue.location = "Test Loc" | ||
| mock_issue.latitude = 10.0 | ||
| mock_issue.longitude = 20.0 | ||
|
|
||
| # Mock the query chain | ||
| mock_query = mock_db_session.query.return_value | ||
| mock_query.order_by.return_value.offset.return_value.limit.return_value.all.return_value = [mock_issue] | ||
|
|
||
| # 1. Cache Miss | ||
| response = client.get("/api/issues/recent") | ||
| assert response.status_code == 200 | ||
| data1 = response.json() | ||
| assert len(data1) == 1 | ||
| assert data1[0]["id"] == 1 | ||
|
|
||
| # Verify cache set was called | ||
| assert mock_recent.set.called | ||
| args, _ = mock_recent.set.call_args | ||
| cached_content = args[0] | ||
|
|
||
| # 2. Cache Hit | ||
| mock_recent.get.return_value = cached_content | ||
| mock_db_session.query.reset_mock() | ||
|
|
||
| response = client.get("/api/issues/recent") | ||
| assert response.status_code == 200 | ||
| data2 = response.json() | ||
|
|
||
| assert data1 == data2 | ||
| mock_db_session.query.assert_not_called() | ||
|
|
||
| def test_get_nearby_issues_cache_miss_then_hit(mock_db_session, mock_caches): | ||
| _, mock_nearby = mock_caches | ||
|
|
||
| # Setup DB return for nearby | ||
| mock_issue = MagicMock() | ||
| mock_issue.id = 2 | ||
| mock_issue.category = "Water" | ||
| mock_issue.description = "Nearby Issue" | ||
| mock_issue.created_at = datetime.now(timezone.utc) | ||
| mock_issue.status = "open" | ||
| mock_issue.latitude = 10.0 | ||
| mock_issue.longitude = 20.0 | ||
| mock_issue.upvotes = 5 | ||
|
|
||
| # db.query(...).filter(...).all() | ||
| mock_query = mock_db_session.query.return_value | ||
| mock_query.filter.return_value.all.return_value = [mock_issue] | ||
|
|
||
| with patch("backend.routers.issues.find_nearby_issues") as mock_find: | ||
| mock_find.return_value = [(mock_issue, 15.0)] # Issue, distance | ||
|
|
||
| # 1. Cache Miss | ||
| response = client.get("/api/issues/nearby?latitude=10.0&longitude=20.0") | ||
| assert response.status_code == 200 | ||
| data1 = response.json() | ||
| assert len(data1) == 1 | ||
| assert data1[0]["distance_meters"] == 15.0 | ||
|
|
||
| # Verify cache set | ||
| assert mock_nearby.set.called | ||
| args, _ = mock_nearby.set.call_args | ||
| cached_content = args[0] | ||
|
|
||
| # 2. Cache Hit | ||
| mock_nearby.get.return_value = cached_content | ||
| mock_db_session.query.reset_mock() | ||
| mock_find.reset_mock() | ||
|
|
||
| response = client.get("/api/issues/nearby?latitude=10.0&longitude=20.0") | ||
| assert response.status_code == 200 | ||
| data2 = response.json() | ||
|
|
||
| assert data1 == data2 | ||
| mock_db_session.query.assert_not_called() | ||
| mock_find.assert_not_called() | ||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The Response import on line 15 can be consolidated with the other FastAPI imports on line 2 for better organization. This would make:
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form, Query, Request, BackgroundTasks, status, Response