Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 21 additions & 12 deletions backend/routers/issues.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,10 @@
import os
import logging
import hashlib
import json
from datetime import datetime, timezone

from fastapi import Response
Copy link

Copilot AI Feb 25, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The Response import on line 15 can be consolidated with the other FastAPI imports on line 2 for better organization. This would make:
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form, Query, Request, BackgroundTasks, status, Response

Copilot uses AI. Check for mistakes.
from backend.database import get_db
from backend.models import Issue, PushSubscription
from backend.schemas import (
Expand Down Expand Up @@ -297,10 +299,11 @@ def get_nearby_issues(
"""
try:
# Check cache first
cache_key = f"{latitude:.5f}_{longitude:.5f}_{radius}_{limit}"
cached_data = nearby_issues_cache.get(cache_key)
if cached_data:
return cached_data
# v2: cache JSON string to avoid conflicts with v1 list data and ensure safe typing
cache_key = f"v2_nearby_{latitude:.5f}_{longitude:.5f}_{radius}_{limit}"
cached_json = nearby_issues_cache.get(cache_key)
if cached_json:
return Response(content=cached_json, media_type="application/json")

# Query open issues with coordinates
# Optimization: Use bounding box to filter candidates in SQL
Expand Down Expand Up @@ -345,9 +348,12 @@ def get_nearby_issues(
]

# Update cache
nearby_issues_cache.set(nearby_responses, cache_key)
# Optimize: Cache serialized JSON string to avoid serialization overhead on subsequent requests
data_dicts = [m.model_dump(mode='json') for m in nearby_responses]
json_content = json.dumps(data_dicts, default=str)
nearby_issues_cache.set(json_content, cache_key)

return nearby_responses
return Response(content=json_content, media_type="application/json")

except Exception as e:
logger.error(f"Error getting nearby issues: {e}", exc_info=True)
Expand Down Expand Up @@ -659,10 +665,11 @@ def get_recent_issues(
offset: int = Query(0, ge=0, description="Number of issues to skip"),
db: Session = Depends(get_db)
):
cache_key = f"recent_issues_{limit}_{offset}"
cached_data = recent_issues_cache.get(cache_key)
if cached_data:
return JSONResponse(content=cached_data)
# v2: cache JSON string to avoid conflicts with v1 list data and ensure safe typing
cache_key = f"v2_recent_issues_{limit}_{offset}"
cached_json = recent_issues_cache.get(cache_key)
if cached_json:
return Response(content=cached_json, media_type="application/json")

# Fetch issues with pagination
# Optimized: Use column projection to fetch only needed fields
Expand Down Expand Up @@ -700,5 +707,7 @@ def get_recent_issues(
})

# Thread-safe cache update
recent_issues_cache.set(data, cache_key)
return data
# Optimize: Cache serialized JSON string to avoid serialization overhead on subsequent requests
json_content = json.dumps(data, default=str)
Copy link

Copilot AI Feb 25, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The default=str fallback in json.dumps is too broad and could mask serialization issues. Since created_at is already converted to ISO format string on line 700, the default=str won't be called for datetimes. However, for other potentially non-JSON-serializable types, this will silently convert them to strings using their str method, which may not produce valid JSON representations.

Consider being more explicit about which types need custom serialization, or verify that all data is already JSON-serializable before calling json.dumps. Pydantic's model_dump(mode='json') (used in get_nearby_issues) is a more robust approach as it handles all Pydantic types consistently.

Suggested change
json_content = json.dumps(data, default=str)
json_content = json.dumps(data)

Copilot uses AI. Check for mistakes.
recent_issues_cache.set(json_content, cache_key)
return Response(content=json_content, media_type="application/json")
144 changes: 144 additions & 0 deletions backend/tests/test_issues_cache_mocked.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,144 @@
import pytest
import json
import sys
import os
from datetime import datetime, timezone
from unittest.mock import MagicMock, patch

# Stub out heavy/optional third-party dependencies so that importing the
# app below does not require them to be installed in the test environment.
_STUBBED_MODULES = (
    'magic',
    'telegram',
    'telegram.ext',
    'google',
    'google.generativeai',
    'google.cloud',
    'firebase_admin',
    'firebase_functions',
    'speech_recognition',
    'transformers',
    'ultralytics',
    'cv2',
    'pywebpush',
    'langdetect',
    'googletrans',
    'a2wsgi',
)
for _name in _STUBBED_MODULES:
    sys.modules[_name] = MagicMock()

# torch needs a real class as its Tensor attribute so scipy/sklearn
# isinstance-style checks don't trip over a plain MagicMock.
class MockTensor: pass
mock_torch = MagicMock()
mock_torch.Tensor = MockTensor
sys.modules['torch'] = mock_torch

# The app may only be imported after the stubs above are installed.
from fastapi.testclient import TestClient
from backend.main import app
from backend.database import get_db

client = TestClient(app)

@pytest.fixture
def mock_db_session():
    """Override the app's get_db dependency with a MagicMock session.

    Yields the mock session so tests can program the query chain.
    On teardown, restores only the get_db entry instead of replacing the
    whole dependency_overrides dict — the original `app.dependency_overrides = {}`
    would silently wipe overrides installed by other fixtures and discard
    any override for get_db that existed before this fixture ran.
    """
    mock_session = MagicMock()
    previous = app.dependency_overrides.get(get_db)
    app.dependency_overrides[get_db] = lambda: mock_session
    yield mock_session
    if previous is None:
        app.dependency_overrides.pop(get_db, None)
    else:
        app.dependency_overrides[get_db] = previous

@pytest.fixture
def mock_caches():
    """Patch both issue caches used by the router.

    Yields (recent_cache_mock, nearby_cache_mock); by default every
    cache lookup reports a miss (get returns None).
    """
    recent_patcher = patch("backend.routers.issues.recent_issues_cache")
    nearby_patcher = patch("backend.routers.issues.nearby_issues_cache")
    mocked_recent = recent_patcher.start()
    mocked_nearby = nearby_patcher.start()
    try:
        mocked_recent.get.return_value = None
        mocked_nearby.get.return_value = None
        yield mocked_recent, mocked_nearby
    finally:
        nearby_patcher.stop()
        recent_patcher.stop()

def test_get_recent_issues_cache_miss_then_hit(mock_db_session, mock_caches):
    """First request populates the cache from the DB; second request is served
    entirely from the cache without touching the DB."""
    mock_recent, _ = mock_caches

    # One fake issue row for the DB query chain to return.
    issue = MagicMock()
    issue.id = 1
    issue.category = "Road"
    issue.description = "Test Issue"
    issue.created_at = datetime.now(timezone.utc)
    issue.image_path = None
    issue.status = "open"
    issue.upvotes = 0
    issue.location = "Test Loc"
    issue.latitude = 10.0
    issue.longitude = 20.0

    # db.query(...).order_by(...).offset(...).limit(...).all()
    query_chain = mock_db_session.query.return_value
    query_chain.order_by.return_value.offset.return_value.limit.return_value.all.return_value = [issue]

    # --- Cache miss: data comes from the DB and gets written to the cache ---
    first = client.get("/api/issues/recent")
    assert first.status_code == 200
    first_payload = first.json()
    assert len(first_payload) == 1
    assert first_payload[0]["id"] == 1

    assert mock_recent.set.called
    positional, _ = mock_recent.set.call_args
    stored_content = positional[0]

    # --- Cache hit: identical payload, DB never queried ---
    mock_recent.get.return_value = stored_content
    mock_db_session.query.reset_mock()

    second = client.get("/api/issues/recent")
    assert second.status_code == 200
    assert first_payload == second.json()
    mock_db_session.query.assert_not_called()

def test_get_nearby_issues_cache_miss_then_hit(mock_db_session, mock_caches):
    """Nearby endpoint: first call uses the DB and the distance helper;
    second call is answered from the cache without either."""
    _, mock_nearby = mock_caches

    # One fake issue near the queried coordinates.
    issue = MagicMock()
    issue.id = 2
    issue.category = "Water"
    issue.description = "Nearby Issue"
    issue.created_at = datetime.now(timezone.utc)
    issue.status = "open"
    issue.latitude = 10.0
    issue.longitude = 20.0
    issue.upvotes = 5

    # db.query(...).filter(...).all()
    mock_db_session.query.return_value.filter.return_value.all.return_value = [issue]

    with patch("backend.routers.issues.find_nearby_issues") as mock_find:
        mock_find.return_value = [(issue, 15.0)]  # (issue, distance in meters)

        # --- Cache miss: result computed and stored ---
        first = client.get("/api/issues/nearby?latitude=10.0&longitude=20.0")
        assert first.status_code == 200
        first_payload = first.json()
        assert len(first_payload) == 1
        assert first_payload[0]["distance_meters"] == 15.0

        assert mock_nearby.set.called
        positional, _ = mock_nearby.set.call_args
        stored_content = positional[0]

        # --- Cache hit: same payload, no DB query, no distance computation ---
        mock_nearby.get.return_value = stored_content
        mock_db_session.query.reset_mock()
        mock_find.reset_mock()

        second = client.get("/api/issues/nearby?latitude=10.0&longitude=20.0")
        assert second.status_code == 200
        assert first_payload == second.json()
        mock_db_session.query.assert_not_called()
        mock_find.assert_not_called()