Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
44 changes: 40 additions & 4 deletions backend/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,13 @@ class UserRole(enum.Enum):
USER = "user"
OFFICIAL = "official"

class VerificationStatus(enum.Enum):
    """Lifecycle state of a piece of resolution evidence.

    Used as the value type of ``ResolutionEvidence.verification_status``;
    new evidence defaults to PENDING there.
    """
    PENDING = "pending"              # awaiting verification
    VERIFIED = "verified"            # evidence passed verification checks
    FLAGGED = "flagged"              # suspicious; needs manual review
    FRAUD_DETECTED = "fraud_detected"  # verification concluded the evidence is fraudulent


class User(Base):
__tablename__ = "users"

Expand Down Expand Up @@ -258,10 +265,21 @@ class ResolutionEvidence(Base):
__tablename__ = "resolution_evidence"
id = Column(Integer, primary_key=True, index=True)
grievance_id = Column(Integer, ForeignKey("grievances.id"), nullable=False)
file_path = Column(String, nullable=False)
token_id = Column(Integer, nullable=True)
Copy link

Copilot AI Mar 4, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

ResolutionEvidence.token_id appears to store the DB PK of resolution_proof_tokens (see usages that query ResolutionProofToken.id == evidence.token_id). It should be declared as a ForeignKey("resolution_proof_tokens.id") (and ideally indexed) to enforce referential integrity and make joins/queries safer.

Suggested change
token_id = Column(Integer, nullable=True)
token_id = Column(Integer, ForeignKey("resolution_proof_tokens.id"), nullable=True, index=True)

Copilot uses AI. Check for mistakes.
Copy link
Contributor

@cubic-dev-ai cubic-dev-ai bot Mar 4, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

P1: Missing ForeignKey constraint on token_id. This column is used as a reference to resolution_proof_tokens.id (confirmed by service code that assigns token.id to it and joins on it), but without the FK declaration the database won't enforce referential integrity — orphaned or invalid token_id values can be inserted.

Prompt for AI agents
Check if this issue is valid — if so, understand the root cause and fix it. At backend/models.py, line 268:

<comment>Missing `ForeignKey` constraint on `token_id`. This column is used as a reference to `resolution_proof_tokens.id` (confirmed by service code that assigns `token.id` to it and joins on it), but without the FK declaration the database won't enforce referential integrity — orphaned or invalid `token_id` values can be inserted.</comment>

<file context>
@@ -258,10 +265,21 @@ class ResolutionEvidence(Base):
     id = Column(Integer, primary_key=True, index=True)
     grievance_id = Column(Integer, ForeignKey("grievances.id"), nullable=False)
-    file_path = Column(String, nullable=False)
+    token_id = Column(Integer, nullable=True)
+    evidence_hash = Column(String, nullable=False)
+    gps_latitude = Column(Float, nullable=True)
</file context>
Suggested change
token_id = Column(Integer, nullable=True)
token_id = Column(Integer, ForeignKey("resolution_proof_tokens.id"), nullable=True)
Fix with Cubic

evidence_hash = Column(String, nullable=False)
Comment on lines +268 to +269
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Disambiguate and constrain token linkage fields.

Line 268 and Line 291 currently define two different token_id concepts (Integer vs String), and the String identifier is nullable. This is prone to data-integrity drift and ambiguous query behavior. Use an explicit FK name for evidence linkage and make token identifiers non-null.

Suggested schema adjustment
 class ResolutionEvidence(Base):
@@
-    token_id = Column(Integer, nullable=True)
+    resolution_proof_token_id = Column(Integer, ForeignKey("resolution_proof_tokens.id"), nullable=True, index=True)
@@
-    grievance = relationship("Grievance", back_populates="resolution_evidence")
+    grievance = relationship("Grievance", back_populates="resolution_evidence")
+    resolution_proof_token = relationship("ResolutionProofToken", back_populates="evidence_items")

 class ResolutionProofToken(Base):
@@
-    token_id = Column(String, unique=True, index=True)
+    token_id = Column(String, unique=True, index=True, nullable=False)
@@
-    grievance = relationship("Grievance", back_populates="resolution_tokens")
+    grievance = relationship("Grievance", back_populates="resolution_tokens")
+    evidence_items = relationship("ResolutionEvidence", back_populates="resolution_proof_token")

Also applies to: 291-291

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@backend/models.py` around lines 268 - 269, There are two different "token_id"
fields (one Integer, one String) and a nullable string token that create
ambiguity; change the evidence linkage token column to an explicit FK (e.g.,
token_id -> Column(Integer, ForeignKey('tokens.id',
name='fk_evidence_token_id'), nullable=False) on the evidence model referencing
the tokens table) and rename the other string identifier to a distinct name
(e.g., token_identifier or token_hash) and make it nullable=False as well so
both token identifiers are non-null and unambiguous; ensure evidence_hash
remains non-null and update any references/usages to the renamed string token
field in code and queries.

gps_latitude = Column(Float, nullable=True)
gps_longitude = Column(Float, nullable=True)
capture_timestamp = Column(DateTime, nullable=True)
device_fingerprint_hash = Column(String, nullable=True)
metadata_bundle = Column(JSON, nullable=True)
server_signature = Column(String, nullable=True)
verification_status = Column(Enum(VerificationStatus), default=VerificationStatus.PENDING)
Copy link

Copilot AI Mar 4, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

verification_status currently allows NULLs. Since the service logic treats verification status as a required state machine, consider setting nullable=False (and optionally indexing it if you commonly filter by status). This avoids records with an undefined verification state.

Suggested change
verification_status = Column(Enum(VerificationStatus), default=VerificationStatus.PENDING)
verification_status = Column(Enum(VerificationStatus), default=VerificationStatus.PENDING, nullable=False)

Copilot uses AI. Check for mistakes.

file_path = Column(String, nullable=True) # made true to match earlier schema that had it but didn't require it in tests
Copy link

Copilot AI Mar 4, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Inline comment # made true to match earlier schema... is confusing since this change makes the column nullable, not "true". Reword to clearly state the intention (e.g., "made nullable" / "optional for backward compatibility").

Suggested change
file_path = Column(String, nullable=True) # made true to match earlier schema that had it but didn't require it in tests
file_path = Column(String, nullable=True) # kept nullable to match earlier schema where this field was optional in tests

Copilot uses AI. Check for mistakes.
media_type = Column(String, default="image")
description = Column(Text, nullable=True)
uploaded_at = Column(DateTime, default=lambda: datetime.datetime.now(datetime.timezone.utc))
created_at = Column(DateTime, default=lambda: datetime.datetime.now(datetime.timezone.utc))

# Relationship
grievance = relationship("Grievance", back_populates="resolution_evidence")
Expand All @@ -270,10 +288,28 @@ class ResolutionProofToken(Base):
__tablename__ = "resolution_proof_tokens"
id = Column(Integer, primary_key=True, index=True)
grievance_id = Column(Integer, ForeignKey("grievances.id"), nullable=False)
token = Column(String, unique=True, index=True)
generated_at = Column(DateTime, default=lambda: datetime.datetime.now(datetime.timezone.utc))
expires_at = Column(DateTime, nullable=False)
token_id = Column(String, unique=True, index=True)
authority_email = Column(String, nullable=False)
geofence_latitude = Column(Float, nullable=False)
geofence_longitude = Column(Float, nullable=False)
geofence_radius_meters = Column(Float, nullable=False)
valid_from = Column(DateTime, nullable=False)
valid_until = Column(DateTime, nullable=False)
nonce = Column(String, nullable=False)
token_signature = Column(String, nullable=False)
Comment on lines +291 to +299
Copy link

Copilot AI Mar 4, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

ResolutionProofToken.token_id is implicitly nullable here. Since token_id is used as the primary lookup key and should never be NULL, make it nullable=False (and consider adding a DB-level constraint/server_default if needed). Note that unique=True does not prevent multiple NULLs on some DBs (and can cause confusing duplicates in SQLite).

Copilot uses AI. Check for mistakes.
is_used = Column(Boolean, default=False)
used_at = Column(DateTime, nullable=True)
generated_at = Column(DateTime, default=lambda: datetime.datetime.now(datetime.timezone.utc))

# Relationship
grievance = relationship("Grievance", back_populates="resolution_tokens")


class EvidenceAuditLog(Base):
    """Append-only audit trail of actions taken on resolution evidence.

    Each row records a single action (e.g. upload, verification step)
    against one ``ResolutionEvidence`` record.
    """
    __tablename__ = "evidence_audit_logs"
    id = Column(Integer, primary_key=True, index=True)
    # Evidence record this log entry refers to (FK to resolution_evidence.id).
    evidence_id = Column(Integer, ForeignKey("resolution_evidence.id"), nullable=False)
    # Short action name; required. Free-form string — presumably values like
    # "uploaded" / "verified"; confirm against the service layer.
    action = Column(String, nullable=False)
    # Optional human-readable details about the action.
    details = Column(String, nullable=True)
    # Email of the actor who performed the action; nullable for system actions.
    actor_email = Column(String, nullable=True)
    # Timezone-aware UTC timestamp set at insert time.
    timestamp = Column(DateTime, default=lambda: datetime.datetime.now(datetime.timezone.utc))
7 changes: 0 additions & 7 deletions backend/pothole_detection.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,13 +129,6 @@ def reset_model():
_model_loading_error = None
logger.info("Model singleton state has been reset.")

if _model is None:
with _model_lock:
if _model is None: # Double check inside lock
try:
_model = load_model()
except Exception:
pass
return _model

def detect_potholes(image_source):
Expand Down
10 changes: 8 additions & 2 deletions tests/test_captioning.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,16 +2,22 @@
from unittest.mock import patch, AsyncMock
from backend.main import app
import pytest
from io import BytesIO
from PIL import Image

@pytest.mark.asyncio
async def test_generate_description_endpoint():
# Mock the generate_image_caption function in 'backend.routers.detection' module
with patch("backend.routers.detection.generate_image_caption", new_callable=AsyncMock) as mock_caption:
with patch("backend.routers.detection._cached_generate_caption", new_callable=AsyncMock) as mock_caption:
mock_caption.return_value = "A photo of a pothole on the road"

with TestClient(app) as client:
# Create a dummy image
file_content = b"fake image content"
img = Image.new('RGB', (100, 100))
img_bytes = BytesIO()
img.save(img_bytes, format='JPEG')
file_content = img_bytes.getvalue()

files = {"image": ("test.jpg", file_content, "image/jpeg")}

response = client.post("/api/generate-description", files=files)
Expand Down
13 changes: 10 additions & 3 deletions tests/test_graffiti_endpoint.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,16 @@
from fastapi.testclient import TestClient
from unittest.mock import patch
from unittest.mock import patch, AsyncMock
from backend.main import app
import pytest
from io import BytesIO
from PIL import Image

client = TestClient(app)

@pytest.fixture
def mock_detect_graffiti():
# Patch where it is imported in backend.routers.detection
with patch("backend.routers.detection.detect_graffiti_art_clip") as mock:
with patch("backend.routers.detection._cached_detect_graffiti", new_callable=AsyncMock) as mock:
yield mock

@pytest.fixture
Expand All @@ -24,7 +26,12 @@ def test_detect_graffiti(mock_detect_graffiti, mock_validate_file):
]

# Simple dummy bytes
files = {"image": ("test.jpg", b"fake_image_bytes", "image/jpeg")}
img = Image.new('RGB', (100, 100))
img_bytes = BytesIO()
img.save(img_bytes, format='JPEG')
file_content = img_bytes.getvalue()

files = {"image": ("test.jpg", file_content, "image/jpeg")}

response = client.post("/api/detect-graffiti", files=files)

Expand Down
9 changes: 7 additions & 2 deletions tests/test_smart_scan.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,19 @@
from unittest.mock import patch, AsyncMock
from backend.main import app
import pytest
from io import BytesIO
from PIL import Image

def test_smart_scan_endpoint():
with TestClient(app) as client:
# Patch the function where it is imported in the ROUTER
with patch("backend.routers.detection.detect_smart_scan_clip", new_callable=AsyncMock) as mock_detect:
with patch("backend.routers.detection._cached_detect_smart_scan", new_callable=AsyncMock) as mock_detect:
mock_detect.return_value = {"category": "pothole", "confidence": 0.95}

file_content = b"fakeimagebytes"
img = Image.new('RGB', (100, 100))
img_bytes = BytesIO()
img.save(img_bytes, format='JPEG')
file_content = img_bytes.getvalue()

response = client.post(
"/api/detect-smart-scan",
Expand Down