diff --git a/.env.example b/.env.example index 461fa2f..c8ce8de 100644 --- a/.env.example +++ b/.env.example @@ -4,7 +4,8 @@ PROJECT_VERSION=1.0.0 ENVIRONMENT=development -FRONTEND_URL=http://syncdesk.pro +WEB_FRONTEND_URL=http://localhost:3000 +MOBILE_FRONTEND_URL=syncdesk:// # CORS settings CORS_ALLOW_ORIGINS=["https://app.example.com","http://localhost:3000"] @@ -20,8 +21,8 @@ POSTGRES_HOST=localhost POSTGRES_PORT=5432 # Configurações do MongoDB -MONGO_INITDB_ROOT_USERNAME=mongouser # option to be used with docker compose -MONGO_INITDB_ROOT_PASSWORD=mongopassword # to be used with docker compose +MONGO_INITDB_ROOT_USERNAME=mongouser +MONGO_INITDB_ROOT_PASSWORD=mongopassword MONGO_USER=mongouser MONGO_PASSWORD=mongopassword MONGO_HOST=localhost diff --git a/.github/workflows/deploy-staging.yaml b/.github/workflows/deploy-staging.yaml new file mode 100644 index 0000000..63882cc --- /dev/null +++ b/.github/workflows/deploy-staging.yaml @@ -0,0 +1,65 @@ +name: Deploy Staging +on: + push: + branches: [ "develop" ] + +env: + REGISTRY: ghcr.io + IMAGE_NAME: titus-system/syncdesk-api + +jobs: + build-and-push: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push Docker image + uses: docker/build-push-action@v5 + with: + context: . 
+ file: deploy/Dockerfile + push: true + tags: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:staging + + deploy: + needs: build-and-push + runs-on: ubuntu-latest + permissions: + contents: read + packages: read + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Copy compose files to Droplet + uses: appleboy/scp-action@v0.1.7 + with: + host: ${{ secrets.DROPLET_HOST }} + username: ${{ secrets.SSH_USERNAME }} + key: ${{ secrets.SSH_PRIVATE_KEY }} + target: /opt/syncdesk + source: docker-compose.yaml,deploy/docker-compose.staging.yaml,deploy/prometheus,deploy/alertmanager,deploy/loki,deploy/promtail,deploy/grafana + + - name: Deploy to Droplet + uses: appleboy/ssh-action@v1.0.3 + with: + host: ${{ secrets.DROPLET_HOST }} + username: ${{ secrets.SSH_USERNAME }} + key: ${{ secrets.SSH_PRIVATE_KEY }} + script: | + cd /opt/syncdesk + docker compose -f docker-compose.yaml -f deploy/docker-compose.staging.yaml pull api + docker compose -f docker-compose.yaml -f deploy/docker-compose.staging.yaml up -d --force-recreate diff --git a/Makefile b/Makefile index d04bf62..0c651b4 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,5 @@ -.PHONY: install run dev lint format typecheck test seed migrate makemigration pre-commit +.PHONY: install run dev lint format typecheck test seed migrate makemigration pre-commit up down logs + install: poetry install @@ -41,3 +42,12 @@ pre-commit: poetry run mypy app/ poetry run bandit -c pyproject.toml -r app/ poetry run pytest + +up: + docker compose up --build + +down: + docker compose down + +logs: + docker compose logs -f diff --git a/alembic/env.py b/alembic/env.py index 555bd2d..283482b 100644 --- a/alembic/env.py +++ b/alembic/env.py @@ -8,6 +8,9 @@ from app.db.postgres.base import Base import app.domains.auth.models # noqa: F401 — register models with Base.metadata +import app.domains.companies.models # noqa: F401 — register models with Base.metadata +import app.domains.products.models # noqa: F401 — register models 
with Base.metadata +import app.domains.notifications.models # noqa: F401 — register models with Base.metadata config = context.config if config.config_file_name is not None: diff --git a/alembic/versions/14eee043999d_adds_email_outbox.py b/alembic/versions/14eee043999d_adds_email_outbox.py new file mode 100644 index 0000000..a98bfa9 --- /dev/null +++ b/alembic/versions/14eee043999d_adds_email_outbox.py @@ -0,0 +1,54 @@ +"""adds_email_outbox + +Revision ID: 14eee043999d +Revises: 337ea87962d5 +Create Date: 2026-04-30 07:22:32.475373 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision: str = '14eee043999d' +down_revision: Union[str, Sequence[str], None] = '337ea87962d5' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('email_outbox', + sa.Column('id', sa.UUID(), nullable=False), + sa.Column('event_type', sa.String(length=64), nullable=False), + sa.Column('recipient', sa.String(length=320), nullable=False), + sa.Column('payload', postgresql.JSONB(astext_type=sa.Text()), nullable=False), + sa.Column('status', sa.Enum('PENDING', 'PROCESSING', 'SENT', 'RETRY', 'DEAD', name='email_outbox_status'), nullable=False), + sa.Column('attempts', sa.Integer(), nullable=False), + sa.Column('max_attempts', sa.Integer(), nullable=False), + sa.Column('last_error', sa.Text(), nullable=True), + sa.Column('next_attempt_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), + sa.Column('sent_at', sa.DateTime(), nullable=True), + sa.Column('locked_at', sa.DateTime(), nullable=True), + sa.Column('lock_owner', sa.String(length=128), nullable=True), + sa.PrimaryKeyConstraint('id', name=op.f('pk_email_outbox')) + ) + op.create_index('ix_email_outbox_event_type', 'email_outbox', ['event_type'], unique=False) + op.create_index('ix_email_outbox_recipient', 'email_outbox', ['recipient'], unique=False) + op.create_index('ix_email_outbox_status_next_attempt_at', 'email_outbox', ['status', 'next_attempt_at'], unique=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_index('ix_email_outbox_status_next_attempt_at', table_name='email_outbox') + op.drop_index('ix_email_outbox_recipient', table_name='email_outbox') + op.drop_index('ix_email_outbox_event_type', table_name='email_outbox') + op.drop_table('email_outbox') + # ### end Alembic commands ### diff --git a/alembic/versions/337ea87962d5_add_company_product.py b/alembic/versions/337ea87962d5_add_company_product.py new file mode 100644 index 0000000..3455420 --- /dev/null +++ b/alembic/versions/337ea87962d5_add_company_product.py @@ -0,0 +1,71 @@ +"""add_company_product + +Revision ID: 337ea87962d5 +Revises: 55fb7ad5d992 +Create Date: 2026-04-15 23:07:17.118432 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '337ea87962d5' +down_revision: Union[str, Sequence[str], None] = '55fb7ad5d992' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table('companies', + sa.Column('id', sa.UUID(), nullable=False), + sa.Column('legal_name', sa.String(length=255), nullable=False), + sa.Column('trade_name', sa.String(length=255), nullable=True), + sa.Column('tax_id', sa.String(length=14), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), + sa.Column('deleted_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id', name=op.f('pk_companies')) + ) + op.create_index(op.f('ix_companies_legal_name'), 'companies', ['legal_name'], unique=True) + op.create_index(op.f('ix_companies_tax_id'), 'companies', ['tax_id'], unique=True) + op.create_index(op.f('ix_companies_trade_name'), 'companies', ['trade_name'], unique=False) + op.create_table('products', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=127), nullable=False), + sa.Column('description', sa.String(length=500), nullable=True), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), + sa.Column('deleted_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id', name=op.f('pk_products')) + ) + op.create_table('company_products', + sa.Column('company_id', sa.UUID(), nullable=False), + sa.Column('product_id', sa.Integer(), nullable=False), + sa.Column('bought_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False), + sa.Column('support_until', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['company_id'], ['companies.id'], name=op.f('fk_company_products_company_id_companies')), + sa.ForeignKeyConstraint(['product_id'], ['products.id'], name=op.f('fk_company_products_product_id_products')), + sa.PrimaryKeyConstraint('company_id', 'product_id', name=op.f('pk_company_products')) + ) + op.add_column('users', sa.Column('company_id', sa.UUID(), nullable=True)) + op.create_index(op.f('ix_users_company_id'), 'users', ['company_id'], unique=False) + 
op.create_foreign_key(op.f('fk_users_company_id_companies'), 'users', 'companies', ['company_id'], ['id']) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint(op.f('fk_users_company_id_companies'), 'users', type_='foreignkey') + op.drop_index(op.f('ix_users_company_id'), table_name='users') + op.drop_column('users', 'company_id') + op.drop_table('company_products') + op.drop_table('products') + op.drop_index(op.f('ix_companies_trade_name'), table_name='companies') + op.drop_index(op.f('ix_companies_tax_id'), table_name='companies') + op.drop_index(op.f('ix_companies_legal_name'), table_name='companies') + op.drop_table('companies') + # ### end Alembic commands ### diff --git a/app/api/api_router.py b/app/api/api_router.py index bdc58c5..38f3601 100644 --- a/app/api/api_router.py +++ b/app/api/api_router.py @@ -7,11 +7,13 @@ from app.core.config import get_settings from app.core.dependencies import ResponseFactoryDep from app.domains.auth import auth_router, permission_router, role_router, user_router +from app.domains.companies import company_router from app.domains.live_chat import chat_router, conversation_router -from app.domains.ticket import ticket_router +from app.domains.products import product_router +from app.domains.ticket.routers import ticket_router from app.schemas.response import ErrorContent, GenericSuccessContent -from app.domains.chatbot.routers import router as chatbot_router +from app.domains.chatbot import chatbot_router api_router = APIRouter() @@ -51,4 +53,7 @@ async def root(response_factory: ResponseFactoryDep, request: Request) -> JSONRe api_router.include_router(ticket_router, prefix="/tickets", tags=["Tickets"]) -api_router.include_router(chatbot_router) \ No newline at end of file +api_router.include_router(chatbot_router, prefix="/chatbot", tags=["Chatbot URA"]) + +api_router.include_router(company_router, 
prefix="/companies", tags=["Companies"]) +api_router.include_router(product_router, prefix="/products", tags=["Products"]) diff --git a/app/core/background_tasks.py b/app/core/background_tasks.py index 16c98f8..3af5766 100644 --- a/app/core/background_tasks.py +++ b/app/core/background_tasks.py @@ -6,5 +6,12 @@ def global_background_tasks(pg_engine: AsyncEngine) -> list[asyncio.Task[None]]: - tasks: list[asyncio.Task[None]] = [asyncio.create_task(update_system_metrics(pg_engine))] + from app.core.dependencies import get_email_service + from app.domains.notifications.worker import run_email_outbox_worker + + email_strategy = get_email_service() + tasks: list[asyncio.Task[None]] = [ + asyncio.create_task(update_system_metrics(pg_engine)), + asyncio.create_task(run_email_outbox_worker(pg_engine, email_strategy)), + ] return tasks diff --git a/app/core/config.py b/app/core/config.py index d2c0312..dff9e36 100644 --- a/app/core/config.py +++ b/app/core/config.py @@ -19,8 +19,8 @@ def project_client_identifier(self) -> str: ENVIRONMENT: str = "development" - MOBILE_FRONTEND_URL: str = "syncdesk:/" - WEB_FRONTEND_URL: str = "http://localhost:3000" + MOBILE_FRONTEND_URL: str = "http://syncdesk.pro" + WEB_FRONTEND_URL: str = "http://syncdesk.pro" # CORS settings CORS_ALLOW_ORIGINS: list[str] = ["*"] @@ -135,6 +135,14 @@ def invite_token_timedelta(self) -> timedelta: RUN_RESEND_INTEGRATION_TESTS: bool = False RESEND_TEST_TO_EMAIL: str = "" + # Email Outbox + EMAIL_OUTBOX_ENABLED: bool = True + EMAIL_OUTBOX_POLL_SECONDS: int = 5 + EMAIL_OUTBOX_BATCH_SIZE: int = 50 + EMAIL_OUTBOX_MAX_ATTEMPTS: int = 5 + EMAIL_OUTBOX_BACKOFF_MAX_SECONDS: int = 900 + EMAIL_OUTBOX_WORKER_ID: str = "" + model_config = SettingsConfigDict(extra="allow", env_file=".env", env_file_encoding="utf-8") diff --git a/app/core/email/templates/reset_password_email.html b/app/core/email/templates/reset_password_email.html index ba322bc..daa2df4 100644 --- a/app/core/email/templates/reset_password_email.html 
+++ b/app/core/email/templates/reset_password_email.html @@ -1,233 +1,155 @@ - + + Redefinir Senha – SyncDesk - -
- - -
-
- - - -
- SyncDesk -
- - -
- -
- - - - - - -
- -

Redefinir sua senha

-

- Recebemos uma solicitação para redefinir a senha da conta associada a este endereço de e-mail. -

- -
-

Conta

- {{ user_email }} -
- - Redefinir Senha  ➤ - -

- Este link expira em {{ expiry_minutes|default(30) }} minutos. -

- -
- -

- Se o botão acima não funcionar, copie e cole o link abaixo no seu navegador:
- {{ reset_url }} -

- -
- -

- Se você não solicitou a redefinição de senha, ignore este e-mail. - Sua senha permanece a mesma e nenhuma alteração foi feita. -

- -
- - - - -
+ + + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + +
+ + + + + +
+ S + + SyncDesk +
+
+ Portal Administrativo +
+
 
+ + + + +
+ Cadeado +
+
 
+

Redefinir sua senha

+

Recebemos uma solicitação para redefinir
a senha associada à sua conta.

+
+ + + + + + +
+

Conta

+

{{ user_email }}

+
+ + +

+ Clique no botão abaixo para criar uma nova senha.
O link é válido por {{ expiry_minutes|default(30) }} minutos. +

+ + + + + + +
+ Redefinir Senha → +
+ + + + + + +
+

+ ⏱ Expira em {{ expiry_minutes|default(30) }} minutos +

+
+ + + + + + +
 
+ + +

+ Se o botão não funcionar, copie e cole o link abaixo:
+ {{ reset_url }} +

+ +
+ + + + + + +
+

+ 🔒 Se você não solicitou a redefinição de senha, ignore este e-mail. Sua senha permanece a mesma e nenhuma alteração foi feita. +

+
+ +
+

+ © {{ year|default(2025) }} SyncDesk – Todos os direitos reservados.
+ {% if support_email %} + Dúvidas? {{ support_email }} + {% endif %} +

+
+
+ \ No newline at end of file diff --git a/app/core/email/templates/welcome_email.html b/app/core/email/templates/welcome_email.html index c85bc01..7be8efa 100644 --- a/app/core/email/templates/welcome_email.html +++ b/app/core/email/templates/welcome_email.html @@ -1,270 +1,187 @@ - + + Bem-vindo ao SyncDesk - -
- -
-
- - - -
- SyncDesk -
- -
- -
- - - - -
- -

Bem-vindo ao SyncDesk, {{ user_name }}!

-

- Sua conta foi criada com sucesso. Use as credenciais abaixo para acessar o portal pela primeira vez. -

- -
-

E-mail de acesso

- {{ user_email }} -
- -
-

Senha temporária

- {{ one_time_password }} -
- -
- Atenção: esta senha é de uso único e você será obrigado a cadastrar uma nova senha no primeiro acesso. -
- - Acessar o Portal  ➤ - -
- -

- Se o botão acima não funcionar, copie e cole o link abaixo no seu navegador:
- {{ login_url }} -

- -
- -

- Se você não esperava este e-mail, entre em contato com o administrador do sistema imediatamente. -

- -
- - + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + +
+ + + + + +
+ S + + SyncDesk +
+
+ Portal Administrativo +
+
 
+ + + + + +
+ Usuário +
+
 
+

Bem-vindo ao SyncDesk,
{{ user_name }}!

+

Sua conta foi criada com sucesso.
Acesse o portal com as credenciais abaixo.

+
+ + + + + + +
+

E-mail de acesso

+

{{ user_email }}

+
+ + + + + + +
+

Senha Temporária

+

{{ one_time_password }}

+
+ + + + + + +
+

Atenção: esta senha é de uso único. Você será solicitado a criar uma nova senha no primeiro acesso ao portal.

+
+ + + + + + +
+

Como acessar

+ + + + + + +
+ 1 + Acesse o portal pelo botão abaixo
+ + + + + + +
+ 2 + Faça login com o e-mail e a senha temporária acima
+ + + + + + +
+ 3 + Defina uma nova senha segura para continuar
+
+ + + + + + +
+ Acessar o Portal → +
+ + + + + + +
 
+ + +

+ Se o botão não funcionar, copie e cole o link abaixo:
+ {{ login_url }} +

+
+

+ Se você não esperava este e-mail, entre em contato com o administrador imediatamente. +

+ +
+

+ © {{ year|default(2025) }} SyncDesk – Todos os direitos reservados.
+ {% if support_email %} + Dúvidas? {{ support_email }} + {% endif %} +

+
+
-
\ No newline at end of file diff --git a/app/core/event_dispatcher/README.md b/app/core/event_dispatcher/README.md new file mode 100644 index 0000000..f7e7405 --- /dev/null +++ b/app/core/event_dispatcher/README.md @@ -0,0 +1,321 @@ +# Event Dispatcher + +Asynchronous in-process communication between domains via internal events. + +## Problem + +Some business actions trigger consequences in other domains. Direct service injection creates growing coupling, circular dependencies, and responsibility violations. The Event Dispatcher decouples the emitter from consumers: whoever publishes the event does not know (and does not need to know) who reacts. + +## When to use events vs. direct injection + +| Scenario | Mechanism | Example | +| --- | --- | --- | +| The caller **needs the result** to proceed | Service injection | Chatbot queries `UserService` to validate user existence | +| The caller **just notifies something happened** | Event | Finished triage triggers ticket creation | + +## Structure + +``` +app/core/event_dispatcher/ +├── __init__.py # Re-exports: EventDispatcher, get_event_dispatcher, EventDispatcherDep +├── enums.py # AppEvent enum (event catalog) +├── schemas.py # DispatcherSchema base, typed payloads, EVENT_PAYLOAD_MAP +├── exceptions.py # EventSchemaError, InvalidHandlerError +├── decorators.py # @event_handler decorator +├── metrics.py # Prometheus counters and histograms +└── event_dispatcher.py # EventDispatcher (subscribe, publish), get_event_dispatcher +``` + +## Public API + +### `EventDispatcher` + +```python +from app.core.event_dispatcher import EventDispatcher, get_event_dispatcher + +dispatcher = get_event_dispatcher() # singleton via @lru_cache +``` + +For FastAPI route injection, use `EventDispatcherDep`: + +```python +from app.core.event_dispatcher import EventDispatcherDep + +@router.post("/tickets/{ticket_id}/close") +async def close_ticket(dispatcher: EventDispatcherDep): + ... 
+``` + +#### `subscribe(event: AppEvent, handler: EventHandler) -> None` + +Subscribes a handler to react to an event. Subscription is idempotent — subscribing the same handler twice has no effect. + +Validates at registration time that: +1. The handler is decorated with `@event_handler` — raises `InvalidHandlerError` otherwise. +2. The handler's declared payload types are compatible with the event's expected schema — raises `InvalidHandlerError` on mismatch. + +This ensures wiring errors are caught **at application startup**, not at runtime. + +#### `publish(event: AppEvent, payload: DispatcherSchema) -> None` + +Emits an event. Validates that the payload matches the expected type via `EVENT_PAYLOAD_MAP`. Each subscribed handler is fired as an independent `asyncio.Task` (fire-and-forget). + +```python +from app.core.event_dispatcher.enums import AppEvent +from app.core.event_dispatcher.schemas import TriageFinishedEventSchema + +await dispatcher.publish( + AppEvent.TRIAGE_FINISHED, + TriageFinishedEventSchema( + client_id=client_id, + client_email="user@example.com", + client_name="User", + attendance_id=attendance_id, + ticket_type="issue", + ticket_criticality="high", + product_name="Product A", + ticket_description="Error generating invoice", + ), +) +``` + +### `@event_handler` decorator + +All handlers must be decorated with `@event_handler`. The decorator: +- Declares which payload types the handler accepts (used by `subscribe` for validation). +- Wraps the handler body in `try/except` with structured logging — handlers do not need manual error handling. +- Raises `EventSchemaError` at call time if the payload type does not match the declared types. + +```python +from app.core.event_dispatcher.decorators import event_handler +from app.core.event_dispatcher.schemas import TriageFinishedEventSchema + +@event_handler(TriageFinishedEventSchema) +async def on_triage_finished(self, payload: TriageFinishedEventSchema) -> None: + ... 
+``` + +### Payload validation + +`publish` validates that the payload is an instance of the expected schema for the event via `EVENT_PAYLOAD_MAP`. If the type does not match, it raises `EventSchemaError`: + +```python +# This raises EventSchemaError: +await dispatcher.publish(AppEvent.TRIAGE_FINISHED, TicketClosedEventSchema(...)) +``` + +Field validation is performed by Pydantic at schema construction time, before `publish` is called. + +## Event catalog + +### `triage.finished` + +Emitter: `ChatbotService` + +Payload: `TriageFinishedEventSchema` + +| Field | Type | Description | +| --- | --- | --- | +| `client_id` | `UUID` | Client ID in the auth domain | +| `client_email` | `str` | Client email | +| `client_name` | `str` | Client name | +| `company_id` | `UUID \| None` | Company ID (optional) | +| `company_name` | `str \| None` | Company name (optional) | +| `attendance_id` | `PydanticObjectId` | Attendance/triage ID | +| `ticket_type` | `str` | Ticket type (`issue`, `access`, `new_feature`) | +| `ticket_criticality` | `str` | Criticality (`high`, `medium`, `low`) | +| `product_name` | `str` | Product name | +| `ticket_description` | `str` | Problem description | + +Listeners: +- **TicketListener** — creates a ticket and publishes `ticket.created` + +### `ticket.created` + +Emitter: `TicketListener` (in reaction to `triage.finished`) + +Payload: `TicketCreatedEventSchema` + +| Field | Type | Description | +| --- | --- | --- | +| `ticket_id` | `PydanticObjectId` | Newly created ticket ID | +| `client_id` | `UUID` | Client ID | +| `agent_id` | `UUID \| None` | Assigned agent (None if awaiting assignment) | + +Listeners: +- **ConversationListener** — opens the first support conversation + +### `ticket.assignee_updated` + +Emitter: `TicketService` (assign or transfer) + +Payload: `TicketAssigneeUpdatedEventSchema` + +| Field | Type | Description | +| --- | --- | --- | +| `ticket_id` | `PydanticObjectId` | Ticket ID | +| `client_id` | `UUID` | Client ID | +| 
`new_agent_id` | `UUID` | New responsible agent | +| `reason` | `str \| None` | Reassignment reason | + +Listeners: +- **ConversationListener** — updates participants in the active conversation + +### `ticket.escalated` + +Emitter: `TicketService` + +Payload: `TicketEscalatedEventSchema` + +| Field | Type | Description | +| --- | --- | --- | +| `ticket_id` | `PydanticObjectId` | Ticket ID | +| `client_id` | `UUID` | Client ID | +| `new_agent_id` | `UUID \| None` | Agent at the new level (None if pending) | +| `new_agent_name` | `str \| None` | New agent name | +| `new_level` | `str` | Target support level | +| `transfer_reason` | `str \| None` | Escalation reason | + +Listeners: +- **ConversationListener** — opens a new conversation linked to the ticket + +### `ticket.status_updated` + +Emitter: `TicketService` + +Payload: `TicketStatusUpdatedEventSchema` + +| Field | Type | Description | +| --- | --- | --- | +| `ticket_id` | `PydanticObjectId` | Ticket ID | +| `new_status` | `str` | New ticket status | + +### `ticket.closed` + +Emitter: `TicketService` (when status transitions to `finished`) + +Payload: `TicketClosedEventSchema` + +| Field | Type | Description | +| --- | --- | --- | +| `ticket_id` | `PydanticObjectId` | Ticket ID | +| `triage_id` | `PydanticObjectId` | Original triage ID | +| `client_id` | `UUID` | Client ID | + +Listeners: +- **ConversationListener** — closes the active conversation +- **ChatbotListener** — closes the attendance and requests evaluation + +## Chained event flow + +``` +triage.finished + └─ TicketListener creates ticket + └─ publishes ticket.created + ├─ ConversationListener creates conversation + └─ (future) NotificationListener notifies agent +``` + +The conversation depends on `ticket_id`, which only exists after ticket creation. Therefore `ConversationListener` reacts to `ticket.created`, not `triage.finished`. 
+ +## Listener registration + +Listeners are registered during the application lifespan, after database initialization. Each domain exposes a `register_*_listener(dispatcher)` function that builds its own dependencies internally: + +```python +# app/domains/live_chat/listeners.py +def register_conversation_listener(dispatcher: EventDispatcher) -> None: + repo = ConversationRepository(mongo_db.get_db()) + service = ConversationService(repo) + listener = ConversationListener(service) + + dispatcher.subscribe(AppEvent.TICKET_CREATED, listener.on_ticket_created) + dispatcher.subscribe(AppEvent.TICKET_CLOSED, listener.on_ticket_closed) +``` + +`main.py` orchestrates registration calls via `register_app_events_listeners(dispatcher)`: + +```python +# app/main.py +def register_app_events_listeners(dispatcher: EventDispatcher) -> None: + register_conversation_listener(dispatcher) + # future domains register here +``` + +## Listener example + +A listener is a class that lives in the domain's `listeners.py`. It receives domain services via constructor injection and exposes async handler methods — one per event it reacts to. Each handler receives the typed payload as its only argument. 
+ +```python +# app/domains/ticket/listeners.py +from app.core.event_dispatcher import EventDispatcher +from app.core.event_dispatcher.decorators import event_handler +from app.core.event_dispatcher.enums import AppEvent +from app.core.event_dispatcher.schemas import TriageFinishedEventSchema, TicketCreatedEventSchema + +from .schemas import CreateTicketDTO +from .services import TicketService + + +class TicketListener: + def __init__(self, ticket_service: TicketService, dispatcher: EventDispatcher) -> None: + self.service = ticket_service + self.dispatcher = dispatcher + + @event_handler(TriageFinishedEventSchema) + async def on_triage_finished(self, payload: TriageFinishedEventSchema) -> None: + dto = CreateTicketDTO( + triage_id=payload.attendance_id, + type=payload.ticket_type, + criticality=payload.ticket_criticality, + product=payload.product_name, + description=payload.ticket_description, + client_id=payload.client_id, + ) + ticket = await self.service.create(dto) + + await self.dispatcher.publish( + AppEvent.TICKET_CREATED, + TicketCreatedEventSchema( + ticket_id=ticket.id, + client_id=payload.client_id, + ), + ) +``` + +Key points: +- The `@event_handler` decorator validates the payload type and wraps the body in `try/except` with structured logging. Handlers do not need manual error handling. +- The listener receives the `dispatcher` to publish chained events (`ticket.created`). +- Payload field access is typed: `payload.attendance_id`, not `kwargs["attendance_id"]`. 
+ +## Metrics + +The dispatcher exposes Prometheus metrics via `app/core/event_dispatcher/metrics.py`: + +| Metric | Type | Labels | Description | +| --- | --- | --- | --- | +| `events_published_total` | Counter | `event` | Number of times each event was published | +| `event_handler_failures_total` | Counter | `handler` | Number of times each handler failed | +| `event_handler_duration_seconds` | Histogram | `handler` | Handler execution latency | + +`events_published_total` is recorded in `publish`. Handler failures and duration are recorded by the `@event_handler` decorator. + +## Rules + +- Services never import models or repositories from other domains. +- The dispatcher is fire-and-forget: `publish` schedules each handler as an `asyncio.Task` and returns immediately. +- All handlers must use the `@event_handler` decorator. `subscribe` rejects undecorated handlers with `InvalidHandlerError`. +- Handler subscription is idempotent — subscribing the same handler to the same event twice has no effect. +- Each handler is responsible for handling its own exceptions. The `@event_handler` decorator catches and logs exceptions automatically. An unhandled exception does not affect the emitter or other handlers. +- Listeners live in `listeners.py` inside each domain. +- Event names follow the pattern `{domain}.{past_action}`. +- Every event payload must be documented in this file when created. + +## Adding a new event + +1. Add the member to the `AppEvent` enum in `enums.py`. +2. Create the corresponding schema (inherits from `DispatcherSchema`) in `schemas.py`. +3. Add the entry to `EVENT_PAYLOAD_MAP`. +4. Document the event in this README with emitter, payload, and listeners. +5. Create the handler in the `listeners.py` of the reacting domain. +6. Register the handler in the domain's `register_*_listener`. 
diff --git a/app/core/event_dispatcher/__init__.py b/app/core/event_dispatcher/__init__.py new file mode 100644 index 0000000..0d796a8 --- /dev/null +++ b/app/core/event_dispatcher/__init__.py @@ -0,0 +1,11 @@ +from .decorators import event_handler +from .enums import AppEvent +from .event_dispatcher import EventDispatcher, EventDispatcherDep, get_event_dispatcher + +__all__ = [ + "EventDispatcher", + "get_event_dispatcher", + "EventDispatcherDep", + "event_handler", + "AppEvent", +] diff --git a/app/core/event_dispatcher/decorators.py b/app/core/event_dispatcher/decorators.py new file mode 100644 index 0000000..871e95b --- /dev/null +++ b/app/core/event_dispatcher/decorators.py @@ -0,0 +1,77 @@ +import time +from collections.abc import Callable, Coroutine +from functools import wraps +from typing import Any, ParamSpec + +from app.core.event_dispatcher.exceptions import EventSchemaError +from app.core.event_dispatcher.metrics import ( + event_handler_duration_seconds, + event_handler_failures_total, +) +from app.core.event_dispatcher.schemas import DispatcherSchema +from app.core.logger import get_logger + +logger = get_logger() + +P = ParamSpec("P") + + +def event_handler( + *payload_types: type[DispatcherSchema], +) -> Callable[ + [Callable[P, Coroutine[Any, Any, None]]], + Callable[P, Coroutine[Any, Any, None]], +]: + """Required decorator for all event handlers registered via ``EventDispatcher.subscribe``. + + Responsibilities: + - Declares which ``DispatcherSchema`` subtypes this handler accepts. + ``subscribe`` uses this metadata to validate wiring at startup. + - Validates the payload type at call time, raising ``EventSchemaError`` on mismatch. + - Wraps the handler body in ``try/except`` with structured logging, + so individual handler failures are logged but never propagate. + + Args: + *payload_types: One or more ``DispatcherSchema`` subclasses that this handler accepts. 
+ + Example:: + + @event_handler(TriageFinishedEventSchema) + async def on_triage_finished(self, payload: TriageFinishedEventSchema) -> None: ... + """ + + def decorator( + fn: Callable[P, Coroutine[Any, Any, None]], + ) -> Callable[P, Coroutine[Any, Any, None]]: + @wraps(fn) + async def wrapper(*args: P.args, **kwargs: P.kwargs) -> None: + start_time = time.perf_counter() + + payload = args[-1] if args else kwargs.get("payload") + if payload_types and not isinstance(payload, payload_types): + expected = ", ".join(t.__name__ for t in payload_types) + raise EventSchemaError( + f"{fn.__qualname__} expected ({expected}), got {type(payload).__name__}" + ) + + try: + await fn(*args, **kwargs) + elapsed = time.perf_counter() - start_time + event_handler_duration_seconds.labels(handler=fn.__qualname__).observe(elapsed) + except Exception: + event_handler_failures_total.labels(handler=fn.__qualname__).inc() + + logger.exception( + "Event handler failed: %s", + fn.__qualname__, + extra={ + "payload": payload.model_dump() + if isinstance(payload, DispatcherSchema) + else None + }, + ) + + wrapper.__event_payload_types__ = payload_types # type: ignore[attr-defined] + return wrapper + + return decorator diff --git a/app/core/event_dispatcher/enums.py b/app/core/event_dispatcher/enums.py new file mode 100644 index 0000000..db2b3aa --- /dev/null +++ b/app/core/event_dispatcher/enums.py @@ -0,0 +1,12 @@ +from enum import Enum + + +class AppEvent(Enum): + TRIAGE_FINISHED = "triage.finished" + TICKET_CREATED = "ticket.created" + TICKET_STATUS_UPDATED = "ticket.status_updated" + TICKET_ESCALATED = "ticket.escalated" + TICKET_ASSIGNEE_UPDATED = "ticket.assignee_updated" + TICKET_CLOSED = "ticket.closed" + USER_WELCOME_INVITE = "auth.welcome_invite" + USER_PASSWORD_RESET = "auth.password_reset" diff --git a/app/core/event_dispatcher/event_dispatcher.py b/app/core/event_dispatcher/event_dispatcher.py new file mode 100644 index 0000000..4e4847d --- /dev/null +++ 
b/app/core/event_dispatcher/event_dispatcher.py @@ -0,0 +1,120 @@ +import asyncio +from collections.abc import Callable, Coroutine, Mapping +from functools import lru_cache +from typing import Annotated, Any + +from fastapi import Depends + +from app.core.event_dispatcher.enums import AppEvent +from app.core.event_dispatcher.exceptions import EventSchemaError, InvalidHandlerError +from app.core.event_dispatcher.schemas import EVENT_PAYLOAD_MAP, DispatcherSchema +from app.core.logger import Logger, get_logger + +from .metrics import events_published_total + +EventHandler = Callable[..., Coroutine[Any, Any, None]] + + +class EventDispatcher: + """Asynchronous in-process event bus that decouples domain emitters from consumers. + + Handlers are fired as independent ``asyncio.Task`` instances (fire-and-forget). + Use ``get_event_dispatcher()`` to obtain the singleton instance. + """ + + def __init__( + self, payload_map: Mapping[AppEvent, type[DispatcherSchema]], logger: Logger + ) -> None: + self._handlers: dict[AppEvent, list[EventHandler]] = {} + self._payload_map = payload_map + self.logger = logger + + def unsubscribe(self, event: AppEvent, handler: EventHandler) -> None: + """Remove a previously registered handler. No-op if not found.""" + if event in self._handlers: + self._handlers[event] = [h for h in self._handlers[event] if h is not handler] + + def subscribe(self, event: AppEvent, handler: EventHandler) -> None: + """Register a handler to react to an event. + + Subscription is idempotent — subscribing the same handler twice has no effect. + + Raises: + InvalidHandlerError: If the handler is not decorated with ``@event_handler``, + or if its declared payload types are incompatible with the event's schema. 
+ """ + handler_schema = getattr(handler, "__event_payload_types__", None) + if handler_schema is None: + self.logger.error( + "Handler '%s' rejected: missing @event_handler decorator", + handler.__name__, + ) + raise InvalidHandlerError(f"{handler.__name__} must be decorated with @event_handler") + + event_schema = self._payload_map[event] + if event_schema not in handler_schema: + expected = ", ".join(t.__name__ for t in handler_schema) + self.logger.error( + "Handler '%s' rejected: expects (%s), but event '%s' emits %s", + handler.__name__, + expected, + event.value, + event_schema.__name__, + ) + raise InvalidHandlerError( + f"Handler '{handler.__name__}' expects ({expected}), " + f"but event '{event.value}' emits {event_schema.__name__}" + ) + + if event not in self._handlers: + self._handlers[event] = [handler] + else: + if handler not in self._handlers[event]: + self._handlers[event].append(handler) + + self.logger.info( + "Handler '%s' subscribed to '%s'", + handler.__name__, + event.value, + ) + + async def publish(self, event: AppEvent, payload: DispatcherSchema) -> None: + """Emit an event to all subscribed handlers. + + Validates that ``payload`` is an instance of the schema mapped to ``event`` + in ``EVENT_PAYLOAD_MAP``. Each handler runs as an independent ``asyncio.Task``. + + Raises: + EventSchemaError: If the payload type does not match the expected schema. 
+ """ + expected = self._payload_map[event] + if not isinstance(payload, expected): + self.logger.error( + "Publish rejected: '%s' expects %s, received %s", + event.value, + expected.__name__, + type(payload).__name__, + ) + raise EventSchemaError( + f"{event.value} expects {expected.__name__}, received {type(payload).__name__}" + ) + + handlers = self._handlers.get(event, []) + self.logger.info( + "Publishing '%s' to %d handler(s), payload=%s", + event.value, + len(handlers), + type(payload).__name__, + ) + events_published_total.labels(event=event.value).inc() + + for handler in handlers: + asyncio.create_task(handler(payload)) + + +@lru_cache +def get_event_dispatcher() -> EventDispatcher: + return EventDispatcher(EVENT_PAYLOAD_MAP, get_logger("app.event_dispatcher")) + + +EventDispatcherDep = Annotated[EventDispatcher, Depends(get_event_dispatcher)] diff --git a/app/core/event_dispatcher/exceptions.py b/app/core/event_dispatcher/exceptions.py new file mode 100644 index 0000000..c7c6000 --- /dev/null +++ b/app/core/event_dispatcher/exceptions.py @@ -0,0 +1,13 @@ +from typing import Any + + +class EventSchemaError(TypeError): + def __init__(self, message: str | None = None, errors: list[dict[str, Any]] | None = None): + self.errors = errors + super().__init__(message) + + +class InvalidHandlerError(TypeError): + def __init__(self, message: str | None = None, errors: list[dict[str, Any]] | None = None): + self.errors = errors + super().__init__(message) diff --git a/app/core/event_dispatcher/metrics.py b/app/core/event_dispatcher/metrics.py new file mode 100644 index 0000000..a9eace7 --- /dev/null +++ b/app/core/event_dispatcher/metrics.py @@ -0,0 +1,13 @@ +from app.core.metrics.prometheus import prometheus + +events_published_total = prometheus.register_counter( + "events_published_total", "Number of times each event was published", ["event"] +) + +event_handler_failures_total = prometheus.register_counter( + "event_handler_failures_total", "Number of times each 
handler failed", ["handler"] +) + +event_handler_duration_seconds = prometheus.register_histogram( + "event_handler_duration_seconds", "Handlers execution latency", ["handler"] +) diff --git a/app/core/event_dispatcher/schemas.py b/app/core/event_dispatcher/schemas.py new file mode 100644 index 0000000..faf5dd3 --- /dev/null +++ b/app/core/event_dispatcher/schemas.py @@ -0,0 +1,140 @@ +from uuid import UUID + +from beanie import PydanticObjectId +from pydantic import BaseModel + +from app.core.event_dispatcher.enums import AppEvent +from app.domains.ticket.models import TicketCriticality, TicketStatus, TicketType + + +class DispatcherSchema(BaseModel): + """Base class for all event payloads. Every event schema must inherit from this.""" + + pass + + +class TriageFinishedEventSchema(DispatcherSchema): + """Emitted by ``ChatbotService`` when the triage flow completes. + + Listeners: + - ``TicketListener`` - creates a ticket and publishes ``ticket.created``. + """ + + client_id: UUID + client_email: str + client_name: str + company_id: UUID | None = None + company_name: str | None = None + attendance_id: PydanticObjectId + ticket_type: TicketType + ticket_criticality: TicketCriticality + product_name: str + ticket_description: str + + +class TicketCreatedEventSchema(DispatcherSchema): + """Emitted by ``TicketListener`` after a ticket is created (in reaction to ``triage.finished``). + + Listeners: + - ``ConversationListener`` - opens the first support conversation. + """ + + ticket_id: PydanticObjectId + client_id: UUID + agent_id: UUID | None = None + + +class TicketAssigneeUpdatedEventSchema(DispatcherSchema): + """Emitted by ``TicketService`` when a ticket is assigned or transferred to another agent. + + Listeners: + - ``ConversationListener`` - updates participants in the active conversation. 
+ """ + + ticket_id: PydanticObjectId + client_id: UUID + new_agent_id: UUID + reason: str | None = None + + +class TicketStatusUpdatedEventSchema(DispatcherSchema): + """Emitted by ``TicketService`` when a ticket's status changes. + + Listeners: + - ``ConversationListener`` - updates message history with a system message + - ``ChatbotService`` - Updates attendance status + + """ + + ticket_id: PydanticObjectId + new_status: TicketStatus + + +class TicketEscalatedEventSchema(DispatcherSchema): + """Emitted by ``TicketService`` when a ticket is escalated to a higher support level. + + Listeners: + - ``ConversationListener`` - opens a new conversation linked to the ticket. + """ + + ticket_id: PydanticObjectId + client_id: UUID + new_agent_id: UUID | None = None + new_agent_name: str | None = None + new_level: str + transfer_reason: str | None = None + + +class TicketClosedEventSchema(DispatcherSchema): + """Emitted by ``TicketService`` when a ticket transitions to ``finished``. + + Listeners: + - ``ConversationListener`` - closes the active conversation. + - ``ChatbotListener`` - closes the attendance and requests evaluation. + """ + + ticket_id: PydanticObjectId + triage_id: PydanticObjectId + client_id: UUID + + +class WelcomeInviteEventSchema(DispatcherSchema): + """Emitted by AuthService when an admin registers a new user. + + Listeners: + - EmailOutboxListener - enqueues a welcome invite email. + """ + + user_id: UUID + user_name: str + user_email: str + roles: list[str] + raw_token: str + one_time_password: str + max_attempts: int + + +class PasswordResetEventSchema(DispatcherSchema): + """Emitted by PasswordService when a user requests a password reset. + + Listeners: + - EmailOutboxListener - enqueues a password reset email. 
+ """ + + user_id: UUID + user_email: str + roles: list[str] + raw_token: str + max_attempts: int + + +EVENT_PAYLOAD_MAP: dict[AppEvent, type[DispatcherSchema]] = { + AppEvent.TRIAGE_FINISHED: TriageFinishedEventSchema, + AppEvent.TICKET_ASSIGNEE_UPDATED: TicketAssigneeUpdatedEventSchema, + AppEvent.TICKET_ESCALATED: TicketEscalatedEventSchema, + AppEvent.TICKET_CLOSED: TicketClosedEventSchema, + AppEvent.TICKET_CREATED: TicketCreatedEventSchema, + AppEvent.TICKET_STATUS_UPDATED: TicketStatusUpdatedEventSchema, + AppEvent.USER_WELCOME_INVITE: WelcomeInviteEventSchema, + AppEvent.USER_PASSWORD_RESET: PasswordResetEventSchema, +} diff --git a/app/core/exceptions.py b/app/core/exceptions.py index d844651..4f0a710 100644 --- a/app/core/exceptions.py +++ b/app/core/exceptions.py @@ -31,6 +31,11 @@ def __init__( def register_exception_handlers(app: FastAPI) -> None: logger = get_logger("app.core.exceptions") + @app.exception_handler(AppHTTPException) + async def app_http_exception_handler(request: Request, exc: AppHTTPException) -> JSONResponse: + response_factory = ResponseFactory(request) + return response_factory.error(exc) + @app.exception_handler(StarletteHTTPException) async def http_exception_handler(request: Request, exc: StarletteHTTPException) -> JSONResponse: app_http_exc = AppHTTPException( diff --git a/app/core/schemas.py b/app/core/schemas.py index 84c9a1e..0052877 100644 --- a/app/core/schemas.py +++ b/app/core/schemas.py @@ -3,3 +3,10 @@ class BaseDTO(BaseModel): model_config = {"extra": "forbid"} + + +class PaginatedItems[T](BaseModel): + total: int + page: int + limit: int + items: list[T] diff --git a/app/core/security.py b/app/core/security.py index c1dcf38..68b23c2 100644 --- a/app/core/security.py +++ b/app/core/security.py @@ -84,7 +84,7 @@ def calculates_expiration_date(self, token_type: TokenType) -> datetime: raise ValueError("Invalid token type") def create_token( - self, user_id: UUID, roles_names: list[str], session_id: UUID, token_type: 
TokenType + self, user_id: UUID, roles_names: list[str], session_id: UUID, token_type: TokenType, company_id: UUID | None = None ) -> str: token_payload: dict[str, Any] = { "sub": str(user_id), @@ -95,6 +95,7 @@ def create_token( "aud": settings.project_client_identifier, "type": token_type.value, "sid": str(session_id), + "company_id": str(company_id) if company_id else None, } token: str = jwt.encode( # pyright: ignore token_payload, self.secret_key(token_type), algorithm=self.algorithm @@ -118,11 +119,11 @@ def decode_token(self, token: str, token_type: TokenType) -> dict[str, Any]: except Exception: raise ValueError("Invalid token") from None - def create_access_token(self, user_id: UUID, roles_names: list[str], session_id: UUID) -> str: - return self.create_token(user_id, roles_names, session_id, self.TokenType.ACCESS) + def create_access_token(self, user_id: UUID, roles_names: list[str], session_id: UUID, company_id: UUID | None = None) -> str: + return self.create_token(user_id, roles_names, session_id, self.TokenType.ACCESS, company_id) - def create_refresh_token(self, user_id: UUID, roles_names: list[str], session_id: UUID) -> str: - return self.create_token(user_id, roles_names, session_id, self.TokenType.REFRESH) + def create_refresh_token(self, user_id: UUID, roles_names: list[str], session_id: UUID, company_id: UUID | None = None) -> str: + return self.create_token(user_id, roles_names, session_id, self.TokenType.REFRESH, company_id) def decode_access_token(self, token: str) -> dict[str, Any]: token_payload = self.decode_token(token, self.TokenType.ACCESS) @@ -138,4 +139,4 @@ def decode_refresh_token(self, token: str) -> dict[str, Any]: def hash_token(self, token: str) -> str: hashed_token: str = self.pwd_context.hash(token) - return hashed_token + return hashed_token \ No newline at end of file diff --git a/app/domains/auth/dependencies.py b/app/domains/auth/dependencies.py index 4bc8e50..df7b437 100644 --- a/app/domains/auth/dependencies.py +++ 
b/app/domains/auth/dependencies.py @@ -9,6 +9,7 @@ PasswordSecurityDep, ResetTokenSecurityDep, ) +from app.core.event_dispatcher import EventDispatcherDep from app.core.exceptions import AppHTTPException from app.core.logger import user_id_ctx from app.db.postgres.dependencies import PgSessionDep @@ -38,9 +39,6 @@ bearer_scheme = HTTPBearer() -# ============================================================ -# Repositories -# ============================================================ def get_role_repository(db: PgSessionDep) -> RoleRepository: return RoleRepository(db) @@ -61,9 +59,6 @@ def get_password_reset_token_repository(db: PgSessionDep) -> PasswordResetTokenR return PasswordResetTokenRepository(db) -# ============================================================ -# Services -# ============================================================ def get_role_service( role_repo: Annotated[RoleRepository, Depends(get_role_repository)], ) -> RoleService: @@ -98,6 +93,7 @@ def get_password_service( password_security: PasswordSecurityDep, email_strategy: EmailServiceDep, reset_token_security: ResetTokenSecurityDep, + dispatcher: EventDispatcherDep, ) -> PasswordService: return PasswordService( user_service=user_service, @@ -105,6 +101,7 @@ def get_password_service( password_security=password_security, email_strategy=email_strategy, reset_token_security=reset_token_security, + dispatcher=dispatcher, ) @@ -115,6 +112,7 @@ def get_auth_service( jwt_service: JWTServiceDep, password_security: PasswordSecurityDep, password_service: Annotated[PasswordService, Depends(get_password_service)], + dispatcher: EventDispatcherDep, ) -> AuthService: return AuthService( user_service=user_service, @@ -123,6 +121,7 @@ def get_auth_service( password_security=password_security, role_service=role_service, password_service=password_service, + dispatcher=dispatcher, ) @@ -149,38 +148,62 @@ async def get_user_compliance( ) -> UserCompliance: user = user_session[0] return UserCompliance( - 
must_accept_terms=user.must_accept_terms, must_change_password=user.must_change_password + must_accept_terms=user.must_accept_terms, + must_change_password=user.must_change_password, ) -def _extract_bearer_token(authorization: str | None) -> str: +def _extract_bearer_token(authorization: str | None) -> str | None: if not authorization: - raise WebSocketException(code=1008, reason="Missing Authorization header") + return None + scheme, _, token = authorization.partition(" ") + if scheme.lower() != "bearer" or not token: - raise WebSocketException(code=1008, reason="Invalid Authorization header") - return token + return None + + return token.strip() + + +def _extract_token_from_ws_subprotocols(subprotocols: str | None) -> str | None: + if not subprotocols: + return None + + parts = [part.strip().strip('"') for part in subprotocols.split(",")] + + for index, part in enumerate(parts): + if part.lower() == "access_token" and len(parts) > index + 1: + token = parts[index + 1].strip() + return token or None + + return None + + +def _extract_ws_access_token(ws: WebSocket) -> str: + token = _extract_token_from_ws_subprotocols( + ws.headers.get("sec-websocket-protocol") + ) + + if token: + return token + + token = _extract_bearer_token(ws.headers.get("authorization")) + + if token: + return token + + raise WebSocketException( + code=1008, + reason="Missing WebSocket access token", + ) async def get_current_user_session_ws( ws: WebSocket, service: Annotated[AuthService, Depends(get_auth_service)], ) -> tuple[UserWithRoles, Session]: - # Extract from custom subprotocol "access_token, " since browsers block auth headers - token = None - subprotocols = ws.headers.get("Sec-WebSocket-Protocol") - if subprotocols: - parts = [p.strip() for p in subprotocols.split(",")] - if "access_token" in parts: - idx = parts.index("access_token") - # The token should be the next part in the sequence - if len(parts) > idx + 1: - token = parts[idx + 1] - - # Fallback to standard Authorization 
header - if not token: - token = _extract_bearer_token(ws.headers.get("Authorization")) - + token = _extract_ws_access_token(ws) + try: user, session = await service.load_current_user_session(token) except ( @@ -220,15 +243,18 @@ async def get_user_compliance_ws( ) -> UserCompliance: user = user_session[0] return UserCompliance( - must_accept_terms=user.must_accept_terms, must_change_password=user.must_change_password + must_accept_terms=user.must_accept_terms, + must_change_password=user.must_change_password, ) def require_permission(permission_name: str) -> Any: async def checker(permissions: UserPermissionsDep) -> bool: names = [p.name for p in permissions] + if permission_name not in names: raise AppHTTPException(status_code=403, detail="Insufficient permissions") + return True return Depends(checker) @@ -237,8 +263,10 @@ async def checker(permissions: UserPermissionsDep) -> bool: def require_permission_ws(permission_name: str) -> Any: async def checker(permissions: UserPermissionsWsDep) -> bool: names = [p.name for p in permissions] + if permission_name not in names: raise WebSocketException(code=1008, reason="Insufficient permissions") + return True return Depends(checker) @@ -247,17 +275,20 @@ async def checker(permissions: UserPermissionsWsDep) -> bool: def require_user_compliance() -> Any: async def checker(compliance: Annotated[UserCompliance, Depends(get_user_compliance)]) -> bool: required_actions: list[str] = [] + if compliance.must_change_password: required_actions.append("change_password") + if compliance.must_accept_terms: required_actions.append("accept_terms") if required_actions: raise AppHTTPException( - status_code=428, # precondition required + status_code=428, detail="Account setup required before accessing this resource.", errors={"required_actions": required_actions}, ) + return True return Depends(checker) @@ -268,21 +299,21 @@ async def checker_ws( compliance: Annotated[UserCompliance, Depends(get_user_compliance_ws)], ) -> bool: 
required_actions: list[str] = [] + if compliance.must_change_password: required_actions.append("change_password") + if compliance.must_accept_terms: required_actions.append("accept_terms") if required_actions: raise WebSocketException(code=1008, reason="Account setup required") + return True return Depends(checker_ws) -# ============================================================ -# Type Aliases for Router Use -# ============================================================ RoleServiceDep = Annotated[RoleService, Depends(get_role_service)] RoleRepoDep = Annotated[RoleRepository, Depends(get_role_repository)] @@ -299,9 +330,10 @@ async def checker_ws( CurrentUserSessionDep = Annotated[tuple[UserWithRoles, Session], Depends(get_current_user_session)] CurrentUserSessionWsDep = Annotated[ - tuple[UserWithRoles, Session], Depends(get_current_user_session_ws) + tuple[UserWithRoles, Session], + Depends(get_current_user_session_ws), ] PasswordServiceDep = Annotated[PasswordService, Depends(get_password_service)] -UserComplianceDep = Annotated[UserCompliance, Depends(get_user_compliance)] +UserComplianceDep = Annotated[UserCompliance, Depends(get_user_compliance)] \ No newline at end of file diff --git a/app/domains/auth/entities.py b/app/domains/auth/entities.py index 421f660..31c0621 100644 --- a/app/domains/auth/entities.py +++ b/app/domains/auth/entities.py @@ -114,6 +114,7 @@ class User: name: str | None = None oauth_provider: OAuthProvider | None = None oauth_provider_id: str | None = None + company_id: UUID | None = None is_active: bool = True is_verified: bool = False must_change_password: bool = False diff --git a/app/domains/auth/models.py b/app/domains/auth/models.py index 983e5b4..b500b6e 100644 --- a/app/domains/auth/models.py +++ b/app/domains/auth/models.py @@ -1,4 +1,7 @@ +from __future__ import annotations + from datetime import datetime +from typing import TYPE_CHECKING from uuid import UUID, uuid4 from sqlalchemy import Boolean, Column, DateTime, 
ForeignKey, Index, Integer, String, Table, func @@ -11,6 +14,9 @@ from .enums import OAuthProvider, SessionStatus, TokenPurpose, enum_values +if TYPE_CHECKING: + from app.domains.companies.models import Company + user_roles = Table( "user_roles", Base.metadata, @@ -47,6 +53,9 @@ class User(Base): nullable=True, ) oauth_provider_id: Mapped[str | None] = mapped_column(String(255), nullable=True, unique=True) + company_id: Mapped[UUID | None] = mapped_column( + PG_UUID(as_uuid=True), ForeignKey("companies.id"), nullable=True, index=True + ) is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True, index=True) is_verified: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False, index=True) must_change_password: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False) @@ -62,6 +71,8 @@ class User(Base): ) roles: Mapped[list["Role"]] = relationship(secondary=user_roles, back_populates="users") + company: Mapped["Company | None"] = relationship(back_populates="users") + def __repr__(self) -> str: return f"" diff --git a/app/domains/auth/repositories/user_repository.py b/app/domains/auth/repositories/user_repository.py index 9879820..7a186dc 100644 --- a/app/domains/auth/repositories/user_repository.py +++ b/app/domains/auth/repositories/user_repository.py @@ -51,6 +51,12 @@ async def get_all(self) -> list[UserEntity]: rows = result.scalars().all() return [self._to_entity(row) for row in rows] + async def get_all_with_roles(self) -> list[UserWithRoles]: + stmt = select(UserModel).options(selectinload(UserModel.roles)) + result = await self.db.execute(stmt) + rows = result.scalars().all() + return [self._to_user_with_roles(row) for row in rows] + async def get_by_id(self, id: UUID) -> UserEntity | None: stmt = select(UserModel).where(UserModel.id == id) res = await self.db.execute(stmt) @@ -135,21 +141,7 @@ async def get_with_roles(self, id: UUID) -> UserWithRoles | None: row = result.scalar_one_or_none() if row is None: return 
None - roles = [RoleEntity(id=r.id, name=r.name, description=r.description) for r in row.roles] - return UserWithRoles( - id=row.id, - email=row.email, - password_hash=row.password_hash, - username=row.username, - name=row.name, - oauth_provider=row.oauth_provider, - oauth_provider_id=row.oauth_provider_id, - is_active=row.is_active, - is_verified=row.is_verified, - must_change_password=row.must_change_password, - must_accept_terms=row.must_accept_terms, - roles=roles, - ) + return self._to_user_with_roles(row) async def get_by_email_with_roles(self, email: str) -> UserWithRoles | None: stmt = ( @@ -159,25 +151,12 @@ async def get_by_email_with_roles(self, email: str) -> UserWithRoles | None: row = result.scalar_one_or_none() if row is None: return None - roles = [RoleEntity(id=r.id, name=r.name, description=r.description) for r in row.roles] - return UserWithRoles( - id=row.id, - email=row.email, - password_hash=row.password_hash, - username=row.username, - name=row.name, - oauth_provider=row.oauth_provider, - oauth_provider_id=row.oauth_provider_id, - is_active=row.is_active, - is_verified=row.is_verified, - must_change_password=row.must_change_password, - must_accept_terms=row.must_accept_terms, - roles=roles, - ) + return self._to_user_with_roles(row) async def add_roles( self, id: UUID, role_ids: list[int] ) -> tuple[UserWithRoles | None, set[int] | None]: + role_ids = list(set(role_ids)) if len(role_ids) == 0: return (None, None) @@ -252,6 +231,52 @@ async def get_user_permissions(self, user_id: UUID) -> list[PermissionEntity]: return [ PermissionEntity(id=p.id, name=p.name, description=p.description) for p in permissions ] + + async def update_user_roles( + self, user_id: UUID, add_ids: list[int], remove_ids: list[int] + ) -> tuple[UserWithRoles | None, set[int] | None]: + add_ids = list(set(add_ids)) + remove_ids = list(set(remove_ids)) + + user = await self.get_by_id(user_id) + if user is None: + return (None, None) + + if add_ids: + roles_stmt = 
select(RoleModel.id).where(RoleModel.id.in_(add_ids)) + result = await self.db.execute(roles_stmt) + found_ids = set(result.scalars().all()) + missing_ids = set(add_ids) - found_ids + if missing_ids: + return (None, missing_ids) + + try: + if remove_ids: + await self.db.execute( + delete(user_roles).where( + user_roles.c.user_id == user_id, + user_roles.c.role_id.in_(remove_ids), + ) + ) + + if add_ids: + from sqlalchemy.dialects.postgresql import insert as pg_insert + + values: list[dict[str, UUID | int]] = [ + {"user_id": user_id, "role_id": role_id} for role_id in add_ids + ] + await self.db.execute( + pg_insert(user_roles).values(values).on_conflict_do_nothing() + ) + + await self.db.commit() + except SQLAlchemyError: + await self.db.rollback() + raise + + updated_user = await self.get_with_roles(user_id) + return (updated_user, None) + async def user_exists(self, user_id: UUID) -> bool: stmt = select(exists().where(UserModel.id == user_id)) @@ -292,6 +317,7 @@ def _to_entity(self, model: UserModel) -> UserEntity: name=model.name, oauth_provider=model.oauth_provider, oauth_provider_id=model.oauth_provider_id, + company_id=model.company_id, is_active=model.is_active, is_verified=model.is_verified, must_change_password=model.must_change_password, @@ -308,6 +334,7 @@ def _to_user_with_roles(self, model: UserModel) -> UserWithRoles: name=model.name, oauth_provider=model.oauth_provider, oauth_provider_id=model.oauth_provider_id, + company_id=model.company_id, is_active=model.is_active, is_verified=model.is_verified, must_change_password=model.must_change_password, diff --git a/app/domains/auth/routers/swagger_utils.py b/app/domains/auth/routers/swagger_utils.py index 2453700..acae6ca 100644 --- a/app/domains/auth/routers/swagger_utils.py +++ b/app/domains/auth/routers/swagger_utils.py @@ -590,22 +590,22 @@ list_users_responses: dict[int | str, dict[str, Any]] = { 200: { - "description": "List of all users retrieved successfully.", - "model": 
GenericSuccessContent[list[User]], + "description": "List of all users retrieved successfully, including assigned roles.", + "model": GenericSuccessContent[list[UserWithRoles]], }, } list_users_swagger: dict[str, Any] = { "summary": "List all users", - "description": "Returns every user registered in the system.", - "response_model": GenericSuccessContent[list[User]], + "description": "Returns every user registered in the system, including their assigned roles.", + "response_model": GenericSuccessContent[list[UserWithRoles]], "responses": list_users_responses, } get_user_responses: dict[int | str, dict[str, Any]] = { 200: { - "description": "User retrieved successfully.", - "model": GenericSuccessContent[User], + "description": "User retrieved successfully, including assigned roles.", + "model": GenericSuccessContent[UserWithRoles], }, 404: { "description": "User not found.", @@ -615,8 +615,11 @@ get_user_swagger: dict[str, Any] = { "summary": "Get a user by ID", - "description": "Returns a single user by their UUID. Returns 404 if not found.", - "response_model": GenericSuccessContent[User], + "description": ( + "Returns a single user by their UUID, including assigned roles. " + "Returns 404 if not found." + ), + "response_model": GenericSuccessContent[UserWithRoles], "responses": get_user_responses, } @@ -664,6 +667,32 @@ "responses": update_user_responses, } +deactivate_user_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": "User deactivated successfully (is_active set to False).", + "model": GenericSuccessContent[User], + }, + 403: { + "description": "Missing permission to update users.", + "model": ErrorContent, + }, + 404: { + "description": "User not found.", + "model": ErrorContent, + }, +} + +deactivate_user_swagger: dict[str, Any] = { + "summary": "Deactivate a user", + "description": ( + "Marks the user identified by UUID as inactive (is_active = False). 
" + "The user is preserved (no soft or hard delete) and can be reactivated " + "via the standard update endpoint. Returns 404 if the user is not found." + ), + "response_model": GenericSuccessContent[User], + "responses": deactivate_user_responses, +} + add_user_roles_responses: dict[int | str, dict[str, Any]] = { 200: { "description": "Roles added to user successfully.", @@ -688,3 +717,64 @@ "response_model": GenericSuccessContent[UserWithRoles], "responses": add_user_roles_responses, } + +remove_user_roles_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": "Roles removed successfully. Returns the updated user with remaining roles.", + "model": GenericSuccessContent[UserWithRoles], + }, + 400: { + "description": "No role IDs provided.", + "model": ErrorContent, + }, + 404: { + "description": "User not found.", + "model": ErrorContent, + }, +} + +remove_user_roles_swagger: dict[str, Any] = { + "summary": "Remove roles from a user", + "description": ( + "Removes the given role IDs from the user. " + "Roles the user does not currently hold are silently ignored. " + "Returns 400 if no role IDs are provided, 404 if the user is not found." + ), + "response_model": GenericSuccessContent[UserWithRoles], + "responses": remove_user_roles_responses, +} + +update_user_roles_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": "Roles updated successfully. Returns the user with the resulting role set.", + "model": GenericSuccessContent[UserWithRoles], + }, + 400: { + "description": "Both add_role_ids and remove_role_ids are empty.", + "model": ErrorContent, + }, + 404: { + "description": "User not found, or one of the role IDs in add_role_ids does not exist.", + "model": ErrorContent, + }, + 422: { + "description": ( + "Request body validation failed — e.g. the same role ID appears in both " + "add_role_ids and remove_role_ids, or a list exceeds the 10-item limit." 
+ ), + "model": ErrorContent, + }, +} + +update_user_roles_swagger: dict[str, Any] = { + "summary": "Update user roles (add and remove in one call)", + "description": ( + "Atomically adds and removes roles in a single request. " + "Duplicate IDs in either list are silently deduped. " + "Roles in remove_role_ids that the user does not hold are ignored. " + "Returns 400 if both lists are empty, 404 if the user or any role to add is not found, " + "422 if the same ID appears in both lists or a list exceeds 10 items." + ), + "response_model": GenericSuccessContent[UserWithRoles], + "responses": update_user_roles_responses, +} diff --git a/app/domains/auth/routers/user_router.py b/app/domains/auth/routers/user_router.py index 17b0354..c9a2259 100644 --- a/app/domains/auth/routers/user_router.py +++ b/app/domains/auth/routers/user_router.py @@ -3,18 +3,22 @@ from fastapi import APIRouter, status from fastapi.responses import JSONResponse -from app.core.dependencies import ResponseFactoryDep +from app.core.dependencies import PasswordSecurityDep, ResponseFactoryDep from app.core.exceptions import AppHTTPException from app.db.exceptions import ResourceAlreadyExistsError, ResourceNotFoundError from app.domains.auth.dependencies import CurrentUserSessionDep, UserServiceDep, require_permission +from app.domains.auth.schemas.user_schemas import RemoveUserRolesDTO, UpdateUserRolesDTO -from ..schemas import AddUserRolesDTO, CreateUserDTO, ReplaceUserDTO, UpdateUserDTO +from ..schemas import AddUserRolesDTO, CreateUserDTO, ReplaceUserDTO, UpdateUserDTO, UserResponseDTO from .swagger_utils import ( add_user_roles_swagger, create_user_swagger, + deactivate_user_swagger, get_user_swagger, list_users_swagger, + remove_user_roles_swagger, replace_user_swagger, + update_user_roles_swagger, update_user_swagger, ) @@ -32,10 +36,19 @@ async def create_user( _auth: CurrentUserSessionDep, service: UserServiceDep, response: ResponseFactoryDep, + password_security: PasswordSecurityDep, ) -> 
JSONResponse: try: - user = await service.create(dto) - return response.success(data=user.to_response_dict(), status_code=status.HTTP_201_CREATED) + dto_to_create = dto + if dto.password_hash: + dto_to_create = dto.model_copy( + update={ + "password_hash": password_security.generate_password_hash(dto.password_hash) + } + ) + user = await service.create(dto_to_create) + safe_data = UserResponseDTO.model_validate(user).model_dump(mode="json") + return response.success(data=safe_data, status_code=status.HTTP_201_CREATED) except ResourceAlreadyExistsError as e: raise AppHTTPException( status_code=status.HTTP_409_CONFLICT, @@ -52,9 +65,10 @@ async def create_user( async def get_users( _auth: CurrentUserSessionDep, service: UserServiceDep, response: ResponseFactoryDep ) -> JSONResponse: - users = await service.get_all() + users = await service.get_all_with_roles() + safe_data = [UserResponseDTO.model_validate(user).model_dump(mode="json") for user in users] return response.success( - data=[user.to_response_dict() for user in users], status_code=status.HTTP_200_OK + data=safe_data, status_code=status.HTTP_200_OK ) @@ -65,12 +79,13 @@ async def get_users( async def get_user( id: UUID, _auth: CurrentUserSessionDep, service: UserServiceDep, response: ResponseFactoryDep ) -> JSONResponse: - user = await service.get_by_id(id) + user = await service.get_by_id_with_roles(id) if not user: raise AppHTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f"User with id '{id}' was not found." ) - return response.success(data=user.to_response_dict(), status_code=status.HTTP_200_OK) + safe_data = UserResponseDTO.model_validate(user).model_dump(mode="json") + return response.success(data=safe_data, status_code=status.HTTP_200_OK) @user_router.put( @@ -89,8 +104,9 @@ async def replace_user( raise AppHTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f"User with id '{id}' was not found." 
) + safe_data = UserResponseDTO.model_validate(user).model_dump(mode="json") return response.success( - data=user.to_response_dict(), + data=safe_data, status_code=status.HTTP_200_OK, ) @@ -111,6 +127,31 @@ async def update_user( raise AppHTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f"User with id '{id}' was not found." ) + safe_data = UserResponseDTO.model_validate(user).model_dump(mode="json") + return response.success( + data=safe_data, + status_code=status.HTTP_200_OK, + ) + + +@user_router.patch( + "/{user_id}/deactivate", + tags=["Users"], + dependencies=[require_permission("user:update")], + **deactivate_user_swagger, +) +async def deactivate_user( + user_id: UUID, + _auth: CurrentUserSessionDep, + service: UserServiceDep, + response: ResponseFactoryDep, +) -> JSONResponse: + user = await service.deactivate(user_id) + if user is None: + raise AppHTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"User with id '{user_id}' was not found.", + ) return response.success( data=user.to_response_dict(), status_code=status.HTTP_200_OK, @@ -134,10 +175,70 @@ async def add_user_roles( ) try: user = await service.add_roles(id, dto.role_ids) - return response.success(data=user.to_response_dict(), status_code=status.HTTP_200_OK) + safe_data = UserResponseDTO.model_validate(user).model_dump(mode="json") + return response.success(data=safe_data, status_code=status.HTTP_200_OK) except ResourceNotFoundError as e: raise AppHTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f"User with id '{id}' was not found." 
) from e except ValueError as e: raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e + + +@user_router.delete( + "/{user_id}/roles", + tags=["Users", "Roles"], + dependencies=[require_permission("user:update_roles")], + **remove_user_roles_swagger, +) +async def remove_user_roles( + user_id: UUID, + dto: RemoveUserRolesDTO, + _auth: CurrentUserSessionDep, + service: UserServiceDep, + response: ResponseFactoryDep +) -> JSONResponse: + if not dto.role_ids: + raise AppHTTPException( + status_code=status.HTTP_400_BAD_REQUEST, detail="No role ids were informed" + ) + try: + user = await service.remove_roles(user_id, dto.role_ids) + safe_data = UserResponseDTO.model_validate(user).model_dump(mode="json") + return response.success(data=safe_data, status_code=status.HTTP_200_OK) + except ResourceNotFoundError as e: + raise AppHTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"User with id '{user_id}' was not found." + ) from e + except ValueError as e: + raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e + + +@user_router.patch( + "/{user_id}/roles", + tags=["Users", "Roles"], + dependencies=[require_permission("user:update_roles")], + **update_user_roles_swagger, +) +async def update_user_roles( + user_id: UUID, + dto: UpdateUserRolesDTO, + _auth: CurrentUserSessionDep, + service: UserServiceDep, + response: ResponseFactoryDep +) -> JSONResponse: + if not dto.add_role_ids and not dto.remove_role_ids: + raise AppHTTPException( + status_code=status.HTTP_400_BAD_REQUEST, detail="No role ids were informed" + ) + + try: + user = await service.update_user_roles(user_id, dto) + safe_data = UserResponseDTO.model_validate(user).model_dump(mode="json") + return response.success(data=safe_data, status_code=status.HTTP_200_OK) + except ResourceNotFoundError as e: + raise AppHTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"User with id '{user_id}' was not found." 
+ ) from e + except ValueError as e: + raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e \ No newline at end of file diff --git a/app/domains/auth/schemas/__init__.py b/app/domains/auth/schemas/__init__.py index b6d9e48..0307dff 100644 --- a/app/domains/auth/schemas/__init__.py +++ b/app/domains/auth/schemas/__init__.py @@ -9,7 +9,15 @@ RefreshSessionDTO, UpdateSessionDTO, ) -from .user_schemas import AddUserRolesDTO, CreateUserDTO, ReplaceUserDTO, UpdateUserDTO, UserCompliance +from .user_schemas import ( + AddUserRolesDTO, + CreateUserDTO, + ReplaceUserDTO, + UpdateUserDTO, + UserCompliance, + UserResponseDTO, + RoleResponseDTO +) __all__ = [ "CreateRoleDTO", @@ -28,6 +36,8 @@ "UpdateUserDTO", "AddUserRolesDTO", "UserCompliance", + "UserResponseDTO", + "RoleResponseDTO", "CreatePasswordResetTokenDTO", "LoginResponse", "RefreshSessionRequest", @@ -38,4 +48,4 @@ "ChangePasswordRequest", "ForgotPasswordRequest", "ResetPasswordRequest" -] +] \ No newline at end of file diff --git a/app/domains/auth/schemas/user_schemas.py b/app/domains/auth/schemas/user_schemas.py index d4a3b3d..415dcc8 100644 --- a/app/domains/auth/schemas/user_schemas.py +++ b/app/domains/auth/schemas/user_schemas.py @@ -1,9 +1,8 @@ -from pydantic import model_validator - +from uuid import UUID +from pydantic import Field, model_validator, ConfigDict from app.core.schemas import BaseDTO from app.domains.auth.enums import OAuthProvider - class CreateUserDTO(BaseDTO): email: str password_hash: str | None = None @@ -11,6 +10,7 @@ class CreateUserDTO(BaseDTO): name: str | None = None oauth_provider: OAuthProvider | None = None oauth_provider_id: str | None = None + company_id: UUID | None = None is_active: bool = True is_verified: bool = False must_change_password: bool = False @@ -21,13 +21,10 @@ class CreateUserDTO(BaseDTO): def validate_auth_method(self) -> "CreateUserDTO": has_password = self.password_hash is not None has_oauth = self.oauth_provider is not None and 
self.oauth_provider_id is not None - if not has_password and not has_oauth: raise ValueError("User must have either password or OAuth provider.") - return self - class UpdateUserDTO(BaseDTO): email: str | None = None password_hash: str | None = None @@ -38,15 +35,61 @@ class UpdateUserDTO(BaseDTO): is_active: bool | None = None is_verified: bool | None = None - class ReplaceUserDTO(CreateUserDTO): pass - class AddUserRolesDTO(BaseDTO): role_ids: list[int] +class RemoveUserRolesDTO(BaseDTO): + role_ids: list[int] = Field(default_factory=list[int]) + +class UpdateUserRolesDTO(BaseDTO): + add_role_ids: list[int] = Field(default_factory=list[int]) + remove_role_ids: list[int] = Field(default_factory=list[int]) + + @model_validator(mode="after") + def validate_no_intersection(self) -> "UpdateUserRolesDTO": + inter = set(self.add_role_ids) & set(self.remove_role_ids) + if inter: + raise ValueError(f"No role can be in both add and remove fields. Roles {inter} are in both.") + return self + + @model_validator(mode="after") + def validate_field_size(self) -> "UpdateUserRolesDTO": + limit = 10 + errors: list[str] = [] + if len(self.add_role_ids) > limit: + errors.append("add_role_ids") + if len(self.remove_role_ids) > limit: + errors.append("remove_role_ids") + if errors: + raise ValueError(f"{' and '.join(errors)} exceed the limit of {limit} roles") + return self class UserCompliance(BaseDTO): must_change_password: bool must_accept_terms: bool + +class RoleResponseDTO(BaseDTO): + id: int + name: str + description: str | None = None + + model_config = ConfigDict(from_attributes=True) + +class UserResponseDTO(BaseDTO): + id: UUID + email: str + username: str | None = None + name: str | None = None + oauth_provider: OAuthProvider | None = None + oauth_provider_id: str | None = None + company_id: UUID | None = None + is_active: bool + is_verified: bool + must_change_password: bool + must_accept_terms: bool + roles: list[RoleResponseDTO] | None = None + + model_config = 
ConfigDict(from_attributes=True) \ No newline at end of file diff --git a/app/domains/auth/services/auth_service.py b/app/domains/auth/services/auth_service.py index 2682fa3..2a45853 100644 --- a/app/domains/auth/services/auth_service.py +++ b/app/domains/auth/services/auth_service.py @@ -2,6 +2,8 @@ from uuid import UUID from app.core.config import get_settings +from app.core.event_dispatcher import AppEvent, EventDispatcher +from app.core.event_dispatcher.schemas import WelcomeInviteEventSchema from app.core.http.schemas import SessionDeviceInfo from app.core.logger import get_logger from app.core.security import JWTService, PasswordSecurity @@ -39,6 +41,7 @@ def __init__( password_security: PasswordSecurity, role_service: RoleService, password_service: PasswordService, + dispatcher: EventDispatcher, ): self.user_service = user_service self.session_service = session_service @@ -46,6 +49,7 @@ def __init__( self.passwordSecurity = password_security self.role_service = role_service self.password_service = password_service + self.dispatcher = dispatcher self.logger = get_logger("app.auth.service") async def register( @@ -67,7 +71,7 @@ async def register( user = await self.user_service.create(create_user_dto) role_names = [r.name for r in user.roles] if user.roles is not None else [] access_token, refresh_token = await self.session_service.init_session( - user.id, role_names, device_info + user.id, role_names, device_info, user.company_id ) registration_total.labels(method="self").inc() @@ -87,22 +91,30 @@ async def login( user = await self.user_service.get_by_email_with_roles(email=dto.email) if user is None: login_total.labels(status="user_not_found").inc() + self.logger.info("Login failed: user not found", extra={"email": dto.email}) raise UserNotFoundError() password_hash = user.password_hash if not password_hash: + self.logger.info( + "Login failed: password not configured", + extra={"user_id": str(user.id), "email": user.email}, + ) 
login_total.labels(status="no_password").inc() raise UserPasswordNotConfiguredError() is_authenticated = self.passwordSecurity.verify_password(dto.password, password_hash) if not is_authenticated: login_total.labels(status="invalid_password").inc() - self.logger.warning("Failed login attempt", extra={"email": dto.email}) + self.logger.warning( + "Login failed: invalid password", + extra={"user_id": str(user.id), "email": dto.email}, + ) raise InvalidPasswordError(user.email) role_names = [r.name for r in user.roles] if user.roles is not None else [] access_token, refresh_token = await self.session_service.init_session( - user.id, role_names, device_info + user.id, role_names, device_info, user.company_id ) login_total.labels(status="success").inc() @@ -156,10 +168,10 @@ async def refresh_session( raise InvalidSessionError("New login required.") access_token = self.jwt_service.create_access_token( - current_session.user_id, current_user.roles_names(), current_session.id + current_session.user_id, current_user.roles_names(), current_session.id, current_user.company_id ) new_refresh_token = self.jwt_service.create_refresh_token( - current_session.user_id, current_user.roles_names(), current_session.id + current_session.user_id, current_user.roles_names(), current_session.id, current_user.company_id ) new_refresh_token_hash = self.passwordSecurity.generate_token_hash(new_refresh_token) @@ -219,10 +231,18 @@ async def admin_register(self, dto: AdminRegisterUserRequest) -> UserWithRoles: self.logger.info("Admin registered user", extra={"user_id": str(user.id), "email": dto.email}) raw_token = await self.password_service.create_reset_token(user.id, TokenPurpose.INVITE) + settings = get_settings() + await self.dispatcher.publish( + AppEvent.USER_WELCOME_INVITE, + WelcomeInviteEventSchema( + user_id=user.id, + user_name=user.name or str(user.id), + user_email=user.email, + roles=user.roles_names(), + raw_token=raw_token, + one_time_password=password, + 
max_attempts=settings.EMAIL_OUTBOX_MAX_ATTEMPTS, + ), + ) - try: - await self.password_service.send_welcome_email(user, raw_token, password) - except Exception: - self.logger.exception("Welcome email dispatch failed after admin_register") - - return user + return user \ No newline at end of file diff --git a/app/domains/auth/services/password_service.py b/app/domains/auth/services/password_service.py index d34d6a0..05760d0 100644 --- a/app/domains/auth/services/password_service.py +++ b/app/domains/auth/services/password_service.py @@ -6,6 +6,8 @@ from app.core.config import get_settings from app.core.email import EmailStrategy, ResetPasswordEmailParams from app.core.email.schemas import WelcomeEmailParams +from app.core.event_dispatcher import AppEvent, EventDispatcher +from app.core.event_dispatcher.schemas import PasswordResetEventSchema from app.core.logger import get_logger from app.core.security import PasswordSecurity, ResetTokenSecurity from ..enums import TokenPurpose @@ -25,12 +27,14 @@ def __init__( password_security: PasswordSecurity, email_strategy: EmailStrategy, reset_token_security: ResetTokenSecurity, + dispatcher: EventDispatcher, ): self.user_service = user_service self.token_repo = token_repo self.password_security = password_security self.email_strategy = email_strategy self.reset_token_security = reset_token_security + self.dispatcher = dispatcher self.logger = get_logger("app.auth.password_service") def generate_random_password(self, length: int = 16) -> str: @@ -146,7 +150,17 @@ async def forgot_password(self, email: str) -> None: if user is None: return try: + settings = get_settings() raw_token = await self.create_reset_token(user.id, TokenPurpose.RESET) - await self.send_reset_password_email(user, raw_token) + await self.dispatcher.publish( + AppEvent.USER_PASSWORD_RESET, + PasswordResetEventSchema( + user_id=user.id, + user_email=user.email, + roles=user.roles_names(), + raw_token=raw_token, + 
max_attempts=settings.EMAIL_OUTBOX_MAX_ATTEMPTS, + ), + ) except Exception: self.logger.exception("Failed forgot-password pipeline for existing user") diff --git a/app/domains/auth/services/session_service.py b/app/domains/auth/services/session_service.py index 82702b9..939c361 100644 --- a/app/domains/auth/services/session_service.py +++ b/app/domains/auth/services/session_service.py @@ -36,7 +36,7 @@ def __init__( self.logger = get_logger("app.auth.session_service") async def init_session( - self, user_id: UUID, role_names: list[str], device_info: SessionDeviceInfo | None = None + self, user_id: UUID, role_names: list[str], device_info: SessionDeviceInfo | None = None, company_id: UUID | None = None ) -> tuple[str, str]: session_dto = CreateSessionDTO( user_id=user_id, @@ -46,20 +46,25 @@ async def init_session( device_info=device_info, last_used_at=_utcnow(), ) - session, refresh_token = await self.create(session_dto) - access_token = self.jwt_service.create_access_token(user_id, role_names, session.id) + # Repassa o company_id para a criação do Refresh Token + session, refresh_token = await self.create(session_dto, company_id) + + # Repassa o company_id para a criação do Access Token + access_token = self.jwt_service.create_access_token(user_id, role_names, session.id, company_id) return access_token, refresh_token - async def create(self, dto: CreateSessionDTO) -> tuple[Session, str]: + async def create(self, dto: CreateSessionDTO, company_id: UUID | None = None) -> tuple[Session, str]: await self.repo.free_active_sessions_limit(dto.user_id, self.max_active_sessions) session_data = dto.model_dump(exclude={"role_names"}, exclude_none=True) if dto.device_info is not None: session_data["device_info"] = dto.device_info.model_dump(mode="json", exclude_none=True) session_model = await self.repo.add(SessionModel(**session_data)) + + # Inclui o company_id na geração do token refresh_token = self.jwt_service.create_refresh_token( - session_model.user_id, 
dto.role_names, session_model.id + session_model.user_id, dto.role_names, session_model.id, company_id ) refresh_token_hash = self.jwt_service.hash_token(refresh_token) session_model.refresh_token_hash = refresh_token_hash @@ -135,4 +140,4 @@ async def revoke_all_user_sessions(self, user_id: UUID) -> None: self.logger.info( "All user sessions revoked", extra={"user_id": str(user_id), "count": len(active_sessions)}, - ) + ) \ No newline at end of file diff --git a/app/domains/auth/services/user_service.py b/app/domains/auth/services/user_service.py index 410fb15..a378252 100644 --- a/app/domains/auth/services/user_service.py +++ b/app/domains/auth/services/user_service.py @@ -4,6 +4,7 @@ from app.db.exceptions import ResourceNotFoundError from app.domains.auth.exceptions import UserCannotLoseLoginMethodError from app.domains.auth.repositories.user_repository import UserRepository +from app.domains.auth.schemas.user_schemas import UpdateUserRolesDTO from ..entities import Permission, Role, User, UserWithRoles from ..schemas import CreateUserDTO, ReplaceUserDTO, UpdateUserDTO @@ -20,6 +21,9 @@ async def create(self, dto: CreateUserDTO) -> UserWithRoles: async def get_all(self) -> list[User]: return await self.repo.get_all() + async def get_all_with_roles(self) -> list[UserWithRoles]: + return await self.repo.get_all_with_roles() + async def get_by_id(self, id: UUID) -> User | None: return await self.repo.get_by_id(id) @@ -49,6 +53,12 @@ async def delete(self, id: UUID) -> User | None: self.logger.info("User soft-deleted", extra={"user_id": str(id)}) return await self.repo.soft_delete(id) + async def deactivate(self, id: UUID) -> User | None: + user = await self.repo.update(id, UpdateUserDTO(is_active=False)) + if user is not None: + self.logger.info("User deactivated", extra={"user_id": str(id)}) + return user + async def hard_delete(self, id: UUID) -> User | None: self.logger.warning("User hard-deleted", extra={"user_id": str(id)}) return await 
self.repo.hard_delete(id) @@ -62,6 +72,39 @@ async def add_roles(self, id: UUID, role_ids: list[int]) -> UserWithRoles: self.logger.info("Roles assigned to user", extra={"user_id": str(id), "role_ids": role_ids}) return user + async def remove_roles(self, user_id: UUID, role_ids: list[int]) -> UserWithRoles: + await self.repo.remove_roles(user_id, role_ids) + user = await self.get_by_id_with_roles(user_id) + if user is None: + raise ResourceNotFoundError("User", str(user_id)) + return user + + async def update_user_roles(self, user_id: UUID, dto: UpdateUserRolesDTO) -> UserWithRoles: + user, missing_ids = await self.repo.update_user_roles( + user_id, dto.add_role_ids, dto.remove_role_ids + ) + if missing_ids: + self.logger.warning( + "Update user roles failed: roles not found", + extra={"user_id": str(user_id), "missing_role_ids": list(missing_ids)}, + ) + raise ValueError(f"Roles not found: {missing_ids}") + if user is None: + self.logger.warning( + "Update user roles failed: user not found", + extra={"user_id": str(user_id)}, + ) + raise ResourceNotFoundError("User", str(user_id)) + self.logger.info( + "User roles updated", + extra={ + "user_id": str(user_id), + "added": dto.add_role_ids, + "removed": dto.remove_role_ids, + }, + ) + return user + async def get_user_permissions(self, id: UUID) -> list[Permission]: return await self.repo.get_user_permissions(id) diff --git a/app/domains/chatbot/README.md b/app/domains/chatbot/README.md new file mode 100644 index 0000000..ea76d00 --- /dev/null +++ b/app/domains/chatbot/README.md @@ -0,0 +1,192 @@ +# Domínio Chatbot (URA de Triagem) + +Módulo responsável pela triagem automatizada de atendimentos via chatbot (URA). O cliente interage com um fluxo de perguntas e respostas (máquina de estados finita) que resulta na abertura de um ticket, resolução de dúvida ou encerramento simples. 
+ +## Visão Geral + +O domínio `chatbot`: +- gerencia o ciclo de vida de um **Attendance** (atendimento) no MongoDB +- conduz a triagem por uma FSM (Finite State Machine) baseada em menus +- cria tickets automaticamente no domínio `ticket` quando a triagem resulta em demanda +- expõe avaliação de satisfação pós-atendimento +- expõe consulta de atendimentos individuais com campo computado `needs_evaluation` + +Dependências principais: +- `ChatbotRepository` para persistência no MongoDB +- `ChatbotService` para regra de negócio e orquestração da FSM +- `ChatbotFSM` para transições de estado (puro, sem I/O) +- `ResponseFactoryDep` para o envelope de resposta HTTP +- `CurrentUserSessionDep` para autenticação +- Domínio `ticket` para criação de tickets a partir da triagem + +## Arquitetura + +``` +chatbot/ +├── routers.py # Borda HTTP — endpoints REST +├── swagger_utils.py # Dicts de documentação OpenAPI (separados das rotas) +├── schemas.py # DTOs de entrada e saída (Pydantic) +├── models.py # Documento Beanie (Attendance) e subdocumentos +├── enums.py # TriageState e AttendanceStatus +├── fsm.py # Máquina de estados — MENU_MAP + ChatbotFSM +├── services/ +│ └── chatbot_service.py # Regra de negócio e orquestração +├── repositories/ +│ └── chatbot_repository.py # Acesso direto ao MongoDB (Motor) +├── dependencies.py # Wiring de DI (ChatbotServiceDep, ChatbotRepositoryDep) +├── metrics.py # Contadores Prometheus (mensagens, tickets) +├── exceptions.py # Exceções de domínio +└── README.md +``` + +### Fluxo resumido + +1. Router valida autenticação e delega ao service. +2. Service carrega (ou cria) o attendance do MongoDB. +3. Service extrai o estado atual da triagem e delega a transição para `ChatbotFSM`. +4. FSM retorna o próximo estado, mensagem e opções de input. +5. Service persiste o novo estado e, se a triagem finalizou com ticket, cria o ticket. +6. Router devolve resposta no envelope padrão. 
+ +## Máquina de Estados (FSM) + +A FSM é definida em `fsm.py` através do dicionário `MENU_MAP`. Cada estado mapeia para uma mensagem, tipo de input e opções de transição. + +``` +MAIN_MENU (A) +├── [1,2,3] Produto → CHOOSING_PRODUCT_PROBLEM (B) +│ ├── [1] Falha → WAITING_FAILURE_TEXT (F) → TICKET_CREATED (E) +│ └── [2] Nova função → WAITING_FEATURE_TEXT (G) → TICKET_CREATED (E) +├── [4] Dúvida → CHOOSING_QUESTION_TYPE (C) +│ ├── [1] Prazos → SHOWING_DEADLINES (X) → [Sim] MAIN_MENU / [Não] SERVICE_FINISHED (I) +│ ├── [2] Manual → SHOWING_MANUAL (J) → [Sim] MAIN_MENU / [Não] SERVICE_FINISHED (I) +│ └── [3] Novo sistema → SHOWING_EMAIL (L) → [Sim] MAIN_MENU / [Não] SERVICE_FINISHED (I) +└── [5] Acesso → REQUESTING_ACCESS (D) → TICKET_CREATED (E) +``` + +Estados terminais: `TICKET_CREATED` (E) e `SERVICE_FINISHED` (I). + +Tipos de input: +- `quick_replies`: usuário seleciona uma opção pré-definida (campo `answer_value`) +- `free_text`: usuário envia texto livre (campo `answer_text`) + +A FSM é **pura** — não faz I/O. Recebe o estado atual e a mensagem, retorna `InternalBotResponseDTO`. Toda persistência e efeito colateral fica no service. + +## Rotas Disponíveis + +Base path: `/api/chatbot` + +| Método | Path | Descrição | Permissão planejada | +|--------|-----------------------------|--------------------------------|------------------------| +| POST | `/` | Criar atendimento | `chatbot:create` | +| GET | `/` | Listar atendimentos | `chatbot:list` | +| POST | `/webhook` | Interagir com a triagem | `chatbot:interact` | +| GET | `/{triage_id}` | Consultar atendimento | `chatbot:read` | +| POST | `/{triage_id}/evaluation` | Avaliar atendimento | `chatbot:evaluate` | + +A documentação OpenAPI de cada rota está em `swagger_utils.py`, aplicada via `**dict` no decorator do router — mantendo as rotas limpas. + +### POST `/` — Criar atendimento + +Cria um attendance com `status = opened` e já executa a primeira transição da FSM, retornando a pergunta inicial (MAIN_MENU). 
A identidade do cliente é derivada do token JWT. Não recebe request body. + +Este é o **único ponto de criação** de um attendance. O webhook não cria attendances. + +Retorno: `201` — `GenericSuccessContent[TriageData]` via `ResponseFactory`. + +### POST `/webhook` — Interagir com a triagem + +Recebe `triage_id` + resposta do usuário. O attendance já deve existir (criado via `POST /`). Retorna `404` se o `triage_id` não for encontrado. + +Validações: +- `answer_text` e `answer_value` são mutuamente exclusivos (422) +- Ambos `None` serão rejeitados com 422 (T09) + +Retorno: `200` — `GenericSuccessContent[TriageData]` via `ResponseFactory`. +Em andamento: `step_id`, `message`, `input` (mode + quick_replies). +Finalizado: `finished: true`, `closure_message`, e `result` (se ticket criado). + +### GET `/{triage_id}` — Consultar atendimento + +Retorna o attendance completo incluindo `needs_evaluation` (campo computado: `true` sse `status == finished` e `evaluation == null`). + +### POST `/{triage_id}/evaluation` — Avaliar atendimento + +Registra a nota de satisfação (1-5). Só pode ser chamado uma vez, e só após a triagem estar finalizada. + +Erros: `404` (não encontrado), `409` (não finalizado ou já avaliado), `422` (rating inválido). 
+ +## Modelo de Dados + +### Attendance (MongoDB — collection configurada em `models.py`) + +| Campo | Tipo | Descrição | +|--------------|--------------------------|----------------------------------------| +| `_id` | `ObjectId` | Usado como `triage_id` | +| `status` | `AttendanceStatus` | `opened`, `in_progress`, `finished` | +| `start_date` | `datetime` | Início do atendimento (UTC) | +| `end_date` | `datetime \| None` | Fim do atendimento (UTC) | +| `client` | `AttendanceClient` | Dados do cliente | +| `triage` | `list[Triage]` | Histórico de perguntas e respostas | +| `result` | `AttendanceResult \| None` | Tipo do resultado + mensagem de fechamento | +| `evaluation` | `AttendanceEvaluation \| None` | Nota de satisfação | + +O model `Attendance` herda de `beanie.Document` e deve estar registrado no `init_beanie()` em `app/main.py`. + +## Schemas + +### Entrada + +| Schema | Descrição | +|---------------------|--------------------------------------------------| +| `TriageInputDTO` | Payload do webhook (triage_id + resposta) | +| `EvaluationRequest` | Payload da avaliação (rating: 1-5) | +| `CreateAttendanceDTO` | DTO interno para criação de attendance | + +### Saída + +| Schema | Descrição | +|---------------------|--------------------------------------------------| +| `TriageData` | Bloco `data` da resposta de triagem (usado com `GenericSuccessContent[TriageData]`) | +| `AttendanceResponse`| Consulta completa com `needs_evaluation` | +| `EvaluationResponse`| Confirmação da avaliação com `evaluated_at` | +| `TriageStepSchema` | Item do histórico de triagem | + +### Internos + +| Schema | Descrição | +|------------------------|----------------------------------------------| +| `InternalBotResponseDTO` | Retorno da FSM (new_state, response_text, quick_replies) | +| `TriageInputDef` | Definição do input esperado (mode + quick_replies) | + +## Métricas + +Definidas em `metrics.py`, registradas no Prometheus: + +| Métrica | Tipo | Labels | Descrição | 
+|--------------------------------------|---------|--------|----------------------------------| +| `domain_chatbot_messages_total` | Counter | `step` | Total de mensagens processadas | +| `domain_chatbot_tickets_created_total` | Counter | — | Tickets criados pela triagem | + +## Integração com o Domínio Ticket + +O `ChatbotService` cria tickets diretamente via `ChatbotRepository.create_ticket()`, usando o model `Ticket` do domínio `ticket`. O service: + +1. Analisa as respostas da triagem para extrair `type`, `criticality` e `product`. +2. Monta um `TicketClient` a partir dos dados do attendance. +3. Insere o ticket via Beanie. + +> **Nota (T09):** A criação direta será substituída por emissão do evento `triage.finished` via `EventDispatcher`. Um listener no domínio `ticket` irá criar o ticket a partir do payload do evento. + +## Problemas Conhecidos + +| Severidade | Problema | Descrição | +|---|---|---| +| Alto | Webhook sem autenticação | `POST /webhook` não exige token — qualquer requisição é aceita. Corrigido na T09. | +| Alto | Identidade do cliente no payload | `client_id/name/email` vem no body em vez de derivar do token. Corrigido na T09. | +| Alto | Criação de ticket acoplada | O service cria tickets diretamente, sem atomicidade. Migrar para EventDispatcher na T09. | +| Médio | `step_id` no DTO sem uso | Campo exigido no `TriageInputDTO` mas ignorado pelo service. Removido na T09. | +| Médio | `answer_text` sem sanitização | Texto livre persistido sem tratamento. Corrigido na T09. | +| Médio | `except Exception` genérico | Repository usa `except Exception` na conversão de ObjectId. Corrigido na T08. | +| Médio | `InternalBotResponseDTO.new_state` sem tipagem | Tipado como `Any`, deveria ser `TriageState \| None`. Corrigido na T08. | +| Baixo | Permissões comentadas | `require_permission(...)` está comentado em todas as rotas. Ativado progressivamente nas T08/T09. 
| diff --git a/app/domains/chatbot/__init__.py b/app/domains/chatbot/__init__.py index e69de29..e56d372 100644 --- a/app/domains/chatbot/__init__.py +++ b/app/domains/chatbot/__init__.py @@ -0,0 +1,3 @@ +from .routers import router as chatbot_router + +__all__ = ["chatbot_router"] \ No newline at end of file diff --git a/app/domains/chatbot/dependencies.py b/app/domains/chatbot/dependencies.py new file mode 100644 index 0000000..b370626 --- /dev/null +++ b/app/domains/chatbot/dependencies.py @@ -0,0 +1,33 @@ +from typing import Annotated + +from fastapi import Depends + +from app.core.event_dispatcher import get_event_dispatcher +from app.core.event_dispatcher.event_dispatcher import EventDispatcher +from app.db.mongo.dependencies import MongoSessionDep +from app.domains.chatbot.repositories.chatbot_repository import ChatbotRepository +from app.domains.chatbot.services.chatbot_service import ChatbotService + + +def get_chatbot_repo(db: MongoSessionDep) -> ChatbotRepository: + return ChatbotRepository(db) + + +ChatbotRepositoryDep = Annotated[ChatbotRepository, Depends(get_chatbot_repo)] + + +def get_dispatcher() -> EventDispatcher: + return get_event_dispatcher() + + +EventDispatcherDep = Annotated[EventDispatcher, Depends(get_dispatcher)] + + +def get_chatbot_service( + chatbot_repo: ChatbotRepositoryDep, + dispatcher: EventDispatcherDep, +) -> ChatbotService: + return ChatbotService(chatbot_repo, dispatcher) + + +ChatbotServiceDep = Annotated[ChatbotService, Depends(get_chatbot_service)] \ No newline at end of file diff --git a/app/domains/chatbot/exceptions.py b/app/domains/chatbot/exceptions.py index e69de29..8541f3e 100644 --- a/app/domains/chatbot/exceptions.py +++ b/app/domains/chatbot/exceptions.py @@ -0,0 +1,47 @@ +from fastapi import status +from app.core.exceptions import AppHTTPException + + +class AttendanceNotFoundException(AppHTTPException): + def __init__(self, triage_id: str) -> None: + super().__init__( + status_code=status.HTTP_404_NOT_FOUND, + 
title="Attendance Not Found", + detail=f"Attendance {triage_id} not found." + ) + + +class AttendanceCreationException(AppHTTPException): + def __init__(self) -> None: + super().__init__( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + title="Attendance Creation Error", + detail="Attendance was created but could not be loaded afterward. Please try again." + ) + + +class AttendanceNotFinishedException(AppHTTPException): + def __init__(self) -> None: + super().__init__( + status_code=status.HTTP_409_CONFLICT, + title="Attendance Not Finished", + detail="Attendance is not finished yet." + ) + + +class AttendanceAlreadyEvaluatedException(AppHTTPException): + def __init__(self) -> None: + super().__init__( + status_code=status.HTTP_409_CONFLICT, + title="Attendance Already Evaluated", + detail="Attendance has already been evaluated." + ) + + +class MissingClientDataException(AppHTTPException): + def __init__(self, detail: str = "Missing client data to create attendance.") -> None: + super().__init__( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, + title="Missing Client Data", + detail=detail + ) \ No newline at end of file diff --git a/app/domains/chatbot/fsm.py b/app/domains/chatbot/fsm.py index f427400..ce343d7 100644 --- a/app/domains/chatbot/fsm.py +++ b/app/domains/chatbot/fsm.py @@ -130,7 +130,7 @@ def process_interaction(current_state: Optional[TriageState], message: str) -> I return ChatbotFSM._get_state_response(next_state) # Cai aqui se a mensagem não bater com nenhuma opção válida - return ChatbotFSM._invalid_response(current_state, current_menu) + return ChatbotFSM._get_state_response(current_state) return ChatbotFSM._get_state_response(TriageState.MAIN_MENU) @@ -166,18 +166,3 @@ def _get_finished_response() -> InternalBotResponseDTO: response_text="Atendimento finalizado! 
Momento de avaliação do atendimento.", is_finished=True ) - - @staticmethod - def _invalid_response(state: TriageState, menu: MenuConfig) -> InternalBotResponseDTO: - is_free_text = menu["input_type"] == "free_text" - - options = None - if not is_free_text: - options = [{"label": o["label"], "value": o["value"]} for o in menu.get("options", [])] - - return InternalBotResponseDTO( - new_state=state, - response_text="Opção inválida. Por favor, selecione uma das opções válidas abaixo.", - is_free_text=is_free_text, - quick_replies=options - ) \ No newline at end of file diff --git a/app/domains/chatbot/listeners.py b/app/domains/chatbot/listeners.py new file mode 100644 index 0000000..210a60b --- /dev/null +++ b/app/domains/chatbot/listeners.py @@ -0,0 +1,24 @@ +from app.core.event_dispatcher import EventDispatcher +from app.core.event_dispatcher.decorators import event_handler +from app.core.event_dispatcher.enums import AppEvent +from app.core.event_dispatcher.schemas import TicketClosedEventSchema +from app.db.mongo.db import mongo_db +from app.domains.chatbot.repositories.chatbot_repository import ChatbotRepository +from app.domains.chatbot.services.chatbot_service import ChatbotService + + +class ChatbotListener: + def __init__(self, chatbot_service: ChatbotService) -> None: + self.service = chatbot_service + + @event_handler(TicketClosedEventSchema) + async def on_ticket_closed(self, schema: TicketClosedEventSchema) -> None: + await self.service.finish_attendance_pending_evaluation(str(schema.triage_id)) + + +def register_chatbot_listener(dispatcher: EventDispatcher) -> None: + repo = ChatbotRepository(mongo_db.get_db()) + service = ChatbotService(repo, dispatcher) + listener = ChatbotListener(service) + + dispatcher.subscribe(AppEvent.TICKET_CLOSED, listener.on_ticket_closed) \ No newline at end of file diff --git a/app/domains/chatbot/models.py b/app/domains/chatbot/models.py new file mode 100644 index 0000000..279176f --- /dev/null +++ 
b/app/domains/chatbot/models.py @@ -0,0 +1,51 @@ +from datetime import datetime +from typing import Any +from uuid import UUID + +from beanie import Document +from pydantic import BaseModel, Field + +from app.domains.chatbot.enums import AttendanceStatus + + +class AttendanceCompany(BaseModel): + id: UUID + name: str + + +class AttendanceClient(BaseModel): + id: UUID + name: str + email: str + company: AttendanceCompany | dict[str, Any] | None = None + + +class AttendanceResult(BaseModel): + type: str + closure_message: str + ticket_id: str | None = None + chat_id: str | None = None + + +class AttendanceEvaluation(BaseModel): + rating: int = Field(..., ge=1, le=5) + + +class Triage(BaseModel): + step: str + question: str + answer_value: str | None = None + answer_text: str | None = None + + +class Attendance(Document): + status: AttendanceStatus + start_date: datetime + end_date: datetime | None = None + client: AttendanceClient + triage: list[Triage] = Field(default_factory=list[Triage]) + result: AttendanceResult | None = None + evaluation: AttendanceEvaluation | None = None + + class Settings: + name = "attendances" diff --git a/app/domains/chatbot/repositories/chatbot_repository.py b/app/domains/chatbot/repositories/chatbot_repository.py index cc46040..97ad3dc 100644 --- a/app/domains/chatbot/repositories/chatbot_repository.py +++ b/app/domains/chatbot/repositories/chatbot_repository.py @@ -1,58 +1,133 @@ +from datetime import datetime from typing import Any -from motor.motor_asyncio import AsyncIOMotorDatabase + from bson import ObjectId -from app.domains.chatbot.schemas import CreateAttendanceDTO -from app.domains.ticket.models import Ticket +from motor.motor_asyncio import AsyncIOMotorDatabase + +from app.domains.chatbot.enums import AttendanceStatus +from app.domains.chatbot.schemas import AttendanceSearchFiltersDTO, CreateAttendanceDTO + class ChatbotRepository: - def __init__(self, db: AsyncIOMotorDatabase[dict[str, Any]]): - # Nomes das coleções mantidos 
como no banco de dados para evitar perda de referência - self.attendances_collection = db["atendimentos"] - self.tickets_collection = db["tickets"] - - async def create_attendance(self, dto: CreateAttendanceDTO, triage_id: str) -> dict[str, Any]: - document = dto.model_dump(mode="json") - - query_id: ObjectId | str - if ObjectId.is_valid(triage_id): - query_id = ObjectId(triage_id) - else: - query_id = triage_id - - document["_id"] = query_id - document["triage"] = [] - - await self.attendances_collection.insert_one(document) - - return { - "triage_id": str(query_id), - "status": document.get("status"), - "start_date": document.get("start_date"), - "client": document.get("client"), - "triage": document.get("triage"), - } - - async def find_attendance(self, attendance_id: str) -> dict[str, Any] | None: - try: - query_id = ObjectId(attendance_id) - except Exception: - query_id = attendance_id - return await self.attendances_collection.find_one({"_id": query_id}) - - async def save_attendance(self, attendance_id: str, full_attendance: dict[str, Any]) -> None: - try: - query_id = ObjectId(attendance_id) - except Exception: - query_id = attendance_id - - full_attendance["_id"] = query_id - - await self.attendances_collection.replace_one( - {"_id": query_id}, - full_attendance, - upsert=True + def __init__(self, db: AsyncIOMotorDatabase[dict[str, Any]]) -> None: + self.db = db + self.collection = db["atendimentos"] + + async def create_attendance( + self, + dto: CreateAttendanceDTO, + triage_id: str, + ) -> dict[str, Any]: + object_id = ObjectId(triage_id) + data = dto.model_dump(mode="json") + data["_id"] = object_id + await self.collection.insert_one(data) + return data + + async def find_attendance(self, triage_id: str) -> dict[str, Any] | None: + return await self.collection.find_one({"_id": ObjectId(triage_id)}) + + async def save_attendance( + self, + triage_id: str, + attendance: dict[str, Any], + ) -> None: + object_id = ObjectId(triage_id) + attendance["_id"] = 
object_id + await self.collection.replace_one( + {"_id": object_id}, + attendance, + upsert=True, + ) + + async def list_attendances( + self, + filters: AttendanceSearchFiltersDTO, + ) -> list[dict[str, Any]]: + query: dict[str, Any] = {} + + if filters.client_id is not None: + query["client.id"] = str(filters.client_id) + + if filters.client_name is not None: + query["client.name"] = { + "$regex": filters.client_name, + "$options": "i", + } + + if filters.status is not None: + query["status"] = filters.status.value + + if filters.result_type is not None: + query["result.type"] = filters.result_type + + if filters.has_evaluation is not None: + query["evaluation"] = {"$ne": None} if filters.has_evaluation else None + + if filters.rating is not None: + query["evaluation.rating"] = filters.rating + + date_query: dict[str, Any] = {} + + if filters.start_date_from is not None: + date_query["$gte"] = filters.start_date_from.isoformat() + + if filters.start_date_to is not None: + date_query["$lte"] = filters.start_date_to.isoformat() + + if date_query: + query["start_date"] = date_query + + cursor = self.collection.find(query).sort("start_date", -1) + return await cursor.to_list(length=None) + + async def finish_attendance_pending_evaluation( + self, + triage_id: str, + finished_at: str, + ) -> bool: + result = await self.collection.update_one( + { + "_id": ObjectId(triage_id), + "status": {"$ne": AttendanceStatus.FINISHED.value}, + }, + { + "$set": { + "status": AttendanceStatus.FINISHED.value, + "end_date": finished_at, + } + }, ) + return result.modified_count > 0 + + async def find_ticket_and_conversation_ids_by_triage_id( + self, + triage_id: str, + ) -> tuple[str | None, str | None]: + triage_object_id = ObjectId(triage_id) + + ticket = await self.db["tickets"].find_one( + {"triage_id": triage_object_id}, + sort=[("_id", -1)], + ) + + if ticket is None: + return None, None + + ticket_id = str(ticket["_id"]) + chat_id: str | None = None + + chat_ids = 
ticket.get("chat_ids") or [] + if chat_ids: + chat_id = str(chat_ids[-1]) + + if chat_id is None: + conversation = await self.db["conversations"].find_one( + {"ticket_id": ticket["_id"]}, + sort=[("sequential_index", -1)], + ) + + if conversation is not None: + chat_id = str(conversation["_id"]) - async def create_ticket(self, ticket: Ticket) -> str: - created_ticket = await ticket.insert() - return str(created_ticket.id) + return ticket_id, chat_id \ No newline at end of file diff --git a/app/domains/chatbot/routers.py b/app/domains/chatbot/routers.py index 5804eef..36020da 100644 --- a/app/domains/chatbot/routers.py +++ b/app/domains/chatbot/routers.py @@ -1,44 +1,120 @@ -from fastapi import APIRouter, Depends, Body, status +from typing import Annotated + +from fastapi import APIRouter, Body, Depends, status from fastapi.responses import JSONResponse from app.core.dependencies import ResponseFactoryDep -from app.db.mongo.dependencies import MongoSessionDep from app.domains.auth.dependencies import CurrentUserSessionDep -from app.domains.chatbot.schemas import AttendanceClient, TriageInputDTO, TriageResponseDTO -from app.domains.chatbot.repositories.chatbot_repository import ChatbotRepository -from app.domains.chatbot.services.chatbot_service import ChatbotService +from app.domains.chatbot.dependencies import ChatbotServiceDep +from app.domains.chatbot.schemas import ( + AttendanceClient, + AttendanceSearchFiltersDTO, + EvaluationRequest, + TriageInputDTO, +) +from app.domains.chatbot.swagger_utils import ( + create_attendance_swagger, + evaluation_swagger, + get_attendance_swagger, + list_attendances_swagger, + webhook_swagger, +) -router = APIRouter(prefix="/chatbot", tags=["Chatbot URA"]) +router = APIRouter() -def get_chatbot_service(db: MongoSessionDep) -> ChatbotService: - repository = ChatbotRepository(db) - return ChatbotService(repository) -@router.post("/") +@router.post( + "/", + **create_attendance_swagger, +) async def create_triage( auth: 
CurrentUserSessionDep, + service: ChatbotServiceDep, response: ResponseFactoryDep, - service: ChatbotService = Depends(get_chatbot_service), ) -> JSONResponse: user = auth[0] - c = AttendanceClient( + + client = AttendanceClient( id=user.id, - name = user.name or user.email, - email = user.email, + name=user.name or user.email, + email=user.email, + ) + + data = await service.create_attendance(client) + + return response.success( + data=data.model_dump(mode="json"), + status_code=status.HTTP_201_CREATED, ) - res = await service.create_attendance(c) + + +@router.get( + "/", + **list_attendances_swagger, +) +async def get_attendances( + filters: Annotated[AttendanceSearchFiltersDTO, Depends()], + auth: CurrentUserSessionDep, + service: ChatbotServiceDep, + response: ResponseFactoryDep, +) -> JSONResponse: + data = await service.list_attendances(filters) + return response.success( - data = res, - status_code = status.HTTP_201_CREATED + data=[item.model_dump(mode="json") for item in data], + status_code=status.HTTP_200_OK, ) -@router.post("/webhook", response_model=TriageResponseDTO) +@router.post( + "/webhook", + **webhook_swagger, +) async def send_message( + service: ChatbotServiceDep, + response: ResponseFactoryDep, payload: TriageInputDTO = Body(...), - service: ChatbotService = Depends(get_chatbot_service) -) -> TriageResponseDTO: - """ - Endpoint para interagir com o Chatbot da URA de Triagem. 
- """ - return await service.process_message(payload) \ No newline at end of file +) -> JSONResponse: + data = await service.process_message(payload) + + return response.success( + data=data.model_dump(mode="json"), + status_code=status.HTTP_200_OK, + ) + + +@router.get( + "/{triage_id}", + **get_attendance_swagger, +) +async def get_attendance( + triage_id: str, + auth: CurrentUserSessionDep, + service: ChatbotServiceDep, + response: ResponseFactoryDep, +) -> JSONResponse: + data = await service.get_attendance(triage_id) + + return response.success( + data=data.model_dump(mode="json"), + status_code=status.HTTP_200_OK, + ) + + +@router.post( + "/{triage_id}/evaluation", + **evaluation_swagger, +) +async def set_evaluation( + triage_id: str, + auth: CurrentUserSessionDep, + payload: EvaluationRequest, + service: ChatbotServiceDep, + response: ResponseFactoryDep, +) -> JSONResponse: + data = await service.set_evaluation(triage_id, payload) + + return response.success( + data=data.model_dump(mode="json"), + status_code=status.HTTP_200_OK, + ) \ No newline at end of file diff --git a/app/domains/chatbot/schemas.py b/app/domains/chatbot/schemas.py index ab68516..5ccb933 100644 --- a/app/domains/chatbot/schemas.py +++ b/app/domains/chatbot/schemas.py @@ -1,59 +1,43 @@ -# app/domains/chatbot/schemas.py from datetime import UTC, datetime +from typing import Dict, List, Optional from uuid import UUID from pydantic import BaseModel, Field, model_validator -from typing import Optional, List, Dict, Any +from app.core.schemas import BaseDTO from app.domains.chatbot.enums import AttendanceStatus +from app.domains.chatbot.models import ( + AttendanceClient, + AttendanceEvaluation, + AttendanceResult, +) + -# --- ENTRADA (Frontend -> Backend) --- class TriageInputDTO(BaseModel): triage_id: str = Field(..., description="Identificador da sessão de triagem") step_id: str = Field(..., description="Etapa que está sendo respondida") answer_text: Optional[str] = Field(None, 
description="Resposta em texto livre") - answer_value: Optional[str] = Field(None, description="Valor da opção selecionada (quick reply)") + answer_value: Optional[str] = Field(None, description="Valor da opção selecionada") client_id: UUID | None = Field( None, - description="UUID do cliente. Obrigatorio quando triage_id nao existir.", + description="UUID do cliente. Obrigatório quando triage_id não existir.", ) client_name: str | None = Field( None, - description="Nome do cliente. Obrigatorio quando triage_id nao existir.", + description="Nome do cliente. Obrigatório quando triage_id não existir.", ) client_email: str | None = Field( None, - description="Email do cliente. Obrigatorio quando triage_id nao existir.", + description="Email do cliente. Obrigatório quando triage_id não existir.", ) - @model_validator(mode='after') - def check_answers(self): + @model_validator(mode="after") + def check_answers(self) -> "TriageInputDTO": if self.answer_text is not None and self.answer_value is not None: raise ValueError("answer_text e answer_value não devem ser enviados juntos.") return self -class AttendanceCompany(BaseModel): - id: UUID - name: str - - -class AttendanceClient(BaseModel): - id: UUID - name: str - email: str - company: AttendanceCompany | None = None - - -class AttendanceResult(BaseModel): - type: str - closure_message: str - - -class AttendanceEvaluation(BaseModel): - rating: int - - class CreateAttendanceDTO(BaseModel): status: AttendanceStatus = AttendanceStatus.OPENED start_date: datetime = Field(default_factory=lambda: datetime.now(UTC)) @@ -63,18 +47,22 @@ class CreateAttendanceDTO(BaseModel): evaluation: AttendanceEvaluation | None = None -# --- SAÍDA (Backend -> Frontend) --- class QuickReply(BaseModel): label: str value: str + class TriageInputDef(BaseModel): mode: str quick_replies: Optional[List[QuickReply]] = None + class TriageResult(BaseModel): type: str id: str + ticket_id: str | None = None + chat_id: str | None = None + class 
TriageData(BaseModel): triage_id: str @@ -85,18 +73,60 @@ class TriageData(BaseModel): closure_message: Optional[str] = None result: Optional[TriageResult] = None -class TriageResponseMeta(BaseModel): - timestamp: str - success: bool - request_id: str - -class TriageResponseDTO(BaseModel): - data: TriageData - meta: TriageResponseMeta class InternalBotResponseDTO(BaseModel): - new_state: Any # TriageState + new_state: object | None response_text: str is_free_text: bool = False quick_replies: Optional[List[Dict[str, str]]] = None - is_finished: bool = False \ No newline at end of file + is_finished: bool = False + + +class AttendanceSearchFiltersDTO(BaseDTO): + client_id: UUID | None = Field(default=None) + client_name: str | None = Field(default=None) + status: AttendanceStatus | None = Field(default=None) + result_type: str | None = Field(default=None) + start_date_from: datetime | None = Field(default=None) + start_date_to: datetime | None = Field(default=None) + has_evaluation: bool | None = Field(default=None) + rating: int | None = Field(default=None, ge=1, le=5) + + +class TriageStepSchema(BaseModel): + step: str + question: str + answer_value: str | None = None + answer_text: str | None = None + + +class EvaluationRequest(BaseModel): + rating: int = Field(..., ge=1, le=5, description="Nota de satisfação de 1 a 5") + + +class EvaluationResponse(BaseModel): + triage_id: str + rating: int + evaluated_at: datetime + + +class AttendanceResponse(BaseModel): + triage_id: str + status: AttendanceStatus + start_date: datetime + end_date: datetime | None = None + client: AttendanceClient + triage: list[TriageStepSchema] = Field(default_factory=list) + result: AttendanceResult | None = None + evaluation: AttendanceEvaluation | None = None + needs_evaluation: bool = False + current_step_id: str | None = None + current_message: str | None = None + current_input: TriageInputDef | None = None + + @model_validator(mode="after") + def compute_needs_evaluation(self) -> 
"AttendanceResponse": + self.needs_evaluation = ( + self.status == AttendanceStatus.FINISHED and self.evaluation is None + ) + return self \ No newline at end of file diff --git a/app/domains/chatbot/services/chatbot_service.py b/app/domains/chatbot/services/chatbot_service.py index ad20b3f..8e61867 100644 --- a/app/domains/chatbot/services/chatbot_service.py +++ b/app/domains/chatbot/services/chatbot_service.py @@ -1,48 +1,76 @@ -import uuid -from typing import Any, cast -from datetime import datetime, timezone -from uuid import UUID, uuid4 +import asyncio +from datetime import UTC, datetime +from typing import Any +from uuid import UUID from beanie import PydanticObjectId from bson import ObjectId -from fastapi import status -from app.core.exceptions import AppHTTPException +from app.core.event_dispatcher.enums import AppEvent +from app.core.event_dispatcher.event_dispatcher import EventDispatcher +from app.core.event_dispatcher.schemas import TriageFinishedEventSchema from app.core.logger import get_logger -from app.domains.chatbot.enums import TriageState -from app.domains.chatbot.metrics import chatbot_messages_total, chatbot_tickets_total -from app.domains.chatbot.schemas import ( - AttendanceClient, CreateAttendanceDTO, TriageInputDTO, TriageResponseDTO, TriageData, TriageInputDef, - QuickReply, TriageResponseMeta, TriageResult +from app.domains.chatbot.enums import AttendanceStatus, TriageState +from app.domains.chatbot.exceptions import ( + AttendanceAlreadyEvaluatedException, + AttendanceCreationException, + AttendanceNotFinishedException, + AttendanceNotFoundException, + MissingClientDataException, ) from app.domains.chatbot.fsm import ChatbotFSM +from app.domains.chatbot.metrics import chatbot_messages_total +from app.domains.chatbot.models import ( + AttendanceClient, + AttendanceEvaluation, + AttendanceResult, +) from app.domains.chatbot.repositories.chatbot_repository import ChatbotRepository -from app.domains.ticket.models import ( - Ticket, - 
TicketClient, - TicketCompany, - TicketCriticality, - TicketStatus, - TicketType, +from app.domains.chatbot.schemas import ( + AttendanceResponse, + AttendanceSearchFiltersDTO, + CreateAttendanceDTO, + EvaluationRequest, + EvaluationResponse, + InternalBotResponseDTO, + QuickReply, + TriageData, + TriageInputDTO, + TriageInputDef, + TriageResult, + TriageStepSchema, ) +from app.domains.ticket.models import TicketCriticality, TicketType + class ChatbotService: - def __init__(self, repository: ChatbotRepository) -> None: + def __init__( + self, + repository: ChatbotRepository, + dispatcher: EventDispatcher, + ) -> None: self.repository = repository + self.dispatcher = dispatcher self.logger = get_logger("app.chatbot.service") async def create_attendance( self, client: AttendanceClient, triage_id: str | None = None, - ) -> dict[str, Any]: - dto = CreateAttendanceDTO( - client = client - ) + ) -> TriageData: + dto = CreateAttendanceDTO(client=client) final_triage_id = triage_id or str(ObjectId()) - return await self.repository.create_attendance(dto, final_triage_id) + attendance = await self.repository.create_attendance(dto, final_triage_id) + + bot_response = ChatbotFSM.process_interaction(None, "") + self._record_step_metric(bot_response) + + attendance["triage"] = [self._build_triage_step(bot_response)] + await self.repository.save_attendance(final_triage_id, attendance) + + return self._build_triage_data(final_triage_id, bot_response) - async def process_message(self, payload: TriageInputDTO) -> TriageResponseDTO: + async def process_message(self, payload: TriageInputDTO) -> TriageData: attendance_db = await self.repository.find_attendance(payload.triage_id) if attendance_db is None: @@ -51,232 +79,511 @@ async def process_message(self, payload: TriageInputDTO) -> TriageResponseDTO: attendance_db = await self.repository.find_attendance(payload.triage_id) if attendance_db is None: - raise AppHTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - 
detail=( - "Attendance was created but could not be loaded afterward. " - "Please try again." - ), - ) + raise AttendanceCreationException() attendance: dict[str, Any] = attendance_db - + + if attendance.get("status") == AttendanceStatus.FINISHED.value: + return self._build_finished_triage_data_from_attendance( + payload.triage_id, + attendance, + ) + triage: list[dict[str, Any]] = attendance.get("triage", []) current_state: TriageState | None = None - + last_step: str | None = None + if triage: last_interaction = triage[-1] step = last_interaction.get("step") - + current_state = TriageState(step) if step is not None else None - + last_step = step + if payload.answer_text is not None: last_interaction["answer_text"] = payload.answer_text + if payload.answer_value is not None: last_interaction["answer_value"] = payload.answer_value user_message = payload.answer_value if payload.answer_value else (payload.answer_text or "") bot_response = ChatbotFSM.process_interaction(current_state, user_message) + self._record_step_metric(bot_response) - step_label = bot_response.new_state.value if bot_response.new_state else "unknown" - chatbot_messages_total.labels(step=step_label).inc() + ticket_id: str | None = None + chat_id: str | None = None - if not bot_response.is_finished: - new_question: dict[str, Any] = { - "step": bot_response.new_state.value if bot_response.new_state else "UNKNOWN", - "question": bot_response.response_text, - "answer_text": None, - "answer_value": None, - "type": "free_text" if bot_response.is_free_text else "quick_replies" + if bot_response.is_finished: + is_ticket = bot_response.new_state == TriageState.TICKET_CREATED + + self.logger.info( + "Triage finished", + extra={ + "triage_id": payload.triage_id, + "is_ticket": is_ticket, + }, + ) + + attendance["status"] = AttendanceStatus.FINISHED.value + attendance["end_date"] = datetime.now(UTC).isoformat() + attendance["result"] = { + "type": "Ticket" if is_ticket else "Resolved", + "closure_message": 
bot_response.response_text, + "ticket_id": None, + "chat_id": None, } - triage.append(new_question) + attendance["triage"] = triage + + await self.repository.save_attendance(payload.triage_id, attendance) + + if is_ticket: + ticket_id, chat_id = await self._publish_triage_finished_and_resolve_ids( + payload.triage_id, + attendance, + ) + + attendance["result"]["ticket_id"] = ticket_id + attendance["result"]["chat_id"] = chat_id + + await self.repository.save_attendance(payload.triage_id, attendance) + + return self._build_triage_data( + payload.triage_id, + bot_response, + ticket_id=ticket_id, + chat_id=chat_id, + ) + + new_state_value = bot_response.new_state.value if bot_response.new_state else "UNKNOWN" + + if new_state_value != last_step: + triage.append(self._build_triage_step(bot_response)) attendance["triage"] = triage - ticket_id = None - if bot_response.new_state == TriageState.TICKET_CREATED: - free_text_context = payload.answer_text if payload.answer_text else "Solicitação criada via URA" - ticket_id = await self._generate_ticket_with_context(attendance, free_text_context, payload.triage_id) - chatbot_tickets_total.inc() - self.logger.info("Ticket created from triage", extra={"triage_id": payload.triage_id, "ticket_id": ticket_id}) + await self.repository.save_attendance(payload.triage_id, attendance) - # Resolve o format do step id atual (fallback para unknown se for nulo) - formatted_step_id = f"step_{bot_response.new_state.value.lower()}" if bot_response.new_state else "step_unknown" - - if bot_response.is_finished: - self.logger.info("Triage finished", extra={"triage_id": payload.triage_id}) - attendance["status"] = "finished" - attendance["end_date"] = datetime.now(timezone.utc) - attendance["result"] = { - "type": "Ticket" if ticket_id else "Resolved", - "closure_message": bot_response.response_text - } - - data = TriageData( - triage_id=payload.triage_id, - finished=True, - closure_message=bot_response.response_text, - 
result=TriageResult(type="Ticket", id=str(ticket_id)) if ticket_id else None + return self._build_triage_data(payload.triage_id, bot_response) + + async def list_attendances( + self, + filters: AttendanceSearchFiltersDTO, + ) -> list[AttendanceResponse]: + docs = await self.repository.list_attendances(filters) + return [self._map_attendance_response(doc) for doc in docs] + + async def get_attendance(self, triage_id: str) -> AttendanceResponse: + attendance = await self.repository.find_attendance(triage_id) + + if attendance is None: + raise AttendanceNotFoundException(triage_id) + + return self._map_attendance_response(attendance) + + async def finish_attendance_pending_evaluation(self, triage_id: str) -> bool: + finished_at = datetime.now(UTC) + + try: + updated = await self.repository.finish_attendance_pending_evaluation( + triage_id, + finished_at.isoformat(), + ) + except Exception: + self.logger.exception( + "Failed to finish attendance from ticket close event", + extra={"triage_id": triage_id}, + ) + return False + + if updated: + self.logger.info( + "Attendance finished from ticket close event", + extra={"triage_id": triage_id}, ) else: - input_def = TriageInputDef( - mode="free_text" if bot_response.is_free_text else "quick_replies", - quick_replies=[QuickReply(label=op["label"], value=op["value"]) for op in bot_response.quick_replies] if bot_response.quick_replies else None + self.logger.debug( + "Skipping attendance finish from ticket close event because attendance was not found", + extra={"triage_id": triage_id}, ) - data = TriageData( - triage_id=payload.triage_id, - step_id=formatted_step_id, - message=bot_response.response_text, - input=input_def + + return updated + + async def set_evaluation( + self, + triage_id: str, + payload: EvaluationRequest, + ) -> EvaluationResponse: + attendance = await self.repository.find_attendance(triage_id) + + if attendance is None: + raise AttendanceNotFoundException(triage_id) + + if attendance.get("status") != 
AttendanceStatus.FINISHED.value: + raise AttendanceNotFinishedException() + + if attendance.get("evaluation") is not None: + raise AttendanceAlreadyEvaluatedException() + + evaluated_at = datetime.now(UTC) + + attendance["evaluation"] = AttendanceEvaluation( + rating=payload.rating, + ).model_dump(mode="json") + attendance["end_date"] = attendance.get("end_date") or evaluated_at.isoformat() + + await self.repository.save_attendance(triage_id, attendance) + + return EvaluationResponse( + triage_id=triage_id, + rating=payload.rating, + evaluated_at=evaluated_at, + ) + + async def _publish_triage_finished_and_resolve_ids( + self, + triage_id: str, + attendance: dict[str, Any], + ) -> tuple[str | None, str | None]: + event_payload = self._build_triage_finished_event(triage_id, attendance) + + await self.dispatcher.publish( + AppEvent.TRIAGE_FINISHED, + event_payload, + ) + + ticket_id: str | None = None + chat_id: str | None = None + + for _ in range(20): + ticket_id, chat_id = await self.repository.find_ticket_and_conversation_ids_by_triage_id( + triage_id, ) - await self.repository.save_attendance(payload.triage_id, attendance) + if ticket_id is not None and chat_id is not None: + break + + await asyncio.sleep(0.05) + + if ticket_id is None: + self.logger.warning( + "Triage finished event was published but ticket was not found", + extra={"triage_id": triage_id}, + ) + + if ticket_id is not None and chat_id is None: + self.logger.warning( + "Ticket was created but conversation was not found", + extra={ + "triage_id": triage_id, + "ticket_id": ticket_id, + }, + ) - meta = TriageResponseMeta( - timestamp=datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"), - success=True, - request_id=str(uuid.uuid4()) + return ticket_id, chat_id + + def _build_triage_finished_event( + self, + triage_id: str, + attendance: dict[str, Any], + ) -> TriageFinishedEventSchema: + client_raw = attendance["client"] + company_raw = client_raw.get("company") + + ticket_type, 
ticket_criticality, product_name, ticket_description = ( + self._derive_ticket_payload_from_triage(attendance) ) - return TriageResponseDTO(data=data, meta=meta) - - async def _generate_ticket_with_context(self, attendance: dict[str, Any], free_text: str, attendance_id: str) -> str: - full_triage: list[dict[str, Any]] = attendance.get("triage", []) - - demand_type = "issue" - criticality = "high" - product = "N/A" - - for interaction in full_triage: - step = interaction.get("step") - value = interaction.get("answer_value") - - if step == "A" and value in ["1", "2", "3"]: - if value == "1": product = "Product A" - elif value == "2": product = "Product B" - elif value == "3": product = "Product C" - - if step == "A" and value == "5": - demand_type = "access" - criticality = "medium" - - if step == "B": - if value == "1": - demand_type = "issue" - criticality = "high" - elif value == "2": - demand_type = "new_feature" - criticality = "low" - - triage_object_id = self._resolve_triage_object_id(attendance, attendance_id) - ticket = Ticket( - triage_id=triage_object_id, - type=self._resolve_ticket_type(demand_type), - criticality=self._resolve_ticket_criticality(criticality), - product=product, - status=TicketStatus.OPEN, - creation_date=datetime.now(timezone.utc), - description=free_text, - chat_ids=[], - agent_history=[], - client=self._build_ticket_client(attendance), - comments=[], + company_id: UUID | None = None + company_name: str | None = None + + if isinstance(company_raw, dict): + raw_company_id = company_raw.get("id") + + if raw_company_id is not None: + company_id = self._coerce_uuid(raw_company_id) + + company_name_raw = company_raw.get("name") + + if company_name_raw is not None: + company_name = str(company_name_raw) + + return TriageFinishedEventSchema( + client_id=self._coerce_uuid(client_raw["id"]), + client_email=str(client_raw["email"]), + client_name=str(client_raw["name"]), + company_id=company_id, + company_name=company_name, + 
attendance_id=PydanticObjectId(triage_id), + ticket_type=ticket_type, + ticket_criticality=ticket_criticality, + product_name=product_name, + ticket_description=ticket_description, ) - return await self.repository.create_ticket(ticket) - - def _resolve_triage_object_id(self, attendance: dict[str, Any], attendance_id: str) -> PydanticObjectId: - raw_id = attendance.get("_id", attendance_id) - if isinstance(raw_id, ObjectId): - return cast(PydanticObjectId, raw_id) - - raw_id_str = str(raw_id) - if ObjectId.is_valid(raw_id_str): - return cast(PydanticObjectId, ObjectId(raw_id_str)) - - raise ValueError("triage_id must be a valid ObjectId to create a ticket") - - def _resolve_ticket_type(self, demand_type: str) -> TicketType: - if demand_type == TicketType.ACCESS.value: - return TicketType.ACCESS - if demand_type == TicketType.NEW_FEATURE.value: - return TicketType.NEW_FEATURE - return TicketType.ISSUE - - def _resolve_ticket_criticality(self, criticality: str) -> TicketCriticality: - if criticality == TicketCriticality.MEDIUM.value: - return TicketCriticality.MEDIUM - if criticality == TicketCriticality.LOW.value: - return TicketCriticality.LOW - return TicketCriticality.HIGH - - def _build_ticket_client(self, attendance: dict[str, Any]) -> TicketClient: - client_data_raw = attendance.get("client", {}) - client_data: dict[str, Any] = ( - cast(dict[str, Any], client_data_raw) if isinstance(client_data_raw, dict) else {} + def _derive_ticket_payload_from_triage( + self, + attendance: dict[str, Any], + ) -> tuple[TicketType, TicketCriticality, str, str]: + triage: list[dict[str, Any]] = attendance.get("triage", []) + + main_menu_answer = self._answer_value_for_step(triage, TriageState.MAIN_MENU.value) + product_problem_answer = self._answer_value_for_step( + triage, + TriageState.CHOOSING_PRODUCT_PROBLEM.value, ) - client_id = self._parse_uuid(client_data.get("id")) or uuid4() - company_data_raw = client_data.get("company", {}) - company_data: dict[str, Any] = ( - 
cast(dict[str, Any], company_data_raw) if isinstance(company_data_raw, dict) else {} + product_name = self._resolve_product_name(main_menu_answer) + description = self._last_text_answer(triage) or "Solicitação criada pela URA digital." + + if main_menu_answer == "5": + return ( + TicketType.ACCESS, + TicketCriticality.MEDIUM, + "Sync Desk", + description, + ) + + if product_problem_answer == "2": + return ( + TicketType.NEW_FEATURE, + TicketCriticality.MEDIUM, + product_name, + description, + ) + + return ( + TicketType.ISSUE, + TicketCriticality.MEDIUM, + product_name, + description, ) - company_id = self._parse_uuid(company_data.get("id")) or client_id - email_raw = client_data.get("email") - email = str(email_raw) if email_raw else f"{client_id}@unknown.local" - name_raw = client_data.get("name") or client_data.get("username") or email - name = str(name_raw) - company_name_raw = company_data.get("name") - company_name = str(company_name_raw) if company_name_raw else "Unknown company" + def _answer_value_for_step( + self, + triage: list[dict[str, Any]], + step: str, + ) -> str | None: + for item in triage: + if item.get("step") == step and item.get("answer_value") is not None: + return str(item["answer_value"]) + return None + + def _last_text_answer(self, triage: list[dict[str, Any]]) -> str | None: + for item in reversed(triage): + answer_text = item.get("answer_text") - return TicketClient( - id=client_id, - name=name, - email=email, - company=TicketCompany(id=company_id, name=company_name), + if isinstance(answer_text, str) and answer_text.strip(): + return answer_text.strip() + + return None + + def _resolve_product_name(self, answer_value: str | None) -> str: + product_map = { + "1": "Produto A", + "2": "Produto B", + "3": "Produto C", + } + + return product_map.get(answer_value or "", "Produto não informado") + + def _record_step_metric(self, bot_response: InternalBotResponseDTO) -> None: + step_label = bot_response.new_state.value if 
bot_response.new_state else "unknown" + chatbot_messages_total.labels(step=step_label).inc() + + def _build_triage_step(self, bot_response: InternalBotResponseDTO) -> dict[str, Any]: + return { + "step": bot_response.new_state.value if bot_response.new_state else "UNKNOWN", + "question": bot_response.response_text, + "answer_text": None, + "answer_value": None, + "type": "free_text" if bot_response.is_free_text else "quick_replies", + } + + def _build_triage_data( + self, + triage_id: str, + bot_response: InternalBotResponseDTO, + ticket_id: str | None = None, + chat_id: str | None = None, + ) -> TriageData: + if bot_response.is_finished: + is_ticket = bot_response.new_state == TriageState.TICKET_CREATED + + return TriageData( + triage_id=triage_id, + finished=True, + closure_message=bot_response.response_text, + result=( + TriageResult( + type="Ticket", + id=triage_id, + ticket_id=ticket_id, + chat_id=chat_id, + ) + if is_ticket + else None + ), + ) + + formatted_step_id = ( + f"step_{bot_response.new_state.value.lower()}" + if bot_response.new_state + else "step_unknown" ) - def _parse_uuid(self, raw_value: Any) -> UUID | None: - if raw_value is None: - return None - if isinstance(raw_value, UUID): - return raw_value - try: - return UUID(str(raw_value)) - except (TypeError, ValueError): - return None + input_def = TriageInputDef( + mode="free_text" if bot_response.is_free_text else "quick_replies", + quick_replies=( + [ + QuickReply(label=option["label"], value=option["value"]) + for option in bot_response.quick_replies + ] + if bot_response.quick_replies + else None + ), + ) + + return TriageData( + triage_id=triage_id, + step_id=formatted_step_id, + message=bot_response.response_text, + input=input_def, + ) + + def _build_finished_triage_data_from_attendance( + self, + triage_id: str, + attendance: dict[str, Any], + ) -> TriageData: + result_raw = attendance.get("result") or {} + result_type = result_raw.get("type") + closure_message = 
result_raw.get("closure_message") or "Atendimento finalizado." + + return TriageData( + triage_id=triage_id, + finished=True, + closure_message=closure_message, + result=( + TriageResult( + type="Ticket", + id=triage_id, + ticket_id=result_raw.get("ticket_id"), + chat_id=result_raw.get("chat_id"), + ) + if result_type == "Ticket" + else None + ), + ) def _build_attendance_client_from_payload(self, payload: TriageInputDTO) -> AttendanceClient: missing_fields: list[str] = [] + if payload.client_id is None: missing_fields.append("client_id") + if not payload.client_name: missing_fields.append("client_name") + if not payload.client_email: missing_fields.append("client_email") if missing_fields: - raise AppHTTPException( - status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, - detail=( - "triage_id was not found. To auto-create attendance, provide fields: " - + ", ".join(missing_fields) - ), + detail_msg = ( + "triage_id was not found. To auto-create attendance, provide fields: " + + ", ".join(missing_fields) ) + raise MissingClientDataException(detail=detail_msg) client_id = payload.client_id client_name = payload.client_name client_email = payload.client_email + if client_id is None or client_name is None or client_email is None: - raise AppHTTPException( - status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, - detail="Missing client data to create attendance.", - ) + raise MissingClientDataException() return AttendanceClient( id=client_id, name=client_name, email=client_email, - ) \ No newline at end of file + ) + + def _map_attendance_response(self, attendance: dict[str, Any]) -> AttendanceResponse: + client_raw = attendance["client"] + result_raw = attendance.get("result") + evaluation_raw = attendance.get("evaluation") + + start_date = self._coerce_datetime(attendance["start_date"]) + end_date = self._coerce_datetime(attendance.get("end_date")) + + current_step_id, current_message, current_input = self._get_current_input(attendance) + + return AttendanceResponse( + 
triage_id=str(attendance["_id"]), + status=AttendanceStatus(attendance["status"]), + start_date=start_date, + end_date=end_date, + client=AttendanceClient( + id=self._coerce_uuid(client_raw["id"]), + name=client_raw["name"], + email=client_raw["email"], + company=client_raw.get("company"), + ), + triage=[ + TriageStepSchema( + step=item["step"], + question=item["question"], + answer_value=item.get("answer_value"), + answer_text=item.get("answer_text"), + ) + for item in attendance.get("triage", []) + ], + result=AttendanceResult(**result_raw) if result_raw else None, + evaluation=AttendanceEvaluation(**evaluation_raw) if evaluation_raw else None, + current_step_id=current_step_id, + current_message=current_message, + current_input=current_input, + ) + + def _get_current_input( + self, + attendance: dict[str, Any], + ) -> tuple[str | None, str | None, TriageInputDef | None]: + if attendance.get("status") == AttendanceStatus.FINISHED.value: + return None, None, None + + triage: list[dict[str, Any]] = attendance.get("triage", []) + + if not triage: + return None, None, None + + current = triage[-1] + step = current.get("step") + + if step is None: + return None, None, None + + try: + state = TriageState(step) + except ValueError: + return None, None, None + + bot_response = ChatbotFSM._get_state_response(state) + triage_data = self._build_triage_data(str(attendance["_id"]), bot_response) + + return triage_data.step_id, triage_data.message, triage_data.input + + def _coerce_uuid(self, value: Any) -> UUID: + if isinstance(value, UUID): + return value + + return UUID(str(value)) + + def _coerce_datetime(self, value: Any) -> datetime | None: + if value is None: + return None + + if isinstance(value, datetime): + return value + + return datetime.fromisoformat(str(value)) \ No newline at end of file diff --git a/app/domains/chatbot/swagger_utils.py b/app/domains/chatbot/swagger_utils.py new file mode 100644 index 0000000..7cd5db6 --- /dev/null +++ 
b/app/domains/chatbot/swagger_utils.py @@ -0,0 +1,258 @@ +from typing import Any + +from fastapi import status + +from app.domains.chatbot.schemas import ( + AttendanceResponse, + EvaluationResponse, + TriageData, +) +from app.schemas.response import ErrorContent, GenericSuccessContent + +_MAIN_MENU_MESSAGE = ( + "Olá! Bem vindo ao SyncDesk! Para começarmos, verifiquei no seu cadastro " + "e você possui os seguintes produtos disponíveis para manutenção. " + "Selecione a opção que indica sobre o que você quer falar hoje:" +) + +_MAIN_MENU_QUICK_REPLIES = [ + {"label": "Produto A", "value": "1"}, + {"label": "Produto B", "value": "2"}, + {"label": "Produto C", "value": "3"}, + {"label": "Desejo apenas tirar uma dúvida.", "value": "4"}, + {"label": "Desejo uma liberação de acesso no Sync Desk.", "value": "5"}, +] + +_TRIAGE_IN_PROGRESS_EXAMPLE: dict[str, Any] = { + "data": { + "triage_id": "69f40f33baca8f85e73cb741", + "step_id": "step_a", + "message": _MAIN_MENU_MESSAGE, + "input": { + "mode": "quick_replies", + "quick_replies": _MAIN_MENU_QUICK_REPLIES, + }, + }, + "meta": { + "timestamp": "2026-05-01T02:25:55.593576+00:00", + "success": True, + "request_id": "d87e6a1b-f3fe-4c60-bb20-f65f3299976f", + }, +} + +_TRIAGE_FINISHED_TICKET_EXAMPLE: dict[str, Any] = { + "data": { + "triage_id": "69f40f33baca8f85e73cb741", + "finished": True, + "closure_message": ( + "Aguarde, sua solicitação foi criada e será atribuída a um de nossos " + "analistas. Você já pode acompanhar o tema pela tela 'Minhas demandas'. " + "Obrigada!" + ), + "result": {"type": "Ticket", "id": "69f40f33baca8f85e73cb741"}, + }, + "meta": { + "timestamp": "2026-05-01T02:30:11.123456+00:00", + "success": True, + "request_id": "5b1c8d2e-7a44-4f9b-9cf3-2e8a4b1d6f70", + }, +} + +_TRIAGE_FINISHED_RESOLVED_EXAMPLE: dict[str, Any] = { + "data": { + "triage_id": "69f40f33baca8f85e73cb741", + "finished": True, + "closure_message": "Atendimento finalizado! 
Momento de avaliação do atendimento.", + "result": None, + }, + "meta": { + "timestamp": "2026-05-01T02:32:44.778899+00:00", + "success": True, + "request_id": "8e2a51fb-9eaa-4af6-95cc-bb0f25c91022", + }, +} + +create_attendance_responses: dict[int | str, dict[str, Any]] = { + 201: { + "description": "Attendance created and first triage step (MAIN_MENU) returned.", + "model": GenericSuccessContent[TriageData], + "content": { + "application/json": { + "example": _TRIAGE_IN_PROGRESS_EXAMPLE, + }, + }, + }, + 401: { + "description": "Missing or invalid authentication token.", + "model": ErrorContent, + }, +} + +create_attendance_swagger: dict[str, Any] = { + "summary": "Create a new attendance and start triage", + "description": ( + "Creates a new triage attendance session for the authenticated user " + "and immediately runs the first FSM transition, returning the MAIN_MENU " + "question. The client identity is derived from the JWT token; no request " + "body is required. The persisted attendance starts with `status = opened` " + "and a single triage step (`A`).\n\n" + "This is the only way to create an attendance — the webhook does not " + "create them." + ), + "status_code": status.HTTP_201_CREATED, + "response_model": GenericSuccessContent[TriageData], + "responses": create_attendance_responses, +} + +list_attendances_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": "List of attendances retrieved successfully.", + "model": GenericSuccessContent[list[AttendanceResponse]], + }, + 401: { + "description": "Missing or invalid authentication token.", + "model": ErrorContent, + }, +} + +list_attendances_swagger: dict[str, Any] = { + "summary": "List attendances", + "description": ( + "Returns triage attendances visible to the authenticated user. 
" + "All filters are optional and combined with AND; no filters returns all attendances.\n\n" + "**Query parameters:**\n" + "- `client_id` — UUID exato do cliente.\n" + "- `client_name` — Busca parcial (case-insensitive) pelo nome do cliente.\n" + "- `status` — Status do atendimento: `opened`, `in_progress`, `finished`.\n" + "- `result_type` — Tipo do resultado: `Ticket` ou `Resolved`.\n" + "- `start_date_from` — Início do intervalo de busca por data de início (inclusive, UTC).\n" + "- `start_date_to` — Fim do intervalo de busca por data de início (inclusive, UTC).\n" + "- `has_evaluation` — `true` = já avaliado, `false` = sem avaliação.\n" + "- `rating` — Nota exata de avaliação (1–5)." + ), + "response_model": GenericSuccessContent[list[AttendanceResponse]], + "responses": list_attendances_responses, +} + +webhook_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": ( + "Triage step processed successfully. The response shape depends on " + "whether the triage is still running (`step_id` + `message` + `input`) " + "or has finished (`finished: true` + `closure_message` + optional `result`)." 
+ ), + "model": GenericSuccessContent[TriageData], + "content": { + "application/json": { + "examples": { + "in_progress": { + "summary": "Triage step in progress", + "value": _TRIAGE_IN_PROGRESS_EXAMPLE, + }, + "finished_ticket": { + "summary": "Triage finished — ticket created", + "value": _TRIAGE_FINISHED_TICKET_EXAMPLE, + }, + "finished_resolved": { + "summary": "Triage finished — resolved without ticket", + "value": _TRIAGE_FINISHED_RESOLVED_EXAMPLE, + }, + }, + }, + }, + }, + 401: { + "description": "Missing or invalid authentication token.", + "model": ErrorContent, + }, + 403: { + "description": "User lacks the `chatbot:interact` permission.", + "model": ErrorContent, + }, + 404: { + "description": "Attendance not found for the given `triage_id`.", + "model": ErrorContent, + }, + 422: { + "description": ( + "Validation error: `answer_text` and `answer_value` sent together, or both are null." + ), + "model": ErrorContent, + }, +} + +webhook_swagger: dict[str, Any] = { + "summary": "Interact with the triage chatbot", + "description": ( + "Sends an answer to the current triage step and receives the next step " + "from the chatbot FSM. The attendance must already exist (created via " + "`POST /`). Exactly one of `answer_text` or `answer_value` must be " + "provided.\n\n" + "While the triage is running, the response carries `step_id`, `message` " + "and `input` (mode + quick_replies). When the triage finishes, the " + "response carries `finished: true`, a `closure_message`, and a `result` " + "block when a ticket was generated." 
+ ), + "response_model": GenericSuccessContent[TriageData], + "responses": webhook_responses, +} + +get_attendance_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": "Attendance details retrieved successfully.", + "model": GenericSuccessContent[AttendanceResponse], + }, + 401: { + "description": "Missing or invalid authentication token.", + "model": ErrorContent, + }, + 404: { + "description": "Attendance not found.", + "model": ErrorContent, + }, +} + +get_attendance_swagger: dict[str, Any] = { + "summary": "Get attendance details", + "description": ( + "Returns the full attendance record, including triage history, result, " + "evaluation, and the computed `needs_evaluation` flag." + ), + "response_model": GenericSuccessContent[AttendanceResponse], + "responses": get_attendance_responses, +} + +evaluation_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": "Evaluation submitted successfully.", + "model": GenericSuccessContent[EvaluationResponse], + }, + 401: { + "description": "Missing or invalid authentication token.", + "model": ErrorContent, + }, + 404: { + "description": "Attendance not found.", + "model": ErrorContent, + }, + 409: { + "description": ("Attendance is not yet finished, or has already been evaluated."), + "model": ErrorContent, + }, + 422: { + "description": "Rating value out of the allowed range (1-5).", + "model": ErrorContent, + }, +} + +evaluation_swagger: dict[str, Any] = { + "summary": "Submit attendance evaluation", + "description": ( + "Records the client's satisfaction rating for a finished attendance. " + "Can only be called once per attendance, and only after the triage " + "has been completed (`status = finished`)." 
+ ), + "status_code": status.HTTP_200_OK, + "response_model": GenericSuccessContent[EvaluationResponse], + "responses": evaluation_responses, +} diff --git a/app/domains/companies/README.md b/app/domains/companies/README.md new file mode 100644 index 0000000..14347f6 --- /dev/null +++ b/app/domains/companies/README.md @@ -0,0 +1,310 @@ +# Companies Module + +The companies module manages companies (organizations that contract our services), their associated products, and their users. It provides CRUD operations for companies, product association management, and user listing per company. + +## Architecture + +``` +companies/ +├── routers/ # HTTP endpoints (FastAPI router) +├── services.py # Business logic +├── repositories.py # Database access (SQLAlchemy) +├── schemas.py # Pydantic DTOs (request/response validation) +├── entities.py # Domain dataclasses (decoupled from ORM) +├── models.py # SQLAlchemy ORM models +├── dependencies.py # FastAPI dependency injection wiring +└── swagger_utils.py # Swagger/OpenAPI documentation configs +``` + +## Data Model + +### Companies + +| Field | Type | Description | +|--------------|---------------|------------------------------------------| +| `id` | `UUID` | Primary key | +| `legal_name` | `string(255)` | Unique, indexed (razao social) | +| `trade_name` | `string(255)` | Nullable, indexed (nome fantasia) | +| `tax_id` | `string(14)` | Unique, indexed (CNPJ) | +| `created_at` | `datetime` | Server default `now()` | +| `deleted_at` | `datetime` | Nullable (soft delete) | + +### Company Products (association table) + +| Field | Type | Description | +|----------------|------------|----------------------------------------------| +| `company_id` | `UUID` | FK -> `companies.id`, composite PK | +| `product_id` | `int` | FK -> `products.id`, composite PK | +| `bought_at` | `datetime` | Server default `now()` | +| `support_until`| `datetime` | Expiration date for the product support | + +### Relationships + +- **Companies -> Users**: 
One-to-many. A company has many users; a user optionally belongs to one company (`users.company_id` FK). +- **Companies <-> Products**: Many-to-many via `company_products` join table. + +--- + +## CRUD Endpoints + +All endpoints are mounted under `/api/companies` and require authentication via `Authorization: Bearer `. + +### Companies + +| Method | Path | Permission | Description | +|----------|-----------------------------------|-------------------------|------------------------------------| +| `POST` | `/` | `company:create` | Create a new company | +| `GET` | `/` | `company:list` | List companies (paginated) | +| `GET` | `/{company_id}` | `company:read` | Get company by ID | +| `PUT` | `/{company_id}` | `company:replace` | Replace company (full update) | +| `PATCH` | `/{company_id}` | `company:update` | Partial update | +| `DELETE` | `/{company_id}` | `company:soft_delete` | Soft-delete company | + +### Company Products + +| Method | Path | Permission | Description | +|----------|---------------------------------------------|--------------------------|----------------------------------| +| `POST` | `/{company_id}/products` | `company:add_product` | Add products to a company | +| `DELETE` | `/{company_id}/products` | `company:remove_products`| Remove products (batch) | +| `DELETE` | `/{company_id}/products/{product_id}` | `company:remove_product` | Remove a single product | + +### Company Users + +| Method | Path | Permission | Description | +|----------|-----------------------------------------|-------------------------|------------------------------------| +| `POST` | `/{company_id}/users` | `company:add_users` | Assign users to a company | +| `DELETE` | `/{company_id}/users` | `company:remove_users` | Remove users (batch) | +| `DELETE` | `/{company_id}/users/{user_id}` | `company:remove_user` | Remove a single user | +| `GET` | `/{company_id}/users` | `company:list_users` | List users of a company (paginated)| + +--- + +## Request / Response Examples + +### 
Create Company + +``` +POST /api/companies/ +Authorization: Bearer +``` + +**Request body:** +```json +{ + "legal_name": "Acme Tecnologia Ltda", + "trade_name": "Acme Tech", + "tax_id": "12345678000190" +} +``` + +**Response `201`:** +```json +{ + "data": { + "id": "uuid", + "legal_name": "Acme Tecnologia Ltda", + "trade_name": "Acme Tech", + "tax_id": "12345678000190", + "created_at": "2026-04-15T12:00:00" + }, + "meta": { "timestamp": "...", "success": true, "request_id": null } +} +``` + +**Error responses:** +- `409 Conflict` — a company with the same `tax_id` or `legal_name` already exists. +- `422 Unprocessable Entity` — request body validation failed. + +### List Companies (Paginated) + +``` +GET /api/companies/?page=1&limit=20 +Authorization: Bearer +``` + +**Response `200`:** +```json +{ + "data": { + "items": [ + { + "id": "uuid", + "legal_name": "Acme Tecnologia Ltda", + "trade_name": "Acme Tech", + "tax_id": "12345678000190" + } + ], + "total": 1, + "page": 1, + "limit": 20 + }, + "meta": { "timestamp": "...", "success": true, "request_id": null } +} +``` + +### Get Company by ID + +``` +GET /api/companies/{company_id} +Authorization: Bearer +``` + +**Response `200`:** +```json +{ + "data": { + "id": "uuid", + "legal_name": "Acme Tecnologia Ltda", + "trade_name": "Acme Tech", + "tax_id": "12345678000190", + "created_at": "2026-04-15T12:00:00" + }, + "meta": { "timestamp": "...", "success": true, "request_id": null } +} +``` + +**Error responses:** +- `404 Not Found` — company not found. + +### Add Products to a Company + +``` +POST /api/companies/{company_id}/products +Authorization: Bearer +``` + +**Request body:** +```json +{ + "product_ids": [1, 2, 3] +} +``` + +**Response `201`:** +```json +{ + "data": { ... }, + "meta": { "timestamp": "...", "success": true, "request_id": null } +} +``` + +**Error responses:** +- `404 Not Found` — company or one of the referenced products not found. 
+- `409 Conflict` — one or more products are already associated with this company. + +### Soft-Delete a Company + +``` +DELETE /api/companies/{company_id} +Authorization: Bearer +``` + +**Response `200`:** +```json +{ + "data": null, + "meta": { "timestamp": "...", "success": true, "request_id": null } +} +``` + +**Error responses:** +- `404 Not Found` — company not found. + +### Assign Users to a Company + +``` +POST /api/companies/{company_id}/users +Authorization: Bearer +``` + +**Request body:** +```json +{ + "user_ids": ["uuid-1", "uuid-2"] +} +``` + +**Response `200`:** +```json +{ + "data": null, + "meta": { "timestamp": "...", "success": true, "request_id": null } +} +``` + +**Error responses:** +- `404 Not Found` — company or one of the referenced users not found. +- `409 Conflict` — one or more users are already assigned to this company. + +### Remove a User from a Company + +``` +DELETE /api/companies/{company_id}/users/{user_id} +Authorization: Bearer +``` + +**Response `200`:** +```json +{ + "data": null, + "meta": { "timestamp": "...", "success": true, "request_id": null } +} +``` + +**Error responses:** +- `404 Not Found` — company or user not found, or user is not assigned to this company. + +### List Company Users (Paginated) + +``` +GET /api/companies/{company_id}/users?page=1&limit=20 +Authorization: Bearer +``` + +**Response `200`:** +```json +{ + "data": { + "items": [ + { + "id": "uuid", + "email": "user@example.com", + "username": "johndoe", + "name": "John Doe" + } + ], + "total": 1, + "page": 1, + "limit": 20 + }, + "meta": { "timestamp": "...", "success": true, "request_id": null } +} +``` + +**Error responses:** +- `404 Not Found` — company not found. + +--- + +## Validation Rules + +- **legal_name**: required, between 3 and 255 characters. +- **trade_name**: required on create/replace, optional on update, between 3 and 255 characters. +- **tax_id**: required, between 11 and 14 characters. 
Auto-normalized on input: non-alphanumeric characters (dots, dashes, slashes) are stripped. Example: `12.345.678/0001-90` becomes `12345678000190`. + +--- + +## User-Company Association + +Users are linked to companies via a `company_id` foreign key on the `users` table (defined in the auth domain). This field is: + +- **Nullable** — not all users belong to a company (e.g., admins, agents). +- **Indexed** — for efficient lookups of users by company. + +The business rule that client-role users must have a `company_id` is enforced at the **service layer**, not via database constraints, since it depends on cross-table role checks. + +--- + +## Implementation Status + +> **All endpoints currently return `501 Not Implemented`.** This is a temporary scaffold — each endpoint **must** be replaced with proper business logic in the service and repository layers as the domain is implemented. diff --git a/app/domains/companies/__init__.py b/app/domains/companies/__init__.py new file mode 100644 index 0000000..bf9b054 --- /dev/null +++ b/app/domains/companies/__init__.py @@ -0,0 +1,3 @@ +from .routers import company_router + +__all__ = ["company_router"] diff --git a/app/domains/companies/dependencies.py b/app/domains/companies/dependencies.py new file mode 100644 index 0000000..83385f7 --- /dev/null +++ b/app/domains/companies/dependencies.py @@ -0,0 +1,21 @@ +from typing import Annotated + +from fastapi import Depends + +from app.db.postgres.dependencies import PgSessionDep +from app.domains.companies.repositories import CompanyRepository +from app.domains.companies.services import CompanyService + + +def get_company_repository(db: PgSessionDep) -> CompanyRepository: + return CompanyRepository(db) + + +CompanyRepositoryDep = Annotated[CompanyRepository, Depends(get_company_repository)] + + +def get_company_service(repo: CompanyRepositoryDep) -> CompanyService: + return CompanyService(repo) + + +CompanyServiceDep = Annotated[CompanyService, Depends(get_company_service)] diff 
--git a/app/domains/companies/entities.py b/app/domains/companies/entities.py new file mode 100644 index 0000000..d6baf0c --- /dev/null +++ b/app/domains/companies/entities.py @@ -0,0 +1,21 @@ +from datetime import datetime +from uuid import UUID + +from pydantic.dataclasses import dataclass + + +@dataclass +class Company: + id: UUID + legal_name: str + tax_id: str + created_at: datetime + trade_name: str | None = None + + +@dataclass +class CompanyProduct: + company_id: UUID + product_id: int + bought_at: datetime + support_until: datetime diff --git a/app/domains/companies/models.py b/app/domains/companies/models.py new file mode 100644 index 0000000..be3b238 --- /dev/null +++ b/app/domains/companies/models.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +from datetime import datetime +from typing import TYPE_CHECKING +from uuid import UUID, uuid4 + +from sqlalchemy import Column, DateTime, ForeignKey, Integer, String, Table, func +from sqlalchemy.dialects.postgresql import UUID as PG_UUID +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from app.db.postgres.base import Base + +if TYPE_CHECKING: + from app.domains.auth.models import User + from app.domains.products.models import Product + + +company_products = Table( + "company_products", + Base.metadata, + Column("company_id", PG_UUID(as_uuid=True), ForeignKey("companies.id"), primary_key=True), + Column("product_id", Integer, ForeignKey("products.id"), primary_key=True), + Column("bought_at", DateTime, nullable=False, server_default=func.now()), + Column("support_until", DateTime, nullable=False), +) + + +class Company(Base): + __tablename__ = "companies" + + id: Mapped[UUID] = mapped_column(PG_UUID(as_uuid=True), primary_key=True, default=uuid4) + legal_name: Mapped[str] = mapped_column(String(255), unique=True, nullable=False, index=True) + trade_name: Mapped[str] = mapped_column(String(255), unique=False, nullable=True, index=True) + tax_id: Mapped[str] = mapped_column(String(14), 
nullable=False, unique=True, index=True) + + created_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.now() + ) + deleted_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) + + users: Mapped[list[User]] = relationship(back_populates="company") + products: Mapped[list["Product"]] = relationship( + secondary=company_products, back_populates="companies" + ) + + def __repr__(self) -> str: + return f"" diff --git a/app/domains/companies/repositories.py b/app/domains/companies/repositories.py new file mode 100644 index 0000000..a1319f7 --- /dev/null +++ b/app/domains/companies/repositories.py @@ -0,0 +1,155 @@ +from uuid import UUID +from datetime import datetime, UTC +from sqlalchemy import select, update, delete, exc, func +from sqlalchemy.dialects.postgresql import insert as pg_insert +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from app.domains.companies.entities import Company as CompanyEntity +from app.domains.companies.models import Company as CompanyModel, company_products +from app.domains.auth.models import User as UserModel +from app.db.exceptions import ResourceAlreadyExistsError +from app.domains.companies.schemas import CreateCompanyDTO, UpdateCompanyDTO, ReplaceCompanyDTO +from app.core.schemas import PaginatedItems + +class CompanyRepository: + def __init__(self, db: AsyncSession) -> None: + self.db = db + + def _to_entity(self, model: CompanyModel) -> CompanyEntity: + return CompanyEntity( + id=model.id, + legal_name=model.legal_name, + tax_id=model.tax_id, + created_at=model.created_at, + trade_name=model.trade_name, + ) + + async def create(self, dto: CreateCompanyDTO) -> CompanyEntity: + try: + model = CompanyModel(**dto.model_dump()) + self.db.add(model) + await self.db.flush() + await self.db.commit() + return self._to_entity(model) + except exc.IntegrityError as e: + await self.db.rollback() + raise ResourceAlreadyExistsError("Company", 
dto.tax_id) from e + + async def get_by_id(self, company_id: UUID) -> CompanyEntity | None: + result = await self.db.execute( + select(CompanyModel).where(CompanyModel.id == company_id, CompanyModel.deleted_at.is_(None)) + ) + model = result.scalar_one_or_none() + return self._to_entity(model) if model else None + + async def get_all_paginated(self, skip: int, limit: int) -> PaginatedItems[CompanyEntity]: + total_result = await self.db.execute( + select(func.count(CompanyModel.id)).where(CompanyModel.deleted_at.is_(None)) + ) + total = total_result.scalar_one() or 0 + + result = await self.db.execute( + select(CompanyModel) + .where(CompanyModel.deleted_at.is_(None)) + .offset(skip) + .limit(limit) + ) + models = result.scalars().all() + return PaginatedItems( + items=[self._to_entity(m) for m in models], + total=total, + page=(skip // limit) + 1, + limit=limit + ) + + async def update(self, company_id: UUID, dto: UpdateCompanyDTO | ReplaceCompanyDTO) -> CompanyEntity | None: + try: + result = await self.db.execute( + update(CompanyModel) + .where(CompanyModel.id == company_id, CompanyModel.deleted_at.is_(None)) + .values(**dto.model_dump(exclude_unset=True)) + .returning(CompanyModel) + ) + model = result.scalar_one_or_none() + if model: + await self.db.commit() + return self._to_entity(model) + return None + except exc.IntegrityError as e: + await self.db.rollback() + raise ResourceAlreadyExistsError("Company", "identifier") from e + + async def soft_delete(self, company_id: UUID) -> bool: + result = await self.db.execute( + update(CompanyModel) + .where(CompanyModel.id == company_id, CompanyModel.deleted_at.is_(None)) + .values(deleted_at=datetime.now(UTC).replace(tzinfo=None)) + .returning(CompanyModel.id) + ) + model_id = result.scalar_one_or_none() + if model_id: + await self.db.commit() + return True + return False + + async def associate_users(self, company_id: UUID, user_ids: list[UUID]) -> None: + if not user_ids: + return + await self.db.execute( + 
update(UserModel).where(UserModel.id.in_(user_ids)).values(company_id=company_id) + ) + await self.db.commit() + + async def disassociate_users(self, company_id: UUID, user_ids: list[UUID]) -> None: + if not user_ids: + return + await self.db.execute( + update(UserModel) + .where(UserModel.id.in_(user_ids), UserModel.company_id == company_id) + .values(company_id=None) + ) + await self.db.commit() + + async def get_company_users_paginated(self, company_id: UUID, skip: int, limit: int) -> tuple[list[UserModel], int]: + total_result = await self.db.execute( + select(func.count(UserModel.id)).where(UserModel.company_id == company_id, UserModel.deleted_at.is_(None)) + ) + total = total_result.scalar_one() or 0 + + result = await self.db.execute( + select(UserModel) + .options(selectinload(UserModel.roles)) + .where(UserModel.company_id == company_id, UserModel.deleted_at.is_(None)) + .offset(skip) + .limit(limit) + ) + return list(result.scalars().all()), total + + async def add_products(self, company_id: UUID, product_ids: list[int]) -> None: + if not product_ids: + return + from datetime import timedelta + now = datetime.now(UTC).replace(tzinfo=None) + future = now + timedelta(days=365) + + values = [ + {"company_id": company_id, "product_id": pid, "bought_at": now, "support_until": future} + for pid in set(product_ids) + ] + + try: + await self.db.execute(pg_insert(company_products).values(values).on_conflict_do_nothing()) + await self.db.commit() + except exc.IntegrityError as e: + await self.db.rollback() + raise ValueError("One or more product_ids do not exist") from e + + async def remove_products(self, company_id: UUID, product_ids: list[int]) -> None: + if not product_ids: + return + await self.db.execute( + delete(company_products) + .where(company_products.c.company_id == company_id, company_products.c.product_id.in_(product_ids)) + ) + await self.db.commit() \ No newline at end of file diff --git a/app/domains/companies/routers/__init__.py 
b/app/domains/companies/routers/__init__.py new file mode 100644 index 0000000..8a7ee72 --- /dev/null +++ b/app/domains/companies/routers/__init__.py @@ -0,0 +1,3 @@ +from .company_router import company_router + +__all__ = ["company_router"] diff --git a/app/domains/companies/routers/company_router.py b/app/domains/companies/routers/company_router.py new file mode 100644 index 0000000..af6dc05 --- /dev/null +++ b/app/domains/companies/routers/company_router.py @@ -0,0 +1,175 @@ +from uuid import UUID +from fastapi import APIRouter, Query, status +from fastapi.encoders import jsonable_encoder +from fastapi.responses import JSONResponse + +from app.core.dependencies import ResponseFactoryDep +from app.core.exceptions import AppHTTPException +from app.db.exceptions import ResourceAlreadyExistsError +from app.domains.auth.dependencies import CurrentUserSessionDep, require_permission +from app.domains.auth.schemas import UserResponseDTO +from app.domains.companies.dependencies import CompanyServiceDep +from app.domains.companies.schemas import ( + AddCompanyProductDTO, + AddCompanyUsersDTO, + CreateCompanyDTO, + RemoveCompanyProductDTO, + RemoveCompanyUsersDTO, + ReplaceCompanyDTO, + UpdateCompanyDTO, +) +from app.domains.companies.swagger_utils import ( + add_products_swagger, + add_users_swagger, + create_company_swagger, + get_companies_swagger, + get_company_swagger, + get_company_users_swagger, + remove_product_swagger, + remove_products_batch_swagger, + remove_user_swagger, + remove_users_batch_swagger, + replace_company_swagger, + soft_delete_company_swagger, + update_company_swagger, +) + +company_router = APIRouter(tags=["Companies"]) + +@company_router.post("/", dependencies=[require_permission("company:create")], **create_company_swagger) +async def create_company( + dto: CreateCompanyDTO, auth: CurrentUserSessionDep, service: CompanyServiceDep, response: ResponseFactoryDep, +) -> JSONResponse: + try: + company = await service.create(dto) + return 
response.success(data=jsonable_encoder(company), status_code=status.HTTP_201_CREATED) + except ResourceAlreadyExistsError as e: + raise AppHTTPException(status_code=status.HTTP_409_CONFLICT, detail=str(e)) from e + +@company_router.get("/", dependencies=[require_permission("company:list")], **get_companies_swagger) +async def get_companies( + auth: CurrentUserSessionDep, service: CompanyServiceDep, response: ResponseFactoryDep, + page: int = Query(default=1, ge=1), limit: int = Query(default=20, ge=1), +) -> JSONResponse: + res = await service.get_all_paginated(page, limit) + return response.success(data=res.model_dump(mode="json"), status_code=status.HTTP_200_OK) + +@company_router.get("/{company_id}", dependencies=[require_permission("company:read")], **get_company_swagger) +async def get_company( + company_id: UUID, auth: CurrentUserSessionDep, service: CompanyServiceDep, response: ResponseFactoryDep, +) -> JSONResponse: + company = await service.get_by_id(company_id) + if not company: + raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Company not found") + return response.success(data=jsonable_encoder(company), status_code=status.HTTP_200_OK) + +@company_router.put("/{company_id}", dependencies=[require_permission("company:replace")], **replace_company_swagger) +async def replace_company( + company_id: UUID, dto: ReplaceCompanyDTO, auth: CurrentUserSessionDep, service: CompanyServiceDep, response: ResponseFactoryDep, +) -> JSONResponse: + try: + company = await service.update(company_id, dto) + if not company: + raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Company not found") + return response.success(data=jsonable_encoder(company), status_code=status.HTTP_200_OK) + except ResourceAlreadyExistsError as e: + raise AppHTTPException(status_code=status.HTTP_409_CONFLICT, detail=str(e)) from e + +@company_router.patch("/{company_id}", dependencies=[require_permission("company:update")], **update_company_swagger) +async 
def update_company( + company_id: UUID, dto: UpdateCompanyDTO, auth: CurrentUserSessionDep, service: CompanyServiceDep, response: ResponseFactoryDep, +) -> JSONResponse: + try: + company = await service.update(company_id, dto) + if not company: + raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Company not found") + return response.success(data=jsonable_encoder(company), status_code=status.HTTP_200_OK) + except ResourceAlreadyExistsError as e: + raise AppHTTPException(status_code=status.HTTP_409_CONFLICT, detail=str(e)) from e + +@company_router.delete("/{company_id}", dependencies=[require_permission("company:soft_delete")], **soft_delete_company_swagger) +async def soft_delete_company( + company_id: UUID, auth: CurrentUserSessionDep, service: CompanyServiceDep, response: ResponseFactoryDep, +) -> JSONResponse: + deleted = await service.soft_delete(company_id) + if not deleted: + raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Company not found") + return response.success(data=None, status_code=status.HTTP_200_OK) + +@company_router.post("/{company_id}/products", dependencies=[require_permission("company:add_product")], **add_products_swagger) +async def add_company_products( + company_id: UUID, dto: AddCompanyProductDTO, auth: CurrentUserSessionDep, service: CompanyServiceDep, response: ResponseFactoryDep, +) -> JSONResponse: + try: + await service.add_products(company_id, dto.product_ids) + return response.success(data=None, status_code=status.HTTP_201_CREATED) + except ValueError as e: + raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e + +@company_router.delete("/{company_id}/products", dependencies=[require_permission("company:remove_products")], **remove_products_batch_swagger) +async def remove_company_products_batch( + company_id: UUID, dto: RemoveCompanyProductDTO, auth: CurrentUserSessionDep, service: CompanyServiceDep, response: ResponseFactoryDep, +) -> JSONResponse: + try: + 
await service.remove_products(company_id, dto.product_ids) + return response.success(data=None, status_code=status.HTTP_200_OK) + except ValueError as e: + raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e + +@company_router.delete("/{company_id}/products/{product_id}", dependencies=[require_permission("company:remove_product")], **remove_product_swagger) +async def remove_company_product( + company_id: UUID, product_id: int, auth: CurrentUserSessionDep, service: CompanyServiceDep, response: ResponseFactoryDep, +) -> JSONResponse: + try: + await service.remove_products(company_id, [product_id]) + return response.success(data=None, status_code=status.HTTP_200_OK) + except ValueError as e: + raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e + +@company_router.post("/{company_id}/users", dependencies=[require_permission("company:add_users")], **add_users_swagger) +async def add_company_users( + company_id: UUID, dto: AddCompanyUsersDTO, auth: CurrentUserSessionDep, service: CompanyServiceDep, response: ResponseFactoryDep, +) -> JSONResponse: + try: + await service.associate_users(company_id, dto.user_ids) + return response.success(data=None, status_code=status.HTTP_201_CREATED) + except ValueError as e: + raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e + +@company_router.delete("/{company_id}/users", dependencies=[require_permission("company:remove_users")], **remove_users_batch_swagger) +async def remove_company_users_batch( + company_id: UUID, dto: RemoveCompanyUsersDTO, auth: CurrentUserSessionDep, service: CompanyServiceDep, response: ResponseFactoryDep, +) -> JSONResponse: + try: + await service.disassociate_users(company_id, dto.user_ids) + return response.success(data=None, status_code=status.HTTP_200_OK) + except ValueError as e: + raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e + 
+@company_router.delete("/{company_id}/users/{user_id}", dependencies=[require_permission("company:remove_user")], **remove_user_swagger) +async def remove_company_user( + company_id: UUID, user_id: UUID, auth: CurrentUserSessionDep, service: CompanyServiceDep, response: ResponseFactoryDep, +) -> JSONResponse: + try: + await service.disassociate_users(company_id, [user_id]) + return response.success(data=None, status_code=status.HTTP_200_OK) + except ValueError as e: + raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e + +@company_router.get("/{company_id}/users", dependencies=[require_permission("company:list_users")], **get_company_users_swagger) +async def get_company_users( + company_id: UUID, auth: CurrentUserSessionDep, service: CompanyServiceDep, response: ResponseFactoryDep, + page: int = Query(default=1, ge=1), limit: int = Query(default=20, ge=1), +) -> JSONResponse: + users, total = await service.get_company_users_paginated(company_id, page, limit) + + # Proteção de vazamento de dados importada da Entrega 1 + safe_items = [UserResponseDTO.model_validate(u).model_dump(mode="json") for u in users] + + data = { + "items": safe_items, + "total": total, + "page": page, + "limit": limit + } + return response.success(data=data, status_code=status.HTTP_200_OK) \ No newline at end of file diff --git a/app/domains/companies/schemas.py b/app/domains/companies/schemas.py new file mode 100644 index 0000000..aecace2 --- /dev/null +++ b/app/domains/companies/schemas.py @@ -0,0 +1,94 @@ +from uuid import UUID + +from pydantic import model_validator + +from app.core.schemas import BaseDTO, PaginatedItems +from app.domains.auth.entities import User +from app.domains.companies.entities import Company, CompanyProduct +from app.schemas.response import GenericSuccessContent + + +def validate_company_fields( + legal_name: str | None, trade_name: str | None, tax_id: str | None +) -> None: + errors: list[str] = [] + + def check_length(value: str 
| None, field: str, min_len: int, max_len: int) -> None: + if value is not None and not (min_len <= len(value) <= max_len): + errors.append(f"Company {field} must be between {min_len} and {max_len} characters") + + check_length(legal_name, "legal_name", 3, 255) + check_length(trade_name, "trade_name", 3, 255) + check_length(tax_id, "tax_id", 11, 14) + + if errors: + raise ValueError("; ".join(errors)) + + +def normalize_tax_id(tax_id: str) -> str: + tax_id = tax_id.lower() + norm = "".join(char for char in tax_id if char.isalnum()) + return norm + + +class CreateCompanyDTO(BaseDTO): + legal_name: str + trade_name: str + tax_id: str + + @model_validator(mode="after") + def validate_fields(self) -> "CreateCompanyDTO": + self.tax_id = normalize_tax_id(self.tax_id) + validate_company_fields(self.legal_name, self.trade_name, self.tax_id) + return self + + +class UpdateCompanyDTO(BaseDTO): + legal_name: str | None = None + trade_name: str | None = None + tax_id: str | None = None + + @model_validator(mode="after") + def validate_fields(self) -> "UpdateCompanyDTO": + if self.legal_name is None and self.trade_name is None and self.tax_id is None: + raise ValueError("Company update payload must have at least one valid attribute") + if self.tax_id is not None: + self.tax_id = normalize_tax_id(self.tax_id) + validate_company_fields(self.legal_name, self.trade_name, self.tax_id) + return self + + +class ReplaceCompanyDTO(CreateCompanyDTO): + pass + + +class AddCompanyProductDTO(BaseDTO): + product_ids: list[int] + + +class RemoveCompanyProductDTO(AddCompanyProductDTO): + pass + + +UpdateCompanyResponse = GenericSuccessContent[Company] + +ReplaceCompanyResponse = GenericSuccessContent[Company] + +AddCompanyProductResponse = GenericSuccessContent[list[CompanyProduct]] + +CreateCompanyResponse = GenericSuccessContent[Company] + +GetCompaniesResponse = GenericSuccessContent[PaginatedItems[Company]] + +GetCompanyResponse = GenericSuccessContent[Company] + + +class 
AddCompanyUsersDTO(BaseDTO): + user_ids: list[UUID] + + +class RemoveCompanyUsersDTO(AddCompanyUsersDTO): + pass + + +GetCompanyUsersResponse = GenericSuccessContent[PaginatedItems[User]] diff --git a/app/domains/companies/services.py b/app/domains/companies/services.py new file mode 100644 index 0000000..cd16ca7 --- /dev/null +++ b/app/domains/companies/services.py @@ -0,0 +1,50 @@ +from typing import Any +from uuid import UUID +from app.domains.companies.repositories import CompanyRepository +from app.domains.companies.entities import Company as CompanyEntity +from app.domains.companies.schemas import CreateCompanyDTO, UpdateCompanyDTO, ReplaceCompanyDTO +from app.core.schemas import PaginatedItems + +class CompanyService: + def __init__(self, repo: CompanyRepository) -> None: + self.repo = repo + + async def create(self, dto: CreateCompanyDTO) -> CompanyEntity: + return await self.repo.create(dto) + + async def get_by_id(self, company_id: UUID) -> CompanyEntity | None: + return await self.repo.get_by_id(company_id) + + async def get_all_paginated(self, page: int, limit: int) -> PaginatedItems[CompanyEntity]: + skip = (page - 1) * limit + return await self.repo.get_all_paginated(skip, limit) + + async def update(self, company_id: UUID, dto: UpdateCompanyDTO | ReplaceCompanyDTO) -> CompanyEntity | None: + return await self.repo.update(company_id, dto) + + async def soft_delete(self, company_id: UUID) -> bool: + return await self.repo.soft_delete(company_id) + + async def associate_users(self, company_id: UUID, user_ids: list[UUID]) -> None: + if not await self.get_by_id(company_id): + raise ValueError(f"Company {company_id} not found") + await self.repo.associate_users(company_id, user_ids) + + async def disassociate_users(self, company_id: UUID, user_ids: list[UUID]) -> None: + if not await self.get_by_id(company_id): + raise ValueError(f"Company {company_id} not found") + await self.repo.disassociate_users(company_id, user_ids) + + async def 
get_company_users_paginated(self, company_id: UUID, page: int, limit: int) -> tuple[list[Any], int]: + skip = (page - 1) * limit + return await self.repo.get_company_users_paginated(company_id, skip, limit) + + async def add_products(self, company_id: UUID, product_ids: list[int]) -> None: + if not await self.get_by_id(company_id): + raise ValueError(f"Company {company_id} not found") + await self.repo.add_products(company_id, product_ids) + + async def remove_products(self, company_id: UUID, product_ids: list[int]) -> None: + if not await self.get_by_id(company_id): + raise ValueError(f"Company {company_id} not found") + await self.repo.remove_products(company_id, product_ids) \ No newline at end of file diff --git a/app/domains/companies/swagger_utils.py b/app/domains/companies/swagger_utils.py new file mode 100644 index 0000000..d74a1aa --- /dev/null +++ b/app/domains/companies/swagger_utils.py @@ -0,0 +1,293 @@ +from typing import Any + +from fastapi import status + +from app.domains.companies.schemas import ( + AddCompanyProductResponse, + CreateCompanyResponse, + GetCompaniesResponse, + GetCompanyResponse, + GetCompanyUsersResponse, + ReplaceCompanyResponse, + UpdateCompanyResponse, +) +from app.schemas.response import ErrorContent, GenericSuccessContent + +create_company_responses: dict[int | str, dict[str, Any]] = { + 201: { + "description": "Company created successfully.", + "model": CreateCompanyResponse, + }, + 409: { + "description": "A company with the same tax_id or legal_name already exists.", + "model": ErrorContent, + }, + 422: { + "description": "Request body validation failed.", + "model": ErrorContent, + }, +} + +create_company_swagger: dict[str, Any] = { + "summary": "Create a new company", + "description": ( + "Registers a new company in the system. " + "Returns 409 if a company with the same tax_id or legal_name already exists." 
+ ), + "status_code": status.HTTP_201_CREATED, + "response_model": CreateCompanyResponse, + "responses": create_company_responses, +} + +get_companies_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": "Paginated list of companies retrieved successfully.", + "model": GetCompaniesResponse, + }, +} + +get_companies_swagger: dict[str, Any] = { + "summary": "List companies", + "description": "Returns a paginated list of companies.", + "response_model": GetCompaniesResponse, + "responses": get_companies_responses, +} + +get_company_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": "Company retrieved successfully.", + "model": GetCompanyResponse, + }, + 404: { + "description": "Company not found.", + "model": ErrorContent, + }, +} + +get_company_swagger: dict[str, Any] = { + "summary": "Get a company by ID", + "description": "Returns a single company by its UUID.", + "response_model": GetCompanyResponse, + "responses": get_company_responses, +} + +replace_company_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": "Company replaced successfully.", + "model": ReplaceCompanyResponse, + }, + 404: { + "description": "Company not found.", + "model": ErrorContent, + }, + 409: { + "description": "A company with the same tax_id or legal_name already exists.", + "model": ErrorContent, + }, + 422: { + "description": "Request body validation failed.", + "model": ErrorContent, + }, +} + +replace_company_swagger: dict[str, Any] = { + "summary": "Replace a company", + "description": "Fully replaces all fields of an existing company.", + "response_model": ReplaceCompanyResponse, + "responses": replace_company_responses, +} + +update_company_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": "Company updated successfully.", + "model": UpdateCompanyResponse, + }, + 404: { + "description": "Company not found.", + "model": ErrorContent, + }, + 409: { + "description": "A company with the same tax_id or 
legal_name already exists.", + "model": ErrorContent, + }, + 422: { + "description": "Request body validation failed.", + "model": ErrorContent, + }, +} + +update_company_swagger: dict[str, Any] = { + "summary": "Partially update a company", + "description": "Updates only the provided fields of an existing company.", + "response_model": UpdateCompanyResponse, + "responses": update_company_responses, +} + +soft_delete_company_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": "Company soft-deleted successfully.", + "model": GenericSuccessContent[None], + }, + 404: { + "description": "Company not found.", + "model": ErrorContent, + }, +} + +soft_delete_company_swagger: dict[str, Any] = { + "summary": "Soft-delete a company", + "description": "Marks a company as deleted without removing it from the database.", + "response_model": GenericSuccessContent[None], + "responses": soft_delete_company_responses, +} + +add_products_responses: dict[int | str, dict[str, Any]] = { + 201: { + "description": "Products added to the company successfully.", + "model": AddCompanyProductResponse, + }, + 404: { + "description": "Company or one of the referenced products not found.", + "model": ErrorContent, + }, + 409: { + "description": "One or more products are already associated with this company.", + "model": ErrorContent, + }, + 422: { + "description": "Request body validation failed.", + "model": ErrorContent, + }, +} + +add_products_swagger: dict[str, Any] = { + "summary": "Add products to a company", + "description": "Associates one or more products with an existing company.", + "status_code": status.HTTP_201_CREATED, + "response_model": AddCompanyProductResponse, + "responses": add_products_responses, +} + +remove_products_batch_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": "Products removed from the company successfully.", + "model": GenericSuccessContent[None], + }, + 404: { + "description": "Company or one of the referenced 
products not found.", + "model": ErrorContent, + }, + 422: { + "description": "Request body validation failed.", + "model": ErrorContent, + }, +} + +remove_products_batch_swagger: dict[str, Any] = { + "summary": "Remove products from a company (batch)", + "description": "Removes one or more product associations from an existing company.", + "response_model": GenericSuccessContent[None], + "responses": remove_products_batch_responses, +} + +remove_product_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": "Product removed from the company successfully.", + "model": GenericSuccessContent[None], + }, + 404: { + "description": "Company or product association not found.", + "model": ErrorContent, + }, +} + +remove_product_swagger: dict[str, Any] = { + "summary": "Remove a single product from a company", + "description": "Removes a specific product association from an existing company.", + "response_model": GenericSuccessContent[None], + "responses": remove_product_responses, +} + +add_users_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": "Users assigned to the company successfully.", + "model": GenericSuccessContent[None], + }, + 404: { + "description": "Company or one of the referenced users not found.", + "model": ErrorContent, + }, + 409: { + "description": "One or more users are already assigned to this company.", + "model": ErrorContent, + }, + 422: { + "description": "Request body validation failed.", + "model": ErrorContent, + }, +} + +add_users_swagger: dict[str, Any] = { + "summary": "Assign users to a company", + "description": "Sets the company_id on one or more users, associating them with this company.", + "response_model": GenericSuccessContent[None], + "responses": add_users_responses, +} + +remove_user_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": "User removed from the company successfully.", + "model": GenericSuccessContent[None], + }, + 404: { + "description": "Company or user 
not found, or user is not assigned to this company.", + "model": ErrorContent, + }, +} + +remove_user_swagger: dict[str, Any] = { + "summary": "Remove a user from a company", + "description": "Clears the company_id on a specific user, disassociating them from company.", + "response_model": GenericSuccessContent[None], + "responses": remove_user_responses, +} + +remove_users_batch_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": "Users removed from the company successfully.", + "model": GenericSuccessContent[None], + }, + 404: { + "description": "Company or one of the referenced users not found.", + "model": ErrorContent, + }, + 422: { + "description": "Request body validation failed.", + "model": ErrorContent, + }, +} + +remove_users_batch_swagger: dict[str, Any] = { + "summary": "Remove users from a company (batch)", + "description": "Clears the company_id on one or more users, disassociating them from company.", + "response_model": GenericSuccessContent[None], + "responses": remove_users_batch_responses, +} + +get_company_users_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": "List of users belonging to the company.", + "model": GetCompanyUsersResponse, + }, + 404: { + "description": "Company not found.", + "model": ErrorContent, + }, +} + +get_company_users_swagger: dict[str, Any] = { + "summary": "List users of a company", + "description": "Returns a paginated list of users associated with the given company.", + "response_model": GetCompanyUsersResponse, + "responses": get_company_users_responses, +} diff --git a/app/domains/live_chat/README.md b/app/domains/live_chat/README.md index 6930d3f..3ffdb0f 100644 --- a/app/domains/live_chat/README.md +++ b/app/domains/live_chat/README.md @@ -49,6 +49,8 @@ live_chat/ ├── entities.py # Pydantic/Beanie models for Conversation, ChatMessage ├── schemas.py # Pydantic DTOs for request/response validation ├── chat_manager.py # In-memory chat room manager (singleton) +├── 
listeners.py # Event handlers for ticket lifecycle events +├── metrics.py # Prometheus metrics (chat_messages_total) ├── dependencies.py # FastAPI dependency injection wiring ├── exceptions.py # Domain-specific exceptions ``` @@ -60,6 +62,24 @@ live_chat/ 3. **Send/Receive Messages**: Messages are exchanged in real time. Each message is validated (including content size limits via `MAX_CHAT_MESSAGE_CONTENT_SIZE` setting), persisted, and broadcast to all participants in the room. 4. **Room Lifecycle**: Chat rooms are created on demand and deleted when empty. +### Event-Driven Conversation Lifecycle + +`ConversationListener` reacts to ticket lifecycle events emitted by the event dispatcher. It automates conversation management so that conversations stay in sync with ticket state without manual intervention. + +| Event | Behavior | +|---|---| +| `TICKET_CREATED` | Creates the first conversation (index 0) for the ticket. Idempotent — skips if a conversation already exists. | +| `TICKET_ASSIGNEE_UPDATED` | Closes the current conversation with a system message, opens a new one for the new agent, and links them via `parent_id`/`children_ids`. | +| `TICKET_ESCALATED` | Same as assignee update, plus posts an escalation system message to the new conversation. | +| `TICKET_STATUS_UPDATED` | Posts a system message with the new status to the current conversation. | +| `TICKET_CLOSED` | Posts a closing system message and ends the current conversation. Idempotent — skips if already closed. | + +Conversation chaining is handled by `ConversationService.append_conversation_to_ticket`, which: +1. Creates the new conversation **before** closing the old one (so a failure doesn't leave the ticket without an open conversation). +2. Posts the closing message on the old conversation. +3. Ends the old conversation. +4. Links old → new via `children_ids`. 
+ ## Data Models ### Conversation @@ -67,7 +87,8 @@ live_chat/ - `ticket_id` (ObjectId): Associated ticket - `client_id` (UUID): Client user - `agent_id` (UUID, optional): Agent user -- `parent_id` (ObjectId, optional): Parent conversation (for threading) +- `sequential_index` (int): Zero-based index within the ticket's conversation chain. Unique together with `ticket_id`. +- `parent_id` (ObjectId, optional): Parent conversation (for chaining) - `children_ids` (list[ObjectId]): Child conversations - `started_at` (datetime): Start timestamp - `finished_at` (datetime, optional): End timestamp @@ -173,5 +194,4 @@ If a message is invalid, the server responds with an error message but keeps the | 🔴 Critical | Test WebSocket endpoint unsecured | `/test/room/{conversation_id}` has no authentication/authorization; must be removed or secured before production. | | 🔴 High | No logic for conversations >16MB (MongoDB limit) | Conversations exceeding 16MB will fail to save; needs pagination or splitting. | | 🔴 High | No treatment for files | File messages are accepted as base64 but not processed; needs HTTP endpoint, storage, and URL generation. | -| 🟠 Medium | No logic to handle the scaling of a conversation | No implementation for creating child conversations. | | 🔵 Low | Duplicate parent_id validation | Both service and repository check parent existence, causing double DB queries. 
| diff --git a/app/domains/live_chat/listeners.py b/app/domains/live_chat/listeners.py new file mode 100644 index 0000000..d35dc86 --- /dev/null +++ b/app/domains/live_chat/listeners.py @@ -0,0 +1,127 @@ +from app.core.event_dispatcher.decorators import event_handler +from app.core.event_dispatcher.enums import AppEvent +from app.core.event_dispatcher.event_dispatcher import EventDispatcher +from app.core.event_dispatcher.schemas import ( + TicketAssigneeUpdatedEventSchema, + TicketClosedEventSchema, + TicketCreatedEventSchema, + TicketEscalatedEventSchema, +) +from app.db.mongo.db import mongo_db +from app.domains.live_chat.entities import ChatMessage +from app.domains.live_chat.metrics import ( + listener_conversations_closed_total, + listener_conversations_created_total, +) +from app.domains.live_chat.repositories.conversation_repository import ConversationRepository +from app.domains.live_chat.services.conversation_service import ConversationService +from app.domains.ticket.models import Ticket + + +class ConversationListener: + def __init__(self, service: ConversationService) -> None: + self.service = service + + @event_handler(TicketCreatedEventSchema) + async def on_ticket_created(self, schema: TicketCreatedEventSchema) -> None: + has_conversation = await self.service.ticket_has_conversation(schema.ticket_id) + + if has_conversation: + return + + conversation = await self.service.append_conversation_to_ticket( + ticket_id=schema.ticket_id, + client_id=schema.client_id, + agent_id=schema.agent_id, + ) + + if conversation.id is not None: + await self._attach_chat_to_ticket(str(schema.ticket_id), str(conversation.id)) + + listener_conversations_created_total.labels(event="ticket_created").inc() + + @event_handler(TicketAssigneeUpdatedEventSchema) + async def on_ticket_assignee_updated( + self, + schema: TicketAssigneeUpdatedEventSchema, + ) -> None: + conversation = await self.service.get_latest_open_by_ticket_id(schema.ticket_id) + + if conversation is None: + 
conversation = await self.service.append_conversation_to_ticket( + ticket_id=schema.ticket_id, + client_id=schema.client_id, + agent_id=schema.new_agent_id, + ) + + if conversation.id is not None: + await self._attach_chat_to_ticket(str(schema.ticket_id), str(conversation.id)) + + listener_conversations_created_total.labels(event="ticket_assignee_updated").inc() + + return + + if conversation.id is None: + return + + await self.service.attribute_agent(conversation.id, schema.new_agent_id) + + await self.service.add_message_to_conversation( + conversation.id, + ChatMessage.create( + conversation_id=conversation.id, + sender_id="System", + type="text", + content="Chamado atribuído a um atendente.", + ), + ) + + @event_handler(TicketEscalatedEventSchema) + async def on_ticket_escalated(self, schema: TicketEscalatedEventSchema) -> None: + conversation = await self.service.append_conversation_to_ticket( + ticket_id=schema.ticket_id, + client_id=schema.client_id, + agent_id=schema.new_agent_id, + closing_message="Atendimento transferido para outro nível de suporte.", + ) + + if conversation.id is not None: + await self._attach_chat_to_ticket(str(schema.ticket_id), str(conversation.id)) + + listener_conversations_created_total.labels(event="ticket_escalated").inc() + listener_conversations_closed_total.labels(event="ticket_escalated").inc() + + @event_handler(TicketClosedEventSchema) + async def on_ticket_closed(self, schema: TicketClosedEventSchema) -> None: + closed = await self.service.close_active_ticket_conversation( + ticket_id=schema.ticket_id, + system_message="Atendimento encerrado.", + ) + + if closed is not None: + listener_conversations_closed_total.labels(event="ticket_closed").inc() + + async def _attach_chat_to_ticket(self, ticket_id: str, chat_id: str) -> None: + ticket = await Ticket.get(ticket_id) + + if ticket is None: + return + + if chat_id in [str(item) for item in ticket.chat_ids]: + return + + ticket.chat_ids.append(chat_id) + await ticket.save() + 
+ +def register_conversation_listener(dispatcher: EventDispatcher) -> None: + service = ConversationService( + ConversationRepository(mongo_db.get_db()), + ) + + listener = ConversationListener(service) + + dispatcher.subscribe(AppEvent.TICKET_CREATED, listener.on_ticket_created) + dispatcher.subscribe(AppEvent.TICKET_ASSIGNEE_UPDATED, listener.on_ticket_assignee_updated) + dispatcher.subscribe(AppEvent.TICKET_ESCALATED, listener.on_ticket_escalated) + dispatcher.subscribe(AppEvent.TICKET_CLOSED, listener.on_ticket_closed) \ No newline at end of file diff --git a/app/domains/live_chat/metrics.py b/app/domains/live_chat/metrics.py index 6755c80..3aebf22 100644 --- a/app/domains/live_chat/metrics.py +++ b/app/domains/live_chat/metrics.py @@ -11,3 +11,15 @@ chat_messages_total = prometheus.register_counter( "domain_live_chat_messages_sent_total", "Total messages sent in chat rooms" ) + +listener_conversations_created_total = prometheus.register_counter( + "domain_live_chat_listener_conversations_created_total", + "Conversations created by event listeners", + ["event"], +) + +listener_conversations_closed_total = prometheus.register_counter( + "domain_live_chat_listener_conversations_closed_total", + "Conversations closed by event listeners", + ["event"], +) diff --git a/app/domains/live_chat/repositories/conversation_repository.py b/app/domains/live_chat/repositories/conversation_repository.py index 1129650..748dca8 100644 --- a/app/domains/live_chat/repositories/conversation_repository.py +++ b/app/domains/live_chat/repositories/conversation_repository.py @@ -1,3 +1,4 @@ +import re from datetime import datetime from typing import Any, cast from uuid import UUID @@ -19,7 +20,7 @@ from app.domains.live_chat.exceptions import ParentConversationNotFoundError from ..entities import ChatMessage, ChatParticipants, Conversation -from ..schemas import CreateConversationDTO, PaginatedMessages +from ..schemas import ActiveConversationSummary, CreateConversationDTO, 
PaginatedMessages class ConversationRepository: @@ -70,9 +71,7 @@ async def get_by_client_id(self, client_id: UUID) -> list[Conversation]: ) return await query.to_list() - async def get_by_ticket_id( - self, ticket_id: PydanticObjectId - ) -> list[Conversation]: + async def get_by_ticket_id(self, ticket_id: PydanticObjectId) -> list[Conversation]: query: AggregationQuery[Conversation] = Conversation.aggregate( [ {"$match": {"ticket_id": ticket_id}}, @@ -84,29 +83,6 @@ async def get_by_ticket_id( ) return await query.to_list() - # async def get_paginated_messages( - # self, ticket_id: PydanticObjectId, page: int, limit: int - # ) -> PaginatedMessages: - # query: AggregationQuery[Conversation] = Conversation.aggregate( - # [ - # {"$match": {"ticket_id": ticket_id}}, - # {"$sort": {"sequential_index": 1}}, - # ], - # projection_model=Conversation, - # ) - # conversations = await query.to_list() - # messages: list[ChatMessage] = [] - # for c in conversations: - # messages.extend(c.messages) - # total = len(messages) - # ceiling = max(len(messages) - (page - 1) * limit, 0) - # floor = max(ceiling - limit, 0) - # messages = messages[floor:ceiling] - - # return PaginatedMessages( - # messages=messages, total=total, page=page, limit=limit, has_next=floor > 0 - # ) - async def get_paginated_messages( self, ticket_id: PydanticObjectId, page: int, limit: int ) -> PaginatedMessages: @@ -138,7 +114,8 @@ async def get_paginated_messages( { "$subtract": [ {"$max": [{"$subtract": ["$count", skip]}, 0]}, - {"$max": [ + { + "$max": [ {"$subtract": ["$count", skip + limit]}, 0, ] @@ -200,6 +177,16 @@ async def conversation_exists(self, id: PydanticObjectId) -> bool: doc = await self.db["conversations"].find_one({"_id": id}, {"_id": 1}) return doc is not None + async def ticket_has_conversation(self, ticket_id: PydanticObjectId) -> bool: + doc = await self.db["conversations"].find_one({"ticket_id": ticket_id}, {"_id": 1}) + return doc is not None + + async def 
get_last_by_ticket_id(self, ticket_id: PydanticObjectId) -> Conversation | None: + return await Conversation.find_one( + Conversation.ticket_id == ticket_id, + sort=[("sequential_index", -1)], + ) + async def update(self, conversation: Conversation) -> Conversation | None: return cast(Conversation | None, await conversation.save()) @@ -217,14 +204,232 @@ async def add_message(self, id: PydanticObjectId, message: ChatMessage) -> None: try: await conversation.update({"$push": {"messages": message.model_dump()}}) except (ConnectionFailure, ServerSelectionTimeoutError) as e: - logger.error("MongoDB connection error on add_message", extra={"conversation_id": str(id)}, exc_info=e) + logger.error( + "MongoDB connection error on add_message", + extra={"conversation_id": str(id)}, + exc_info=e, + ) raise RuntimeError("Connection error when saving the message") from e except WriteError as e: - logger.error("MongoDB write error on add_message", extra={"conversation_id": str(id)}, exc_info=e) + logger.error( + "MongoDB write error on add_message", extra={"conversation_id": str(id)}, exc_info=e + ) raise RuntimeError("Error persisting message") from e + async def add_child(self, parent_id: PydanticObjectId, child_id: PydanticObjectId) -> None: + await self.db["conversations"].update_one( + {"_id": parent_id}, + {"$push": {"children_ids": child_id}}, + ) + async def attribute_agent(self, conversation_id: PydanticObjectId, agent_id: UUID) -> None: conversation = await Conversation.get(conversation_id) if not conversation: raise ResourceNotFoundError("Conversation", str(conversation_id)) await conversation.update({"$set": {"agent_id": agent_id}}) + + async def get_latest_open_by_ticket_id( + self, ticket_id: PydanticObjectId + ) -> Conversation | None: + query: AggregationQuery[Conversation] = Conversation.aggregate( + [ + {"$match": {"ticket_id": ticket_id, "finished_at": None}}, + {"$sort": {"sequential_index": -1}}, + {"$limit": 1}, + ], + projection_model=Conversation, + ) + 
results = await query.to_list() + return results[0] if results else None + + async def get_active_conversations( + self, user_id: UUID, is_admin: bool, search: str | None = None + ) -> list[ActiveConversationSummary]: + match_stage: dict[str, Any] = {"finished_at": None} + + if not is_admin: + match_stage["$or"] = [ + {"agent_id": Binary(user_id.bytes, subtype=4)}, + {"agent_id": str(user_id)}, + {"agent_id": None}, + ] + + pipeline: list[dict[str, Any]] = [ + {"$match": match_stage}, + { + "$lookup": { + "from": "tickets", + "localField": "ticket_id", + "foreignField": "_id", + "as": "ticket", + } + }, + {"$unwind": {"path": "$ticket", "preserveNullAndEmptyArrays": True}}, + { + "$addFields": { + "last_message_obj": {"$arrayElemAt": [{"$ifNull": ["$messages", []]}, -1]}, + "message_count": {"$size": {"$ifNull": ["$messages", []]}}, + "client_name": {"$ifNull": ["$ticket.client.name", "Usuário"]}, + "client_email": "$ticket.client.email", + "description": "$ticket.description", + "product": "$ticket.product", + "triage_id": {"$toString": "$ticket.triage_id"}, + "ticket_status": "$ticket.status", + "created_at": "$ticket.creation_date", + "notes": { + "$map": { + "input": {"$ifNull": ["$ticket.comments", []]}, + "as": "comment", + "in": "$$comment.text", + } + }, + "current_agent": { + "$arrayElemAt": [{"$ifNull": ["$ticket.agent_history", []]}, -1] + }, + } + }, + { + "$addFields": { + "assigned_agent_id": "$current_agent.agent_id", + "assigned_agent_name": "$current_agent.name", + } + }, + { + "$project": { + "_id": 0, + "chat_id": "$_id", + "ticket_id": "$ticket_id", + "client_id": "$client_id", + "client_name": "$client_name", + "client_email": "$client_email", + "agent_id": "$agent_id", + "started_at": "$started_at", + "finished_at": "$finished_at", + "last_message": "$last_message_obj.content", + "last_message_at": "$last_message_obj.timestamp", + "message_count": "$message_count", + "triage_id": "$triage_id", + "product": "$product", + "description": 
"$description", + "notes": "$notes", + "ticket_status": "$ticket_status", + "assigned_agent_id": "$assigned_agent_id", + "assigned_agent_name": "$assigned_agent_name", + "created_at": "$created_at", + } + }, + ] + + if search: + regex = {"$regex": re.escape(search), "$options": "i"} + pipeline.append( + { + "$match": { + "$or": [ + {"client_name": regex}, + {"client_email": regex}, + {"last_message": regex}, + {"description": regex}, + {"product": regex}, + {"notes": {"$elemMatch": regex}}, + ] + } + } + ) + + pipeline.append({"$sort": {"last_message_at": -1, "created_at": -1, "started_at": -1}}) + + cursor: AsyncIOMotorCommandCursor[dict[str, Any]] = ( + Conversation.get_motor_collection().aggregate(pipeline) + ) + docs = await cursor.to_list(length=None) + + return [ + ActiveConversationSummary( + chat_id=doc["chat_id"], + ticket_id=doc["ticket_id"], + client_id=self._normalize_uuid_value(doc["client_id"]), + client_name=doc.get("client_name") or "Usuário", + client_email=doc.get("client_email"), + agent_id=self._normalize_uuid_value(doc.get("agent_id")), + started_at=doc["started_at"], + finished_at=doc.get("finished_at"), + last_message=doc.get("last_message"), + last_message_at=doc.get("last_message_at"), + message_count=doc.get("message_count", 0), + triage_id=doc.get("triage_id"), + product=doc.get("product"), + description=doc.get("description"), + notes=doc.get("notes", []), + ticket_status=doc.get("ticket_status"), + assigned_agent_id=self._normalize_uuid_value(doc.get("assigned_agent_id")), + assigned_agent_name=doc.get("assigned_agent_name"), + created_at=doc.get("created_at"), + ) + for doc in docs + ] + + async def search_conversation_by_text( + self, + search_query: str, + client_id: UUID | None = None, + agent_id: UUID | None = None, + ) -> list[Conversation]: + pattern = re.escape(search_query) + regex = {"$regex": pattern, "$options": "i"} + match_stage: dict[str, Any] = {"messages.content": regex} + + if client_id is not None: + 
match_stage["client_id"] = Binary(client_id.bytes, subtype=4) + + if agent_id is not None: + match_stage["$or"] = [ + {"agent_id": Binary(agent_id.bytes, subtype=4)}, + {"agent_id": str(agent_id)}, + ] + + pipeline: list[dict[str, Any]] = [ + {"$match": match_stage}, + { + "$addFields": { + "match_score": { + "$size": { + "$filter": { + "input": {"$ifNull": ["$messages", []]}, + "as": "msg", + "cond": { + "$regexMatch": { + "input": "$$msg.content", + "regex": pattern, + "options": "i", + } + }, + } + } + } + } + }, + {"$sort": {"match_score": -1, "sequential_index": -1}}, + {"$group": {"_id": "$ticket_id", "doc": {"$first": "$$ROOT"}}}, + {"$replaceRoot": {"newRoot": "$doc"}}, + {"$sort": {"match_score": -1, "sequential_index": -1}}, + {"$unset": "match_score"}, + ] + + query: AggregationQuery[Conversation] = Conversation.aggregate( + pipeline, + projection_model=Conversation, + ) + return await query.to_list() + + @staticmethod + def _normalize_uuid_value(value: Any) -> UUID | None: + if value is None: + return None + if isinstance(value, UUID): + return value + if isinstance(value, Binary): + return UUID(bytes=bytes(value)) + if isinstance(value, (bytes, bytearray)): + return UUID(bytes=bytes(value)) + return UUID(str(value)) diff --git a/app/domains/live_chat/routers/chat_router.py b/app/domains/live_chat/routers/chat_router.py index 33c7c20..64c21e2 100644 --- a/app/domains/live_chat/routers/chat_router.py +++ b/app/domains/live_chat/routers/chat_router.py @@ -10,19 +10,50 @@ from app.core.dependencies import WSResponseFactoryDep from app.core.logger import get_logger from app.domains.auth import CurrentUserSessionWsDep, require_permission_ws - -logger = get_logger("app.live_chat.router") +from app.domains.auth.entities import UserWithRoles +from app.domains.live_chat.entities import Conversation from ..chat_manager import ChatConnection, get_chat_manager from ..dependencies import ConversationServiceDep from ..exceptions import ChatRoomNotFoundError, 
InvalidMessageError +logger = get_logger("app.live_chat.router") + def ensure_ws_request_id(ws: WebSocket) -> None: if not hasattr(ws.state, "request_id"): ws.state.request_id = ws.headers.get("x-request-id") or str(uuid4()) +def get_role_names(user: UserWithRoles) -> set[str]: + return {str(role).strip().lower() for role in user.roles_names()} + + +def is_admin(user: UserWithRoles) -> bool: + return "admin" in get_role_names(user) + + +def can_user_join_conversation(user: UserWithRoles, conversation: Conversation) -> bool: + if is_admin(user): + return True + + return user.id in conversation.participants() + + +def get_accepted_subprotocol(ws: WebSocket) -> str | None: + requested = ws.headers.get("sec-websocket-protocol") + + if not requested: + return None + + parts = [part.strip() for part in requested.split(",")] + + if "access_token" in parts: + return "access_token" + + return None + + chat_manager = get_chat_manager() chat_router = APIRouter() @@ -37,19 +68,37 @@ async def connect_to_conversation( response: WSResponseFactoryDep, ) -> None: user = auth[0] - chat = await service.get_by_id(chat_id) - if chat is None or not chat.is_opened() or user.id not in chat.participants(): + if chat is None: await ws.send_denial_response( JSONResponse( status_code=403, - content={"detail": "Chat does not exist or user is not a participant."}, + content={"detail": "Chat does not exist."}, ) ) return - await ws.accept(subprotocol="access_token") + if not chat.is_opened(): + await ws.send_denial_response( + JSONResponse( + status_code=403, + content={"detail": "Chat is already closed."}, + ) + ) + return + + if not can_user_join_conversation(user, chat): + await ws.send_denial_response( + JSONResponse( + status_code=403, + content={"detail": "User is not allowed to join this chat."}, + ) + ) + return + + await ws.accept(subprotocol=get_accepted_subprotocol(ws)) + conn = ChatConnection(ws, response, user) joined = False @@ -63,21 +112,31 @@ async def connect_to_conversation( 
message = service.handle_message(chat_id, user.id, payload) await service.add_message_to_conversation(chat_id, message) - await chat_manager.broadcast(chat_id, message) except WebSocketDisconnect: break except (InvalidMessageError, ValidationError) as e: - await conn.send_error(WebSocketException(code=1003, reason=str(e) or "")) + await conn.send_error( + WebSocketException(code=1003, reason=str(e) or "") + ) except ValueError as e: - await conn.send_error(WebSocketException(code=1008, reason=str(e))) + await conn.send_error( + WebSocketException(code=1008, reason=str(e)) + ) except RuntimeError as e: - await conn.send_error(WebSocketException(code=1011, reason=str(e))) + await conn.send_error( + WebSocketException(code=1011, reason=str(e)) + ) + except ChatRoomNotFoundError as e: - logger.warning("Chat room not found during connection", extra={"chat_id": str(chat_id)}) + logger.warning( + "Chat room not found during connection", + extra={"chat_id": str(chat_id)}, + ) await conn.send_error(WebSocketException(code=1011, reason=str(e))) await conn.close(code=1011, reason="Chat room unavailable") + finally: if joined: await chat_manager.leave_room(chat_id, conn) @@ -92,8 +151,10 @@ async def connect_to_conversation_test( response: WSResponseFactoryDep, ) -> None: await ws.accept() + conn = ChatConnection(ws, response) user_id = uuid4() + await chat_manager.join_room(conversation_id, conn) try: @@ -115,4 +176,4 @@ async def connect_to_conversation_test( ) finally: - await chat_manager.leave_room(conversation_id, conn) + await chat_manager.leave_room(conversation_id, conn) \ No newline at end of file diff --git a/app/domains/live_chat/routers/conversation_router.py b/app/domains/live_chat/routers/conversation_router.py index b6718ab..8301948 100644 --- a/app/domains/live_chat/routers/conversation_router.py +++ b/app/domains/live_chat/routers/conversation_router.py @@ -16,12 +16,70 @@ get_convs_swagger, get_messages_swagger, post_conv_swagger, + search_convs_swagger, 
set_agent_swagger, ) conversation_router = APIRouter() +@conversation_router.get( + "/active", + tags=["Conversations"], + dependencies=[require_permission("chat:read")], +) +async def get_active_conversations( + auth: CurrentUserSessionDep, + service: ConversationServiceDep, + response: ResponseFactoryDep, + search: str = Query(default="", description="Search by client name, email or last message."), +) -> JSONResponse: + user = auth[0] + chats = await service.get_active_conversations(user, search) + + return response.success( + data=[chat.model_dump(mode="json") for chat in chats], + status_code=status.HTTP_200_OK, + ) + + +@conversation_router.get( + "/search", + tags=["Conversations"], + dependencies=[require_permission("chat:read")], + **search_convs_swagger, +) +async def search_conversations_text( + auth: CurrentUserSessionDep, + service: ConversationServiceDep, + response: ResponseFactoryDep, + search_query: str | None = Query( + default=None, + min_length=5, + max_length=100, + description="Substring to match against message content (case-insensitive).", + ), +) -> JSONResponse: + """Search conversations by message content. 
+ + Scope is enforced by role: + - clients can only find their own conversations + - agents can only find conversations they are assigned to + - admins can find any conversation + """ + if search_query is None or not search_query.strip(): + raise AppHTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="provide a search text using search_query in the query string", + ) + + res = await service.search_conversation_by_text(search_query, auth[0]) + return response.success( + data=[c.model_dump(mode="json") for c in res], + status_code=status.HTTP_200_OK, + ) + + @conversation_router.get( "/client/{client_id}", tags=["Conversations"], @@ -133,6 +191,44 @@ async def create_conversation( ) from e +@conversation_router.post( + "/{chat_id}/assume", + tags=["Conversations"], + dependencies=[require_permission("chat:set_agent")], +) +async def assume_conversation( + chat_id: PydanticObjectId, + auth: CurrentUserSessionDep, + service: ConversationServiceDep, + response: ResponseFactoryDep, +) -> JSONResponse: + user = auth[0] + + try: + chat = await service.assume_conversation(chat_id, user) + + if chat is None: + raise AppHTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Conversation {chat_id} does not exist.", + ) + + return response.success( + data=chat.model_dump(mode="json"), + status_code=status.HTTP_200_OK, + ) + except PermissionError as err: + raise AppHTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=str(err), + ) from err + except ValueError as err: + raise AppHTTPException( + status_code=status.HTTP_409_CONFLICT, + detail=str(err), + ) from err + + @conversation_router.patch( "/{chat_id}/set-agent/{agent_id}", tags=["Conversations"], diff --git a/app/domains/live_chat/routers/swagger_utils.py b/app/domains/live_chat/routers/swagger_utils.py index 0c3f649..2128cf6 100644 --- a/app/domains/live_chat/routers/swagger_utils.py +++ b/app/domains/live_chat/routers/swagger_utils.py @@ -105,6 +105,48 @@ "responses": 
get_messages_responses, } +search_convs_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": ( + "Conversations matching the search query. At most one conversation per " + "ticket is returned (the most recent matching one). Empty list when there " + "are no matches." + ), + "model": GenericSuccessContent[list[Conversation]], + }, + 400: { + "description": "The 'search_query' parameter was not provided.", + "model": ErrorContent, + }, + 403: { + "description": "The authenticated user does not have permission to read chats.", + "model": ErrorContent, + }, + 422: { + "description": ( + "Query parameter validation failed (e.g. 'search_query' shorter than 5 " + "or longer than 100 characters)." + ), + "model": ErrorContent, + }, +} + +search_convs_swagger: dict[str, Any] = { + "summary": "Search conversations by message content", + "description": ( + "Case-insensitive substring search over the `messages.content` field of " + "conversations. Special regex characters in the query are treated as literal " + "text. Results are deduplicated by ticket: only the most recent matching " + "conversation per ticket is returned. " + "Scope is enforced by role: clients only see their own conversations; " + "agents (including N1/N2/N3) only see conversations they are assigned to; " + "admins see all matching conversations. " + "Requires the 'chat:read' permission." 
+ ), + "response_model": GenericSuccessContent[list[Conversation]], + "responses": search_convs_responses, +} + set_agent_responses: dict[int | str, dict[str, Any]] = { 200: { "description": "Agent assigned to the conversation successfully.", diff --git a/app/domains/live_chat/schemas.py b/app/domains/live_chat/schemas.py index 3891824..17a8325 100644 --- a/app/domains/live_chat/schemas.py +++ b/app/domains/live_chat/schemas.py @@ -1,3 +1,4 @@ +from datetime import datetime from typing import Any, Literal from uuid import UUID @@ -59,3 +60,27 @@ class PaginatedMessages(BaseModel): page: int limit: int has_next: bool + + +class ActiveConversationSummary(BaseModel): + chat_id: PydanticObjectId + ticket_id: PydanticObjectId + client_id: UUID + client_name: str + client_email: str | None = None + agent_id: UUID | None = None + started_at: datetime + finished_at: datetime | None = None + last_message: str | None = None + last_message_at: datetime | None = None + message_count: int = 0 + can_join_live: bool = False + needs_assume: bool = False + triage_id: str | None = None + product: str | None = None + description: str | None = None + notes: list[str] = [] + ticket_status: str | None = None + assigned_agent_id: UUID | None = None + assigned_agent_name: str | None = None + created_at: datetime | None = None diff --git a/app/domains/live_chat/services/conversation_service.py b/app/domains/live_chat/services/conversation_service.py index 2a3c482..e712efc 100644 --- a/app/domains/live_chat/services/conversation_service.py +++ b/app/domains/live_chat/services/conversation_service.py @@ -1,14 +1,22 @@ +from datetime import UTC, datetime from typing import Any from uuid import UUID from beanie import PydanticObjectId +from app.domains.auth.entities import UserWithRoles from app.domains.live_chat.entities import ChatMessage, ChatParticipants, Conversation from app.domains.live_chat.exceptions import ParentConversationNotFoundError -from app.domains.live_chat.schemas import 
CreateConversationDTO, IncomingMessage, PaginatedMessages +from app.domains.live_chat.schemas import ( + ActiveConversationSummary, + CreateConversationDTO, + IncomingMessage, + PaginatedMessages, +) -from ..repositories import ConversationRepository from ..metrics import chat_messages_total +from ..repositories import ConversationRepository + class ConversationService: def __init__(self, repository: ConversationRepository) -> None: @@ -28,6 +36,9 @@ def handle_message( responding_to=data.responding_to, ) + async def ticket_has_conversation(self, ticket_id: PydanticObjectId) -> bool: + return await self.repo.ticket_has_conversation(ticket_id) + async def create(self, dto: CreateConversationDTO) -> Conversation: if dto.parent_id is not None: parent_exists = await self.repo.conversation_exists(dto.parent_id) @@ -49,9 +60,7 @@ async def get_participants(self, chat_id: PydanticObjectId) -> ChatParticipants async def attribute_agent(self, chat_id: PydanticObjectId, agent_id: UUID) -> None: return await self.repo.attribute_agent(chat_id, agent_id) - async def get_chats_from_ticket( - self, ticket_id: PydanticObjectId - ) -> list[Conversation]: + async def get_chats_from_ticket(self, ticket_id: PydanticObjectId) -> list[Conversation]: return await self.repo.get_by_ticket_id(ticket_id) async def get_paginated_messages( @@ -69,3 +78,149 @@ async def add_message_to_conversation( ) -> None: chat_messages_total.inc() await self.repo.add_message(chat_id, message) + + async def get_active_conversations( + self, user: UserWithRoles, search: str | None = None + ) -> list[ActiveConversationSummary]: + is_admin = "admin" in user.roles_names() + chats = await self.repo.get_active_conversations(user.id, is_admin, search) + + result: list[ActiveConversationSummary] = [] + for chat in chats: + can_join_live = is_admin or chat.agent_id == user.id + needs_assume = (not is_admin) and chat.agent_id is None + + result.append( + chat.model_copy( + update={ + "can_join_live": can_join_live, + 
"needs_assume": needs_assume, + } + ) + ) + + return result + + async def assume_conversation( + self, chat_id: PydanticObjectId, user: UserWithRoles + ) -> Conversation | None: + chat = await self.repo.get_by_id(chat_id) + if chat is None: + return None + + if not chat.is_opened(): + raise ValueError("Conversation is already closed.") + + is_admin = "admin" in user.roles_names() + + if chat.agent_id is None: + await self.repo.attribute_agent(chat_id, user.id) + chat.agent_id = user.id + return chat + + if chat.agent_id == user.id: + return chat + + if is_admin: + await self.repo.attribute_agent(chat_id, user.id) + chat.agent_id = user.id + return chat + + raise PermissionError("Conversation is already assigned to another agent.") + + async def get_latest_open_by_ticket_id( + self, ticket_id: PydanticObjectId + ) -> Conversation | None: + return await self.repo.get_latest_open_by_ticket_id(ticket_id) + + async def get_last_conversation_from_ticket( + self, ticket_id: PydanticObjectId + ) -> Conversation | None: + return await self.repo.get_last_by_ticket_id(ticket_id) + + async def end_conversation( + self, chat_id: PydanticObjectId, end_datetime: datetime | None = None + ) -> Conversation | None: + c = await self.get_by_id(chat_id) + if c is None: + return None + c.finished_at = end_datetime if end_datetime else datetime.now(UTC) + c = await self.repo.update(c) + return c + + async def close_active_ticket_conversation( + self, + ticket_id: PydanticObjectId, + system_message: str, + finished_at: datetime | None = None, + ) -> Conversation | None: + conversation = await self.get_latest_open_by_ticket_id(ticket_id) + if conversation is None or conversation.id is None: + return None + + await self.add_message_to_conversation( + conversation.id, + ChatMessage.create( + conversation_id=conversation.id, + sender_id="System", + type="text", + content=system_message, + ), + ) + return await self.end_conversation(conversation.id, finished_at) + + async def 
append_conversation_to_ticket( + self, + ticket_id: PydanticObjectId, + client_id: UUID, + agent_id: UUID | None = None, + closing_message: str | None = None, + ) -> Conversation: + last_conv = await self.get_last_conversation_from_ticket(ticket_id) + + sequential_index = (last_conv.sequential_index + 1) if last_conv else 0 + parent_id = last_conv.id if last_conv else None + + new_conv = await self.create( + CreateConversationDTO( + ticket_id=ticket_id, + agent_id=agent_id, + client_id=client_id, + sequential_index=sequential_index, + parent_id=parent_id, + ) + ) + + if last_conv is not None and last_conv.id is not None: + if closing_message: + await self.add_message_to_conversation( + last_conv.id, + ChatMessage.create( + conversation_id=last_conv.id, + sender_id="System", + type="text", + content=closing_message, + ), + ) + await self.end_conversation(last_conv.id) + if new_conv.id is not None: + await self.repo.add_child(last_conv.id, new_conv.id) + + return new_conv + + + async def search_conversation_by_text( + self, search_query: str, user: UserWithRoles + ) -> list[Conversation]: + roles = user.roles_names() + if "admin" in roles: + return await self.repo.search_conversation_by_text(search_query) + + if any(role.strip().upper() in {"AGENT", "N1", "N2", "N3"} for role in roles): + return await self.repo.search_conversation_by_text( + search_query, agent_id=user.id + ) + + return await self.repo.search_conversation_by_text( + search_query, client_id=user.id + ) diff --git a/app/domains/notifications/README.md b/app/domains/notifications/README.md new file mode 100644 index 0000000..0b01e25 --- /dev/null +++ b/app/domains/notifications/README.md @@ -0,0 +1,123 @@ +# Notifications Domain + +Transactional email outbox for SyncDesk. + +This module owns the durable persistence and delivery of outgoing emails. 
Producers in other domains never call email APIs directly — they publish a typed event on the application `EventDispatcher`, and the notifications domain consumes that event, persists an outbox row, and a background worker handles delivery, retry, and dead-lettering. + +## Purpose + +- Decouple email producers (auth, etc.) from email infrastructure. +- Survive process crashes: emails are committed to the database before delivery is attempted. +- Bounded retry with exponential backoff and a terminal `DEAD` status for unrecoverable failures. + +## Architecture + +- `models.py`: SQLAlchemy ORM model `EmailOutbox` (JSONB payload column). Restricted to the repository layer. +- `entities.py`: `EmailOutbox` dataclass entity returned by the repository. +- `enums.py`: `EmailEventType`, `EmailOutboxStatus`. +- `schemas.py`: `EnqueueEmailOutboxDTO` and the typed payload models (`WelcomeInvitePayload`, `PasswordResetPayload`). +- `repositories/email_outbox_repository.py`: persistence operations (`enqueue`, `claim_batch`, `mark_sent`, `mark_retry`, `mark_dead`). +- `services/email_outbox_service.py`: builds typed payloads from event schemas and delegates to the repository. +- `listeners.py`: subscribes to `EventDispatcher` events and invokes the service. This is the single integration point with other domains. +- `worker.py`: long-running async loop that claims pending rows with `FOR UPDATE SKIP LOCKED`, renders the email, sends through `EmailStrategy`, and updates status. +- `metrics.py`: Prometheus counters/gauges for queue depth and per-event processing outcomes. + +The boundary rule: cross-domain integration goes through `EventDispatcher`. No domain imports a service or repository from `notifications`. The auth domain (only current producer) publishes events; the notifications listener handles persistence. + +## Public Interface (for other domains) + +To trigger an email, publish a typed event on `EventDispatcher`. 
The relevant event schemas live in `app/core/event_dispatcher/schemas.py`: + +```python +from app.core.event_dispatcher import AppEvent, EventDispatcher +from app.core.event_dispatcher.schemas import WelcomeInviteEventSchema + +class MyService: + def __init__(self, dispatcher: EventDispatcher) -> None: + self.dispatcher = dispatcher + + async def some_flow(self, ...) -> None: + await self.dispatcher.publish( + AppEvent.USER_WELCOME_INVITE, + WelcomeInviteEventSchema( + user_id=user.id, + user_name=user.name, + user_email=user.email, + roles=user.roles_names(), + raw_token=token, + one_time_password=password, + max_attempts=settings.EMAIL_OUTBOX_MAX_ATTEMPTS, + ), + ) +``` + +The dispatch is fire-and-forget. The notifications listener picks it up, opens its own DB session, writes the outbox row, and commits. Producers do not need to await delivery and must not assume success on return. + +## Supported Events + +| `AppEvent` | Payload schema (`app.core.event_dispatcher.schemas`) | Outbox `event_type` | +| ----------------------- | ---------------------------------------------------- | ------------------- | +| `USER_WELCOME_INVITE` | `WelcomeInviteEventSchema` | `WELCOME_INVITE` | +| `USER_PASSWORD_RESET` | `PasswordResetEventSchema` | `PASSWORD_RESET` | + +The internal payload stored in the outbox JSONB column is a separate, narrower type (`WelcomeInvitePayload` / `PasswordResetPayload`) — the listener resolves the frontend URL from the user's roles before persisting. + +## Adding a New Email Type + +1. Add a value to `EmailEventType` in `enums.py`. +2. Add a typed payload model in `schemas.py` (one Pydantic `BaseModel` per email type). +3. Add a value to `AppEvent` in `app/core/event_dispatcher/enums.py` and the matching event schema in `app/core/event_dispatcher/schemas.py`. Register the pair in `EVENT_PAYLOAD_MAP`. +4. Add an `enqueue_<...>` method to `EmailOutboxService` that converts the event schema into the typed payload. +5. 
Add a handler method to `EmailOutboxListener` decorated with `@event_handler()`, and subscribe it inside `register_email_outbox_listener`. +6. Extend `_render_html` in `worker.py` with an `isinstance` branch for the new payload type. +7. Add a render function in `app/core/email/renderer.py` and a params schema in `app/core/email/schemas.py`. +8. Extend the repository's `_to_entity` `if/elif` so the new event type maps back to the right typed payload on read. + +## Operations + +### Worker lifecycle + +`run_email_outbox_worker` is started as a global background task during application startup and cancelled gracefully during shutdown. It does nothing if `EMAIL_OUTBOX_ENABLED=False`. + +Each iteration: +1. Claims a batch of `PENDING`/`RETRY` rows whose `next_attempt_at <= now()` using `FOR UPDATE SKIP LOCKED`. This is what makes the worker safe to run as multiple replicas — concurrent workers will not pick up the same row. +2. Bulk-updates claimed rows to `PROCESSING` with the current worker id and `locked_at`. +3. Renders the email and dispatches via the configured `EmailStrategy`. +4. On success: `mark_sent` (status `SENT`, clears lock and last error). +5. On failure: increments `attempts`. If `attempts >= max_attempts`, `mark_dead`; otherwise `mark_retry` with exponential backoff (`2 ** attempts` seconds, capped at `EMAIL_OUTBOX_BACKOFF_MAX_SECONDS`, plus jitter). + +### Status machine + +``` +PENDING ──claim──▶ PROCESSING ──ok──▶ SENT + │ + └──fail──▶ RETRY ──claim──▶ PROCESSING ─... + │ + └──attempts >= max──▶ DEAD +``` + +`SENT` and `DEAD` are terminal — they are never claimed again. + +### Configuration + +| Setting | Purpose | +| ------------------------------------ | ---------------------------------------------------------- | +| `EMAIL_OUTBOX_ENABLED` | Master switch for the worker. | +| `EMAIL_OUTBOX_BATCH_SIZE` | Max rows claimed per poll. | +| `EMAIL_OUTBOX_POLL_SECONDS` | Sleep interval between polls. 
| +| `EMAIL_OUTBOX_MAX_ATTEMPTS` | Default delivery attempts before dead-lettering. | +| `EMAIL_OUTBOX_BACKOFF_MAX_SECONDS` | Cap for the exponential backoff. | +| `EMAIL_OUTBOX_WORKER_ID` | Optional explicit worker id (defaults to `host-pid`). | + +### Metrics + +Exposed via Prometheus from `metrics.py`: + +- `email_outbox_depth{status}` (gauge): claimed batch size by status. +- `email_outbox_processed_total{status}` (counter): per-row outcomes — `sent`, `retry`, `dead`. + +### Testing + +- **Unit**: `tests/app/unit/notifications/` — service and worker pieces with mocked dependencies. +- **Integration**: `tests/app/integration/domains/notifications/test_email_outbox_repository.py` — exercises the repository against a real database with savepoint isolation. No mocks; integration tests of this domain only stub the email sender (the external boundary). +- **e2e**: `tests/app/e2e/conftest.py` registers a capture handler on the dispatcher to assert that the right events were published end-to-end. 
diff --git a/app/domains/notifications/__init__.py b/app/domains/notifications/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/domains/notifications/entities.py b/app/domains/notifications/entities.py new file mode 100644 index 0000000..b260dad --- /dev/null +++ b/app/domains/notifications/entities.py @@ -0,0 +1,25 @@ +from datetime import datetime +from uuid import UUID + +from pydantic.dataclasses import dataclass + +from app.domains.notifications.enums import EmailEventType +from app.domains.notifications.models import EmailOutboxStatus +from app.domains.notifications.schemas import PasswordResetPayload, WelcomeInvitePayload + + +@dataclass +class EmailOutbox: + id: UUID + event_type: EmailEventType + recipient: str + payload: WelcomeInvitePayload | PasswordResetPayload + status: EmailOutboxStatus + attempts: int + max_attempts: int + last_error: str | None + next_attempt_at: datetime + created_at: datetime + sent_at: datetime | None + locked_at: datetime | None + lock_owner: str | None diff --git a/app/domains/notifications/enums.py b/app/domains/notifications/enums.py new file mode 100644 index 0000000..2144f68 --- /dev/null +++ b/app/domains/notifications/enums.py @@ -0,0 +1,18 @@ +from enum import Enum + + +class EmailOutboxStatus(str, Enum): + PENDING = "PENDING" + PROCESSING = "PROCESSING" + SENT = "SENT" + RETRY = "RETRY" + DEAD = "DEAD" + + +class EmailEventType(str, Enum): + WELCOME_INVITE = "WELCOME_INVITE" + PASSWORD_RESET = "PASSWORD_RESET" + + +def status_values(enum_class: type[Enum]) -> list[str]: + return [m.value for m in enum_class] diff --git a/app/domains/notifications/listeners.py b/app/domains/notifications/listeners.py new file mode 100644 index 0000000..3944e13 --- /dev/null +++ b/app/domains/notifications/listeners.py @@ -0,0 +1,32 @@ +from app.core.event_dispatcher.decorators import event_handler +from app.core.event_dispatcher.enums import AppEvent +from app.core.event_dispatcher.event_dispatcher import 
EventDispatcher +from app.core.event_dispatcher.schemas import PasswordResetEventSchema, WelcomeInviteEventSchema +from app.core.logger import get_logger +from app.db.postgres.engine import async_session +from app.domains.notifications.repositories.email_outbox_repository import EmailOutboxRepository +from app.domains.notifications.services.email_outbox_service import EmailOutboxService + +logger = get_logger("app.notifications.listener") + + +class EmailOutboxListener: + @event_handler(WelcomeInviteEventSchema) + async def on_welcome_invite(self, schema: WelcomeInviteEventSchema) -> None: + async with async_session() as db: + service = EmailOutboxService(EmailOutboxRepository(db)) + await service.enqueue_welcome_invite(schema) + await db.commit() + + @event_handler(PasswordResetEventSchema) + async def on_password_reset(self, schema: PasswordResetEventSchema) -> None: + async with async_session() as db: + service = EmailOutboxService(EmailOutboxRepository(db)) + await service.enqueue_password_reset(schema) + await db.commit() + + +def register_email_outbox_listener(dispatcher: EventDispatcher) -> None: + listener = EmailOutboxListener() + dispatcher.subscribe(AppEvent.USER_WELCOME_INVITE, listener.on_welcome_invite) + dispatcher.subscribe(AppEvent.USER_PASSWORD_RESET, listener.on_password_reset) diff --git a/app/domains/notifications/metrics.py b/app/domains/notifications/metrics.py new file mode 100644 index 0000000..9c3bd54 --- /dev/null +++ b/app/domains/notifications/metrics.py @@ -0,0 +1,13 @@ +from app.core.metrics.prometheus import prometheus + +email_outbox_depth = prometheus.register_gauge( + "email_outbox_depth", + "Number of email outbox entries by status", + ["status"], +) + +email_outbox_processed_total = prometheus.register_counter( + "email_outbox_processed_total", + "Total email outbox rows processed by outcome", + ["status"], +) diff --git a/app/domains/notifications/models.py b/app/domains/notifications/models.py new file mode 100644 index 
0000000..58949da --- /dev/null +++ b/app/domains/notifications/models.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +from datetime import datetime +from typing import Any +from uuid import UUID, uuid4 + +from sqlalchemy import DateTime, Index, Integer, String, Text, func +from sqlalchemy import Enum as SqlEnum +from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.dialects.postgresql import UUID as PG_UUID +from sqlalchemy.orm import Mapped, mapped_column + +from app.db.postgres.base import Base +from app.domains.notifications.enums import EmailOutboxStatus, status_values + + +class EmailOutbox(Base): + __tablename__ = "email_outbox" + + __table_args__ = ( + Index("ix_email_outbox_status_next_attempt_at", "status", "next_attempt_at"), + Index("ix_email_outbox_event_type", "event_type"), + Index("ix_email_outbox_recipient", "recipient"), + ) + + id: Mapped[UUID] = mapped_column(PG_UUID(as_uuid=True), primary_key=True, default=uuid4) + event_type: Mapped[str] = mapped_column(String(64), nullable=False) + recipient: Mapped[str] = mapped_column(String(320), nullable=False) + payload: Mapped[dict[str, Any]] = mapped_column(JSONB, nullable=False) + status: Mapped[str] = mapped_column( + SqlEnum( + EmailOutboxStatus, + name="email_outbox_status", + native_enum=True, + create_constraint=False, + values_callable=status_values, + ), + nullable=False, + default=EmailOutboxStatus.PENDING, + ) + attempts: Mapped[int] = mapped_column(Integer, nullable=False, default=0) + max_attempts: Mapped[int] = mapped_column(Integer, nullable=False, default=5) + last_error: Mapped[str | None] = mapped_column(Text, nullable=True) + next_attempt_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.now() + ) + created_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.now() + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.now(), onupdate=func.now() + ) + 
sent_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) + locked_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) + lock_owner: Mapped[str | None] = mapped_column(String(128), nullable=True) diff --git a/app/domains/notifications/repositories/__init__.py b/app/domains/notifications/repositories/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/domains/notifications/repositories/email_outbox_repository.py b/app/domains/notifications/repositories/email_outbox_repository.py new file mode 100644 index 0000000..95b2718 --- /dev/null +++ b/app/domains/notifications/repositories/email_outbox_repository.py @@ -0,0 +1,137 @@ +from datetime import datetime +from uuid import UUID + +from sqlalchemy import update +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.domains.notifications.entities import EmailOutbox as EmailOutboxEntity +from app.domains.notifications.enums import EmailEventType, EmailOutboxStatus +from app.domains.notifications.models import EmailOutbox +from app.domains.notifications.schemas import ( + EnqueueEmailOutboxDTO, + PasswordResetPayload, + WelcomeInvitePayload, +) + + +class EmailOutboxRepository: + def __init__(self, db: AsyncSession) -> None: + self.db = db + + @staticmethod + def _to_entity(model: EmailOutbox) -> EmailOutboxEntity: + event_type = EmailEventType(model.event_type) + payload: WelcomeInvitePayload | PasswordResetPayload + if event_type == EmailEventType.WELCOME_INVITE: + payload = WelcomeInvitePayload(**model.payload) + elif event_type == EmailEventType.PASSWORD_RESET: + payload = PasswordResetPayload(**model.payload) + else: + raise ValueError(f"Unknown event_type: {model.event_type}") + + return EmailOutboxEntity( + id=model.id, + event_type=event_type, + recipient=model.recipient, + payload=payload, + status=EmailOutboxStatus(model.status), + attempts=model.attempts, + max_attempts=model.max_attempts, + 
last_error=model.last_error, + next_attempt_at=model.next_attempt_at, + created_at=model.created_at, + sent_at=model.sent_at, + locked_at=model.locked_at, + lock_owner=model.lock_owner, + ) + + async def enqueue(self, dto: EnqueueEmailOutboxDTO) -> EmailOutboxEntity: + row = EmailOutbox( + event_type=dto.event_type, + recipient=dto.recipient, + payload=dto.payload.model_dump(mode="json"), + status=EmailOutboxStatus.PENDING, + max_attempts=dto.max_attempts, + ) + self.db.add(row) + await self.db.flush() + return self._to_entity(row) + + async def claim_batch( + self, now: datetime, worker_id: str, limit: int + ) -> list[EmailOutboxEntity]: + stmt = ( + select(EmailOutbox) + .where( + EmailOutbox.status.in_( + [EmailOutboxStatus.PENDING, EmailOutboxStatus.RETRY] + ), + EmailOutbox.next_attempt_at <= now, + ) + .order_by(EmailOutbox.next_attempt_at) + .with_for_update(skip_locked=True) + .limit(limit) + ) + result = await self.db.execute(stmt) + rows = list(result.scalars().all()) + + if rows: + ids = [r.id for r in rows] + await self.db.execute( + update(EmailOutbox) + .where(EmailOutbox.id.in_(ids)) + .values( + status=EmailOutboxStatus.PROCESSING, + locked_at=now, + lock_owner=worker_id, + ) + ) + await self.db.flush() + + return [self._to_entity(r) for r in rows] + + async def mark_sent(self, id: UUID, now: datetime) -> None: + await self.db.execute( + update(EmailOutbox) + .where(EmailOutbox.id == id) + .values( + status=EmailOutboxStatus.SENT, + sent_at=now, + locked_at=None, + lock_owner=None, + last_error=None, + ) + ) + + async def mark_retry( + self, + id: UUID, + last_error: str, + next_attempt_at: datetime, + attempts: int, + ) -> None: + await self.db.execute( + update(EmailOutbox) + .where(EmailOutbox.id == id) + .values( + status=EmailOutboxStatus.RETRY, + last_error=last_error[:2000], + next_attempt_at=next_attempt_at, + attempts=attempts, + locked_at=None, + lock_owner=None, + ) + ) + + async def mark_dead(self, id: UUID, last_error: str) -> None: + 
await self.db.execute( + update(EmailOutbox) + .where(EmailOutbox.id == id) + .values( + status=EmailOutboxStatus.DEAD, + last_error=last_error[:2000], + locked_at=None, + lock_owner=None, + ) + ) diff --git a/app/domains/notifications/schemas.py b/app/domains/notifications/schemas.py new file mode 100644 index 0000000..a147e02 --- /dev/null +++ b/app/domains/notifications/schemas.py @@ -0,0 +1,32 @@ +from uuid import UUID + +from pydantic import BaseModel + +from app.core.schemas import BaseDTO +from app.domains.notifications.enums import EmailEventType + + +class WelcomeInvitePayload(BaseModel): + user_id: UUID + user_name: str + user_email: str + one_time_password: str + frontend_url: str + token: str + + +class PasswordResetPayload(BaseModel): + user_id: UUID + user_email: str + frontend_url: str + token: str + + +EmailOutboxPayload = WelcomeInvitePayload | PasswordResetPayload + + +class EnqueueEmailOutboxDTO(BaseDTO): + event_type: EmailEventType + recipient: str + payload: WelcomeInvitePayload | PasswordResetPayload + max_attempts: int = 5 diff --git a/app/domains/notifications/services/__init__.py b/app/domains/notifications/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/domains/notifications/services/email_outbox_service.py b/app/domains/notifications/services/email_outbox_service.py new file mode 100644 index 0000000..675d641 --- /dev/null +++ b/app/domains/notifications/services/email_outbox_service.py @@ -0,0 +1,67 @@ +from app.core.config import get_settings +from app.core.event_dispatcher.schemas import ( + PasswordResetEventSchema, + WelcomeInviteEventSchema, +) +from app.core.logger import get_logger + +from app.domains.notifications.enums import EmailEventType +from app.domains.notifications.repositories.email_outbox_repository import ( + EmailOutboxRepository, +) +from app.domains.notifications.schemas import EnqueueEmailOutboxDTO, PasswordResetPayload, WelcomeInvitePayload + + +class EmailOutboxService: + def 
__init__(self, repo: EmailOutboxRepository) -> None: + self.repo = repo + self.logger = get_logger("app.notifications.outbox") + + @staticmethod + def _resolve_frontend_url(roles: list[str]) -> str: + settings = get_settings() + if "agent" in roles or "admin" in roles: + return settings.WEB_FRONTEND_URL + return settings.MOBILE_FRONTEND_URL + + async def enqueue_welcome_invite(self, schema: WelcomeInviteEventSchema) -> None: + payload = WelcomeInvitePayload( + user_id=schema.user_id, + user_name=schema.user_name, + user_email=schema.user_email, + one_time_password=schema.one_time_password, + frontend_url=self._resolve_frontend_url(schema.roles), + token=schema.raw_token, + ) + row = await self.repo.enqueue( + EnqueueEmailOutboxDTO( + event_type=EmailEventType.WELCOME_INVITE, + recipient=schema.user_email, + payload=payload, + max_attempts=schema.max_attempts, + ) + ) + self.logger.info( + "Enqueued welcome invite email", + extra={"outbox_id": str(row.id), "user_id": str(schema.user_id)}, + ) + + async def enqueue_password_reset(self, schema: PasswordResetEventSchema) -> None: + payload = PasswordResetPayload( + user_id=schema.user_id, + user_email=schema.user_email, + frontend_url=self._resolve_frontend_url(schema.roles), + token=schema.raw_token, + ) + row = await self.repo.enqueue( + EnqueueEmailOutboxDTO( + event_type=EmailEventType.PASSWORD_RESET, + recipient=schema.user_email, + payload=payload, + max_attempts=schema.max_attempts, + ) + ) + self.logger.info( + "Enqueued password reset email", + extra={"outbox_id": str(row.id), "user_id": str(schema.user_id)}, + ) diff --git a/app/domains/notifications/worker.py b/app/domains/notifications/worker.py new file mode 100644 index 0000000..51d4619 --- /dev/null +++ b/app/domains/notifications/worker.py @@ -0,0 +1,162 @@ +import asyncio +import os +import random +import socket +from datetime import UTC, datetime, timedelta + +from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker + +from 
app.core.config import get_settings +from app.core.email.renderer import render_password_reset_email, render_welcome_email +from app.core.email.schemas import ResetPasswordEmailParams, WelcomeEmailParams +from app.core.email.strategy import EmailStrategy +from app.core.logger import get_logger +from app.domains.notifications.entities import ( + EmailOutbox, + WelcomeInvitePayload, +) +from app.domains.notifications.metrics import email_outbox_depth, email_outbox_processed_total +from app.domains.notifications.models import EmailOutboxStatus +from app.domains.notifications.repositories.email_outbox_repository import ( + EmailOutboxRepository, +) + +logger = get_logger("app.notifications.worker") + + +def _worker_id() -> str: + settings = get_settings() + if settings.EMAIL_OUTBOX_WORKER_ID: + return settings.EMAIL_OUTBOX_WORKER_ID + return f"{socket.gethostname()}-{os.getpid()}" + + +def _backoff_seconds(attempts: int, max_seconds: int) -> float: + base = min(2**attempts, max_seconds) + jitter = random.uniform(0, base * 0.1) + return base + jitter + + +def _render_html(entry: EmailOutbox) -> tuple[str, str]: + if isinstance(entry.payload, WelcomeInvitePayload): + params = WelcomeEmailParams( + user_name=entry.payload.user_name, + user_email=entry.payload.user_email, + one_time_password=entry.payload.one_time_password, + login_url=f"{entry.payload.frontend_url}/login?token={entry.payload.token}", + ) + return "Welcome to SyncDesk!", render_welcome_email(params) + + else: + params = ResetPasswordEmailParams( + user_email=entry.payload.user_email, + reset_url=f"{entry.payload.frontend_url}/reset-password?token={entry.payload.token}", + ) + return "Reset Your Password", render_password_reset_email(params) + + +async def _process_single( + session_maker: async_sessionmaker[AsyncSession], + email_strategy: EmailStrategy, + entry: EmailOutbox, + worker_id: str, +) -> None: + settings = get_settings() + now = datetime.now(UTC).replace(tzinfo=None) + outbox_id = str(entry.id) 
+ + try: + subject, html = _render_html(entry) + await email_strategy._send(entry.recipient, subject, html) # type: ignore[attr-defined] + + async with session_maker() as session: + async with session.begin(): + repo = EmailOutboxRepository(session) + await repo.mark_sent(entry.id, now) + + email_outbox_processed_total.labels(status="sent").inc() + logger.info( + "Outbox email sent", + extra={"outbox_id": outbox_id, "event_type": entry.event_type.value}, + ) + + except Exception as exc: + new_attempts = entry.attempts + 1 + error_msg = str(exc)[:2000] + + async with session_maker() as session: + async with session.begin(): + repo = EmailOutboxRepository(session) + if new_attempts >= entry.max_attempts: + await repo.mark_dead(entry.id, error_msg) + email_outbox_processed_total.labels(status="dead").inc() + logger.error( + "Outbox email dead-lettered", + extra={"outbox_id": outbox_id, "attempts": new_attempts}, + exc_info=exc, + ) + else: + delay = _backoff_seconds(new_attempts, settings.EMAIL_OUTBOX_BACKOFF_MAX_SECONDS) + next_attempt_at = now + timedelta(seconds=delay) + await repo.mark_retry(entry.id, error_msg, next_attempt_at, new_attempts) + email_outbox_processed_total.labels(status="retry").inc() + logger.warning( + "Outbox email scheduled for retry", + extra={ + "outbox_id": outbox_id, + "attempts": new_attempts, + "next_attempt_at": next_attempt_at.isoformat(), + }, + ) + + +async def _poll_and_process( + session_maker: async_sessionmaker[AsyncSession], + email_strategy: EmailStrategy, + worker_id: str, +) -> None: + settings = get_settings() + now = datetime.now(UTC).replace(tzinfo=None) + + async with session_maker() as session: + async with session.begin(): + repo = EmailOutboxRepository(session) + entries = await repo.claim_batch(now, worker_id, settings.EMAIL_OUTBOX_BATCH_SIZE) + + if not entries: + return + + email_outbox_depth.labels(status=EmailOutboxStatus.PROCESSING).set(len(entries)) + logger.debug("Claimed outbox batch", extra={"count": 
len(entries), "worker_id": worker_id}) + + tasks = [ + _process_single(session_maker, email_strategy, entry, worker_id) for entry in entries + ] + await asyncio.gather(*tasks, return_exceptions=True) + + +async def run_email_outbox_worker( + engine: AsyncEngine, + email_strategy: EmailStrategy, +) -> None: + settings = get_settings() + if not settings.EMAIL_OUTBOX_ENABLED: + logger.info("Email outbox worker disabled via EMAIL_OUTBOX_ENABLED=False") + return + + worker_id = _worker_id() + session_maker: async_sessionmaker[AsyncSession] = async_sessionmaker( + engine, expire_on_commit=False + ) + logger.info("Email outbox worker started", extra={"worker_id": worker_id}) + + while True: + try: + await _poll_and_process(session_maker, email_strategy, worker_id) + except asyncio.CancelledError: + logger.info("Email outbox worker cancelled", extra={"worker_id": worker_id}) + raise + except Exception: + logger.exception("Email outbox worker error") + + await asyncio.sleep(settings.EMAIL_OUTBOX_POLL_SECONDS) diff --git a/app/domains/products/README.md b/app/domains/products/README.md new file mode 100644 index 0000000..f8e4bcc --- /dev/null +++ b/app/domains/products/README.md @@ -0,0 +1,193 @@ +# Products Module + +The products module manages the product catalog — the services offered to companies. It provides CRUD operations for products and manages the many-to-many association between products and companies. 
+
+## Architecture
+
+```
+products/
+├── routers.py        # HTTP endpoints (FastAPI router)
+├── services.py       # Business logic
+├── repositories.py   # Database access (SQLAlchemy)
+├── schemas.py        # Pydantic DTOs (request/response validation)
+├── entities.py       # Domain dataclasses (decoupled from ORM)
+├── models.py         # SQLAlchemy ORM models
+├── dependencies.py   # FastAPI dependency injection wiring
+└── swagger_utils.py  # Swagger/OpenAPI documentation configs
+```
+
+## Data Model
+
+### Products
+
+| Field         | Type          | Description                    |
+|---------------|---------------|--------------------------------|
+| `id`          | `int`         | Primary key (auto-increment)   |
+| `name`        | `string(127)` | Required, 3-127 characters     |
+| `description` | `string(500)` | Nullable, 3-500 characters     |
+| `created_at`  | `datetime`    | Server default `now()`         |
+| `deleted_at`  | `datetime`    | Nullable (soft delete)         |
+
+### Relationships
+
+- **Products <-> Companies**: Many-to-many via `company_products` join table (defined in the companies domain). A product can be contracted by multiple companies, and a company can contract multiple products.
+
+---
+
+## CRUD Endpoints
+
+All endpoints are mounted under `/api/products` and require authentication via `Authorization: Bearer <token>`.
+
+### Products
+
+| Method   | Path                    | Permission            | Description                   |
+|----------|-------------------------|-----------------------|-------------------------------|
+| `POST`   | `/`                     | `product:create`      | Create a new product          |
+| `GET`    | `/`                     | `product:list`        | List products (paginated)     |
+| `GET`    | `/{product_id}`         | `product:read`        | Get product by ID             |
+| `PUT`    | `/{product_id}`         | `product:replace`     | Replace product (full update) |
+| `PATCH`  | `/{product_id}`         | `product:update`      | Partial update                |
+| `DELETE` | `/{product_id}`         | `product:soft_delete` | Soft-delete product           |
+
+### Product Companies
+
+| Method   | Path                                    | Permission                | Description                          |
+|----------|-----------------------------------------|---------------------------|--------------------------------------|
+| `POST`   | `/{product_id}/companies`               | `product:add_companies`   | Associate companies with a product   |
+| `DELETE` | `/{product_id}/companies`               | `product:remove_companies`| Remove company associations (batch)  |
+| `DELETE` | `/{product_id}/companies/{company_id}`  | `product:remove_company`  | Remove a single company              |
+| `GET`    | `/{product_id}/companies`               | `product:list_companies`  | List companies of a product          |
+
+---
+
+## Request / Response Examples
+
+### Create Product
+
+```
+POST /api/products/
+Authorization: Bearer <token>
+```
+
+**Request body:**
+```json
+{
+  "name": "SyncDesk Chat",
+  "description": "Real-time chat support module with agent routing"
+}
+```
+
+**Response `201`:**
+```json
+{
+  "data": {
+    "id": 1,
+    "name": "SyncDesk Chat",
+    "description": "Real-time chat support module with agent routing",
+    "created_at": "2026-04-15T12:00:00"
+  },
+  "meta": { "timestamp": "...", "success": true, "request_id": null }
+}
+```
+
+**Error responses:**
+- `409 Conflict` — a product with the same name already exists.
+- `422 Unprocessable Entity` — request body validation failed.
+
+### List Products (Paginated)
+
+```
+GET /api/products/?page=1&limit=20
+Authorization: Bearer <token>
+```
+
+**Response `200`:**
+```json
+{
+  "data": {
+    "items": [
+      {
+        "id": 1,
+        "name": "SyncDesk Chat",
+        "description": "Real-time chat support module with agent routing"
+      }
+    ],
+    "total": 1,
+    "page": 1,
+    "limit": 20
+  },
+  "meta": { "timestamp": "...", "success": true, "request_id": null }
+}
+```
+
+### Partial Update
+
+```
+PATCH /api/products/{product_id}
+Authorization: Bearer <token>
+```
+
+**Request body:**
+```json
+{
+  "description": "Updated description for the product"
+}
+```
+
+> At least one field (`name` or `description`) must be provided.
+
+**Response `200`:**
+```json
+{
+  "data": {
+    "id": 1,
+    "name": "SyncDesk Chat",
+    "description": "Updated description for the product"
+  },
+  "meta": { "timestamp": "...", "success": true, "request_id": null }
+}
+```
+
+**Error responses:**
+- `404 Not Found` — product not found.
+- `422 Unprocessable Entity` — no valid field provided or validation failed.
+
+### Add Companies to a Product
+
+```
+POST /api/products/{product_id}/companies
+Authorization: Bearer <token>
+```
+
+**Request body:**
+```json
+{
+  "company_ids": ["uuid-1", "uuid-2"]
+}
+```
+
+**Response `201`:**
+```json
+{
+  "data": null,
+  "meta": { "timestamp": "...", "success": true, "request_id": null }
+}
+```
+
+**Error responses:**
+- `404 Not Found` — product or one of the referenced companies not found.
+- `409 Conflict` — one or more companies are already associated with this product.
+
+---
+
+## Validation Rules
+
+- **name**: required, between 3 and 127 characters.
+- **description**: optional on create, between 3 and 500 characters when provided.
+- **Partial update** (`PATCH`): at least one field must be present in the payload.
+ +--- + +## Implementation Status + +> **All endpoints currently return `501 Not Implemented`.** This is a temporary scaffold — each endpoint **must** be replaced with proper business logic in the service and repository layers as the domain is implemented. diff --git a/app/domains/products/__init__.py b/app/domains/products/__init__.py new file mode 100644 index 0000000..7b8f774 --- /dev/null +++ b/app/domains/products/__init__.py @@ -0,0 +1,3 @@ +from .routers import product_router + +__all__ = ["product_router"] diff --git a/app/domains/products/dependencies.py b/app/domains/products/dependencies.py new file mode 100644 index 0000000..f8f3c5b --- /dev/null +++ b/app/domains/products/dependencies.py @@ -0,0 +1,21 @@ +from typing import Annotated + +from fastapi import Depends + +from app.db.postgres.dependencies import PgSessionDep +from app.domains.products.repositories import ProductRepository +from app.domains.products.services import ProductService + + +def get_product_repository(db: PgSessionDep) -> ProductRepository: + return ProductRepository(db) + + +ProductRepoDep = Annotated[ProductRepository, Depends(get_product_repository)] + + +def get_product_service(repo: ProductRepoDep) -> ProductService: + return ProductService(repo) + + +ProductServiceDep = Annotated[ProductService, Depends(get_product_service)] diff --git a/app/domains/products/entities.py b/app/domains/products/entities.py new file mode 100644 index 0000000..12a4bd2 --- /dev/null +++ b/app/domains/products/entities.py @@ -0,0 +1,11 @@ +from datetime import datetime + +from pydantic.dataclasses import dataclass + + +@dataclass +class Product: + id: int + name: str + description: str + created_at: datetime diff --git a/app/domains/products/models.py b/app/domains/products/models.py new file mode 100644 index 0000000..a97a116 --- /dev/null +++ b/app/domains/products/models.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +from datetime import datetime +from typing import TYPE_CHECKING + 
+from sqlalchemy import DateTime, Integer, String, func +from sqlalchemy.orm import Mapped, mapped_column, relationship + +from app.db.postgres.base import Base +from app.domains.companies.models import company_products + +if TYPE_CHECKING: + from app.domains.companies.models import Company + + +class Product(Base): + __tablename__ = "products" + + id: Mapped[int] = mapped_column(Integer, primary_key=True) + name: Mapped[str] = mapped_column(String(127), nullable=False) + description: Mapped[str | None] = mapped_column(String(500), nullable=True) + created_at: Mapped[datetime] = mapped_column( + DateTime, nullable=False, server_default=func.now() + ) + deleted_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) + + companies: Mapped[list[Company]] = relationship( + secondary=company_products, + back_populates="products", + ) diff --git a/app/domains/products/repositories.py b/app/domains/products/repositories.py new file mode 100644 index 0000000..914d257 --- /dev/null +++ b/app/domains/products/repositories.py @@ -0,0 +1,156 @@ +from typing import Any +from uuid import UUID +from datetime import datetime, UTC +from sqlalchemy import select, update, delete, exc, func +from sqlalchemy.dialects.postgresql import insert as pg_insert +from sqlalchemy.ext.asyncio import AsyncSession + +from app.domains.products.entities import Product as ProductEntity +from app.domains.products.models import Product as ProductModel +from app.domains.companies.models import Company as CompanyModel, company_products +from app.domains.companies.entities import Company as CompanyEntity +from app.db.exceptions import ResourceAlreadyExistsError +from app.domains.products.schemas import CreateProductDTO, UpdateProductDTO, ReplaceProductDTO +from app.core.schemas import PaginatedItems + +class ProductRepository: + def __init__(self, db: AsyncSession) -> None: + self.db = db + + def _to_entity(self, model: ProductModel) -> ProductEntity: + return ProductEntity( + id=model.id, + 
from typing import Any
from uuid import UUID
from datetime import datetime, timedelta, UTC

from sqlalchemy import select, update, delete, exc, func
from sqlalchemy.dialects.postgresql import insert as pg_insert
from sqlalchemy.ext.asyncio import AsyncSession

from app.domains.products.entities import Product as ProductEntity
from app.domains.products.models import Product as ProductModel
from app.domains.companies.models import Company as CompanyModel, company_products
from app.domains.companies.entities import Company as CompanyEntity
from app.db.exceptions import ResourceAlreadyExistsError
from app.domains.products.schemas import CreateProductDTO, UpdateProductDTO, ReplaceProductDTO
from app.core.schemas import PaginatedItems


class ProductRepository:
    """Async Postgres repository for products and product<->company links.

    Timestamps are written naive (``.replace(tzinfo=None)``) because the
    product columns are timezone-less ``DateTime``.
    """

    def __init__(self, db: AsyncSession) -> None:
        self.db = db

    def _to_entity(self, model: ProductModel) -> ProductEntity:
        """Map an ORM row to the domain entity.

        The entity requires ``description: str`` while the column is
        nullable, so NULL is coerced to an empty string.
        """
        return ProductEntity(
            id=model.id,
            name=model.name,
            description=model.description or "",
            created_at=model.created_at,
        )

    def _to_company_entity(self, model: CompanyModel) -> CompanyEntity:
        """Map a company ORM row to its domain entity."""
        return CompanyEntity(
            id=model.id,
            legal_name=model.legal_name,
            tax_id=model.tax_id,
            created_at=model.created_at,
            trade_name=model.trade_name,
        )

    async def create(self, dto: CreateProductDTO) -> ProductEntity:
        """Insert a new product.

        Raises:
            ResourceAlreadyExistsError: on a unique-constraint violation
                (duplicate product name).
        """
        try:
            model = ProductModel(**dto.model_dump())
            self.db.add(model)
            await self.db.flush()  # surface constraint errors before commit
            await self.db.commit()
            return self._to_entity(model)
        except exc.IntegrityError as e:
            await self.db.rollback()
            raise ResourceAlreadyExistsError("Product", dto.name) from e

    async def get_by_id(self, product_id: int) -> ProductEntity | None:
        """Fetch a non-deleted product by id, or None when absent."""
        result = await self.db.execute(
            select(ProductModel).where(
                ProductModel.id == product_id, ProductModel.deleted_at.is_(None)
            )
        )
        model = result.scalar_one_or_none()
        return self._to_entity(model) if model else None

    async def get_all_paginated(self, skip: int, limit: int) -> PaginatedItems[ProductEntity]:
        """Return one page of non-deleted products plus the total count."""
        total_result = await self.db.execute(
            select(func.count(ProductModel.id))
            .select_from(ProductModel)
            .where(ProductModel.deleted_at.is_(None))
        )
        total = total_result.scalar_one() or 0

        result = await self.db.execute(
            select(ProductModel)
            .where(ProductModel.deleted_at.is_(None))
            .offset(skip)
            .limit(limit)
        )
        models = result.scalars().all()
        return PaginatedItems(
            items=[self._to_entity(m) for m in models],
            total=total,
            page=(skip // limit) + 1,
            limit=limit,
        )

    async def update(self, product_id: int, dto: UpdateProductDTO | ReplaceProductDTO) -> ProductEntity | None:
        """Apply a partial/full update and return the new state, or None if missing.

        Raises:
            ResourceAlreadyExistsError: on a duplicate-name constraint violation.
        """
        try:
            update_data = dto.model_dump(exclude_unset=True)
            # An empty VALUES clause would make SQLAlchemy raise CompileError,
            # so a no-field patch just returns the current state.
            if not update_data:
                return await self.get_by_id(product_id)

            result = await self.db.execute(
                update(ProductModel)
                .where(ProductModel.id == product_id, ProductModel.deleted_at.is_(None))
                .values(**update_data)
                .returning(ProductModel)
            )
            model = result.scalar_one_or_none()
            if model:
                await self.db.commit()
                return self._to_entity(model)
            # Fix: roll back the zero-row UPDATE instead of leaving the
            # transaction pending on the shared session.
            await self.db.rollback()
            return None
        except exc.IntegrityError as e:
            await self.db.rollback()
            raise ResourceAlreadyExistsError("Product", "name") from e

    async def soft_delete(self, product_id: int) -> bool:
        """Mark a product as deleted; returns True when a row was affected."""
        result = await self.db.execute(
            update(ProductModel)
            .where(ProductModel.id == product_id, ProductModel.deleted_at.is_(None))
            .values(deleted_at=datetime.now(UTC).replace(tzinfo=None))
            .returning(ProductModel.id)
        )
        model_id = result.scalar_one_or_none()
        if model_id:
            await self.db.commit()
            return True
        # Fix: close the no-op transaction rather than leaving it open.
        await self.db.rollback()
        return False

    async def get_product_companies_paginated(self, product_id: int, skip: int, limit: int) -> tuple[list[CompanyEntity], int]:
        """Return one page of companies linked to a product, plus the total."""
        total_result = await self.db.execute(
            select(func.count(CompanyModel.id))
            .select_from(CompanyModel)
            .join(company_products, CompanyModel.id == company_products.c.company_id)
            .where(company_products.c.product_id == product_id, CompanyModel.deleted_at.is_(None))
        )
        total = total_result.scalar_one() or 0

        result = await self.db.execute(
            select(CompanyModel)
            .join(company_products, CompanyModel.id == company_products.c.company_id)
            .where(company_products.c.product_id == product_id, CompanyModel.deleted_at.is_(None))
            .offset(skip)
            .limit(limit)
        )
        models = result.scalars().all()
        return [self._to_company_entity(m) for m in models], total

    async def add_companies(self, product_id: int, company_ids: list[UUID]) -> None:
        """Link companies to a product, ignoring links that already exist.

        Raises:
            ValueError: when a referenced company id violates the FK constraint.
        """
        if not company_ids:
            return
        now = datetime.now(UTC).replace(tzinfo=None)
        # Default support window of one year from purchase.
        future = now + timedelta(days=365)

        # set() deduplicates so ON CONFLICT only has to absorb pre-existing rows.
        values: list[dict[str, Any]] = [
            {"company_id": cid, "product_id": product_id, "bought_at": now, "support_until": future}
            for cid in set(company_ids)
        ]

        try:
            await self.db.execute(
                pg_insert(company_products).values(values).on_conflict_do_nothing()
            )
            await self.db.commit()
        except exc.IntegrityError as e:
            await self.db.rollback()
            raise ValueError("One or more company_ids do not exist") from e

    async def remove_companies(self, product_id: int, company_ids: list[UUID]) -> None:
        """Delete the given product<->company links; missing links are ignored."""
        if not company_ids:
            return
        await self.db.execute(
            delete(company_products).where(
                company_products.c.product_id == product_id,
                company_products.c.company_id.in_(company_ids),
            )
        )
        await self.db.commit()
from uuid import UUID
from fastapi import APIRouter, Query, status
from fastapi.encoders import jsonable_encoder
from fastapi.responses import JSONResponse

from app.core.dependencies import ResponseFactoryDep
from app.core.schemas import PaginatedItems
from app.core.exceptions import AppHTTPException
from app.db.exceptions import ResourceAlreadyExistsError
from app.domains.auth.dependencies import CurrentUserSessionDep, require_permission
from app.domains.products.dependencies import ProductServiceDep
from app.domains.products.schemas import (
    AddProductToCompaniesDTO,
    CreateProductDTO,
    RemoveProductFromCompaniesDTO,
    ReplaceProductDTO,
    UpdateProductDTO,
)
from app.domains.products.swagger_utils import (
    add_companies_swagger,
    create_product_swagger,
    get_product_companies_swagger,
    get_product_swagger,
    get_products_swagger,
    remove_companies_swagger,
    remove_company_swagger,
    replace_product_swagger,
    soft_delete_product_swagger,
    update_product_swagger,
)

product_router = APIRouter(tags=["Products"])


@product_router.post("/", dependencies=[require_permission("product:create")], **create_product_swagger)
async def create_product(
    dto: CreateProductDTO, auth: CurrentUserSessionDep, service: ProductServiceDep, response: ResponseFactoryDep,
) -> JSONResponse:
    """Create a product; 409 when the name already exists."""
    try:
        product = await service.create(dto)
        return response.success(data=jsonable_encoder(product), status_code=status.HTTP_201_CREATED)
    except ResourceAlreadyExistsError as e:
        raise AppHTTPException(status_code=status.HTTP_409_CONFLICT, detail=str(e)) from e


@product_router.get("/", dependencies=[require_permission("product:list")], **get_products_swagger)
async def get_products(
    auth: CurrentUserSessionDep, service: ProductServiceDep, response: ResponseFactoryDep,
    page: int = Query(default=1, ge=1), limit: int = Query(default=20, ge=1),
) -> JSONResponse:
    """List products, paginated (1-based `page`)."""
    res = await service.get_all_paginated(page, limit)
    return response.success(data=res.model_dump(mode="json"), status_code=status.HTTP_200_OK)


@product_router.get("/{product_id}", dependencies=[require_permission("product:read")], **get_product_swagger)
async def get_product(
    product_id: int, auth: CurrentUserSessionDep, service: ProductServiceDep, response: ResponseFactoryDep,
) -> JSONResponse:
    """Fetch a single product; 404 when absent or soft-deleted."""
    product = await service.get_by_id(product_id)
    if not product:
        raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Product not found")
    return response.success(data=jsonable_encoder(product), status_code=status.HTTP_200_OK)


@product_router.put("/{product_id}", dependencies=[require_permission("product:replace")], **replace_product_swagger)
async def replace_product(
    product_id: int, dto: ReplaceProductDTO, auth: CurrentUserSessionDep, service: ProductServiceDep, response: ResponseFactoryDep,
) -> JSONResponse:
    """Fully replace a product; 404 when absent, 409 on duplicate name."""
    try:
        product = await service.update(product_id, dto)
        if not product:
            raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Product not found")
        return response.success(data=jsonable_encoder(product), status_code=status.HTTP_200_OK)
    except ResourceAlreadyExistsError as e:
        raise AppHTTPException(status_code=status.HTTP_409_CONFLICT, detail=str(e)) from e


@product_router.patch("/{product_id}", dependencies=[require_permission("product:update")], **update_product_swagger)
async def update_product(
    product_id: int, dto: UpdateProductDTO, auth: CurrentUserSessionDep, service: ProductServiceDep, response: ResponseFactoryDep,
) -> JSONResponse:
    """Partially update a product; 404 when absent, 409 on duplicate name."""
    try:
        product = await service.update(product_id, dto)
        if not product:
            raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Product not found")
        return response.success(data=jsonable_encoder(product), status_code=status.HTTP_200_OK)
    except ResourceAlreadyExistsError as e:
        raise AppHTTPException(status_code=status.HTTP_409_CONFLICT, detail=str(e)) from e


@product_router.delete("/{product_id}", dependencies=[require_permission("product:soft_delete")], **soft_delete_product_swagger)
async def soft_delete_product(
    product_id: int, auth: CurrentUserSessionDep, service: ProductServiceDep, response: ResponseFactoryDep,
) -> JSONResponse:
    """Soft-delete a product; 404 when absent or already deleted."""
    deleted = await service.soft_delete(product_id)
    if not deleted:
        raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Product not found")
    return response.success(data=None, status_code=status.HTTP_200_OK)


@product_router.post("/{product_id}/companies", dependencies=[require_permission("product:add_companies")], **add_companies_swagger)
async def add_product_to_companies(
    product_id: int, dto: AddProductToCompaniesDTO, auth: CurrentUserSessionDep, service: ProductServiceDep, response: ResponseFactoryDep,
) -> JSONResponse:
    """Associate companies with a product; 404 on missing product/company."""
    try:
        await service.add_companies(product_id, dto.company_ids)
        return response.success(data=None, status_code=status.HTTP_201_CREATED)
    except ValueError as e:
        raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e


@product_router.delete("/{product_id}/companies", dependencies=[require_permission("product:remove_companies")], **remove_companies_swagger)
async def remove_product_from_companies(
    product_id: int, dto: RemoveProductFromCompaniesDTO, auth: CurrentUserSessionDep, service: ProductServiceDep, response: ResponseFactoryDep,
) -> JSONResponse:
    """Remove company associations from a product; 404 on missing product."""
    try:
        await service.remove_companies(product_id, dto.company_ids)
        return response.success(data=None, status_code=status.HTTP_200_OK)
    except ValueError as e:
        raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e


@product_router.delete("/{product_id}/companies/{company_id}", dependencies=[require_permission("product:remove_company")], **remove_company_swagger)
async def remove_product_company(
    product_id: int, company_id: UUID, auth: CurrentUserSessionDep, service: ProductServiceDep, response: ResponseFactoryDep,
) -> JSONResponse:
    """Remove a single company association; 404 on missing product."""
    try:
        await service.remove_companies(product_id, [company_id])
        return response.success(data=None, status_code=status.HTTP_200_OK)
    except ValueError as e:
        raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e


@product_router.get("/{product_id}/companies", dependencies=[require_permission("product:list_companies")], **get_product_companies_swagger)
async def get_product_companies(
    product_id: int, auth: CurrentUserSessionDep, service: ProductServiceDep, response: ResponseFactoryDep,
    page: int = Query(default=1, ge=1), limit: int = Query(default=20, ge=1),
) -> JSONResponse:
    """List a product's companies, paginated.

    Fix: the swagger contract documents a 404 for an unknown product, but
    this route previously returned an empty 200 page instead — enforce the
    documented contract.
    """
    if not await service.get_by_id(product_id):
        raise AppHTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Product not found")
    companies, total = await service.get_product_companies_paginated(product_id, page, limit)

    paginated = PaginatedItems(items=companies, total=total, page=page, limit=limit)
    return response.success(data=paginated.model_dump(mode="json"), status_code=status.HTTP_200_OK)
from uuid import UUID

from pydantic import model_validator

from app.core.schemas import BaseDTO, PaginatedItems
from app.domains.companies.entities import Company
from app.domains.products.entities import Product
from app.schemas.response import GenericSuccessContent


def validate_product_fields(name: str | None, description: str | None) -> None:
    """Length-check the optional product fields, collecting every violation.

    Raises:
        ValueError: all failure messages joined by "; " when any check fails.
    """
    errors: list[str] = []
    if name is not None and not (3 <= len(name) <= 127):
        errors.append("Product name must be between 3 and 127 characters")
    if description is not None and not (3 <= len(description) <= 500):
        errors.append("Product description must be between 3 and 500 characters")
    if errors:
        raise ValueError("; ".join(errors))


class CreateProductDTO(BaseDTO):
    """Creation payload; both fields are required and length-checked."""

    name: str
    description: str

    @model_validator(mode="after")
    def validate_fields(self) -> "CreateProductDTO":
        validate_product_fields(self.name, self.description)
        return self


class UpdateProductDTO(BaseDTO):
    """Partial-update payload; at least one field must be supplied."""

    name: str | None = None
    description: str | None = None

    @model_validator(mode="after")
    def validate_fields(self) -> "UpdateProductDTO":
        if self.name is None and self.description is None:
            raise ValueError("Product update payload must have at least one valid attribute")
        validate_product_fields(self.name, self.description)
        return self


class ReplaceProductDTO(CreateProductDTO):
    """Full-replacement payload; same contract as creation."""


class AddProductToCompaniesDTO(BaseDTO):
    """Company ids to associate with a product."""

    company_ids: list[UUID]


class RemoveProductFromCompaniesDTO(BaseDTO):
    """Company ids to disassociate from a product."""

    company_ids: list[UUID]


# Response envelope aliases consumed by the routers and the OpenAPI spec.
CreateProductResponse = GenericSuccessContent[Product]

GetProductsResponse = GenericSuccessContent[PaginatedItems[Product]]

GetProductResponse = GenericSuccessContent[Product]

ReplaceProductResponse = GenericSuccessContent[Product]

UpdateProductResponse = GenericSuccessContent[Product]

GetProductCompaniesResponse = GenericSuccessContent[PaginatedItems[Company]]
from uuid import UUID
from app.domains.products.repositories import ProductRepository
from app.domains.products.entities import Product as ProductEntity
from app.domains.companies.entities import Company as CompanyEntity
from app.domains.products.schemas import CreateProductDTO, UpdateProductDTO, ReplaceProductDTO
from app.core.schemas import PaginatedItems


class ProductService:
    """Application-level operations for the product catalog.

    Thin orchestration layer: translates 1-based pages into row offsets and
    guards association changes with a product-existence check, delegating
    all persistence to the repository.
    """

    def __init__(self, repo: ProductRepository) -> None:
        self.repo = repo

    @staticmethod
    def _offset(page: int, limit: int) -> int:
        """Convert a 1-based page number into a row offset."""
        return (page - 1) * limit

    async def create(self, dto: CreateProductDTO) -> ProductEntity:
        """Create a new product from the validated payload."""
        return await self.repo.create(dto)

    async def get_by_id(self, product_id: int) -> ProductEntity | None:
        """Fetch a product, or None when it does not exist."""
        return await self.repo.get_by_id(product_id)

    async def get_all_paginated(self, page: int, limit: int) -> PaginatedItems[ProductEntity]:
        """List products one page at a time."""
        return await self.repo.get_all_paginated(self._offset(page, limit), limit)

    async def update(self, product_id: int, dto: UpdateProductDTO | ReplaceProductDTO) -> ProductEntity | None:
        """Apply a partial or full update; None when the product is missing."""
        return await self.repo.update(product_id, dto)

    async def soft_delete(self, product_id: int) -> bool:
        """Soft-delete a product; True when a row was affected."""
        return await self.repo.soft_delete(product_id)

    async def get_product_companies_paginated(self, product_id: int, page: int, limit: int) -> tuple[list[CompanyEntity], int]:
        """List the companies linked to a product, plus the total count."""
        return await self.repo.get_product_companies_paginated(
            product_id, self._offset(page, limit), limit
        )

    async def add_companies(self, product_id: int, company_ids: list[UUID]) -> None:
        """Link companies to a product.

        Raises:
            ValueError: when the product does not exist.
        """
        if await self.get_by_id(product_id) is None:
            raise ValueError(f"Product {product_id} not found")
        await self.repo.add_companies(product_id, company_ids)

    async def remove_companies(self, product_id: int, company_ids: list[UUID]) -> None:
        """Unlink companies from a product.

        Raises:
            ValueError: when the product does not exist.
        """
        if await self.get_by_id(product_id) is None:
            raise ValueError(f"Product {product_id} not found")
        await self.repo.remove_companies(product_id, company_ids)
from typing import Any

from fastapi import status

from app.domains.products.schemas import (
    CreateProductResponse,
    GetProductCompaniesResponse,
    GetProductResponse,
    GetProductsResponse,
    ReplaceProductResponse,
    UpdateProductResponse,
)
from app.schemas.response import ErrorContent, GenericSuccessContent


def _entry(description: str, model: Any) -> dict[str, Any]:
    """Build a single OpenAPI response entry (description + schema model)."""
    return {"description": description, "model": model}


# -- POST / ------------------------------------------------------------------

create_product_responses: dict[int | str, dict[str, Any]] = {
    201: _entry("Product created successfully.", CreateProductResponse),
    409: _entry("A product with the same name already exists.", ErrorContent),
    422: _entry("Request body validation failed.", ErrorContent),
}

create_product_swagger: dict[str, Any] = {
    "summary": "Create a new product",
    "description": "Registers a new product in the catalog.",
    "status_code": status.HTTP_201_CREATED,
    "response_model": CreateProductResponse,
    "responses": create_product_responses,
}

# -- GET / -------------------------------------------------------------------

get_products_responses: dict[int | str, dict[str, Any]] = {
    200: _entry("Paginated list of products retrieved successfully.", GetProductsResponse),
}

get_products_swagger: dict[str, Any] = {
    "summary": "List products",
    "description": "Returns a paginated list of products.",
    "response_model": GetProductsResponse,
    "responses": get_products_responses,
}

# -- GET /{product_id} -------------------------------------------------------

get_product_responses: dict[int | str, dict[str, Any]] = {
    200: _entry("Product retrieved successfully.", GetProductResponse),
    404: _entry("Product not found.", ErrorContent),
}

get_product_swagger: dict[str, Any] = {
    "summary": "Get a product by ID",
    "description": "Returns a single product by its ID.",
    "response_model": GetProductResponse,
    "responses": get_product_responses,
}

# -- PUT /{product_id} -------------------------------------------------------

replace_product_responses: dict[int | str, dict[str, Any]] = {
    200: _entry("Product replaced successfully.", ReplaceProductResponse),
    404: _entry("Product not found.", ErrorContent),
    409: _entry("A product with the same name already exists.", ErrorContent),
    422: _entry("Request body validation failed.", ErrorContent),
}

replace_product_swagger: dict[str, Any] = {
    "summary": "Replace a product",
    "description": "Fully replaces all fields of an existing product.",
    "response_model": ReplaceProductResponse,
    "responses": replace_product_responses,
}

# -- PATCH /{product_id} -----------------------------------------------------

update_product_responses: dict[int | str, dict[str, Any]] = {
    200: _entry("Product updated successfully.", UpdateProductResponse),
    404: _entry("Product not found.", ErrorContent),
    409: _entry("A product with the same name already exists.", ErrorContent),
    422: _entry("Request body validation failed.", ErrorContent),
}

update_product_swagger: dict[str, Any] = {
    "summary": "Partially update a product",
    "description": "Updates only the provided fields of an existing product.",
    "response_model": UpdateProductResponse,
    "responses": update_product_responses,
}

# -- DELETE /{product_id} ----------------------------------------------------

soft_delete_product_responses: dict[int | str, dict[str, Any]] = {
    200: _entry("Product soft-deleted successfully.", GenericSuccessContent[None]),
    404: _entry("Product not found.", ErrorContent),
}

soft_delete_product_swagger: dict[str, Any] = {
    "summary": "Soft-delete a product",
    "description": "Marks a product as deleted without removing it from the database.",
    "response_model": GenericSuccessContent[None],
    "responses": soft_delete_product_responses,
}

# -- POST /{product_id}/companies --------------------------------------------

add_companies_responses: dict[int | str, dict[str, Any]] = {
    201: _entry("Companies associated with the product successfully.", GenericSuccessContent[None]),
    404: _entry("Product or one of the referenced companies not found.", ErrorContent),
    409: _entry("One or more companies are already associated with this product.", ErrorContent),
    422: _entry("Request body validation failed.", ErrorContent),
}

add_companies_swagger: dict[str, Any] = {
    "summary": "Add companies to a product",
    "description": "Associates one or more companies with an existing product.",
    "status_code": status.HTTP_201_CREATED,
    "response_model": GenericSuccessContent[None],
    "responses": add_companies_responses,
}

# -- DELETE /{product_id}/companies ------------------------------------------

remove_companies_responses: dict[int | str, dict[str, Any]] = {
    200: _entry("Companies removed from the product successfully.", GenericSuccessContent[None]),
    404: _entry("Product or one of the referenced companies not found.", ErrorContent),
    422: _entry("Request body validation failed.", ErrorContent),
}

remove_companies_swagger: dict[str, Any] = {
    "summary": "Remove companies from a product",
    "description": "Removes one or more company associations from an existing product.",
    "response_model": GenericSuccessContent[None],
    "responses": remove_companies_responses,
}

# -- DELETE /{product_id}/companies/{company_id} -----------------------------

remove_company_responses: dict[int | str, dict[str, Any]] = {
    200: _entry("Company removed from the product successfully.", GenericSuccessContent[None]),
    404: _entry("Product or company association not found.", ErrorContent),
}

remove_company_swagger: dict[str, Any] = {
    "summary": "Remove a single company from a product",
    "description": "Removes a specific company association from an existing product.",
    "response_model": GenericSuccessContent[None],
    "responses": remove_company_responses,
}

# -- GET /{product_id}/companies ---------------------------------------------

get_product_companies_responses: dict[int | str, dict[str, Any]] = {
    200: _entry("Paginated list of companies associated with the product.", GetProductCompaniesResponse),
    404: _entry("Product not found.", ErrorContent),
}

get_product_companies_swagger: dict[str, Any] = {
    "summary": "List companies of a product",
    "description": "Returns a paginated list of companies associated with the given product.",
    "response_model": GetProductCompaniesResponse,
    "responses": get_product_companies_responses,
}
-## Visao Geral +This module defines the public HTTP contracts, Pydantic schemas, pagination rules, event payloads, and the minimum implemented behavior required for the current sprint. The focus is contract definition, not full operational business implementation. -O dominio `ticket`: -- persiste tickets no MongoDB usando Beanie -- cria tickets com status inicial `open` -- resolve o cliente do ticket a partir de `client_id` -- permite consulta com filtros opcionais -- permite atualizacao de status com regras de transicao +## Scope -Dependencias principais: -- `TicketRepository` para persistencia e consulta -- `TicketService` para regra de negocio -- `UserService` do dominio `auth` para resolver o cliente -- `ResponseFactoryDep` para o envelope de resposta HTTP -- `require_permission(...)` para autorizacao +Implemented routes: +- `POST /api/tickets/` +- `GET /api/tickets/` +- `GET /api/tickets/{ticket_id}` +- `PATCH /api/tickets/{ticket_id}` -## Arquitetura +Contract stubs in this sprint: +- `GET /api/tickets/queue` +- `POST /api/tickets/{ticket_id}/assign` +- `POST /api/tickets/{ticket_id}/escalate` +- `POST /api/tickets/{ticket_id}/transfer` -- `routers.py`: borda HTTP -- `schemas.py`: contratos de entrada e saida -- `services.py`: regra de negocio -- `repositories.py`: acesso ao MongoDB -- `dependencies.py`: composicao do service -- `models.py`: enums e documento `Ticket` +Out of scope: +- listeners for `live_chat` +- listeners for `chatbot` +- event dispatcher wiring +- delete endpoint +- full queue, assignment, escalation, and transfer business logic -Fluxo resumido: +## Architecture -1. Router valida autenticacao e permissao. -2. Router delega ao service. -3. Service executa a regra de negocio. -4. Repository acessa o MongoDB. -5. Router devolve resposta no envelope padrao. 
+- `models.py`: persistent ticket document and enums +- `schemas.py`: request/response contracts and event payloads +- `routers.py`: HTTP contract surface +- `services.py`: implemented business logic kept intentionally small +- `repositories.py`: MongoDB access for the implemented flows +- `dependencies.py`: service composition -## Rotas Disponiveis +The domain follows the project standards based on: +- `CurrentUserSessionDep` +- `require_permission(...)` +- `ResponseFactoryDep` +- `GenericSuccessContent[...]` -### `GET /api/tickets/` +## Enums -Lista tickets ou busca por filtros opcionais. +### `TicketType` -Permissao: -- `ticket:read` +Values: +- `issue` +- `access` +- `new_feature` -Autenticacao: -- Bearer token obrigatorio +### `TicketCriticality` -Filtros suportados: -- `ticket_id` -- `client_id` +Values: +- `high` +- `medium` +- `low` + +### `TicketStatus` + +Values: +- `open` +- `awaiting_assignment` +- `in_progress` +- `waiting_for_provider` +- `waiting_for_validation` +- `finished` + +`awaiting_assignment` is a real status of the official contract. It is used to represent tickets that were created successfully and are waiting for an active assignee. + +## Official initial status + +The official initial status of a newly created ticket is: +- `awaiting_assignment` + +This decision is applied consistently in the service layer and documented as the default lifecycle entry point for ticket operations. + +## Persistent model strategy + +The persisted `Ticket` document remains intentionally conservative. 
+ +Persisted fields: - `triage_id` -- `status` -- `criticality` - `type` +- `criticality` - `product` +- `status` +- `creation_date` +- `description` +- `chat_ids` +- `agent_history` +- `client` +- `comments` -Comportamento: -- sem filtros: retorna todos os tickets -- com filtros: aplica todos em conjunto -- sem resultados: retorna `200` com lista vazia -- com `ticket_id`: continua retornando lista para manter consistencia - -Exemplos: - -```http -GET /api/tickets/ -GET /api/tickets/?status=open -GET /api/tickets/?client_id=0f7d7c4f-7b5b-45cb-9d85-6f3c69f0b5d2 -GET /api/tickets/?criticality=high&type=issue -GET /api/tickets/?ticket_id=67f0ca60e4b0b1a2c3d4e601 -GET /api/tickets/?client_id=0f7d7c4f-7b5b-45cb-9d85-6f3c69f0b5d2&status=in_progress&criticality=high -``` +Not added in this sprint: +- `department` +- `current_assignee` +- dedicated department or assignee embedded references + +Queue and routing concerns are represented in API DTOs where needed, without inflating the persisted MongoDB document. Department routing is intentionally not implemented in the current ticket model. + +## Schemas + +### Main request/response contracts + +- `CreateTicketDTO` +- `CreateTicketResponseDTO` +- `TicketSearchFiltersDTO` +- `TicketResponse` +- `TicketListResponse` +- `UpdateTicketDTO` +- `AssignTicketRequest` +- `EscalateTicketRequest` +- `TransferTicketRequest` +- `TicketQueueFiltersDTO` +- `TicketQueueItemResponse` +- `TicketQueueListResponse` + +### Event payload contracts + +- `TicketEventPayload` +- `TicketClosedEventPayload` +- `TicketAssigneeUpdatedEventPayload` +- `TicketEscalatedEventPayload` +- `TriageFinishedEventPayload` + +### Provisional routing fields + +The following field is intentionally typed as `str` in this sprint: +- `department_id` +- `level` + +`department_id` remains available only as a queue filter/response contract field. Transfer and escalation rules do not implement department behavior; they use the support level stored in assignment history. 
+ +## Routes ### `POST /api/tickets/` -Cria um novo ticket. +Status: +- implemented -Permissao: +Permission: - `ticket:create` -Body: +Request body: +- `CreateTicketDTO` + +Response: +- `GenericSuccessContent[CreateTicketResponseDTO]` + +Example request: ```json { @@ -94,221 +162,297 @@ Body: } ``` -### `PATCH /api/tickets/{ticket_id}/status` +Example response: + +```json +{ + "data": { + "id": "67f0ca60e4b0b1a2c3d4e601", + "status": "awaiting_assignment", + "creation_date": "2026-04-14T12:00:00Z" + } +} +``` + +### `GET /api/tickets/` + +Status: +- implemented -Atualiza o status de um ticket existente. +Permission: +- `ticket:read` -Permissao: -- `ticket:update_status` +Official response format: +- `GenericSuccessContent[TicketListResponse]` + +Pagination defaults: +- `page=1` +- `page_size=20` + +Query params: +- `ticket_id` +- `client_id` +- `triage_id` +- `status` +- `criticality` +- `type` +- `product` +- `page` +- `page_size` -Body: +Response shape: ```json { - "status": "in_progress" + "data": { + "items": [], + "page": 1, + "page_size": 20, + "total": 0 + } } ``` -## Schemas +### `GET /api/tickets/{ticket_id}` -### `CreateTicketDTO` +Status: +- implemented -Campos: -- `triage_id: PydanticObjectId` -- `type: TicketType` -- `criticality: TicketCriticality` -- `product: str` -- `description: str` -- `chat_ids: list[PydanticObjectId]` -- `client_id: UUID` +Permission: +- `ticket:read` -### `CreateTicketResponseDTO` +Response: +- `GenericSuccessContent[TicketResponse]` -Campos: -- `id: str` -- `status: TicketStatus` -- `creation_date: datetime` +### `PATCH /api/tickets/{ticket_id}` -### `TicketSearchFiltersDTO` +Status: +- implemented -Campos opcionais: -- `ticket_id: PydanticObjectId | None` -- `client_id: UUID | None` -- `triage_id: PydanticObjectId | None` -- `status: TicketStatus | None` -- `criticality: TicketCriticality | None` -- `type: TicketType | None` -- `product: str | None` +Permission: +- `ticket:update` -### `TicketResponseDTO` +Official purpose: 
+- partially update a ticket -Campos retornados: -- `id` -- `triage_id` -- `type` +Supported request fields: +- `status` - `criticality` - `product` -- `status` -- `creation_date` - `description` -- `chat_ids` -- `agent_history` -- `client` -- `comments` -### `UpdateTicketStatusDTO` +Actions that do not belong to this PATCH: +- assignment +- transfer +- escalation + +Response: +- `GenericSuccessContent[TicketResponse]` + +Event behavior: +- when the resulting status becomes `finished`, this route represents the `ticket.closed` business event contract + +Example request: + +```json +{ + "status": "finished", + "criticality": "medium", + "description": "Chamado concluido e validado." +} +``` + +### `GET /api/tickets/queue` + +Status: +- implemented + +Permission: +- `ticket:queue` + +Query params: +- `status` +- `type` +- `department_id` +- `unassigned_only` +- `level` +- `assignee_id` +- `page` +- `page_size` + +Response: +- `GenericSuccessContent[TicketQueueListResponse]` + +Ordering contract: +- criticality first +- creation date second + +Current behavior: +- lists queue candidates with filters and criticality/date ordering + +### `POST /api/tickets/{ticket_id}/assign` -Campos: -- `status: TicketStatus` +Status: +- implemented -### `UpdateTicketStatusResponseDTO` +Permission: +- `ticket:assign` -Campos: -- `id` -- `previous_status` -- `current_status` +Request body: +- `AssignTicketRequest` -## Regras de Negocio +Response: +- `GenericSuccessContent[TicketResponse]` -### Criacao +Event contract: +- emits `ticket.assignee_updated` -Comportamento confirmado: -- o ticket nasce com `status = open` -- `creation_date` e preenchida automaticamente -- `comments` inicia como lista vazia -- `agent_history` inicia como lista vazia -- o cliente e resolvido por `client_id` usando `UserService.get_by_id(...)` -- se o cliente nao existir, a criacao falha com `404` +Current behavior: +- assigns the ticket to the requested agent and emits `ticket.assignee_updated` -Observacao 
tecnica: -- o projeto nao possui hoje um dominio proprio de empresa -- por isso, `TicketCompany` e montado internamente no service com base no usuario resolvido +### `POST /api/tickets/{ticket_id}/escalate` -### Consulta +Status: +- implemented -Comportamento confirmado: -- a consulta usa uma unica rota GET com query params opcionais -- filtros informados sao combinados com AND -- `client_id` filtra por `client.id` -- `ticket_id` filtra pelo `_id` do documento -- `product` usa comparacao exata +Permission: +- `ticket:escalate` -### Atualizacao de status +Request body: +- `EscalateTicketRequest` -Transicoes validas: +Business rule contract: +- escalation transfers the ticket to a target agent at a higher support level -| Status atual | Proximos status validos | +Response: +- `GenericSuccessContent[TicketResponse]` + +Event contract: +- emits `ticket.escalated` + +Current behavior: +- closes the previous assignment, assigns the target higher-level agent, keeps the ticket `in_progress`, and emits `ticket.escalated` + +### `POST /api/tickets/{ticket_id}/transfer` + +Status: +- implemented + +Permission: +- `ticket:transfer` + +Request body: +- `TransferTicketRequest` + +Business rule contract: +- transfer changes the assignee without changing support level + +Response: +- `GenericSuccessContent[TicketResponse]` + +Event contract: +- emits `ticket.assignee_updated` + +Current behavior: +- closes the previous assignment, assigns the target same-level agent, keeps the ticket `in_progress`, and emits `ticket.assignee_updated` + +### Delete policy + +`DELETE /api/tickets/{ticket_id}` is not exposed in this sprint. 
+ +Reason: +- ticket lifecycle must remain auditable and traceable + +## Status transitions + +Validated transitions: + +| Current status | Allowed next statuses | | --- | --- | -| `open` | `in_progress` | -| `in_progress` | `waiting_for_provider`, `waiting_for_validation`, `finished` | +| `open` | `awaiting_assignment`, `in_progress` | +| `awaiting_assignment` | `in_progress` | +| `in_progress` | `awaiting_assignment`, `waiting_for_provider`, `waiting_for_validation`, `finished` | | `waiting_for_provider` | `in_progress` | | `waiting_for_validation` | `in_progress`, `finished` | -| `finished` | nenhum | +| `finished` | none | -Regras adicionais: -- ticket inexistente retorna `404` -- mesmo status retorna `400` -- transicao invalida retorna `400` +Operational note: +- the official creation flow enters at `awaiting_assignment` +- `open` remains part of the official enum and transition graph -## Formato dos Dados +## Events -| Campo | Tipo | Exemplo | -| --- | --- | --- | -| `ticket_id` | ObjectId | `67f0ca60e4b0b1a2c3d4e601` | -| `triage_id` | ObjectId | `67f0c9b8e4b0b1a2c3d4e5f6` | -| `chat_ids[]` | ObjectId | `67f0c9b8e4b0b1a2c3d4e5f7` | -| `client_id` | UUID | `0f7d7c4f-7b5b-45cb-9d85-6f3c69f0b5d2` | -| `type` | enum | `issue`, `access`, `new_feature` | -| `criticality` | enum | `high`, `medium`, `low` | -| `status` | enum | `open`, `in_progress`, `waiting_for_provider`, `waiting_for_validation`, `finished` | -| `product` | string | `Sistema Financeiro` | +The ticket domain is the producer of: +- `ticket.closed` +- `ticket.assignee_updated` +- `ticket.escalated` -## Retornos Possiveis +The ticket domain also defines the payload it expects to receive from: +- `triage.finished` -### `GET /api/tickets/` +### `ticket.closed` -- `200`: sucesso com lista de tickets -- `401`: token ausente ou invalido -- `403`: usuario sem `ticket:read` -- `422`: query params invalidos -- `500`: erro inesperado +Purpose: +- notify downstream domains that the ticket was closed -### `POST 
/api/tickets/` +Payload: +- `TicketClosedEventPayload` -- `201`: ticket criado -- `401`: token ausente ou invalido -- `403`: usuario sem `ticket:create` -- `404`: cliente inexistente -- `422`: body invalido -- `500`: erro inesperado +Expected external consumers: +- `live_chat` +- `chatbot` -### `PATCH /api/tickets/{ticket_id}/status` +### `ticket.assignee_updated` -- `200`: status atualizado -- `400`: mesmo status ou transicao invalida -- `401`: token ausente ou invalido -- `403`: usuario sem `ticket:update_status` -- `404`: ticket inexistente -- `422`: `ticket_id` ou body invalidos -- `500`: erro inesperado +Purpose: +- notify assignment or transfer updates -## Integracao com a URA +Payload: +- `TicketAssigneeUpdatedEventPayload` -A URA nao deve consumir as rotas HTTP de tickets. +### `ticket.escalated` -Ponto de entrada recomendado: -- `TicketService.create_ticket(dto)` -- `TicketService.update_status(ticket_id, dto)` quando necessario +Purpose: +- notify upward movement in the support structure -Dados que a URA precisa fornecer para criacao: -- `triage_id` -- `type` -- `criticality` -- `product` -- `description` -- `chat_ids` -- `client_id` +Payload: +- `TicketEscalatedEventPayload` -Validacoes que continuam existindo na chamada interna: -- `client_id` precisa existir -- enums precisam ser validos -- `triage_id` e `chat_ids` precisam ser ObjectIds validos -- regras de transicao continuam sendo aplicadas - -Diferencas para HTTP: -- nao usa autenticacao da rota -- nao usa `ResponseFactoryDep` -- erros devem ser tratados como excecoes Python, principalmente `AppHTTPException` - -Exemplo de uso interno: - -```python -from app.domains.ticket.schemas import CreateTicketDTO - -dto = CreateTicketDTO( - triage_id="67f0c9b8e4b0b1a2c3d4e5f6", - type="issue", - criticality="high", - product="Sistema Financeiro", - description="Erro ao emitir boleto", - chat_ids=["67f0c9b8e4b0b1a2c3d4e5f7"], - client_id="0f7d7c4f-7b5b-45cb-9d85-6f3c69f0b5d2", -) - -result = await 
ticket_service.create_ticket(dto) -``` +### `triage.finished` -## Observacoes Tecnicas +Purpose: +- define the upstream event payload that can create a ticket from triage completion -- a nova permissao necessaria para leitura e `ticket:read` -- o seed central deve conter essa permissao para ambientes novos -- a consulta atual usa filtro exato para `product` -- a rota GET retorna tickets completos, nao apenas resumo +Payload: +- `TriageFinishedEventPayload` -## Problemas Conhecidos (pendentes) +Responsibility boundary: +- the event publisher belongs to another domain +- the ticket domain validates and consumes the payload it receives +- `client_id` must come from a trusted authenticated source -Os pontos abaixo foram identificados em revisao tecnica e ainda nao foram corrigidos neste modulo: +## Permissions -- Consulta sem paginação: `GET /api/tickets/` pode retornar toda a coleção sem limite, aumentando risco de degradação com o crescimento da base. -- Campos de texto sem limite: `product` e `description` não possuem restrições de tamanho no schema, permitindo payloads excessivamente grandes. -- Exposição de informações pessoais (PII): a resposta de tickets inclui `client.email` para qualquer usuário com permissão `ticket:read`; avaliar necessidade de mascaramento ou escopo mais restrito. 
+Ticket permissions used by this contract: +- `ticket:read` +- `ticket:create` +- `ticket:update` +- `ticket:queue` +- `ticket:assign` +- `ticket:transfer` +- `ticket:escalate` + +## Implementation summary + +Implemented now: +- ticket creation +- paginated ticket listing +- ticket retrieval by id +- partial ticket update +- queue listing +- assignment +- escalation +- transfer + +Prepared as contract stubs: +- event payload contracts for internal and external integrations diff --git a/app/domains/ticket/__init__.py b/app/domains/ticket/__init__.py index d912d8d..67bc3ac 100644 --- a/app/domains/ticket/__init__.py +++ b/app/domains/ticket/__init__.py @@ -1,4 +1,3 @@ from .models import Ticket -from .routers import ticket_router -__all__ = ["Ticket", "ticket_router"] \ No newline at end of file +__all__ = ["Ticket"] \ No newline at end of file diff --git a/app/domains/ticket/dependencies.py b/app/domains/ticket/dependencies.py index ea0e312..1ab5c93 100644 --- a/app/domains/ticket/dependencies.py +++ b/app/domains/ticket/dependencies.py @@ -2,6 +2,7 @@ from fastapi import Depends +from app.core.event_dispatcher import EventDispatcherDep from app.db.mongo.dependencies import MongoSessionDep from app.domains.auth import UserServiceDep from app.domains.ticket.repositories import TicketRepository @@ -15,8 +16,12 @@ def get_ticket_repo(db: MongoSessionDep) -> TicketRepository: TicketRepositoryDep = Annotated[TicketRepository, Depends(get_ticket_repo)] -def get_ticket_service(ticket_repo: TicketRepositoryDep, user_service: UserServiceDep) -> TicketService: - return TicketService(ticket_repo, user_service) +def get_ticket_service( + ticket_repo: TicketRepositoryDep, + user_service: UserServiceDep, + event_dispatcher: EventDispatcherDep, +) -> TicketService: + return TicketService(ticket_repo, user_service, event_dispatcher) TicketServiceDep = Annotated[TicketService, Depends(get_ticket_service)] diff --git a/app/domains/ticket/listeners.py new 
file mode 100644 index 0000000..1831c99 --- /dev/null +++ b/app/domains/ticket/listeners.py @@ -0,0 +1,55 @@ +from collections.abc import Callable + +from sqlalchemy.ext.asyncio import AsyncSession + +from app.core.event_dispatcher.decorators import event_handler +from app.core.event_dispatcher.enums import AppEvent +from app.core.event_dispatcher.event_dispatcher import EventDispatcher +from app.core.event_dispatcher.schemas import TriageFinishedEventSchema +from app.core.logger import get_logger +from app.db.mongo.db import mongo_db +from app.db.postgres.engine import async_session +from app.domains.auth.repositories.user_repository import UserRepository +from app.domains.auth.services.user_service import UserService +from app.domains.ticket.repositories import TicketRepository +from app.domains.ticket.schemas import CreateTicketDTO +from app.domains.ticket.services import TicketService + + +logger = get_logger("app.ticket.listener") + + +class TicketListener: + def __init__( + self, + service_factory: Callable[[AsyncSession], TicketService] + ) -> None: + self._service_factory = service_factory + + @event_handler(TriageFinishedEventSchema) + async def on_triage_finished(self, schema: TriageFinishedEventSchema) -> None: + async with async_session() as db: + service = self._service_factory(db) + await service.create_ticket( + CreateTicketDTO( + triage_id=schema.attendance_id, + type=schema.ticket_type, + criticality=schema.ticket_criticality, + product=schema.product_name, + description=schema.ticket_description, + client_id=schema.client_id, + company_id=schema.company_id, + company_name=schema.company_name, + ) + ) + + +def register_ticket_listener(dispatcher: EventDispatcher) -> None: + ticket_repo = TicketRepository(mongo_db.get_db()) + + def build_service(db: AsyncSession) -> TicketService: + return TicketService(ticket_repo, UserService(UserRepository(db)), dispatcher) + + listener = TicketListener(build_service) + 
dispatcher.subscribe(AppEvent.TRIAGE_FINISHED, listener.on_triage_finished) diff --git a/app/domains/ticket/models.py b/app/domains/ticket/models.py index 958465c..09c284d 100644 --- a/app/domains/ticket/models.py +++ b/app/domains/ticket/models.py @@ -20,6 +20,7 @@ class TicketCriticality(Enum): class TicketStatus(Enum): OPEN = "open" + AWAITING_ASSIGNMENT = "awaiting_assignment" IN_PROGRESS = "in_progress" WAITING_FOR_PROVIDER = "waiting_for_provider" WAITING_FOR_VALIDATION = "waiting_for_validation" @@ -51,8 +52,8 @@ class TicketHistory(BaseModel): name: str level: str assignment_date: datetime - exit_date: datetime - transfer_reason: str + exit_date: datetime | None = None + transfer_reason: str | None = None class Ticket(Document): @@ -63,10 +64,10 @@ status: TicketStatus creation_date: datetime description: str - chat_ids: list[PydanticObjectId] - agent_history: list[TicketHistory] + chat_ids: list[PydanticObjectId] = Field(default_factory=list) + agent_history: list[TicketHistory] = Field(default_factory=list) client: TicketClient - comments: list[TicketComment] + comments: list[TicketComment] = Field(default_factory=list) class Settings: name = "tickets" diff --git a/app/domains/ticket/repositories.py b/app/domains/ticket/repositories.py index 56c8763..fe89e25 100644 --- a/app/domains/ticket/repositories.py +++ b/app/domains/ticket/repositories.py @@ -1,10 +1,13 @@ +import re from typing import Any +from uuid import UUID from beanie import PydanticObjectId from motor.motor_asyncio import AsyncIOMotorDatabase -from app.domains.ticket.models import Ticket, TicketStatus -from app.domains.ticket.schemas import TicketSearchFiltersDTO +from app.domains.ticket.models import Ticket, TicketComment, TicketHistory +from app.domains.ticket.schemas import TicketQueueFiltersDTO, TicketSearchFiltersDTO +from app.domains.ticket.schemas import UpdateTicketCommentDTO class TicketRepository: @@ -15,7 +18,114 @@ async def 
create_ticket(self, ticket: Ticket) -> Ticket: await ticket.insert() return ticket - async def search_tickets(self, filters: TicketSearchFiltersDTO) -> list[Ticket]: + async def list_tickets_paginated(self, filters: TicketSearchFiltersDTO) -> tuple[list[Ticket], int]: + query = self._build_query(filters) + offset = (filters.page - 1) * filters.page_size + + total = await Ticket.find(query).count() + items = await Ticket.find(query).skip(offset).limit(filters.page_size).to_list() + return items, total + + async def list_queue_candidates(self, filters: TicketQueueFiltersDTO) -> list[Ticket]: + query = self._build_queue_query(filters) + return await Ticket.find(query).to_list() + + async def get_by_id(self, ticket_id: PydanticObjectId) -> Ticket | None: + return await Ticket.get(ticket_id) + + async def save(self, ticket: Ticket) -> Ticket: + await ticket.save() + return ticket + + async def add_ticket_comment( + self, ticket_id: PydanticObjectId, comment: TicketComment + ) -> TicketComment | None: + ticket = await Ticket.get(ticket_id) + if ticket is None: + return None + ticket.comments.append(comment) + await ticket.save() + return comment + + async def update_ticket_comment( + self, ticket_id: PydanticObjectId, comment_id: UUID, dto: UpdateTicketCommentDTO + ) -> TicketComment | None: + updates = dto.model_dump(exclude_unset=True) + if not updates: + return None + ticket = await Ticket.get(ticket_id) + if ticket is None: + return None + comment = next( + (c for c in ticket.comments if c.comment_id == comment_id), None + ) + if comment is None: + return None + for field_name, value in updates.items(): + setattr(comment, field_name, value) + await ticket.save() + return comment + + async def delete_ticket_comment( + self, ticket_id: PydanticObjectId, comment_id: UUID + ) -> TicketComment | None: + ticket = await Ticket.get(ticket_id) + if ticket is None: + return None + comment = next( + (c for c in ticket.comments if c.comment_id == comment_id), None + ) + if 
comment is None: + return None + ticket.comments = [c for c in ticket.comments if c.comment_id != comment_id] + await ticket.save() + return comment + + async def get_ticket_history( + self, ticket_id: PydanticObjectId + ) -> list[TicketHistory] | None: + ticket = await Ticket.get(ticket_id) + if ticket is None: + return None + return ticket.agent_history + + + async def search_ticket( + self, + search_query: str, + client_id: UUID | None = None, + agent_id: UUID | None = None, + company_id: UUID | None = None, + global_scope: bool = False, + ) -> list[Ticket] | None: + pattern = re.escape(search_query) + text_filter: dict[str, Any] = { + "$or": [ + {"description": {"$regex": pattern, "$options": "i"}}, + {"comments.text": {"$regex": pattern, "$options": "i"}}, + ] + } + + scope_filter: dict[str, Any] | None = None + if client_id is not None: + scope_filter = {"client.id": client_id} + elif agent_id is not None: + scope_filter = {"agent_history.agent_id": agent_id} + elif company_id is not None: + scope_filter = {"client.company.id": company_id} + elif not global_scope: + return [] + + query = text_filter if scope_filter is None else {"$and": [text_filter, scope_filter]} + + try: + return await Ticket.find(query).to_list() + except Exception: + return None + + + @staticmethod + def _build_query(filters: TicketSearchFiltersDTO) -> dict[str, Any]: query: dict[str, Any] = {} if filters.ticket_id is not None: @@ -33,15 +143,26 @@ async def search_tickets(self, filters: TicketSearchFiltersDTO) -> list[Ticket]: if filters.product is not None: query["product"] = filters.product - if not query: - return await Ticket.find_all().to_list() + return query - return await Ticket.find(query).to_list() + @staticmethod + def _build_queue_query(filters: TicketQueueFiltersDTO) -> dict[str, Any]: + query: dict[str, Any] = {} - async def get_by_id(self, ticket_id: PydanticObjectId) -> Ticket | None: - return await Ticket.get(ticket_id) + if filters.status is not None: + query["status"] 
= filters.status.value + else: + query["status"] = { + "$in": [ + "open", + "awaiting_assignment", + "in_progress", + "waiting_for_provider", + "waiting_for_validation", + ] + } - async def update_status(self, ticket: Ticket, status: TicketStatus) -> Ticket: - ticket.status = status - await ticket.save() - return ticket + if filters.type is not None: + query["type"] = filters.type.value + + return query diff --git a/app/domains/ticket/routers.py b/app/domains/ticket/routers.py index 156c898..29a10d4 100644 --- a/app/domains/ticket/routers.py +++ b/app/domains/ticket/routers.py @@ -1,30 +1,62 @@ from typing import Annotated +from uuid import UUID from beanie import PydanticObjectId -from fastapi import APIRouter, Depends, status +from fastapi import APIRouter, Depends, Query, status from starlette.responses import JSONResponse from app.core.dependencies import ResponseFactoryDep +from app.core.exceptions import AppHTTPException from app.domains.auth import CurrentUserSessionDep, require_permission from app.domains.ticket.dependencies import TicketServiceDep from app.domains.ticket.schemas import ( + AddTicketCommentDTO, + AssignTicketRequest, CreateTicketDTO, CreateTicketResponseDTO, - TicketResponseDTO, + EscalateTicketRequest, + TicketPaginatedList, + TicketQueueFiltersDTO, + TicketQueueListResponse, + TicketResponse, TicketSearchFiltersDTO, + TransferTicketRequest, + UpdateTicketCommentDTO, + UpdateTicketDTO, UpdateTicketStatusDTO, UpdateTicketStatusResponseDTO, ) +from app.domains.ticket.swagger_utils import ( + comment_on_ticket_swagger, + get_ticket_comments_swagger, + search_tickets_by_text_swagger, +) from app.schemas.response import GenericSuccessContent ticket_router = APIRouter() +def _contract_not_implemented(feature_name: str) -> None: + raise AppHTTPException( + status_code=status.HTTP_501_NOT_IMPLEMENTED, + detail=( + f"{feature_name} contract is available in this sprint, " + "but its business implementation is still pending." 
+ ), + title="Contract Stub", + ) + + @ticket_router.get( "/", tags=["Tickets"], - response_model=GenericSuccessContent[list[TicketResponseDTO]], + response_model=GenericSuccessContent[TicketPaginatedList[TicketResponse]], dependencies=[require_permission("ticket:read")], + summary="List tickets", + description=( + "Official paginated ticket listing endpoint. " + "Returns items, page, page_size, and total." + ), ) async def get_tickets( filters: Annotated[TicketSearchFiltersDTO, Depends()], @@ -32,9 +64,68 @@ async def get_tickets( service: TicketServiceDep, response: ResponseFactoryDep, ) -> JSONResponse: - result = await service.search_tickets(filters) + """ + HTTP GET /api/tickets/ + + Purpose: + - List tickets with the official paginated response contract. + + Query params: + - ticket_id, client_id, triage_id, status, criticality, type, product, page, page_size + + Response: + - GenericSuccessContent[TicketPaginatedList[TicketResponse]] + + Permissions: + - ticket:read + """ + result = await service.list_tickets(filters) + return response.success( + data=result.model_dump(mode="json"), + status_code=status.HTTP_200_OK, + ) + + +@ticket_router.get( + "/queue", + tags=["Tickets", "Queue"], + response_model=GenericSuccessContent[TicketQueueListResponse], + dependencies=[require_permission("ticket:queue")], + summary="List ticket queue", + description=( + "Queue contract for open/active tickets ordered by criticality and creation date. " + "The contract is available now; the full queue business implementation remains pending." + ), +) +async def get_ticket_queue( + filters: Annotated[TicketQueueFiltersDTO, Depends()], + _auth: CurrentUserSessionDep, + service: TicketServiceDep, + response: ResponseFactoryDep, +) -> JSONResponse: + """ + HTTP GET /api/tickets/queue + + Purpose: + - Expose the queue contract for tickets awaiting assignment or active handling. 
+ + Query params: + - status, type, department_id, unassigned_only, level, assignee_id, page, page_size + + Response: + - GenericSuccessContent[TicketQueueListResponse] + + Permissions: + - ticket:queue + + Business notes: + - Sorting is contractually defined as criticality first, then creation date. + - department_id and level are provisional cross-domain contract fields. + - This route will emit no event by itself. + """ + result = await service.list_ticket_queue(filters) return response.success( - data=[ticket.model_dump(mode="json") for ticket in result], + data=result.model_dump(mode="json"), status_code=status.HTTP_200_OK, ) @@ -44,6 +135,8 @@ async def get_tickets( tags=["Tickets"], response_model=GenericSuccessContent[CreateTicketResponseDTO], dependencies=[require_permission("ticket:create")], + summary="Create ticket", + description="Official ticket creation endpoint.", ) async def create_ticket( dto: CreateTicketDTO, @@ -51,8 +144,162 @@ async def create_ticket( service: TicketServiceDep, response: ResponseFactoryDep, ) -> JSONResponse: + """ + HTTP POST /api/tickets/ + + Purpose: + - Create a new ticket. + + Body: + - CreateTicketDTO + + Response: + - GenericSuccessContent[CreateTicketResponseDTO] + + Permissions: + - ticket:create + + Events: + - Ticket creation may later be triggered from 'triage.finished' in addition to HTTP. 
+ """ result = await service.create_ticket(dto) - return response.success(data=result.model_dump(mode="json"), status_code=status.HTTP_201_CREATED) + return response.success( + data=result.model_dump(mode="json"), + status_code=status.HTTP_201_CREATED, + ) + + +@ticket_router.get( + "/search", + tags=["Tickets"], + dependencies=[require_permission("chat:read")], + **search_tickets_by_text_swagger, +) +async def search_tickets_by_text( + auth: CurrentUserSessionDep, + service: TicketServiceDep, + response: ResponseFactoryDep, + search_query: str | None = Query(default=None, min_length=5, max_length=100), +) -> JSONResponse: + if search_query is None: + raise AppHTTPException( + status_code = status.HTTP_400_BAD_REQUEST, + detail="provide a search text using search_query in the query string" + ) + + res = await service.search_ticket_by_text(search_query, auth[0]) + if res is None: + raise AppHTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=( + "Não foi possível executar a busca de tickets: " + "o usuário autenticado não possui um escopo de busca válido " + "(cliente, atendente ou empresa)." + ), + ) + return response.success( + data=[c.model_dump(mode="json") for c in res], + status_code=status.HTTP_200_OK, + ) + + + +@ticket_router.get( + "/{ticket_id}", + tags=["Tickets"], + response_model=GenericSuccessContent[TicketResponse], + dependencies=[require_permission("ticket:read")], + summary="Get ticket by id", + description="Returns a single ticket using the canonical response contract.", +) +async def get_ticket( + ticket_id: PydanticObjectId, + _auth: CurrentUserSessionDep, + service: TicketServiceDep, + response: ResponseFactoryDep, +) -> JSONResponse: + """ + HTTP GET /api/tickets/{ticket_id} + + Purpose: + - Read a single ticket by identifier. 
+ + Response: + - GenericSuccessContent[TicketResponse] + + Permissions: + - ticket:read + """ + result = await service.get_ticket(ticket_id) + return response.success( + data=result.model_dump(mode="json"), + status_code=status.HTTP_200_OK, + ) + + +@ticket_router.post( + "/{ticket_id}/take", + tags=["Tickets"], + response_model=GenericSuccessContent[TicketResponse], + dependencies=[require_permission("ticket:update_status")], +) +async def take_ticket( + ticket_id: PydanticObjectId, + auth: CurrentUserSessionDep, + service: TicketServiceDep, + response: ResponseFactoryDep, +) -> JSONResponse: + user = auth[0] + result = await service.take_ticket(ticket_id, user) + return response.success( + data=result.model_dump(mode="json"), + status_code=status.HTTP_200_OK, + ) + + +@ticket_router.patch( + "/{ticket_id}", + tags=["Tickets"], + response_model=GenericSuccessContent[TicketResponse], + dependencies=[require_permission("ticket:update")], + summary="Partially update a ticket", + description=( + "Official partial update endpoint for editable ticket fields. " + "Use this endpoint for product, description, criticality, and status changes. " + "If the resulting status becomes 'finished', the ticket domain emits " + "the 'ticket.closed' business event contract." + ), +) +async def update_ticket( + ticket_id: PydanticObjectId, + dto: UpdateTicketDTO, + _auth: CurrentUserSessionDep, + service: TicketServiceDep, + response: ResponseFactoryDep, +) -> JSONResponse: + """ + HTTP PATCH /api/tickets/{ticket_id} + + Purpose: + - Update official editable fields of a ticket. + + Allowed body fields: + - status + - criticality + - product + - description + + Excluded actions: + - assign, transfer, and escalate remain dedicated routes. 
+ + Permissions: + - ticket:update + + Events: + - ticket.closed when the resulting status becomes finished + """ + result = await service.update_ticket(ticket_id, dto) + return response.success(data=result.model_dump(mode="json"), status_code=status.HTTP_200_OK) @ticket_router.patch( @@ -60,13 +307,288 @@ async def create_ticket( tags=["Tickets"], response_model=GenericSuccessContent[UpdateTicketStatusResponseDTO], dependencies=[require_permission("ticket:update_status")], + include_in_schema=False, + summary="Update ticket status (legacy)", + description=( + "Status-only update endpoint preserved for backward compatibility. " + "Hidden from the OpenAPI schema; new clients should use PATCH /tickets/{ticket_id}. " + "Requires the ticket to have an assigned agent and that the actor is " + "either the assigned agent or an admin." + ), ) async def update_ticket_status( ticket_id: PydanticObjectId, dto: UpdateTicketStatusDTO, - _auth: CurrentUserSessionDep, + auth: CurrentUserSessionDep, service: TicketServiceDep, response: ResponseFactoryDep, ) -> JSONResponse: - result = await service.update_status(ticket_id, dto) + user = auth[0] + result = await service.update_status(ticket_id, dto, user) return response.success(data=result.model_dump(mode="json"), status_code=status.HTTP_200_OK) + + +@ticket_router.post( + "/{ticket_id}/assign", + tags=["Tickets", "Queue"], + response_model=GenericSuccessContent[TicketResponse], + dependencies=[require_permission("ticket:assign")], + summary="Assign a ticket to an agent", + description=( + "Assignment contract for queue handling. " + "This route is expected to emit 'ticket.assignee_updated' after " + "the business implementation is added." 
+ ), +) +async def assign_ticket( + ticket_id: PydanticObjectId, + dto: AssignTicketRequest, + _auth: CurrentUserSessionDep, + service: TicketServiceDep, + response: ResponseFactoryDep, +) -> JSONResponse: + """ + HTTP POST /api/tickets/{ticket_id}/assign + + Purpose: + - Assign an agent to a ticket and register assignment history. + + Body: + - AssignTicketRequest + + Response: + - GenericSuccessContent[TicketResponse] + + Permissions: + - ticket:assign + + Events: + - ticket.assignee_updated + """ + result = await service.assign_ticket(ticket_id, dto) + return response.success( + data=result.model_dump(mode="json"), + status_code=status.HTTP_200_OK, + ) + + +@ticket_router.post( + "/{ticket_id}/escalate", + tags=["Tickets", "Queue"], + response_model=GenericSuccessContent[TicketResponse], + dependencies=[require_permission("ticket:escalate")], + summary="Escalate a ticket", + description=( + "Escalation contract for moving a ticket to an agent at a higher support level. " + "This route is expected to emit 'ticket.escalated' after the " + "business implementation is added." + ), +) +async def escalate_ticket( + ticket_id: PydanticObjectId, + dto: EscalateTicketRequest, + _auth: CurrentUserSessionDep, + service: TicketServiceDep, + response: ResponseFactoryDep, +) -> JSONResponse: + """ + HTTP POST /api/tickets/{ticket_id}/escalate + + Purpose: + - Move a ticket upward in the support hierarchy. + + Body: + - EscalateTicketRequest + + Response: + - GenericSuccessContent[TicketResponse] + + Permissions: + - ticket:escalate + + Business notes: + - Direct escalation assigns the ticket to a target agent at a higher support level. + - Department routing is intentionally out of scope for the current ticket model. 
+ + Events: + - ticket.escalated + """ + result = await service.escalate_ticket(ticket_id, dto) + return response.success( + data=result.model_dump(mode="json"), + status_code=status.HTTP_200_OK, + ) + + +@ticket_router.post( + "/{ticket_id}/transfer", + tags=["Tickets", "Queue"], + response_model=GenericSuccessContent[TicketResponse], + dependencies=[require_permission("ticket:transfer")], + summary="Transfer a ticket", + description=( + "Transfer contract for moving a ticket between agents on the same support level. " + "This route is expected to emit 'ticket.assignee_updated' after " + "the business implementation is added." + ), +) +async def transfer_ticket( + ticket_id: PydanticObjectId, + dto: TransferTicketRequest, + _auth: CurrentUserSessionDep, + service: TicketServiceDep, + response: ResponseFactoryDep, +) -> JSONResponse: + """ + HTTP POST /api/tickets/{ticket_id}/transfer + + Purpose: + - Transfer a ticket to another agent without changing its support level. + + Body: + - TransferTicketRequest + + Response: + - GenericSuccessContent[TicketResponse] + + Permissions: + - ticket:transfer + + Events: + - ticket.assignee_updated + """ + result = await service.transfer_ticket(ticket_id, dto) + return response.success( + data=result.model_dump(mode="json"), + status_code=status.HTTP_200_OK, + ) + + +@ticket_router.post( + "/{ticket_id}/comments", + dependencies=[require_permission("ticket:comment")], + tags=["Tickets"], + **comment_on_ticket_swagger, +) +async def comment_on_ticket( + ticket_id: PydanticObjectId, + dto: AddTicketCommentDTO, + auth: CurrentUserSessionDep, + service: TicketServiceDep, + response: ResponseFactoryDep +) -> JSONResponse: + user = auth[0] + comment = await service.add_comment_to_ticket( + ticket_id, + user.name or user.username or user.email, + dto + ) + + if comment is None: + raise AppHTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Ticket {ticket_id} does not exist.", + ) + + return response.success( + 
data=comment.model_dump(mode="json"), + status_code=status.HTTP_201_CREATED, + ) + + +@ticket_router.get( + "/{ticket_id}/comments", + dependencies=[require_permission("ticket:read")], + tags=["Tickets"], + **get_ticket_comments_swagger, +) +async def get_ticket_comments( + ticket_id: PydanticObjectId, + _auth: CurrentUserSessionDep, + service: TicketServiceDep, + response: ResponseFactoryDep, +) -> JSONResponse: + comments = await service.list_ticket_comments(ticket_id) + if comments is None: + raise AppHTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Ticket {ticket_id} does not exist.", + ) + + return response.success( + data=[comment.model_dump(mode="json") for comment in comments], + status_code=status.HTTP_200_OK, + ) + + +@ticket_router.patch( + "/{ticket_id}/comments/{comment_id}", + dependencies=[require_permission("ticket:update_comment")], + tags=["Tickets"] +) +async def update_ticket_comment( + ticket_id: PydanticObjectId, + comment_id: UUID, + dto: UpdateTicketCommentDTO, + _auth: CurrentUserSessionDep, + service: TicketServiceDep, + response: ResponseFactoryDep +) -> JSONResponse: + comment = await service.update_ticket_comment(ticket_id, comment_id, dto) + + if comment is None: + raise AppHTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail = "Comment not found to update" + ) + + return response.success( + data=comment.model_dump(mode="json"), + status_code=status.HTTP_200_OK, + ) + + +@ticket_router.delete( + "/{ticket_id}/comments/{comment_id}", + dependencies=[require_permission("ticket:delete_comment")], + tags=["Tickets"] +) +async def delete_ticket_comment( + ticket_id: PydanticObjectId, + comment_id: UUID, + _auth: CurrentUserSessionDep, + service: TicketServiceDep, + response: ResponseFactoryDep +) -> JSONResponse: + comment = await service.delete_ticket_comment(ticket_id, comment_id) + + if comment is None: + raise AppHTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail = "Comment not found to 
delete" + ) + + return response.success(data = comment.model_dump(mode="json"), status_code=status.HTTP_200_OK) + + +@ticket_router.get( + "/{ticket_id}/history", + dependencies=[require_permission("ticket:read")], + tags=["Tickets"] +) +async def get_ticket_history( + ticket_id: PydanticObjectId, + service: TicketServiceDep, + response: ResponseFactoryDep +) -> JSONResponse: + hist = await service.get_ticket_history(ticket_id) + if hist is None: + raise AppHTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Ticket {ticket_id} not found." + ) + + return response.success( + data=[entry.model_dump(mode="json") for entry in hist], + status_code=status.HTTP_200_OK, + ) diff --git a/app/domains/ticket/schemas.py b/app/domains/ticket/schemas.py index 1f6915a..968c76d 100644 --- a/app/domains/ticket/schemas.py +++ b/app/domains/ticket/schemas.py @@ -1,4 +1,5 @@ from datetime import datetime +from typing import Literal from uuid import UUID from beanie import PydanticObjectId @@ -8,6 +9,18 @@ from app.domains.ticket.models import TicketCriticality, TicketStatus, TicketType +class PaginationDTO(BaseDTO): + page: int = Field(default=1, ge=1, description="1-indexed page number.") + page_size: int = Field(default=20, ge=1, le=100, description="Items per page.") + + +class TicketPaginatedList[T](BaseModel): + total: int = Field(..., ge=0) + page: int = Field(..., ge=1) + page_size: int = Field(..., ge=1, le=100) + items: list[T] + + class CreateTicketDTO(BaseDTO): model_config = { "json_schema_extra": { @@ -19,6 +32,8 @@ class CreateTicketDTO(BaseDTO): "description": "Erro ao emitir boleto", "chat_ids": ["67f0c9b8e4b0b1a2c3d4e5f7"], "client_id": "0f7d7c4f-7b5b-45cb-9d85-6f3c69f0b5d2", + "company_id": "a4b9e7f1-2e7d-4cc1-9c12-7c7c9d10b321", + "company_name": "ACME Finance", } } } @@ -28,8 +43,16 @@ class CreateTicketDTO(BaseDTO): criticality: TicketCriticality product: str description: str - chat_ids: list[PydanticObjectId] + chat_ids: list[PydanticObjectId] = 
Field(default_factory=list) client_id: UUID = Field(description="Identifier of the client user in the auth domain.") + company_id: UUID | None = Field( + default=None, + description="Identifier of the client company. Falls back to the client identity when omitted.", + ) + company_name: str | None = Field( + default=None, + description="Company name snapshot for the ticket. Falls back to a client-derived label when omitted.", + ) class CreateTicketResponseDTO(BaseModel): @@ -38,7 +61,7 @@ class CreateTicketResponseDTO(BaseModel): creation_date: datetime -class TicketSearchFiltersDTO(BaseDTO): +class TicketSearchFiltersDTO(PaginationDTO): ticket_id: PydanticObjectId | None = Field(default=None, description="Ticket ObjectId.") client_id: UUID | None = Field(default=None, description="Client UUID in auth domain.") triage_id: PydanticObjectId | None = Field(default=None, description="Triage ObjectId.") @@ -48,28 +71,31 @@ class TicketSearchFiltersDTO(BaseDTO): product: str | None = Field(default=None, description="Exact product name.") -class TicketCompanyResponseDTO(BaseModel): +class TicketCompanyResponse(BaseModel): id: UUID name: str -class TicketClientResponseDTO(BaseModel): +class TicketClientResponse(BaseModel): id: UUID name: str email: str - company: TicketCompanyResponseDTO + company: TicketCompanyResponse -class TicketHistoryResponseDTO(BaseModel): +class TicketHistoryResponse(BaseModel): agent_id: UUID name: str - level: str + level: str = Field( + ..., + description="Support level snapshot. 
Provisional string contract; examples: N1, N2, N3.", + ) assignment_date: datetime - exit_date: datetime - transfer_reason: str + exit_date: datetime | None = None + transfer_reason: str | None = None -class TicketCommentResponseDTO(BaseModel): +class TicketCommentResponse(BaseModel): comment_id: UUID author: str text: str @@ -77,7 +103,34 @@ class TicketCommentResponseDTO(BaseModel): internal: bool = False -class TicketResponseDTO(BaseModel): +class TicketResponse(BaseModel): + model_config = { + "json_schema_extra": { + "example": { + "id": "67f0ca60e4b0b1a2c3d4e601", + "triage_id": "67f0c9b8e4b0b1a2c3d4e5f6", + "type": "issue", + "criticality": "high", + "product": "Sistema Financeiro", + "status": "open", + "creation_date": "2026-04-14T12:00:00Z", + "description": "Erro ao emitir boleto", + "chat_ids": ["67f0c9b8e4b0b1a2c3d4e5f7"], + "agent_history": [], + "client": { + "id": "0f7d7c4f-7b5b-45cb-9d85-6f3c69f0b5d2", + "name": "Maria Souza", + "email": "maria@smtp.dev", + "company": { + "id": "0f7d7c4f-7b5b-45cb-9d85-6f3c69f0b5d2", + "name": "Maria Souza account", + }, + }, + "comments": [], + } + } + } + id: str triage_id: str type: TicketType @@ -87,9 +140,118 @@ class TicketResponseDTO(BaseModel): creation_date: datetime description: str chat_ids: list[str] - agent_history: list[TicketHistoryResponseDTO] - client: TicketClientResponseDTO - comments: list[TicketCommentResponseDTO] + agent_history: list[TicketHistoryResponse] + client: TicketClientResponse + comments: list[TicketCommentResponse] + assigned_agent_id: UUID | None = None + assigned_agent_name: str | None = None + + +class TicketQueueFiltersDTO(PaginationDTO): + status: TicketStatus | None = Field(default=None, description="Filter queue items by status.") + type: TicketType | None = Field(default=None, description="Filter queue items by ticket type.") + department_id: str | None = Field( + default=None, + description="Provisional department reference from another domain.", + ) + unassigned_only: 
bool | None = Field( + default=None, + description="When true, return only tickets without an active assignee.", + ) + level: str | None = Field( + default=None, + description="Provisional support level filter. Example values: N1, N2, N3.", + ) + assignee_id: UUID | None = Field( + default=None, + description="Filter queue items by current assignee identifier.", + ) + + +class TicketQueueItemResponse(BaseModel): + model_config = { + "json_schema_extra": { + "example": { + "id": "67f0ca60e4b0b1a2c3d4e601", + "triage_id": "67f0c9b8e4b0b1a2c3d4e5f6", + "type": "issue", + "criticality": "high", + "product": "Sistema Financeiro", + "status": "awaiting_assignment", + "creation_date": "2026-04-14T12:00:00Z", + "description": "Erro ao emitir boleto", + "client": { + "id": "0f7d7c4f-7b5b-45cb-9d85-6f3c69f0b5d2", + "name": "Maria Souza", + "email": "maria@smtp.dev", + "company": { + "id": "0f7d7c4f-7b5b-45cb-9d85-6f3c69f0b5d2", + "name": "Maria Souza account", + }, + }, + "department_id": "dept-finance", + "department_name": "Financeiro", + "level": "N1", + "assignee_id": None, + "assignee_name": None, + "unassigned": True, + } + } + } + + id: str + triage_id: str + type: TicketType + criticality: TicketCriticality + product: str + status: TicketStatus + creation_date: datetime + description: str + client: TicketClientResponse + department_id: str | None = Field( + default=None, + description="Provisional department reference. Value comes from another domain contract.", + ) + department_name: str | None = None + level: str | None = Field( + default=None, + description="Provisional support level. 
Example values: N1, N2, N3.", + ) + assignee_id: UUID | None = None + assignee_name: str | None = None + unassigned: bool = True + + +class TicketQueueListResponse(BaseModel): + items: list[TicketQueueItemResponse] + page: int = Field(..., ge=1) + page_size: int = Field(..., ge=1, le=100) + total: int = Field(..., ge=0) + + +class UpdateTicketDTO(BaseDTO): + model_config = { + "json_schema_extra": { + "example": { + "status": "finished", + "criticality": "medium", + "product": "Sistema Financeiro", + "description": "Chamado concluido e validado.", + } + } + } + + status: TicketStatus | None = Field( + default=None, + description=( + "Optional status transition. If the resulting status is 'finished', " + "the domain must emit 'ticket.closed' once the business " + "implementation is completed." + ), + ) + criticality: TicketCriticality | None = None + product: str | None = None + description: str | None = None class UpdateTicketStatusDTO(BaseDTO): @@ -102,3 +264,180 @@ class UpdateTicketStatusResponseDTO(BaseModel): id: str previous_status: TicketStatus current_status: TicketStatus + + +class AssignTicketRequest(BaseDTO): + model_config = { + "json_schema_extra": { + "example": { + "agent_id": "4b8b9bd2-6042-43f5-b5a3-6b36fdfaf9a8", + "reason": "Primeira atribuicao na fila N1.", + } + } + } + + agent_id: UUID + reason: str | None = Field( + default=None, + description="Optional audit reason for the assignee change.", + ) + + +class EscalateTicketRequest(BaseDTO): + model_config = { + "json_schema_extra": { + "example": { + "target_agent_id": "4b8b9bd2-6042-43f5-b5a3-6b36fdfaf9a8", + "reason": "Necessario apoio do nivel superior.", + } + } + } + + target_agent_id: UUID + reason: str = Field(..., description="Business reason for the escalation.") + + +class TransferTicketRequest(BaseDTO): + model_config = { + "json_schema_extra": { + "example": { + "target_agent_id": "4b8b9bd2-6042-43f5-b5a3-6b36fdfaf9a8", + "reason": "Redistribuicao interna do mesmo nivel.", + } + } + 
} + + target_agent_id: UUID + reason: str = Field(..., description="Business reason for the transfer.") + + +class TicketEventPayload(BaseModel): + ticket_id: str + triage_id: str + client_id: UUID + status: TicketStatus + occurred_at: datetime + + +class TicketClosedEventPayload(TicketEventPayload): + model_config = { + "json_schema_extra": { + "example": { + "event_name": "ticket.closed", + "ticket_id": "67f0ca60e4b0b1a2c3d4e601", + "triage_id": "67f0c9b8e4b0b1a2c3d4e5f6", + "client_id": "0f7d7c4f-7b5b-45cb-9d85-6f3c69f0b5d2", + "status": "finished", + "occurred_at": "2026-04-14T12:30:00Z", + "previous_status": "in_progress", + "closed_at": "2026-04-14T12:30:00Z", + } + } + } + + event_name: Literal["ticket.closed"] = "ticket.closed" + previous_status: TicketStatus + closed_at: datetime + + +class TicketAssigneeUpdatedEventPayload(TicketEventPayload): + model_config = { + "json_schema_extra": { + "example": { + "event_name": "ticket.assignee_updated", + "ticket_id": "67f0ca60e4b0b1a2c3d4e601", + "triage_id": "67f0c9b8e4b0b1a2c3d4e5f6", + "client_id": "0f7d7c4f-7b5b-45cb-9d85-6f3c69f0b5d2", + "status": "in_progress", + "occurred_at": "2026-04-14T12:35:00Z", + "previous_agent_id": None, + "current_agent_id": "4b8b9bd2-6042-43f5-b5a3-6b36fdfaf9a8", + "reason": "Primeira atribuicao na fila N1.", + "department_id": "dept-finance", + "level": "N1", + } + } + } + + event_name: Literal["ticket.assignee_updated"] = "ticket.assignee_updated" + previous_agent_id: UUID | None = None + current_agent_id: UUID + reason: str | None = None + department_id: str | None = None + level: str | None = Field( + default=None, + description="Provisional support level contract shared with queue/escalation APIs.", + ) + + +class TicketEscalatedEventPayload(TicketEventPayload): + model_config = { + "json_schema_extra": { + "example": { + "event_name": "ticket.escalated", + "ticket_id": "67f0ca60e4b0b1a2c3d4e601", + "triage_id": "67f0c9b8e4b0b1a2c3d4e5f6", + "client_id": 
"0f7d7c4f-7b5b-45cb-9d85-6f3c69f0b5d2", + "status": "in_progress", + "occurred_at": "2026-04-14T12:40:00Z", + "previous_agent_id": "4b8b9bd2-6042-43f5-b5a3-6b36fdfaf9a8", + "source_level": "N1", + "target_agent_id": "97f0c9b8-e4b0-41a2-83d4-e5f600000001", + "target_level": "N2", + "reason": "Necessario apoio do nivel superior.", + } + } + } + + event_name: Literal["ticket.escalated"] = "ticket.escalated" + previous_agent_id: UUID | None = None + source_level: str | None = None + target_agent_id: UUID + target_level: str + reason: str + + +class TriageFinishedEventPayload(BaseDTO): + model_config = { + "json_schema_extra": { + "example": { + "triage_id": "67f0c9b8e4b0b1a2c3d4e5f6", + "type": "issue", + "criticality": "high", + "product": "Sistema Financeiro", + "description": "Erro ao emitir boleto", + "chat_ids": ["67f0c9b8e4b0b1a2c3d4e5f7"], + "client_id": "0f7d7c4f-7b5b-45cb-9d85-6f3c69f0b5d2", + } + } + } + + triage_id: str + type: TicketType + criticality: TicketCriticality + product: str + description: str + chat_ids: list[str] + client_id: UUID = Field( + ..., + description=( + "Client identity must come from a trusted authenticated source " + "outside the ticket domain." 
class AddTicketCommentDTO(BaseDTO):
    """Payload for appending a new comment to a ticket."""

    text: str
    # New comments default to internal (agent-only) visibility.
    internal: bool = True


class UpdateTicketCommentDTO(BaseDTO):
    """Partial-update (PATCH) payload for an existing ticket comment.

    Every field is optional; only explicitly provided fields should be
    applied. `internal` defaults to None (not False) so that an omitted
    flag is distinguishable from an explicit request to make the comment
    public — with the previous `= False` default, a repository that does
    not use exclude_unset semantics would silently flip internal comments
    to public on every unrelated PATCH.
    """

    author: str | None = None
    text: str | None = None
    internal: bool | None = None


# Backward-compatible aliases for the pre-rename response model names.
TicketCompanyResponseDTO = TicketCompanyResponse
TicketClientResponseDTO = TicketClientResponse
TicketHistoryResponseDTO = TicketHistoryResponse
TicketCommentResponseDTO = TicketCommentResponse
TicketResponseDTO = TicketResponse
TicketClientResponse, + TicketCommentResponse, + TicketCompanyResponse, + TicketHistoryResponse, + TicketPaginatedList, + TicketQueueFiltersDTO, + TicketQueueItemResponse, + TicketQueueListResponse, + TicketResponse, TicketSearchFiltersDTO, + TransferTicketRequest, + UpdateTicketCommentDTO, + UpdateTicketDTO, UpdateTicketStatusDTO, UpdateTicketStatusResponseDTO, ) @@ -26,8 +53,10 @@ class TicketService: allowed_transitions: dict[TicketStatus, set[TicketStatus]] = { - TicketStatus.OPEN: {TicketStatus.IN_PROGRESS}, + TicketStatus.OPEN: {TicketStatus.AWAITING_ASSIGNMENT, TicketStatus.IN_PROGRESS}, + TicketStatus.AWAITING_ASSIGNMENT: {TicketStatus.IN_PROGRESS}, TicketStatus.IN_PROGRESS: { + TicketStatus.AWAITING_ASSIGNMENT, TicketStatus.WAITING_FOR_PROVIDER, TicketStatus.WAITING_FOR_VALIDATION, TicketStatus.FINISHED, @@ -40,19 +69,24 @@ class TicketService: TicketStatus.FINISHED: set(), } - def __init__(self, repository: TicketRepository, user_service: UserService): + def __init__(self, repository: TicketRepository, user_service: UserService, event_dispatcher: EventDispatcher): self.repo = repository self.user_service = user_service + self.dispatcher = event_dispatcher self.logger = get_logger("app.ticket.service") async def create_ticket(self, dto: CreateTicketDTO) -> CreateTicketResponseDTO: - client = await self._build_ticket_client(dto.client_id) + client = await self._build_ticket_client( + dto.client_id, + dto.company_id, + dto.company_name, + ) ticket = Ticket( triage_id=dto.triage_id, type=dto.type, criticality=dto.criticality, product=dto.product, - status=TicketStatus.OPEN, + status=TicketStatus.AWAITING_ASSIGNMENT, creation_date=datetime.now(UTC), description=dto.description, chat_ids=dto.chat_ids, @@ -61,11 +95,24 @@ async def create_ticket(self, dto: CreateTicketDTO) -> CreateTicketResponseDTO: comments=[], ) created_ticket = await self.repo.create_ticket(ticket) + assert created_ticket.id is not None + await self.dispatcher.publish( + 
AppEvent.TICKET_CREATED, + TicketCreatedEventSchema( + ticket_id=created_ticket.id, + client_id=created_ticket.client.id, + ), + ) + tickets_created_total.labels(source="api", criticality=dto.criticality.value).inc() self.logger.info( "Ticket created", - extra={"ticket_id": str(created_ticket.id), "type": dto.type.value, "criticality": dto.criticality.value}, + extra={ + "ticket_id": str(created_ticket.id), + "type": dto.type.value, + "criticality": dto.criticality.value, + }, ) return CreateTicketResponseDTO( @@ -74,58 +121,475 @@ async def create_ticket(self, dto: CreateTicketDTO) -> CreateTicketResponseDTO: creation_date=created_ticket.creation_date, ) - async def search_tickets(self, filters: TicketSearchFiltersDTO) -> list[TicketResponseDTO]: - tickets = await self.repo.search_tickets(filters) - return [self._to_ticket_response(ticket) for ticket in tickets] + async def list_tickets(self, filters: TicketSearchFiltersDTO) -> TicketPaginatedList[TicketResponse]: + tickets, total = await self.repo.list_tickets_paginated(filters) + return TicketPaginatedList[TicketResponse]( + items=[self._to_ticket_response(ticket) for ticket in tickets], + page=filters.page, + page_size=filters.page_size, + total=total, + ) + + async def get_ticket(self, ticket_id: PydanticObjectId) -> TicketResponse: + ticket = await self._get_ticket_or_404(ticket_id) + return self._to_ticket_response(ticket) - async def update_status( - self, ticket_id: PydanticObjectId, dto: UpdateTicketStatusDTO - ) -> UpdateTicketStatusResponseDTO: - ticket = await self.repo.get_by_id(ticket_id) - if ticket is None: + async def list_ticket_queue(self, filters: TicketQueueFiltersDTO) -> TicketQueueListResponse: + tickets = await self.repo.list_queue_candidates(filters) + filtered_tickets = [ticket for ticket in tickets if self._matches_queue_filters(ticket, filters)] + sorted_tickets = sorted(filtered_tickets, key=self._queue_sort_key) + + offset = (filters.page - 1) * filters.page_size + paginated_tickets = 
sorted_tickets[offset : offset + filters.page_size] + + return TicketQueueListResponse( + items=[self._to_ticket_queue_item_response(ticket) for ticket in paginated_tickets], + page=filters.page, + page_size=filters.page_size, + total=len(sorted_tickets), + ) + + async def take_ticket( + self, + ticket_id: PydanticObjectId, + actor: UserWithRoles, + ) -> TicketResponse: + ticket = await self._get_ticket_or_404(ticket_id) + + actor_roles = actor.roles_names() + if "admin" not in actor_roles and "agent" not in actor_roles: + raise AppHTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Only agents or admins can take tickets.", + ) + + current_agent_id = self._get_current_assigned_agent_id(ticket) + + if current_agent_id is not None: + if current_agent_id == actor.id: + return self._to_ticket_response(ticket) + + raise AppHTTPException( + status_code=status.HTTP_409_CONFLICT, + detail="Este chamado já foi atribuído a outro atendente.", + ) + + actor_name = actor.name or actor.username or actor.email + actor_level = "admin" if "admin" in actor_roles else "agent" + now = datetime.now(UTC) + + ticket.agent_history.append( + TicketHistory( + agent_id=actor.id, + name=actor_name, + level=actor_level, + assignment_date=now, + exit_date=None, + transfer_reason="Assumido via fila", + ) + ) + + await ticket.save() + + self.logger.info( + "Ticket taken", + extra={ + "ticket_id": str(ticket_id), + "actor_user_id": str(actor.id), + }, + ) + + return self._to_ticket_response(ticket) + + async def assign_ticket( + self, + ticket_id: PydanticObjectId, + dto: AssignTicketRequest, + ) -> TicketResponse: + ticket = await self._get_ticket_or_404(ticket_id) + agent = await self.user_service.get_by_id_with_roles(dto.agent_id) + if agent is None: raise AppHTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail=f"Ticket {ticket_id} does not exist.", + detail=f"Agent {dto.agent_id} does not exist.", ) - previous_status = ticket.status - if dto.status == previous_status: + 
agent_roles = agent.roles_names() + if not self._can_be_ticket_agent(agent_roles): raise AppHTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail="Ticket is already in the requested status.", + detail="The provided user cannot be assigned as a ticket agent.", ) - allowed_statuses = self.allowed_transitions.get(previous_status, set()) - if dto.status not in allowed_statuses: + if ticket.status == TicketStatus.FINISHED: raise AppHTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=( - f"Invalid status transition from '{previous_status.value}' " - f"to '{dto.status.value}'." + detail="Finished tickets cannot receive a new assignee.", + ) + + previous_assignment = self._get_active_assignment(ticket) + previous_agent_id = previous_assignment.agent_id if previous_assignment is not None else None + now = datetime.now(UTC) + + if previous_assignment is not None: + previous_assignment.exit_date = now + + ticket.agent_history.append( + TicketHistory( + agent_id=agent.id, + name=self._resolve_user_display_name(agent), + level=self._resolve_agent_level(agent_roles), + assignment_date=now, + exit_date=None, + transfer_reason=dto.reason, + ) + ) + + ticket.status = self._derive_status_after_assignment(ticket.status) + updated_ticket = await self.repo.save(ticket) + + await self.dispatcher.publish( + AppEvent.TICKET_ASSIGNEE_UPDATED, + TicketAssigneeUpdatedEventSchema( + ticket_id=updated_ticket.id, + client_id=updated_ticket.client.id, + new_agent_id=agent.id, + reason=dto.reason, + ), + ) + + self.logger.info( + "Ticket assigned", + extra={ + "ticket_id": str(ticket_id), + "previous_agent_id": str(previous_agent_id) if previous_agent_id is not None else None, + "new_agent_id": str(agent.id), + }, + ) + + return self._to_ticket_response(updated_ticket) + + async def escalate_ticket( + self, + ticket_id: PydanticObjectId, + dto: EscalateTicketRequest, + ) -> TicketResponse: + ticket = await self._get_ticket_or_404(ticket_id) + current_assignment = 
self._get_active_assignment(ticket) + if current_assignment is None: + raise AppHTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Ticket must have an active assignee before it can be escalated.", + ) + + if current_assignment.agent_id == dto.target_agent_id: + raise AppHTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Ticket is already assigned to the target agent.", + ) + + if ticket.status == TicketStatus.FINISHED: + raise AppHTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Finished tickets cannot be escalated.", + ) + + target_agent = await self.user_service.get_by_id_with_roles(dto.target_agent_id) + if target_agent is None: + raise AppHTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Agent {dto.target_agent_id} does not exist.", + ) + + target_agent_roles = target_agent.roles_names() + if not self._can_be_ticket_agent(target_agent_roles): + raise AppHTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="The provided user cannot be assigned as a ticket agent.", + ) + + previous_agent_id = current_assignment.agent_id + source_level = self._normalize_support_level(current_assignment.level) + target_level = self._normalize_support_level( + self._resolve_agent_level(target_agent_roles) + ) + self._validate_escalation_level(source_level, target_level) + + now = datetime.now(UTC) + current_assignment.exit_date = now + current_assignment.transfer_reason = dto.reason + + ticket.agent_history.append( + TicketHistory( + agent_id=target_agent.id, + name=self._resolve_user_display_name(target_agent), + level=target_level, + assignment_date=now, + exit_date=None, + transfer_reason=dto.reason, + ) + ) + ticket.status = TicketStatus.IN_PROGRESS + updated_ticket = await self.repo.save(ticket) + + await self.dispatcher.publish( + AppEvent.TICKET_ESCALATED, + TicketEscalatedEventSchema( + ticket_id=updated_ticket.id, + client_id=updated_ticket.client.id, + new_agent_id=target_agent.id, + 
new_agent_name=self._resolve_user_display_name(target_agent), + new_level=target_level, + transfer_reason=dto.reason, + ), + ) + + self.logger.info( + "Ticket escalated", + extra={ + "ticket_id": str(ticket_id), + "previous_agent_id": ( + str(previous_agent_id) if previous_agent_id is not None else None ), + "source_level": source_level, + "target_level": target_level, + "new_agent_id": str(target_agent.id), + }, + ) + + return self._to_ticket_response(updated_ticket) + + async def transfer_ticket( + self, + ticket_id: PydanticObjectId, + dto: TransferTicketRequest, + ) -> TicketResponse: + ticket = await self._get_ticket_or_404(ticket_id) + current_assignment = self._get_active_assignment(ticket) + if current_assignment is None: + raise AppHTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Ticket must have an active assignee before it can be transferred.", ) - updated_ticket = await self.repo.update_status(ticket, dto.status) + if current_assignment.agent_id == dto.target_agent_id: + raise AppHTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Ticket is already assigned to the target agent.", + ) + + if ticket.status == TicketStatus.FINISHED: + raise AppHTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Finished tickets cannot be transferred.", + ) + + target_agent = await self.user_service.get_by_id_with_roles(dto.target_agent_id) + if target_agent is None: + raise AppHTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Agent {dto.target_agent_id} does not exist.", + ) + + target_agent_roles = target_agent.roles_names() + if not self._can_be_ticket_agent(target_agent_roles): + raise AppHTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="The provided user cannot be assigned as a ticket agent.", + ) + + source_level = self._normalize_support_level(current_assignment.level) + target_level = self._normalize_support_level( + self._resolve_agent_level(target_agent_roles) + ) + if 
target_level != source_level: + raise AppHTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Direct ticket transfer must keep the same support level.", + ) + + now = datetime.now(UTC) + current_assignment.exit_date = now + current_assignment.transfer_reason = dto.reason + + ticket.agent_history.append( + TicketHistory( + agent_id=target_agent.id, + name=self._resolve_user_display_name(target_agent), + level=source_level, + assignment_date=now, + exit_date=None, + transfer_reason=dto.reason, + ) + ) + ticket.status = TicketStatus.IN_PROGRESS + updated_ticket = await self.repo.save(ticket) + + await self.dispatcher.publish( + AppEvent.TICKET_ASSIGNEE_UPDATED, + TicketAssigneeUpdatedEventSchema( + ticket_id=updated_ticket.id, + client_id=updated_ticket.client.id, + new_agent_id=target_agent.id, + reason=dto.reason, + ), + ) - tickets_status_changed_total.labels( - from_status=previous_status.value, to_status=dto.status.value - ).inc() self.logger.info( - "Ticket status updated", + "Ticket transferred", extra={ "ticket_id": str(ticket_id), - "from": previous_status.value, - "to": dto.status.value, + "previous_agent_id": str(current_assignment.agent_id), + "new_agent_id": str(target_agent.id), + "level": source_level, }, ) + return self._to_ticket_response(updated_ticket) + + async def update_ticket( + self, ticket_id: PydanticObjectId, dto: UpdateTicketDTO + ) -> TicketResponse: + ticket = await self._get_ticket_or_404(ticket_id) + updates = dto.model_dump(exclude_unset=True) + status_update = updates.pop("status", None) + previous_status: TicketStatus | None = None + + if status_update is not None and status_update != ticket.status: + previous_status = ticket.status + self._validate_status_change(previous_status, status_update) + ticket.status = status_update + elif status_update is not None and not updates: + return self._to_ticket_response(ticket) + + for field_name, value in updates.items(): + setattr(ticket, field_name, value) + + if status_update 
is None and not updates: + raise AppHTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="At least one updatable field must be provided.", + ) + + updated_ticket = await self.repo.save(ticket) + if previous_status is not None and status_update is not None: + self._record_status_transition( + ticket_id, previous_status, status_update, actor=None + ) + if status_update == TicketStatus.FINISHED: + await self._publish_ticket_closed(updated_ticket) + return self._to_ticket_response(updated_ticket) + + async def add_comment_to_ticket( + self, + ticket_id: PydanticObjectId, + author_name: str, + dto: AddTicketCommentDTO + ) -> TicketComment | None: + tc = TicketComment( + comment_id=uuid4(), + author = author_name, + text = dto.text, + date = datetime.now(UTC), + internal = dto.internal + ) + return await self.repo.add_ticket_comment(ticket_id, tc) + + async def list_ticket_comments( + self, ticket_id: PydanticObjectId + ) -> list[TicketCommentResponse] | None: + ticket = await self.repo.get_by_id(ticket_id) + if ticket is None: + return None + return [ + TicketCommentResponse( + comment_id=comment.comment_id, + author=comment.author, + text=comment.text, + date=comment.date, + internal=comment.internal, + ) + for comment in ticket.comments + ] + + async def update_ticket_comment( + self, ticket_id: PydanticObjectId, comment_id: UUID, dto: UpdateTicketCommentDTO + ) -> TicketComment | None: + return await self.repo.update_ticket_comment(ticket_id, comment_id, dto) + + async def delete_ticket_comment( + self, ticket_id: PydanticObjectId, comment_id: UUID + ) -> TicketComment | None: + return await self.repo.delete_ticket_comment(ticket_id, comment_id) + + async def update_status( + self, + ticket_id: PydanticObjectId, + dto: UpdateTicketStatusDTO, + actor: UserWithRoles, + ) -> UpdateTicketStatusResponseDTO: + ticket = await self._get_ticket_or_404(ticket_id) + + self._authorize_status_change(ticket, actor) + + previous_status = ticket.status + if dto.status 
== previous_status: + raise AppHTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Ticket is already in the requested status.", + ) + + self._validate_status_change(previous_status, dto.status) + ticket.status = dto.status + + updated_ticket = await self.repo.save(ticket) + self._record_status_transition(ticket_id, previous_status, dto.status, actor=actor) + if dto.status == TicketStatus.FINISHED: + await self._publish_ticket_closed(updated_ticket) + return UpdateTicketStatusResponseDTO( id=str(updated_ticket.id), previous_status=previous_status, current_status=updated_ticket.status, ) - async def _build_ticket_client(self, client_id: UUID) -> TicketClient: + + async def get_ticket_history(self, ticket_id: PydanticObjectId) -> list[TicketHistory] | None: + return await self.repo.get_ticket_history(ticket_id) + + async def search_ticket_by_text( + self, search_query: str, user: UserWithRoles + ) -> list[Ticket] | None: + if not search_query.strip(): + return [] + + roles = user.roles_names() + is_admin = "admin" in roles + is_agent = any( + role.strip().upper() in {"AGENT", "N1", "N2", "N3"} for role in roles + ) + + if (is_admin or is_agent) and user.company_id is None: + return await self.repo.search_ticket(search_query, global_scope=True) + + if is_admin: + return await self.repo.search_ticket( + search_query, company_id=user.company_id + ) + + if is_agent: + return await self.repo.search_ticket(search_query, agent_id=user.id) + + return await self.repo.search_ticket(search_query, client_id=user.id) + + + + async def _build_ticket_client( + self, + client_id: UUID, + company_id: UUID | None, + company_name: str | None, + ) -> TicketClient: user = await self.user_service.get_by_id(client_id) if user is None: raise AppHTTPException( @@ -135,8 +599,8 @@ async def _build_ticket_client(self, client_id: UUID) -> TicketClient: client_name = user.name or user.username or user.email company = TicketCompany( - id=user.id, - name=f"{client_name} account", + 
id=company_id if company_id is not None else user.id, + name=company_name if company_name is not None else f"{client_name} account", ) return TicketClient( id=user.id, @@ -145,8 +609,222 @@ async def _build_ticket_client(self, client_id: UUID) -> TicketClient: company=company, ) - def _to_ticket_response(self, ticket: Ticket) -> TicketResponseDTO: - return TicketResponseDTO( + async def _get_ticket_or_404(self, ticket_id: PydanticObjectId) -> Ticket: + ticket = await self.repo.get_by_id(ticket_id) + if ticket is None: + raise AppHTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Ticket {ticket_id} does not exist.", + ) + return ticket + + def _authorize_status_change(self, ticket: Ticket, actor: UserWithRoles) -> None: + current_agent_id = self._get_current_assigned_agent_id(ticket) + actor_roles = actor.roles_names() + + if current_agent_id is None: + raise AppHTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="O chamado precisa ser assumido por um atendente antes de alterar o status.", + ) + + if "admin" not in actor_roles and actor.id != current_agent_id: + raise AppHTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Somente o atendente responsável ou um administrador pode alterar o status deste chamado.", + ) + + def _validate_status_change( + self, previous_status: TicketStatus, new_status: TicketStatus + ) -> None: + allowed_statuses = self.allowed_transitions.get(previous_status, set()) + if new_status not in allowed_statuses: + raise AppHTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=( + f"Invalid status transition from '{previous_status.value}' " + f"to '{new_status.value}'." 
+ ), + ) + + def _record_status_transition( + self, + ticket_id: PydanticObjectId, + previous_status: TicketStatus, + new_status: TicketStatus, + actor: UserWithRoles | None, + ) -> None: + tickets_status_changed_total.labels( + from_status=previous_status.value, to_status=new_status.value + ).inc() + extra: dict[str, str] = { + "ticket_id": str(ticket_id), + "from": previous_status.value, + "to": new_status.value, + } + if actor is not None: + extra["actor_user_id"] = str(actor.id) + self.logger.info("Ticket status updated", extra=extra) + + async def _publish_ticket_closed(self, ticket: Ticket) -> None: + assert ticket.id is not None + await self.dispatcher.publish( + AppEvent.TICKET_CLOSED, + TicketClosedEventSchema( + ticket_id=ticket.id, + triage_id=ticket.triage_id, + client_id=ticket.client.id, + ), + ) + + def _get_current_assigned_agent_id(self, ticket: Ticket) -> UUID | None: + current_assignment = self._get_active_assignment(ticket) + if current_assignment is None: + return None + return current_assignment.agent_id + + def _get_active_assignment(self, ticket: Ticket) -> TicketHistory | None: + for history in reversed(ticket.agent_history): + if history.exit_date is None: + return history + return None + + def _get_current_assignment(self, ticket: Ticket) -> TicketHistory | None: + if ticket.agent_history: + return ticket.agent_history[-1] + return None + + def _normalize_support_level(self, level: str) -> str: + normalized = level.strip().upper() + if normalized == "AGENT": + return "N1" + return normalized + + def _support_level_rank(self, level: str) -> int | None: + normalized = self._normalize_support_level(level) + if len(normalized) < 2 or normalized[0] != "N": + return None + + numeric_level = normalized[1:] + if not numeric_level.isdigit(): + return None + + return int(numeric_level) + + def _validate_escalation_level(self, source_level: str, target_level: str) -> None: + source_rank = self._support_level_rank(source_level) + target_rank = 
self._support_level_rank(target_level) + + if source_rank is None or target_rank is None: + raise AppHTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Support levels must use the N format.", + ) + + if target_rank <= source_rank: + raise AppHTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Ticket escalation must target a higher support level.", + ) + + def _resolve_user_display_name(self, user: UserWithRoles) -> str: + return user.name or user.username or user.email + + def _can_be_ticket_agent(self, roles_names: list[str]) -> bool: + for role_name in roles_names: + normalized = role_name.strip().upper() + if normalized in {"AGENT", "ADMIN", "N1", "N2", "N3"}: + return True + return False + + def _resolve_agent_level(self, roles_names: list[str]) -> str: + for role_name in roles_names: + normalized = role_name.strip().upper() + if normalized in {"N1", "N2", "N3"}: + return normalized + if "admin" in roles_names: + return "admin" + return "N1" + + def _derive_status_after_assignment(self, current_status: TicketStatus) -> TicketStatus: + if current_status in {TicketStatus.OPEN, TicketStatus.AWAITING_ASSIGNMENT}: + return TicketStatus.IN_PROGRESS + return current_status + + def _resolve_assigned_agent( + self, ticket: Ticket + ) -> tuple[UUID | None, str | None]: + current_assignment = self._get_active_assignment(ticket) + if current_assignment is not None: + last = current_assignment + return last.agent_id, last.name + return None, None + + def _matches_queue_filters(self, ticket: Ticket, filters: TicketQueueFiltersDTO) -> bool: + current_assignment = self._get_active_assignment(ticket) + current_level = current_assignment.level if current_assignment is not None else None + current_assignee_id = current_assignment.agent_id if current_assignment is not None else None + unassigned = current_assignee_id is None + + # department_id is a provisional contract field. 
The current persisted model + # does not store a department snapshot yet, so queue items can only expose None. + if filters.department_id is not None: + return False + + if filters.unassigned_only is True and not unassigned: + return False + + if filters.level is not None and filters.level != current_level: + return False + + if filters.assignee_id is not None and filters.assignee_id != current_assignee_id: + return False + + return True + + def _queue_sort_key(self, ticket: Ticket) -> tuple[int, datetime]: + criticality_priority = { + TicketCriticality.HIGH: 0, + TicketCriticality.MEDIUM: 1, + TicketCriticality.LOW: 2, + } + return criticality_priority[ticket.criticality], ticket.creation_date + + def _to_ticket_queue_item_response(self, ticket: Ticket) -> TicketQueueItemResponse: + current_assignment = self._get_active_assignment(ticket) + assignee_id, assignee_name = self._resolve_assigned_agent(ticket) + level = current_assignment.level if current_assignment is not None else None + + return TicketQueueItemResponse( + id=str(ticket.id), + triage_id=str(ticket.triage_id), + type=ticket.type, + criticality=ticket.criticality, + product=ticket.product, + status=ticket.status, + creation_date=ticket.creation_date, + description=ticket.description, + client=TicketClientResponse( + id=ticket.client.id, + name=ticket.client.name, + email=ticket.client.email, + company=TicketCompanyResponse( + id=ticket.client.company.id, + name=ticket.client.company.name, + ), + ), + department_id=None, + department_name=None, + level=level, + assignee_id=assignee_id, + assignee_name=assignee_name, + unassigned=assignee_id is None, + ) + + def _to_ticket_response(self, ticket: Ticket) -> TicketResponse: + assigned_agent_id, assigned_agent_name = self._resolve_assigned_agent(ticket) + + return TicketResponse( id=str(ticket.id), triage_id=str(ticket.triage_id), type=ticket.type, @@ -157,7 +835,7 @@ def _to_ticket_response(self, ticket: Ticket) -> TicketResponseDTO: 
description=ticket.description, chat_ids=[str(chat_id) for chat_id in ticket.chat_ids], agent_history=[ - TicketHistoryResponseDTO( + TicketHistoryResponse( agent_id=history.agent_id, name=history.name, level=history.level, @@ -167,17 +845,17 @@ def _to_ticket_response(self, ticket: Ticket) -> TicketResponseDTO: ) for history in ticket.agent_history ], - client=TicketClientResponseDTO( + client=TicketClientResponse( id=ticket.client.id, name=ticket.client.name, email=ticket.client.email, - company=TicketCompanyResponseDTO( + company=TicketCompanyResponse( id=ticket.client.company.id, name=ticket.client.company.name, ), ), comments=[ - TicketCommentResponseDTO( + TicketCommentResponse( comment_id=comment.comment_id, author=comment.author, text=comment.text, @@ -186,4 +864,6 @@ def _to_ticket_response(self, ticket: Ticket) -> TicketResponseDTO: ) for comment in ticket.comments ], + assigned_agent_id=assigned_agent_id, + assigned_agent_name=assigned_agent_name, ) diff --git a/app/domains/ticket/swagger_utils.py b/app/domains/ticket/swagger_utils.py new file mode 100644 index 0000000..a60a3b4 --- /dev/null +++ b/app/domains/ticket/swagger_utils.py @@ -0,0 +1,105 @@ +from typing import Any + +from fastapi import status + +from app.domains.ticket.schemas import TicketCommentResponse, TicketResponse +from app.schemas.response import ErrorContent, GenericSuccessContent + +comment_on_ticket_responses: dict[int | str, dict[str, Any]] = { + 201: { + "description": "Comment added to the ticket and returned in the response payload.", + "model": GenericSuccessContent[TicketCommentResponse], + }, + 401: { + "description": "Missing or invalid authentication token.", + "model": ErrorContent, + }, + 403: { + "description": "User lacks the `ticket:comment` permission.", + "model": ErrorContent, + }, + 404: { + "description": "Ticket not found for the given `ticket_id`.", + "model": ErrorContent, + }, +} + +comment_on_ticket_swagger: dict[str, Any] = { + "summary": "Add a comment to a 
ticket", + "description": ( + "Appends a comment to the ticket identified by `ticket_id`. " + "The author is derived from the authenticated user (name, username, or email, " + "in that order). Use the `internal` flag to mark a comment as visible to " + "agents only or visible to the requesting client." + ), + "status_code": status.HTTP_201_CREATED, + "response_model": GenericSuccessContent[TicketCommentResponse], + "responses": comment_on_ticket_responses, +} + +get_ticket_comments_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": "List of comments belonging to the ticket, in insertion order.", + "model": GenericSuccessContent[list[TicketCommentResponse]], + }, + 401: { + "description": "Missing or invalid authentication token.", + "model": ErrorContent, + }, + 403: { + "description": "User lacks the `ticket:read` permission.", + "model": ErrorContent, + }, + 404: { + "description": "Ticket not found for the given `ticket_id`.", + "model": ErrorContent, + }, +} + +get_ticket_comments_swagger: dict[str, Any] = { + "summary": "List ticket comments", + "description": ( + "Returns every comment attached to the ticket identified by `ticket_id`, " + "preserving insertion order. Both internal and external comments are included; " + "consumers should filter by the `internal` flag when rendering to clients." + ), + "status_code": status.HTTP_200_OK, + "response_model": GenericSuccessContent[list[TicketCommentResponse]], + "responses": get_ticket_comments_responses, +} + + +search_tickets_by_text_responses: dict[int | str, dict[str, Any]] = { + 200: { + "description": ( + "List of tickets whose `description` or comments match the query. " + "Results are scoped by the requester's role and may be empty." 
+ ), + "model": GenericSuccessContent[list[TicketResponse]], + }, + 401: { + "description": "Missing or invalid authentication token.", + "model": ErrorContent, + }, + 403: { + "description": "User lacks the `chat:read` permission.", + "model": ErrorContent, + }, +} + +search_tickets_by_text_swagger: dict[str, Any] = { + "summary": "Search tickets by text", + "description": ( + "Case-insensitive substring search across the ticket `description` and the " + "text of every comment. Results are scoped by the requester's role:\n\n" + "- **client**: only tickets where the requester is the client.\n" + "- **agent / N1 / N2 / N3**: only tickets where the requester appears in " + "the assignment history.\n" + "- **admin**: only tickets whose client belongs to the same company as the " + "requester. An admin without an associated company sees an empty list.\n\n" + "A blank `search_query` always returns an empty list." + ), + "status_code": status.HTTP_200_OK, + "response_model": GenericSuccessContent[list[TicketResponse]], + "responses": search_tickets_by_text_responses, +} diff --git a/app/main.py b/app/main.py index 03e6d6b..f842929 100644 --- a/app/main.py +++ b/app/main.py @@ -10,13 +10,29 @@ register_exception_handlers, ) from app.core.background_tasks import global_background_tasks +from app.core.event_dispatcher import get_event_dispatcher +from app.core.event_dispatcher.event_dispatcher import EventDispatcher from app.core.init_routers import initiate_routers from app.core.logger import get_logger, stop_logger from app.core.middleware import add_middlewares from app.db import close_postgres_db, init_postgres_db, mongo_db from app.db.postgres.engine import engine as pg_engine +from app.domains.chatbot.listeners import register_chatbot_listener +from app.domains.chatbot.models import Attendance from app.domains.live_chat import Conversation +from app.domains.live_chat.listeners import register_conversation_listener +from app.domains.notifications.listeners import 
register_email_outbox_listener from app.domains.ticket import Ticket +from app.domains.ticket.listeners import register_ticket_listener + + +def register_app_events_listeners(dispatcher: EventDispatcher) -> None: + logger = get_logger("app.main") + register_conversation_listener(dispatcher) + register_chatbot_listener(dispatcher) + register_ticket_listener(dispatcher) + register_email_outbox_listener(dispatcher) + logger.info("Registered event listeners to EventDispatcher.") @asynccontextmanager @@ -24,21 +40,35 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]: logger = get_logger("app.main") settings = get_settings() logger.info("Starting Application...") - tasks = global_background_tasks(pg_engine) + + tasks: list[asyncio.Task] = [] + dispatcher = get_event_dispatcher() try: if settings.ENVIRONMENT == "development": await init_postgres_db() await mongo_db.connect() - await init_beanie(database=mongo_db.get_db(), document_models=[Conversation, Ticket]) + await init_beanie( + database=mongo_db.get_db(), + document_models=[Conversation, Ticket, Attendance], + ) + + register_app_events_listeners(dispatcher) + + tasks = global_background_tasks(pg_engine) + yield finally: logger.info("🛑 Shutting Down Application...") + for task in tasks: task.cancel() - await asyncio.gather(*tasks, return_exceptions=True) + + if tasks: + await asyncio.gather(*tasks, return_exceptions=True) + await close_postgres_db() await mongo_db.disconnect() stop_logger() @@ -55,4 +85,4 @@ def create_app() -> FastAPI: add_middlewares(app) initiate_routers(app) register_exception_handlers(app) - return app + return app \ No newline at end of file diff --git a/app/seed/run_seed.py b/app/seed/run_seed.py index 9056f2f..f8e44be 100644 --- a/app/seed/run_seed.py +++ b/app/seed/run_seed.py @@ -9,6 +9,7 @@ from app.seed import seed from app.seed.seed_examples import ( seed_example_attendances, + seed_example_companies_and_products, seed_example_conversations, seed_example_tickets, 
seed_example_user_roles, @@ -22,7 +23,12 @@ async def run() -> None: await seed.seed_roles(db) await seed.seed_permissions(db) await seed.seed_role_permissions(db) + + # Agora as empresas/produtos são inseridas ANTES dos usuários (foreign key constraint) + await seed_example_companies_and_products(db) await seed.seed_users(db) + + # Descomentado: await seed_example_users(db) await seed_example_user_roles(db) @@ -30,6 +36,7 @@ async def run() -> None: await mongo_db.connect() try: mongo = mongo_db.get_db() + # Descomentado (opcional, para uma base de testes rica no mongo): await seed_example_attendances(mongo) await seed_example_tickets(mongo) await seed_example_conversations(mongo) @@ -38,4 +45,4 @@ async def run() -> None: if __name__ == "__main__": - asyncio.run(run()) + asyncio.run(run()) \ No newline at end of file diff --git a/app/seed/seed.py b/app/seed/seed.py index 800a99c..2857c01 100644 --- a/app/seed/seed.py +++ b/app/seed/seed.py @@ -2,11 +2,13 @@ import string from typing import Any -from sqlalchemy import insert, select +from sqlalchemy import select from sqlalchemy.dialects.postgresql import insert as pg_insert from sqlalchemy.ext.asyncio import AsyncSession from app.core.security import PasswordSecurity +import app.domains.companies.models # noqa: F401 +import app.domains.products.models # noqa: F401 from app.domains.auth.models import Permission, Role, User, role_permissions, user_roles @@ -17,8 +19,8 @@ async def seed_roles(session: AsyncSession) -> None: {"id": 3, "name": "agent", "description": "attends to the clients problems"}, {"id": 4, "name": "client", "description": "end user of the application"}, ] - - await session.execute(insert(Role).values(roles)) + stmt = pg_insert(Role).values(roles).on_conflict_do_nothing() + await session.execute(stmt) async def seed_permissions(session: AsyncSession) -> None: @@ -30,6 +32,7 @@ async def seed_permissions(session: AsyncSession) -> None: {"name": "user:update", "description": "Update users"}, 
{"name": "user:replace", "description": "Replace users"}, {"name": "user:add_roles", "description": "Add roles to users"}, + {"name": "user:update_roles", "description": "Add and remove roles from users"}, # Password {"name": "password:change", "description": "Change user password"}, {"name": "password:reset", "description": "Reset user password"}, @@ -64,7 +67,40 @@ async def seed_permissions(session: AsyncSession) -> None: # Ticket {"name": "ticket:read", "description": "Read tickets"}, {"name": "ticket:create", "description": "Create tickets"}, + {"name": "ticket:update", "description": "Update ticket fields"}, {"name": "ticket:update_status", "description": "Update ticket status"}, + {"name": "ticket:queue", "description": "Read ticket queue"}, + {"name": "ticket:assign", "description": "Assign tickets"}, + {"name": "ticket:transfer", "description": "Transfer tickets"}, + {"name": "ticket:escalate", "description": "Escalate tickets"}, + {"name": "ticket:comment", "description": "Adds comment to ticket"}, + {"name": "ticket:update_comment", "description": "Updates comment to ticket"}, + {"name": "ticket:delete_comment", "description": "Deletes comment to ticket"}, + # Company + {"name": "company:create", "description": "Create companies"}, + {"name": "company:read", "description": "Read company details"}, + {"name": "company:list", "description": "List companies"}, + {"name": "company:replace", "description": "Replace companies"}, + {"name": "company:update", "description": "Update companies"}, + {"name": "company:soft_delete", "description": "Soft delete companies"}, + {"name": "company:add_product", "description": "Add product to company"}, + {"name": "company:remove_products", "description": "Remove products from company in batch"}, + {"name": "company:remove_product", "description": "Remove single product from company"}, + {"name": "company:add_users", "description": "Add users to company"}, + {"name": "company:remove_users", "description": "Remove users 
from company in batch"}, + {"name": "company:remove_user", "description": "Remove single user from company"}, + {"name": "company:list_users", "description": "List company users"}, + # Product + {"name": "product:create", "description": "Create products"}, + {"name": "product:read", "description": "Read product details"}, + {"name": "product:list", "description": "List products"}, + {"name": "product:replace", "description": "Replace products"}, + {"name": "product:update", "description": "Update products"}, + {"name": "product:soft_delete", "description": "Soft delete products"}, + {"name": "product:add_companies", "description": "Add product to companies"}, + {"name": "product:remove_companies", "description": "Remove product from companies in batch"}, + {"name": "product:remove_company", "description": "Remove product from single company"}, + {"name": "product:list_companies", "description": "List product companies"}, ] insert_stmt = pg_insert(Permission).values(permissions).on_conflict_do_nothing() @@ -73,10 +109,19 @@ async def seed_permissions(session: AsyncSession) -> None: async def seed_role_permissions(session: AsyncSession) -> None: relations = { - "admin": ["user:%", "role:%", "permission:%", "chat:%", "password:%", "ticket:%"], + "admin": ["user:%", "role:%", "permission:%", "chat:%", "password:%", "ticket:%", "company:%", "product:%"], "user": ["session:%", "chat:%", "password:change"], - "agent": ["session:%", "chat:%", "password:change", "ticket:%"], - "client": ["session:%", "chat:%", "password:change"], + "agent": [ + "session:%", + "chat:%", + "password:change", + "ticket:%", + "company:read", + "company:list", + "product:read", + "product:list", + ], + "client": ["session:%", "chat:%", "password:change", "company:read", "product:read", "product:list", "ticket:read"], } for role_name, patterns in relations.items(): @@ -128,7 +173,7 @@ async def seed_users(session: AsyncSession) -> None: "username": name, "name": name, "must_change_password": 
False, - "must_accept_terms": False + "must_accept_terms": False, } ) @@ -149,4 +194,4 @@ async def seed_users(session: AsyncSession) -> None: user_role_values = [{"user_id": user_id, "role_id": admin_role_id} for user_id in user_ids] role_insert_stmt = pg_insert(user_roles).values(user_role_values).on_conflict_do_nothing() - await session.execute(role_insert_stmt) + await session.execute(role_insert_stmt) \ No newline at end of file diff --git a/app/seed/seed_examples.py b/app/seed/seed_examples.py index 0449f6a..787fa5f 100644 --- a/app/seed/seed_examples.py +++ b/app/seed/seed_examples.py @@ -1,23 +1,23 @@ """ Seed example data for a professional SyncDesk demo. - Creates: - - Postgres: agent and client users with proper roles + - Postgres: companies, products, agent and client users with proper roles - MongoDB: attendances (triage sessions), tickets, and conversations """ - from datetime import UTC, datetime, timedelta from typing import Any from uuid import UUID, uuid4 from bson import ObjectId from motor.motor_asyncio import AsyncIOMotorDatabase -from sqlalchemy import select +from sqlalchemy import select, text from sqlalchemy.dialects.postgresql import insert as pg_insert from sqlalchemy.ext.asyncio import AsyncSession from app.core.security import PasswordSecurity from app.domains.auth.models import Role, User, user_roles +from app.domains.companies.models import Company, company_products +from app.domains.products.models import Product # --------------------------------------------------------------------------- # Fixed UUIDs so relationships stay consistent across seeds @@ -27,14 +27,12 @@ "camila": UUID("a1000000-0000-0000-0000-000000000002"), "rafael": UUID("a1000000-0000-0000-0000-000000000003"), } - CLIENT_IDS: dict[str, UUID] = { "marcos": UUID("c1000000-0000-0000-0000-000000000001"), "ana": UUID("c1000000-0000-0000-0000-000000000002"), "fernanda": UUID("c1000000-0000-0000-0000-000000000003"), "ricardo": UUID("c1000000-0000-0000-0000-000000000004"), 
} - COMPANY_IDS: dict[str, UUID] = { "techsol": UUID("d1000000-0000-0000-0000-000000000001"), "dataflow": UUID("d1000000-0000-0000-0000-000000000002"), @@ -47,22 +45,66 @@ NOW = datetime(2026, 4, 4, 14, 0, 0, tzinfo=UTC) - # ===== POSTGRES ===== +async def seed_example_companies_and_products(session: AsyncSession) -> None: + """Seed example companies, products, and their relationships.""" + now = datetime.now(UTC).replace(tzinfo=None) + future = now + timedelta(days=365) + + # 1. Inserindo Produtos + products_payload = [ + {"id": 1, "name": "Produto A", "description": "Sistema de Gestão Financeira", "created_at": now}, + {"id": 2, "name": "Produto B", "description": "Dashboard de Analytics e BI", "created_at": now}, + {"id": 3, "name": "Produto C", "description": "Módulo de Autenticação e SSO", "created_at": now}, + ] + await session.execute(pg_insert(Product).values(products_payload).on_conflict_do_nothing()) + await session.execute( + text("SELECT setval('products_id_seq', (SELECT COALESCE(MAX(id), 1) FROM products))") + ) + + # 2. Inserindo Empresas + companies_payload = [ + { + "id": COMPANY_IDS["techsol"], + "legal_name": "TechSol Sistemas Ltda", + "trade_name": "TechSol Sistemas", + "tax_id": "12345678000190", + "created_at": now + }, + { + "id": COMPANY_IDS["dataflow"], + "legal_name": "DataFlow Analytics S/A", + "trade_name": "DataFlow Analytics", + "tax_id": "98765432000110", + "created_at": now + }, + ] + await session.execute(pg_insert(Company).values(companies_payload).on_conflict_do_nothing()) + + # 3. 
Associando Produtos às Empresas + company_products_payload = [ + {"company_id": COMPANY_IDS["techsol"], "product_id": 1, "bought_at": now, "support_until": future}, + {"company_id": COMPANY_IDS["techsol"], "product_id": 2, "bought_at": now, "support_until": future}, + {"company_id": COMPANY_IDS["dataflow"], "product_id": 1, "bought_at": now, "support_until": future}, + {"company_id": COMPANY_IDS["dataflow"], "product_id": 3, "bought_at": now, "support_until": future}, + ] + await session.execute(pg_insert(company_products).values(company_products_payload).on_conflict_do_nothing()) + async def seed_example_users(session: AsyncSession) -> None: """Seed agent and client users.""" pw = PasswordSecurity() default_password = "Demo@2026!" users_payload: list[dict[str, Any]] = [ - # Agents + # Agents (Sem vínculo com empresa) { "id": AGENT_IDS["lucas"], "email": "lucas.silva@syncdesk.pro", "password_hash": pw.generate_password_hash(default_password), "username": "lucas.silva", "name": "Lucas Silva", + "company_id": None, "must_change_password": False, "must_accept_terms": False, }, @@ -72,6 +114,7 @@ async def seed_example_users(session: AsyncSession) -> None: "password_hash": pw.generate_password_hash(default_password), "username": "camila.santos", "name": "Camila Santos", + "company_id": None, "must_change_password": False, "must_accept_terms": False, }, @@ -81,16 +124,18 @@ async def seed_example_users(session: AsyncSession) -> None: "password_hash": pw.generate_password_hash(default_password), "username": "rafael.costa", "name": "Rafael Costa", + "company_id": None, "must_change_password": False, "must_accept_terms": False, }, - # Clients + # Clients (Com vínculo de empresa) { "id": CLIENT_IDS["marcos"], "email": "marcos.oliveira@techsol.com.br", "password_hash": pw.generate_password_hash(default_password), "username": "marcos.oliveira", "name": "Marcos Oliveira", + "company_id": COMPANY_IDS["techsol"], "must_change_password": False, "must_accept_terms": False, }, @@ 
-100,6 +145,7 @@ async def seed_example_users(session: AsyncSession) -> None: "password_hash": pw.generate_password_hash(default_password), "username": "ana.pereira", "name": "Ana Pereira", + "company_id": COMPANY_IDS["techsol"], "must_change_password": False, "must_accept_terms": False, }, @@ -109,6 +155,7 @@ async def seed_example_users(session: AsyncSession) -> None: "password_hash": pw.generate_password_hash(default_password), "username": "fernanda.lima", "name": "Fernanda Lima", + "company_id": COMPANY_IDS["dataflow"], "must_change_password": False, "must_accept_terms": False, }, @@ -118,6 +165,7 @@ async def seed_example_users(session: AsyncSession) -> None: "password_hash": pw.generate_password_hash(default_password), "username": "ricardo.mendes", "name": "Ricardo Mendes", + "company_id": COMPANY_IDS["dataflow"], "must_change_password": False, "must_accept_terms": False, }, @@ -126,7 +174,6 @@ async def seed_example_users(session: AsyncSession) -> None: insert_stmt = pg_insert(User).values(users_payload).on_conflict_do_nothing() await session.execute(insert_stmt) - async def seed_example_user_roles(session: AsyncSession) -> None: """Assign agent and client roles to seeded users.""" role_map: dict[str, list[UUID]] = { @@ -145,7 +192,7 @@ async def seed_example_user_roles(session: AsyncSession) -> None: await session.execute(stmt) -# ===== MONGODB — helpers ===== +# ===== MONGODB - helpers ===== def _client_doc(name: str, email: str, client_id: UUID, company_name: str, company_id: UUID) -> dict[str, Any]: return { @@ -155,7 +202,6 @@ def _client_doc(name: str, email: str, client_id: UUID, company_name: str, compa "company": {"id": str(company_id), "name": company_name}, } - CLIENTS_DOC = { "marcos": _client_doc("Marcos Oliveira", "marcos.oliveira@techsol.com.br", CLIENT_IDS["marcos"], "TechSol Sistemas", COMPANY_IDS["techsol"]), @@ -167,13 +213,12 @@ def _client_doc(name: str, email: str, client_id: UUID, company_name: str, compa CLIENT_IDS["ricardo"], 
"DataFlow Analytics", COMPANY_IDS["dataflow"]), } - -# ===== MONGODB — attendances (triage sessions) ===== +# ===== MONGODB - attendances (triage sessions) ===== def _build_attendances() -> list[dict[str, Any]]: """Build 6 attendance documents representing completed triage flows.""" return [ - # 0 — Marcos: Product A → system failure → ticket created + # 0 - Marcos: Product A - system failure - ticket created { "_id": TRIAGE_IDS[0], "status": "finished", @@ -194,7 +239,7 @@ def _build_attendances() -> list[dict[str, Any]]: "answer_text": None, "answer_value": None, "type": "quick_replies"}, ], }, - # 1 — Ana: Product B → new feature request → ticket created + # 1 - Ana: Product B - new feature request - ticket created { "_id": TRIAGE_IDS[1], "status": "finished", @@ -215,7 +260,7 @@ def _build_attendances() -> list[dict[str, Any]]: "answer_text": None, "answer_value": None, "type": "quick_replies"}, ], }, - # 2 — Fernanda: access request → ticket created + # 2 - Fernanda: access request - ticket created { "_id": TRIAGE_IDS[2], "status": "finished", @@ -234,7 +279,7 @@ def _build_attendances() -> list[dict[str, Any]]: "answer_text": None, "answer_value": None, "type": "quick_replies"}, ], }, - # 3 — Ricardo: Product C → system failure → ticket created + # 3 - Ricardo: Product C - system failure - ticket created { "_id": TRIAGE_IDS[3], "status": "finished", @@ -255,7 +300,7 @@ def _build_attendances() -> list[dict[str, Any]]: "answer_text": None, "answer_value": None, "type": "quick_replies"}, ], }, - # 4 — Marcos: doubt about deadlines → resolved without ticket + # 4 - Marcos: doubt about deadlines - resolved without ticket { "_id": TRIAGE_IDS[4], "status": "finished", @@ -275,7 +320,7 @@ def _build_attendances() -> list[dict[str, Any]]: "answer_text": None, "answer_value": None, "type": "quick_replies"}, ], }, - # 5 — Ana: Product A → system failure → ticket (most recent) + # 5 - Ana: Product A - system failure - ticket (most recent) { "_id": TRIAGE_IDS[5], 
"status": "finished", @@ -290,7 +335,7 @@ def _build_attendances() -> list[dict[str, Any]]: {"step": "B", "question": "Como posso te ajudar hoje em relação ao Produto escolhido?", "answer_text": None, "answer_value": "1", "type": "quick_replies"}, {"step": "F", "question": "Por favor, explique da maneira mais detalhada possível o seu problema.", - "answer_text": "A integração com a API de pagamentos no Produto A parou de funcionar. As transações ficam pendentes e não são processadas. Urgente pois está impactando o faturamento.", + "answer_text": "A integração com a API de pagamentos no Produto A parou de funcionar. As transações ficam pendentes e não processadas. Urgente pois está impactando o faturamento.", "answer_value": None, "type": "free_text"}, {"step": "E", "question": "Aguarde, sua solicitação foi criada.", "answer_text": None, "answer_value": None, "type": "quick_replies"}, @@ -299,12 +344,12 @@ def _build_attendances() -> list[dict[str, Any]]: ] -# ===== MONGODB — tickets ===== +# ===== MONGODB - tickets ===== def _build_tickets() -> list[dict[str, Any]]: """Build 5 tickets (indices 0-3 and 5 from attendances; #4 had no ticket).""" return [ - # Ticket 0 — Marcos / Product A issue / in_progress (assigned to Lucas) + # Ticket 0 - Marcos / Product A issue / in_progress (assigned to Lucas) { "_id": TICKET_IDS[0], "triage_id": TRIAGE_IDS[0], @@ -336,7 +381,7 @@ def _build_tickets() -> list[dict[str, Any]]: }, ], }, - # Ticket 1 — Ana / Product B new feature / open + # Ticket 1 - Ana / Product B new feature / open { "_id": TICKET_IDS[1], "triage_id": TRIAGE_IDS[1], @@ -351,7 +396,7 @@ def _build_tickets() -> list[dict[str, Any]]: "client": CLIENTS_DOC["ana"], "comments": [], }, - # Ticket 2 — Fernanda / access request / waiting_for_provider + # Ticket 2 - Fernanda / access request / waiting_for_provider { "_id": TICKET_IDS[2], "triage_id": TRIAGE_IDS[2], @@ -383,7 +428,7 @@ def _build_tickets() -> list[dict[str, Any]]: }, ], }, - # Ticket 3 — Ricardo / Product 
C issue / in_progress (escalated) + # Ticket 3 - Ricardo / Product C issue / in_progress (escalated) { "_id": TICKET_IDS[3], "triage_id": TRIAGE_IDS[3], @@ -409,7 +454,7 @@ def _build_tickets() -> list[dict[str, Any]]: "level": "N2", "assignment_date": (NOW - timedelta(hours=4)).isoformat(), "exit_date": (NOW - timedelta(hours=4)).isoformat(), - "transfer_reason": "Escalado para N2 — problema de infraestrutura de autenticação", + "transfer_reason": "Escalado para N2 - problema de infraestrutura de autenticação", }, ], "client": CLIENTS_DOC["ricardo"], @@ -430,7 +475,7 @@ def _build_tickets() -> list[dict[str, Any]]: }, ], }, - # Ticket 4 (index 5 from attendances) — Ana / Product A issue / open (newest) + # Ticket 4 (index 5 from attendances) - Ana / Product A issue / open (newest) { "_id": TICKET_IDS[5], "triage_id": TRIAGE_IDS[5], @@ -439,7 +484,7 @@ def _build_tickets() -> list[dict[str, Any]]: "product": "Product A", "status": "open", "creation_date": (NOW - timedelta(hours=1, minutes=50)).isoformat(), - "description": "A integração com a API de pagamentos no Produto A parou de funcionar. As transações ficam pendentes e não são processadas. Urgente pois está impactando o faturamento.", + "description": "A integração com a API de pagamentos no Produto A parou de funcionar. As transações ficam pendentes e não processadas. 
Urgente pois está impactando o faturamento.", "chat_ids": [], "agent_history": [], "client": CLIENTS_DOC["ana"], @@ -448,12 +493,12 @@ def _build_tickets() -> list[dict[str, Any]]: ] -# ===== MONGODB — conversations ===== +# ===== MONGODB - conversations ===== def _build_conversations() -> list[dict[str, Any]]: """Build conversations linked to tickets that have chat_ids.""" return [ - # Conversation 0 — Ticket 0 (Marcos ↔ Lucas, Product A PDF export) + # Conversation 0 - Ticket 0 (Marcos -> Lucas, Product A PDF export) { "_id": CONVERSATION_IDS[0], "ticket_id": TICKET_IDS[0], @@ -515,7 +560,7 @@ def _build_conversations() -> list[dict[str, Any]]: }, ], }, - # Conversation 2 — Ticket 2 (Fernanda ↔ Camila, access request) + # Conversation 2 - Ticket 2 (Fernanda -> Camila, access request) { "_id": CONVERSATION_IDS[2], "ticket_id": TICKET_IDS[2], @@ -561,7 +606,7 @@ def _build_conversations() -> list[dict[str, Any]]: }, ], }, - # Conversation 3 — Ticket 3 (Ricardo ↔ Camila → Rafael, auth issue) + # Conversation 3 - Ticket 3 (Ricardo -> Camila -> Rafael, auth issue) { "_id": CONVERSATION_IDS[3], "ticket_id": TICKET_IDS[3], @@ -595,7 +640,7 @@ def _build_conversations() -> list[dict[str, Any]]: "sender_id": str(CLIENT_IDS["ricardo"]), "timestamp": (NOW - timedelta(hours=5, minutes=22)).isoformat(), "type": "text", - "content": "Oi Camila! Uns 5 de 12 usuários. O padrão é estranho — funciona, falha, funciona de novo. Parece aleatório.", + "content": "Oi Camila! Uns 5 de 12 usuários. O padrão é meio estranho – funciona, falha, funciona de novo. 
Parece aleatório.", }, { "id": str(uuid4()), @@ -642,26 +687,21 @@ def _build_conversations() -> list[dict[str, Any]]: ] -# ===== MONGODB — seed functions ===== - async def seed_example_attendances(db: AsyncIOMotorDatabase) -> None: # type: ignore[type-arg] """Insert example attendance (triage) documents into MongoDB.""" collection = db["atendimentos"] for doc in _build_attendances(): await collection.replace_one({"_id": doc["_id"]}, doc, upsert=True) - async def seed_example_tickets(db: AsyncIOMotorDatabase) -> None: # type: ignore[type-arg] """Insert example ticket documents into MongoDB.""" collection = db["tickets"] for doc in _build_tickets(): await collection.replace_one({"_id": doc["_id"]}, doc, upsert=True) - async def seed_example_conversations(db: AsyncIOMotorDatabase) -> None: # type: ignore[type-arg] """Insert example conversation documents into MongoDB.""" collection = db["conversations"] - # Drop legacy index that conflicts with the current schema indexes = await collection.index_information() legacy = "service_session_id_1_sequential_index_1" @@ -669,4 +709,4 @@ async def seed_example_conversations(db: AsyncIOMotorDatabase) -> None: # type: await collection.drop_index(legacy) for doc in _build_conversations(): - await collection.replace_one({"_id": doc["_id"]}, doc, upsert=True) + await collection.replace_one({"_id": doc["_id"]}, doc, upsert=True) \ No newline at end of file diff --git a/deploy/docker-compose.staging.yaml b/deploy/docker-compose.staging.yaml new file mode 100644 index 0000000..7bb3ea4 --- /dev/null +++ b/deploy/docker-compose.staging.yaml @@ -0,0 +1,8 @@ +services: + api: + image: ghcr.io/titus-system/syncdesk-api:staging + volumes: + - app_logs:/app/logs + +volumes: + app_logs: \ No newline at end of file diff --git a/docker-compose.override.yaml b/docker-compose.override.yaml new file mode 100644 index 0000000..1ac3b8b --- /dev/null +++ b/docker-compose.override.yaml @@ -0,0 +1,10 @@ +services: + api: + build: + context: . 
+ dockerfile: deploy/Dockerfile + volumes: + - .:/app + - app_logs:/app/logs + environment: + UVICORN_RELOAD: "true" \ No newline at end of file diff --git a/docker-compose.yaml b/docker-compose.yaml index f047dc5..0bb898a 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -41,9 +41,6 @@ services: start_period: 10s api: - build: - context: . - dockerfile: deploy/Dockerfile container_name: syncdesk_api restart: unless-stopped env_file: @@ -51,11 +48,10 @@ services: environment: POSTGRES_HOST: db MONGO_HOST: mongo - UVICORN_RELOAD: "true" ports: - "8000:8000" volumes: - - .:/app + - app_logs:/app/logs depends_on: db: condition: service_healthy @@ -100,7 +96,7 @@ services: container_name: syncdesk_promtail restart: unless-stopped volumes: - - ./logs:/var/log/api + - app_logs:/var/log/api - ./deploy/promtail/promtail-config.yml:/etc/promtail/config.yml - /var/run/docker.sock:/var/run/docker.sock:ro - promtail_data:/run/promtail @@ -125,6 +121,7 @@ services: - alertmanager volumes: + app_logs: postgres_data: mongo_data: prometheus_data: diff --git a/docs/event_dispatcher.md b/docs/event_dispatcher.md new file mode 100644 index 0000000..e50cf61 --- /dev/null +++ b/docs/event_dispatcher.md @@ -0,0 +1,148 @@ +# Event Dispatcher — Inter-domain communication via internal events + +## Problem + +The project architecture separates features into independent domains (`auth`, `ticket`, `live_chat`, `chatbot`). This works well while each domain operates in isolation, but some business actions trigger consequences in other domains: + +- Finishing a triage needs to create a ticket and open a conversation. +- Closing a ticket needs to end the conversation and request an attendance evaluation. +- Deactivating a user might need to terminate active sessions and revoke tokens. 
+ +The most intuitive approach — injecting services from other domains — creates problems as the system grows: + +- **Growing coupling**: each new side effect requires changing the signature and body of the originating service. +- **Circular dependencies**: the day two domains need to react to each other's actions, the import graph breaks. +- **Responsibility violation**: the service that performs the action ends up knowing details of all affected domains. + +## When to use events vs. direct injection + +Not every cross-domain interaction should be an event. The distinction is simple: + +| Scenario | Mechanism | Example | +|---|---|---| +| The caller **needs the result** to proceed | Service injection | Chatbot queries `UserService` to validate user existence before opening triage | +| The caller **just notifies something happened** and does not depend on the consequence | Event | Finished triage triggers ticket and conversation creation | + +**Guiding questions:** + +- "Do X **and then** Y with the result" → direct injection. +- "When X happens, **react**" → event. +- "Adding a new behavior requires changing the originating service?" → if yes, it should be an event. + +## Solution: In-process EventDispatcher + +An async lightweight dispatcher implemented in `app/core/event_dispatcher/`. No external infrastructure (Kafka, Redis, RabbitMQ) — just in-process coordination with `asyncio`. 
+ +### Structure + +``` +app/core/event_dispatcher/ +├── __init__.py # Re-exports: EventDispatcher, get_event_dispatcher, EventDispatcherDep +├── enums.py # AppEvent enum (event catalog) +├── schemas.py # DispatcherSchema base, typed payloads, EVENT_PAYLOAD_MAP +├── exceptions.py # EventSchemaError, InvalidHandlerError +├── decorators.py # @event_handler decorator +├── metrics.py # Prometheus counters and histograms +└── event_dispatcher.py # EventDispatcher (subscribe, publish), get_event_dispatcher +``` + +## Event catalog + +| Event | Enum | Emitter | Payload | Listeners | +|---|---|---|---|---| +| `triage.finished` | `TRIAGE_FINISHED` | `ChatbotService` | `TriageFinishedEventSchema` | `TicketListener` — creates ticket, publishes `ticket.created` | +| `ticket.created` | `TICKET_CREATED` | `TicketListener` | `TicketCreatedEventSchema` | `ConversationListener` — opens first support conversation | +| `ticket.assignee_updated` | `TICKET_ASSIGNEE_UPDATED` | `TicketService` | `TicketAssigneeUpdatedEventSchema` | `ConversationListener` — updates participants in active conversation | +| `ticket.status_updated` | `TICKET_STATUS_UPDATED` | `TicketService` | `TicketStatusUpdatedEventSchema` | `ConversationListener` — updates message history with system message; `ChatbotService` — updates attendance status | +| `ticket.escalated` | `TICKET_ESCALATED` | `TicketService` | `TicketEscalatedEventSchema` | `ConversationListener` — opens new conversation linked to ticket | +| `ticket.closed` | `TICKET_CLOSED` | `TicketService` | `TicketClosedEventSchema` | `ConversationListener` — closes active conversation; `ChatbotListener` — closes attendance and requests evaluation | + +## Event payloads + +### `triage.finished` + +| Field | Type | Description | +|---|---|---| +| `client_id` | `UUID` | Client ID in the auth domain | +| `client_email` | `str` | Client email | +| `client_name` | `str` | Client name | +| `company_id` | `UUID \| None` | Company ID (optional) | +| `company_name` | 
`str \| None` | Company name (optional) | +| `attendance_id` | `PydanticObjectId` | Attendance/triage ID | +| `ticket_type` | `TicketType` | Ticket type (`issue`, `access`, `new_feature`) | +| `ticket_criticality` | `TicketCriticality` | Criticality (`high`, `medium`, `low`) | +| `product_name` | `str` | Product name | +| `ticket_description` | `str` | Problem description | + +### `ticket.created` + +| Field | Type | Description | +|---|---|---| +| `ticket_id` | `PydanticObjectId` | Newly created ticket ID | +| `client_id` | `UUID` | Client ID | +| `agent_id` | `UUID \| None` | Assigned agent (None if awaiting assignment) | + +### `ticket.assignee_updated` + +| Field | Type | Description | +|---|---|---| +| `ticket_id` | `PydanticObjectId` | Ticket ID | +| `new_agent_id` | `UUID` | New responsible agent | +| `reason` | `str \| None` | Reassignment reason | + +### `ticket.status_updated` + +| Field | Type | Description | +|---|---|---| +| `ticket_id` | `PydanticObjectId` | Ticket ID | +| `new_status` | `TicketStatus` | New ticket status | + +### `ticket.escalated` + +| Field | Type | Description | +|---|---|---| +| `ticket_id` | `PydanticObjectId` | Ticket ID | +| `new_agent_id` | `UUID \| None` | Agent at the new level (None if pending) | +| `new_agent_name` | `str \| None` | New agent name | +| `new_level` | `str` | Target support level | +| `transfer_reason` | `str \| None` | Escalation reason | + +### `ticket.closed` + +| Field | Type | Description | +|---|---|---| +| `ticket_id` | `PydanticObjectId` | Ticket ID | +| `triage_id` | `PydanticObjectId` | Original triage ID | +| `client_id` | `UUID` | Client ID | + +## Chained event flow + +``` +triage.finished + └─ TicketListener creates ticket + └─ publishes ticket.created + ├─ ConversationListener creates conversation + └─ (future) NotificationListener notifies agent +``` + +The conversation depends on `ticket_id`, which only exists after ticket creation. 
Therefore `ConversationListener` reacts to `ticket.created`, not `triage.finished`. + +## Rules + +- Services never import models or repositories from other domains. +- The dispatcher is fire-and-forget: `publish` schedules each handler as an `asyncio.Task` and returns immediately. +- All handlers must use the `@event_handler` decorator. `subscribe` rejects undecorated handlers with `InvalidHandlerError`. +- Handler subscription is idempotent — subscribing the same handler to the same event twice has no effect. +- The `@event_handler` decorator catches and logs exceptions automatically. An unhandled exception does not affect the emitter or other handlers. +- Listeners live in `listeners.py` inside each domain. +- Event names follow the pattern `{domain}.{past_action}`. +- Every event payload must be documented in this file when created. + +## Adding a new event + +1. Add the member to the `AppEvent` enum in `enums.py`. +2. Create the corresponding schema (inherits from `DispatcherSchema`) in `schemas.py`. +3. Add the entry to `EVENT_PAYLOAD_MAP`. +4. Document the event in this file (catalog table + payload fields). +5. Create the handler in the `listeners.py` of the reacting domain. +6. Register the handler in the domain's `register_*_listener`. 
diff --git a/tests/app/e2e/conftest.py b/tests/app/e2e/conftest.py index cdf7b48..d76e139 100644 --- a/tests/app/e2e/conftest.py +++ b/tests/app/e2e/conftest.py @@ -20,8 +20,15 @@ from app.core.dependencies import get_email_service from app.core.email.schemas import ResetPasswordEmailParams, WelcomeEmailParams from app.core.email.strategy import EmailStrategy +from app.core.event_dispatcher import AppEvent, event_handler, get_event_dispatcher +from app.core.event_dispatcher.schemas import PasswordResetEventSchema, WelcomeInviteEventSchema from app.db.mongo.dependencies import get_mongo_session from app.db.postgres.base import Base + +import app.domains.auth.models # noqa: F401 — register models with Base.metadata +import app.domains.companies.models # noqa: F401 — register models with Base.metadata +import app.domains.notifications.models # noqa: F401 — register models with Base.metadata +import app.domains.products.models # noqa: F401 — register models with Base.metadata from app.db.postgres.dependencies import get_postgres_session from app.domains.auth.entities import UserWithRoles from app.main import create_app @@ -164,6 +171,8 @@ def fake_email() -> FakeEmailStrategy: @pytest.fixture def app(fake_email: FakeEmailStrategy) -> FastAPI: + # Fresh dispatcher per test so handlers don't bleed across tests + get_event_dispatcher.cache_clear() application = create_app() application.dependency_overrides[get_email_service] = lambda: fake_email return application @@ -172,6 +181,7 @@ def app(fake_email: FakeEmailStrategy) -> FastAPI: @pytest.fixture async def client( app: FastAPI, + fake_email: FakeEmailStrategy, db_session: AsyncSession, mongo_db_conn: AsyncGenerator[AsyncIOMotorDatabase[dict[str,Any]], None] ) -> AsyncGenerator[AsyncClient, None]: @@ -188,11 +198,50 @@ async def _override_mongo() -> AsyncGenerator[AsyncIOMotorDatabase[dict[str,Any] transport = ASGITransport(app=app) async with AsyncClient(transport=transport, base_url="http://test") as ac: + # Lifespan 
has run; subscribe test capture handlers on the fresh dispatcher + _register_email_capture(get_event_dispatcher(), fake_email) yield ac app.dependency_overrides.clear() +def _register_email_capture(dispatcher: Any, fake_email: FakeEmailStrategy) -> None: + """Subscribe lightweight handlers that feed event data into FakeEmailStrategy.""" + + @event_handler(WelcomeInviteEventSchema) + async def _capture_welcome(schema: WelcomeInviteEventSchema) -> None: + cfg = get_settings() + base_url = ( + cfg.WEB_FRONTEND_URL + if ("agent" in schema.roles or "admin" in schema.roles) + else cfg.MOBILE_FRONTEND_URL + ) + params = WelcomeEmailParams( + user_name=schema.user_name, + user_email=schema.user_email, + one_time_password=schema.one_time_password, + login_url=f"{base_url}/login?token={schema.raw_token}", + ) + await fake_email.send_welcome_email(schema.user_email, params) + + @event_handler(PasswordResetEventSchema) + async def _capture_reset(schema: PasswordResetEventSchema) -> None: + cfg = get_settings() + base_url = ( + cfg.WEB_FRONTEND_URL + if ("agent" in schema.roles or "admin" in schema.roles) + else cfg.MOBILE_FRONTEND_URL + ) + params = ResetPasswordEmailParams( + user_email=schema.user_email, + reset_url=f"{base_url}/reset-password?token={schema.raw_token}", + ) + await fake_email.send_reset_email(schema.user_email, params) + + dispatcher.subscribe(AppEvent.USER_WELCOME_INVITE, _capture_welcome) + dispatcher.subscribe(AppEvent.USER_PASSWORD_RESET, _capture_reset) + + # ──────────────────────────────────────────────────────── # Seed permissions + admin role for permission-protected endpoints # ──────────────────────────────────────────────────────── diff --git a/tests/app/e2e/domains/auth/test_user_routes.py b/tests/app/e2e/domains/auth/test_user_routes.py index 219abee..497669b 100644 --- a/tests/app/e2e/domains/auth/test_user_routes.py +++ b/tests/app/e2e/domains/auth/test_user_routes.py @@ -106,6 +106,96 @@ async def test_update_user(self, client: AsyncClient, 
auth: AuthActions) -> None assert r.status_code == 200 assert r.json()["data"]["name"] == "Updated Name" + # ── Deactivate ────────────────────────────────────── + + @pytest.mark.asyncio + async def test_deactivate_user(self, client: AsyncClient, auth: AuthActions) -> None: + tokens = await auth.register_and_login_admin( + email="deactadm@test.com", username="deactadm" + ) + headers = auth.auth_headers(tokens["access_token"]) + + target = await auth.register(email="deacttarget@test.com", username="deacttarget") + target_id = target["id"] + + r = await client.patch(f"/api/users/{target_id}/deactivate", headers=headers) + assert r.status_code == 200 + data = r.json()["data"] + assert data["id"] == target_id + assert data["is_active"] is False + assert "password_hash" not in data + + @pytest.mark.asyncio + async def test_deactivate_user_is_idempotent( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="deactidemadm@test.com", username="deactidemadm" + ) + headers = auth.auth_headers(tokens["access_token"]) + + target = await auth.register( + email="deactidemtarget@test.com", username="deactidemtgt" + ) + target_id = target["id"] + + first = await client.patch(f"/api/users/{target_id}/deactivate", headers=headers) + assert first.status_code == 200 + assert first.json()["data"]["is_active"] is False + + second = await client.patch(f"/api/users/{target_id}/deactivate", headers=headers) + assert second.status_code == 200 + assert second.json()["data"]["is_active"] is False + + @pytest.mark.asyncio + async def test_deactivate_user_not_found( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="deactnf@test.com", username="deactnf" + ) + headers = auth.auth_headers(tokens["access_token"]) + + r = await client.patch( + "/api/users/00000000-0000-0000-0000-000000000000/deactivate", + headers=headers, + ) + assert r.status_code == 404 + + 
@pytest.mark.asyncio + async def test_deactivate_user_invalid_uuid( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="deactbad@test.com", username="deactbad" + ) + headers = auth.auth_headers(tokens["access_token"]) + + r = await client.patch("/api/users/not-a-uuid/deactivate", headers=headers) + assert r.status_code == 422 + + @pytest.mark.asyncio + async def test_deactivate_user_requires_auth(self, client: AsyncClient) -> None: + r = await client.patch( + "/api/users/00000000-0000-0000-0000-000000000000/deactivate" + ) + assert r.status_code == 403 + + @pytest.mark.asyncio + async def test_deactivate_user_requires_permission( + self, client: AsyncClient, auth: AuthActions + ) -> None: + regular = await auth.register_and_login( + email="deactreg@test.com", username="deactreg" + ) + headers = auth.auth_headers(regular["access_token"]) + + me_r = await client.get("/api/auth/me", headers=headers) + user_id = me_r.json()["data"]["id"] + + r = await client.patch(f"/api/users/{user_id}/deactivate", headers=headers) + assert r.status_code == 403 + # ── Role assignment ───────────────────────────────── @pytest.mark.asyncio @@ -146,6 +236,224 @@ async def test_add_roles_empty_list(self, client: AsyncClient, auth: AuthActions ) assert r.status_code == 400 + # ── Update roles (PATCH) ──────────────────────────── + + @pytest.mark.asyncio + async def test_update_user_roles_add_and_remove( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="rolepatch@test.com", username="rolepatch" + ) + headers = auth.auth_headers(tokens["access_token"]) + + role_a = ( + await client.post("/api/roles/", json={"name": "patch_a"}, headers=headers) + ).json()["data"] + role_b = ( + await client.post("/api/roles/", json={"name": "patch_b"}, headers=headers) + ).json()["data"] + + me_r = await client.get("/api/auth/me", headers=headers) + user_id = 
me_r.json()["data"]["id"] + + await client.post( + f"/api/users/{user_id}/roles", + json={"role_ids": [role_a["id"]]}, + headers=headers, + ) + + r = await client.patch( + f"/api/users/{user_id}/roles", + json={"add_role_ids": [role_b["id"]], "remove_role_ids": [role_a["id"]]}, + headers=headers, + ) + assert r.status_code == 200 + + role_ids = {role["id"] for role in r.json()["data"]["roles"]} + assert role_b["id"] in role_ids + assert role_a["id"] not in role_ids + + @pytest.mark.asyncio + async def test_update_user_roles_empty_payload( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="emptypatch@test.com", username="emptypatch" + ) + headers = auth.auth_headers(tokens["access_token"]) + + me_r = await client.get("/api/auth/me", headers=headers) + user_id = me_r.json()["data"]["id"] + + r = await client.patch( + f"/api/users/{user_id}/roles", + json={"add_role_ids": [], "remove_role_ids": []}, + headers=headers, + ) + assert r.status_code == 400 + + @pytest.mark.asyncio + async def test_update_user_roles_intersection( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="interpatch@test.com", username="interpatch" + ) + headers = auth.auth_headers(tokens["access_token"]) + + role_r = await client.post( + "/api/roles/", json={"name": "interpatchrole"}, headers=headers + ) + role_id = role_r.json()["data"]["id"] + + me_r = await client.get("/api/auth/me", headers=headers) + user_id = me_r.json()["data"]["id"] + + r = await client.patch( + f"/api/users/{user_id}/roles", + json={"add_role_ids": [role_id], "remove_role_ids": [role_id]}, + headers=headers, + ) + assert r.status_code == 422 + + @pytest.mark.asyncio + async def test_update_user_roles_exceeds_limit( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="limitpatch@test.com", username="limitpatch" + ) + headers = 
auth.auth_headers(tokens["access_token"]) + + me_r = await client.get("/api/auth/me", headers=headers) + user_id = me_r.json()["data"]["id"] + + r = await client.patch( + f"/api/users/{user_id}/roles", + json={"add_role_ids": list(range(1, 12)), "remove_role_ids": []}, + headers=headers, + ) + assert r.status_code == 422 + + @pytest.mark.asyncio + async def test_update_user_roles_unknown_role( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="unkrolepatch@test.com", username="unkrolepatch" + ) + headers = auth.auth_headers(tokens["access_token"]) + + me_r = await client.get("/api/auth/me", headers=headers) + user_id = me_r.json()["data"]["id"] + + r = await client.patch( + f"/api/users/{user_id}/roles", + json={"add_role_ids": [999999], "remove_role_ids": []}, + headers=headers, + ) + assert r.status_code == 404 + + @pytest.mark.asyncio + async def test_update_user_roles_unknown_user( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="unkupatch@test.com", username="unkupatch" + ) + headers = auth.auth_headers(tokens["access_token"]) + + role_r = await client.post( + "/api/roles/", json={"name": "ghostpatchrole"}, headers=headers + ) + role_id = role_r.json()["data"]["id"] + + r = await client.patch( + "/api/users/00000000-0000-0000-0000-000000000000/roles", + json={"add_role_ids": [role_id], "remove_role_ids": []}, + headers=headers, + ) + assert r.status_code == 404 + + @pytest.mark.asyncio + async def test_update_user_roles_dedupes_input( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="duppatch@test.com", username="duppatch" + ) + headers = auth.auth_headers(tokens["access_token"]) + + role_r = await client.post( + "/api/roles/", json={"name": "duppatchrole"}, headers=headers + ) + role_id = role_r.json()["data"]["id"] + + me_r = await client.get("/api/auth/me", 
headers=headers) + user_id = me_r.json()["data"]["id"] + + r = await client.patch( + f"/api/users/{user_id}/roles", + json={"add_role_ids": [role_id, role_id], "remove_role_ids": []}, + headers=headers, + ) + assert r.status_code == 200 + role_ids = [role["id"] for role in r.json()["data"]["roles"]] + assert role_ids.count(role_id) == 1 + + # ── Remove roles (DELETE) ─────────────────────────── + + @pytest.mark.asyncio + async def test_remove_user_roles(self, client: AsyncClient, auth: AuthActions) -> None: + tokens = await auth.register_and_login_admin( + email="roledel@test.com", username="roledel" + ) + headers = auth.auth_headers(tokens["access_token"]) + + role_r = await client.post("/api/roles/", json={"name": "delrole"}, headers=headers) + role_id = role_r.json()["data"]["id"] + + me_r = await client.get("/api/auth/me", headers=headers) + user_id = me_r.json()["data"]["id"] + + await client.post( + f"/api/users/{user_id}/roles", + json={"role_ids": [role_id]}, + headers=headers, + ) + + r = await client.request( + "DELETE", + f"/api/users/{user_id}/roles", + json={"role_ids": [role_id]}, + headers=headers, + ) + assert r.status_code == 200 + + role_ids = {role["id"] for role in r.json()["data"]["roles"]} + assert role_id not in role_ids + + @pytest.mark.asyncio + async def test_remove_user_roles_empty_list( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="emroldel@test.com", username="emroldel" + ) + headers = auth.auth_headers(tokens["access_token"]) + + me_r = await client.get("/api/auth/me", headers=headers) + user_id = me_r.json()["data"]["id"] + + r = await client.request( + "DELETE", + f"/api/users/{user_id}/roles", + json={"role_ids": []}, + headers=headers, + ) + assert r.status_code == 400 + # ── Auth guard ────────────────────────────────────── @pytest.mark.asyncio diff --git a/tests/app/e2e/domains/chatbot/test_chatbot_routes.py 
b/tests/app/e2e/domains/chatbot/test_chatbot_routes.py index c7453d9..d01e277 100644 --- a/tests/app/e2e/domains/chatbot/test_chatbot_routes.py +++ b/tests/app/e2e/domains/chatbot/test_chatbot_routes.py @@ -49,13 +49,17 @@ async def test_create_triage_success_and_persists_attendance( assert body["meta"]["success"] is True triage_id = body["data"]["triage_id"] assert triage_id + assert body["data"]["step_id"] == "step_a" + assert body["data"]["input"]["mode"] == "quick_replies" + assert len(body["data"]["input"]["quick_replies"]) > 0 stored = await mongo_db_conn["atendimentos"].find_one({"_id": ObjectId(triage_id)}) assert stored is not None assert str(stored["_id"]) == triage_id assert stored["status"] == "opened" assert stored["end_date"] is None - assert stored["triage"] == [] + assert len(stored["triage"]) == 1 + assert stored["triage"][0]["step"] == "A" assert stored["result"] is None assert stored["evaluation"] is None assert stored["client"]["id"] == str(user.id) @@ -170,3 +174,107 @@ async def test_webhook_rejects_both_answer_text_and_answer_value( assert response.status_code == 422 assert response.json()["detail"] == "Request validation failed" + + @pytest.mark.asyncio + async def test_get_attendance_not_found_raises_404( + self, + client: AsyncClient, + auth: AuthActions, + ) -> None: + tokens = await auth.register_and_login( + email="chatbot_get_notfound@test.com", + username="chatbotgetnotfound", + ) + fake_id = str(ObjectId()) + + response = await client.get( + f"/api/chatbot/{fake_id}", + headers=auth.auth_headers(tokens["access_token"]), + ) + + assert response.status_code == 404 + body = response.json() + assert body["title"] == "Attendance Not Found" + assert fake_id in body["detail"] + + @pytest.mark.asyncio + async def test_set_evaluation_attendance_not_found_raises_404( + self, + client: AsyncClient, + auth: AuthActions, + ) -> None: + tokens = await auth.register_and_login( + email="chatbot_eval_notfound@test.com", + 
username="chatbotevalnotfound", + ) + fake_id = str(ObjectId()) + + response = await client.post( + f"/api/chatbot/{fake_id}/evaluation", + headers=auth.auth_headers(tokens["access_token"]), + json={"rating": 5}, + ) + + assert response.status_code == 404 + body = response.json() + assert body["title"] == "Attendance Not Found" + assert fake_id in body["detail"] + + @pytest.mark.asyncio + async def test_set_evaluation_on_open_attendance_raises_409( + self, + client: AsyncClient, + auth: AuthActions, + ) -> None: + tokens = await auth.register_and_login( + email="chatbot_eval_notfinished@test.com", + username="chatbotevalnotfinished", + ) + create_response = await client.post( + "/api/chatbot/", + headers=auth.auth_headers(tokens["access_token"]), + ) + triage_id = create_response.json()["data"]["triage_id"] + + response = await client.post( + f"/api/chatbot/{triage_id}/evaluation", + headers=auth.auth_headers(tokens["access_token"]), + json={"rating": 5}, + ) + + assert response.status_code == 409 + body = response.json() + assert body["title"] == "Attendance Not Finished" + + @pytest.mark.asyncio + async def test_set_evaluation_already_evaluated_raises_409( + self, + client: AsyncClient, + auth: AuthActions, + mongo_db_conn: AsyncIOMotorDatabase[dict[str, Any]], + ) -> None: + tokens = await auth.register_and_login( + email="chatbot_eval_duplicate@test.com", + username="chatbotevalduplicate", + ) + triage_id = str(ObjectId()) + await mongo_db_conn["atendimentos"].insert_one({ + "_id": ObjectId(triage_id), + "status": "finished", + "evaluation": {"rating": 4}, + "start_date": "2026-01-01T00:00:00", + "end_date": "2026-01-01T01:00:00", + "triage": [], + "result": {"type": "Resolved", "closure_message": "Resolved."}, + "client": {"id": str(uuid4()), "name": "Test User", "email": "test@test.com"}, + }) + + response = await client.post( + f"/api/chatbot/{triage_id}/evaluation", + headers=auth.auth_headers(tokens["access_token"]), + json={"rating": 5}, + ) + + assert 
response.status_code == 409 + body = response.json() + assert body["title"] == "Attendance Already Evaluated" diff --git a/tests/app/e2e/domains/companies/__init__.py b/tests/app/e2e/domains/companies/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/app/e2e/domains/companies/test_company_routes.py b/tests/app/e2e/domains/companies/test_company_routes.py new file mode 100644 index 0000000..4c267e5 --- /dev/null +++ b/tests/app/e2e/domains/companies/test_company_routes.py @@ -0,0 +1,619 @@ +"""End-to-end tests for the /api/companies endpoints (CRUD + relationships).""" + +from uuid import uuid4 + +import pytest +from httpx import AsyncClient + +from tests.app.e2e.conftest import AuthActions + + +def _tax_id() -> str: + return uuid4().hex[:14] + + +def _legal_name(prefix: str = "Acme") -> str: + return f"{prefix} {uuid4().hex[:8]} LTDA" + + +async def _create_company( + client: AsyncClient, headers: dict[str, str], **overrides: object +) -> dict[str, object]: + payload = { + "legal_name": _legal_name(), + "trade_name": "Acme", + "tax_id": _tax_id(), + } + payload.update(overrides) + r = await client.post("/api/companies/", json=payload, headers=headers) + assert r.status_code == 201, f"Create company failed: {r.text}" + return r.json()["data"] + + +class TestCompaniesCRUD: + """Tests for /api/companies/ CRUD endpoints.""" + + # ── Create ────────────────────────────────────────── + + @pytest.mark.asyncio + async def test_create_company( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="ccreate@test.com", username="ccreate" + ) + headers = auth.auth_headers(tokens["access_token"]) + + legal_name = _legal_name() + tax_id = _tax_id() + r = await client.post( + "/api/companies/", + json={ + "legal_name": legal_name, + "trade_name": "Acme", + "tax_id": tax_id, + }, + headers=headers, + ) + assert r.status_code == 201, r.text + data = r.json()["data"] + assert data["id"] is not None 
+ assert data["legal_name"] == legal_name + assert data["tax_id"] == tax_id + assert data["trade_name"] == "Acme" + assert "created_at" in data + + @pytest.mark.asyncio + async def test_create_company_normalizes_tax_id( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="cnorm@test.com", username="cnorm" + ) + headers = auth.auth_headers(tokens["access_token"]) + + r = await client.post( + "/api/companies/", + json={ + "legal_name": _legal_name(), + "trade_name": "Acme", + "tax_id": "12.345.678/0001-90", + }, + headers=headers, + ) + assert r.status_code == 201 + assert r.json()["data"]["tax_id"] == "12345678000190" + + @pytest.mark.asyncio + async def test_create_company_duplicate_tax_id( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="cdup@test.com", username="cdup" + ) + headers = auth.auth_headers(tokens["access_token"]) + + tax_id = _tax_id() + await _create_company(client, headers, tax_id=tax_id) + + r = await client.post( + "/api/companies/", + json={ + "legal_name": _legal_name("Other"), + "trade_name": "Other", + "tax_id": tax_id, + }, + headers=headers, + ) + assert r.status_code == 409 + + @pytest.mark.asyncio + async def test_create_company_invalid_payload( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="cinv@test.com", username="cinv" + ) + headers = auth.auth_headers(tokens["access_token"]) + + r = await client.post( + "/api/companies/", + json={"legal_name": "Ab", "trade_name": "A", "tax_id": "123"}, + headers=headers, + ) + assert r.status_code == 422 + + # ── Read ──────────────────────────────────────────── + + @pytest.mark.asyncio + async def test_list_companies( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="clist@test.com", username="clist" + ) + headers = 
auth.auth_headers(tokens["access_token"]) + + await _create_company(client, headers) + await _create_company(client, headers) + + r = await client.get("/api/companies/", headers=headers) + assert r.status_code == 200 + data = r.json()["data"] + assert data["total"] >= 2 + assert isinstance(data["items"], list) + assert data["page"] == 1 + + @pytest.mark.asyncio + async def test_list_companies_pagination( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="cpag@test.com", username="cpag" + ) + headers = auth.auth_headers(tokens["access_token"]) + + for _ in range(3): + await _create_company(client, headers) + + r = await client.get( + "/api/companies/", params={"page": 1, "limit": 2}, headers=headers + ) + assert r.status_code == 200 + data = r.json()["data"] + assert data["limit"] == 2 + assert len(data["items"]) <= 2 + + @pytest.mark.asyncio + async def test_get_company_by_id( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="cget@test.com", username="cget" + ) + headers = auth.auth_headers(tokens["access_token"]) + + created = await _create_company(client, headers) + + r = await client.get(f"/api/companies/{created['id']}", headers=headers) + assert r.status_code == 200 + assert r.json()["data"]["id"] == created["id"] + + @pytest.mark.asyncio + async def test_get_company_not_found( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="cnf@test.com", username="cnf" + ) + headers = auth.auth_headers(tokens["access_token"]) + + r = await client.get( + "/api/companies/00000000-0000-0000-0000-000000000000", headers=headers + ) + assert r.status_code == 404 + + # ── Update (PATCH) ────────────────────────────────── + + @pytest.mark.asyncio + async def test_patch_company( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( 
+ email="cpatch@test.com", username="cpatch" + ) + headers = auth.auth_headers(tokens["access_token"]) + + created = await _create_company(client, headers) + + r = await client.patch( + f"/api/companies/{created['id']}", + json={"trade_name": "Renamed Trade"}, + headers=headers, + ) + assert r.status_code == 200 + data = r.json()["data"] + assert data["trade_name"] == "Renamed Trade" + assert data["legal_name"] == created["legal_name"] + + @pytest.mark.asyncio + async def test_patch_company_empty_payload_rejected( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="cpatchemp@test.com", username="cpatchemp" + ) + headers = auth.auth_headers(tokens["access_token"]) + + created = await _create_company(client, headers) + + r = await client.patch( + f"/api/companies/{created['id']}", json={}, headers=headers + ) + assert r.status_code == 422 + + @pytest.mark.asyncio + async def test_patch_company_not_found( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="cpatchnf@test.com", username="cpatchnf" + ) + headers = auth.auth_headers(tokens["access_token"]) + + r = await client.patch( + "/api/companies/00000000-0000-0000-0000-000000000000", + json={"trade_name": "Acme"}, + headers=headers, + ) + assert r.status_code == 404 + + # ── Replace (PUT) ─────────────────────────────────── + + @pytest.mark.asyncio + async def test_put_company_overwrites_all_fields( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="cput@test.com", username="cput" + ) + headers = auth.auth_headers(tokens["access_token"]) + + created = await _create_company(client, headers) + new_legal = _legal_name("Replaced") + new_tax = _tax_id() + + r = await client.put( + f"/api/companies/{created['id']}", + json={ + "legal_name": new_legal, + "trade_name": "Replaced", + "tax_id": new_tax, + }, + headers=headers, + ) + 
assert r.status_code == 200 + data = r.json()["data"] + assert data["legal_name"] == new_legal + assert data["tax_id"] == new_tax + + @pytest.mark.asyncio + async def test_put_company_duplicate_tax_id( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="cputdup@test.com", username="cputdup" + ) + headers = auth.auth_headers(tokens["access_token"]) + + existing = await _create_company(client, headers) + target = await _create_company(client, headers) + + r = await client.put( + f"/api/companies/{target['id']}", + json={ + "legal_name": _legal_name(), + "trade_name": "Acme", + "tax_id": existing["tax_id"], + }, + headers=headers, + ) + assert r.status_code == 409 + + # ── Delete ────────────────────────────────────────── + + @pytest.mark.asyncio + async def test_soft_delete_company( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="cdel@test.com", username="cdel" + ) + headers = auth.auth_headers(tokens["access_token"]) + + created = await _create_company(client, headers) + + r = await client.delete(f"/api/companies/{created['id']}", headers=headers) + assert r.status_code == 200 + + # Após soft delete, GET retorna 404 + get_r = await client.get( + f"/api/companies/{created['id']}", headers=headers + ) + assert get_r.status_code == 404 + + @pytest.mark.asyncio + async def test_soft_delete_company_idempotent_returns_404( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="cdelidem@test.com", username="cdelidem" + ) + headers = auth.auth_headers(tokens["access_token"]) + + created = await _create_company(client, headers) + await client.delete(f"/api/companies/{created['id']}", headers=headers) + + r = await client.delete(f"/api/companies/{created['id']}", headers=headers) + assert r.status_code == 404 + + # ── Auth guard ────────────────────────────────────── + + 
@pytest.mark.asyncio + async def test_companies_require_auth(self, client: AsyncClient) -> None: + r = await client.get("/api/companies/") + assert r.status_code == 403 + + r = await client.post( + "/api/companies/", + json={ + "legal_name": _legal_name(), + "trade_name": "Acme", + "tax_id": _tax_id(), + }, + ) + assert r.status_code == 403 + + +class TestCompanyProducts: + """Tests for /api/companies/{id}/products endpoints.""" + + @pytest.mark.asyncio + async def test_add_products_to_company( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="cprod@test.com", username="cprod" + ) + headers = auth.auth_headers(tokens["access_token"]) + + company = await _create_company(client, headers) + product = await client.post( + "/api/products/", + json={"name": f"P {uuid4().hex[:6]}", "description": "Initial desc"}, + headers=headers, + ) + assert product.status_code == 201 + product_id = product.json()["data"]["id"] + + r = await client.post( + f"/api/companies/{company['id']}/products", + json={"product_ids": [product_id]}, + headers=headers, + ) + assert r.status_code == 201 + + @pytest.mark.asyncio + async def test_add_products_to_unknown_company( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="cproduc@test.com", username="cproduc" + ) + headers = auth.auth_headers(tokens["access_token"]) + + r = await client.post( + "/api/companies/00000000-0000-0000-0000-000000000000/products", + json={"product_ids": [1]}, + headers=headers, + ) + assert r.status_code == 404 + + @pytest.mark.asyncio + async def test_add_unknown_products_returns_404( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="cprodunk@test.com", username="cprodunk" + ) + headers = auth.auth_headers(tokens["access_token"]) + + company = await _create_company(client, headers) + + r = await client.post( + 
f"/api/companies/{company['id']}/products", + json={"product_ids": [9_999_999]}, + headers=headers, + ) + assert r.status_code == 404 + + @pytest.mark.asyncio + async def test_remove_single_product_from_company( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="cprodrm@test.com", username="cprodrm" + ) + headers = auth.auth_headers(tokens["access_token"]) + + company = await _create_company(client, headers) + product = ( + await client.post( + "/api/products/", + json={"name": f"P {uuid4().hex[:6]}", "description": "Initial desc"}, + headers=headers, + ) + ).json()["data"] + await client.post( + f"/api/companies/{company['id']}/products", + json={"product_ids": [product["id"]]}, + headers=headers, + ) + + r = await client.delete( + f"/api/companies/{company['id']}/products/{product['id']}", + headers=headers, + ) + assert r.status_code == 200 + + @pytest.mark.asyncio + async def test_remove_products_batch( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="cprodbatch@test.com", username="cprodbatch" + ) + headers = auth.auth_headers(tokens["access_token"]) + + company = await _create_company(client, headers) + product_ids: list[int] = [] + for _ in range(2): + res = await client.post( + "/api/products/", + json={"name": f"P {uuid4().hex[:6]}", "description": "Initial desc"}, + headers=headers, + ) + product_ids.append(res.json()["data"]["id"]) + + await client.post( + f"/api/companies/{company['id']}/products", + json={"product_ids": product_ids}, + headers=headers, + ) + + r = await client.request( + "DELETE", + f"/api/companies/{company['id']}/products", + json={"product_ids": product_ids}, + headers=headers, + ) + assert r.status_code == 200 + + +class TestCompanyUsers: + """Tests for /api/companies/{id}/users endpoints.""" + + @pytest.mark.asyncio + async def test_assign_users_to_company( + self, client: AsyncClient, auth: 
AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="cuadm@test.com", username="cuadm" + ) + headers = auth.auth_headers(tokens["access_token"]) + + company = await _create_company(client, headers) + new_user = ( + await client.post( + "/api/users/", + json={ + "email": f"member_{uuid4().hex[:6]}@test.com", + "password_hash": "hash", + "username": f"member_{uuid4().hex[:6]}", + }, + headers=headers, + ) + ).json()["data"] + + r = await client.post( + f"/api/companies/{company['id']}/users", + json={"user_ids": [new_user["id"]]}, + headers=headers, + ) + assert r.status_code == 201 + + @pytest.mark.asyncio + async def test_remove_user_from_company( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="curm@test.com", username="curm" + ) + headers = auth.auth_headers(tokens["access_token"]) + + company = await _create_company(client, headers) + member = ( + await client.post( + "/api/users/", + json={ + "email": f"rm_{uuid4().hex[:6]}@test.com", + "password_hash": "hash", + "username": f"rm_{uuid4().hex[:6]}", + }, + headers=headers, + ) + ).json()["data"] + await client.post( + f"/api/companies/{company['id']}/users", + json={"user_ids": [member["id"]]}, + headers=headers, + ) + + r = await client.delete( + f"/api/companies/{company['id']}/users/{member['id']}", headers=headers + ) + assert r.status_code == 200 + + @pytest.mark.asyncio + async def test_remove_users_batch( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="curmb@test.com", username="curmb" + ) + headers = auth.auth_headers(tokens["access_token"]) + + company = await _create_company(client, headers) + user_ids: list[str] = [] + for _ in range(2): + res = await client.post( + "/api/users/", + json={ + "email": f"b_{uuid4().hex[:6]}@test.com", + "password_hash": "hash", + "username": f"b_{uuid4().hex[:6]}", + }, + headers=headers, + ) + 
user_ids.append(res.json()["data"]["id"]) + + await client.post( + f"/api/companies/{company['id']}/users", + json={"user_ids": user_ids}, + headers=headers, + ) + + r = await client.request( + "DELETE", + f"/api/companies/{company['id']}/users", + json={"user_ids": user_ids}, + headers=headers, + ) + assert r.status_code == 200 + + @pytest.mark.asyncio + async def test_list_company_users_excludes_password_hash( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="culist@test.com", username="culist" + ) + headers = auth.auth_headers(tokens["access_token"]) + + company = await _create_company(client, headers) + member = ( + await client.post( + "/api/users/", + json={ + "email": f"l_{uuid4().hex[:6]}@test.com", + "password_hash": "hash", + "username": f"l_{uuid4().hex[:6]}", + }, + headers=headers, + ) + ).json()["data"] + await client.post( + f"/api/companies/{company['id']}/users", + json={"user_ids": [member["id"]]}, + headers=headers, + ) + + r = await client.get( + f"/api/companies/{company['id']}/users", headers=headers + ) + assert r.status_code == 200 + data = r.json()["data"] + assert data["total"] == 1 + assert len(data["items"]) == 1 + for user in data["items"]: + assert "password_hash" not in user diff --git a/tests/app/e2e/domains/live_chat/test_conversation_routes.py b/tests/app/e2e/domains/live_chat/test_conversation_routes.py index 5850794..3189d65 100644 --- a/tests/app/e2e/domains/live_chat/test_conversation_routes.py +++ b/tests/app/e2e/domains/live_chat/test_conversation_routes.py @@ -1,5 +1,5 @@ from typing import Any -from uuid import uuid4 +from uuid import UUID, uuid4 import pytest import pytest_asyncio @@ -402,5 +402,262 @@ async def test_get_conversations_from_client_different_tickets( returned_tickets = {c["ticket_id"] for c in data} assert set(str(t) for t in ticket_ids) == returned_tickets - +async def _seed_conversation( + client: AsyncClient, + auth: AuthActions, + 
admin_token: str, + contents: list[str], + ticket_id: PydanticObjectId | None = None, + client_id: Any = None, + agent_id: Any = None, + sequential_index: int = 0, +) -> PydanticObjectId: + dto = CreateConversationDTO( + ticket_id=ticket_id or PydanticObjectId(), + agent_id=agent_id if agent_id is not None else uuid4(), + client_id=client_id or uuid4(), + sequential_index=sequential_index, + ) + r = await client.post( + "/api/conversations/", + json=dto.model_dump(mode="json"), + headers=auth.auth_headers(admin_token), + ) + assert r.status_code == 201, r.text + conv_id = PydanticObjectId(r.json()["data"]["id"]) + + conv = await Conversation.get(conv_id) + assert conv is not None + sender = client_id if client_id is not None else conv.client_id + for content in contents: + msg = ChatMessage.create(conv_id, sender, "text", content) + await conv.update({"$push": {"messages": msg.model_dump()}}) + return conv_id + + +class TestConversationSearch: + @pytest.fixture + async def admin_user(self, auth: AuthActions) -> tuple[UserWithRoles, str]: + tokens = await auth.register_and_login_admin( + email="search_admin@test.com", username="searchadm" + ) + user = await auth.me(tokens["access_token"]) + return user, tokens["access_token"] + + @pytest.mark.asyncio + async def test_admin_finds_conversation_by_message_content( + self, client: AsyncClient, auth: AuthActions, admin_user: tuple[UserWithRoles, str] + ) -> None: + match_id = await _seed_conversation( + client, auth, admin_user[1], ["preciso de ajuda com o boleto"] + ) + await _seed_conversation(client, auth, admin_user[1], ["nada relacionado"]) + + r = await client.get( + "/api/conversations/search", + params={"search_query": "boleto"}, + headers=auth.auth_headers(admin_user[1]), + ) + assert r.status_code == 200 + data = r.json()["data"] + assert len(data) == 1 + assert data[0]["id"] == str(match_id) + + @pytest.mark.asyncio + async def test_search_picks_highest_match_score_per_ticket( + self, client: AsyncClient, 
auth: AuthActions, admin_user: tuple[UserWithRoles, str] + ) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + best_id = await _seed_conversation( + client, auth, admin_user[1], + [ + "primeiro contato sobre reembolso", + "ainda discutindo reembolso", + "novo pedido de reembolso registrado", + ], + ticket_id=ticket_id, client_id=client_id, sequential_index=0, + ) + await _seed_conversation( + client, auth, admin_user[1], + ["apenas uma menção a reembolso aqui"], + ticket_id=ticket_id, client_id=client_id, sequential_index=1, + ) + + r = await client.get( + "/api/conversations/search", + params={"search_query": "reembolso"}, + headers=auth.auth_headers(admin_user[1]), + ) + assert r.status_code == 200 + data = r.json()["data"] + assert len(data) == 1 + assert data[0]["id"] == str(best_id) + assert data[0]["sequential_index"] == 0 + + @pytest.mark.asyncio + async def test_search_tiebreaker_prefers_most_recent_per_ticket( + self, client: AsyncClient, auth: AuthActions, admin_user: tuple[UserWithRoles, str] + ) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + await _seed_conversation( + client, auth, admin_user[1], + ["primeira menção ao reembolso"], + ticket_id=ticket_id, client_id=client_id, sequential_index=0, + ) + latest_id = await _seed_conversation( + client, auth, admin_user[1], + ["nova mensagem sobre reembolso"], + ticket_id=ticket_id, client_id=client_id, sequential_index=1, + ) + + r = await client.get( + "/api/conversations/search", + params={"search_query": "reembolso"}, + headers=auth.auth_headers(admin_user[1]), + ) + assert r.status_code == 200 + data = r.json()["data"] + assert len(data) == 1 + assert data[0]["id"] == str(latest_id) + assert data[0]["sequential_index"] == 1 + + @pytest.mark.asyncio + async def test_agent_only_finds_their_conversations( + self, client: AsyncClient, auth: AuthActions, admin_user: tuple[UserWithRoles, str] + ) -> None: + agent = await auth.register_agent( + email="search_agent@test.com", 
username="searchag" + ) + agent_id = UUID(agent["id"]) + + owned_id = await _seed_conversation( + client, auth, admin_user[1], + ["cliente pediu cancelamento da fatura"], + agent_id=agent_id, + ) + await _seed_conversation( + client, auth, admin_user[1], + ["cliente pediu cancelamento da fatura"], + ) + + r = await client.get( + "/api/conversations/search", + params={"search_query": "cancelamento"}, + headers=auth.auth_headers(agent["access_token"]), + ) + assert r.status_code == 200 + data = r.json()["data"] + assert len(data) == 1 + assert data[0]["id"] == str(owned_id) + assert data[0]["agent_id"] == str(agent_id) + + @pytest.mark.asyncio + async def test_client_only_finds_own_conversations( + self, client: AsyncClient, auth: AuthActions, admin_user: tuple[UserWithRoles, str] + ) -> None: + regular = await auth.register_and_login( + email="search_client@test.com", username="searchcli" + ) + regular_user = await auth.me(regular["access_token"]) + + owned_id = await _seed_conversation( + client, auth, admin_user[1], + ["dúvida sobre o pedido"], + client_id=regular_user.id, + ) + await _seed_conversation( + client, auth, admin_user[1], + ["dúvida sobre o pedido"], + ) + + r = await client.get( + "/api/conversations/search", + params={"search_query": "pedido"}, + headers=auth.auth_headers(regular["access_token"]), + ) + assert r.status_code == 200 + data = r.json()["data"] + assert len(data) == 1 + assert data[0]["id"] == str(owned_id) + assert data[0]["client_id"] == str(regular_user.id) + + @pytest.mark.asyncio + async def test_search_no_results_returns_empty_list( + self, client: AsyncClient, auth: AuthActions, admin_user: tuple[UserWithRoles, str] + ) -> None: + await _seed_conversation( + client, auth, admin_user[1], ["mensagem qualquer"] + ) + + r = await client.get( + "/api/conversations/search", + params={"search_query": "inexistente"}, + headers=auth.auth_headers(admin_user[1]), + ) + assert r.status_code == 200 + assert r.json()["data"] == [] + + 
@pytest.mark.asyncio + async def test_search_is_case_insensitive( + self, client: AsyncClient, auth: AuthActions, admin_user: tuple[UserWithRoles, str] + ) -> None: + match_id = await _seed_conversation( + client, auth, admin_user[1], ["Erro no LOGIN do sistema"] + ) + + r = await client.get( + "/api/conversations/search", + params={"search_query": "login"}, + headers=auth.auth_headers(admin_user[1]), + ) + assert r.status_code == 200 + data = r.json()["data"] + assert len(data) == 1 + assert data[0]["id"] == str(match_id) + + @pytest.mark.asyncio + async def test_search_missing_query_returns_400( + self, client: AsyncClient, auth: AuthActions, admin_user: tuple[UserWithRoles, str] + ) -> None: + r = await client.get( + "/api/conversations/search", + headers=auth.auth_headers(admin_user[1]), + ) + assert r.status_code == 400 + assert "search_query" in r.json()["detail"] + + @pytest.mark.asyncio + async def test_search_whitespace_only_query_returns_400( + self, client: AsyncClient, auth: AuthActions, admin_user: tuple[UserWithRoles, str] + ) -> None: + r = await client.get( + "/api/conversations/search", + params={"search_query": " "}, + headers=auth.auth_headers(admin_user[1]), + ) + assert r.status_code == 400 + assert "search_query" in r.json()["detail"] + + @pytest.mark.asyncio + async def test_search_query_too_short_returns_422( + self, client: AsyncClient, auth: AuthActions, admin_user: tuple[UserWithRoles, str] + ) -> None: + r = await client.get( + "/api/conversations/search", + params={"search_query": "abc"}, + headers=auth.auth_headers(admin_user[1]), + ) + assert r.status_code == 422 + + @pytest.mark.asyncio + async def test_search_requires_authentication( + self, client: AsyncClient + ) -> None: + r = await client.get( + "/api/conversations/search", + params={"search_query": "qualquer"}, + ) + assert r.status_code == 403 diff --git a/tests/app/e2e/domains/live_chat/test_live_chat_routes.py b/tests/app/e2e/domains/live_chat/test_live_chat_routes.py index 
5bdd88a..df03e7f 100644 --- a/tests/app/e2e/domains/live_chat/test_live_chat_routes.py +++ b/tests/app/e2e/domains/live_chat/test_live_chat_routes.py @@ -368,9 +368,8 @@ async def test_non_participant_cannot_connect( creator, creator_token = await self._register_client_user(auth) conv_id = await self._create_conversation(client, auth, creator_token, creator.id) - outsider_tokens = await auth.register_and_login_admin( - email="outsider@test.com", username="outsider" - ) + await auth.register_agent(email="outsider@test.com", username="outsider") + outsider_tokens = await auth.login(email="outsider@test.com") with pytest.raises(WebSocketDeniedError) as exc_info: async with AsyncWebSocket( @@ -381,4 +380,4 @@ async def test_non_participant_cannot_connect( pass assert exc_info.value.status == 403 - assert "not a participant" in exc_info.value.body + assert "not allowed to join" in exc_info.value.body diff --git a/tests/app/e2e/domains/products/__init__.py b/tests/app/e2e/domains/products/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/app/e2e/domains/products/test_product_routes.py b/tests/app/e2e/domains/products/test_product_routes.py new file mode 100644 index 0000000..b689e72 --- /dev/null +++ b/tests/app/e2e/domains/products/test_product_routes.py @@ -0,0 +1,420 @@ +"""End-to-end tests for the /api/products endpoints (CRUD + relationships).""" + +from uuid import uuid4 + +import pytest +from httpx import AsyncClient + +from tests.app.e2e.conftest import AuthActions + + +def _product_name(prefix: str = "Product") -> str: + return f"{prefix} {uuid4().hex[:8]}" + + +def _tax_id() -> str: + return uuid4().hex[:14] + + +def _legal_name(prefix: str = "Acme") -> str: + return f"{prefix} {uuid4().hex[:8]} LTDA" + + +async def _create_product( + client: AsyncClient, headers: dict[str, str], **overrides: object +) -> dict[str, object]: + payload = {"name": _product_name(), "description": "Initial description"} + payload.update(overrides) + r = 
await client.post("/api/products/", json=payload, headers=headers) + assert r.status_code == 201, f"Create product failed: {r.text}" + return r.json()["data"] + + +async def _create_company( + client: AsyncClient, headers: dict[str, str] +) -> dict[str, object]: + r = await client.post( + "/api/companies/", + json={ + "legal_name": _legal_name(), + "trade_name": "Acme", + "tax_id": _tax_id(), + }, + headers=headers, + ) + assert r.status_code == 201, f"Create company failed: {r.text}" + return r.json()["data"] + + +class TestProductsCRUD: + """Tests for /api/products/ CRUD endpoints.""" + + # ── Create ────────────────────────────────────────── + + @pytest.mark.asyncio + async def test_create_product( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="pcreate@test.com", username="pcreate" + ) + headers = auth.auth_headers(tokens["access_token"]) + + name = _product_name() + r = await client.post( + "/api/products/", + json={"name": name, "description": "A great product"}, + headers=headers, + ) + assert r.status_code == 201, r.text + data = r.json()["data"] + assert data["id"] is not None + assert data["name"] == name + assert data["description"] == "A great product" + assert "created_at" in data + + @pytest.mark.asyncio + async def test_create_product_invalid_payload( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="pinv@test.com", username="pinv" + ) + headers = auth.auth_headers(tokens["access_token"]) + + r = await client.post( + "/api/products/", + json={"name": "ab", "description": "x"}, + headers=headers, + ) + assert r.status_code == 422 + + # ── Read ──────────────────────────────────────────── + + @pytest.mark.asyncio + async def test_list_products( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="plist@test.com", username="plist" + ) + headers = 
auth.auth_headers(tokens["access_token"]) + + await _create_product(client, headers) + await _create_product(client, headers) + + r = await client.get("/api/products/", headers=headers) + assert r.status_code == 200 + data = r.json()["data"] + assert data["total"] >= 2 + assert isinstance(data["items"], list) + + @pytest.mark.asyncio + async def test_list_products_pagination( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="ppag@test.com", username="ppag" + ) + headers = auth.auth_headers(tokens["access_token"]) + + for _ in range(3): + await _create_product(client, headers) + + r = await client.get( + "/api/products/", params={"page": 1, "limit": 2}, headers=headers + ) + assert r.status_code == 200 + data = r.json()["data"] + assert data["limit"] == 2 + assert len(data["items"]) <= 2 + + @pytest.mark.asyncio + async def test_get_product_by_id( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="pget@test.com", username="pget" + ) + headers = auth.auth_headers(tokens["access_token"]) + + created = await _create_product(client, headers) + + r = await client.get(f"/api/products/{created['id']}", headers=headers) + assert r.status_code == 200 + assert r.json()["data"]["id"] == created["id"] + + @pytest.mark.asyncio + async def test_get_product_not_found( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="pnf@test.com", username="pnf" + ) + headers = auth.auth_headers(tokens["access_token"]) + + r = await client.get("/api/products/9999999", headers=headers) + assert r.status_code == 404 + + # ── Update (PATCH) ────────────────────────────────── + + @pytest.mark.asyncio + async def test_patch_product( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="ppatch@test.com", username="ppatch" + ) + headers = 
auth.auth_headers(tokens["access_token"]) + + created = await _create_product(client, headers) + + r = await client.patch( + f"/api/products/{created['id']}", + json={"description": "Refreshed description"}, + headers=headers, + ) + assert r.status_code == 200 + data = r.json()["data"] + assert data["description"] == "Refreshed description" + assert data["name"] == created["name"] + + @pytest.mark.asyncio + async def test_patch_product_empty_payload_rejected( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="ppatchemp@test.com", username="ppatchemp" + ) + headers = auth.auth_headers(tokens["access_token"]) + + created = await _create_product(client, headers) + + r = await client.patch( + f"/api/products/{created['id']}", json={}, headers=headers + ) + assert r.status_code == 422 + + @pytest.mark.asyncio + async def test_patch_product_not_found( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="ppatchnf@test.com", username="ppatchnf" + ) + headers = auth.auth_headers(tokens["access_token"]) + + r = await client.patch( + "/api/products/9999999", + json={"description": "Anything works"}, + headers=headers, + ) + assert r.status_code == 404 + + # ── Replace (PUT) ─────────────────────────────────── + + @pytest.mark.asyncio + async def test_put_product_overwrites_all_fields( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="pput@test.com", username="pput" + ) + headers = auth.auth_headers(tokens["access_token"]) + + created = await _create_product(client, headers) + new_name = _product_name("Replaced") + + r = await client.put( + f"/api/products/{created['id']}", + json={"name": new_name, "description": "Brand new description"}, + headers=headers, + ) + assert r.status_code == 200 + data = r.json()["data"] + assert data["name"] == new_name + assert data["description"] == 
"Brand new description" + + # ── Delete ────────────────────────────────────────── + + @pytest.mark.asyncio + async def test_soft_delete_product( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="pdel@test.com", username="pdel" + ) + headers = auth.auth_headers(tokens["access_token"]) + + created = await _create_product(client, headers) + + r = await client.delete(f"/api/products/{created['id']}", headers=headers) + assert r.status_code == 200 + + get_r = await client.get(f"/api/products/{created['id']}", headers=headers) + assert get_r.status_code == 404 + + @pytest.mark.asyncio + async def test_soft_delete_product_idempotent_returns_404( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="pdelidem@test.com", username="pdelidem" + ) + headers = auth.auth_headers(tokens["access_token"]) + + created = await _create_product(client, headers) + await client.delete(f"/api/products/{created['id']}", headers=headers) + + r = await client.delete(f"/api/products/{created['id']}", headers=headers) + assert r.status_code == 404 + + # ── Auth guard ────────────────────────────────────── + + @pytest.mark.asyncio + async def test_products_require_auth(self, client: AsyncClient) -> None: + r = await client.get("/api/products/") + assert r.status_code == 403 + + r = await client.post( + "/api/products/", + json={"name": _product_name(), "description": "Some desc"}, + ) + assert r.status_code == 403 + + +class TestProductCompanies: + """Tests for /api/products/{id}/companies endpoints.""" + + @pytest.mark.asyncio + async def test_add_companies_to_product( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="pcomp@test.com", username="pcomp" + ) + headers = auth.auth_headers(tokens["access_token"]) + + product = await _create_product(client, headers) + company = await 
_create_company(client, headers) + + r = await client.post( + f"/api/products/{product['id']}/companies", + json={"company_ids": [company["id"]]}, + headers=headers, + ) + assert r.status_code == 201 + + @pytest.mark.asyncio + async def test_add_companies_to_unknown_product( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="pcompunkp@test.com", username="pcompunkp" + ) + headers = auth.auth_headers(tokens["access_token"]) + + company = await _create_company(client, headers) + + r = await client.post( + "/api/products/9999999/companies", + json={"company_ids": [company["id"]]}, + headers=headers, + ) + assert r.status_code == 404 + + @pytest.mark.asyncio + async def test_add_unknown_companies_returns_404( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="pcompunkc@test.com", username="pcompunkc" + ) + headers = auth.auth_headers(tokens["access_token"]) + + product = await _create_product(client, headers) + + r = await client.post( + f"/api/products/{product['id']}/companies", + json={"company_ids": ["00000000-0000-0000-0000-000000000000"]}, + headers=headers, + ) + assert r.status_code == 404 + + @pytest.mark.asyncio + async def test_remove_single_company_from_product( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="pcomprm@test.com", username="pcomprm" + ) + headers = auth.auth_headers(tokens["access_token"]) + + product = await _create_product(client, headers) + company = await _create_company(client, headers) + await client.post( + f"/api/products/{product['id']}/companies", + json={"company_ids": [company["id"]]}, + headers=headers, + ) + + r = await client.delete( + f"/api/products/{product['id']}/companies/{company['id']}", + headers=headers, + ) + assert r.status_code == 200 + + @pytest.mark.asyncio + async def test_remove_companies_batch( + self, client: 
AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="pcompbatch@test.com", username="pcompbatch" + ) + headers = auth.auth_headers(tokens["access_token"]) + + product = await _create_product(client, headers) + company_ids: list[str] = [] + for _ in range(2): + company = await _create_company(client, headers) + company_ids.append(company["id"]) + + await client.post( + f"/api/products/{product['id']}/companies", + json={"company_ids": company_ids}, + headers=headers, + ) + + r = await client.request( + "DELETE", + f"/api/products/{product['id']}/companies", + json={"company_ids": company_ids}, + headers=headers, + ) + assert r.status_code == 200 + + @pytest.mark.asyncio + async def test_list_product_companies( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="pcomplist@test.com", username="pcomplist" + ) + headers = auth.auth_headers(tokens["access_token"]) + + product = await _create_product(client, headers) + company = await _create_company(client, headers) + await client.post( + f"/api/products/{product['id']}/companies", + json={"company_ids": [company["id"]]}, + headers=headers, + ) + + r = await client.get( + f"/api/products/{product['id']}/companies", headers=headers + ) + assert r.status_code == 200 + data = r.json()["data"] + assert data["total"] == 1 + assert len(data["items"]) == 1 + assert data["items"][0]["id"] == company["id"] diff --git a/tests/app/e2e/domains/ticket/test_ticket_routes.py b/tests/app/e2e/domains/ticket/test_ticket_routes.py new file mode 100644 index 0000000..32ae726 --- /dev/null +++ b/tests/app/e2e/domains/ticket/test_ticket_routes.py @@ -0,0 +1,1608 @@ +import asyncio +from typing import Any +from uuid import UUID, uuid4 + +import pytest +from beanie import PydanticObjectId +from bson import ObjectId +from httpx import AsyncClient +from motor.motor_asyncio import AsyncIOMotorDatabase +from sqlalchemy import text + 
from app.core.event_dispatcher import get_event_dispatcher
from app.core.event_dispatcher.enums import AppEvent
from app.core.event_dispatcher.schemas import (
    TicketAssigneeUpdatedEventSchema,
    TicketClosedEventSchema,
    TicketEscalatedEventSchema,
)
from app.domains.chatbot.enums import AttendanceStatus
from app.domains.chatbot.listeners import ChatbotListener
from app.domains.chatbot.repositories.chatbot_repository import ChatbotRepository
from app.domains.chatbot.services.chatbot_service import ChatbotService
from app.domains.live_chat.entities import Conversation
from app.domains.live_chat.listeners import ConversationListener
from app.domains.live_chat.repositories.conversation_repository import ConversationRepository
from app.domains.live_chat.services.conversation_service import ConversationService
from app.domains.ticket.models import Ticket
from tests.app.e2e.conftest import AuthActions


async def _create_ticket(
    client: AsyncClient,
    auth: AuthActions,
    admin_email: str,
    admin_username: str,
    client_email: str,
    client_username: str,
    product: str,
) -> tuple[dict[str, Any], dict[str, str]]:
    """Register an admin and a client user, then create one ticket as the admin.

    Returns the registered client user dict and the admin auth headers so the
    caller can keep acting on the ticket.
    """
    admin_tokens = await auth.register_and_login_admin(
        email=admin_email, username=admin_username
    )
    admin_headers = auth.auth_headers(admin_tokens["access_token"])
    client_user = await auth.register(email=client_email, username=client_username)

    ticket_payload = {
        "triage_id": "67f0c9b8e4b0b1a2c3d4e5f6",
        "type": "issue",
        "criticality": "high",
        "product": product,
        "description": "Erro ao emitir boleto",
        "chat_ids": ["67f0c9b8e4b0b1a2c3d4e5f7"],
        "client_id": client_user["id"],
    }

    create_response = await client.post(
        "/api/tickets/", json=ticket_payload, headers=admin_headers
    )
    assert create_response.status_code == 201, create_response.text

    return client_user, admin_headers


async def _create_ticket_with_payload(
    client: AsyncClient,
    headers: dict[str, str],
    payload: dict[str, Any],
) -> dict[str, Any]:
    """Create a ticket from an explicit payload and return the created resource."""
    post_response = await client.post("/api/tickets/", json=payload, headers=headers)
    assert post_response.status_code == 201, post_response.text
    body = post_response.json()
    return body["data"]


async def _list_tickets_for_client(
    client: AsyncClient,
    headers: dict[str, str],
    client_id: str,
) -> list[dict[str, Any]]:
    """Return the first page of tickets filtered by *client_id*."""
    list_response = await client.get(
        "/api/tickets/",
        params={"client_id": client_id, "page": 1, "page_size": 20},
        headers=headers,
    )
    assert list_response.status_code == 200, list_response.text
    return list_response.json()["data"]["items"]


async def _drain_background_tasks() -> None:
    """Await every unfinished asyncio task except the current one.

    Used to flush fire-and-forget event listeners before asserting on their
    side effects; exceptions are collected rather than raised.
    """
    this_task = asyncio.current_task()
    leftovers = [
        task
        for task in asyncio.all_tasks()
        if task is not this_task and not task.done()
    ]
    if leftovers:
        await asyncio.gather(*leftovers, return_exceptions=True)


async def _create_assigned_ticket(
    client: AsyncClient,
    auth: AuthActions,
    *,
    admin_email: str,
    admin_username: str,
    client_email: str,
    client_username: str,
    agent_email: str,
    agent_username: str,
    product: str,
    reason: str = "Primeira atribuicao",
) -> tuple[str, dict[str, Any], dict[str, str], dict[str, Any]]:
    """Create a ticket and immediately assign a freshly registered agent to it.

    Returns ``(ticket_id, client_user, admin_headers, agent_data)``.
    """
    client_user, admin_headers = await _create_ticket(
        client=client,
        auth=auth,
        admin_email=admin_email,
        admin_username=admin_username,
        client_email=client_email,
        client_username=client_username,
        product=product,
    )
    tickets = await _list_tickets_for_client(client, admin_headers, client_user["id"])
    ticket_id = tickets[0]["id"]
    agent_data = await auth.register_agent(email=agent_email, username=agent_username)

    assignment_response = await client.post(
        f"/api/tickets/{ticket_id}/assign",
        json={"agent_id": agent_data["id"], "reason": reason},
        headers=admin_headers,
    )
    assert assignment_response.status_code == 200, assignment_response.text

    return ticket_id, client_user, admin_headers, agent_data


def _isolate_dispatcher_handlers(monkeypatch: pytest.MonkeyPatch) -> Any:
    """Blank out the global dispatcher's subscriptions for the current test."""
    dispatcher = get_event_dispatcher()
    monkeypatch.setattr(dispatcher, "_handlers", {})
    return dispatcher


async def _finish_ticket(
    client: AsyncClient,
    ticket_id: str,
    headers: dict[str, str],
) -> dict[str, Any]:
    """Move a ticket to the ``finished`` status and return the updated resource."""
    patch_response = await client.patch(
        f"/api/tickets/{ticket_id}",
        json={"status": "finished"},
        headers=headers,
    )
    assert patch_response.status_code == 200, patch_response.text
    return patch_response.json()["data"]


async def _register_agent_with_support_level(
    auth: AuthActions,
    *,
    email: str,
    username: str,
    level: str,
) -> dict[str, Any]:
    """Register an agent and attach a support-level role (e.g. ``N2``) via raw SQL.

    The role is upserted so repeated calls with the same *level* reuse it.
    """
    agent_data = await auth.register_agent(email=email, username=username)
    upsert_role = await auth.db_session.execute(
        text(
            "INSERT INTO roles (name, description)"
            " VALUES (:name, :description)"
            " ON CONFLICT (name) DO UPDATE SET description = EXCLUDED.description"
            " RETURNING id"
        ),
        {
            "name": level,
            "description": f"Support level {level}",
        },
    )
    role_id = upsert_role.scalar_one()
    await auth.db_session.execute(
        text(
            "INSERT INTO user_roles (user_id, role_id)"
            " VALUES (:uid, :rid) ON CONFLICT DO NOTHING"
        ),
        {"uid": agent_data["id"], "rid": role_id},
    )
    await auth.db_session.flush()
    return agent_data


class TestTicketRoutes:
    """E2E tests for the /api/tickets routes."""

    @pytest.mark.asyncio
    async def test_create_ticket_uses_official_initial_status(
        self, client: AsyncClient, auth: AuthActions
    ) -> None:
        admin_tokens = await auth.register_and_login_admin(
            email="ticket-admin-create@test.com",
            username="ticketadmincreate",
        )
        admin_headers = auth.auth_headers(admin_tokens["access_token"])
        client_user = await auth.register(
            email="ticket-client-create@test.com",
            username="ticketclientcreate",
        )

        payload = {
            "triage_id": "67f0c9b8e4b0b1a2c3d4e5f6",
            "type": "issue",
            "criticality": "medium",
            "product": "Produto Status Inicial",
            "description": "Primeiro ticket oficial",
            "chat_ids": ["67f0c9b8e4b0b1a2c3d4e5f7"],
            "client_id": client_user["id"],
        }
        response = await client.post("/api/tickets/", json=payload, headers=admin_headers)
        assert response.status_code == 201
        assert response.json()["data"]["status"] == "awaiting_assignment"
@pytest.mark.asyncio + async def test_get_tickets_returns_official_paginated_shape( + self, client: AsyncClient, auth: AuthActions + ) -> None: + created_user, headers = await _create_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-page@test.com", + admin_username="ticketadminpage", + client_email="ticket-client-page@test.com", + client_username="ticketclientpage", + product="Produto Contrato Paginado", + ) + + response = await client.get( + "/api/tickets/", + params={"client_id": created_user["id"], "product": "Produto Contrato Paginado"}, + headers=headers, + ) + assert response.status_code == 200 + + data = response.json()["data"] + assert isinstance(data, dict) + assert data["page"] == 1 + assert data["page_size"] == 20 + assert data["total"] == 1 + assert len(data["items"]) == 1 + assert data["items"][0]["product"] == "Produto Contrato Paginado" + assert data["items"][0]["status"] == "awaiting_assignment" + + @pytest.mark.asyncio + async def test_get_ticket_by_id_returns_single_ticket( + self, client: AsyncClient, auth: AuthActions + ) -> None: + created_user, headers = await _create_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-byid@test.com", + admin_username="ticketadminbyid", + client_email="ticket-client-byid@test.com", + client_username="ticketclientbyid", + product="Produto Contrato ById", + ) + + list_response = await client.get( + "/api/tickets/", + params={"client_id": created_user["id"], "product": "Produto Contrato ById"}, + headers=headers, + ) + ticket_id = list_response.json()["data"]["items"][0]["id"] + + response = await client.get(f"/api/tickets/{ticket_id}", headers=headers) + assert response.status_code == 200 + assert response.json()["data"]["id"] == ticket_id + + @pytest.mark.asyncio + async def test_partial_patch_is_the_official_update_route( + self, client: AsyncClient, auth: AuthActions + ) -> None: + created_user, headers = await _create_ticket( + client=client, + auth=auth, + 
admin_email="ticket-admin-patch@test.com", + admin_username="ticketadminpatch", + client_email="ticket-client-patch@test.com", + client_username="ticketclientpatch", + product="Produto Contrato Patch", + ) + + list_response = await client.get( + "/api/tickets/", + params={"client_id": created_user["id"], "product": "Produto Contrato Patch"}, + headers=headers, + ) + ticket_id = list_response.json()["data"]["items"][0]["id"] + + response = await client.patch( + f"/api/tickets/{ticket_id}", + json={ + "status": "in_progress", + "criticality": "medium", + "description": "Chamado assumido e em andamento.", + }, + headers=headers, + ) + assert response.status_code == 200 + data = response.json()["data"] + assert data["status"] == "in_progress" + assert data["criticality"] == "medium" + assert data["description"] == "Chamado assumido e em andamento." + + @pytest.mark.asyncio + async def test_finish_ticket_publishes_ticket_closed_event( + self, client: AsyncClient, auth: AuthActions, monkeypatch: pytest.MonkeyPatch + ) -> None: + ticket_id, created_user, headers, _agent_data = await _create_assigned_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-close-event@test.com", + admin_username="ticketadmincloseevent", + client_email="ticket-client-close-event@test.com", + client_username="ticketclientcloseevent", + agent_email="ticket-agent-close-event@test.com", + agent_username="ticketagentcloseevent", + product="Produto Close Event", + ) + ticket = await Ticket.get(PydanticObjectId(ticket_id)) + assert ticket is not None + + dispatcher = _isolate_dispatcher_handlers(monkeypatch) + original_publish = dispatcher.publish + published: list[TicketClosedEventSchema] = [] + + async def spy_publish(event: AppEvent, payload: Any) -> None: + if event == AppEvent.TICKET_CLOSED: + assert isinstance(payload, TicketClosedEventSchema) + published.append(payload) + await original_publish(event, payload) + + monkeypatch.setattr(dispatcher, "publish", spy_publish) + + data = 
await _finish_ticket(client, ticket_id, headers) + + assert data["status"] == "finished" + assert len(published) == 1 + assert str(published[0].ticket_id) == ticket_id + assert published[0].triage_id == ticket.triage_id + assert str(published[0].client_id) == created_user["id"] + + @pytest.mark.asyncio + async def test_finish_ticket_redundant_status_does_not_publish_duplicate_event( + self, client: AsyncClient, auth: AuthActions, monkeypatch: pytest.MonkeyPatch + ) -> None: + ticket_id, _created_user, headers, _agent_data = await _create_assigned_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-close-idem@test.com", + admin_username="ticketadmincloseidem", + client_email="ticket-client-close-idem@test.com", + client_username="ticketclientcloseidem", + agent_email="ticket-agent-close-idem@test.com", + agent_username="ticketagentcloseidem", + product="Produto Close Idempotent", + ) + + dispatcher = _isolate_dispatcher_handlers(monkeypatch) + original_publish = dispatcher.publish + published: list[TicketClosedEventSchema] = [] + + async def spy_publish(event: AppEvent, payload: Any) -> None: + if event == AppEvent.TICKET_CLOSED: + assert isinstance(payload, TicketClosedEventSchema) + published.append(payload) + await original_publish(event, payload) + + monkeypatch.setattr(dispatcher, "publish", spy_publish) + + first_data = await _finish_ticket(client, ticket_id, headers) + assert first_data["status"] == "finished" + assert len(published) == 1 + + published.clear() + second_data = await _finish_ticket(client, ticket_id, headers) + + assert second_data["status"] == "finished" + assert published == [] + + @pytest.mark.asyncio + async def test_finish_ticket_closes_active_live_chat_conversation( + self, + client: AsyncClient, + auth: AuthActions, + mongo_db_conn: AsyncIOMotorDatabase[dict[str, Any]], + monkeypatch: pytest.MonkeyPatch, + ) -> None: + await Ticket.delete_all() + await Conversation.delete_all() + + ticket_id, created_user, headers, 
agent_data = await _create_assigned_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-close-livechat@test.com", + admin_username="ticketadmincloselivechat", + client_email="ticket-client-close-livechat@test.com", + client_username="ticketclientcloselivechat", + agent_email="ticket-agent-close-livechat@test.com", + agent_username="ticketagentcloselivechat", + product="Produto Close Live Chat", + ) + conversation = await Conversation( + ticket_id=PydanticObjectId(ticket_id), + agent_id=UUID(agent_data["id"]), + client_id=UUID(created_user["id"]), + ).insert() + assert conversation.id is not None + + dispatcher = _isolate_dispatcher_handlers(monkeypatch) + conversation_listener = ConversationListener( + ConversationService(ConversationRepository(mongo_db_conn)) + ) + dispatcher.subscribe(AppEvent.TICKET_CLOSED, conversation_listener.on_ticket_closed) + + data = await _finish_ticket(client, ticket_id, headers) + await _drain_background_tasks() + + assert data["status"] == "finished" + updated_conversation = await Conversation.get(conversation.id) + assert updated_conversation is not None + assert updated_conversation.finished_at is not None + assert updated_conversation.is_opened() is False + assert any( + message.sender_id == "System" and "encerr" in message.content.lower() + for message in updated_conversation.messages + ) + + @pytest.mark.asyncio + async def test_finish_ticket_marks_chatbot_attendance_finished_pending_evaluation( + self, + client: AsyncClient, + auth: AuthActions, + mongo_db_conn: AsyncIOMotorDatabase[dict[str, Any]], + monkeypatch: pytest.MonkeyPatch, + ) -> None: + await Ticket.delete_all() + await Conversation.delete_all() + await mongo_db_conn["atendimentos"].delete_many({}) + + ticket_id, created_user, headers, _agent_data = await _create_assigned_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-close-chatbot@test.com", + admin_username="ticketadminclosechatbot", + 
client_email="ticket-client-close-chatbot@test.com", + client_username="ticketclientclosechatbot", + agent_email="ticket-agent-close-chatbot@test.com", + agent_username="ticketagentclosechatbot", + product="Produto Close Chatbot", + ) + ticket = await Ticket.get(PydanticObjectId(ticket_id)) + assert ticket is not None + + triage_object_id = ObjectId(str(ticket.triage_id)) + await mongo_db_conn["atendimentos"].insert_one( + { + "_id": triage_object_id, + "status": AttendanceStatus.IN_PROGRESS.value, + "start_date": "2026-04-14T12:00:00+00:00", + "end_date": None, + "client": { + "id": created_user["id"], + "name": created_user["username"], + "email": created_user["email"], + "company": None, + }, + "triage": [], + "result": {"type": "Ticket", "closure_message": "Ticket criado."}, + "evaluation": None, + } + ) + + dispatcher = _isolate_dispatcher_handlers(monkeypatch) + chatbot_listener = ChatbotListener( + ChatbotService(ChatbotRepository(mongo_db_conn), dispatcher) + ) + dispatcher.subscribe(AppEvent.TICKET_CLOSED, chatbot_listener.on_ticket_closed) + + data = await _finish_ticket(client, ticket_id, headers) + await _drain_background_tasks() + + assert data["status"] == "finished" + attendance = await mongo_db_conn["atendimentos"].find_one({"_id": triage_object_id}) + assert attendance is not None + assert attendance["status"] == AttendanceStatus.FINISHED.value + assert attendance["end_date"] is not None + assert attendance["evaluation"] is None + + @pytest.mark.asyncio + async def test_assign_ticket_returns_200_and_updates_ticket_history_and_status( + self, client: AsyncClient, auth: AuthActions + ) -> None: + created_user, headers = await _create_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-stubs@test.com", + admin_username="ticketadminstubs", + client_email="ticket-client-stubs@test.com", + client_username="ticketclientstubs", + product="Produto Contrato Stubs", + ) + + list_response = await client.get( + "/api/tickets/", + 
params={"client_id": created_user["id"], "product": "Produto Contrato Stubs"}, + headers=headers, + ) + ticket_id = list_response.json()["data"]["items"][0]["id"] + + agent_data = await auth.register_agent( + email="ticket-agent-assign@test.com", + username="ticketagentassign", + ) + agent_tokens = await auth.login(email="ticket-agent-assign@test.com") + agent_user = await auth.me(agent_tokens["access_token"]) + + assign_response = await client.post( + f"/api/tickets/{ticket_id}/assign", + json={"agent_id": str(agent_user.id), "reason": "Primeira atribuicao"}, + headers=headers, + ) + assert assign_response.status_code == 200, assign_response.text + assign_data = assign_response.json()["data"] + assert assign_data["status"] == "in_progress" + assert assign_data["assigned_agent_id"] == str(agent_user.id) + assert assign_data["assigned_agent_name"] == "ticketagentassign" + assert len(assign_data["agent_history"]) == 1 + assert assign_data["agent_history"][0]["agent_id"] == agent_data["id"] + assert assign_data["agent_history"][0]["transfer_reason"] == "Primeira atribuicao" + assert assign_data["agent_history"][0]["exit_date"] is None + + ticket_response = await client.get(f"/api/tickets/{ticket_id}", headers=headers) + assert ticket_response.status_code == 200, ticket_response.text + ticket_data = ticket_response.json()["data"] + assert ticket_data["status"] == "in_progress" + assert ticket_data["assigned_agent_id"] == str(agent_user.id) + assert len(ticket_data["agent_history"]) == 1 + + @pytest.mark.asyncio + async def test_assign_ticket_returns_404_for_missing_ticket( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="ticket-admin-assign404@test.com", + username="ticketadminassign404", + ) + headers = auth.auth_headers(tokens["access_token"]) + agent_data = await auth.register_agent( + email="ticket-agent-assign404@test.com", + username="ticketagentassign404", + ) + + response = await client.post( + 
"/api/tickets/67f0c9b8e4b0b1a2c3d4e5ff/assign", + json={"agent_id": agent_data["id"], "reason": "Tentativa em ticket inexistente"}, + headers=headers, + ) + assert response.status_code == 404, response.text + + @pytest.mark.asyncio + async def test_assign_ticket_returns_404_for_missing_agent( + self, client: AsyncClient, auth: AuthActions + ) -> None: + await Ticket.delete_all() + await Conversation.delete_all() + + created_user, headers = await _create_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-missingagent@test.com", + admin_username="ticketadminmissingagent", + client_email="ticket-client-missingagent@test.com", + client_username="ticketclientmissingagent", + product="Produto Missing Agent", + ) + items = await _list_tickets_for_client(client, headers, created_user["id"]) + ticket_id = items[0]["id"] + + response = await client.post( + f"/api/tickets/{ticket_id}/assign", + json={"agent_id": str(uuid4()), "reason": "Agente inexistente"}, + headers=headers, + ) + assert response.status_code == 404, response.text + + @pytest.mark.asyncio + async def test_assign_ticket_requires_permission( + self, client: AsyncClient, auth: AuthActions + ) -> None: + created_user, admin_headers = await _create_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-assignperm@test.com", + admin_username="ticketadminassignperm", + client_email="ticket-client-assignperm@test.com", + client_username="ticketclientassignperm", + product="Produto Assign Permission", + ) + items = await _list_tickets_for_client(client, admin_headers, created_user["id"]) + ticket_id = items[0]["id"] + + user_tokens = await auth.register_and_login( + email="ticket-user-assignperm@test.com", + username="ticketuserassignperm", + ) + agent_data = await auth.register_agent( + email="ticket-agent-assignperm@test.com", + username="ticketagentassignperm", + ) + + response = await client.post( + f"/api/tickets/{ticket_id}/assign", + json={"agent_id": agent_data["id"], "reason": "Sem 
permissao"}, + headers=auth.auth_headers(user_tokens["access_token"]), + ) + assert response.status_code == 403, response.text + + @pytest.mark.asyncio + async def test_escalate_ticket_returns_200_and_moves_to_higher_level_agent( + self, client: AsyncClient, auth: AuthActions + ) -> None: + ticket_id, _created_user, headers, first_agent = await _create_assigned_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-escalate@test.com", + admin_username="ticketadminescalate", + client_email="ticket-client-escalate@test.com", + client_username="ticketclientescalate", + agent_email="ticket-agent-escalate@test.com", + agent_username="ticketagentescalate", + product="Produto Escalate N1 N2", + ) + target_agent = await _register_agent_with_support_level( + auth, + email="ticket-agent-escalate-n2@test.com", + username="ticketagentescalaten2", + level="N2", + ) + + escalate_response = await client.post( + f"/api/tickets/{ticket_id}/escalate", + json={ + "target_agent_id": target_agent["id"], + "reason": "Escalar para N2", + }, + headers=headers, + ) + assert escalate_response.status_code == 200, escalate_response.text + escalate_data = escalate_response.json()["data"] + assert escalate_data["status"] == "in_progress" + assert escalate_data["assigned_agent_id"] == target_agent["id"] + assert escalate_data["assigned_agent_name"] == "ticketagentescalaten2" + assert len(escalate_data["agent_history"]) == 2 + + previous_history = escalate_data["agent_history"][0] + current_history = escalate_data["agent_history"][1] + assert previous_history["agent_id"] == first_agent["id"] + assert previous_history["exit_date"] is not None + assert previous_history["transfer_reason"] == "Escalar para N2" + assert current_history["agent_id"] == target_agent["id"] + assert current_history["level"] == "N2" + assert current_history["exit_date"] is None + assert current_history["transfer_reason"] == "Escalar para N2" + + @pytest.mark.asyncio + async def 
test_escalate_ticket_rejects_lower_target_level( + self, client: AsyncClient, auth: AuthActions + ) -> None: + ticket_id, _created_user, headers, _agent_data = await _create_assigned_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-escalate-down@test.com", + admin_username="ticketadminescalatedown", + client_email="ticket-client-escalate-down@test.com", + client_username="ticketclientescalatedown", + agent_email="ticket-agent-escalate-down@test.com", + agent_username="ticketagentescalatedown", + product="Produto Escalate N2 N1", + ) + + ticket = await Ticket.get(PydanticObjectId(ticket_id)) + assert ticket is not None + ticket.agent_history[-1].level = "N2" + await ticket.save() + target_agent = await auth.register_agent( + email="ticket-agent-escalate-down-n1@test.com", + username="ticketagentescalatedownn1", + ) + + response = await client.post( + f"/api/tickets/{ticket_id}/escalate", + json={ + "target_agent_id": target_agent["id"], + "reason": "Tentar reduzir nivel", + }, + headers=headers, + ) + assert response.status_code == 400, response.text + + @pytest.mark.asyncio + async def test_transfer_ticket_returns_200_and_moves_active_assignment( + self, client: AsyncClient, auth: AuthActions + ) -> None: + ticket_id, _created_user, headers, first_agent = await _create_assigned_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-transfer@test.com", + admin_username="ticketadmintransfer", + client_email="ticket-client-transfer@test.com", + client_username="ticketclienttransfer", + agent_email="ticket-agent-transfer-from@test.com", + agent_username="ticketagenttransferfrom", + product="Produto Transfer Direct", + ) + target_agent = await auth.register_agent( + email="ticket-agent-transfer-to@test.com", + username="ticketagenttransferto", + ) + + transfer_response = await client.post( + f"/api/tickets/{ticket_id}/transfer", + json={"target_agent_id": target_agent["id"], "reason": "Redistribuir atendimento"}, + headers=headers, + ) + 
assert transfer_response.status_code == 200, transfer_response.text + transfer_data = transfer_response.json()["data"] + assert transfer_data["status"] == "in_progress" + assert transfer_data["assigned_agent_id"] == target_agent["id"] + assert transfer_data["assigned_agent_name"] == "ticketagenttransferto" + assert len(transfer_data["agent_history"]) == 2 + + previous_history = transfer_data["agent_history"][0] + current_history = transfer_data["agent_history"][1] + assert previous_history["agent_id"] == first_agent["id"] + assert previous_history["exit_date"] is not None + assert previous_history["transfer_reason"] == "Redistribuir atendimento" + assert current_history["agent_id"] == target_agent["id"] + assert current_history["name"] == "ticketagenttransferto" + assert current_history["level"] == previous_history["level"] + assert current_history["assignment_date"] is not None + assert current_history["exit_date"] is None + assert current_history["transfer_reason"] == "Redistribuir atendimento" + + @pytest.mark.asyncio + async def test_transfer_ticket_rejects_different_target_level( + self, client: AsyncClient, auth: AuthActions + ) -> None: + ticket_id, _created_user, headers, _first_agent = await _create_assigned_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-transfer-level@test.com", + admin_username="ticketadmintransferlevel", + client_email="ticket-client-transfer-level@test.com", + client_username="ticketclienttransferlevel", + agent_email="ticket-agent-transfer-level-from@test.com", + agent_username="ticketagenttransferlevelfrom", + product="Produto Transfer Different Level", + ) + target_agent = await _register_agent_with_support_level( + auth, + email="ticket-agent-transfer-level-n2@test.com", + username="ticketagenttransferleveln2", + level="N2", + ) + + response = await client.post( + f"/api/tickets/{ticket_id}/transfer", + json={"target_agent_id": target_agent["id"], "reason": "Tentativa N1 para N2"}, + headers=headers, + ) + assert 
response.status_code == 400, response.text + + @pytest.mark.asyncio + async def test_escalate_ticket_publishes_ticket_escalated_event_in_http_flow( + self, client: AsyncClient, auth: AuthActions, monkeypatch: pytest.MonkeyPatch + ) -> None: + ticket_id, created_user, headers, _agent_data = await _create_assigned_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-escalate-event@test.com", + admin_username="ticketadminescalateevent", + client_email="ticket-client-escalate-event@test.com", + client_username="ticketclientescalateevent", + agent_email="ticket-agent-escalate-event@test.com", + agent_username="ticketagentescalateevent", + product="Produto Event Escalate", + ) + target_agent = await _register_agent_with_support_level( + auth, + email="ticket-agent-escalate-event-n2@test.com", + username="ticketagentescalateeventn2", + level="N2", + ) + + dispatcher = get_event_dispatcher() + original_publish = dispatcher.publish + published: list[TicketEscalatedEventSchema] = [] + + async def spy_publish(event: AppEvent, payload: Any) -> None: + if event == AppEvent.TICKET_ESCALATED: + assert isinstance(payload, TicketEscalatedEventSchema) + published.append(payload) + await original_publish(event, payload) + + monkeypatch.setattr(dispatcher, "publish", spy_publish) + + response = await client.post( + f"/api/tickets/{ticket_id}/escalate", + json={ + "target_agent_id": target_agent["id"], + "reason": "Validando evento escalado", + }, + headers=headers, + ) + assert response.status_code == 200, response.text + assert len(published) == 1 + assert str(published[0].ticket_id) == ticket_id + assert str(published[0].client_id) == created_user["id"] + assert str(published[0].new_agent_id) == target_agent["id"] + assert published[0].new_agent_name == "ticketagentescalateeventn2" + assert published[0].new_level == "N2" + assert published[0].transfer_reason == "Validando evento escalado" + + @pytest.mark.asyncio + async def 
test_transfer_ticket_publishes_ticket_assignee_updated_event_in_http_flow( + self, client: AsyncClient, auth: AuthActions, monkeypatch: pytest.MonkeyPatch + ) -> None: + ticket_id, _created_user, headers, _first_agent = await _create_assigned_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-transfer-event@test.com", + admin_username="ticketadmintransferevent", + client_email="ticket-client-transfer-event@test.com", + client_username="ticketclienttransferevent", + agent_email="ticket-agent-transfer-event-from@test.com", + agent_username="ticketagenttransfereventfrom", + product="Produto Event Transfer", + ) + target_agent = await auth.register_agent( + email="ticket-agent-transfer-event-to@test.com", + username="ticketagenttransfereventto", + ) + + dispatcher = get_event_dispatcher() + original_publish = dispatcher.publish + published: list[TicketAssigneeUpdatedEventSchema] = [] + + async def spy_publish(event: AppEvent, payload: Any) -> None: + if event == AppEvent.TICKET_ASSIGNEE_UPDATED: + assert isinstance(payload, TicketAssigneeUpdatedEventSchema) + published.append(payload) + await original_publish(event, payload) + + monkeypatch.setattr(dispatcher, "publish", spy_publish) + + response = await client.post( + f"/api/tickets/{ticket_id}/transfer", + json={"target_agent_id": target_agent["id"], "reason": "Validando evento transfer"}, + headers=headers, + ) + assert response.status_code == 200, response.text + assert len(published) == 1 + assert str(published[0].ticket_id) == ticket_id + assert str(published[0].new_agent_id) == target_agent["id"] + assert published[0].reason == "Validando evento transfer" + + @pytest.mark.asyncio + async def test_escalate_and_transfer_require_permissions( + self, client: AsyncClient, auth: AuthActions + ) -> None: + ticket_id, _created_user, _admin_headers, _first_agent = await _create_assigned_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-actionsperm@test.com", + 
admin_username="ticketadminactionsperm", + client_email="ticket-client-actionsperm@test.com", + client_username="ticketclientactionsperm", + agent_email="ticket-agent-actionsperm-from@test.com", + agent_username="ticketagentactionspermfrom", + product="Produto Actions Permission", + ) + target_agent = await auth.register_agent( + email="ticket-agent-actionsperm-to@test.com", + username="ticketagentactionspermto", + ) + user_tokens = await auth.register_and_login( + email="ticket-user-actionsperm@test.com", + username="ticketuseractionsperm", + ) + user_headers = auth.auth_headers(user_tokens["access_token"]) + + escalate_response = await client.post( + f"/api/tickets/{ticket_id}/escalate", + json={ + "target_agent_id": target_agent["id"], + "reason": "Sem permissao para escalar", + }, + headers=user_headers, + ) + assert escalate_response.status_code == 403, escalate_response.text + + transfer_response = await client.post( + f"/api/tickets/{ticket_id}/transfer", + json={"target_agent_id": target_agent["id"], "reason": "Sem permissao"}, + headers=user_headers, + ) + assert transfer_response.status_code == 403, transfer_response.text + + @pytest.mark.asyncio + async def test_get_ticket_queue_requires_authentication_and_permission( + self, client: AsyncClient, auth: AuthActions + ) -> None: + unauthenticated = await client.get("/api/tickets/queue") + assert unauthenticated.status_code == 403 + + user_tokens = await auth.register_and_login( + email="ticket-user-queueperm@test.com", + username="ticketuserqueueperm", + ) + forbidden = await client.get( + "/api/tickets/queue", + headers=auth.auth_headers(user_tokens["access_token"]), + ) + assert forbidden.status_code == 403 + + @pytest.mark.asyncio + async def test_get_ticket_queue_returns_sorted_items_and_supports_filters( + self, client: AsyncClient, auth: AuthActions + ) -> None: + await Ticket.delete_all() + await Conversation.delete_all() + + tokens = await auth.register_and_login_admin( + 
email="ticket-admin-queue@test.com", + username="ticketadminqueue", + ) + headers = auth.auth_headers(tokens["access_token"]) + admin_user = await auth.me(tokens["access_token"]) + + created_user = await auth.register( + email="ticket-client-queue@test.com", + username="ticketclientqueue", + ) + + base_payload = { + "triage_id": "67f0c9b8e4b0b1a2c3d4e5f6", + "description": "Ticket para fila", + "chat_ids": ["67f0c9b8e4b0b1a2c3d4e5f7"], + "client_id": created_user["id"], + } + + await _create_ticket_with_payload( + client, + headers, + { + **base_payload, + "type": "issue", + "criticality": "low", + "product": "Fila Assigned Low", + }, + ) + + await _create_ticket_with_payload( + client, + headers, + { + **base_payload, + "type": "issue", + "criticality": "high", + "product": "Fila Assigned High", + }, + ) + + await _create_ticket_with_payload( + client, + headers, + { + **base_payload, + "type": "issue", + "criticality": "medium", + "product": "Fila Unassigned Medium", + }, + ) + + await _create_ticket_with_payload( + client, + headers, + { + **base_payload, + "type": "new_feature", + "criticality": "medium", + "product": "Fila Feature Medium", + }, + ) + + items = await _list_tickets_for_client(client, headers, created_user["id"]) + ticket_ids_by_product = {item["product"]: item["id"] for item in items} + + assign_high_response = await client.post( + f"/api/tickets/{ticket_ids_by_product['Fila Assigned High']}/assign", + json={ + "agent_id": str(admin_user.id), + "reason": "Atribuição para teste de fila", + }, + headers=headers, + ) + assert assign_high_response.status_code == 200, assign_high_response.text + + assign_low_response = await client.post( + f"/api/tickets/{ticket_ids_by_product['Fila Assigned Low']}/assign", + json={ + "agent_id": str(admin_user.id), + "reason": "Atribuição para teste de fila", + }, + headers=headers, + ) + assert assign_low_response.status_code == 200, assign_low_response.text + + queue_response = await client.get( + 
"/api/tickets/queue", + params={"page": 1, "page_size": 20}, + headers=headers, + ) + assert queue_response.status_code == 200, queue_response.text + queue_data = queue_response.json()["data"] + assert queue_data["page"] == 1 + assert queue_data["page_size"] == 20 + assert queue_data["total"] == 4 + queue_products = [item["product"] for item in queue_data["items"]] + assert queue_products[:4] == [ + "Fila Assigned High", + "Fila Unassigned Medium", + "Fila Feature Medium", + "Fila Assigned Low", + ] + + status_response = await client.get( + "/api/tickets/queue", + params={"status": "in_progress", "page": 1, "page_size": 20}, + headers=headers, + ) + assert status_response.status_code == 200, status_response.text + status_items = status_response.json()["data"]["items"] + assert {item["product"] for item in status_items} == { + "Fila Assigned High", + "Fila Assigned Low", + } + assert all(item["status"] == "in_progress" for item in status_items) + + type_response = await client.get( + "/api/tickets/queue", + params={"type": "new_feature", "page": 1, "page_size": 20}, + headers=headers, + ) + assert type_response.status_code == 200, type_response.text + type_items = type_response.json()["data"]["items"] + assert len(type_items) == 1 + assert type_items[0]["product"] == "Fila Feature Medium" + assert type_items[0]["type"] == "new_feature" + + unassigned_response = await client.get( + "/api/tickets/queue", + params={"unassigned_only": True, "page": 1, "page_size": 20}, + headers=headers, + ) + assert unassigned_response.status_code == 200, unassigned_response.text + unassigned_items = unassigned_response.json()["data"]["items"] + assert any(item["product"] == "Fila Unassigned Medium" for item in unassigned_items) + assert all(item["unassigned"] is True for item in unassigned_items) + + assignee_response = await client.get( + "/api/tickets/queue", + params={"assignee_id": str(admin_user.id), "page": 1, "page_size": 20}, + headers=headers, + ) + assert 
assignee_response.status_code == 200, assignee_response.text + assignee_items = assignee_response.json()["data"]["items"] + assert len(assignee_items) == 2 + assert assignee_items[0]["product"] == "Fila Assigned High" + assert assignee_items[0]["criticality"] == "high" + assert assignee_items[1]["product"] == "Fila Assigned Low" + assert assignee_items[1]["criticality"] == "low" + assert all(item["assignee_id"] == str(admin_user.id) for item in assignee_items) + + @pytest.mark.asyncio + async def test_assign_ticket_publishes_ticket_assignee_updated_event_in_http_flow( + self, client: AsyncClient, auth: AuthActions, monkeypatch: pytest.MonkeyPatch + ) -> None: + created_user, headers = await _create_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-event@test.com", + admin_username="ticketadminevent", + client_email="ticket-client-event@test.com", + client_username="ticketclientevent", + product="Produto Event Assign", + ) + items = await _list_tickets_for_client(client, headers, created_user["id"]) + ticket_id = items[0]["id"] + agent_data = await auth.register_agent( + email="ticket-agent-event@test.com", + username="ticketagentevent", + ) + + dispatcher = get_event_dispatcher() + original_publish = dispatcher.publish + published: list[TicketAssigneeUpdatedEventSchema] = [] + + async def spy_publish(event: AppEvent, payload: Any) -> None: + if event == AppEvent.TICKET_ASSIGNEE_UPDATED: + assert isinstance(payload, TicketAssigneeUpdatedEventSchema) + published.append(payload) + await original_publish(event, payload) + + monkeypatch.setattr(dispatcher, "publish", spy_publish) + + response = await client.post( + f"/api/tickets/{ticket_id}/assign", + json={"agent_id": agent_data["id"], "reason": "Validando publish"}, + headers=headers, + ) + assert response.status_code == 200, response.text + assert len(published) == 1 + assert str(published[0].ticket_id) == ticket_id + assert str(published[0].new_agent_id) == agent_data["id"] + assert 
published[0].reason == "Validando publish" + + @pytest.mark.asyncio + async def test_openapi_exposes_only_official_update_route( + self, client: AsyncClient, auth: AuthActions + ) -> None: + _ = auth + response = await client.get("/openapi.json") + assert response.status_code == 200 + + paths = response.json()["paths"] + assert "/api/tickets/" in paths + assert "/api/tickets/queue" in paths + assert "/api/tickets/{ticket_id}" in paths + assert "/api/tickets/{ticket_id}/assign" in paths + assert "/api/tickets/{ticket_id}/escalate" in paths + assert "/api/tickets/{ticket_id}/transfer" in paths + assert "/api/tickets/{ticket_id}/comments" in paths + assert "/api/tickets/{ticket_id}/status" not in paths + + @pytest.mark.asyncio + async def test_comment_on_ticket_returns_created_comment( + self, client: AsyncClient, auth: AuthActions + ) -> None: + created_user, headers = await _create_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-comment@test.com", + admin_username="ticketadmincomment", + client_email="ticket-client-comment@test.com", + client_username="ticketclientcomment", + product="Produto Contrato Comment", + ) + + list_response = await client.get( + "/api/tickets/", + params={"client_id": created_user["id"], "product": "Produto Contrato Comment"}, + headers=headers, + ) + ticket_id = list_response.json()["data"]["items"][0]["id"] + + response = await client.post( + f"/api/tickets/{ticket_id}/comments", + json={"text": "Cliente confirmou o erro.", "internal": False}, + headers=headers, + ) + assert response.status_code == 201, response.text + data = response.json()["data"] + assert data["text"] == "Cliente confirmou o erro." 
+ assert data["internal"] is False + assert data["author"] == "ticketadmincomment" + assert "comment_id" in data + assert "date" in data + + @pytest.mark.asyncio + async def test_get_ticket_comments_returns_added_comments_in_order( + self, client: AsyncClient, auth: AuthActions + ) -> None: + created_user, headers = await _create_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-listcomments@test.com", + admin_username="ticketadminlistcomments", + client_email="ticket-client-listcomments@test.com", + client_username="ticketclientlistcomments", + product="Produto Contrato ListComments", + ) + + list_response = await client.get( + "/api/tickets/", + params={"client_id": created_user["id"], "product": "Produto Contrato ListComments"}, + headers=headers, + ) + ticket_id = list_response.json()["data"]["items"][0]["id"] + + first = await client.post( + f"/api/tickets/{ticket_id}/comments", + json={"text": "Primeiro comentário interno.", "internal": True}, + headers=headers, + ) + assert first.status_code == 201, first.text + second = await client.post( + f"/api/tickets/{ticket_id}/comments", + json={"text": "Segundo comentário público.", "internal": False}, + headers=headers, + ) + assert second.status_code == 201, second.text + + response = await client.get( + f"/api/tickets/{ticket_id}/comments", + headers=headers, + ) + assert response.status_code == 200, response.text + data: list[dict[str, Any]] = response.json()["data"] + assert isinstance(data, list) + assert len(data) == 2 + assert data[0]["text"] == "Primeiro comentário interno." + assert data[0]["internal"] is True + assert data[1]["text"] == "Segundo comentário público." 
+ assert data[1]["internal"] is False + + @pytest.mark.asyncio + async def test_get_comments_returns_empty_list_for_ticket_without_comments( + self, client: AsyncClient, auth: AuthActions + ) -> None: + created_user, headers = await _create_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-nocomments@test.com", + admin_username="ticketadminnocomments", + client_email="ticket-client-nocomments@test.com", + client_username="ticketclientnocomments", + product="Produto Contrato NoComments", + ) + + list_response = await client.get( + "/api/tickets/", + params={"client_id": created_user["id"], "product": "Produto Contrato NoComments"}, + headers=headers, + ) + ticket_id = list_response.json()["data"]["items"][0]["id"] + + response = await client.get( + f"/api/tickets/{ticket_id}/comments", + headers=headers, + ) + assert response.status_code == 200, response.text + assert response.json()["data"] == [] + + @pytest.mark.asyncio + async def test_comment_on_missing_ticket_returns_404( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="ticket-admin-comment404@test.com", + username="ticketadmincomment404", + ) + headers = auth.auth_headers(tokens["access_token"]) + + missing_id = "67f0c9b8e4b0b1a2c3d4e5ff" + response = await client.post( + f"/api/tickets/{missing_id}/comments", + json={"text": "Comentário em ticket inexistente.", "internal": False}, + headers=headers, + ) + assert response.status_code == 404, response.text + + @pytest.mark.asyncio + async def test_get_comments_for_missing_ticket_returns_404( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="ticket-admin-listcomments404@test.com", + username="ticketadminlistcomments404", + ) + headers = auth.auth_headers(tokens["access_token"]) + + missing_id = "67f0c9b8e4b0b1a2c3d4e5ff" + response = await client.get( + f"/api/tickets/{missing_id}/comments", + headers=headers, + ) 
+ assert response.status_code == 404, response.text + + @pytest.mark.asyncio + async def test_update_ticket_comment_persists_partial_changes( + self, client: AsyncClient, auth: AuthActions + ) -> None: + created_user, headers = await _create_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-updatecomment@test.com", + admin_username="ticketadminupdatecomment", + client_email="ticket-client-updatecomment@test.com", + client_username="ticketclientupdatecomment", + product="Produto Contrato UpdateComment", + ) + + list_response = await client.get( + "/api/tickets/", + params={"client_id": created_user["id"], "product": "Produto Contrato UpdateComment"}, + headers=headers, + ) + ticket_id = list_response.json()["data"]["items"][0]["id"] + + post_response = await client.post( + f"/api/tickets/{ticket_id}/comments", + json={"text": "Texto original.", "internal": True}, + headers=headers, + ) + assert post_response.status_code == 201, post_response.text + comment_id = post_response.json()["data"]["comment_id"] + + patch_response = await client.patch( + f"/api/tickets/{ticket_id}/comments/{comment_id}", + json={"text": "Texto editado."}, + headers=headers, + ) + assert patch_response.status_code == 200, patch_response.text + data = patch_response.json()["data"] + assert data["comment_id"] == comment_id + assert data["text"] == "Texto editado." + assert data["internal"] is True + + list_comments = await client.get( + f"/api/tickets/{ticket_id}/comments", + headers=headers, + ) + comments: list[dict[str, Any]] = list_comments.json()["data"] + assert len(comments) == 1 + assert comments[0]["text"] == "Texto editado." 
+ assert comments[0]["internal"] is True + + @pytest.mark.asyncio + async def test_update_ticket_comment_returns_404_for_missing_comment( + self, client: AsyncClient, auth: AuthActions + ) -> None: + created_user, headers = await _create_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-updatecomment404@test.com", + admin_username="ticketadminupdatecomment404", + client_email="ticket-client-updatecomment404@test.com", + client_username="ticketclientupdatecomment404", + product="Produto Contrato UpdateComment404", + ) + + list_response = await client.get( + "/api/tickets/", + params={ + "client_id": created_user["id"], + "product": "Produto Contrato UpdateComment404", + }, + headers=headers, + ) + ticket_id = list_response.json()["data"]["items"][0]["id"] + + response = await client.patch( + f"/api/tickets/{ticket_id}/comments/{uuid4()}", + json={"text": "Não existe."}, + headers=headers, + ) + assert response.status_code == 404, response.text + + @pytest.mark.asyncio + async def test_update_ticket_comment_returns_404_for_missing_ticket( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="ticket-admin-updatecommentnoticket@test.com", + username="ticketadminupdatecommentnoticket", + ) + headers = auth.auth_headers(tokens["access_token"]) + + response = await client.patch( + f"/api/tickets/67f0c9b8e4b0b1a2c3d4e5ff/comments/{uuid4()}", + json={"text": "Ticket inexistente."}, + headers=headers, + ) + assert response.status_code == 404, response.text + + @pytest.mark.asyncio + async def test_delete_ticket_comment_removes_from_listing( + self, client: AsyncClient, auth: AuthActions + ) -> None: + created_user, headers = await _create_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-deletecomment@test.com", + admin_username="ticketadmindeletecomment", + client_email="ticket-client-deletecomment@test.com", + client_username="ticketclientdeletecomment", + product="Produto 
Contrato DeleteComment", + ) + + list_response = await client.get( + "/api/tickets/", + params={"client_id": created_user["id"], "product": "Produto Contrato DeleteComment"}, + headers=headers, + ) + ticket_id = list_response.json()["data"]["items"][0]["id"] + + first = await client.post( + f"/api/tickets/{ticket_id}/comments", + json={"text": "Comentário a ser removido.", "internal": False}, + headers=headers, + ) + assert first.status_code == 201, first.text + comment_id = first.json()["data"]["comment_id"] + + second = await client.post( + f"/api/tickets/{ticket_id}/comments", + json={"text": "Comentário que permanece.", "internal": True}, + headers=headers, + ) + assert second.status_code == 201, second.text + kept_comment_id = second.json()["data"]["comment_id"] + + delete_response = await client.delete( + f"/api/tickets/{ticket_id}/comments/{comment_id}", + headers=headers, + ) + assert delete_response.status_code == 200, delete_response.text + deleted = delete_response.json()["data"] + assert deleted["comment_id"] == comment_id + assert deleted["text"] == "Comentário a ser removido." 
+ assert deleted["internal"] is False + + list_comments = await client.get( + f"/api/tickets/{ticket_id}/comments", + headers=headers, + ) + comments: list[dict[str, Any]] = list_comments.json()["data"] + assert [c["comment_id"] for c in comments] == [kept_comment_id] + + @pytest.mark.asyncio + async def test_delete_ticket_comment_is_idempotent( + self, client: AsyncClient, auth: AuthActions + ) -> None: + created_user, headers = await _create_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-deletecommentidem@test.com", + admin_username="ticketadmindeletecommentidem", + client_email="ticket-client-deletecommentidem@test.com", + client_username="ticketclientdeletecommentidem", + product="Produto Contrato DeleteCommentIdem", + ) + + list_response = await client.get( + "/api/tickets/", + params={ + "client_id": created_user["id"], + "product": "Produto Contrato DeleteCommentIdem", + }, + headers=headers, + ) + ticket_id = list_response.json()["data"]["items"][0]["id"] + + post_response = await client.post( + f"/api/tickets/{ticket_id}/comments", + json={"text": "Vou ser apagado.", "internal": False}, + headers=headers, + ) + assert post_response.status_code == 201, post_response.text + comment_id = post_response.json()["data"]["comment_id"] + + first = await client.delete( + f"/api/tickets/{ticket_id}/comments/{comment_id}", + headers=headers, + ) + assert first.status_code == 200, first.text + + second = await client.delete( + f"/api/tickets/{ticket_id}/comments/{comment_id}", + headers=headers, + ) + assert second.status_code == 404, second.text + + @pytest.mark.asyncio + async def test_delete_ticket_comment_returns_404_for_missing_ticket( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="ticket-admin-deletecommentnoticket@test.com", + username="ticketadmindeletecommentnoticket", + ) + headers = auth.auth_headers(tokens["access_token"]) + + response = await client.delete( + 
f"/api/tickets/67f0c9b8e4b0b1a2c3d4e5ff/comments/{uuid4()}", + headers=headers, + ) + assert response.status_code == 404, response.text + + @pytest.mark.asyncio + async def test_get_ticket_history_returns_entries_after_assign_and_escalate( + self, client: AsyncClient, auth: AuthActions + ) -> None: + ticket_id, _created_user, headers, first_agent = await _create_assigned_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-history@test.com", + admin_username="ticketadminhistory", + client_email="ticket-client-history@test.com", + client_username="ticketclienthistory", + agent_email="ticket-agent-history@test.com", + agent_username="ticketagenthistory", + product="Produto Histórico", + ) + target_agent = await _register_agent_with_support_level( + auth, + email="ticket-agent-history-n2@test.com", + username="ticketagenthistoryn2", + level="N2", + ) + escalate_response = await client.post( + f"/api/tickets/{ticket_id}/escalate", + json={ + "target_agent_id": target_agent["id"], + "reason": "Subir para N2 no histórico", + }, + headers=headers, + ) + assert escalate_response.status_code == 200, escalate_response.text + + response = await client.get( + f"/api/tickets/{ticket_id}/history", + headers=headers, + ) + assert response.status_code == 200, response.text + data: list[dict[str, Any]] = response.json()["data"] + assert isinstance(data, list) + assert len(data) == 2 + + previous, current = data + assert previous["agent_id"] == first_agent["id"] + assert previous["name"] == "ticketagenthistory" + assert previous["exit_date"] is not None + assert previous["transfer_reason"] == "Subir para N2 no histórico" + assert current["agent_id"] == target_agent["id"] + assert current["name"] == "ticketagenthistoryn2" + assert current["level"] == "N2" + assert current["exit_date"] is None + assert current["transfer_reason"] == "Subir para N2 no histórico" + + @pytest.mark.asyncio + async def test_get_ticket_history_returns_empty_list_for_unassigned_ticket( + self, 
client: AsyncClient, auth: AuthActions + ) -> None: + created_user, headers = await _create_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-historyempty@test.com", + admin_username="ticketadminhistoryempty", + client_email="ticket-client-historyempty@test.com", + client_username="ticketclienthistoryempty", + product="Produto Histórico Vazio", + ) + items = await _list_tickets_for_client(client, headers, created_user["id"]) + ticket_id = items[0]["id"] + + response = await client.get( + f"/api/tickets/{ticket_id}/history", + headers=headers, + ) + assert response.status_code == 200, response.text + assert response.json()["data"] == [] + + @pytest.mark.asyncio + async def test_get_ticket_history_returns_404_for_missing_ticket( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="ticket-admin-history404@test.com", + username="ticketadminhistory404", + ) + headers = auth.auth_headers(tokens["access_token"]) + + response = await client.get( + "/api/tickets/67f0c9b8e4b0b1a2c3d4e5ff/history", + headers=headers, + ) + assert response.status_code == 404, response.text + + @pytest.mark.asyncio + async def test_get_ticket_history_requires_permission( + self, client: AsyncClient, auth: AuthActions + ) -> None: + created_user, admin_headers = await _create_ticket( + client=client, + auth=auth, + admin_email="ticket-admin-historyperm@test.com", + admin_username="ticketadminhistoryperm", + client_email="ticket-client-historyperm@test.com", + client_username="ticketclienthistoryperm", + product="Produto Histórico Permissão", + ) + items = await _list_tickets_for_client(client, admin_headers, created_user["id"]) + ticket_id = items[0]["id"] + + unauthenticated = await client.get(f"/api/tickets/{ticket_id}/history") + assert unauthenticated.status_code == 403, unauthenticated.text diff --git a/tests/app/e2e/domains/ticket/test_ticket_search_routes.py 
b/tests/app/e2e/domains/ticket/test_ticket_search_routes.py new file mode 100644 index 0000000..1453eed --- /dev/null +++ b/tests/app/e2e/domains/ticket/test_ticket_search_routes.py @@ -0,0 +1,476 @@ +from collections.abc import AsyncGenerator +from typing import Any +from uuid import UUID, uuid4 + +import pytest +import pytest_asyncio +from httpx import AsyncClient +from sqlalchemy import text + +from app.domains.live_chat.entities import Conversation +from app.domains.ticket.models import Ticket +from tests.app.e2e.conftest import AuthActions + + +@pytest_asyncio.fixture(autouse=True) +async def _cleanup_mongo() -> AsyncGenerator[None, None]: + await Ticket.delete_all() + await Conversation.delete_all() + yield + await Ticket.delete_all() + await Conversation.delete_all() + + +async def _create_ticket( + client: AsyncClient, + headers: dict[str, str], + *, + client_id: str, + description: str, + product: str, + company_id: str | None = None, +) -> dict[str, Any]: + payload: dict[str, Any] = { + "triage_id": "67f0c9b8e4b0b1a2c3d4e5f6", + "type": "issue", + "criticality": "high", + "product": product, + "description": description, + "chat_ids": ["67f0c9b8e4b0b1a2c3d4e5f7"], + "client_id": client_id, + } + if company_id is not None: + payload["company_id"] = company_id + + response = await client.post("/api/tickets/", json=payload, headers=headers) + assert response.status_code == 201, response.text + return response.json()["data"] + + +async def _list_tickets( + client: AsyncClient, + headers: dict[str, str], + client_id: str, +) -> list[dict[str, Any]]: + response = await client.get( + "/api/tickets/", + params={"client_id": client_id, "page": 1, "page_size": 50}, + headers=headers, + ) + assert response.status_code == 200, response.text + return response.json()["data"]["items"] + + +async def _add_comment( + client: AsyncClient, + headers: dict[str, str], + ticket_id: str, + text_value: str, +) -> None: + response = await client.post( + 
f"/api/tickets/{ticket_id}/comments", + json={"text": text_value, "internal": False}, + headers=headers, + ) + assert response.status_code == 201, response.text + + +async def _assign_ticket( + client: AsyncClient, + headers: dict[str, str], + ticket_id: str, + agent_id: str, + reason: str = "Atribuído para teste de busca", +) -> None: + response = await client.post( + f"/api/tickets/{ticket_id}/assign", + json={"agent_id": agent_id, "reason": reason}, + headers=headers, + ) + assert response.status_code == 200, response.text + + +async def _attach_company_to_user( + auth: AuthActions, + user_id: str, + suffix: str, +) -> UUID: + company_id = uuid4() + tax_id = f"{suffix:0>14s}"[:14] + await auth.db_session.execute( + text( + "INSERT INTO companies (id, legal_name, trade_name, tax_id)" + " VALUES (:id, :legal, :trade, :tax)" + ), + { + "id": company_id, + "legal": f"Empresa {suffix}", + "trade": f"Empresa {suffix}", + "tax": tax_id, + }, + ) + await auth.db_session.execute( + text("UPDATE users SET company_id = :cid WHERE id = :uid"), + {"cid": company_id, "uid": user_id}, + ) + await auth.db_session.flush() + return company_id + + +async def _search( + client: AsyncClient, + headers: dict[str, str], + query: str, +) -> tuple[int, Any]: + response = await client.get( + "/api/tickets/search", + params={"search_query": query}, + headers=headers, + ) + return response.status_code, response.json() if response.content else None + + +class TestSearchTicketByTextRoute: + @pytest.mark.asyncio + async def test_unauthenticated_request_is_rejected( + self, client: AsyncClient + ) -> None: + response = await client.get( + "/api/tickets/search", params={"search_query": "boleto"} + ) + assert response.status_code in {401, 403} + + @pytest.mark.asyncio + async def test_user_finds_only_their_own_tickets( + self, client: AsyncClient, auth: AuthActions + ) -> None: + admin_tokens = await auth.register_and_login_admin( + email="search-admin-self@test.com", + 
username="searchadminself", + ) + admin_headers = auth.auth_headers(admin_tokens["access_token"]) + + client_a = await auth.register( + email="search-client-a@test.com", + username="searchclienta", + ) + client_b = await auth.register( + email="search-client-b@test.com", + username="searchclientb", + ) + + await _create_ticket( + client, + admin_headers, + client_id=client_a["id"], + description="Erro ao emitir boleto do cliente A", + product="Produto Search Self A", + ) + await _create_ticket( + client, + admin_headers, + client_id=client_b["id"], + description="Erro ao emitir boleto do cliente B", + product="Produto Search Self B", + ) + + client_a_tokens = await auth.login( + email="search-client-a@test.com", + ) + client_a_headers = auth.auth_headers(client_a_tokens["access_token"]) + + status_code, body = await _search(client, client_a_headers, "boleto") + + assert status_code == 200, body + data = body["data"] + assert len(data) == 1 + assert data[0]["description"] == "Erro ao emitir boleto do cliente A" + assert data[0]["client"]["id"] == client_a["id"] + + @pytest.mark.asyncio + async def test_search_matches_text_inside_comments( + self, client: AsyncClient, auth: AuthActions + ) -> None: + admin_tokens = await auth.register_and_login_admin( + email="search-admin-comments@test.com", + username="searchadmincomments", + ) + admin_headers = auth.auth_headers(admin_tokens["access_token"]) + + client_user = await auth.register( + email="search-client-comments@test.com", + username="searchclientcomments", + ) + await _create_ticket( + client, + admin_headers, + client_id=client_user["id"], + description="Pedido genérico de suporte", + product="Produto Search Comentário", + ) + items = await _list_tickets(client, admin_headers, client_user["id"]) + ticket_id = items[0]["id"] + await _add_comment( + client, + admin_headers, + ticket_id, + "Cliente relatou queda na fatura mensal", + ) + + client_tokens = await auth.login(email="search-client-comments@test.com") + 
client_headers = auth.auth_headers(client_tokens["access_token"]) + + status_code, body = await _search(client, client_headers, "queda") + + assert status_code == 200, body + data = body["data"] + assert len(data) == 1 + assert data[0]["comments"][0]["text"] == "Cliente relatou queda na fatura mensal" + + @pytest.mark.asyncio + async def test_search_is_case_insensitive( + self, client: AsyncClient, auth: AuthActions + ) -> None: + admin_tokens = await auth.register_and_login_admin( + email="search-admin-case@test.com", + username="searchadmincase", + ) + admin_headers = auth.auth_headers(admin_tokens["access_token"]) + + client_user = await auth.register( + email="search-client-case@test.com", + username="searchclientcase", + ) + await _create_ticket( + client, + admin_headers, + client_id=client_user["id"], + description="Falha CRÍTICA na importação", + product="Produto Search Case", + ) + + client_tokens = await auth.login(email="search-client-case@test.com") + client_headers = auth.auth_headers(client_tokens["access_token"]) + + status_code, body = await _search(client, client_headers, "crítica") + + assert status_code == 200, body + assert len(body["data"]) == 1 + + @pytest.mark.asyncio + async def test_agent_finds_only_tickets_they_were_assigned_to( + self, client: AsyncClient, auth: AuthActions + ) -> None: + admin_tokens = await auth.register_and_login_admin( + email="search-admin-agent@test.com", + username="searchadminagent", + ) + admin_headers = auth.auth_headers(admin_tokens["access_token"]) + + client_user = await auth.register( + email="search-client-agent@test.com", + username="searchclientagent", + ) + agent = await auth.register_agent( + email="search-agent@test.com", + username="searchagent", + ) + await _attach_company_to_user(auth, agent["id"], "55555") + + await _create_ticket( + client, + admin_headers, + client_id=client_user["id"], + description="Acesso negado ao módulo financeiro", + product="Produto Search Agente Atribuído", + ) + await 
_create_ticket( + client, + admin_headers, + client_id=client_user["id"], + description="Acesso negado ao módulo de relatórios", + product="Produto Search Agente NaoAtribuido", + ) + + items = await _list_tickets(client, admin_headers, client_user["id"]) + assigned = next(t for t in items if "financeiro" in t["description"]) + await _assign_ticket(client, admin_headers, assigned["id"], agent["id"]) + + agent_tokens = await auth.login(email="search-agent@test.com") + agent_headers = auth.auth_headers(agent_tokens["access_token"]) + + status_code, body = await _search(client, agent_headers, "acesso") + + assert status_code == 200, body + data = body["data"] + assert len(data) == 1 + assert data[0]["description"] == "Acesso negado ao módulo financeiro" + + @pytest.mark.asyncio + async def test_admin_finds_tickets_in_their_company( + self, client: AsyncClient, auth: AuthActions + ) -> None: + admin_data = await auth.register_admin( + email="search-admin-company@test.com", + username="searchadmincompany", + ) + company_id = await _attach_company_to_user(auth, admin_data["id"], "12345") + admin_tokens = await auth.login(email="search-admin-company@test.com") + admin_headers = auth.auth_headers(admin_tokens["access_token"]) + + client_in_company = await auth.register( + email="search-client-company-in@test.com", + username="searchclientcompanyin", + ) + client_outside = await auth.register( + email="search-client-company-out@test.com", + username="searchclientcompanyout", + ) + + await _create_ticket( + client, + admin_headers, + client_id=client_in_company["id"], + description="Falha de sincronização na nota fiscal", + product="Produto Search Empresa Dentro", + company_id=str(company_id), + ) + await _create_ticket( + client, + admin_headers, + client_id=client_outside["id"], + description="Falha de sincronização em outro grupo", + product="Produto Search Empresa Fora", + company_id=str(uuid4()), + ) + + status_code, body = await _search(client, admin_headers, 
"sincronização") + + assert status_code == 200, body + data = body["data"] + assert len(data) == 1 + assert data[0]["client"]["company"]["id"] == str(company_id) + + @pytest.mark.asyncio + async def test_admin_without_company_searches_globally( + self, client: AsyncClient, auth: AuthActions + ) -> None: + admin_tokens = await auth.register_and_login_admin( + email="search-admin-global@test.com", + username="searchadminglobal", + ) + admin_headers = auth.auth_headers(admin_tokens["access_token"]) + + client_a = await auth.register( + email="search-client-global-a@test.com", + username="searchclientglobala", + ) + client_b = await auth.register( + email="search-client-global-b@test.com", + username="searchclientglobalb", + ) + await _create_ticket( + client, + admin_headers, + client_id=client_a["id"], + description="Falha global na sincronização A", + product="Produto Global A", + company_id=str(uuid4()), + ) + await _create_ticket( + client, + admin_headers, + client_id=client_b["id"], + description="Falha global na sincronização B", + product="Produto Global B", + company_id=str(uuid4()), + ) + + status_code, body = await _search(client, admin_headers, "global") + + assert status_code == 200, body + descriptions = sorted(item["description"] for item in body["data"]) + assert descriptions == [ + "Falha global na sincronização A", + "Falha global na sincronização B", + ] + + @pytest.mark.asyncio + async def test_agent_without_company_searches_globally( + self, client: AsyncClient, auth: AuthActions + ) -> None: + admin_tokens = await auth.register_and_login_admin( + email="search-admin-agentglobal@test.com", + username="searchadminagentglobal", + ) + admin_headers = auth.auth_headers(admin_tokens["access_token"]) + + client_user = await auth.register( + email="search-client-agentglobal@test.com", + username="searchclientagentglobal", + ) + agent = await auth.register_agent( + email="search-agent-global@test.com", + username="searchagentglobal", + ) + + await 
_create_ticket( + client, + admin_headers, + client_id=client_user["id"], + description="Erro global de processamento atribuído", + product="Produto Agente Global Atribuído", + ) + await _create_ticket( + client, + admin_headers, + client_id=client_user["id"], + description="Erro global de processamento não atribuído", + product="Produto Agente Global NaoAtribuido", + ) + + items = await _list_tickets(client, admin_headers, client_user["id"]) + assigned = next(t for t in items if "atribuído" in t["description"]) + await _assign_ticket(client, admin_headers, assigned["id"], agent["id"]) + + agent_tokens = await auth.login(email="search-agent-global@test.com") + agent_headers = auth.auth_headers(agent_tokens["access_token"]) + + status_code, body = await _search(client, agent_headers, "global") + + assert status_code == 200, body + descriptions = sorted(item["description"] for item in body["data"]) + assert descriptions == [ + "Erro global de processamento atribuído", + "Erro global de processamento não atribuído", + ] + + @pytest.mark.asyncio + async def test_blank_query_returns_empty_list( + self, client: AsyncClient, auth: AuthActions + ) -> None: + admin_tokens = await auth.register_and_login_admin( + email="search-admin-blank@test.com", + username="searchadminblank", + ) + admin_headers = auth.auth_headers(admin_tokens["access_token"]) + + client_user = await auth.register( + email="search-client-blank@test.com", + username="searchclientblank", + ) + await _create_ticket( + client, + admin_headers, + client_id=client_user["id"], + description="Conteúdo qualquer", + product="Produto Search Blank", + ) + + client_tokens = await auth.login(email="search-client-blank@test.com") + client_headers = auth.auth_headers(client_tokens["access_token"]) + + status_code, body = await _search(client, client_headers, "") + + assert status_code == 422, body + assert body["title"] == "Validation Error" diff --git a/tests/app/e2e/domains/ticket/test_ticket_take_route.py 
b/tests/app/e2e/domains/ticket/test_ticket_take_route.py new file mode 100644 index 0000000..30da897 --- /dev/null +++ b/tests/app/e2e/domains/ticket/test_ticket_take_route.py @@ -0,0 +1,268 @@ +from typing import Any + +import pytest +from httpx import AsyncClient + +from tests.app.e2e.conftest import AuthActions + +MISSING_TICKET_ID = "67f0c9b8e4b0b1a2c3d4e5ff" + + +async def _create_ticket_and_get_id( + client: AsyncClient, + auth: AuthActions, + *, + admin_email: str, + admin_username: str, + client_email: str, + client_username: str, +) -> tuple[str, dict[str, str]]: + tokens = await auth.register_and_login_admin(email=admin_email, username=admin_username) + headers = auth.auth_headers(tokens["access_token"]) + created_user = await auth.register(email=client_email, username=client_username) + + create_response = await client.post( + "/api/tickets/", + json={ + "triage_id": "67f0c9b8e4b0b1a2c3d4e5f6", + "type": "issue", + "criticality": "high", + "product": "Produto Take", + "description": "Chamado para teste de take", + "chat_ids": [], + "client_id": created_user["id"], + }, + headers=headers, + ) + assert create_response.status_code == 201, create_response.text + + list_response = await client.get( + "/api/tickets/", + params={"client_id": created_user["id"]}, + headers=headers, + ) + ticket_id: str = list_response.json()["data"]["items"][0]["id"] + return ticket_id, headers + + +class TestTakeTicket: + @pytest.mark.asyncio + async def test_agent_takes_unassigned_ticket_registers_history_entry( + self, client: AsyncClient, auth: AuthActions + ) -> None: + ticket_id, _ = await _create_ticket_and_get_id( + client, + auth, + admin_email="take-admin-a1@test.com", + admin_username="takeadmina1", + client_email="take-client-a1@test.com", + client_username="takeclienta1", + ) + await auth.register_agent(email="take-agent-a1@test.com", username="takeagenta1") + agent_tokens = await auth.login(email="take-agent-a1@test.com") + agent_user = await 
auth.me(agent_tokens["access_token"]) + + response = await client.post( + f"/api/tickets/{ticket_id}/take", + headers=auth.auth_headers(agent_tokens["access_token"]), + ) + + assert response.status_code == 200, response.text + data: dict[str, Any] = response.json()["data"] + assert data["assigned_agent_id"] == str(agent_user.id) + history = data["agent_history"] + assert len(history) == 1 + assert history[0]["agent_id"] == str(agent_user.id) + assert history[0]["level"] == "agent" + assert history[0]["transfer_reason"] == "Assumido via fila" + assert history[0]["exit_date"] is None + + @pytest.mark.asyncio + async def test_admin_takes_unassigned_ticket_registers_history_entry( + self, client: AsyncClient, auth: AuthActions + ) -> None: + tokens = await auth.register_and_login_admin( + email="take-admin-b1@test.com", username="takeadminb1" + ) + headers = auth.auth_headers(tokens["access_token"]) + created_user = await auth.register( + email="take-client-b1@test.com", username="takeclientb1" + ) + create_response = await client.post( + "/api/tickets/", + json={ + "triage_id": "67f0c9b8e4b0b1a2c3d4e5f6", + "type": "issue", + "criticality": "medium", + "product": "Produto Take Admin", + "description": "Admin takes ticket", + "chat_ids": [], + "client_id": created_user["id"], + }, + headers=headers, + ) + assert create_response.status_code == 201, create_response.text + list_response = await client.get( + "/api/tickets/", + params={"client_id": created_user["id"]}, + headers=headers, + ) + ticket_id: str = list_response.json()["data"]["items"][0]["id"] + admin_user = await auth.me(tokens["access_token"]) + + response = await client.post(f"/api/tickets/{ticket_id}/take", headers=headers) + + assert response.status_code == 200, response.text + data: dict[str, Any] = response.json()["data"] + assert data["assigned_agent_id"] == str(admin_user.id) + history = data["agent_history"] + assert len(history) == 1 + assert history[0]["agent_id"] == str(admin_user.id) + assert 
history[0]["level"] == "admin" + + @pytest.mark.asyncio + async def test_taking_own_assigned_ticket_is_idempotent( + self, client: AsyncClient, auth: AuthActions + ) -> None: + ticket_id, _ = await _create_ticket_and_get_id( + client, + auth, + admin_email="take-admin-c1@test.com", + admin_username="takeadminc1", + client_email="take-client-c1@test.com", + client_username="takeclientc1", + ) + await auth.register_agent(email="take-agent-c1@test.com", username="takeagentc1") + agent_tokens = await auth.login(email="take-agent-c1@test.com") + agent_headers = auth.auth_headers(agent_tokens["access_token"]) + + first = await client.post(f"/api/tickets/{ticket_id}/take", headers=agent_headers) + assert first.status_code == 200, first.text + + second = await client.post(f"/api/tickets/{ticket_id}/take", headers=agent_headers) + + assert second.status_code == 200, second.text + assert len(second.json()["data"]["agent_history"]) == 1 + + @pytest.mark.asyncio + async def test_returns_409_when_ticket_already_assigned_to_another_agent( + self, client: AsyncClient, auth: AuthActions + ) -> None: + ticket_id, _ = await _create_ticket_and_get_id( + client, + auth, + admin_email="take-admin-d1@test.com", + admin_username="takeadmind1", + client_email="take-client-d1@test.com", + client_username="takeclientd1", + ) + await auth.register_agent(email="take-agent-d1@test.com", username="takeagentd1") + agent_a_tokens = await auth.login(email="take-agent-d1@test.com") + + await auth.register_agent(email="take-agent-d2@test.com", username="takeagentd2") + agent_b_tokens = await auth.login(email="take-agent-d2@test.com") + + first = await client.post( + f"/api/tickets/{ticket_id}/take", + headers=auth.auth_headers(agent_a_tokens["access_token"]), + ) + assert first.status_code == 200, first.text + + second = await client.post( + f"/api/tickets/{ticket_id}/take", + headers=auth.auth_headers(agent_b_tokens["access_token"]), + ) + + assert second.status_code == 409, second.text + + 
@pytest.mark.asyncio + async def test_user_without_permission_cannot_take_ticket( + self, client: AsyncClient, auth: AuthActions + ) -> None: + ticket_id, _ = await _create_ticket_and_get_id( + client, + auth, + admin_email="take-admin-e1@test.com", + admin_username="takeadmine1", + client_email="take-client-e1@test.com", + client_username="takecliante1", + ) + user_tokens = await auth.register_and_login( + email="take-user-e1@test.com", username="takeusere1" + ) + + response = await client.post( + f"/api/tickets/{ticket_id}/take", + headers=auth.auth_headers(user_tokens["access_token"]), + ) + + assert response.status_code == 403, response.text + + @pytest.mark.asyncio + async def test_unauthenticated_request_is_rejected( + self, client: AsyncClient, auth: AuthActions + ) -> None: + ticket_id, _ = await _create_ticket_and_get_id( + client, + auth, + admin_email="take-admin-f1@test.com", + admin_username="takeadminf1", + client_email="take-client-f1@test.com", + client_username="takeclientf1", + ) + + response = await client.post(f"/api/tickets/{ticket_id}/take") + + assert response.status_code == 403, response.text + + @pytest.mark.asyncio + async def test_take_nonexistent_ticket_returns_404( + self, client: AsyncClient, auth: AuthActions + ) -> None: + await auth.register_agent(email="take-agent-g1@test.com", username="takeagentg1") + agent_tokens = await auth.login(email="take-agent-g1@test.com") + + response = await client.post( + f"/api/tickets/{MISSING_TICKET_ID}/take", + headers=auth.auth_headers(agent_tokens["access_token"]), + ) + + assert response.status_code == 404, response.text + + @pytest.mark.asyncio + async def test_response_includes_full_ticket_contract( + self, client: AsyncClient, auth: AuthActions + ) -> None: + ticket_id, _ = await _create_ticket_and_get_id( + client, + auth, + admin_email="take-admin-h1@test.com", + admin_username="takeadminh1", + client_email="take-client-h1@test.com", + client_username="takeclienth1", + ) + await 
auth.register_agent(email="take-agent-h1@test.com", username="takeagenth1") + agent_tokens = await auth.login(email="take-agent-h1@test.com") + + response = await client.post( + f"/api/tickets/{ticket_id}/take", + headers=auth.auth_headers(agent_tokens["access_token"]), + ) + + assert response.status_code == 200, response.text + data: dict[str, Any] = response.json()["data"] + for field in ( + "id", + "triage_id", + "type", + "criticality", + "product", + "status", + "creation_date", + "description", + "chat_ids", + "agent_history", + "client", + "comments", + ): + assert field in data, f"Campo ausente na resposta: {field}" diff --git a/tests/app/integration/domains/auth/test_password_service.py b/tests/app/integration/domains/auth/test_password_service.py index 763a3bf..9c248f0 100644 --- a/tests/app/integration/domains/auth/test_password_service.py +++ b/tests/app/integration/domains/auth/test_password_service.py @@ -1,9 +1,12 @@ +import asyncio from unittest.mock import AsyncMock from uuid import uuid4 import pytest from sqlalchemy.ext.asyncio import AsyncSession +from app.core.event_dispatcher import AppEvent, event_handler, get_event_dispatcher +from app.core.event_dispatcher.schemas import PasswordResetEventSchema from app.core.security import PasswordSecurity, ResetTokenSecurity from app.domains.auth.entities import User from app.domains.auth.enums import OAuthProvider, TokenPurpose @@ -67,6 +70,7 @@ def service( password_security=password_security, email_strategy=mock_email, reset_token_security=reset_token_security, + dispatcher=get_event_dispatcher(), ) @pytest.fixture @@ -344,36 +348,49 @@ async def test_reset_password_persists_in_db( # ── forgot_password ─────────────────────────────────────────────── @pytest.mark.asyncio - async def test_forgot_password_sends_email( + async def test_forgot_password_creates_token_in_db( self, service: PasswordService, local_user: User, - mock_email: AsyncMock, - user_repo: UserRepository, + token_repo: 
PasswordResetTokenRepository, + reset_token_security: ResetTokenSecurity, ) -> None: + """forgot_password must create and store a reset token in the DB.""" + # Create a sentinel token first so we can detect a NEW one was created + first_raw = await service.create_reset_token(local_user.id, TokenPurpose.RESET) + first_hash = reset_token_security.hash_token(first_raw) + await service.forgot_password(local_user.email) - mock_email.send_reset_email.assert_awaited_once() - call_args = mock_email.send_reset_email.call_args - assert call_args[0][0] == local_user.email + + # The sentinel token is now invalidated (used_at set), proving a new one was created + old = await token_repo.get_by_hash(first_hash) + assert old is not None + assert old.used_at is not None @pytest.mark.asyncio async def test_forgot_password_nonexistent_email_does_nothing( - self, service: PasswordService, mock_email: AsyncMock + self, + service: PasswordService, ) -> None: - """Should silently return without sending email (no user enumeration).""" - await service.forgot_password("nonexistent@nowhere.com") - mock_email.send_reset_email.assert_not_awaited() + """Should silently return without any side-effect (no user enumeration).""" + await service.forgot_password("nonexistent@nowhere.com") # must not raise @pytest.mark.asyncio - async def test_forgot_password_email_failure_does_not_raise( + async def test_forgot_password_pipeline_failure_does_not_raise( self, service: PasswordService, local_user: User, - mock_email: AsyncMock, + token_repo: PasswordResetTokenRepository, ) -> None: - """If the email service fails, forgot_password should swallow the error.""" - mock_email.send_reset_email.side_effect = Exception("SMTP down") - await service.forgot_password(local_user.email) # should not raise + """If publishing the event raises, forgot_password must swallow the error.""" + # Temporarily corrupt the dispatcher's payload map to force an EventSchemaError + # by publishing with the wrong event type — the 
try/except in forgot_password catches it + original_map = service.dispatcher._payload_map.copy() + del service.dispatcher._payload_map[AppEvent.USER_PASSWORD_RESET] # type: ignore[misc] + try: + await service.forgot_password(local_user.email) # should not raise + finally: + service.dispatcher._payload_map = original_map # type: ignore[assignment] @pytest.mark.asyncio async def test_forgot_password_invalidates_previous_token( @@ -479,35 +496,45 @@ async def test_send_reset_password_email_called( # ── end-to-end flow ─────────────────────────────────────────────── + @pytest.fixture + async def captured_reset_events(self) -> list[PasswordResetEventSchema]: + """Subscribe a real (non-mock) listener that captures PASSWORD_RESET events. + + Cleaned up via unsubscribe after the test to avoid handler accumulation. + """ + captured: list[PasswordResetEventSchema] = [] + dispatcher = get_event_dispatcher() + + @event_handler(PasswordResetEventSchema) + async def _capture(schema: PasswordResetEventSchema) -> None: + captured.append(schema) + + dispatcher.subscribe(AppEvent.USER_PASSWORD_RESET, _capture) + yield captured + dispatcher.unsubscribe(AppEvent.USER_PASSWORD_RESET, _capture) + @pytest.mark.asyncio async def test_full_forgot_and_reset_flow( self, service: PasswordService, local_user: User, user_repo: UserRepository, - mock_email: AsyncMock, password_security: PasswordSecurity, - reset_token_security: ResetTokenSecurity, + captured_reset_events: list[PasswordResetEventSchema], ) -> None: - """Simulate the complete forgot → email → reset → login-with-new-password flow.""" - # 1. user triggers forgot password + """Simulate the complete forgot → event dispatched → reset → login flow.""" await service.forgot_password(local_user.email) - mock_email.send_reset_email.assert_awaited_once() + await asyncio.sleep(0) # yield so the async task fires the capture listener - # 2. 
extract the raw token from the email params (URL contains it) - reset_url: str = mock_email.send_reset_email.call_args[0][1].reset_url - raw_token = reset_url.split("token=")[1] + assert len(captured_reset_events) == 1 + raw_token: str = captured_reset_events[0].raw_token - # 3. reset password using token result = await service.reset_password(raw_token, "FinalNewPass1!") assert result is not None - # 4. verify the new password works fetched = await user_repo.get_by_id(local_user.id) assert fetched is not None assert password_security.verify_password("FinalNewPass1!", fetched.password_hash) - - # 5. old password no longer works assert not password_security.verify_password("OldPassword123!", fetched.password_hash) @pytest.mark.asyncio @@ -516,23 +543,21 @@ async def test_full_change_then_forgot_reset_flow( service: PasswordService, local_user: User, user_repo: UserRepository, - mock_email: AsyncMock, password_security: PasswordSecurity, + captured_reset_events: list[PasswordResetEventSchema], ) -> None: """Change password first, then forgot-password flow should still work.""" - # 1. change password normally await service.change_password(local_user, "OldPassword123!", "Middle456!") - # 2. forgot password await service.forgot_password(local_user.email) - reset_url: str = mock_email.send_reset_email.call_args[0][1].reset_url - raw_token = reset_url.split("token=")[1] + await asyncio.sleep(0) # yield so the async task fires the capture listener + + assert len(captured_reset_events) == 1 + raw_token: str = captured_reset_events[0].raw_token - # 3. reset result = await service.reset_password(raw_token, "Final789!") assert result is not None - # 4. 
only the last password works fetched = await user_repo.get_by_id(local_user.id) assert fetched is not None assert password_security.verify_password("Final789!", fetched.password_hash) diff --git a/tests/app/integration/domains/auth/test_user_repository.py b/tests/app/integration/domains/auth/test_user_repository.py index 162bfe3..ade9c72 100644 --- a/tests/app/integration/domains/auth/test_user_repository.py +++ b/tests/app/integration/domains/auth/test_user_repository.py @@ -557,6 +557,115 @@ async def test_remove_roles_from_unexistent_user( res = await user_repo.remove_roles(uuid4(), [role1.id, role2.id]) assert res == [] + # ── update_user_roles ───────────────────────────────────────────── + + @pytest.mark.asyncio + async def test_update_user_roles_add_and_remove( + self, user_repo: UserRepository, db_session: AsyncSession + ) -> None: + user = await user_repo.create(self.create_with_oauth_dto) + role_a = RoleModel(name="update_role_a") + role_b = RoleModel(name="update_role_b") + db_session.add_all([role_a, role_b]) + await db_session.commit() + await db_session.refresh(role_a) + await db_session.refresh(role_b) + + await user_repo.add_roles(user.id, [role_a.id]) + + result, missing = await user_repo.update_user_roles( + user.id, add_ids=[role_b.id], remove_ids=[role_a.id] + ) + assert result is not None and missing is None + assert result.roles is not None + role_ids = {r.id for r in result.roles} + assert role_b.id in role_ids + assert role_a.id not in role_ids + + @pytest.mark.asyncio + async def test_update_user_roles_add_only( + self, user_repo: UserRepository, db_session: AsyncSession + ) -> None: + user = await user_repo.create(self.create_with_oauth_dto) + role = RoleModel(name="update_add_only") + db_session.add(role) + await db_session.commit() + await db_session.refresh(role) + + result, missing = await user_repo.update_user_roles( + user.id, add_ids=[role.id], remove_ids=[] + ) + assert result is not None and missing is None + assert result.roles 
is not None + assert any(r.id == role.id for r in result.roles) + + @pytest.mark.asyncio + async def test_update_user_roles_remove_only( + self, user_repo: UserRepository, db_session: AsyncSession + ) -> None: + user = await user_repo.create(self.create_with_oauth_dto) + role = RoleModel(name="update_remove_only") + db_session.add(role) + await db_session.commit() + await db_session.refresh(role) + + await user_repo.add_roles(user.id, [role.id]) + + result, missing = await user_repo.update_user_roles( + user.id, add_ids=[], remove_ids=[role.id] + ) + assert result is not None and missing is None + assert result.roles == [] + + @pytest.mark.asyncio + async def test_update_user_roles_unknown_user(self, user_repo: UserRepository) -> None: + result, missing = await user_repo.update_user_roles(uuid4(), add_ids=[1], remove_ids=[]) + assert result is None and missing is None + + @pytest.mark.asyncio + async def test_update_user_roles_missing_add_ids( + self, user_repo: UserRepository + ) -> None: + user = await user_repo.create(self.create_with_oauth_dto) + result, missing = await user_repo.update_user_roles( + user.id, add_ids=[999998, 999999], remove_ids=[] + ) + assert result is None + assert missing == {999998, 999999} + + @pytest.mark.asyncio + async def test_update_user_roles_dedupes_add_ids( + self, user_repo: UserRepository, db_session: AsyncSession + ) -> None: + user = await user_repo.create(self.create_with_oauth_dto) + role = RoleModel(name="update_dedup_role") + db_session.add(role) + await db_session.commit() + await db_session.refresh(role) + + result, missing = await user_repo.update_user_roles( + user.id, add_ids=[role.id, role.id], remove_ids=[] + ) + assert result is not None and missing is None + assert result.roles is not None + assert [r.id for r in result.roles].count(role.id) == 1 + + @pytest.mark.asyncio + async def test_update_user_roles_remove_nonexistent_is_noop( + self, user_repo: UserRepository, db_session: AsyncSession + ) -> None: + user = 
await user_repo.create(self.create_with_oauth_dto) + role = RoleModel(name="update_noop_role") + db_session.add(role) + await db_session.commit() + await db_session.refresh(role) + + result, missing = await user_repo.update_user_roles( + user.id, add_ids=[], remove_ids=[role.id] + ) + assert result is not None and missing is None + assert result.roles == [] + # ── must_change_password ────────────────────────────────────────── @pytest.mark.asyncio diff --git a/tests/app/integration/domains/chatbot/test_chatbot_repository.py b/tests/app/integration/domains/chatbot/test_chatbot_repository.py index ff7a1b6..7b3edc8 100644 --- a/tests/app/integration/domains/chatbot/test_chatbot_repository.py +++ b/tests/app/integration/domains/chatbot/test_chatbot_repository.py @@ -8,17 +8,9 @@ from beanie import PydanticObjectId from motor.motor_asyncio import AsyncIOMotorDatabase +from app.domains.chatbot.models import AttendanceClient, AttendanceCompany from app.domains.chatbot.repositories.chatbot_repository import ChatbotRepository -from app.domains.chatbot.schemas import AttendanceClient, AttendanceCompany, CreateAttendanceDTO -from app.domains.ticket.models import ( - Ticket, - TicketClient, - TicketComment, - TicketCompany, - TicketCriticality, - TicketStatus, - TicketType, -) +from app.domains.chatbot.schemas import CreateAttendanceDTO @pytest_asyncio.fixture(autouse=True) @@ -26,10 +18,8 @@ async def cleanup_collections( mongo_db_conn: AsyncIOMotorDatabase[dict[str, Any]], ) -> AsyncGenerator[None, None]: await mongo_db_conn["atendimentos"].delete_many({}) - await Ticket.delete_all() yield await mongo_db_conn["atendimentos"].delete_many({}) - await Ticket.delete_all() class TestChatbotRepository: @@ -228,42 +218,3 @@ async def test_save_attendance_persists_expected_document_format( assert stored["result"]["type"] == "Ticket" assert stored["result"]["closure_message"] == "Please wait, your request has been created..." 
assert stored["evaluation"]["rating"] == 5 - - @pytest.mark.asyncio - async def test_create_ticket_success( - self, - repo: ChatbotRepository, - ) -> None: - client_id = uuid4() - ticket = Ticket( - triage_id=PydanticObjectId(), - type=TicketType.ISSUE, - criticality=TicketCriticality.HIGH, - product="Product A", - status=TicketStatus.OPEN, - creation_date=datetime.now(UTC), - description="Issue created from chatbot repository integration test", - chat_ids=[], - agent_history=[], - client=TicketClient( - id=client_id, - name="Client Test", - email="client@test.com", - company=TicketCompany(id=client_id, name="SyncDesk Co"), - ), - comments=[ - TicketComment( - author="system", - text="created", - date=datetime.now(UTC), - ) - ], - ) - - ticket_id = await repo.create_ticket(ticket) - - assert ticket_id - stored = await Ticket.get(PydanticObjectId(ticket_id)) - assert stored is not None - assert stored.product == "Product A" - assert stored.status == TicketStatus.OPEN diff --git a/tests/app/integration/domains/chatbot/test_chatbot_service.py b/tests/app/integration/domains/chatbot/test_chatbot_service.py index 7558e88..3bb770c 100644 --- a/tests/app/integration/domains/chatbot/test_chatbot_service.py +++ b/tests/app/integration/domains/chatbot/test_chatbot_service.py @@ -1,5 +1,6 @@ from collections.abc import AsyncGenerator from typing import Any +from unittest.mock import AsyncMock from uuid import uuid4 import pytest @@ -7,11 +8,12 @@ from beanie import PydanticObjectId from motor.motor_asyncio import AsyncIOMotorDatabase +from app.core.event_dispatcher.event_dispatcher import EventDispatcher from app.core.exceptions import AppHTTPException +from app.domains.chatbot.models import AttendanceClient, AttendanceCompany from app.domains.chatbot.repositories.chatbot_repository import ChatbotRepository -from app.domains.chatbot.schemas import AttendanceClient, AttendanceCompany, TriageInputDTO +from app.domains.chatbot.schemas import TriageInputDTO from 
app.domains.chatbot.services.chatbot_service import ChatbotService -from app.domains.ticket.models import Ticket, TicketStatus @pytest_asyncio.fixture(autouse=True) @@ -19,17 +21,16 @@ async def cleanup_collections( mongo_db_conn: AsyncIOMotorDatabase[dict[str, Any]], ) -> AsyncGenerator[None, None]: await mongo_db_conn["atendimentos"].delete_many({}) - await Ticket.delete_all() yield await mongo_db_conn["atendimentos"].delete_many({}) - await Ticket.delete_all() class TestChatbotService: @pytest.fixture def service(self, mongo_db_conn: AsyncIOMotorDatabase[dict[str, Any]]) -> ChatbotService: repo = ChatbotRepository(mongo_db_conn) - return ChatbotService(repo) + dispatcher = AsyncMock(spec=EventDispatcher) + return ChatbotService(repo, dispatcher) @pytest.mark.asyncio async def test_create_attendance_persists_expected_base_model( @@ -44,10 +45,14 @@ async def test_create_attendance_persists_expected_base_model( ) created = await service.create_attendance(client) - stored = await service.repository.find_attendance(created["triage_id"]) + stored = await service.repository.find_attendance(created.triage_id) + + assert created.step_id == "step_a" + assert created.input is not None + assert created.input.mode == "quick_replies" assert stored is not None - assert str(stored["_id"]) == created["triage_id"] + assert str(stored["_id"]) == created.triage_id assert stored["status"] == "opened" assert isinstance(stored["start_date"], str) assert stored["end_date"] is None @@ -56,7 +61,10 @@ async def test_create_attendance_persists_expected_base_model( assert stored["client"]["company"]["name"] == "Tech Solutions" assert stored["result"] is None assert stored["evaluation"] is None - assert stored["triage"] == [] + assert len(stored["triage"]) == 1 + assert stored["triage"][0]["step"] == "A" + assert stored["triage"][0]["answer_text"] is None + assert stored["triage"][0]["answer_value"] is None @pytest.mark.asyncio async def 
test_process_message_bootstraps_attendance_for_unknown_triage_id( @@ -77,10 +85,10 @@ async def test_process_message_bootstraps_attendance_for_unknown_triage_id( response = await service.process_message(payload) stored = await service.repository.find_attendance(triage_id) - assert response.data.triage_id == triage_id - assert response.data.step_id == "step_a" - assert response.data.input is not None - assert response.data.input.mode == "quick_replies" + assert response.triage_id == triage_id + assert response.step_id == "step_a" + assert response.input is not None + assert response.input.mode == "quick_replies" assert stored is not None assert str(stored["_id"]) == triage_id @@ -113,7 +121,7 @@ async def test_process_message_unknown_triage_without_client_payload_returns_422 assert "triage_id was not found" in str(exc_info.value.detail) @pytest.mark.asyncio - async def test_process_message_flow_updates_triage_answers_and_creates_ticket( + async def test_process_message_flow_updates_triage_answers_and_finishes( self, service: ChatbotService, ) -> None: @@ -159,7 +167,7 @@ async def test_process_message_flow_updates_triage_answers_and_creates_ticket( ) ) - # 4) Answer F -> ticket created and finished payload + # 4) Answer F -> finalizado pelo ChatbotService (criação do ticket fica a cargo do event bus) final_response = await service.process_message( TriageInputDTO( triage_id=triage_id, @@ -177,14 +185,14 @@ async def test_process_message_flow_updates_triage_answers_and_creates_ticket( stored = await service.repository.find_attendance(triage_id) - assert final_response.data.finished is True - assert final_response.data.result is not None - assert final_response.data.result.type == "Ticket" - assert final_response.data.result.id - assert final_response.data.closure_message is not None + assert final_response.finished is True + assert final_response.result is not None + assert final_response.result.type == "Ticket" + assert final_response.closure_message is not None 
assert stored is not None - assert len(stored["triage"]) == 4 + assert stored["status"] == "finished" + assert stored["result"]["type"] == "Ticket" assert stored["triage"][0]["step"] == "A" assert stored["triage"][0]["answer_value"] == "1" assert stored["triage"][1]["step"] == "B" @@ -192,9 +200,3 @@ async def test_process_message_flow_updates_triage_answers_and_creates_ticket( assert stored["triage"][2]["step"] == "F" assert stored["triage"][2]["type"] == "free_text" assert "freezes" in stored["triage"][2]["answer_text"] - - ticket = await Ticket.get(PydanticObjectId(final_response.data.result.id)) - assert ticket is not None - assert ticket.status == TicketStatus.OPEN - assert str(ticket.triage_id) == triage_id - assert ticket.client.name == "John Silva" diff --git a/tests/app/integration/domains/companies/test_company_repository.py b/tests/app/integration/domains/companies/test_company_repository.py new file mode 100644 index 0000000..c90fb14 --- /dev/null +++ b/tests/app/integration/domains/companies/test_company_repository.py @@ -0,0 +1,463 @@ +from uuid import UUID, uuid4 + +import pytest +from pydantic import ValidationError +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.db.exceptions import ResourceAlreadyExistsError +from app.domains.auth.models import Role as RoleModel +from app.domains.auth.models import User as UserModel +from app.domains.auth.models import user_roles +from app.domains.companies.repositories import CompanyRepository +from app.domains.companies.schemas import ( + CreateCompanyDTO, + ReplaceCompanyDTO, + UpdateCompanyDTO, +) +from app.domains.products.models import Product as ProductModel + + +def _tax_id() -> str: + return uuid4().hex[:14] + + +def _legal_name(prefix: str = "Company") -> str: + return f"{prefix} {uuid4().hex[:8]} LTDA" + + +async def _make_user( + db: AsyncSession, *, company_id: UUID | None = None, email: str | None = None +) -> UserModel: + user = UserModel( + email=email or 
f"u_{uuid4().hex[:8]}@example.com", + password_hash="hash", + company_id=company_id, + ) + db.add(user) + await db.commit() + await db.refresh(user) + return user + + +async def _make_product(db: AsyncSession, *, name: str | None = None) -> ProductModel: + product = ProductModel(name=name or f"P {uuid4().hex[:8]}", description="seed") + db.add(product) + await db.commit() + await db.refresh(product) + return product + + +class TestCompanyDTOs: + def test_create_company_normalizes_tax_id_stripping_punctuation(self) -> None: + dto = CreateCompanyDTO( + legal_name="Acme LTDA", + trade_name="Acme", + tax_id="12.345.678/0001-90", + ) + assert dto.tax_id == "12345678000190" + + def test_create_company_with_short_legal_name_fails(self) -> None: + with pytest.raises(ValidationError): + CreateCompanyDTO(legal_name="Ab", trade_name="Acme", tax_id=_tax_id()) + + def test_create_company_with_short_tax_id_fails(self) -> None: + with pytest.raises(ValidationError): + CreateCompanyDTO(legal_name=_legal_name(), trade_name="Acme", tax_id="123") + + def test_create_company_with_long_tax_id_fails(self) -> None: + with pytest.raises(ValidationError): + CreateCompanyDTO( + legal_name=_legal_name(), trade_name="Acme", tax_id="1" * 20 + ) + + def test_update_company_with_all_none_fails(self) -> None: + with pytest.raises(ValidationError): + UpdateCompanyDTO() + + def test_update_company_with_single_field_succeeds(self) -> None: + dto = UpdateCompanyDTO(trade_name="New Trade") + assert dto.trade_name == "New Trade" + assert dto.legal_name is None + assert dto.tax_id is None + + +class TestCompanyRepository: + @pytest.fixture + def company_repo(self, db_session: AsyncSession) -> CompanyRepository: + return CompanyRepository(db=db_session) + + @pytest.fixture + async def company(self, company_repo: CompanyRepository) -> object: + return await company_repo.create( + CreateCompanyDTO( + legal_name=_legal_name(), trade_name="Acme", tax_id=_tax_id() + ) + ) + + # ── create 
──────────────────────────────────────────────────────── + + @pytest.mark.asyncio + async def test_create_company_success(self, company_repo: CompanyRepository) -> None: + dto = CreateCompanyDTO( + legal_name=_legal_name(), trade_name="Acme", tax_id=_tax_id() + ) + company = await company_repo.create(dto) + assert company.id is not None + assert company.legal_name == dto.legal_name + assert company.tax_id == dto.tax_id + assert company.trade_name == "Acme" + assert company.created_at is not None + + @pytest.mark.asyncio + async def test_create_with_duplicate_tax_id_raises( + self, company_repo: CompanyRepository + ) -> None: + tax_id = _tax_id() + await company_repo.create( + CreateCompanyDTO(legal_name=_legal_name("A"), trade_name="Acme", tax_id=tax_id) + ) + with pytest.raises(ResourceAlreadyExistsError): + await company_repo.create( + CreateCompanyDTO( + legal_name=_legal_name("B"), trade_name="Beta", tax_id=tax_id + ) + ) + + @pytest.mark.asyncio + async def test_create_with_duplicate_legal_name_raises( + self, company_repo: CompanyRepository + ) -> None: + legal_name = _legal_name() + await company_repo.create( + CreateCompanyDTO(legal_name=legal_name, trade_name="Acme", tax_id=_tax_id()) + ) + with pytest.raises(ResourceAlreadyExistsError): + await company_repo.create( + CreateCompanyDTO( + legal_name=legal_name, trade_name="Beta", tax_id=_tax_id() + ) + ) + + # ── get_by_id ───────────────────────────────────────────────────── + + @pytest.mark.asyncio + async def test_get_by_id_returns_none_when_not_found( + self, company_repo: CompanyRepository + ) -> None: + assert await company_repo.get_by_id(uuid4()) is None + + @pytest.mark.asyncio + async def test_get_by_id_returns_none_for_soft_deleted( + self, company_repo: CompanyRepository, company: object + ) -> None: + assert await company_repo.soft_delete(company.id) is True # type: ignore[attr-defined] + assert await company_repo.get_by_id(company.id) is None # type: ignore[attr-defined] + + # ── 
get_all_paginated ───────────────────────────────────────────── + + @pytest.mark.asyncio + async def test_get_all_paginated_returns_total_and_items( + self, company_repo: CompanyRepository + ) -> None: + for _ in range(3): + await company_repo.create( + CreateCompanyDTO( + legal_name=_legal_name(), trade_name="Acme", tax_id=_tax_id() + ) + ) + result = await company_repo.get_all_paginated(skip=0, limit=10) + assert result.total == 3 + assert len(result.items) == 3 + assert result.page == 1 + assert result.limit == 10 + + @pytest.mark.asyncio + async def test_get_all_paginated_excludes_soft_deleted( + self, company_repo: CompanyRepository + ) -> None: + kept = await company_repo.create( + CreateCompanyDTO( + legal_name=_legal_name("kept"), trade_name="Kept", tax_id=_tax_id() + ) + ) + deleted = await company_repo.create( + CreateCompanyDTO( + legal_name=_legal_name("del"), trade_name="Del", tax_id=_tax_id() + ) + ) + await company_repo.soft_delete(deleted.id) + + result = await company_repo.get_all_paginated(skip=0, limit=10) + assert result.total == 1 + assert [c.id for c in result.items] == [kept.id] + + @pytest.mark.asyncio + async def test_get_all_paginated_pagination_skip_and_limit( + self, company_repo: CompanyRepository + ) -> None: + for _ in range(5): + await company_repo.create( + CreateCompanyDTO( + legal_name=_legal_name(), trade_name="Acme", tax_id=_tax_id() + ) + ) + page = await company_repo.get_all_paginated(skip=2, limit=2) + assert page.total == 5 + assert len(page.items) == 2 + assert page.page == 2 + assert page.limit == 2 + + # ── update ──────────────────────────────────────────────────────── + + @pytest.mark.asyncio + async def test_update_partial_fields( + self, company_repo: CompanyRepository, company: object + ) -> None: + updated = await company_repo.update( + company.id, UpdateCompanyDTO(trade_name="Renamed") # type: ignore[attr-defined] + ) + assert updated is not None + assert updated.trade_name == "Renamed" + assert updated.legal_name 
== company.legal_name # type: ignore[attr-defined] + assert updated.tax_id == company.tax_id # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_replace_company_overwrites_fields( + self, company_repo: CompanyRepository, company: object + ) -> None: + new_name = _legal_name("replaced") + new_tax = _tax_id() + replaced = await company_repo.update( + company.id, # type: ignore[attr-defined] + ReplaceCompanyDTO(legal_name=new_name, trade_name="Replaced", tax_id=new_tax), + ) + assert replaced is not None + assert replaced.legal_name == new_name + assert replaced.tax_id == new_tax + assert replaced.trade_name == "Replaced" + + @pytest.mark.asyncio + async def test_update_returns_none_when_not_found( + self, company_repo: CompanyRepository + ) -> None: + result = await company_repo.update( + uuid4(), UpdateCompanyDTO(trade_name="Nope") + ) + assert result is None + + @pytest.mark.asyncio + async def test_update_with_duplicate_tax_id_raises( + self, company_repo: CompanyRepository + ) -> None: + existing_tax = _tax_id() + await company_repo.create( + CreateCompanyDTO( + legal_name=_legal_name("A"), trade_name="Acme", tax_id=existing_tax + ) + ) + target = await company_repo.create( + CreateCompanyDTO( + legal_name=_legal_name("B"), trade_name="Beta", tax_id=_tax_id() + ) + ) + with pytest.raises(ResourceAlreadyExistsError): + await company_repo.update( + target.id, UpdateCompanyDTO(tax_id=existing_tax) + ) + + # ── soft_delete ─────────────────────────────────────────────────── + + @pytest.mark.asyncio + async def test_soft_delete_returns_true_first_time( + self, company_repo: CompanyRepository, company: object + ) -> None: + assert await company_repo.soft_delete(company.id) is True # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_soft_delete_already_deleted_returns_false( + self, company_repo: CompanyRepository, company: object + ) -> None: + await company_repo.soft_delete(company.id) # type: ignore[attr-defined] + assert await 
company_repo.soft_delete(company.id) is False # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_soft_delete_unknown_id_returns_false( + self, company_repo: CompanyRepository + ) -> None: + assert await company_repo.soft_delete(uuid4()) is False + + # ── associate_users / disassociate_users ────────────────────────── + + @pytest.mark.asyncio + async def test_associate_users_sets_company_id( + self, + company_repo: CompanyRepository, + db_session: AsyncSession, + company: object, + ) -> None: + user = await _make_user(db_session) + await company_repo.associate_users(company.id, [user.id]) # type: ignore[attr-defined] + + result = await db_session.execute( + select(UserModel).where(UserModel.id == user.id) + ) + refreshed = result.scalar_one() + assert refreshed.company_id == company.id # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_associate_users_with_empty_list_is_noop( + self, company_repo: CompanyRepository, company: object + ) -> None: + # Não deve levantar nem persistir nada + await company_repo.associate_users(company.id, []) # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_disassociate_users_clears_only_company_users( + self, + company_repo: CompanyRepository, + db_session: AsyncSession, + company: object, + ) -> None: + other_company = await company_repo.create( + CreateCompanyDTO( + legal_name=_legal_name("other"), trade_name="Other", tax_id=_tax_id() + ) + ) + own_user = await _make_user(db_session, company_id=company.id) # type: ignore[attr-defined] + outsider = await _make_user(db_session, company_id=other_company.id) + + await company_repo.disassociate_users( + company.id, [own_user.id, outsider.id] # type: ignore[attr-defined] + ) + + result = await db_session.execute( + select(UserModel).where(UserModel.id.in_([own_user.id, outsider.id])) + ) + users_by_id = {u.id: u for u in result.scalars().all()} + assert users_by_id[own_user.id].company_id is None + assert 
users_by_id[outsider.id].company_id == other_company.id + + # ── get_company_users_paginated ─────────────────────────────────── + + @pytest.mark.asyncio + async def test_get_company_users_paginated_loads_roles_eagerly( + self, + company_repo: CompanyRepository, + db_session: AsyncSession, + company: object, + ) -> None: + role = RoleModel(name=f"role_{uuid4().hex[:8]}") + db_session.add(role) + await db_session.commit() + await db_session.refresh(role) + + user = await _make_user(db_session, company_id=company.id) # type: ignore[attr-defined] + await db_session.execute( + user_roles.insert().values(user_id=user.id, role_id=role.id) + ) + await db_session.commit() + + users, total = await company_repo.get_company_users_paginated( + company.id, skip=0, limit=10 # type: ignore[attr-defined] + ) + assert total == 1 + assert len(users) == 1 + # Acesso a .roles não pode disparar lazy load (regressão do MissingGreenlet) + assert len(users[0].roles) == 1 + assert users[0].roles[0].id == role.id + + @pytest.mark.asyncio + async def test_get_company_users_paginated_excludes_other_companies( + self, + company_repo: CompanyRepository, + db_session: AsyncSession, + company: object, + ) -> None: + other_company = await company_repo.create( + CreateCompanyDTO( + legal_name=_legal_name("other"), trade_name="Other", tax_id=_tax_id() + ) + ) + own_user = await _make_user(db_session, company_id=company.id) # type: ignore[attr-defined] + await _make_user(db_session, company_id=other_company.id) + + users, total = await company_repo.get_company_users_paginated( + company.id, skip=0, limit=10 # type: ignore[attr-defined] + ) + assert total == 1 + assert [u.id for u in users] == [own_user.id] + + # ── add_products / remove_products ──────────────────────────────── + + @pytest.mark.asyncio + async def test_add_products_creates_relationship( + self, + company_repo: CompanyRepository, + db_session: AsyncSession, + company: object, + ) -> None: + product = await _make_product(db_session) + 
await company_repo.add_products(company.id, [product.id]) # type: ignore[attr-defined] + + from app.domains.companies.models import company_products + + result = await db_session.execute( + select(company_products).where( + company_products.c.company_id == company.id, # type: ignore[attr-defined] + company_products.c.product_id == product.id, + ) + ) + assert result.first() is not None + + @pytest.mark.asyncio + async def test_add_products_idempotent_on_duplicate( + self, + company_repo: CompanyRepository, + db_session: AsyncSession, + company: object, + ) -> None: + product = await _make_product(db_session) + await company_repo.add_products(company.id, [product.id]) # type: ignore[attr-defined] + # Segunda chamada não deve falhar (on_conflict_do_nothing) + await company_repo.add_products(company.id, [product.id]) # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_add_products_with_unknown_id_raises_value_error( + self, company_repo: CompanyRepository, company: object + ) -> None: + with pytest.raises(ValueError, match="product_ids"): + await company_repo.add_products(company.id, [9_999_999]) # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_add_products_with_empty_list_is_noop( + self, company_repo: CompanyRepository, company: object + ) -> None: + await company_repo.add_products(company.id, []) # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_remove_products_success( + self, + company_repo: CompanyRepository, + db_session: AsyncSession, + company: object, + ) -> None: + from app.domains.companies.models import company_products + + product = await _make_product(db_session) + await company_repo.add_products(company.id, [product.id]) # type: ignore[attr-defined] + await company_repo.remove_products(company.id, [product.id]) # type: ignore[attr-defined] + + result = await db_session.execute( + select(company_products).where( + company_products.c.company_id == company.id, # type: ignore[attr-defined] + 
company_products.c.product_id == product.id, + ) + ) + assert result.first() is None + + @pytest.mark.asyncio + async def test_remove_products_with_empty_list_is_noop( + self, company_repo: CompanyRepository, company: object + ) -> None: + await company_repo.remove_products(company.id, []) # type: ignore[attr-defined] diff --git a/tests/app/integration/domains/live_chat/test_conversation_listener.py b/tests/app/integration/domains/live_chat/test_conversation_listener.py new file mode 100644 index 0000000..6572adb --- /dev/null +++ b/tests/app/integration/domains/live_chat/test_conversation_listener.py @@ -0,0 +1,627 @@ +from typing import Any +from uuid import UUID, uuid4 + +import pytest +import pytest_asyncio +from beanie import PydanticObjectId +from motor.motor_asyncio import AsyncIOMotorDatabase + +from app.core.event_dispatcher.schemas import ( + TicketAssigneeUpdatedEventSchema, + TicketClosedEventSchema, + TicketCreatedEventSchema, + TicketEscalatedEventSchema, + TicketStatusUpdatedEventSchema, +) +from app.domains.live_chat.entities import Conversation +from app.domains.live_chat.listeners import ConversationListener +from app.domains.live_chat.repositories.conversation_repository import ConversationRepository +from app.domains.live_chat.services.conversation_service import ConversationService +from app.domains.ticket.models import TicketStatus + + +@pytest_asyncio.fixture(autouse=True) +async def cleanup_conversation_collection(): + await Conversation.delete_all() + yield + await Conversation.delete_all() + + +@pytest.fixture +def listener( + mongo_db_conn: AsyncIOMotorDatabase[dict[str, Any]], +) -> ConversationListener: + repo = ConversationRepository(mongo_db_conn) + service = ConversationService(repo) + return ConversationListener(service) + + +TICKET_ID = PydanticObjectId() +CLIENT_ID = uuid4() +AGENT_ID = uuid4() + + +def _ticket_created_schema( + ticket_id: PydanticObjectId = TICKET_ID, + client_id: UUID = CLIENT_ID, + agent_id: UUID | None = 
AGENT_ID, +) -> TicketCreatedEventSchema: + return TicketCreatedEventSchema( + ticket_id=ticket_id, + client_id=client_id, + agent_id=agent_id, + ) + + + +class TestOnTicketCreated: + + @pytest.mark.asyncio + async def test_creates_conversation(self, listener: ConversationListener) -> None: + ticket_id = PydanticObjectId() + schema = _ticket_created_schema(ticket_id=ticket_id) + + await listener.on_ticket_created(schema) + + conv = await listener.service.get_last_conversation_from_ticket(ticket_id) + assert conv is not None + assert conv.ticket_id == ticket_id + assert conv.client_id == schema.client_id + assert conv.agent_id == schema.agent_id + assert conv.sequential_index == 0 + assert conv.is_opened() + + @pytest.mark.asyncio + async def test_idempotent_does_not_duplicate(self, listener: ConversationListener) -> None: + ticket_id = PydanticObjectId() + schema = _ticket_created_schema(ticket_id=ticket_id) + + await listener.on_ticket_created(schema) + await listener.on_ticket_created(schema) + + convs = await listener.service.get_chats_from_ticket(ticket_id) + assert len(convs) == 1 + + @pytest.mark.asyncio + async def test_without_agent(self, listener: ConversationListener) -> None: + ticket_id = PydanticObjectId() + schema = _ticket_created_schema(ticket_id=ticket_id, agent_id=None) + + await listener.on_ticket_created(schema) + + conv = await listener.service.get_last_conversation_from_ticket(ticket_id) + assert conv is not None + assert conv.agent_id is None + + + +class TestOnTicketAssigneeUpdated: + + @pytest.mark.asyncio + async def test_closes_old_and_opens_new_conversation(self, listener: ConversationListener) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + await listener.on_ticket_created( + _ticket_created_schema(ticket_id=ticket_id, client_id=client_id) + ) + + new_agent = uuid4() + await listener.on_ticket_assignee_updated( + TicketAssigneeUpdatedEventSchema( + ticket_id=ticket_id, + client_id=client_id, + new_agent_id=new_agent, + 
) + ) + + convs = await listener.service.get_chats_from_ticket(ticket_id) + assert len(convs) == 2 + + old_conv = convs[0] + new_conv = convs[1] + + assert not old_conv.is_opened() + assert new_conv.is_opened() + assert new_conv.agent_id == new_agent + assert new_conv.sequential_index == 1 + assert new_conv.parent_id == old_conv.id + + @pytest.mark.asyncio + async def test_posts_closing_message_to_old_conversation(self, listener: ConversationListener) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + await listener.on_ticket_created( + _ticket_created_schema(ticket_id=ticket_id, client_id=client_id) + ) + + await listener.on_ticket_assignee_updated( + TicketAssigneeUpdatedEventSchema( + ticket_id=ticket_id, + client_id=client_id, + new_agent_id=uuid4(), + ) + ) + + convs = await listener.service.get_chats_from_ticket(ticket_id) + assert convs[0].id is not None + old_conv = await listener.service.get_by_id(convs[0].id) + assert old_conv is not None + assert len(old_conv.messages) == 1 + assert old_conv.messages[0].sender_id == "System" + assert "transferido" in old_conv.messages[0].content + + @pytest.mark.asyncio + async def test_populates_children_ids_on_parent(self, listener: ConversationListener) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + await listener.on_ticket_created( + _ticket_created_schema(ticket_id=ticket_id, client_id=client_id) + ) + + await listener.on_ticket_assignee_updated( + TicketAssigneeUpdatedEventSchema( + ticket_id=ticket_id, + client_id=client_id, + new_agent_id=uuid4(), + ) + ) + + convs = await listener.service.get_chats_from_ticket(ticket_id) + assert convs[0].id is not None + parent = await listener.service.get_by_id(convs[0].id) + assert parent is not None + assert convs[1].id in parent.children_ids + + @pytest.mark.asyncio + async def test_multiple_consecutive_assignee_changes(self, listener: ConversationListener) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + agent_1 = uuid4() 
+ agent_2 = uuid4() + agent_3 = uuid4() + + await listener.on_ticket_created( + _ticket_created_schema(ticket_id=ticket_id, client_id=client_id, agent_id=agent_1) + ) + + await listener.on_ticket_assignee_updated( + TicketAssigneeUpdatedEventSchema(ticket_id=ticket_id, client_id=client_id, new_agent_id=agent_2) + ) + await listener.on_ticket_assignee_updated( + TicketAssigneeUpdatedEventSchema(ticket_id=ticket_id, client_id=client_id, new_agent_id=agent_3) + ) + + convs = await listener.service.get_chats_from_ticket(ticket_id) + assert len(convs) == 3 + + assert not convs[0].is_opened() + assert convs[0].agent_id == agent_1 + assert convs[0].sequential_index == 0 + + assert not convs[1].is_opened() + assert convs[1].agent_id == agent_2 + assert convs[1].sequential_index == 1 + assert convs[1].parent_id == convs[0].id + + assert convs[2].is_opened() + assert convs[2].agent_id == agent_3 + assert convs[2].sequential_index == 2 + assert convs[2].parent_id == convs[1].id + + # children_ids chain + assert convs[0].id is not None + assert convs[1].id is not None + c0 = await listener.service.get_by_id(convs[0].id) + c1 = await listener.service.get_by_id(convs[1].id) + assert c0 is not None + assert convs[1].id in c0.children_ids + assert c1 is not None + assert convs[2].id in c1.children_ids + + @pytest.mark.asyncio + async def test_no_previous_conversation_creates_first(self, listener: ConversationListener) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + + await listener.on_ticket_assignee_updated( + TicketAssigneeUpdatedEventSchema( + ticket_id=ticket_id, + client_id=client_id, + new_agent_id=uuid4(), + ) + ) + + conv = await listener.service.get_last_conversation_from_ticket(ticket_id) + assert conv is not None + assert conv.sequential_index == 0 + assert conv.parent_id is None + + + +class TestOnTicketEscalated: + + @pytest.mark.asyncio + async def test_closes_old_and_opens_new_conversation(self, listener: ConversationListener) -> None: + ticket_id 
= PydanticObjectId() + client_id = uuid4() + await listener.on_ticket_created( + _ticket_created_schema(ticket_id=ticket_id, client_id=client_id) + ) + + new_agent = uuid4() + await listener.on_ticket_escalated( + TicketEscalatedEventSchema( + ticket_id=ticket_id, + client_id=client_id, + new_agent_id=new_agent, + new_agent_name="Senior Agent", + new_level="L2", + ) + ) + + convs = await listener.service.get_chats_from_ticket(ticket_id) + assert len(convs) == 2 + + old_conv = convs[0] + new_conv = convs[1] + + assert not old_conv.is_opened() + assert new_conv.is_opened() + assert new_conv.agent_id == new_agent + assert new_conv.sequential_index == 1 + + @pytest.mark.asyncio + async def test_posts_escalation_message_to_both_conversations(self, listener: ConversationListener) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + await listener.on_ticket_created( + _ticket_created_schema(ticket_id=ticket_id, client_id=client_id) + ) + + await listener.on_ticket_escalated( + TicketEscalatedEventSchema( + ticket_id=ticket_id, + client_id=client_id, + new_agent_id=uuid4(), + new_agent_name="Senior Agent", + new_level="L2", + ) + ) + + convs = await listener.service.get_chats_from_ticket(ticket_id) + assert convs[0].id is not None + assert convs[1].id is not None + old_conv = await listener.service.get_by_id(convs[0].id) + new_conv = await listener.service.get_by_id(convs[1].id) + + assert old_conv is not None + assert len(old_conv.messages) == 1 + assert "escalonado" in old_conv.messages[0].content + assert "L2" in old_conv.messages[0].content + assert "Senior Agent" in old_conv.messages[0].content + + assert new_conv is not None + assert len(new_conv.messages) == 1 + assert "escalonado" in new_conv.messages[0].content + + @pytest.mark.asyncio + async def test_escalation_without_agent_name_shows_pending(self, listener: ConversationListener) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + await listener.on_ticket_created( + 
_ticket_created_schema(ticket_id=ticket_id, client_id=client_id) + ) + + await listener.on_ticket_escalated( + TicketEscalatedEventSchema( + ticket_id=ticket_id, + client_id=client_id, + new_agent_id=None, + new_agent_name=None, + new_level="L3", + ) + ) + + convs = await listener.service.get_chats_from_ticket(ticket_id) + assert convs[0].id is not None + old_conv = await listener.service.get_by_id(convs[0].id) + assert old_conv is not None + assert "agente pendente" in old_conv.messages[0].content + + @pytest.mark.asyncio + async def test_no_previous_conversation_creates_first(self, listener: ConversationListener) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + + await listener.on_ticket_escalated( + TicketEscalatedEventSchema( + ticket_id=ticket_id, + client_id=client_id, + new_agent_id=uuid4(), + new_agent_name="Agent", + new_level="L2", + ) + ) + + conv = await listener.service.get_last_conversation_from_ticket(ticket_id) + assert conv is not None + assert conv.sequential_index == 0 + assert conv.parent_id is None + + @pytest.mark.asyncio + async def test_populates_children_ids_on_parent(self, listener: ConversationListener) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + await listener.on_ticket_created( + _ticket_created_schema(ticket_id=ticket_id, client_id=client_id) + ) + + await listener.on_ticket_escalated( + TicketEscalatedEventSchema( + ticket_id=ticket_id, + client_id=client_id, + new_agent_id=uuid4(), + new_agent_name="Agent", + new_level="L2", + ) + ) + + convs = await listener.service.get_chats_from_ticket(ticket_id) + assert convs[0].id is not None + parent = await listener.service.get_by_id(convs[0].id) + assert parent is not None + assert convs[1].id in parent.children_ids + + +class TestOnTicketStatusUpdated: + + @pytest.mark.asyncio + async def test_posts_status_message(self, listener: ConversationListener) -> None: + ticket_id = PydanticObjectId() + await listener.on_ticket_created( + 
_ticket_created_schema(ticket_id=ticket_id) + ) + + await listener.on_ticket_status_updated( + TicketStatusUpdatedEventSchema( + ticket_id=ticket_id, + new_status=TicketStatus.IN_PROGRESS, + ) + ) + + conv = await listener.service.get_last_conversation_from_ticket(ticket_id) + assert conv is not None + assert conv.id is not None + conv_full = await listener.service.get_by_id(conv.id) + assert conv_full is not None + assert len(conv_full.messages) == 1 + assert conv_full.messages[0].sender_id == "System" + assert "in_progress" in conv_full.messages[0].content + + @pytest.mark.asyncio + async def test_no_conversation_is_noop(self, listener: ConversationListener) -> None: + await listener.on_ticket_status_updated( + TicketStatusUpdatedEventSchema( + ticket_id=PydanticObjectId(), + new_status=TicketStatus.OPEN, + ) + ) + # no exception raised + + @pytest.mark.asyncio + async def test_multiple_status_updates_append_messages(self, listener: ConversationListener) -> None: + ticket_id = PydanticObjectId() + await listener.on_ticket_created( + _ticket_created_schema(ticket_id=ticket_id) + ) + + await listener.on_ticket_status_updated( + TicketStatusUpdatedEventSchema(ticket_id=ticket_id, new_status=TicketStatus.IN_PROGRESS) + ) + await listener.on_ticket_status_updated( + TicketStatusUpdatedEventSchema(ticket_id=ticket_id, new_status=TicketStatus.WAITING_FOR_PROVIDER) + ) + + conv = await listener.service.get_last_conversation_from_ticket(ticket_id) + assert conv is not None + assert conv.id is not None + conv_full = await listener.service.get_by_id(conv.id) + assert conv_full is not None + assert len(conv_full.messages) == 2 + assert "in_progress" in conv_full.messages[0].content + assert "waiting_for_provider" in conv_full.messages[1].content + + @pytest.mark.asyncio + async def test_status_update_on_closed_conversation(self, listener: ConversationListener) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + await listener.on_ticket_created( + 
_ticket_created_schema(ticket_id=ticket_id, client_id=client_id) + ) + + await listener.on_ticket_closed( + TicketClosedEventSchema( + ticket_id=ticket_id, + triage_id=PydanticObjectId(), + client_id=client_id, + ) + ) + + # late status event arrives after close + await listener.on_ticket_status_updated( + TicketStatusUpdatedEventSchema(ticket_id=ticket_id, new_status=TicketStatus.FINISHED) + ) + + conv = await listener.service.get_last_conversation_from_ticket(ticket_id) + assert conv is not None + assert conv.id is not None + assert not conv.is_opened() + conv_full = await listener.service.get_by_id(conv.id) + assert conv_full is not None + assert len(conv_full.messages) == 2 + assert "encerrado" in conv_full.messages[0].content + assert "finished" in conv_full.messages[1].content + + +class TestOnTicketClosed: + + @pytest.mark.asyncio + async def test_closes_conversation_with_message(self, listener: ConversationListener) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + await listener.on_ticket_created( + _ticket_created_schema(ticket_id=ticket_id, client_id=client_id) + ) + + await listener.on_ticket_closed( + TicketClosedEventSchema( + ticket_id=ticket_id, + triage_id=PydanticObjectId(), + client_id=client_id, + ) + ) + + conv = await listener.service.get_last_conversation_from_ticket(ticket_id) + assert conv is not None + assert not conv.is_opened() + assert conv.finished_at is not None + assert conv.id is not None + + conv_full = await listener.service.get_by_id(conv.id) + assert conv_full is not None + assert len(conv_full.messages) == 1 + assert "encerrado" in conv_full.messages[0].content + + @pytest.mark.asyncio + async def test_no_conversation_is_noop(self, listener: ConversationListener) -> None: + await listener.on_ticket_closed( + TicketClosedEventSchema( + ticket_id=PydanticObjectId(), + triage_id=PydanticObjectId(), + client_id=uuid4(), + ) + ) + # no exception raised + + @pytest.mark.asyncio + async def 
test_double_close_does_not_duplicate_message(self, listener: ConversationListener) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + await listener.on_ticket_created( + _ticket_created_schema(ticket_id=ticket_id, client_id=client_id) + ) + + close_schema = TicketClosedEventSchema( + ticket_id=ticket_id, + triage_id=PydanticObjectId(), + client_id=client_id, + ) + + await listener.on_ticket_closed(close_schema) + await listener.on_ticket_closed(close_schema) + + conv = await listener.service.get_last_conversation_from_ticket(ticket_id) + assert conv is not None + assert conv.id is not None + assert not conv.is_opened() + conv_full = await listener.service.get_by_id(conv.id) + assert conv_full is not None + assert len(conv_full.messages) == 1 + assert "encerrado" in conv_full.messages[0].content + + +class TestFullFlow: + + @pytest.mark.asyncio + async def test_ticket_lifecycle(self, listener: ConversationListener) -> None: + """triage -> created -> status update -> assignee change -> escalation -> close""" + ticket_id = PydanticObjectId() + client_id = uuid4() + agent_1 = uuid4() + agent_2 = uuid4() + agent_3 = uuid4() + + # 1. ticket created + await listener.on_ticket_created( + _ticket_created_schema(ticket_id=ticket_id, client_id=client_id, agent_id=agent_1) + ) + + # 2. status update + await listener.on_ticket_status_updated( + TicketStatusUpdatedEventSchema(ticket_id=ticket_id, new_status=TicketStatus.IN_PROGRESS) + ) + + # 3. assignee change + await listener.on_ticket_assignee_updated( + TicketAssigneeUpdatedEventSchema( + ticket_id=ticket_id, client_id=client_id, new_agent_id=agent_2 + ) + ) + + # 4. escalation + await listener.on_ticket_escalated( + TicketEscalatedEventSchema( + ticket_id=ticket_id, + client_id=client_id, + new_agent_id=agent_3, + new_agent_name="L2 Agent", + new_level="L2", + ) + ) + + # 5. 
close + await listener.on_ticket_closed( + TicketClosedEventSchema( + ticket_id=ticket_id, triage_id=PydanticObjectId(), client_id=client_id + ) + ) + + convs = await listener.service.get_chats_from_ticket(ticket_id) + assert len(convs) == 3 + + # conv 0: agent_1, closed by assignee change + assert convs[0].id is not None + c0 = await listener.service.get_by_id(convs[0].id) + assert c0 is not None + assert c0.agent_id == agent_1 + assert not c0.is_opened() + assert c0.sequential_index == 0 + # messages: status update + transfer closing + assert len(c0.messages) == 2 + assert "in_progress" in c0.messages[0].content + assert "transferido" in c0.messages[1].content + + # conv 1: agent_2, closed by escalation + assert convs[1].id is not None + c1 = await listener.service.get_by_id(convs[1].id) + assert c1 is not None + assert c1.agent_id == agent_2 + assert not c1.is_opened() + assert c1.sequential_index == 1 + assert c1.parent_id == c0.id + # messages: escalation closing + assert len(c1.messages) == 1 + assert "escalonado" in c1.messages[0].content + + # conv 2: agent_3, closed by ticket close + assert convs[2].id is not None + c2 = await listener.service.get_by_id(convs[2].id) + assert c2 is not None + assert c2.agent_id == agent_3 + assert not c2.is_opened() + assert c2.sequential_index == 2 + assert c2.parent_id == c1.id + # messages: escalation opening + close + assert len(c2.messages) == 2 + assert "escalonado" in c2.messages[0].content + assert "encerrado" in c2.messages[1].content + + # children_ids chain + assert c0.id is not None + assert c1.id is not None + c0_fresh = await listener.service.get_by_id(c0.id) + c1_fresh = await listener.service.get_by_id(c1.id) + assert c0_fresh is not None + assert c1.id in c0_fresh.children_ids + assert c1_fresh is not None + assert c2.id in c1_fresh.children_ids diff --git a/tests/app/integration/domains/live_chat/test_conversation_repository.py b/tests/app/integration/domains/live_chat/test_conversation_repository.py index 
b2c30f1..285dfd5 100644 --- a/tests/app/integration/domains/live_chat/test_conversation_repository.py +++ b/tests/app/integration/domains/live_chat/test_conversation_repository.py @@ -1,6 +1,6 @@ from datetime import datetime from typing import Any -from uuid import uuid4 +from uuid import UUID, uuid4 import pytest import pytest_asyncio @@ -408,3 +408,412 @@ async def test_get_by_client_id_with_finalized_and_open_conversations(self, conv assert len(convos) == 2 assert any(c.finished_at is not None for c in convos) assert any(c.finished_at is None for c in convos) + + # --- ticket_has_conversation --- + + @pytest.mark.asyncio + async def test_ticket_has_conversation_true( + self, conversation_repo: ConversationRepository + ) -> None: + await conversation_repo.create(self.create_dto) + assert await conversation_repo.ticket_has_conversation(self.create_dto.ticket_id) is True + + @pytest.mark.asyncio + async def test_ticket_has_conversation_false( + self, conversation_repo: ConversationRepository + ) -> None: + assert await conversation_repo.ticket_has_conversation(PydanticObjectId()) is False + + @pytest.mark.asyncio + async def test_ticket_has_conversation_multiple( + self, conversation_repo: ConversationRepository + ) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + for i in range(3): + await conversation_repo.create(CreateConversationDTO( + ticket_id=ticket_id, + agent_id=uuid4(), + client_id=client_id, + sequential_index=i, + )) + assert await conversation_repo.ticket_has_conversation(ticket_id) is True + + # --- get_last_by_ticket_id --- + + @pytest.mark.asyncio + async def test_get_last_by_ticket_id_single( + self, conversation_repo: ConversationRepository + ) -> None: + c = await conversation_repo.create(self.create_dto) + last = await conversation_repo.get_last_by_ticket_id(self.create_dto.ticket_id) + assert last is not None + assert last.id == c.id + assert last.sequential_index == 0 + + @pytest.mark.asyncio + async def 
test_get_last_by_ticket_id_multiple_inserted_out_of_order( + self, conversation_repo: ConversationRepository + ) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + insertion_order = [1, 0, 2] + ids: dict[int, PydanticObjectId] = {} + for idx in insertion_order: + c = await conversation_repo.create(CreateConversationDTO( + ticket_id=ticket_id, + agent_id=uuid4(), + client_id=client_id, + sequential_index=idx, + )) + assert c.id is not None + ids[idx] = c.id + + last = await conversation_repo.get_last_by_ticket_id(ticket_id) + assert last is not None + assert last.id == ids[2] + assert last.sequential_index == 2 + + @pytest.mark.asyncio + async def test_get_last_by_ticket_id_does_not_cross_tickets( + self, conversation_repo: ConversationRepository + ) -> None: + client_id = uuid4() + ticket_a = PydanticObjectId() + ticket_b = PydanticObjectId() + + await conversation_repo.create(CreateConversationDTO( + ticket_id=ticket_a, agent_id=uuid4(), client_id=client_id, sequential_index=0, + )) + await conversation_repo.create(CreateConversationDTO( + ticket_id=ticket_b, agent_id=uuid4(), client_id=client_id, sequential_index=5, + )) + + last_a = await conversation_repo.get_last_by_ticket_id(ticket_a) + assert last_a is not None + assert last_a.sequential_index == 0 + assert last_a.ticket_id == ticket_a + + @pytest.mark.asyncio + async def test_get_last_by_ticket_id_none( + self, conversation_repo: ConversationRepository + ) -> None: + last = await conversation_repo.get_last_by_ticket_id(PydanticObjectId()) + assert last is None + + # --- add_child --- + + @pytest.mark.asyncio + async def test_add_child( + self, conversation_repo: ConversationRepository + ) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + parent = await conversation_repo.create(CreateConversationDTO( + ticket_id=ticket_id, agent_id=uuid4(), client_id=client_id, + )) + assert parent.id is not None + child = await conversation_repo.create(CreateConversationDTO( + 
ticket_id=ticket_id, agent_id=uuid4(), client_id=client_id, + sequential_index=1, parent_id=parent.id, + )) + assert child.id is not None + + await conversation_repo.add_child(parent.id, child.id) + + updated_parent = await conversation_repo.get_by_id(parent.id) + assert updated_parent is not None + assert updated_parent.children_ids is not None + assert child.id in updated_parent.children_ids + + @pytest.mark.asyncio + async def test_add_child_multiple( + self, conversation_repo: ConversationRepository + ) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + parent = await conversation_repo.create(CreateConversationDTO( + ticket_id=ticket_id, agent_id=uuid4(), client_id=client_id, + )) + assert parent.id is not None + + child_ids: list[PydanticObjectId] = [] + for i in range(1, 4): + child = await conversation_repo.create(CreateConversationDTO( + ticket_id=ticket_id, agent_id=uuid4(), client_id=client_id, + sequential_index=i, parent_id=parent.id, + )) + assert child.id is not None + child_ids.append(child.id) + await conversation_repo.add_child(parent.id, child.id) + + updated_parent = await conversation_repo.get_by_id(parent.id) + assert updated_parent is not None + assert updated_parent.children_ids is not None + assert updated_parent.children_ids == child_ids + + @pytest.mark.asyncio + async def test_add_child_nonexistent_parent_is_noop( + self, conversation_repo: ConversationRepository + ) -> None: + fake_parent = PydanticObjectId() + fake_child = PydanticObjectId() + await conversation_repo.add_child(fake_parent, fake_child) + + doc = await conversation_repo.get_by_id(fake_parent) + assert doc is None + + # --- search_conversation_by_text --- + + async def _seed_conversation_with_messages( + self, + conversation_repo: ConversationRepository, + contents: list[str], + ticket_id: PydanticObjectId | None = None, + client_id: UUID | None = None, + agent_id: UUID | None = None, + sequential_index: int = 0, + ) -> Conversation: + conv = await 
conversation_repo.create( + CreateConversationDTO( + ticket_id=ticket_id or PydanticObjectId(), + agent_id=agent_id if agent_id is not None else uuid4(), + client_id=client_id or uuid4(), + sequential_index=sequential_index, + ) + ) + assert conv.id is not None + for content in contents: + await conversation_repo.add_message( + conv.id, + ChatMessage.create( + conversation_id=conv.id, + sender_id=conv.client_id, + type="text", + content=content, + ), + ) + refreshed = await conversation_repo.get_by_id(conv.id) + assert refreshed is not None + return refreshed + + @pytest.mark.asyncio + async def test_search_conversation_by_text_matches_message_content( + self, conversation_repo: ConversationRepository + ) -> None: + match = await self._seed_conversation_with_messages( + conversation_repo, ["olá, preciso de ajuda com o boleto"] + ) + await self._seed_conversation_with_messages( + conversation_repo, ["nada relacionado aqui"] + ) + + res = await conversation_repo.search_conversation_by_text("boleto") + assert len(res) == 1 + assert res[0].id == match.id + + @pytest.mark.asyncio + async def test_search_conversation_by_text_is_case_insensitive( + self, conversation_repo: ConversationRepository + ) -> None: + match = await self._seed_conversation_with_messages( + conversation_repo, ["Erro no LOGIN do sistema"] + ) + + res = await conversation_repo.search_conversation_by_text("login") + assert len(res) == 1 + assert res[0].id == match.id + + @pytest.mark.asyncio + async def test_search_conversation_by_text_escapes_regex_metacharacters( + self, conversation_repo: ConversationRepository + ) -> None: + match = await self._seed_conversation_with_messages( + conversation_repo, ["valor cobrado: R$ 1.99 (promo)"] + ) + await self._seed_conversation_with_messages( + conversation_repo, ["valor cobrado: R$ 1X99"] + ) + + res = await conversation_repo.search_conversation_by_text("1.99") + assert len(res) == 1 + assert res[0].id == match.id + + @pytest.mark.asyncio + async def 
test_search_conversation_by_text_returns_one_per_ticket( + self, conversation_repo: ConversationRepository + ) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + await self._seed_conversation_with_messages( + conversation_repo, + ["primeira menção ao reembolso"], + ticket_id=ticket_id, + client_id=client_id, + sequential_index=0, + ) + latest_match = await self._seed_conversation_with_messages( + conversation_repo, + ["nova mensagem sobre reembolso"], + ticket_id=ticket_id, + client_id=client_id, + sequential_index=1, + ) + + res = await conversation_repo.search_conversation_by_text("reembolso") + assert len(res) == 1 + assert res[0].id == latest_match.id + assert res[0].sequential_index == 1 + + @pytest.mark.asyncio + async def test_search_conversation_picks_highest_match_score_per_ticket( + self, conversation_repo: ConversationRepository + ) -> None: + ticket_id = PydanticObjectId() + client_id = uuid4() + best = await self._seed_conversation_with_messages( + conversation_repo, + [ + "primeiro contato sobre reembolso", + "ainda discutindo reembolso", + "novo pedido de reembolso registrado", + ], + ticket_id=ticket_id, + client_id=client_id, + sequential_index=0, + ) + await self._seed_conversation_with_messages( + conversation_repo, + ["apenas uma menção a reembolso aqui"], + ticket_id=ticket_id, + client_id=client_id, + sequential_index=1, + ) + + res = await conversation_repo.search_conversation_by_text("reembolso") + assert len(res) == 1 + assert res[0].id == best.id + assert res[0].sequential_index == 0 + + @pytest.mark.asyncio + async def test_search_conversation_orders_results_by_match_score( + self, conversation_repo: ConversationRepository + ) -> None: + few = await self._seed_conversation_with_messages( + conversation_repo, ["uma menção a reembolso"] + ) + many = await self._seed_conversation_with_messages( + conversation_repo, + [ + "reembolso solicitado", + "reembolso em análise", + "reembolso aprovado", + ], + ) + + res = await 
conversation_repo.search_conversation_by_text("reembolso") + assert [c.id for c in res] == [many.id, few.id] + + @pytest.mark.asyncio + async def test_search_conversation_by_text_scoped_by_client( + self, conversation_repo: ConversationRepository + ) -> None: + target_client = uuid4() + owned = await self._seed_conversation_with_messages( + conversation_repo, + ["preciso de ajuda urgente"], + client_id=target_client, + ) + await self._seed_conversation_with_messages( + conversation_repo, ["preciso de ajuda urgente"] + ) + + res = await conversation_repo.search_conversation_by_text( + "urgente", client_id=target_client + ) + assert len(res) == 1 + assert res[0].id == owned.id + assert res[0].client_id == target_client + + @pytest.mark.asyncio + async def test_search_conversation_by_text_scoped_by_agent( + self, conversation_repo: ConversationRepository + ) -> None: + target_agent = uuid4() + owned = await self._seed_conversation_with_messages( + conversation_repo, + ["cliente reclamou da fatura"], + agent_id=target_agent, + ) + await self._seed_conversation_with_messages( + conversation_repo, ["cliente reclamou da fatura"] + ) + + res = await conversation_repo.search_conversation_by_text( + "fatura", agent_id=target_agent + ) + assert len(res) == 1 + assert res[0].id == owned.id + assert res[0].agent_id == target_agent + + @pytest.mark.asyncio + async def test_search_conversation_by_text_scoped_by_agent_string_legacy( + self, conversation_repo: ConversationRepository + ) -> None: + target_agent = uuid4() + legacy = await self._seed_conversation_with_messages( + conversation_repo, ["assunto sobre integração"] + ) + assert legacy.id is not None + await Conversation.get_motor_collection().update_one( + {"_id": legacy.id}, + {"$set": {"agent_id": str(target_agent)}}, + ) + + res = await conversation_repo.search_conversation_by_text( + "integração", agent_id=target_agent + ) + assert len(res) == 1 + assert res[0].id == legacy.id + + @pytest.mark.asyncio + async def 
test_search_conversation_by_text_no_results( + self, conversation_repo: ConversationRepository + ) -> None: + await self._seed_conversation_with_messages( + conversation_repo, ["alguma mensagem qualquer"] + ) + + res = await conversation_repo.search_conversation_by_text("inexistente") + assert res == [] + + @pytest.mark.asyncio + async def test_search_conversation_by_text_skips_conversations_without_messages( + self, conversation_repo: ConversationRepository + ) -> None: + await conversation_repo.create( + CreateConversationDTO( + ticket_id=PydanticObjectId(), + agent_id=uuid4(), + client_id=uuid4(), + ) + ) + + res = await conversation_repo.search_conversation_by_text("qualquer") + assert res == [] + + @pytest.mark.asyncio + async def test_search_conversation_by_text_scope_excludes_other_users( + self, conversation_repo: ConversationRepository + ) -> None: + other_client = uuid4() + await self._seed_conversation_with_messages( + conversation_repo, + ["cobrança duplicada"], + client_id=other_client, + ) + + res = await conversation_repo.search_conversation_by_text( + "cobrança", client_id=uuid4() + ) + assert res == [] diff --git a/tests/app/integration/domains/notifications/__init__.py b/tests/app/integration/domains/notifications/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/app/integration/domains/notifications/test_email_outbox_repository.py b/tests/app/integration/domains/notifications/test_email_outbox_repository.py new file mode 100644 index 0000000..334fb7e --- /dev/null +++ b/tests/app/integration/domains/notifications/test_email_outbox_repository.py @@ -0,0 +1,285 @@ +from datetime import UTC, datetime, timedelta +from typing import Any +from uuid import UUID, uuid4 + +import pytest +from pydantic import ValidationError +from sqlalchemy.ext.asyncio import AsyncSession + +from app.domains.notifications.enums import EmailEventType, EmailOutboxStatus +from app.domains.notifications.repositories.email_outbox_repository import ( + 
EmailOutboxRepository, +) +from app.domains.notifications.schemas import ( + EnqueueEmailOutboxDTO, + PasswordResetPayload, + WelcomeInvitePayload, +) + + +def _now() -> datetime: + return datetime.now(UTC).replace(tzinfo=None) + + +def _welcome_payload() -> WelcomeInvitePayload: + return WelcomeInvitePayload( + user_id=uuid4(), + user_name="Test User", + user_email="test@example.com", + one_time_password="TempPass1!", + frontend_url="http://localhost:3000", + token="raw-token-abc", + ) + + +def _reset_payload() -> PasswordResetPayload: + return PasswordResetPayload( + user_id=uuid4(), + user_email="reset@example.com", + frontend_url="http://localhost:3000", + token="reset-token", + ) + + +def _welcome_dto(**overrides: Any) -> EnqueueEmailOutboxDTO: + defaults: dict[str, Any] = { + "event_type": EmailEventType.WELCOME_INVITE, + "recipient": f"user_{uuid4().hex[:6]}@example.com", + "payload": _welcome_payload(), + } + return EnqueueEmailOutboxDTO(**{**defaults, **overrides}) + + +def _reset_dto(**overrides: Any) -> EnqueueEmailOutboxDTO: + defaults: dict[str, Any] = { + "event_type": EmailEventType.PASSWORD_RESET, + "recipient": f"user_{uuid4().hex[:6]}@example.com", + "payload": _reset_payload(), + } + return EnqueueEmailOutboxDTO(**{**defaults, **overrides}) + + +class TestEmailOutboxDTOs: + def test_enqueue_dto_welcome_valid(self) -> None: + dto = _welcome_dto() + assert dto.event_type == EmailEventType.WELCOME_INVITE + assert isinstance(dto.payload, WelcomeInvitePayload) + assert dto.max_attempts == 5 + + def test_enqueue_dto_reset_valid(self) -> None: + dto = _reset_dto() + assert dto.event_type == EmailEventType.PASSWORD_RESET + assert isinstance(dto.payload, PasswordResetPayload) + + def test_enqueue_dto_custom_max_attempts(self) -> None: + dto = _welcome_dto(max_attempts=10) + assert dto.max_attempts == 10 + + def test_welcome_payload_invalid_user_id_should_fail(self) -> None: + with pytest.raises(ValidationError): + WelcomeInvitePayload( + 
user_id="not-a-uuid", # pyright: ignore[reportArgumentType] + user_name="X", + user_email="x@example.com", + one_time_password="P!", + frontend_url="http://x", + token="t", + ) + + def test_welcome_payload_missing_required_field_should_fail(self) -> None: + with pytest.raises(ValidationError): + WelcomeInvitePayload( # pyright: ignore[reportCallIssue] + user_id=uuid4(), + user_name="X", + user_email="x@example.com", + frontend_url="http://x", + token="t", + ) + + def test_reset_payload_invalid_user_id_should_fail(self) -> None: + with pytest.raises(ValidationError): + PasswordResetPayload( + user_id="not-a-uuid", # pyright: ignore[reportArgumentType] + user_email="x@example.com", + frontend_url="http://x", + token="t", + ) + + +class TestEmailOutboxRepository: + @pytest.fixture + def repo(self, db_session: AsyncSession) -> EmailOutboxRepository: + return EmailOutboxRepository(db_session) + + @pytest.mark.asyncio + async def test_enqueue_creates_pending_row(self, repo: EmailOutboxRepository) -> None: + row = await repo.enqueue(_welcome_dto()) + assert row.id is not None + assert row.status == EmailOutboxStatus.PENDING + assert row.attempts == 0 + assert row.sent_at is None + + @pytest.mark.asyncio + async def test_enqueue_returns_full_entity(self, repo: EmailOutboxRepository) -> None: + recipient = "full@example.com" + dto = _welcome_dto(recipient=recipient, max_attempts=7) + row = await repo.enqueue(dto) + assert row.recipient == recipient + assert row.max_attempts == 7 + assert row.event_type == EmailEventType.WELCOME_INVITE + assert row.last_error is None + assert row.locked_at is None + assert row.lock_owner is None + assert row.created_at is not None + assert row.next_attempt_at is not None + + @pytest.mark.asyncio + async def test_enqueue_stores_payload(self, repo: EmailOutboxRepository) -> None: + dto = _welcome_dto() + row = await repo.enqueue(dto) + assert isinstance(row.payload, WelcomeInvitePayload) + assert row.payload.user_name == "Test User" + assert 
row.payload.token == "raw-token-abc" + + @pytest.mark.asyncio + async def test_enqueue_payload_uuid_round_trips(self, repo: EmailOutboxRepository) -> None: + original_user_id = uuid4() + payload = WelcomeInvitePayload( + user_id=original_user_id, + user_name="UUID Test", + user_email="uuid@example.com", + one_time_password="P!", + frontend_url="http://x", + token="t", + ) + dto = EnqueueEmailOutboxDTO( + event_type=EmailEventType.WELCOME_INVITE, + recipient="uuid@example.com", + payload=payload, + ) + row = await repo.enqueue(dto) + assert isinstance(row.payload, WelcomeInvitePayload) + assert isinstance(row.payload.user_id, UUID) + assert row.payload.user_id == original_user_id + + @pytest.mark.asyncio + async def test_enqueue_reset_returns_typed_payload( + self, repo: EmailOutboxRepository + ) -> None: + dto = _reset_dto() + row = await repo.enqueue(dto) + assert row.event_type == EmailEventType.PASSWORD_RESET + assert isinstance(row.payload, PasswordResetPayload) + assert not isinstance(row.payload, WelcomeInvitePayload) + + @pytest.mark.asyncio + async def test_claim_batch_empty_when_no_pending( + self, repo: EmailOutboxRepository + ) -> None: + rows = await repo.claim_batch(_now(), "worker-1", limit=10) + assert rows == [] + + @pytest.mark.asyncio + async def test_claim_batch_returns_pending_rows(self, repo: EmailOutboxRepository) -> None: + await repo.enqueue(_welcome_dto()) + await repo.enqueue(_welcome_dto()) + + rows = await repo.claim_batch(_now(), "worker-1", limit=10) + assert len(rows) >= 2 + + @pytest.mark.asyncio + async def test_claim_batch_sets_processing_status( + self, repo: EmailOutboxRepository + ) -> None: + await repo.enqueue(_welcome_dto()) + rows = await repo.claim_batch(_now(), "worker-1", limit=10) + assert len(rows) >= 1 + # mark_sent proves the row exists and has a valid id (worker found it) + await repo.mark_sent(rows[0].id, _now()) + + @pytest.mark.asyncio + async def test_claim_batch_skips_future_rows(self, repo: 
EmailOutboxRepository) -> None: + future = _now() + timedelta(hours=1) + row = await repo.enqueue(_welcome_dto()) + # Manually set next_attempt_at to future via mark_retry + await repo.mark_retry(row.id, "err", future, 1) + + claimed = await repo.claim_batch(_now(), "worker-1", limit=10) + claimed_ids = [r.id for r in claimed] + assert row.id not in claimed_ids + + @pytest.mark.asyncio + async def test_claim_batch_respects_limit(self, repo: EmailOutboxRepository) -> None: + for _ in range(5): + await repo.enqueue(_welcome_dto()) + + rows = await repo.claim_batch(_now(), "worker-1", limit=2) + assert len(rows) <= 2 + + @pytest.mark.asyncio + async def test_mark_sent_sets_sent_status(self, repo: EmailOutboxRepository) -> None: + row = await repo.enqueue(_welcome_dto()) + now = _now() + await repo.mark_sent(row.id, now) + # Verify by trying to claim — SENT rows should not appear + claimed = await repo.claim_batch(now, "worker-2", limit=10) + assert row.id not in [r.id for r in claimed] + + @pytest.mark.asyncio + async def test_mark_retry_increments_attempts(self, repo: EmailOutboxRepository) -> None: + row = await repo.enqueue(_welcome_dto()) + next_at = _now() + timedelta(seconds=4) + await repo.mark_retry(row.id, "provider timeout", next_at, 1) + # Row should be claimable after next_attempt_at passes + claimed = await repo.claim_batch(next_at + timedelta(seconds=1), "worker-1", limit=10) + assert any(r.id == row.id for r in claimed) + + @pytest.mark.asyncio + async def test_mark_retry_persists_error_and_attempts( + self, repo: EmailOutboxRepository + ) -> None: + row = await repo.enqueue(_welcome_dto()) + next_at = _now() + timedelta(seconds=1) + await repo.mark_retry(row.id, "provider timeout", next_at, 3) + + claimed = await repo.claim_batch(next_at + timedelta(seconds=2), "worker-1", limit=10) + matching = next((r for r in claimed if r.id == row.id), None) + assert matching is not None + assert matching.last_error == "provider timeout" + assert matching.attempts 
== 3 + + @pytest.mark.asyncio + async def test_mark_retry_truncates_long_error( + self, repo: EmailOutboxRepository + ) -> None: + row = await repo.enqueue(_welcome_dto()) + long_error = "x" * 3000 + next_at = _now() + timedelta(seconds=1) + await repo.mark_retry(row.id, long_error, next_at, 1) + + claimed = await repo.claim_batch(next_at + timedelta(seconds=2), "worker-1", limit=10) + matching = next((r for r in claimed if r.id == row.id), None) + assert matching is not None + assert matching.last_error is not None + assert len(matching.last_error) == 2000 + + @pytest.mark.asyncio + async def test_mark_retry_not_claimable_before_next_attempt_at( + self, repo: EmailOutboxRepository + ) -> None: + row = await repo.enqueue(_welcome_dto()) + future = _now() + timedelta(hours=2) + await repo.mark_retry(row.id, "err", future, 1) + + claimed = await repo.claim_batch(_now(), "worker-1", limit=10) + assert row.id not in [r.id for r in claimed] + + @pytest.mark.asyncio + async def test_mark_dead_prevents_future_claims( + self, repo: EmailOutboxRepository + ) -> None: + row = await repo.enqueue(_welcome_dto()) + await repo.mark_dead(row.id, "max retries exceeded") + + claimed = await repo.claim_batch(_now(), "worker-1", limit=10) + assert row.id not in [r.id for r in claimed] diff --git a/tests/app/integration/domains/products/test_product_repository.py b/tests/app/integration/domains/products/test_product_repository.py new file mode 100644 index 0000000..73cf416 --- /dev/null +++ b/tests/app/integration/domains/products/test_product_repository.py @@ -0,0 +1,306 @@ +from uuid import uuid4 + +import pytest +from pydantic import ValidationError +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.domains.companies.models import Company as CompanyModel +from app.domains.companies.models import company_products +from app.domains.companies.repositories import CompanyRepository +from app.domains.companies.schemas import CreateCompanyDTO +from 
app.domains.products.repositories import ProductRepository +from app.domains.products.schemas import ( + CreateProductDTO, + ReplaceProductDTO, + UpdateProductDTO, +) + + +def _tax_id() -> str: + return uuid4().hex[:14] + + +def _legal_name(prefix: str = "Company") -> str: + return f"{prefix} {uuid4().hex[:8]} LTDA" + + +def _product_name(prefix: str = "Product") -> str: + return f"{prefix} {uuid4().hex[:8]}" + + +async def _make_company(db: AsyncSession, *, soft_deleted: bool = False) -> CompanyModel: + repo = CompanyRepository(db=db) + company = await repo.create( + CreateCompanyDTO( + legal_name=_legal_name(), trade_name="Acme", tax_id=_tax_id() + ) + ) + if soft_deleted: + await repo.soft_delete(company.id) + result = await db.execute(select(CompanyModel).where(CompanyModel.id == company.id)) + return result.scalar_one() + + +class TestProductDTOs: + def test_create_product_with_short_name_fails(self) -> None: + with pytest.raises(ValidationError): + CreateProductDTO(name="ab", description="A valid description") + + def test_create_product_with_short_description_fails(self) -> None: + with pytest.raises(ValidationError): + CreateProductDTO(name=_product_name(), description="x") + + def test_update_product_with_all_none_fails(self) -> None: + with pytest.raises(ValidationError): + UpdateProductDTO() + + def test_update_product_with_single_field_succeeds(self) -> None: + dto = UpdateProductDTO(description="A valid new description") + assert dto.description == "A valid new description" + assert dto.name is None + + +class TestProductRepository: + @pytest.fixture + def product_repo(self, db_session: AsyncSession) -> ProductRepository: + return ProductRepository(db=db_session) + + @pytest.fixture + async def product(self, product_repo: ProductRepository) -> object: + return await product_repo.create( + CreateProductDTO(name=_product_name(), description="Initial description") + ) + + # ── create / get_by_id ──────────────────────────────────────────── + + 
@pytest.mark.asyncio + async def test_create_product_success(self, product_repo: ProductRepository) -> None: + dto = CreateProductDTO(name=_product_name(), description="A great product") + product = await product_repo.create(dto) + assert product.id is not None + assert product.name == dto.name + assert product.description == "A great product" + assert product.created_at is not None + + @pytest.mark.asyncio + async def test_get_by_id_returns_none_when_not_found( + self, product_repo: ProductRepository + ) -> None: + assert await product_repo.get_by_id(9_999_999) is None + + @pytest.mark.asyncio + async def test_get_by_id_excludes_soft_deleted( + self, product_repo: ProductRepository, product: object + ) -> None: + await product_repo.soft_delete(product.id) # type: ignore[attr-defined] + assert await product_repo.get_by_id(product.id) is None # type: ignore[attr-defined] + + # ── get_all_paginated ───────────────────────────────────────────── + + @pytest.mark.asyncio + async def test_get_all_paginated_returns_total_and_items( + self, product_repo: ProductRepository + ) -> None: + for _ in range(3): + await product_repo.create( + CreateProductDTO(name=_product_name(), description="Some desc") + ) + result = await product_repo.get_all_paginated(skip=0, limit=10) + assert result.total == 3 + assert len(result.items) == 3 + assert result.page == 1 + + @pytest.mark.asyncio + async def test_get_all_paginated_excludes_soft_deleted( + self, product_repo: ProductRepository + ) -> None: + kept = await product_repo.create( + CreateProductDTO(name=_product_name("kept"), description="Some desc") + ) + deleted = await product_repo.create( + CreateProductDTO(name=_product_name("del"), description="Some desc") + ) + await product_repo.soft_delete(deleted.id) + + result = await product_repo.get_all_paginated(skip=0, limit=10) + assert result.total == 1 + assert [p.id for p in result.items] == [kept.id] + + # ── update ──────────────────────────────────────────────────────── + + 
@pytest.mark.asyncio + async def test_update_partial_fields( + self, product_repo: ProductRepository, product: object + ) -> None: + updated = await product_repo.update( + product.id, UpdateProductDTO(description="Refreshed description") # type: ignore[attr-defined] + ) + assert updated is not None + assert updated.description == "Refreshed description" + assert updated.name == product.name # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_replace_product_overwrites_fields( + self, product_repo: ProductRepository, product: object + ) -> None: + new_name = _product_name("replaced") + replaced = await product_repo.update( + product.id, # type: ignore[attr-defined] + ReplaceProductDTO(name=new_name, description="Brand new description"), + ) + assert replaced is not None + assert replaced.name == new_name + assert replaced.description == "Brand new description" + + @pytest.mark.asyncio + async def test_update_with_empty_dto_returns_current_state( + self, product_repo: ProductRepository, product: object + ) -> None: + # ProductRepository.update tem um early-return quando exclude_unset() fica vazio + empty_dto = UpdateProductDTO.model_construct() + result = await product_repo.update(product.id, empty_dto) # type: ignore[attr-defined] + assert result is not None + assert result.id == product.id # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_update_returns_none_when_not_found( + self, product_repo: ProductRepository + ) -> None: + assert ( + await product_repo.update( + 9_999_999, UpdateProductDTO(description="Anything works") + ) + is None + ) + + # ── soft_delete ─────────────────────────────────────────────────── + + @pytest.mark.asyncio + async def test_soft_delete_returns_true_first_time( + self, product_repo: ProductRepository, product: object + ) -> None: + assert await product_repo.soft_delete(product.id) is True # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def 
test_soft_delete_already_deleted_returns_false( + self, product_repo: ProductRepository, product: object + ) -> None: + await product_repo.soft_delete(product.id) # type: ignore[attr-defined] + assert await product_repo.soft_delete(product.id) is False # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_soft_delete_unknown_id_returns_false( + self, product_repo: ProductRepository + ) -> None: + assert await product_repo.soft_delete(9_999_999) is False + + # ── get_product_companies_paginated ─────────────────────────────── + + @pytest.mark.asyncio + async def test_get_product_companies_paginated_returns_companies( + self, + product_repo: ProductRepository, + db_session: AsyncSession, + product: object, + ) -> None: + company = await _make_company(db_session) + await product_repo.add_companies(product.id, [company.id]) # type: ignore[attr-defined] + + companies, total = await product_repo.get_product_companies_paginated( + product.id, skip=0, limit=10 # type: ignore[attr-defined] + ) + assert total == 1 + assert len(companies) == 1 + assert companies[0].id == company.id + + @pytest.mark.asyncio + async def test_get_product_companies_excludes_soft_deleted_companies( + self, + product_repo: ProductRepository, + db_session: AsyncSession, + product: object, + ) -> None: + kept = await _make_company(db_session) + soft_deleted = await _make_company(db_session) + await product_repo.add_companies( # type: ignore[attr-defined] + product.id, [kept.id, soft_deleted.id] + ) + + repo = CompanyRepository(db=db_session) + await repo.soft_delete(soft_deleted.id) + + companies, total = await product_repo.get_product_companies_paginated( + product.id, skip=0, limit=10 # type: ignore[attr-defined] + ) + assert total == 1 + assert [c.id for c in companies] == [kept.id] + + # ── add_companies / remove_companies ────────────────────────────── + + @pytest.mark.asyncio + async def test_add_companies_creates_relationship( + self, + product_repo: ProductRepository, + 
db_session: AsyncSession, + product: object, + ) -> None: + company = await _make_company(db_session) + await product_repo.add_companies(product.id, [company.id]) # type: ignore[attr-defined] + + result = await db_session.execute( + select(company_products).where( + company_products.c.product_id == product.id, # type: ignore[attr-defined] + company_products.c.company_id == company.id, + ) + ) + assert result.first() is not None + + @pytest.mark.asyncio + async def test_add_companies_idempotent_on_duplicate( + self, + product_repo: ProductRepository, + db_session: AsyncSession, + product: object, + ) -> None: + company = await _make_company(db_session) + await product_repo.add_companies(product.id, [company.id]) # type: ignore[attr-defined] + await product_repo.add_companies(product.id, [company.id]) # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_add_companies_with_unknown_id_raises_value_error( + self, product_repo: ProductRepository, product: object + ) -> None: + with pytest.raises(ValueError, match="company_ids"): + await product_repo.add_companies(product.id, [uuid4()]) # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_add_companies_with_empty_list_is_noop( + self, product_repo: ProductRepository, product: object + ) -> None: + await product_repo.add_companies(product.id, []) # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_remove_companies_success( + self, + product_repo: ProductRepository, + db_session: AsyncSession, + product: object, + ) -> None: + company = await _make_company(db_session) + await product_repo.add_companies(product.id, [company.id]) # type: ignore[attr-defined] + await product_repo.remove_companies(product.id, [company.id]) # type: ignore[attr-defined] + + result = await db_session.execute( + select(company_products).where( + company_products.c.product_id == product.id, # type: ignore[attr-defined] + company_products.c.company_id == company.id, + ) + ) + assert 
result.first() is None + + @pytest.mark.asyncio + async def test_remove_companies_with_empty_list_is_noop( + self, product_repo: ProductRepository, product: object + ) -> None: + await product_repo.remove_companies(product.id, []) # type: ignore[attr-defined] diff --git a/tests/app/integration/domains/ticket/test_ticket_created_pubsub.py b/tests/app/integration/domains/ticket/test_ticket_created_pubsub.py new file mode 100644 index 0000000..319011b --- /dev/null +++ b/tests/app/integration/domains/ticket/test_ticket_created_pubsub.py @@ -0,0 +1,305 @@ +import asyncio +from collections.abc import AsyncGenerator +from typing import Any +from unittest.mock import AsyncMock +from uuid import UUID, uuid4 + +import pytest +import pytest_asyncio +from beanie import PydanticObjectId +from motor.motor_asyncio import AsyncIOMotorDatabase + +from app.core.event_dispatcher.enums import AppEvent +from app.core.event_dispatcher.event_dispatcher import EventDispatcher +from app.core.event_dispatcher.schemas import ( + EVENT_PAYLOAD_MAP, + TicketAssigneeUpdatedEventSchema, + TicketCreatedEventSchema, +) +from app.core.logger import get_logger +from app.domains.auth.entities import Role, User, UserWithRoles +from app.domains.auth.services.user_service import UserService +from app.domains.live_chat.entities import Conversation +from app.domains.live_chat.listeners import ConversationListener +from app.domains.live_chat.repositories.conversation_repository import ConversationRepository +from app.domains.live_chat.services.conversation_service import ConversationService +from app.domains.ticket.models import Ticket, TicketCriticality, TicketType +from app.domains.ticket.repositories import TicketRepository +from app.domains.ticket.schemas import AssignTicketRequest, CreateTicketDTO +from app.domains.ticket.services import TicketService + + +@pytest_asyncio.fixture(autouse=True) +async def _cleanup_collections() -> AsyncGenerator[None, None]: + await Ticket.delete_all() + await 
Conversation.delete_all() + yield + await Ticket.delete_all() + await Conversation.delete_all() + + +@pytest.fixture +def dispatcher() -> EventDispatcher: + return EventDispatcher(EVENT_PAYLOAD_MAP, get_logger("test.ticket_pubsub")) + + +@pytest.fixture +def user_service() -> UserService: + service = AsyncMock(spec=UserService) + + async def _get_by_id(user_id: UUID) -> User: + return User( + id=user_id, + email="client@example.com", + name="Test Client", + username="testclient", + ) + + async def _get_by_id_with_roles(user_id: UUID) -> UserWithRoles: + return UserWithRoles( + id=user_id, + email="agent@example.com", + name="Test Agent", + username="testagent", + roles=[Role(id=1, name="agent")], + ) + + service.get_by_id.side_effect = _get_by_id + service.get_by_id_with_roles.side_effect = _get_by_id_with_roles + return service + + +@pytest.fixture +def conversation_listener( + mongo_db_conn: AsyncIOMotorDatabase[dict[str, Any]], +) -> ConversationListener: + repo = ConversationRepository(mongo_db_conn) + service = ConversationService(repo) + return ConversationListener(service) + + +@pytest.fixture +def ticket_service( + mongo_db_conn: AsyncIOMotorDatabase[dict[str, Any]], + user_service: UserService, + dispatcher: EventDispatcher, +) -> TicketService: + return TicketService(TicketRepository(mongo_db_conn), user_service, dispatcher) + + +def _make_dto(client_id: UUID | None = None) -> CreateTicketDTO: + return CreateTicketDTO( + triage_id=PydanticObjectId(), + type=TicketType.ISSUE, + criticality=TicketCriticality.HIGH, + product="Sistema Financeiro", + description="Erro ao emitir boleto", + chat_ids=[], + client_id=client_id or uuid4(), + company_id=None, + company_name=None, + ) + + +async def _drain_background_tasks() -> None: + """The dispatcher runs handlers as fire-and-forget ``asyncio.create_task``. + + Tests must await those tasks before asserting on side effects. 
+ """ + pending = [t for t in asyncio.all_tasks() if t is not asyncio.current_task()] + if pending: + await asyncio.gather(*pending, return_exceptions=True) + + +class TestTicketCreatedPubSub: + @pytest.mark.asyncio + async def test_publishes_ticket_created_event( + self, + ticket_service: TicketService, + dispatcher: EventDispatcher, + ) -> None: + received: list[TicketCreatedEventSchema] = [] + + original_publish = dispatcher.publish + + async def spy_publish(event: AppEvent, payload: Any) -> None: + if event == AppEvent.TICKET_CREATED: + received.append(payload) + await original_publish(event, payload) + + dispatcher.publish = spy_publish # type: ignore[method-assign] + + dto = _make_dto() + response = await ticket_service.create_ticket(dto) + await _drain_background_tasks() + + assert len(received) == 1 + event = received[0] + assert isinstance(event, TicketCreatedEventSchema) + assert str(event.ticket_id) == response.id + assert event.client_id == dto.client_id + + @pytest.mark.asyncio + async def test_listener_creates_conversation_for_published_event( + self, + ticket_service: TicketService, + conversation_listener: ConversationListener, + dispatcher: EventDispatcher, + ) -> None: + dispatcher.subscribe( + AppEvent.TICKET_CREATED, conversation_listener.on_ticket_created + ) + + dto = _make_dto() + response = await ticket_service.create_ticket(dto) + await _drain_background_tasks() + + ticket_id = PydanticObjectId(response.id) + conv = await conversation_listener.service.get_last_conversation_from_ticket( + ticket_id + ) + assert conv is not None + assert conv.ticket_id == ticket_id + assert conv.client_id == dto.client_id + assert conv.is_opened() + assert conv.sequential_index == 0 + + @pytest.mark.asyncio + async def test_event_is_not_delivered_without_subscription( + self, + ticket_service: TicketService, + conversation_listener: ConversationListener, + ) -> None: + # Dispatcher has no subscribers registered here — listener is built but not wired up. 
+ dto = _make_dto() + response = await ticket_service.create_ticket(dto) + await _drain_background_tasks() + + conv = await conversation_listener.service.get_last_conversation_from_ticket( + PydanticObjectId(response.id) + ) + assert conv is None + + @pytest.mark.asyncio + async def test_multiple_tickets_produce_one_conversation_each( + self, + ticket_service: TicketService, + conversation_listener: ConversationListener, + dispatcher: EventDispatcher, + ) -> None: + dispatcher.subscribe( + AppEvent.TICKET_CREATED, conversation_listener.on_ticket_created + ) + + responses = [ + await ticket_service.create_ticket(_make_dto()) for _ in range(3) + ] + await _drain_background_tasks() + + for response in responses: + conv = await conversation_listener.service.get_last_conversation_from_ticket( + PydanticObjectId(response.id) + ) + assert conv is not None + assert conv.sequential_index == 0 + + @pytest.mark.asyncio + async def test_listener_idempotency_when_event_replayed( + self, + ticket_service: TicketService, + conversation_listener: ConversationListener, + dispatcher: EventDispatcher, + ) -> None: + dispatcher.subscribe( + AppEvent.TICKET_CREATED, conversation_listener.on_ticket_created + ) + + dto = _make_dto() + response = await ticket_service.create_ticket(dto) + await _drain_background_tasks() + + replay = TicketCreatedEventSchema( + ticket_id=PydanticObjectId(response.id), + client_id=dto.client_id, + ) + await dispatcher.publish(AppEvent.TICKET_CREATED, replay) + await _drain_background_tasks() + + convs = await conversation_listener.service.get_chats_from_ticket( + PydanticObjectId(response.id) + ) + assert len(convs) == 1 + + +class TestTicketAssigneeUpdatedPubSub: + @pytest.mark.asyncio + async def test_publishes_ticket_assignee_updated_event( + self, + ticket_service: TicketService, + dispatcher: EventDispatcher, + ) -> None: + received: list[TicketAssigneeUpdatedEventSchema] = [] + + original_publish = dispatcher.publish + + async def spy_publish(event: 
AppEvent, payload: Any) -> None: + if event == AppEvent.TICKET_ASSIGNEE_UPDATED: + received.append(payload) + await original_publish(event, payload) + + dispatcher.publish = spy_publish # type: ignore[method-assign] + + dto = _make_dto() + created = await ticket_service.create_ticket(dto) + ticket_id = PydanticObjectId(created.id) + agent_id = uuid4() + + response = await ticket_service.assign_ticket( + ticket_id, + AssignTicketRequest(agent_id=agent_id, reason="Primeira atribuicao"), + ) + await _drain_background_tasks() + + assert response.status.value == "in_progress" + assert response.assigned_agent_id == agent_id + assert len(received) == 1 + event = received[0] + assert isinstance(event, TicketAssigneeUpdatedEventSchema) + assert event.ticket_id == ticket_id + assert event.client_id == dto.client_id + assert event.new_agent_id == agent_id + assert event.reason == "Primeira atribuicao" + + @pytest.mark.asyncio + async def test_listener_reacts_to_published_assignee_event( + self, + ticket_service: TicketService, + conversation_listener: ConversationListener, + dispatcher: EventDispatcher, + ) -> None: + dispatcher.subscribe( + AppEvent.TICKET_CREATED, conversation_listener.on_ticket_created + ) + dispatcher.subscribe( + AppEvent.TICKET_ASSIGNEE_UPDATED, + conversation_listener.on_ticket_assignee_updated, + ) + + dto = _make_dto() + created = await ticket_service.create_ticket(dto) + ticket_id = PydanticObjectId(created.id) + agent_id = uuid4() + + await _drain_background_tasks() + + await ticket_service.assign_ticket( + ticket_id, + AssignTicketRequest(agent_id=agent_id, reason="Encaminhado para atendimento"), + ) + await _drain_background_tasks() + + convs = await conversation_listener.service.get_chats_from_ticket(ticket_id) + assert len(convs) == 2 + assert not convs[0].is_opened() + assert convs[1].is_opened() + assert convs[1].agent_id == agent_id diff --git a/tests/app/integration/domains/ticket/test_ticket_repository.py 
b/tests/app/integration/domains/ticket/test_ticket_repository.py new file mode 100644 index 0000000..8d75621 --- /dev/null +++ b/tests/app/integration/domains/ticket/test_ticket_repository.py @@ -0,0 +1,351 @@ +from collections.abc import AsyncGenerator +from datetime import UTC, datetime +from typing import Any +from uuid import UUID, uuid4 + +import pytest +import pytest_asyncio +from beanie import PydanticObjectId +from motor.motor_asyncio import AsyncIOMotorDatabase + +from app.domains.ticket.models import ( + Ticket, + TicketClient, + TicketComment, + TicketCompany, + TicketCriticality, + TicketHistory, + TicketStatus, + TicketType, +) +from app.domains.ticket.repositories import TicketRepository + + +@pytest_asyncio.fixture(autouse=True) +async def _cleanup_tickets() -> AsyncGenerator[None, None]: + await Ticket.delete_all() + yield + await Ticket.delete_all() + + +@pytest.fixture +def repository( + mongo_db_conn: AsyncIOMotorDatabase[dict[str, Any]], +) -> TicketRepository: + return TicketRepository(mongo_db_conn) + + +def _make_ticket( + *, + description: str = "Erro ao emitir boleto", + comments: list[TicketComment] | None = None, + client_id: UUID | None = None, + company_id: UUID | None = None, + agent_history: list[TicketHistory] | None = None, +) -> Ticket: + cid = client_id or uuid4() + coid = company_id or uuid4() + return Ticket( + triage_id=PydanticObjectId(), + type=TicketType.ISSUE, + criticality=TicketCriticality.HIGH, + product="Sistema", + status=TicketStatus.AWAITING_ASSIGNMENT, + creation_date=datetime.now(UTC), + description=description, + chat_ids=[], + agent_history=agent_history or [], + client=TicketClient( + id=cid, + name="Cliente", + email="cliente@test.com", + company=TicketCompany(id=coid, name="Empresa"), + ), + comments=comments or [], + ) + + +def _make_comment(text: str) -> TicketComment: + return TicketComment( + author="agente", + text=text, + date=datetime.now(UTC), + internal=False, + ) + + +def _make_history(agent_id: 
UUID, level: str = "N1") -> TicketHistory: + return TicketHistory( + agent_id=agent_id, + name="Agente", + level=level, + assignment_date=datetime.now(UTC), + exit_date=None, + transfer_reason=None, + ) + + +class TestSearchTicketByClient: + @pytest.mark.asyncio + async def test_matches_text_in_description( + self, repository: TicketRepository + ) -> None: + client_id = uuid4() + await repository.create_ticket( + _make_ticket(description="Erro crítico no boleto", client_id=client_id) + ) + await repository.create_ticket( + _make_ticket(description="Configuração de SMTP", client_id=client_id) + ) + + result = await repository.search_ticket("boleto", client_id=client_id) + + assert result is not None + assert len(result) == 1 + assert result[0].description == "Erro crítico no boleto" + + @pytest.mark.asyncio + async def test_matches_text_in_comments( + self, repository: TicketRepository + ) -> None: + client_id = uuid4() + await repository.create_ticket( + _make_ticket( + description="Pedido genérico", + comments=[_make_comment("Cliente relatou queda na fatura")], + client_id=client_id, + ) + ) + + result = await repository.search_ticket("queda", client_id=client_id) + + assert result is not None + assert len(result) == 1 + assert result[0].comments[0].text == "Cliente relatou queda na fatura" + + @pytest.mark.asyncio + async def test_search_is_case_insensitive( + self, repository: TicketRepository + ) -> None: + client_id = uuid4() + await repository.create_ticket( + _make_ticket(description="Falha CRÍTICA na importação", client_id=client_id) + ) + + result = await repository.search_ticket("crítica", client_id=client_id) + + assert result is not None + assert len(result) == 1 + + @pytest.mark.asyncio + async def test_excludes_tickets_from_other_clients( + self, repository: TicketRepository + ) -> None: + target_client = uuid4() + other_client = uuid4() + await repository.create_ticket( + _make_ticket(description="boleto vencido", client_id=target_client) + ) + await 
repository.create_ticket( + _make_ticket(description="boleto duplicado", client_id=other_client) + ) + + result = await repository.search_ticket("boleto", client_id=target_client) + + assert result is not None + assert len(result) == 1 + assert result[0].client.id == target_client + + @pytest.mark.asyncio + async def test_returns_empty_list_when_no_matches( + self, repository: TicketRepository + ) -> None: + client_id = uuid4() + await repository.create_ticket( + _make_ticket(description="cobrança incorreta", client_id=client_id) + ) + + result = await repository.search_ticket("inexistente", client_id=client_id) + + assert result == [] + + +class TestSearchTicketByAgent: + @pytest.mark.asyncio + async def test_filters_by_agent_history( + self, repository: TicketRepository + ) -> None: + target_agent = uuid4() + another_agent = uuid4() + await repository.create_ticket( + _make_ticket( + description="cobrança incorreta", + agent_history=[_make_history(target_agent)], + ) + ) + await repository.create_ticket( + _make_ticket( + description="cobrança duplicada", + agent_history=[_make_history(another_agent)], + ) + ) + + result = await repository.search_ticket("cobrança", agent_id=target_agent) + + assert result is not None + assert len(result) == 1 + assert any(h.agent_id == target_agent for h in result[0].agent_history) + + @pytest.mark.asyncio + async def test_matches_when_agent_appears_anywhere_in_history( + self, repository: TicketRepository + ) -> None: + first_agent = uuid4() + second_agent = uuid4() + history = [ + _make_history(first_agent), + _make_history(second_agent, level="N2"), + ] + await repository.create_ticket( + _make_ticket(description="acesso negado", agent_history=history) + ) + + result_first = await repository.search_ticket("acesso", agent_id=first_agent) + result_second = await repository.search_ticket("acesso", agent_id=second_agent) + + assert result_first is not None and len(result_first) == 1 + assert result_second is not None and 
len(result_second) == 1 + + +class TestSearchTicketByCompany: + @pytest.mark.asyncio + async def test_filters_by_client_company_id( + self, repository: TicketRepository + ) -> None: + target_company = uuid4() + await repository.create_ticket( + _make_ticket(description="acesso negado", company_id=target_company) + ) + await repository.create_ticket( + _make_ticket(description="acesso negado", company_id=uuid4()) + ) + + result = await repository.search_ticket("acesso", company_id=target_company) + + assert result is not None + assert len(result) == 1 + assert result[0].client.company.id == target_company + + +class TestSearchTicketGlobalScope: + @pytest.mark.asyncio + async def test_global_scope_returns_matches_across_clients( + self, repository: TicketRepository + ) -> None: + await repository.create_ticket( + _make_ticket(description="erro global no faturamento", client_id=uuid4()) + ) + await repository.create_ticket( + _make_ticket(description="erro global no envio", client_id=uuid4()) + ) + await repository.create_ticket( + _make_ticket(description="cobrança comum", client_id=uuid4()) + ) + + result = await repository.search_ticket("global", global_scope=True) + + assert result is not None + descriptions = sorted(t.description for t in result) + assert descriptions == ["erro global no envio", "erro global no faturamento"] + + @pytest.mark.asyncio + async def test_global_scope_matches_text_in_comments( + self, repository: TicketRepository + ) -> None: + await repository.create_ticket( + _make_ticket( + description="ticket sem termo na descrição", + comments=[_make_comment("Cliente reportou indisponibilidade total")], + ) + ) + + result = await repository.search_ticket("indisponibilidade", global_scope=True) + + assert result is not None + assert len(result) == 1 + + @pytest.mark.asyncio + async def test_specific_scope_takes_precedence_over_global( + self, repository: TicketRepository + ) -> None: + target_client = uuid4() + await repository.create_ticket( + 
_make_ticket(description="alvo do cliente", client_id=target_client) + ) + await repository.create_ticket( + _make_ticket(description="alvo de outro cliente", client_id=uuid4()) + ) + + result = await repository.search_ticket( + "alvo", client_id=target_client, global_scope=True + ) + + assert result is not None + assert len(result) == 1 + assert result[0].client.id == target_client + + +class TestSearchTicketEdgeCases: + @pytest.mark.asyncio + async def test_returns_empty_when_no_scope_and_not_global( + self, repository: TicketRepository + ) -> None: + await repository.create_ticket(_make_ticket(description="qualquer")) + + result = await repository.search_ticket("qualquer") + + assert result == [] + + @pytest.mark.asyncio + async def test_special_regex_characters_are_escaped( + self, repository: TicketRepository + ) -> None: + client_id = uuid4() + await repository.create_ticket( + _make_ticket(description="Saldo (negativo) detectado", client_id=client_id) + ) + await repository.create_ticket( + _make_ticket(description="saldo positivo", client_id=client_id) + ) + + result = await repository.search_ticket("(negativo)", client_id=client_id) + + assert result is not None + assert len(result) == 1 + assert "(negativo)" in result[0].description + + @pytest.mark.asyncio + async def test_scope_priority_uses_first_provided( + self, repository: TicketRepository + ) -> None: + target_client = uuid4() + unrelated_agent = uuid4() + await repository.create_ticket( + _make_ticket(description="cobrança", client_id=target_client) + ) + await repository.create_ticket( + _make_ticket( + description="cobrança", + agent_history=[_make_history(unrelated_agent)], + ) + ) + + result = await repository.search_ticket( + "cobrança", + client_id=target_client, + agent_id=unrelated_agent, + ) + + assert result is not None + assert len(result) == 1 + assert result[0].client.id == target_client diff --git a/tests/app/unit/event_dispatcher/test_event_dipatcher.py 
b/tests/app/unit/event_dispatcher/test_event_dipatcher.py new file mode 100644 index 0000000..8810463 --- /dev/null +++ b/tests/app/unit/event_dispatcher/test_event_dipatcher.py @@ -0,0 +1,185 @@ +import asyncio + +import pytest + +from app.core.event_dispatcher.decorators import event_handler +from app.core.event_dispatcher.enums import AppEvent +from app.core.event_dispatcher.event_dispatcher import EventDispatcher +from app.core.event_dispatcher.exceptions import EventSchemaError, InvalidHandlerError +from app.core.event_dispatcher.schemas import DispatcherSchema +from app.core.logger import get_logger + + +class FakePayload(DispatcherSchema): + value: int + + +class WrongPayload(DispatcherSchema): + other: str + + +EVENT1 = AppEvent.TICKET_CLOSED +PAYLOAD_MAP = { + EVENT1: FakePayload, + AppEvent.TRIAGE_FINISHED: FakePayload, + AppEvent.TICKET_CREATED: FakePayload, +} + + +@pytest.fixture +def dispatcher() -> EventDispatcher: + return EventDispatcher(PAYLOAD_MAP, get_logger("test.event_dispatcher")) + + +class TestEventDispatcher: + async def test_handler_receives_payload(self, dispatcher: EventDispatcher) -> None: + received: list[FakePayload] = [] + + @event_handler(FakePayload) + async def handler(payload: FakePayload) -> None: + received.append(payload) + + dispatcher.subscribe(EVENT1, handler) + + await dispatcher.publish(EVENT1, FakePayload(value=42)) + await asyncio.sleep(0) + + assert len(received) == 1 + assert received[0].value == 42 + + async def test_listener_without_decorator_should_fail( + self, dispatcher: EventDispatcher + ) -> None: + async def handler(payload: FakePayload) -> None: + print("passei") + + with pytest.raises(InvalidHandlerError) as e: + dispatcher.subscribe(EVENT1, handler) + assert "must be decorated with @event_handler" in str(e.value) + + async def test_subscribe_listener_wrong_signature_should_fail( + self, dispatcher: EventDispatcher + ) -> None: + @event_handler(WrongPayload) + async def handler(payload: WrongPayload) -> 
None: + print("passei") + + with pytest.raises(InvalidHandlerError) as e: + dispatcher.subscribe(EVENT1, handler) + assert ( + f"Handler '{handler.__name__}' expects ({WrongPayload.__name__}), " + f"but event '{EVENT1.value}' emits {FakePayload.__name__}" + ) in str(e.value) + + async def test_publish_wrong_payload_should_fail(self, dispatcher: EventDispatcher) -> None: + with pytest.raises(EventSchemaError): + await dispatcher.publish(EVENT1, WrongPayload(other="should fail")) + + async def test_multiple_handlers_all_called(self, dispatcher: EventDispatcher) -> None: + received: dict[str, FakePayload] = {} + + @event_handler(FakePayload) + async def handler1(payload: FakePayload) -> None: + received["h1"] = payload + + @event_handler(FakePayload) + async def handler2(payload: FakePayload) -> None: + received["h2"] = payload + + @event_handler(FakePayload) + async def handler3(payload: FakePayload) -> None: + received["h3"] = payload + + for fn in (handler1, handler2, handler3): + dispatcher.subscribe(EVENT1, fn) + + await dispatcher.publish(EVENT1, FakePayload(value=42)) + await asyncio.sleep(0) + assert len(received) == 3 + assert received["h1"].value == 42 + assert received["h2"].value == 42 + assert received["h3"].value == 42 + + async def test_handler_only_called_for_subscribed_events( + self, dispatcher: EventDispatcher + ) -> None: + received: dict[str, FakePayload] = {} + + @event_handler(FakePayload) + async def handler1(payload: FakePayload) -> None: + received["h1"] = payload + + @event_handler(FakePayload) + async def handler2(payload: FakePayload) -> None: + received["h2"] = payload + + dispatcher.subscribe(AppEvent.TRIAGE_FINISHED, handler1) + dispatcher.subscribe(AppEvent.TICKET_CREATED, handler2) + + await dispatcher.publish(AppEvent.TRIAGE_FINISHED, FakePayload(value=42)) + await dispatcher.publish(AppEvent.TICKET_CREATED, FakePayload(value=43)) + await asyncio.sleep(0) + + assert len(received) == 2 + assert received["h1"].value == 42 + assert 
received["h2"].value == 43 + + async def test_failing_handler_not_block_others(self, dispatcher: EventDispatcher) -> None: + received: dict[str, FakePayload] = {} + + @event_handler(FakePayload) + async def handler1(payload: FakePayload) -> None: + raise Exception + + @event_handler(FakePayload) + async def handler2(payload: FakePayload) -> None: + received["h2"] = payload + + dispatcher.subscribe(EVENT1, handler1) + dispatcher.subscribe(EVENT1, handler2) + + await dispatcher.publish(EVENT1, FakePayload(value=42)) + await asyncio.sleep(0) + + assert len(received) == 1 + assert received["h2"].value == 42 + + async def test_event_with_no_subs_do_nothing(self, dispatcher: EventDispatcher) -> None: + await dispatcher.publish(EVENT1, FakePayload(value=1)) + await asyncio.sleep(0) + + async def test_handler_subscribed_twice_only_executes_once( + self, dispatcher: EventDispatcher + ) -> None: + call_count = 0 + + @event_handler(FakePayload) + async def handler(payload: FakePayload) -> None: + nonlocal call_count + call_count += 1 + + dispatcher.subscribe(EVENT1, handler) + dispatcher.subscribe(EVENT1, handler) + + await dispatcher.publish(EVENT1, FakePayload(value=42)) + await asyncio.sleep(0) + + assert call_count == 1 + + async def test_handler_no_replay(self, dispatcher: EventDispatcher) -> None: + received: list[FakePayload] = [] + + @event_handler(FakePayload) + async def handler(payload: FakePayload) -> None: + received.append(payload) + + await dispatcher.publish(EVENT1, FakePayload(value=1)) + await asyncio.sleep(0) + + dispatcher.subscribe(EVENT1, handler) + + await dispatcher.publish(EVENT1, FakePayload(value=2)) + await asyncio.sleep(0) + + assert len(received) == 1 + assert received[0].value == 2 diff --git a/tests/app/unit/event_dispatcher/test_event_handler_decorator.py b/tests/app/unit/event_dispatcher/test_event_handler_decorator.py new file mode 100644 index 0000000..7eb99d2 --- /dev/null +++ 
b/tests/app/unit/event_dispatcher/test_event_handler_decorator.py @@ -0,0 +1,87 @@ +import pytest + +from app.core.event_dispatcher.decorators import event_handler +from app.core.event_dispatcher.exceptions import EventSchemaError +from app.core.event_dispatcher.schemas import DispatcherSchema + + +class FakePayload(DispatcherSchema): + value: int + + +class OtherPayload(DispatcherSchema): + other: str + + +class TestEventHandlerDecorator: + async def test_calls_handler_with_correct_payload(self) -> None: + received: list[FakePayload] = [] + + @event_handler(FakePayload) + async def handler(payload: FakePayload) -> None: + received.append(payload) + + await handler(FakePayload(value=42)) + + assert len(received) == 1 + assert received[0].value == 42 + + async def test_raises_on_wrong_payload_type(self) -> None: + @event_handler(FakePayload) + async def handler(payload: FakePayload) -> None: + pass + + with pytest.raises(EventSchemaError, match="expected.*FakePayload.*got OtherPayload"): + await handler(OtherPayload(other="wrong")) + + async def test_exception_in_handler_is_caught_and_logged( + self, caplog: pytest.LogCaptureFixture + ) -> None: + @event_handler(FakePayload) + async def handler(payload: FakePayload) -> None: + raise ValueError("boom") + + await handler(FakePayload(value=1)) + + assert "Event handler failed" in caplog.text + + async def test_exception_in_handler_does_not_propagate(self) -> None: + @event_handler(FakePayload) + async def handler(payload: FakePayload) -> None: + raise RuntimeError("should not propagate") + + await handler(FakePayload(value=1)) + + async def test_sets_event_payload_types_attribute(self) -> None: + @event_handler(FakePayload, OtherPayload) + async def handler(payload: DispatcherSchema) -> None: + pass + + assert hasattr(handler, "__event_payload_types__") + assert handler.__event_payload_types__ == (FakePayload, OtherPayload) # type: ignore[attr-defined] + + async def test_preserves_function_name(self) -> None: + 
@event_handler(FakePayload) + async def my_handler(payload: FakePayload) -> None: + pass + + assert my_handler.__name__ == "my_handler" + + async def test_no_payload_types_skips_validation(self) -> None: + @event_handler() + async def handler(payload: OtherPayload) -> None: + pass + + await handler(OtherPayload(other="anything")) + + async def test_accepts_multiple_payload_types(self) -> None: + received: list[DispatcherSchema] = [] + + @event_handler(FakePayload, OtherPayload) + async def handler(payload: DispatcherSchema) -> None: + received.append(payload) + + await handler(FakePayload(value=1)) + await handler(OtherPayload(other="ok")) + + assert len(received) == 2 diff --git a/tests/app/unit/notifications/__init__.py b/tests/app/unit/notifications/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/app/unit/notifications/test_email_outbox_service.py b/tests/app/unit/notifications/test_email_outbox_service.py new file mode 100644 index 0000000..1283ea4 --- /dev/null +++ b/tests/app/unit/notifications/test_email_outbox_service.py @@ -0,0 +1,148 @@ +from unittest.mock import AsyncMock, MagicMock +from uuid import uuid4 + +import pytest + +from app.core.config import get_settings +from app.core.event_dispatcher.schemas import ( + PasswordResetEventSchema, + WelcomeInviteEventSchema, +) +from app.domains.notifications.entities import ( + PasswordResetPayload, + WelcomeInvitePayload, +) +from app.domains.notifications.enums import EmailEventType +from app.domains.notifications.services.email_outbox_service import EmailOutboxService + + +def _make_outbox_row() -> MagicMock: + row = MagicMock() + row.id = uuid4() + return row + + +def _welcome_schema(roles: list[str] | None = None) -> WelcomeInviteEventSchema: + return WelcomeInviteEventSchema( + user_id=uuid4(), + user_name="Test User", + user_email="user@example.com", + roles=roles or ["user"], + raw_token="raw-token", + one_time_password="TempPass1!", + max_attempts=5, + ) + + +def 
_reset_schema( + roles: list[str] | None = None, + user_email: str = "user@example.com", + raw_token: str = "reset-tok", +) -> PasswordResetEventSchema: + return PasswordResetEventSchema( + user_id=uuid4(), + user_email=user_email, + roles=roles or ["user"], + raw_token=raw_token, + max_attempts=5, + ) + + +class TestEmailOutboxService: + + @pytest.fixture + def repo(self) -> AsyncMock: + mock = AsyncMock() + mock.enqueue = AsyncMock(return_value=_make_outbox_row()) + return mock + + @pytest.fixture + def service(self, repo: AsyncMock) -> EmailOutboxService: + return EmailOutboxService(repo=repo) + + + @pytest.mark.asyncio + async def test_enqueue_welcome_invite_calls_repo( + self, service: EmailOutboxService, repo: AsyncMock + ) -> None: + await service.enqueue_welcome_invite(_welcome_schema(roles=["admin"])) + repo.enqueue.assert_awaited_once() + + @pytest.mark.asyncio + async def test_enqueue_welcome_invite_uses_web_url_for_admin( + self, service: EmailOutboxService, repo: AsyncMock + ) -> None: + await service.enqueue_welcome_invite(_welcome_schema(roles=["admin"])) + dto = repo.enqueue.call_args[0][0] + assert isinstance(dto.payload, WelcomeInvitePayload) + assert dto.payload.frontend_url == get_settings().WEB_FRONTEND_URL + + @pytest.mark.asyncio + async def test_enqueue_welcome_invite_uses_mobile_url_for_client( + self, service: EmailOutboxService, repo: AsyncMock + ) -> None: + await service.enqueue_welcome_invite(_welcome_schema(roles=["client"])) + dto = repo.enqueue.call_args[0][0] + assert isinstance(dto.payload, WelcomeInvitePayload) + assert dto.payload.frontend_url == get_settings().MOBILE_FRONTEND_URL + + @pytest.mark.asyncio + async def test_enqueue_welcome_invite_event_type( + self, service: EmailOutboxService, repo: AsyncMock + ) -> None: + await service.enqueue_welcome_invite(_welcome_schema()) + dto = repo.enqueue.call_args[0][0] + assert dto.event_type == EmailEventType.WELCOME_INVITE + + @pytest.mark.asyncio + async def 
test_enqueue_welcome_invite_payload_contains_token( + self, service: EmailOutboxService, repo: AsyncMock + ) -> None: + schema = WelcomeInviteEventSchema( + user_id=uuid4(), + user_name="Test User", + user_email="user@example.com", + roles=["user"], + raw_token="my-secret-token", + one_time_password="Pass!", + max_attempts=5, + ) + await service.enqueue_welcome_invite(schema) + dto = repo.enqueue.call_args[0][0] + assert isinstance(dto.payload, WelcomeInvitePayload) + assert dto.payload.token == "my-secret-token" + assert dto.payload.one_time_password == "Pass!" + + + @pytest.mark.asyncio + async def test_enqueue_password_reset_calls_repo( + self, service: EmailOutboxService, repo: AsyncMock + ) -> None: + await service.enqueue_password_reset(_reset_schema(roles=["user"])) + repo.enqueue.assert_awaited_once() + + @pytest.mark.asyncio + async def test_enqueue_password_reset_event_type( + self, service: EmailOutboxService, repo: AsyncMock + ) -> None: + await service.enqueue_password_reset(_reset_schema()) + dto = repo.enqueue.call_args[0][0] + assert dto.event_type == EmailEventType.PASSWORD_RESET + + @pytest.mark.asyncio + async def test_enqueue_password_reset_payload_contains_token( + self, service: EmailOutboxService, repo: AsyncMock + ) -> None: + await service.enqueue_password_reset(_reset_schema(raw_token="reset-secret")) + dto = repo.enqueue.call_args[0][0] + assert isinstance(dto.payload, PasswordResetPayload) + assert dto.payload.token == "reset-secret" + assert not hasattr(dto.payload, "one_time_password") + + @pytest.mark.asyncio + async def test_enqueue_password_reset_recipient_is_user_email( + self, service: EmailOutboxService, repo: AsyncMock + ) -> None: + await service.enqueue_password_reset(_reset_schema(user_email="specific@example.com")) + dto = repo.enqueue.call_args[0][0] + assert dto.recipient == "specific@example.com" diff --git a/tests/app/unit/notifications/test_email_outbox_worker.py 
b/tests/app/unit/notifications/test_email_outbox_worker.py new file mode 100644 index 0000000..4758b4d --- /dev/null +++ b/tests/app/unit/notifications/test_email_outbox_worker.py @@ -0,0 +1,177 @@ +from datetime import UTC, datetime +from unittest.mock import AsyncMock, MagicMock, patch +from uuid import uuid4 + +import pytest + +from app.domains.notifications.entities import EmailOutbox +from app.domains.notifications.enums import EmailEventType, EmailOutboxStatus +from app.domains.notifications.schemas import PasswordResetPayload, WelcomeInvitePayload +from app.domains.notifications.worker import _backoff_seconds, _process_single, _render_html + + +def test_backoff_increases_with_attempts() -> None: + b1 = _backoff_seconds(1, 900) + b2 = _backoff_seconds(2, 900) + b3 = _backoff_seconds(3, 900) + assert b1 < b2 < b3 + + +def test_backoff_caps_at_max() -> None: + b = _backoff_seconds(20, 900) + assert b <= 900 * 1.1 # allow for jitter + + +def _welcome_payload() -> WelcomeInvitePayload: + return WelcomeInvitePayload( + user_id=uuid4(), + user_name="Alice", + user_email="alice@example.com", + one_time_password="Tmp1!", + frontend_url="http://localhost:3000", + token="tok123", + ) + + +def _reset_payload() -> PasswordResetPayload: + return PasswordResetPayload( + user_id=uuid4(), + user_email="bob@example.com", + frontend_url="http://localhost:3000", + token="reset-tok", + ) + + +def _make_entry( + payload: WelcomeInvitePayload | PasswordResetPayload, + event_type: EmailEventType, + attempts: int = 0, + max_attempts: int = 5, +) -> EmailOutbox: + entry = MagicMock(spec=EmailOutbox) + entry.id = uuid4() + entry.event_type = event_type + entry.recipient = "test@example.com" + entry.payload = payload + entry.status = EmailOutboxStatus.PENDING + entry.attempts = attempts + entry.max_attempts = max_attempts + entry.last_error = None + entry.next_attempt_at = datetime.now(UTC) + entry.created_at = datetime.now(UTC) + entry.sent_at = None + entry.locked_at = None + 
entry.lock_owner = None + return entry + + +def test_render_html_welcome_invite() -> None: + entry = _make_entry(_welcome_payload(), EmailEventType.WELCOME_INVITE) + subject, html = _render_html(entry) + assert "Welcome" in subject + assert "Alice" in html or "localhost" in html + + +def test_render_html_password_reset() -> None: + entry = _make_entry(_reset_payload(), EmailEventType.PASSWORD_RESET) + subject, html = _render_html(entry) + assert "Reset" in subject + assert "reset-tok" in html or "localhost" in html + + +@pytest.mark.asyncio +async def test_process_single_success_marks_sent() -> None: + entry = _make_entry(_welcome_payload(), EmailEventType.WELCOME_INVITE) + email_strategy = MagicMock() + email_strategy._send = AsyncMock() + + mock_repo = AsyncMock() + mock_repo.mark_sent = AsyncMock() + + session_cm = MagicMock() + session_cm.__aenter__ = AsyncMock(return_value=MagicMock()) + session_cm.__aexit__ = AsyncMock(return_value=False) + + begin_cm = MagicMock() + begin_cm.__aenter__ = AsyncMock(return_value=None) + begin_cm.__aexit__ = AsyncMock(return_value=False) + + mock_session = MagicMock() + mock_session.begin.return_value = begin_cm + + session_maker = MagicMock() + session_maker.return_value = session_cm + session_cm.__aenter__ = AsyncMock(return_value=mock_session) + + with patch( + "app.domains.notifications.worker.EmailOutboxRepository", + return_value=mock_repo, + ): + await _process_single(session_maker, email_strategy, entry, "worker-1") + + email_strategy._send.assert_awaited_once() + mock_repo.mark_sent.assert_awaited_once() + + +@pytest.mark.asyncio +async def test_process_single_failure_marks_retry() -> None: + entry = _make_entry( + _welcome_payload(), EmailEventType.WELCOME_INVITE, attempts=0, max_attempts=5 + ) + email_strategy = MagicMock() + email_strategy._send = AsyncMock(side_effect=Exception("SMTP down")) + + mock_repo = AsyncMock() + mock_repo.mark_retry = AsyncMock() + mock_repo.mark_dead = AsyncMock() + + session_cm = 
MagicMock() + begin_cm = MagicMock() + begin_cm.__aenter__ = AsyncMock(return_value=None) + begin_cm.__aexit__ = AsyncMock(return_value=False) + mock_session = MagicMock() + mock_session.begin.return_value = begin_cm + session_cm.__aenter__ = AsyncMock(return_value=mock_session) + session_cm.__aexit__ = AsyncMock(return_value=False) + session_maker = MagicMock(return_value=session_cm) + + with patch( + "app.domains.notifications.worker.EmailOutboxRepository", + return_value=mock_repo, + ): + await _process_single(session_maker, email_strategy, entry, "worker-1") + + mock_repo.mark_retry.assert_awaited_once() + mock_repo.mark_dead.assert_not_awaited() + + +@pytest.mark.asyncio +async def test_process_single_failure_marks_dead_when_max_attempts_reached() -> None: + entry = _make_entry( + _welcome_payload(), EmailEventType.WELCOME_INVITE, attempts=4, max_attempts=5 + ) + email_strategy = MagicMock() + email_strategy._send = AsyncMock(side_effect=Exception("persistent failure")) + + mock_repo = AsyncMock() + mock_repo.mark_retry = AsyncMock() + mock_repo.mark_dead = AsyncMock() + + session_cm = MagicMock() + begin_cm = MagicMock() + begin_cm.__aenter__ = AsyncMock(return_value=None) + begin_cm.__aexit__ = AsyncMock(return_value=False) + mock_session = MagicMock() + mock_session.begin.return_value = begin_cm + session_cm.__aenter__ = AsyncMock(return_value=mock_session) + session_cm.__aexit__ = AsyncMock(return_value=False) + session_maker = MagicMock(return_value=session_cm) + + with patch( + "app.domains.notifications.worker.EmailOutboxRepository", + return_value=mock_repo, + ): + await _process_single(session_maker, email_strategy, entry, "worker-1") + + mock_repo.mark_dead.assert_awaited_once() + mock_repo.mark_retry.assert_not_awaited() diff --git a/tests/app/unit/ticket/test_ticket_schemas.py b/tests/app/unit/ticket/test_ticket_schemas.py new file mode 100644 index 0000000..a639e3a --- /dev/null +++ b/tests/app/unit/ticket/test_ticket_schemas.py @@ -0,0 +1,121 @@ 
+from uuid import uuid4 + +from app.domains.ticket.models import TicketCriticality, TicketStatus, TicketType +from app.domains.ticket.schemas import ( + AssignTicketRequest, + CreateTicketDTO, + EscalateTicketRequest, + TicketClosedEventPayload, + TicketEscalatedEventPayload, + TicketQueueFiltersDTO, + TicketSearchFiltersDTO, + TriageFinishedEventPayload, + UpdateTicketDTO, +) + + +def test_create_ticket_dto_accepts_existing_contract() -> None: + dto = CreateTicketDTO( + triage_id="67f0c9b8e4b0b1a2c3d4e5f6", + type=TicketType.ISSUE, + criticality=TicketCriticality.HIGH, + product="Sistema Financeiro", + description="Erro ao emitir boleto", + chat_ids=["67f0c9b8e4b0b1a2c3d4e5f7"], + client_id=uuid4(), + ) + + assert dto.type == TicketType.ISSUE + assert dto.criticality == TicketCriticality.HIGH + + +def test_ticket_search_filters_use_official_pagination_defaults() -> None: + filters = TicketSearchFiltersDTO(status=TicketStatus.AWAITING_ASSIGNMENT, page=2, page_size=10) + + assert filters.page == 2 + assert filters.page_size == 10 + + +def test_queue_filters_accept_provisional_department_fields() -> None: + filters = TicketQueueFiltersDTO( + department_id="dept-finance", + level="N2", + unassigned_only=True, + page=1, + page_size=20, + ) + + assert filters.department_id == "dept-finance" + assert filters.level == "N2" + assert filters.unassigned_only is True + + +def test_update_ticket_dto_accepts_awaiting_assignment_status() -> None: + dto = UpdateTicketDTO(status=TicketStatus.AWAITING_ASSIGNMENT) + + assert dto.status == TicketStatus.AWAITING_ASSIGNMENT + + +def test_assign_request_is_importable_and_validatable() -> None: + dto = AssignTicketRequest(agent_id=uuid4(), reason="Primeira atribuicao.") + + assert dto.reason == "Primeira atribuicao." 
+ + +def test_escalate_request_requires_target_agent_id_and_reason() -> None: + target_agent_id = uuid4() + dto = EscalateTicketRequest( + target_agent_id=target_agent_id, + reason="Subir para especialista", + ) + + assert dto.target_agent_id == target_agent_id + assert dto.reason == "Subir para especialista" + + +def test_triage_finished_event_payload_is_valid() -> None: + payload = TriageFinishedEventPayload( + triage_id="67f0c9b8e4b0b1a2c3d4e5f6", + type=TicketType.ISSUE, + criticality=TicketCriticality.HIGH, + product="Sistema Financeiro", + description="Erro ao emitir boleto", + chat_ids=["67f0c9b8e4b0b1a2c3d4e5f7"], + client_id=uuid4(), + ) + + assert payload.type == TicketType.ISSUE + assert payload.criticality == TicketCriticality.HIGH + + +def test_ticket_closed_event_payload_uses_new_status_contract() -> None: + payload = TicketClosedEventPayload( + ticket_id="67f0ca60e4b0b1a2c3d4e601", + triage_id="67f0c9b8e4b0b1a2c3d4e5f6", + client_id=uuid4(), + status=TicketStatus.FINISHED, + occurred_at="2026-04-14T12:30:00Z", + previous_status=TicketStatus.IN_PROGRESS, + closed_at="2026-04-14T12:30:00Z", + ) + + assert payload.event_name == "ticket.closed" + assert payload.status == TicketStatus.FINISHED + + +def test_ticket_escalated_event_payload_is_valid() -> None: + payload = TicketEscalatedEventPayload( + ticket_id="67f0ca60e4b0b1a2c3d4e601", + triage_id="67f0c9b8e4b0b1a2c3d4e5f6", + client_id=uuid4(), + status=TicketStatus.AWAITING_ASSIGNMENT, + occurred_at="2026-04-14T12:40:00Z", + previous_agent_id=uuid4(), + source_level="N1", + target_agent_id=uuid4(), + target_level="N2", + reason="Escalar para especialista", + ) + + assert payload.event_name == "ticket.escalated" + assert payload.status == TicketStatus.AWAITING_ASSIGNMENT diff --git a/tests/conftest.py b/tests/conftest.py index 216f38a..0967c88 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -18,6 +18,11 @@ from app.core.config import get_settings from app.db.mongo.dependencies import 
get_mongo_session from app.db.postgres.base import Base + +import app.domains.auth.models # noqa: F401 — register models with Base.metadata +import app.domains.companies.models # noqa: F401 — register models with Base.metadata +import app.domains.products.models # noqa: F401 — register models with Base.metadata +import app.domains.notifications.models # noqa: F401 — register models with Base.metadata from app.db.postgres.dependencies import get_postgres_session from app.domains.live_chat.entities import Conversation from app.domains.ticket.models import Ticket