From dd8f56a6a3c7c243fbd1e5e20d626b5d634347ca Mon Sep 17 00:00:00 2001 From: Allisson Azevedo Date: Wed, 18 Feb 2026 19:32:04 -0300 Subject: [PATCH 1/2] feat: add tokenization API with format-preserving token workflows Implements format-preserving tokenization service with UUID, numeric, luhn-preserving, and alphanumeric token formats. Supports deterministic and non-deterministic token generation, token lifecycle management (revoke, validate, expire), and dual database persistence (PostgreSQL/MySQL). Major changes: - New tokenization domain layer with FormatType, Token, and TokenizationKey models - Tokenization use cases with metrics decorators for observability - Token generator factory with format-specific implementations (UUID, numeric, Luhn, alphanumeric) - MySQL and PostgreSQL repository implementations for tokenization persistence - HTTP handlers with DTO validation for key management and token operations - CLI commands: create-tokenization-key, rotate-tokenization-key, clean-expired-tokens - Database migrations: 000002_add_tokenization (up/down for both PostgreSQL and MySQL) API endpoints: - POST /v1/tokenization/keys (create key with format + algorithm) - POST /v1/tokenization/keys/:name/rotate (version increment) - DELETE /v1/tokenization/keys/:id (soft delete) - POST /v1/tokenization/keys/:name/tokenize (generate token with optional TTL) - POST /v1/tokenization/detokenize (retrieve plaintext) - POST /v1/tokenization/validate (check token validity) - POST /v1/tokenization/revoke (mark token as revoked) Capability mapping: encrypt (tokenize), decrypt (detokenize), read (validate), delete (revoke/delete key), write (create key), rotate (rotate key version). Documentation updates: - Added docs/api/tokenization.md with endpoint contracts and security notes - Added docs/api/capability-matrix.md as canonical capability reference - Added docs/releases/v0.4.0.md with upgrade checklist and migration rollback guidance - Added docs/operations/policy-smoke-tests.md for capability verification runbook - Added docs/metadata.json for centralized release/API version tracking - Added docs/tools/check_docs_metadata.py consistency checker - Updated smoke test script/docs with tokenization round-trip validation - Expanded monitoring operations guide with tokenization metrics - Updated README with v0.4.0 feature highlights and pinned Docker image version - Cross-linked capability matrix from API endpoint docs and policy cookbook CI/CD enhancements: - Added API/docs consistency guard for PRs (enforces doc updates for API changes) - Added docs metadata check validation in CI workflow - Integrated check_docs_metadata.py into make docs-lint Testing: - Comprehensive test coverage for domain, service, use case, repository, and HTTP layers - Integration tests using real PostgreSQL and MySQL test containers - Handler tests with Gin test context and mock use cases - DTO validation tests for request/response contracts Breaking changes: None (additive migration, new endpoints under /v1/tokenization). Rollback note: Down migration available; safe to roll back before production tokenization usage. 
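Reviewer aid — a minimal Go sketch of the intended tokenize/detokenize round trip against the new endpoints. Endpoint paths, request fields, and capability requirements follow the description above; the base URL, `SECRETS_TOKEN` env var, sample value, and `payment-cards` key (assumed already created via create-tokenization-key) are illustrative placeholders, not part of this change:

```go
package main

import (
	"bytes"
	"encoding/base64"
	"encoding/json"
	"fmt"
	"net/http"
	"os"
)

// post sends a JSON body with a bearer token and decodes the JSON response.
func post(url, bearer string, in, out any) error {
	body, err := json.Marshal(in)
	if err != nil {
		return err
	}
	req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(body))
	if err != nil {
		return err
	}
	req.Header.Set("Authorization", "Bearer "+bearer)
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 300 {
		return fmt.Errorf("unexpected status: %s", resp.Status)
	}
	return json.NewDecoder(resp.Body).Decode(out)
}

func main() {
	base := "http://localhost:8080"
	bearer := os.Getenv("SECRETS_TOKEN") // client needs encrypt + decrypt capabilities

	// Tokenize: plaintext is base64-encoded, TTL is optional.
	plaintext := base64.StdEncoding.EncodeToString([]byte("4532015112830366"))
	var tok struct {
		Token string `json:"token"`
	}
	if err := post(base+"/v1/tokenization/keys/payment-cards/tokenize", bearer,
		map[string]any{"plaintext": plaintext, "ttl": 3600}, &tok); err != nil {
		panic(err)
	}

	// Detokenize: the token travels in the JSON body, not the URL path.
	var det struct {
		Plaintext string `json:"plaintext"`
	}
	if err := post(base+"/v1/tokenization/detokenize", bearer,
		map[string]any{"token": tok.Token}, &det); err != nil {
		panic(err)
	}
	fmt.Println(det.Plaintext == plaintext) // expected: true
}
```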
--- .github/workflows/ci.yml | 40 + .mockery.yaml | 8 + Makefile | 7 +- README.md | 21 +- cmd/app/commands/clean_expired_tokens.go | 92 + cmd/app/commands/create_tokenization_key.go | 75 + cmd/app/commands/helpers.go | 22 + cmd/app/commands/rotate_tokenization_key.go | 76 + cmd/app/main.go | 110 ++ docs/CHANGELOG.md | 39 +- docs/README.md | 13 +- docs/api/audit-logs.md | 7 +- docs/api/authentication.md | 3 +- docs/api/capability-matrix.md | 57 + docs/api/clients.md | 3 +- docs/api/policies.md | 52 +- docs/api/response-shapes.md | 48 +- docs/api/secrets.md | 3 +- docs/api/tokenization.md | 280 +++ docs/api/transit.md | 3 +- docs/api/versioning-policy.md | 7 +- docs/cli/commands.md | 92 +- docs/concepts/architecture.md | 36 +- docs/concepts/security-model.md | 14 +- docs/contributing.md | 44 +- docs/examples/curl.md | 36 +- docs/examples/go.md | 34 +- docs/examples/javascript.md | 50 +- docs/examples/python.md | 46 +- docs/getting-started/docker.md | 26 +- docs/getting-started/smoke-test.md | 5 +- docs/getting-started/smoke-test.sh | 72 +- docs/getting-started/troubleshooting.md | 14 +- docs/metadata.json | 5 + docs/openapi.yaml | 328 +++- docs/operations/failure-playbooks.md | 31 +- docs/operations/monitoring.md | 59 +- docs/operations/policy-smoke-tests.md | 122 ++ docs/operations/production.md | 32 +- docs/releases/v0.4.0.md | 81 + docs/tools/check_docs_metadata.py | 57 + internal/app/di.go | 421 ++++- internal/http/server.go | 53 + internal/testutil/database.go | 8 +- internal/tokenization/domain/const.go | 32 + internal/tokenization/domain/const_test.go | 93 + internal/tokenization/domain/errors.go | 31 + internal/tokenization/domain/token.go | 40 + internal/tokenization/domain/token_test.go | 152 ++ .../tokenization/domain/tokenization_key.go | 20 + internal/tokenization/http/dto/request.go | 184 ++ .../tokenization/http/dto/request_test.go | 479 +++++ internal/tokenization/http/dto/response.go | 59 + .../tokenization/http/dto/response_test.go | 260 +++ internal/tokenization/http/test_helpers.go | 29 + .../tokenization/http/tokenization_handler.go | 202 ++ .../http/tokenization_handler_test.go | 525 ++++++ .../http/tokenization_key_handler.go | 164 ++ .../http/tokenization_key_handler_test.go | 413 ++++ .../repository/mysql_repository.go | 462 +++++ .../repository/mysql_repository_test.go | 441 +++++ .../repository/postgresql_repository.go | 384 ++++ .../repository/postgresql_repository_test.go | 441 +++++ .../service/alphanumeric_generator.go | 62 + .../service/alphanumeric_generator_test.go | 225 +++ internal/tokenization/service/interface.go | 9 + .../tokenization/service/luhn_generator.go | 120 ++ .../service/luhn_generator_test.go | 233 +++ .../tokenization/service/numeric_generator.go | 53 + .../service/numeric_generator_test.go | 153 ++ .../service/token_generator_factory.go | 21 + .../service/token_generator_factory_test.go | 95 + .../tokenization/service/uuid_generator.go | 32 + .../service/uuid_generator_test.go | 78 + internal/tokenization/usecase/hash_service.go | 24 + internal/tokenization/usecase/interface.go | 105 ++ internal/tokenization/usecase/mocks/mocks.go | 1651 ++++++++++++++++ .../tokenization_key_metrics_decorator.go | 89 + ...tokenization_key_metrics_decorator_test.go | 336 ++++ .../usecase/tokenization_key_usecase.go | 179 ++ .../usecase/tokenization_key_usecase_test.go | 615 ++++++ .../usecase/tokenization_metrics_decorator.go | 119 ++ .../tokenization_metrics_decorator_test.go | 441 +++++ .../usecase/tokenization_usecase.go | 276 +++ 
.../usecase/tokenization_usecase_test.go | 1680 +++++++++++++++++ .../mysql/000002_add_tokenization.down.sql | 5 + .../mysql/000002_add_tokenization.up.sql | 34 + .../000002_add_tokenization.down.sql | 10 + .../postgresql/000002_add_tokenization.up.sql | 34 + test/integration/api_test.go | 1007 ++++++++-- 90 files changed, 14421 insertions(+), 308 deletions(-) create mode 100644 cmd/app/commands/clean_expired_tokens.go create mode 100644 cmd/app/commands/create_tokenization_key.go create mode 100644 cmd/app/commands/rotate_tokenization_key.go create mode 100644 docs/api/capability-matrix.md create mode 100644 docs/api/tokenization.md create mode 100644 docs/metadata.json create mode 100644 docs/operations/policy-smoke-tests.md create mode 100644 docs/releases/v0.4.0.md create mode 100644 docs/tools/check_docs_metadata.py create mode 100644 internal/tokenization/domain/const.go create mode 100644 internal/tokenization/domain/const_test.go create mode 100644 internal/tokenization/domain/errors.go create mode 100644 internal/tokenization/domain/token.go create mode 100644 internal/tokenization/domain/token_test.go create mode 100644 internal/tokenization/domain/tokenization_key.go create mode 100644 internal/tokenization/http/dto/request.go create mode 100644 internal/tokenization/http/dto/request_test.go create mode 100644 internal/tokenization/http/dto/response.go create mode 100644 internal/tokenization/http/dto/response_test.go create mode 100644 internal/tokenization/http/test_helpers.go create mode 100644 internal/tokenization/http/tokenization_handler.go create mode 100644 internal/tokenization/http/tokenization_handler_test.go create mode 100644 internal/tokenization/http/tokenization_key_handler.go create mode 100644 internal/tokenization/http/tokenization_key_handler_test.go create mode 100644 internal/tokenization/repository/mysql_repository.go create mode 100644 internal/tokenization/repository/mysql_repository_test.go create mode 100644 internal/tokenization/repository/postgresql_repository.go create mode 100644 internal/tokenization/repository/postgresql_repository_test.go create mode 100644 internal/tokenization/service/alphanumeric_generator.go create mode 100644 internal/tokenization/service/alphanumeric_generator_test.go create mode 100644 internal/tokenization/service/interface.go create mode 100644 internal/tokenization/service/luhn_generator.go create mode 100644 internal/tokenization/service/luhn_generator_test.go create mode 100644 internal/tokenization/service/numeric_generator.go create mode 100644 internal/tokenization/service/numeric_generator_test.go create mode 100644 internal/tokenization/service/token_generator_factory.go create mode 100644 internal/tokenization/service/token_generator_factory_test.go create mode 100644 internal/tokenization/service/uuid_generator.go create mode 100644 internal/tokenization/service/uuid_generator_test.go create mode 100644 internal/tokenization/usecase/hash_service.go create mode 100644 internal/tokenization/usecase/interface.go create mode 100644 internal/tokenization/usecase/mocks/mocks.go create mode 100644 internal/tokenization/usecase/tokenization_key_metrics_decorator.go create mode 100644 internal/tokenization/usecase/tokenization_key_metrics_decorator_test.go create mode 100644 internal/tokenization/usecase/tokenization_key_usecase.go create mode 100644 internal/tokenization/usecase/tokenization_key_usecase_test.go create mode 100644 internal/tokenization/usecase/tokenization_metrics_decorator.go create mode 100644 
internal/tokenization/usecase/tokenization_metrics_decorator_test.go create mode 100644 internal/tokenization/usecase/tokenization_usecase.go create mode 100644 internal/tokenization/usecase/tokenization_usecase_test.go create mode 100644 migrations/mysql/000002_add_tokenization.down.sql create mode 100644 migrations/mysql/000002_add_tokenization.up.sql create mode 100644 migrations/postgresql/000002_add_tokenization.down.sql create mode 100644 migrations/postgresql/000002_add_tokenization.up.sql diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8770398..b73f937 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -53,6 +53,43 @@ jobs: exit 1 fi + - name: API docs consistency guard (PRs) + if: github.event_name == 'pull_request' + run: | + set -euo pipefail + + CHANGED_FILES="$(git diff --name-only "${{ github.event.pull_request.base.sha }}" "${{ github.sha }}")" + if [ -z "$CHANGED_FILES" ]; then + exit 0 + fi + + api_changed=false + docs_changed=false + + while IFS= read -r file; do + [ -z "$file" ] && continue + + case "$file" in + internal/*/http/*.go|cmd/app/commands/*.go|migrations/*/*.sql) + api_changed=true + ;; + esac + + case "$file" in + docs/api/*|docs/openapi.yaml|docs/examples/*|docs/operations/*|docs/getting-started/*|docs/cli/commands.md|docs/releases/*|docs/CHANGELOG.md|docs/README.md|docs/metadata.json|README.md) + docs_changed=true + ;; + esac + done < KEK -> DEK -> Secret Data`) - ๐Ÿš„ Transit encryption (`/v1/transit/keys/*`) for encrypt/decrypt as a service (decrypt input uses `:`; see [Transit API docs](docs/api/transit.md), [create vs rotate](docs/api/transit.md#create-vs-rotate), and [error matrix](docs/api/transit.md#endpoint-error-matrix)) +- ๐ŸŽซ Tokenization API (`/v1/tokenization/*`) for token generation, detokenization, validation, and revocation - ๐Ÿ‘ค Token-based authentication and policy-based authorization - ๐Ÿ“ฆ Versioned secrets by path (`/v1/secrets/*path`) - ๐Ÿ“œ Audit logs with request correlation (`request_id`) and filtering @@ -92,6 +98,7 @@ All detailed guides include practical use cases and copy/paste-ready examples. - Clients: `GET/POST /v1/clients`, `GET/PUT/DELETE /v1/clients/:id` - Secrets: `POST/GET/DELETE /v1/secrets/*path` - Transit: `POST /v1/transit/keys`, `POST /v1/transit/keys/:name/rotate`, `POST /v1/transit/keys/:name/encrypt`, `POST /v1/transit/keys/:name/decrypt`, `DELETE /v1/transit/keys/:id` ([create vs rotate](docs/api/transit.md#create-vs-rotate), [error matrix](docs/api/transit.md#endpoint-error-matrix)) +- Tokenization: `POST /v1/tokenization/keys`, `POST /v1/tokenization/keys/:name/rotate`, `DELETE /v1/tokenization/keys/:id`, `POST /v1/tokenization/keys/:name/tokenize`, `POST /v1/tokenization/detokenize`, `POST /v1/tokenization/validate`, `POST /v1/tokenization/revoke` - Audit logs: `GET /v1/audit-logs` - Metrics: `GET /metrics` (available when `METRICS_ENABLED=true`) diff --git a/cmd/app/commands/clean_expired_tokens.go b/cmd/app/commands/clean_expired_tokens.go new file mode 100644 index 0000000..6e0e93f --- /dev/null +++ b/cmd/app/commands/clean_expired_tokens.go @@ -0,0 +1,92 @@ +package commands + +import ( + "context" + "encoding/json" + "fmt" + "log/slog" + "os" + + "github.com/allisson/secrets/internal/app" + "github.com/allisson/secrets/internal/config" +) + +// RunCleanExpiredTokens deletes expired tokens older than the specified number of days. +// Supports dry-run mode to preview deletion count and both text/JSON output formats. 
+//
+// Requirements: Database must be migrated and accessible.
+func RunCleanExpiredTokens(ctx context.Context, days int, dryRun bool, format string) error {
+	// Validate days parameter
+	if days < 0 {
+		return fmt.Errorf("days must be a non-negative number, got: %d", days)
+	}
+
+	// Load configuration
+	cfg := config.Load()
+
+	// Create DI container
+	container := app.NewContainer(cfg)
+
+	// Get logger from container
+	logger := container.Logger()
+	logger.Info("cleaning expired tokens",
+		slog.Int("days", days),
+		slog.Bool("dry_run", dryRun),
+	)
+
+	// Ensure cleanup on exit
+	defer closeContainer(container, logger)
+
+	// Get tokenization use case from container
+	tokenizationUseCase, err := container.TokenizationUseCase()
+	if err != nil {
+		return fmt.Errorf("failed to initialize tokenization use case: %w", err)
+	}
+
+	// Execute deletion or count operation
+	count, err := tokenizationUseCase.CleanupExpired(ctx, days, dryRun)
+	if err != nil {
+		return fmt.Errorf("failed to cleanup expired tokens: %w", err)
+	}
+
+	// Output result based on format
+	if format == "json" {
+		outputCleanExpiredJSON(count, days, dryRun)
+	} else {
+		outputCleanExpiredText(count, days, dryRun)
+	}
+
+	logger.Info("cleanup completed",
+		slog.Int64("count", count),
+		slog.Int("days", days),
+		slog.Bool("dry_run", dryRun),
+	)
+
+	return nil
+}
+
+// outputCleanExpiredText outputs the result in human-readable text format.
+func outputCleanExpiredText(count int64, days int, dryRun bool) {
+	if dryRun {
+		fmt.Printf("Dry-run mode: Would delete %d expired token(s) older than %d day(s)\n", count, days)
+	} else {
+		fmt.Printf("Successfully deleted %d expired token(s) older than %d day(s)\n", count, days)
+	}
+}
+
+// outputCleanExpiredJSON outputs the result in JSON format for machine consumption.
+func outputCleanExpiredJSON(count int64, days int, dryRun bool) {
+	result := map[string]interface{}{
+		"count": count,
+		"days": days,
+		"dry_run": dryRun,
+	}
+
+	jsonBytes, err := json.MarshalIndent(result, "", " ")
+	if err != nil {
+		fmt.Fprintf(os.Stderr, "failed to marshal JSON: %v\n", err)
+		return
+	}
+
+	fmt.Println(string(jsonBytes))
+}
diff --git a/cmd/app/commands/create_tokenization_key.go b/cmd/app/commands/create_tokenization_key.go
new file mode 100644
index 0000000..248ac1a
--- /dev/null
+++ b/cmd/app/commands/create_tokenization_key.go
@@ -0,0 +1,75 @@
+package commands
+
+import (
+	"context"
+	"fmt"
+	"log/slog"
+
+	"github.com/allisson/secrets/internal/app"
+	"github.com/allisson/secrets/internal/config"
+)
+
+// RunCreateTokenizationKey creates a new tokenization key with the specified parameters.
+// Should be run during initial setup or when adding new tokenization formats.
+//
+// Requirements: Database must be migrated, MASTER_KEYS and ACTIVE_MASTER_KEY_ID must be set.
+func RunCreateTokenizationKey( + ctx context.Context, + name string, + formatType string, + isDeterministic bool, + algorithmStr string, +) error { + // Load configuration + cfg := config.Load() + + // Create DI container + container := app.NewContainer(cfg) + + // Get logger from container + logger := container.Logger() + logger.Info("creating new tokenization key", + slog.String("name", name), + slog.String("format_type", formatType), + slog.Bool("is_deterministic", isDeterministic), + slog.String("algorithm", algorithmStr), + ) + + // Ensure cleanup on exit + defer closeContainer(container, logger) + + // Parse format type + format, err := parseFormatType(formatType) + if err != nil { + return err + } + + // Parse algorithm + algorithm, err := parseAlgorithm(algorithmStr) + if err != nil { + return err + } + + // Get tokenization key use case from container + tokenizationKeyUseCase, err := container.TokenizationKeyUseCase() + if err != nil { + return fmt.Errorf("failed to initialize tokenization key use case: %w", err) + } + + // Create the tokenization key + key, err := tokenizationKeyUseCase.Create(ctx, name, format, isDeterministic, algorithm) + if err != nil { + return fmt.Errorf("failed to create tokenization key: %w", err) + } + + logger.Info("tokenization key created successfully", + slog.String("id", key.ID.String()), + slog.String("name", key.Name), + slog.String("format_type", string(key.FormatType)), + slog.Uint64("version", uint64(key.Version)), + slog.Bool("is_deterministic", key.IsDeterministic), + slog.String("algorithm", string(algorithm)), + ) + + return nil +} diff --git a/cmd/app/commands/helpers.go b/cmd/app/commands/helpers.go index 798bee9..078e70e 100644 --- a/cmd/app/commands/helpers.go +++ b/cmd/app/commands/helpers.go @@ -3,11 +3,13 @@ package commands import ( "context" + "fmt" "log/slog" "github.com/golang-migrate/migrate/v4" "github.com/allisson/secrets/internal/app" + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" ) // closeContainer closes all resources in the container and logs any errors. @@ -28,3 +30,23 @@ func closeMigrate(migrate *migrate.Migrate, logger *slog.Logger) { ) } } + +// parseFormatType converts format type string to tokenizationDomain.FormatType. +// Returns an error if the format type string is invalid. +func parseFormatType(formatType string) (tokenizationDomain.FormatType, error) { + switch formatType { + case "uuid": + return tokenizationDomain.FormatUUID, nil + case "numeric": + return tokenizationDomain.FormatNumeric, nil + case "luhn-preserving": + return tokenizationDomain.FormatLuhnPreserving, nil + case "alphanumeric": + return tokenizationDomain.FormatAlphanumeric, nil + default: + return "", fmt.Errorf( + "invalid format type: %s (valid options: uuid, numeric, luhn-preserving, alphanumeric)", + formatType, + ) + } +} diff --git a/cmd/app/commands/rotate_tokenization_key.go b/cmd/app/commands/rotate_tokenization_key.go new file mode 100644 index 0000000..1fbd206 --- /dev/null +++ b/cmd/app/commands/rotate_tokenization_key.go @@ -0,0 +1,76 @@ +package commands + +import ( + "context" + "fmt" + "log/slog" + + "github.com/allisson/secrets/internal/app" + "github.com/allisson/secrets/internal/config" +) + +// RunRotateTokenizationKey creates a new version of an existing tokenization key. +// Increments the version number and generates a new DEK while preserving old versions +// for detokenization of previously issued tokens. 
+// +// Requirements: Database must be migrated, named tokenization key must exist. +func RunRotateTokenizationKey( + ctx context.Context, + name string, + formatType string, + isDeterministic bool, + algorithmStr string, +) error { + // Load configuration + cfg := config.Load() + + // Create DI container + container := app.NewContainer(cfg) + + // Get logger from container + logger := container.Logger() + logger.Info("rotating tokenization key", + slog.String("name", name), + slog.String("format_type", formatType), + slog.Bool("is_deterministic", isDeterministic), + slog.String("algorithm", algorithmStr), + ) + + // Ensure cleanup on exit + defer closeContainer(container, logger) + + // Parse format type + format, err := parseFormatType(formatType) + if err != nil { + return err + } + + // Parse algorithm + algorithm, err := parseAlgorithm(algorithmStr) + if err != nil { + return err + } + + // Get tokenization key use case from container + tokenizationKeyUseCase, err := container.TokenizationKeyUseCase() + if err != nil { + return fmt.Errorf("failed to initialize tokenization key use case: %w", err) + } + + // Rotate the tokenization key + key, err := tokenizationKeyUseCase.Rotate(ctx, name, format, isDeterministic, algorithm) + if err != nil { + return fmt.Errorf("failed to rotate tokenization key: %w", err) + } + + logger.Info("tokenization key rotated successfully", + slog.String("id", key.ID.String()), + slog.String("name", key.Name), + slog.String("format_type", string(key.FormatType)), + slog.Uint64("version", uint64(key.Version)), + slog.Bool("is_deterministic", key.IsDeterministic), + slog.String("algorithm", string(algorithm)), + ) + + return nil +} diff --git a/cmd/app/main.go b/cmd/app/main.go index f30bb45..d83466e 100644 --- a/cmd/app/main.go +++ b/cmd/app/main.go @@ -76,6 +76,116 @@ func main() { return commands.RunRotateKek(ctx, cmd.String("algorithm")) }, }, + { + Name: "create-tokenization-key", + Usage: "Create a new tokenization key for format-preserving tokens", + Flags: []cli.Flag{ + &cli.StringFlag{ + Name: "name", + Aliases: []string{"n"}, + Required: true, + Usage: "Unique name for the tokenization key", + }, + &cli.StringFlag{ + Name: "format", + Aliases: []string{"fmt"}, + Value: "uuid", + Usage: "Token format: uuid, numeric, luhn-preserving, or alphanumeric", + }, + &cli.BoolFlag{ + Name: "deterministic", + Aliases: []string{"det"}, + Value: false, + Usage: "Enable deterministic mode (same plaintext โ†’ same token)", + }, + &cli.StringFlag{ + Name: "algorithm", + Aliases: []string{"alg"}, + Value: "aes-gcm", + Usage: "Encryption algorithm to use (aes-gcm or chacha20-poly1305)", + }, + }, + Action: func(ctx context.Context, cmd *cli.Command) error { + return commands.RunCreateTokenizationKey( + ctx, + cmd.String("name"), + cmd.String("format"), + cmd.Bool("deterministic"), + cmd.String("algorithm"), + ) + }, + }, + { + Name: "rotate-tokenization-key", + Usage: "Rotate an existing tokenization key to a new version", + Flags: []cli.Flag{ + &cli.StringFlag{ + Name: "name", + Aliases: []string{"n"}, + Required: true, + Usage: "Name of the tokenization key to rotate", + }, + &cli.StringFlag{ + Name: "format", + Aliases: []string{"fmt"}, + Value: "uuid", + Usage: "Token format: uuid, numeric, luhn-preserving, or alphanumeric", + }, + &cli.BoolFlag{ + Name: "deterministic", + Aliases: []string{"det"}, + Value: false, + Usage: "Enable deterministic mode (same plaintext โ†’ same token)", + }, + &cli.StringFlag{ + Name: "algorithm", + Aliases: []string{"alg"}, + Value: 
"aes-gcm", + Usage: "Encryption algorithm to use (aes-gcm or chacha20-poly1305)", + }, + }, + Action: func(ctx context.Context, cmd *cli.Command) error { + return commands.RunRotateTokenizationKey( + ctx, + cmd.String("name"), + cmd.String("format"), + cmd.Bool("deterministic"), + cmd.String("algorithm"), + ) + }, + }, + { + Name: "clean-expired-tokens", + Usage: "Delete expired tokens older than specified days", + Flags: []cli.Flag{ + &cli.IntFlag{ + Name: "days", + Aliases: []string{"d"}, + Required: true, + Usage: "Delete expired tokens older than this many days", + }, + &cli.BoolFlag{ + Name: "dry-run", + Aliases: []string{"n"}, + Value: false, + Usage: "Show how many tokens would be deleted without deleting", + }, + &cli.StringFlag{ + Name: "format", + Aliases: []string{"f"}, + Value: "text", + Usage: "Output format: 'text' or 'json'", + }, + }, + Action: func(ctx context.Context, cmd *cli.Command) error { + return commands.RunCleanExpiredTokens( + ctx, + cmd.Int("days"), + cmd.Bool("dry-run"), + cmd.String("format"), + ) + }, + }, { Name: "create-client", Usage: "Create a new authentication client with policies", diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index c30534c..85fbf92 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -1,6 +1,43 @@ # ๐Ÿ—’๏ธ Documentation Changelog -> Last updated: 2026-02-16 +> Last updated: 2026-02-18 + +## 2026-02-18 (docs v8 - docs QA and operations polish) + +- Added docs metadata source file `docs/metadata.json` and metadata consistency checker +- Added `make docs-check-metadata` and integrated it into `make docs-lint` +- Added CI docs metadata check and API/docs consistency guard for PRs +- Added policy verification runbook: `docs/operations/policy-smoke-tests.md` +- Added retention defaults table to production guide and linked policy smoke tests +- Added tokenization lifecycle sequence diagram in architecture docs +- Added copy-safe examples policy and release PR docs QA guard guidance in contributing docs + +## 2026-02-18 (docs v7 - final v0.4.0 hardening) + +- Added canonical capability reference page: `docs/api/capability-matrix.md` +- Linked capability matrix from API endpoint docs, policy cookbook, and docs indexes +- Expanded OpenAPI description and monitoring docs with route-template notes (`{name}` vs `:name`/`*path`) +- Added tokenization deterministic-mode caveats in curl, Python, JavaScript, and Go examples +- Expanded tokenization API guidance with metadata data-classification rules +- Added rollback guidance for additive tokenization schema migration in `docs/releases/v0.4.0.md` +- Added migration-focused troubleshooting for tokenization rollout and expanded smoke test coverage + +## 2026-02-18 (docs v6 - v0.4.0 release prep) + +- Added release notes page: `docs/releases/v0.4.0.md` and promoted it as current in docs indexes +- Updated pinned Docker examples from `allisson/secrets:v0.3.0` to `allisson/secrets:v0.4.0` +- Updated root `README.md` with `What's New in v0.4.0`, tokenization API overview, and release links +- Added tokenization endpoints and corrected request/response contracts in `docs/api/tokenization.md` +- Added tokenization CLI command docs in `docs/cli/commands.md` +- Added tokenization monitoring operations and retention workflow updates in production docs +- Added explicit OpenAPI-coverage gap notes for tokenization rollout docs +- Added tokenization snippets to Python, JavaScript, and Go examples for cross-language parity +- Added tokenization incident runbooks and policy mapping clarifications +- Added 
`v0.4.0` upgrade checklist (migrate, verify, tokenization smoke checks, retention cleanup) +- Expanded OpenAPI baseline with tokenization endpoint and schema coverage +- Added canonical capability matrix reference and cross-linked API docs to reduce policy drift +- Expanded smoke test script/docs with tokenization round-trip + revoke validation +- Added tokenization migration verification troubleshooting section ## 2026-02-16 (docs v5 - documentation quality improvements) diff --git a/docs/README.md b/docs/README.md index 2bbb122..4e1b76b 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1,6 +1,8 @@ # ๐Ÿ“š Secrets Documentation -> Last updated: 2026-02-16 +> Last updated: 2026-02-18 + +Metadata source for release/API labels: `docs/metadata.json` Welcome to the full documentation for Secrets. Pick a path and dive in ๐Ÿš€ @@ -28,6 +30,7 @@ Welcome to the full documentation for Secrets. Pick a path and dive in ๐Ÿš€ - ๐Ÿ“Š [operations/monitoring.md](operations/monitoring.md) - ๐Ÿญ [operations/production.md](operations/production.md) - ๐Ÿš‘ [operations/failure-playbooks.md](operations/failure-playbooks.md) +- ๐Ÿงช [operations/policy-smoke-tests.md](operations/policy-smoke-tests.md) - ๐Ÿ› ๏ธ [development/testing.md](development/testing.md) - ๐Ÿค [contributing.md](contributing.md) - ๐Ÿ—’๏ธ [CHANGELOG.md](CHANGELOG.md) @@ -47,8 +50,10 @@ Welcome to the full documentation for Secrets. Pick a path and dive in ๐Ÿš€ - ๐Ÿ” [api/authentication.md](api/authentication.md) - ๐Ÿ‘ค [api/clients.md](api/clients.md) - ๐Ÿ“˜ [api/policies.md](api/policies.md) +- ๐Ÿ—‚๏ธ [api/capability-matrix.md](api/capability-matrix.md) - ๐Ÿ“ฆ [api/secrets.md](api/secrets.md) - ๐Ÿš„ [api/transit.md](api/transit.md) +- ๐ŸŽซ [api/tokenization.md](api/tokenization.md) - ๐Ÿ“œ [api/audit-logs.md](api/audit-logs.md) - ๐Ÿงฑ [api/response-shapes.md](api/response-shapes.md) - ๐Ÿงฉ [api/versioning-policy.md](api/versioning-policy.md) @@ -56,12 +61,14 @@ Welcome to the full documentation for Secrets. Pick a path and dive in ๐Ÿš€ OpenAPI scope note: -- `openapi.yaml` is a baseline subset for common API flows in `v0.3.0` +- `openapi.yaml` is a baseline subset for common API flows in `v0.4.0` - Full endpoint behavior is documented in the endpoint pages under `docs/api/` +- Tokenization endpoints are included in `openapi.yaml` for `v0.4.0` ## ๐Ÿš€ Releases -- ๐Ÿ“ฆ [releases/v0.3.0.md](releases/v0.3.0.md) +- ๐Ÿ“ฆ [releases/v0.4.0.md](releases/v0.4.0.md) +- ๐Ÿ“ฆ [releases/v0.3.0.md](releases/v0.3.0.md) (historical) - ๐Ÿ“ฆ [releases/v0.2.0.md](releases/v0.2.0.md) (historical) - ๐Ÿ“ฆ [releases/v0.1.0.md](releases/v0.1.0.md) (historical) diff --git a/docs/api/audit-logs.md b/docs/api/audit-logs.md index 2944b59..f4f6c7c 100644 --- a/docs/api/audit-logs.md +++ b/docs/api/audit-logs.md @@ -1,6 +1,6 @@ # ๐Ÿ“œ Audit Logs API -> Last updated: 2026-02-14 +> Last updated: 2026-02-18 > Applies to: API v1 Audit logs capture capability checks and access attempts for monitoring and compliance. @@ -14,6 +14,10 @@ Audit logs capture capability checks and access attempts for monitoring and comp Authentication: required (Bearer token). Authorization: `read` capability for `/v1/audit-logs`. 
+Capability reference:
+
+- Canonical mapping source: [Capability matrix](capability-matrix.md)
+
 ## Endpoint
 
 - `GET /v1/audit-logs`
@@ -170,6 +174,7 @@ curl -s "http://localhost:8080/v1/audit-logs?limit=100" \
 - [Authentication API](authentication.md)
 - [Clients API](clients.md)
 - [Policies cookbook](policies.md)
+- [Capability matrix](capability-matrix.md)
 - [Response shapes](response-shapes.md)
 - [API compatibility policy](versioning-policy.md)
 - [Glossary](../concepts/glossary.md)
diff --git a/docs/api/authentication.md b/docs/api/authentication.md
index 758d6ec..fcaee84 100644
--- a/docs/api/authentication.md
+++ b/docs/api/authentication.md
@@ -1,6 +1,6 @@
 # 🔐 Authentication API
 
-> Last updated: 2026-02-14
+> Last updated: 2026-02-18
 > Applies to: API v1
 
 All protected endpoints require `Authorization: Bearer `.
@@ -116,5 +116,6 @@ Representative error payloads (exact messages may vary):
 
 - [Clients API](clients.md)
 - [Policies cookbook](policies.md)
+- [Capability matrix](capability-matrix.md)
 - [Audit logs API](audit-logs.md)
 - [Response shapes](response-shapes.md)
diff --git a/docs/api/capability-matrix.md b/docs/api/capability-matrix.md
new file mode 100644
index 0000000..087a86d
--- /dev/null
+++ b/docs/api/capability-matrix.md
@@ -0,0 +1,57 @@
+# 🗂️ Capability Matrix
+
+> Last updated: 2026-02-18
+> Applies to: API v1
+
+This page is the canonical capability-to-endpoint reference used by API docs and policy templates.
+
+## Capability Definitions
+
+- `read`: list or inspect metadata/state without decrypting payload values
+- `write`: create or update non-cryptographic resources and key definitions
+- `delete`: delete resources or revoke token lifecycle entries
+- `encrypt`: create encrypted outputs (secrets writes, transit encrypt, tokenization tokenize)
+- `decrypt`: resolve encrypted/tokenized values back to plaintext
+- `rotate`: create new key versions
+
+## Endpoint Matrix
+
+| Endpoint | Required capability |
+| --- | --- |
+| `POST /v1/clients` | `write` |
+| `GET /v1/clients` | `read` |
+| `GET /v1/clients/:id` | `read` |
+| `PUT /v1/clients/:id` | `write` |
+| `DELETE /v1/clients/:id` | `delete` |
+| `GET /v1/audit-logs` | `read` |
+| `POST /v1/secrets/*path` | `encrypt` |
+| `GET /v1/secrets/*path` | `decrypt` |
+| `DELETE /v1/secrets/*path` | `delete` |
+| `POST /v1/transit/keys` | `write` |
+| `POST /v1/transit/keys/:name/rotate` | `rotate` |
+| `DELETE /v1/transit/keys/:id` | `delete` |
+| `POST /v1/transit/keys/:name/encrypt` | `encrypt` |
+| `POST /v1/transit/keys/:name/decrypt` | `decrypt` |
+| `POST /v1/tokenization/keys` | `write` |
+| `POST /v1/tokenization/keys/:name/rotate` | `rotate` |
+| `DELETE /v1/tokenization/keys/:id` | `delete` |
+| `POST /v1/tokenization/keys/:name/tokenize` | `encrypt` |
+| `POST /v1/tokenization/detokenize` | `decrypt` |
+| `POST /v1/tokenization/validate` | `read` |
+| `POST /v1/tokenization/revoke` | `delete` |
+
+## Policy Authoring Notes
+
+- Use path scope as narrowly as possible (service + environment prefixes).
+- Avoid the wildcard `*` except for temporary break-glass workflows.
+- Keep encrypt and decrypt separated across clients when operationally possible (see the sketch below).
+- For tokenization lifecycle endpoints, the token value is passed in the JSON body; the policy path is the endpoint path itself.
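+
+A minimal sketch of that encrypt/decrypt separation, with illustrative key and path names — a tokenize-only client policy:
+
+```json
+[
+  { "path": "/v1/tokenization/keys/payment-*/tokenize", "capabilities": ["encrypt"] }
+]
+```
+
+and a matching detokenize-only client policy:
+
+```json
+[
+  { "path": "/v1/tokenization/detokenize", "capabilities": ["decrypt"] }
+]
+```
+
+Splitting the two keeps plaintext recovery confined to the detokenizing client even if the tokenizing client's credentials leak.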
+ +## See also + +- [Policies cookbook](policies.md) +- [Authentication API](authentication.md) +- [Clients API](clients.md) +- [Secrets API](secrets.md) +- [Transit API](transit.md) +- [Tokenization API](tokenization.md) diff --git a/docs/api/clients.md b/docs/api/clients.md index 1acdceb..4474baa 100644 --- a/docs/api/clients.md +++ b/docs/api/clients.md @@ -1,6 +1,6 @@ # ๐Ÿ‘ค Clients API -> Last updated: 2026-02-14 +> Last updated: 2026-02-18 > Applies to: API v1 Client APIs manage machine identities and policy documents. @@ -146,6 +146,7 @@ Expected result: create returns `201 Created` with one-time `secret`; list retur - [Authentication API](authentication.md) - [Policies cookbook](policies.md) +- [Capability matrix](capability-matrix.md) - [Audit logs API](audit-logs.md) - [Response shapes](response-shapes.md) - [API compatibility policy](versioning-policy.md) diff --git a/docs/api/policies.md b/docs/api/policies.md index 1deb174..012377c 100644 --- a/docs/api/policies.md +++ b/docs/api/policies.md @@ -1,6 +1,6 @@ # ๐Ÿ“˜ Authorization Policy Cookbook -> Last updated: 2026-02-14 +> Last updated: 2026-02-18 > Applies to: API v1 Ready-to-use policy templates for common service roles. @@ -15,6 +15,7 @@ Ready-to-use policy templates for common service roles. - [5) Audit log reader](#5-audit-log-reader) - [6) Break-glass admin (emergency)](#6-break-glass-admin-emergency) - [7) Key operator](#7-key-operator) +- [8) Tokenization operator](#8-tokenization-operator) - [Policy mismatch example (wrong vs fixed)](#policy-mismatch-example-wrong-vs-fixed) - [Common policy mistakes](#common-policy-mistakes) - [Best practices](#best-practices) @@ -38,6 +39,17 @@ Ready-to-use policy templates for common service roles. Capabilities: `read`, `write`, `delete`, `encrypt`, `decrypt`, `rotate`. +Endpoint capability intent (quick map, condensed from [Capability matrix](capability-matrix.md)): + +| Endpoint family | Typical capability | +| --- | --- | +| `GET /v1/clients`, `GET /v1/audit-logs`, `POST /v1/tokenization/validate` | `read` | +| `POST /v1/clients`, `PUT /v1/clients/:id`, `POST /v1/transit/keys`, `POST /v1/tokenization/keys` | `write` | +| `DELETE /v1/clients/:id`, `DELETE /v1/transit/keys/:id`, `DELETE /v1/tokenization/keys/:id`, `POST /v1/tokenization/revoke` | `delete` | +| `POST /v1/secrets/*path`, `POST /v1/transit/keys/:name/encrypt`, `POST /v1/tokenization/keys/:name/tokenize` | `encrypt` | +| `GET /v1/secrets/*path`, `POST /v1/transit/keys/:name/decrypt`, `POST /v1/tokenization/detokenize` | `decrypt` | +| `POST /v1/transit/keys/:name/rotate`, `POST /v1/tokenization/keys/:name/rotate` | `rotate` | + ## 1) Read-only service Use when a service only reads existing secrets. @@ -156,6 +168,41 @@ Use for teams responsible only for transit key lifecycle. Risk note: scope key names by environment when possible (for example `/v1/transit/keys/prod-*`). +## 8) Tokenization operator + +Use for services that manage tokenization keys and token lifecycle operations. 
+ +```json +[ + { + "path": "/v1/tokenization/keys", + "capabilities": ["write"] + }, + { + "path": "/v1/tokenization/keys/*/rotate", + "capabilities": ["rotate"] + }, + { + "path": "/v1/tokenization/keys/*/tokenize", + "capabilities": ["encrypt"] + }, + { + "path": "/v1/tokenization/detokenize", + "capabilities": ["decrypt"] + }, + { + "path": "/v1/tokenization/validate", + "capabilities": ["read"] + }, + { + "path": "/v1/tokenization/revoke", + "capabilities": ["delete"] + } +] +``` + +Risk note: avoid wildcard tokenization access for application clients that only need tokenize or detokenize. + ## Policy mismatch example (wrong vs fixed) Wrong policy (insufficient capability for secret reads): @@ -190,8 +237,10 @@ Also verify path matching, for example `/v1/secrets/app/prod/*` if you want tigh | --- | --- | --- | | `403` on `GET /v1/secrets/*path` | Used `read` instead of `decrypt` | Grant `decrypt` for the secret path | | `403` on transit rotate | Missing `rotate` capability | Add `rotate` on `/v1/transit/keys/*/rotate` | +| `403` on tokenization detokenize | Used `read` instead of `decrypt` | Grant `decrypt` on `/v1/tokenization/detokenize` | | Service can access too much | Over-broad wildcard `*` path | Scope paths to service/environment prefixes | | Writes fail on secrets endpoint | Used `write` instead of `encrypt` | Grant `encrypt` for `POST /v1/secrets/*path` | +| Tokenization lifecycle calls fail | Sent token in URL path policy scope only | Add explicit paths for `/v1/tokenization/detokenize`, `/v1/tokenization/validate`, and `/v1/tokenization/revoke` | | Audit query denied | Missing `read` on `/v1/audit-logs` | Add explicit audit read policy | ## Best practices @@ -205,5 +254,6 @@ Also verify path matching, for example `/v1/secrets/app/prod/*` if you want tigh - [Authentication API](authentication.md) - [Clients API](clients.md) +- [Capability matrix](capability-matrix.md) - [Secrets API](secrets.md) - [Transit API](transit.md) diff --git a/docs/api/response-shapes.md b/docs/api/response-shapes.md index 4cfce11..7c6fa0a 100644 --- a/docs/api/response-shapes.md +++ b/docs/api/response-shapes.md @@ -1,6 +1,6 @@ # ๐Ÿงฑ API Response Shapes -> Last updated: 2026-02-14 +> Last updated: 2026-02-18 > Applies to: API v1 Use these representative response schemas as a stable reference across endpoint docs. @@ -65,6 +65,51 @@ Transit decrypt: } ``` +Tokenization key create: + +```json +{ + "id": "0194f4a6-7ec7-78e6-9fe7-5ca35fef48db", + "name": "payment-cards", + "version": 1, + "format_type": "luhn-preserving", + "is_deterministic": true, + "created_at": "2026-02-18T10:30:00Z" +} +``` + +Tokenize: + +```json +{ + "token": "4532015112830366", + "metadata": { + "last_four": "0366" + }, + "created_at": "2026-02-18T10:35:00Z", + "expires_at": "2026-02-18T11:35:00Z" +} +``` + +Detokenize: + +```json +{ + "plaintext": "NDUzMjAxNTExMjgzMDM2Ng==", + "metadata": { + "last_four": "0366" + } +} +``` + +Token validate: + +```json +{ + "valid": true +} +``` + Input contract note: transit decrypt expects `ciphertext` in format `:`. See [Transit API](transit.md#decrypt-input-contract). 
@@ -124,5 +169,6 @@ Representative conflict payload (for example duplicate transit key create): - [Clients API](clients.md) - [Secrets API](secrets.md) - [Transit API](transit.md) +- [Tokenization API](tokenization.md) - [API compatibility policy](versioning-policy.md) - [Glossary](../concepts/glossary.md) diff --git a/docs/api/secrets.md b/docs/api/secrets.md index e2f0235..ff94925 100644 --- a/docs/api/secrets.md +++ b/docs/api/secrets.md @@ -1,6 +1,6 @@ # ๐Ÿ“ฆ Secrets API -> Last updated: 2026-02-14 +> Last updated: 2026-02-18 > Applies to: API v1 Secrets are versioned by path and encrypted with envelope encryption. @@ -157,6 +157,7 @@ Expected result: write returns `201 Created`; read returns `200 OK` with base64 - [Authentication API](authentication.md) - [Policies cookbook](policies.md) +- [Capability matrix](capability-matrix.md) - [Response shapes](response-shapes.md) - [API compatibility policy](versioning-policy.md) - [Curl examples](../examples/curl.md) diff --git a/docs/api/tokenization.md b/docs/api/tokenization.md new file mode 100644 index 0000000..ecd6a9d --- /dev/null +++ b/docs/api/tokenization.md @@ -0,0 +1,280 @@ +# ๐ŸŽซ Tokenization API + +> Last updated: 2026-02-18 +> Applies to: API v1 + +The Tokenization API provides format-preserving token generation for sensitive values, +with optional deterministic behavior and token lifecycle management. + +## Compatibility + +- API surface: `/v1/tokenization/*` +- Server expectation: Secrets server with initialized KEK and tokenization migrations applied +- OpenAPI baseline: `docs/openapi.yaml` (subset coverage) + +OpenAPI coverage note: + +- Tokenization endpoint coverage is included in `docs/openapi.yaml` for `v0.4.0` +- This page remains the most detailed contract reference with examples and operational guidance + +All endpoints require `Authorization: Bearer `. + +## Endpoints + +Key management: + +- `POST /v1/tokenization/keys` (create key) +- `POST /v1/tokenization/keys/:name/rotate` (rotate key) +- `DELETE /v1/tokenization/keys/:id` (soft delete key) + +Token operations: + +- `POST /v1/tokenization/keys/:name/tokenize` (generate token) +- `POST /v1/tokenization/detokenize` (retrieve original value) +- `POST /v1/tokenization/validate` (check token validity) +- `POST /v1/tokenization/revoke` (revoke token) + +Capability mapping: + +| Endpoint | Required capability | +| --- | --- | +| `POST /v1/tokenization/keys` | `write` | +| `POST /v1/tokenization/keys/:name/rotate` | `rotate` | +| `DELETE /v1/tokenization/keys/:id` | `delete` | +| `POST /v1/tokenization/keys/:name/tokenize` | `encrypt` | +| `POST /v1/tokenization/detokenize` | `decrypt` | +| `POST /v1/tokenization/validate` | `read` | +| `POST /v1/tokenization/revoke` | `delete` | + +## Status Code Quick Reference + +| Endpoint | Success | Common error statuses | +| --- | --- | --- | +| `POST /v1/tokenization/keys` | `201` | `401`, `403`, `409`, `422` | +| `POST /v1/tokenization/keys/:name/rotate` | `201` | `401`, `403`, `404`, `422` | +| `DELETE /v1/tokenization/keys/:id` | `204` | `401`, `403`, `404`, `422` | +| `POST /v1/tokenization/keys/:name/tokenize` | `201` | `401`, `403`, `404`, `422` | +| `POST /v1/tokenization/detokenize` | `200` | `401`, `403`, `404`, `422` | +| `POST /v1/tokenization/validate` | `200` | `401`, `403`, `422` | +| `POST /v1/tokenization/revoke` | `204` | `401`, `403`, `404`, `422` | + +## Create Tokenization Key + +Creates the initial tokenization key version (`version = 1`) for a key name. 
+ +```bash +curl -X POST http://localhost:8080/v1/tokenization/keys \ + -H "Authorization: Bearer " \ + -H "Content-Type: application/json" \ + -d '{ + "name": "payment-cards", + "format_type": "luhn-preserving", + "is_deterministic": true, + "algorithm": "aes-gcm" + }' +``` + +Request fields: + +| Field | Type | Required | Description | +| --- | --- | --- | --- | +| `name` | string | Yes | Unique key name (1-255 chars) | +| `format_type` | string | Yes | `uuid`, `numeric`, `luhn-preserving`, `alphanumeric` | +| `is_deterministic` | boolean | No | Default `false` | +| `algorithm` | string | Yes | `aes-gcm` or `chacha20-poly1305` | + +Example response (`201 Created`): + +```json +{ + "id": "0194f4a6-7ec7-78e6-9fe7-5ca35fef48db", + "name": "payment-cards", + "version": 1, + "format_type": "luhn-preserving", + "is_deterministic": true, + "created_at": "2026-02-18T10:30:00Z" +} +``` + +## Rotate Tokenization Key + +Creates a new key version for an existing key name. + +```bash +curl -X POST http://localhost:8080/v1/tokenization/keys/payment-cards/rotate \ + -H "Authorization: Bearer " \ + -H "Content-Type: application/json" \ + -d '{ + "format_type": "luhn-preserving", + "is_deterministic": true, + "algorithm": "chacha20-poly1305" + }' +``` + +Example response (`201 Created`): + +```json +{ + "id": "0194f4a6-8901-7def-abc0-123456789def", + "name": "payment-cards", + "version": 2, + "format_type": "luhn-preserving", + "is_deterministic": true, + "created_at": "2026-02-18T11:00:00Z" +} +``` + +## Delete Tokenization Key + +Soft deletes a tokenization key by ID. + +```bash +curl -X DELETE http://localhost:8080/v1/tokenization/keys/0194f4a6-7ec7-78e6-9fe7-5ca35fef48db \ + -H "Authorization: Bearer " +``` + +Response: `204 No Content` + +## Tokenize Data + +Generates a token for plaintext using the latest version of a key. + +In deterministic mode, the same plaintext can return the same active token while valid. + +```bash +curl -X POST http://localhost:8080/v1/tokenization/keys/payment-cards/tokenize \ + -H "Authorization: Bearer " \ + -H "Content-Type: application/json" \ + -d '{ + "plaintext": "NDUzMjAxNTExMjgzMDM2Ng==", + "metadata": { + "last_four": "0366", + "card_type": "visa" + }, + "ttl": 3600 + }' +``` + +Request fields: + +| Field | Type | Required | Description | +| --- | --- | --- | --- | +| `plaintext` | string | Yes | Base64-encoded plaintext | +| `metadata` | object | No | Display metadata; stored unencrypted | +| `ttl` | integer | No | Time-to-live in seconds (`>= 1`) | + +Example response (`201 Created`): + +```json +{ + "token": "4532015112830366", + "metadata": { + "last_four": "0366", + "card_type": "visa" + }, + "created_at": "2026-02-18T10:35:00Z", + "expires_at": "2026-02-18T11:35:00Z" +} +``` + +## Detokenize Data + +Retrieves original plaintext for a token. + +```bash +curl -X POST http://localhost:8080/v1/tokenization/detokenize \ + -H "Authorization: Bearer " \ + -H "Content-Type: application/json" \ + -d '{"token":"4532015112830366"}' +``` + +Example response (`200 OK`): + +```json +{ + "plaintext": "NDUzMjAxNTExMjgzMDM2Ng==", + "metadata": { + "last_four": "0366", + "card_type": "visa" + } +} +``` + +Error behavior: + +- Missing token mapping: `404 Not Found` +- Expired token: `422 Unprocessable Entity` +- Revoked token: `422 Unprocessable Entity` + +## Validate Token + +Checks whether a token is currently valid. Does not return plaintext. 
+ +```bash +curl -X POST http://localhost:8080/v1/tokenization/validate \ + -H "Authorization: Bearer " \ + -H "Content-Type: application/json" \ + -d '{"token":"4532015112830366"}' +``` + +Example response (`200 OK`): + +```json +{ + "valid": true +} +``` + +If token is unknown, expired, or revoked, response remains `200` with `valid: false`. + +## Revoke Token + +Marks a token as revoked. + +```bash +curl -X POST http://localhost:8080/v1/tokenization/revoke \ + -H "Authorization: Bearer " \ + -H "Content-Type: application/json" \ + -d '{"token":"4532015112830366"}' +``` + +Response: `204 No Content` + +## Token Formats + +| Format | Description | Example output | +| --- | --- | --- | +| `uuid` | RFC 4122 UUID | `01933e4a-7890-7abc-def0-123456789abc` | +| `numeric` | Numeric digits | `4532015112830366` | +| `luhn-preserving` | Numeric with Luhn validity | `4532015112830366` | +| `alphanumeric` | Letters and digits | `A3b9X2k7Q1m5` | + +## Security Notes + +- Metadata is not encrypted; do not store full PAN, secrets, or regulated payloads in metadata. +- Base64 is encoding, not encryption; always use HTTPS/TLS. +- Clear detokenized plaintext from memory after use in application code. + +## Data Classification for Metadata + +Safe metadata examples (recommended): + +- last-four display fragments (for example `"0366"`) +- token source/system tags (for example `"checkout-service"`) +- non-sensitive workflow identifiers + +Never place in metadata: + +- full PAN, CVV, account numbers, passwords, API keys, or secrets +- plaintext payload copies already represented by the tokenized value +- personal data requiring encryption at rest if your policy requires protected storage + +If data must remain confidential at rest, keep it in encrypted plaintext payload, not metadata. + +## See also + +- [Authentication](authentication.md) +- [Policies](policies.md) +- [Capability matrix](capability-matrix.md) +- [CLI Commands](../cli/commands.md) +- [Production operations](../operations/production.md) diff --git a/docs/api/transit.md b/docs/api/transit.md index 826adfe..3b26ca0 100644 --- a/docs/api/transit.md +++ b/docs/api/transit.md @@ -1,6 +1,6 @@ # ๐Ÿš„ Transit API -> Last updated: 2026-02-14 +> Last updated: 2026-02-18 > Applies to: API v1 Transit API encrypts/decrypts data without storing your application payload. @@ -273,6 +273,7 @@ Expected result: key creation returns `201 Created`; encrypt returns `200 OK` wi - [Authentication API](authentication.md) - [Policies cookbook](policies.md) +- [Capability matrix](capability-matrix.md) - [Response shapes](response-shapes.md) - [API compatibility policy](versioning-policy.md) - [Curl examples](../examples/curl.md) diff --git a/docs/api/versioning-policy.md b/docs/api/versioning-policy.md index 641bc50..b59cfdc 100644 --- a/docs/api/versioning-policy.md +++ b/docs/api/versioning-policy.md @@ -1,6 +1,6 @@ # ๐Ÿงฉ API Compatibility and Versioning Policy -> Last updated: 2026-02-16 +> Last updated: 2026-02-18 > Applies to: API v1 This page defines compatibility expectations for HTTP API changes. @@ -11,15 +11,16 @@ This page defines compatibility expectations for HTTP API changes. 
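+
+## Luhn Validity Reference
+
+For reference, tokens in the `luhn-preserving` format above satisfy the standard Luhn checksum. A minimal validity check in Go — a sketch of the public algorithm, not necessarily the server's internal generator:
+
+```go
+// luhnValid reports whether a digit string passes the Luhn checksum:
+// doubling every second digit from the right (subtracting 9 when the
+// doubled value exceeds 9) must yield a sum divisible by 10.
+func luhnValid(digits string) bool {
+	sum, double := 0, false
+	for i := len(digits) - 1; i >= 0; i-- {
+		if digits[i] < '0' || digits[i] > '9' {
+			return false
+		}
+		d := int(digits[i] - '0')
+		if double {
+			d *= 2
+			if d > 9 {
+				d -= 9
+			}
+		}
+		sum += d
+		double = !double
+	}
+	return len(digits) > 0 && sum%10 == 0
+}
+```
+
+For example, the sample token `4532015112830366` used above passes this check.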
- Existing endpoint paths and JSON field names are treated as stable unless explicitly deprecated - OpenAPI source of truth: `docs/openapi.yaml` -## OpenAPI Coverage (v0.3.0) +## OpenAPI Coverage (v0.4.0) - `docs/openapi.yaml` is a baseline subset focused on high-traffic/common integration flows +- `docs/openapi.yaml` includes tokenization endpoint coverage in `v0.4.0` - Endpoint pages in `docs/api/*.md` define full public behavior for covered operations - Endpoints may exist in runtime before they are expanded in OpenAPI detail ## App Version vs API Version -- Application release `v0.3.0` is pre-1.0 software and may evolve quickly +- Application release `v0.4.0` is pre-1.0 software and may evolve quickly - API v1 path contract (`/v1/*`) remains the compatibility baseline for consumers - Breaking API behavior changes require explicit documentation and migration notes diff --git a/docs/cli/commands.md b/docs/cli/commands.md index 8ffabe2..e5872d8 100644 --- a/docs/cli/commands.md +++ b/docs/cli/commands.md @@ -1,6 +1,6 @@ # ๐Ÿงช CLI Commands Reference -> Last updated: 2026-02-16 +> Last updated: 2026-02-18 Use the `app` CLI for server runtime, key management, and client lifecycle operations. @@ -12,10 +12,10 @@ Local binary: ./bin/app [flags] ``` -Docker image (v0.3.0): +Docker image (v0.4.0): ```bash -docker run --rm --env-file .env allisson/secrets:v0.3.0 [flags] +docker run --rm --env-file .env allisson/secrets:v0.4.0 [flags] ``` ## Core Runtime @@ -33,7 +33,7 @@ Local: Docker: ```bash -docker run --rm --network secrets-net --env-file .env -p 8080:8080 allisson/secrets:v0.3.0 server +docker run --rm --network secrets-net --env-file .env -p 8080:8080 allisson/secrets:v0.4.0 server ``` ### `migrate` @@ -49,7 +49,7 @@ Local: Docker: ```bash -docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.3.0 migrate +docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.4.0 migrate ``` ## Key Management @@ -71,7 +71,7 @@ Local: Docker: ```bash -docker run --rm allisson/secrets:v0.3.0 create-master-key --id default +docker run --rm allisson/secrets:v0.4.0 create-master-key --id default ``` ### `create-kek` @@ -91,7 +91,7 @@ Local: Docker: ```bash -docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.3.0 create-kek --algorithm aes-gcm +docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.4.0 create-kek --algorithm aes-gcm ``` ### `rotate-kek` @@ -111,11 +111,85 @@ Local: Docker: ```bash -docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.3.0 rotate-kek --algorithm aes-gcm +docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.4.0 rotate-kek --algorithm aes-gcm ``` After master key or KEK rotation, restart API server instances so they load updated key material. +## Tokenization + +### `create-tokenization-key` + +Creates a tokenization key with version `1`. 
+ +Flags: + +- `--name`, `-n` (required): unique tokenization key name +- `--format`, `--fmt`: `uuid` (default), `numeric`, `luhn-preserving`, or `alphanumeric` +- `--deterministic`, `--det` (default `false`): generate deterministic tokens for identical plaintext +- `--algorithm`, `--alg`: `aes-gcm` (default) or `chacha20-poly1305` + +Examples: + +```bash +./bin/app create-tokenization-key \ + --name payment-cards \ + --format luhn-preserving \ + --deterministic \ + --algorithm aes-gcm + +docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.4.0 \ + create-tokenization-key --name payment-cards --format luhn-preserving --deterministic --algorithm aes-gcm +``` + +### `rotate-tokenization-key` + +Creates a new version for an existing tokenization key. + +Flags: + +- `--name`, `-n` (required): tokenization key name to rotate +- `--format`, `--fmt`: `uuid` (default), `numeric`, `luhn-preserving`, or `alphanumeric` +- `--deterministic`, `--det` (default `false`) +- `--algorithm`, `--alg`: `aes-gcm` (default) or `chacha20-poly1305` + +Examples: + +```bash +./bin/app rotate-tokenization-key \ + --name payment-cards \ + --format luhn-preserving \ + --deterministic \ + --algorithm chacha20-poly1305 + +docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.4.0 \ + rotate-tokenization-key --name payment-cards --format luhn-preserving --deterministic --algorithm chacha20-poly1305 +``` + +### `clean-expired-tokens` + +Deletes expired tokens older than a retention window. + +Flags: + +- `--days`, `-d` (required): delete tokens older than this many days +- `--dry-run`, `-n` (default `false`): preview count without deleting +- `--format`, `-f`: `text` (default) or `json` + +Examples: + +```bash +# Preview (no deletion) +./bin/app clean-expired-tokens --days 30 --dry-run --format json + +# Execute deletion +./bin/app clean-expired-tokens --days 30 --format text + +# Docker form +docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.4.0 \ + clean-expired-tokens --days 30 --dry-run --format json +``` + ## Client Management ### `create-client` @@ -195,7 +269,7 @@ Examples: ./bin/app clean-audit-logs --days 90 --format text # Docker form -docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.3.0 \ +docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.4.0 \ clean-audit-logs --days 90 --dry-run --format json ``` diff --git a/docs/concepts/architecture.md b/docs/concepts/architecture.md index 939b696..cc49cee 100644 --- a/docs/concepts/architecture.md +++ b/docs/concepts/architecture.md @@ -1,6 +1,6 @@ # ๐Ÿ—๏ธ Architecture -> Last updated: 2026-02-14 +> Last updated: 2026-02-18 Secrets follows Clean Architecture with domain-driven boundaries so cryptographic rules stay isolated from transport and storage concerns. @@ -23,6 +23,36 @@ Master Key -> KEK -> DEK -> Transit Key -> Application Data Transit mode is encryption-as-a-service: Secrets returns ciphertext/plaintext to the caller and does not persist application payloads. +## ๐ŸŽซ Tokenization model + +```text +Master Key -> KEK -> DEK -> Tokenization Key -> Token <-> Encrypted Plaintext Mapping +``` + +Tokenization mode persists a token mapping. The server stores encrypted plaintext and returns tokens +that can later be detokenized, validated, revoked, and expired. 
+ +Tokenization request lifecycle: + +```mermaid +sequenceDiagram + participant App as Application + participant API as Secrets API + participant Store as Token Store + + App->>API: POST /v1/tokenization/keys/:name/tokenize (plaintext, metadata, ttl) + API->>Store: persist token -> encrypted plaintext mapping + API-->>App: 201 token + expires_at + + App->>API: POST /v1/tokenization/detokenize (token) + API->>Store: lookup active token mapping + API-->>App: 200 plaintext + + App->>API: POST /v1/tokenization/revoke (token) + API->>Store: mark revoked + API-->>App: 204 No Content +``` + ## ๐Ÿค” Secrets API vs Transit API Use this quick rule: @@ -34,6 +64,7 @@ Use this quick rule: | --- | --- | --- | | Centralized secret storage at `/v1/secrets/*path` | Secrets API | Server persists encrypted data and versions it | | Encrypt/decrypt service without storing payloads | Transit API | Server returns crypto result only; payload storage remains in your app | +| Format-preserving tokens with lifecycle controls | Tokenization API | Server persists token mapping and supports detokenize/validate/revoke | | Secret version history by path | Secrets API | Versioning is built into secret writes | | Key version rotation for stateless crypto operations | Transit API | Transit keys rotate independently while old versions can still decrypt | @@ -50,7 +81,7 @@ flowchart TD ## ๐Ÿงฑ Layer responsibilities -- `domain/`: business entities and invariants (`Client`, `Token`, `Secret`, `TransitKey`, `Kek`, `Dek`) +- `domain/`: business entities and invariants (`Client`, `Token`, `Secret`, `TransitKey`, `TokenizationKey`, `Kek`, `Dek`) - `usecase/`: orchestration, transactional boundaries, and policy decisions - `repository/`: PostgreSQL/MySQL persistence and query logic - `service/`: reusable technical services (crypto, token hashing, helpers) @@ -69,5 +100,6 @@ flowchart TD - [Key management operations](../operations/key-management.md) - [Environment variables](../configuration/environment-variables.md) - [Secrets API](../api/secrets.md) +- [Tokenization API](../api/tokenization.md) - [ADR 0001: Envelope Encryption Model](../adr/0001-envelope-encryption-model.md) - [ADR 0002: Transit Versioned Ciphertext Contract](../adr/0002-transit-versioned-ciphertext-contract.md) diff --git a/docs/concepts/security-model.md b/docs/concepts/security-model.md index e6ec905..27e4b76 100644 --- a/docs/concepts/security-model.md +++ b/docs/concepts/security-model.md @@ -1,6 +1,6 @@ # ๐Ÿ”’ Security Model -> Last updated: 2026-02-14 +> Last updated: 2026-02-18 Secrets is designed for practical defense-in-depth around secret storage and cryptographic operations. @@ -19,6 +19,14 @@ Secrets is designed for practical defense-in-depth around secret storage and cry - ๐ŸŽฏ **DEK compromise**: impact scoped to specific data/version boundaries - ๐Ÿงช **Credential abuse**: identify with audit log patterns (`allowed=false`, unusual source IPs) +## ๐ŸŽซ Tokenization security considerations + +- Metadata is not encrypted: do not place full PAN, credentials, or regulated payloads in token metadata. +- Deterministic tokenization leaks equality patterns for identical plaintext under the same active key. +- TTL expiration and revocation both invalidate token usage, but neither should replace endpoint authorization. +- Detokenization is plaintext exposure: isolate clients with `decrypt` capability and avoid shared broad policies. +- Expired tokens should be cleaned on cadence (`clean-expired-tokens`) to reduce stale sensitive mappings. 
+ ## ๐Ÿ“œ Audit log integrity model - Audit entries are append-only at API level @@ -32,6 +40,8 @@ Secrets is designed for practical defense-in-depth around secret storage and cry - Apply least-privilege policies per client and path - Rotate KEKs and client credentials regularly - Alert on repeated denied authorization attempts +- Separate `encrypt` and `decrypt` clients for tokenization and transit when possible +- Prefer non-deterministic tokenization unless deterministic matching is an explicit requirement ## โš ๏ธ Known limitations @@ -51,4 +61,6 @@ Secrets is designed for practical defense-in-depth around secret storage and cry - [Architecture](architecture.md) - [Authentication API](../api/authentication.md) - [Policies cookbook](../api/policies.md) +- [Capability matrix](../api/capability-matrix.md) +- [Tokenization API](../api/tokenization.md) - [Key management operations](../operations/key-management.md) diff --git a/docs/contributing.md b/docs/contributing.md index 5e85f54..9a355f5 100644 --- a/docs/contributing.md +++ b/docs/contributing.md @@ -1,6 +1,6 @@ # ๐Ÿค Documentation Contributing Guide -> Last updated: 2026-02-16 +> Last updated: 2026-02-18 Use this guide when adding or editing project documentation. @@ -45,6 +45,18 @@ Use this guide when adding or editing project documentation. - Include expected status/result where useful - Avoid placeholder values that look like real secrets +Copy-safe examples policy: + +- Use clearly synthetic values (``, `tok_sample`, `example.com`) +- Never include real keys, tokens, credentials, or production hostnames +- For sensitive domains (payments/PII), prefer redacted fragments (for example `last_four`) + +## Metadata Source of Truth + +- `docs/metadata.json` is the canonical source for current release and API version labels +- Keep `README.md`, `docs/README.md`, and API applies-to markers aligned with this file +- Validate with `make docs-check-metadata` + ## Local Docs Checks Run the same style/link checks locally before opening a PR: @@ -52,12 +64,15 @@ Run the same style/link checks locally before opening a PR: ```bash make docs-lint make docs-check-examples +make docs-check-metadata ``` This target runs markdown linting and offline markdown link validation. `make docs-check-examples` validates representative JSON response shapes used in docs. +`make docs-check-metadata` validates release/API metadata alignment across docs entry points. + ## PR Checklist 1. Links are valid and relative paths resolve @@ -70,11 +85,13 @@ This target runs markdown linting and offline markdown link validation. For behavior changes, update all relevant docs in the same PR: -1. Environment variables and defaults (`docs/configuration/environment-variables.md`) -2. API overview and endpoint pages (`README.md`, `docs/api/*.md`) -3. Operational runbooks (`docs/operations/*.md`) -4. Release notes (`docs/releases/vX.Y.Z.md`) and `docs/CHANGELOG.md` -5. Local and Docker examples (`docs/getting-started/*.md`, `docs/cli/commands.md`) +1. API endpoint page (`docs/api/.md`) plus capability mapping references +2. OpenAPI contract updates (`docs/openapi.yaml`) for new/changed request and response shapes +3. Examples parity (`docs/examples/*.md`) for at least curl and one SDK/runtime path +4. Monitoring/query updates (`docs/operations/monitoring.md`) when new operations/metrics are introduced +5. Runbook updates (`docs/operations/*.md` or `docs/getting-started/troubleshooting.md`) for incident/upgrade impact +6. 
Release notes and changelog (`docs/releases/vX.Y.Z.md`, `docs/CHANGELOG.md`) +7. Entry-point navigation updates (`README.md`, `docs/README.md`) when docs scope expands ## Ownership and Review Cadence @@ -86,9 +103,18 @@ For behavior changes, update all relevant docs in the same PR: ## Docs Release Process 1. Update `Last updated` in every changed docs file -2. Add or update relevant examples if behavior/commands changed -3. Append a concise entry in `docs/CHANGELOG.md` for significant docs changes -4. Run `make docs-lint` before opening or merging PRs +2. Update `docs/metadata.json` when release/API labels change +3. Add or update relevant examples if behavior/commands changed +4. Append a concise entry in `docs/CHANGELOG.md` for significant docs changes +5. Run `make docs-lint` before opening or merging PRs + +## Release PR Docs QA Guard + +CI includes an API/docs guard for pull requests: + +- If API-facing code changes (`internal/*/http/*.go`, `cmd/app/commands/*.go`, `migrations/*`), + PRs must include corresponding docs changes in at least one relevant docs area +- This guard helps ensure API/runtime changes ship with docs, examples, and/or runbook updates ## See also diff --git a/docs/examples/curl.md b/docs/examples/curl.md index 6785fd5..63a2bbd 100644 --- a/docs/examples/curl.md +++ b/docs/examples/curl.md @@ -1,6 +1,6 @@ # ๐Ÿงช Curl Examples -> Last updated: 2026-02-14 +> Last updated: 2026-02-18 โš ๏ธ Security Warning: base64 is encoding, not encryption. Always use HTTPS/TLS. @@ -103,12 +103,46 @@ curl "$BASE_URL/v1/audit-logs?limit=50&offset=0" \ -H "Authorization: Bearer $TOKEN" ``` +## 6) Tokenization quick flow + +```bash +# Create a tokenization key +curl -X POST "$BASE_URL/v1/tokenization/keys" \ + -H "Authorization: Bearer $TOKEN" \ + -H "Content-Type: application/json" \ + -d '{"name":"payment-cards","format_type":"luhn-preserving","is_deterministic":true,"algorithm":"aes-gcm"}' + +# Tokenize a value +TOKENIZED=$(curl -s -X POST "$BASE_URL/v1/tokenization/keys/payment-cards/tokenize" \ + -H "Authorization: Bearer $TOKEN" \ + -H "Content-Type: application/json" \ + -d '{"plaintext":"NDUzMjAxNTExMjgzMDM2Ng==","metadata":{"last_four":"0366"},"ttl":3600}' | jq -r .token) + +# Validate token +curl -X POST "$BASE_URL/v1/tokenization/validate" \ + -H "Authorization: Bearer $TOKEN" \ + -H "Content-Type: application/json" \ + -d "{\"token\":\"$TOKENIZED\"}" + +# Detokenize token +curl -X POST "$BASE_URL/v1/tokenization/detokenize" \ + -H "Authorization: Bearer $TOKEN" \ + -H "Content-Type: application/json" \ + -d "{\"token\":\"$TOKENIZED\"}" +``` + +Deterministic caveat: + +- When `is_deterministic=true`, tokenizing the same plaintext with the same active key can return the same token +- Prefer non-deterministic mode unless you explicitly need equality matching + ## Common Mistakes - Sending raw plaintext instead of base64 in `value`/`plaintext` - Building your own decrypt `ciphertext` instead of reusing encrypt response exactly - Missing `Bearer` prefix in `Authorization` header - Using create repeatedly for same transit key name instead of rotate after `409` +- Sending token in URL path for tokenization lifecycle endpoints (the API expects token in JSON body) ## See also diff --git a/docs/examples/go.md b/docs/examples/go.md index fd01122..dd1e756 100644 --- a/docs/examples/go.md +++ b/docs/examples/go.md @@ -1,6 +1,6 @@ # ๐Ÿน Go Examples -> Last updated: 2026-02-14 +> Last updated: 2026-02-18 โš ๏ธ Security Warning: base64 is encoding, not encryption. Always use HTTPS/TLS. 
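+The Go tokenization snippet below calls helpers (`createTokenizationKey`, `tokenize`,
+`detokenize`) in the same style as transit helpers such as `createTransitKey`; their bodies are
+assumed rather than shown in this file. Here is a minimal sketch of `tokenize` against the
+endpoint contract described in this patch, assuming a `baseURL` string defined alongside the
+other example helpers and the standard imports (`bytes`, `encoding/base64`, `encoding/json`,
+`fmt`, `net/http`):
+
+```go
+// tokenize is a hypothetical helper: it POSTs base64-encoded plaintext to
+// the tokenize endpoint and returns the generated token. baseURL is assumed
+// to be defined alongside the other example helpers.
+func tokenize(authToken, keyName, value string) (string, error) {
+	body, err := json.Marshal(map[string]any{
+		"plaintext": base64.StdEncoding.EncodeToString([]byte(value)),
+		"ttl":       600,
+	})
+	if err != nil {
+		return "", err
+	}
+
+	url := fmt.Sprintf("%s/v1/tokenization/keys/%s/tokenize", baseURL, keyName)
+	req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(body))
+	if err != nil {
+		return "", err
+	}
+	req.Header.Set("Content-Type", "application/json")
+	req.Header.Set("Authorization", "Bearer "+authToken)
+
+	res, err := http.DefaultClient.Do(req)
+	if err != nil {
+		return "", err
+	}
+	defer res.Body.Close()
+
+	if res.StatusCode != http.StatusCreated {
+		return "", fmt.Errorf("tokenize failed: status=%d", res.StatusCode)
+	}
+
+	var out struct {
+		Token string `json:"token"`
+	}
+	if err := json.NewDecoder(res.Body).Decode(&out); err != nil {
+		return "", err
+	}
+	return out.Token, nil
+}
+```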
@@ -190,16 +190,48 @@ func createTransitKey(token, keyName string) error { } ``` +## Tokenization Quick Snippet + +```go +func tokenizationFlow(token string) error { + _ = createTokenizationKey(token, "go-tokenization") + + tokenValue, err := tokenize(token, "go-tokenization", "sensitive-value") + if err != nil { + return err + } + + plaintextB64, err := detokenize(token, tokenValue) + if err != nil { + return err + } + + expected := base64.StdEncoding.EncodeToString([]byte("sensitive-value")) + if plaintextB64 != expected { + return fmt.Errorf("tokenization round-trip verification failed") + } + + return nil +} +``` + +Deterministic caveat: + +- Keys configured as deterministic can emit the same token for the same plaintext under the same active key. +- Use deterministic mode only when your workflow requires equality matching. + ## Common Mistakes - Posting raw strings instead of base64-encoded fields for secrets/transit payloads - Generating decrypt `ciphertext` from local assumptions instead of encrypt response - Missing bearer token header on one request in a multi-step flow - Ignoring `409 Conflict` on transit create and not switching to rotate logic +- Sending tokenization token in URL path instead of JSON body for `detokenize`, `validate`, and `revoke` ## See also - [Authentication API](../api/authentication.md) - [Secrets API](../api/secrets.md) - [Transit API](../api/transit.md) +- [Tokenization API](../api/tokenization.md) - [Response shapes](../api/response-shapes.md) diff --git a/docs/examples/javascript.md b/docs/examples/javascript.md index f813309..2e62bf1 100644 --- a/docs/examples/javascript.md +++ b/docs/examples/javascript.md @@ -1,6 +1,6 @@ # ๐ŸŸจ JavaScript Examples -> Last updated: 2026-02-14 +> Last updated: 2026-02-18 โš ๏ธ Security Warning: base64 is encoding, not encryption. Always use HTTPS/TLS. @@ -101,16 +101,64 @@ main().catch((error) => { }); ``` +## Tokenization Quick Snippet + +```javascript +async function tokenizationFlow(token) { + await fetch(`${BASE_URL}/v1/tokenization/keys`, { + method: "POST", + headers: { + "Content-Type": "application/json", + Authorization: `Bearer ${token}`, + }, + body: JSON.stringify({ + name: "js-tokenization", + format_type: "uuid", + is_deterministic: false, + algorithm: "aes-gcm", + }), + }); + + const tokenizeRes = await fetch(`${BASE_URL}/v1/tokenization/keys/js-tokenization/tokenize`, { + method: "POST", + headers: { + "Content-Type": "application/json", + Authorization: `Bearer ${token}`, + }, + body: JSON.stringify({ plaintext: toBase64("sensitive-value"), ttl: 600 }), + }); + if (!tokenizeRes.ok) throw new Error(`tokenize failed: ${tokenizeRes.status}`); + const { token: tokenValue } = await tokenizeRes.json(); + + const detokenizeRes = await fetch(`${BASE_URL}/v1/tokenization/detokenize`, { + method: "POST", + headers: { + "Content-Type": "application/json", + Authorization: `Bearer ${token}`, + }, + body: JSON.stringify({ token: tokenValue }), + }); + if (!detokenizeRes.ok) throw new Error(`detokenize failed: ${detokenizeRes.status}`); +} +``` + +Deterministic caveat: + +- With `is_deterministic: true`, tokenizing the same plaintext with the same active key can produce the same token. +- Prefer non-deterministic mode unless stable equality matching is required. 
+ ## Common Mistakes - Sending UTF-8 plaintext directly instead of base64 in transit/secrets payloads - Reformatting `ciphertext` for decrypt instead of passing encrypt response as-is - Missing `Authorization: Bearer ` header on protected endpoints - Reusing transit create for existing keys without fallback to rotate on `409` +- Sending tokenization token in URL path instead of JSON body for `detokenize`, `validate`, and `revoke` ## See also - [Authentication API](../api/authentication.md) - [Secrets API](../api/secrets.md) - [Transit API](../api/transit.md) +- [Tokenization API](../api/tokenization.md) - [Response shapes](../api/response-shapes.md) diff --git a/docs/examples/python.md b/docs/examples/python.md index 2b5f32c..89cbbcf 100644 --- a/docs/examples/python.md +++ b/docs/examples/python.md @@ -1,6 +1,6 @@ # ๐Ÿ Python Examples -> Last updated: 2026-02-14 +> Last updated: 2026-02-18 โš ๏ธ Security Warning: base64 is encoding, not encryption. Always use HTTPS/TLS. @@ -95,16 +95,60 @@ if __name__ == "__main__": transit_encrypt_decrypt(token) ``` +## Tokenization Quick Snippet + +```python +def tokenize_detokenize(token: str) -> None: + headers = {"Authorization": f"Bearer {token}"} + + requests.post( + f"{BASE_URL}/v1/tokenization/keys", + headers=headers, + json={ + "name": "python-tokenization", + "format_type": "uuid", + "is_deterministic": False, + "algorithm": "aes-gcm", + }, + timeout=10, + ) + + tokenize = requests.post( + f"{BASE_URL}/v1/tokenization/keys/python-tokenization/tokenize", + headers=headers, + json={"plaintext": b64("sensitive-value"), "ttl": 600}, + timeout=10, + ) + tokenize.raise_for_status() + token_value = tokenize.json()["token"] + + detokenize = requests.post( + f"{BASE_URL}/v1/tokenization/detokenize", + headers=headers, + json={"token": token_value}, + timeout=10, + ) + detokenize.raise_for_status() + assert detokenize.json()["plaintext"] == b64("sensitive-value") +``` + +Deterministic caveat: + +- If you create a key with `is_deterministic=True`, repeated tokenization of identical plaintext can return the same token. +- Use deterministic mode only when equality matching is a functional requirement. + ## Common Mistakes - Passing raw plaintext instead of base64-encoded `value`/`plaintext` - Constructing decrypt `ciphertext` manually instead of using encrypt output - Forgetting `Bearer` prefix in `Authorization` header - Retrying transit create for an existing key name instead of handling `409` with rotate +- Sending tokenization token in URL path instead of JSON body for `detokenize`, `validate`, and `revoke` ## See also - [Authentication API](../api/authentication.md) - [Secrets API](../api/secrets.md) - [Transit API](../api/transit.md) +- [Tokenization API](../api/tokenization.md) - [Response shapes](../api/response-shapes.md) diff --git a/docs/getting-started/docker.md b/docs/getting-started/docker.md index 2ff5f65..5196ec3 100644 --- a/docs/getting-started/docker.md +++ b/docs/getting-started/docker.md @@ -1,10 +1,10 @@ # ๐Ÿณ Run with Docker (Recommended) -> Last updated: 2026-02-16 +> Last updated: 2026-02-18 This is the default way to run Secrets. -For release reproducibility, this guide uses the pinned image tag `allisson/secrets:v0.3.0`. +For release reproducibility, this guide uses the pinned image tag `allisson/secrets:v0.4.0`. You can use `allisson/secrets:latest` for fast iteration. ## โšก Quickstart Copy Block @@ -12,7 +12,7 @@ You can use `allisson/secrets:latest` for fast iteration. 
Use this minimal flow when you just want to get a working instance quickly: ```bash -docker pull allisson/secrets:v0.3.0 +docker pull allisson/secrets:v0.4.0 docker network create secrets-net || true docker run -d --name secrets-postgres --network secrets-net \ @@ -21,19 +21,19 @@ docker run -d --name secrets-postgres --network secrets-net \ -e POSTGRES_DB=mydb \ postgres:16-alpine -docker run --rm allisson/secrets:v0.3.0 create-master-key --id default +docker run --rm allisson/secrets:v0.4.0 create-master-key --id default # copy generated MASTER_KEYS and ACTIVE_MASTER_KEY_ID into .env -docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.3.0 migrate -docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.3.0 create-kek --algorithm aes-gcm +docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.4.0 migrate +docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.4.0 create-kek --algorithm aes-gcm docker run --rm --name secrets-api --network secrets-net --env-file .env -p 8080:8080 \ - allisson/secrets:v0.3.0 server + allisson/secrets:v0.4.0 server ``` ## 1) Pull the image ```bash -docker pull allisson/secrets:v0.3.0 +docker pull allisson/secrets:v0.4.0 ``` ## 2) Start PostgreSQL @@ -51,7 +51,7 @@ docker run -d --name secrets-postgres --network secrets-net \ ## 3) Generate a master key ```bash -docker run --rm allisson/secrets:v0.3.0 create-master-key --id default +docker run --rm allisson/secrets:v0.4.0 create-master-key --id default ``` Copy the generated values into a local `.env` file. @@ -83,15 +83,15 @@ EOF ## 5) Run migrations and bootstrap KEK ```bash -docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.3.0 migrate -docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.3.0 create-kek --algorithm aes-gcm +docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.4.0 migrate +docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.4.0 create-kek --algorithm aes-gcm ``` ## 6) Start the API server ```bash docker run --rm --name secrets-api --network secrets-net --env-file .env -p 8080:8080 \ - allisson/secrets:v0.3.0 server + allisson/secrets:v0.4.0 server ``` ## 7) Verify @@ -111,7 +111,7 @@ Expected: Use the CLI command to create your first API client and policy set: ```bash -docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.3.0 create-client \ +docker run --rm --network secrets-net --env-file .env allisson/secrets:v0.4.0 create-client \ --name bootstrap-admin \ --active \ --policies '[{"path":"*","capabilities":["read","write","delete","encrypt","decrypt","rotate"]}]' \ diff --git a/docs/getting-started/smoke-test.md b/docs/getting-started/smoke-test.md index 308ae6b..8527753 100644 --- a/docs/getting-started/smoke-test.md +++ b/docs/getting-started/smoke-test.md @@ -1,6 +1,6 @@ # โœ… Smoke Test Script -> Last updated: 2026-02-14 +> Last updated: 2026-02-18 Run a fast end-to-end validation of a running Secrets instance. @@ -14,6 +14,8 @@ Script path: `docs/getting-started/smoke-test.sh` 4. `GET /v1/secrets/*path` 5. `POST /v1/transit/keys` 6. `POST /v1/transit/keys/:name/encrypt` and `/decrypt` +7. `POST /v1/tokenization/keys` +8. `POST /v1/tokenization/keys/:name/tokenize` + `POST /v1/tokenization/detokenize` + `POST /v1/tokenization/validate` + `POST /v1/tokenization/revoke` For transit decrypt, pass `ciphertext` exactly as returned by encrypt (`:`). 
@@ -42,6 +44,7 @@ Optional variables: - `SECRET_PATH` (default: `/app/prod/smoke-test`) - `TRANSIT_KEY_NAME` (default: `smoke-test-key`) +- `TOKENIZATION_KEY_NAME` (default: `smoke-test-tokenization-key`) Expected output includes `Smoke test completed successfully`. If transit decrypt fails with `422`, see [Troubleshooting](troubleshooting.md#422-unprocessable-entity). diff --git a/docs/getting-started/smoke-test.sh b/docs/getting-started/smoke-test.sh index 8ee1035..1abd147 100644 --- a/docs/getting-started/smoke-test.sh +++ b/docs/getting-started/smoke-test.sh @@ -12,6 +12,7 @@ CLIENT_ID="${CLIENT_ID:-}" CLIENT_SECRET="${CLIENT_SECRET:-}" SECRET_PATH="${SECRET_PATH:-/app/prod/smoke-test}" TRANSIT_KEY_NAME="${TRANSIT_KEY_NAME:-smoke-test-key}" +TOKENIZATION_KEY_NAME="${TOKENIZATION_KEY_NAME:-smoke-test-tokenization-key}" if [[ -z "$CLIENT_ID" || -z "$CLIENT_SECRET" ]]; then echo "CLIENT_ID and CLIENT_SECRET are required" @@ -19,10 +20,10 @@ if [[ -z "$CLIENT_ID" || -z "$CLIENT_SECRET" ]]; then exit 1 fi -echo "[1/6] Health check" +echo "[1/8] Health check" curl -fsS "$BASE_URL/health" | jq . -echo "[2/6] Issue token" +echo "[2/8] Issue token" TOKEN="$(curl -fsS -X POST "$BASE_URL/v1/token" \ -H "Content-Type: application/json" \ -d "{\"client_id\":\"$CLIENT_ID\",\"client_secret\":\"$CLIENT_SECRET\"}" | jq -r '.token')" @@ -32,17 +33,17 @@ if [[ -z "$TOKEN" || "$TOKEN" == "null" ]]; then exit 1 fi -echo "[3/6] Write secret" +echo "[3/8] Write secret" curl -fsS -X POST "$BASE_URL/v1/secrets$SECRET_PATH" \ -H "Authorization: Bearer $TOKEN" \ -H "Content-Type: application/json" \ -d '{"value":"c21va2UtdGVzdC12YWx1ZQ=="}' | jq . -echo "[4/6] Read secret" +echo "[4/8] Read secret" curl -fsS "$BASE_URL/v1/secrets$SECRET_PATH" \ -H "Authorization: Bearer $TOKEN" | jq . -echo "[5/6] Create transit key (ignores conflict)" +echo "[5/8] Create transit key (ignores conflict)" CREATE_STATUS="$(curl -sS -o /tmp/secrets_transit_create.json -w "%{http_code}" -X POST "$BASE_URL/v1/transit/keys" \ -H "Authorization: Bearer $TOKEN" \ -H "Content-Type: application/json" \ @@ -54,7 +55,7 @@ if [[ "$CREATE_STATUS" != "201" && "$CREATE_STATUS" != "409" ]]; then exit 1 fi -echo "[6/6] Encrypt and decrypt with transit key" +echo "[6/8] Encrypt and decrypt with transit key" CIPHERTEXT="$(curl -fsS -X POST "$BASE_URL/v1/transit/keys/$TRANSIT_KEY_NAME/encrypt" \ -H "Authorization: Bearer $TOKEN" \ -H "Content-Type: application/json" \ @@ -65,4 +66,63 @@ curl -fsS -X POST "$BASE_URL/v1/transit/keys/$TRANSIT_KEY_NAME/decrypt" \ -H "Content-Type: application/json" \ -d "{\"ciphertext\":\"$CIPHERTEXT\"}" | jq . 
+echo "[7/8] Create tokenization key (ignores conflict)" +TOKENIZATION_CREATE_STATUS="$(curl -sS -o /tmp/secrets_tokenization_create.json -w "%{http_code}" -X POST "$BASE_URL/v1/tokenization/keys" \ + -H "Authorization: Bearer $TOKEN" \ + -H "Content-Type: application/json" \ + -d "{\"name\":\"$TOKENIZATION_KEY_NAME\",\"format_type\":\"uuid\",\"is_deterministic\":false,\"algorithm\":\"aes-gcm\"}")" + +if [[ "$TOKENIZATION_CREATE_STATUS" != "201" && "$TOKENIZATION_CREATE_STATUS" != "409" ]]; then + echo "Tokenization key creation failed (status=$TOKENIZATION_CREATE_STATUS)" + cat /tmp/secrets_tokenization_create.json + exit 1 +fi + +echo "[8/8] Tokenization round-trip and revoke" +TOKEN_VALUE="$(curl -fsS -X POST "$BASE_URL/v1/tokenization/keys/$TOKENIZATION_KEY_NAME/tokenize" \ + -H "Authorization: Bearer $TOKEN" \ + -H "Content-Type: application/json" \ + -d '{"plaintext":"c21va2UtdG9rZW5pemF0aW9u","ttl":300}' | jq -r '.token')" + +if [[ -z "$TOKEN_VALUE" || "$TOKEN_VALUE" == "null" ]]; then + echo "Failed to tokenize sample payload" + exit 1 +fi + +curl -fsS -X POST "$BASE_URL/v1/tokenization/detokenize" \ + -H "Authorization: Bearer $TOKEN" \ + -H "Content-Type: application/json" \ + -d "{\"token\":\"$TOKEN_VALUE\"}" | jq . + +VALID_BEFORE="$(curl -fsS -X POST "$BASE_URL/v1/tokenization/validate" \ + -H "Authorization: Bearer $TOKEN" \ + -H "Content-Type: application/json" \ + -d "{\"token\":\"$TOKEN_VALUE\"}" | jq -r '.valid')" + +if [[ "$VALID_BEFORE" != "true" ]]; then + echo "Token should be valid before revoke" + exit 1 +fi + +REVOKE_STATUS="$(curl -sS -o /tmp/secrets_tokenization_revoke.json -w "%{http_code}" -X POST "$BASE_URL/v1/tokenization/revoke" \ + -H "Authorization: Bearer $TOKEN" \ + -H "Content-Type: application/json" \ + -d "{\"token\":\"$TOKEN_VALUE\"}")" + +if [[ "$REVOKE_STATUS" != "204" ]]; then + echo "Token revoke failed (status=$REVOKE_STATUS)" + cat /tmp/secrets_tokenization_revoke.json + exit 1 +fi + +VALID_AFTER="$(curl -fsS -X POST "$BASE_URL/v1/tokenization/validate" \ + -H "Authorization: Bearer $TOKEN" \ + -H "Content-Type: application/json" \ + -d "{\"token\":\"$TOKEN_VALUE\"}" | jq -r '.valid')" + +if [[ "$VALID_AFTER" != "false" ]]; then + echo "Token should be invalid after revoke" + exit 1 +fi + echo "Smoke test completed successfully" diff --git a/docs/getting-started/troubleshooting.md b/docs/getting-started/troubleshooting.md index 405964b..f26c62e 100644 --- a/docs/getting-started/troubleshooting.md +++ b/docs/getting-started/troubleshooting.md @@ -1,6 +1,6 @@ # ๐Ÿงฐ Troubleshooting -> Last updated: 2026-02-16 +> Last updated: 2026-02-18 Use this guide for common setup and runtime errors. @@ -15,6 +15,7 @@ Use this quick route before diving into detailed sections: 5. After rotating keys behavior is stale -> go to `Rotation completed but server still uses old key context` 6. Startup fails with key config errors -> go to `Missing or Invalid Master Keys` 7. Monitoring data is missing -> go to `Metrics Troubleshooting Matrix` +8. 
Tokenization endpoints fail after upgrade -> go to `Tokenization migration verification` ## ๐Ÿ“‘ Table of Contents @@ -27,6 +28,7 @@ Use this quick route before diving into detailed sections: - [Missing or Invalid Master Keys](#missing-or-invalid-master-keys) - [Missing KEK](#missing-kek) - [Metrics Troubleshooting Matrix](#metrics-troubleshooting-matrix) +- [Tokenization migration verification](#tokenization-migration-verification) - [Rotation completed but server still uses old key context](#rotation-completed-but-server-still-uses-old-key-context) - [Token issuance fails with valid-looking credentials](#token-issuance-fails-with-valid-looking-credentials) - [Quick diagnostics checklist](#quick-diagnostics-checklist) @@ -131,6 +133,16 @@ Common 422 cases: | Dashboards show empty values for paths | Query uses concrete URLs, not route patterns | Query by route pattern labels (for example `/v1/secrets/*path`) | | Prometheus memory growth or slow queries | High-cardinality query patterns | Aggregate by stable labels and avoid per-request dimensions | +## Tokenization migration verification + +- Symptom: tokenization endpoints return `404`/`500` after upgrading to `v0.4.0` +- Likely cause: tokenization migration (`000002_add_tokenization`) not applied or partially applied +- Fix: + - run `./bin/app migrate` (or Docker `... allisson/secrets:v0.4.0 migrate`) + - verify migration logs indicate `000002_add_tokenization` applied for your DB + - confirm initial KEK exists (`create-kek` if missing) + - re-run smoke flow for tokenization (`tokenize -> detokenize -> validate -> revoke`) + ## Rotation completed but server still uses old key context - Symptom: master key/KEK rotation completed, but runtime behavior suggests old values are still in use diff --git a/docs/metadata.json b/docs/metadata.json new file mode 100644 index 0000000..8ee022d --- /dev/null +++ b/docs/metadata.json @@ -0,0 +1,5 @@ +{ + "current_release": "v0.4.0", + "api_version": "v1", + "last_docs_refresh": "2026-02-18" +} diff --git a/docs/openapi.yaml b/docs/openapi.yaml index f75f63f..ec3e303 100644 --- a/docs/openapi.yaml +++ b/docs/openapi.yaml @@ -4,7 +4,9 @@ info: version: v1 description: >- Baseline OpenAPI specification for Secrets API v1. This is intentionally concise - and focuses on high-traffic endpoints and common payloads. + and focuses on high-traffic endpoints and common payloads. OpenAPI path templates + use `{param}` syntax while runtime router/metrics labels may expose `:param` or + wildcard forms such as `*path`. 
servers: - url: http://localhost:8080 description: Local development @@ -13,6 +15,7 @@ tags: - name: clients - name: secrets - name: transit + - name: tokenization - name: audit paths: /health: @@ -266,6 +269,222 @@ paths: $ref: "#/components/schemas/ErrorResponse" "422": $ref: "#/components/responses/ValidationError" + /v1/tokenization/keys: + post: + tags: [tokenization] + summary: Create tokenization key + security: + - bearerAuth: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/TokenizationKeyCreateRequest" + responses: + "201": + description: Tokenization key created + content: + application/json: + schema: + $ref: "#/components/schemas/TokenizationKeyResponse" + "401": + $ref: "#/components/responses/Unauthorized" + "403": + $ref: "#/components/responses/Forbidden" + "409": + description: Tokenization key already exists + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "422": + $ref: "#/components/responses/ValidationError" + /v1/tokenization/keys/{name}/rotate: + post: + tags: [tokenization] + summary: Rotate tokenization key + security: + - bearerAuth: [] + parameters: + - name: name + in: path + required: true + schema: + type: string + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/TokenizationKeyRotateRequest" + responses: + "201": + description: New tokenization key version created + content: + application/json: + schema: + $ref: "#/components/schemas/TokenizationKeyResponse" + "401": + $ref: "#/components/responses/Unauthorized" + "403": + $ref: "#/components/responses/Forbidden" + "404": + description: Tokenization key not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "422": + $ref: "#/components/responses/ValidationError" + /v1/tokenization/keys/{id}: + delete: + tags: [tokenization] + summary: Delete tokenization key + security: + - bearerAuth: [] + parameters: + - name: id + in: path + required: true + schema: + type: string + format: uuid + responses: + "204": + description: Deleted + "401": + $ref: "#/components/responses/Unauthorized" + "403": + $ref: "#/components/responses/Forbidden" + "404": + description: Tokenization key not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "422": + $ref: "#/components/responses/ValidationError" + /v1/tokenization/keys/{name}/tokenize: + post: + tags: [tokenization] + summary: Tokenize plaintext + security: + - bearerAuth: [] + parameters: + - name: name + in: path + required: true + schema: + type: string + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/TokenizeRequest" + responses: + "201": + description: Token created + content: + application/json: + schema: + $ref: "#/components/schemas/TokenizeResponse" + "401": + $ref: "#/components/responses/Unauthorized" + "403": + $ref: "#/components/responses/Forbidden" + "404": + description: Tokenization key not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "422": + $ref: "#/components/responses/ValidationError" + /v1/tokenization/detokenize: + post: + tags: [tokenization] + summary: Detokenize token + security: + - bearerAuth: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/DetokenizeRequest" + responses: + "200": + description: Plaintext resolved + content: + application/json: + schema: + 
$ref: "#/components/schemas/DetokenizeResponse" + "401": + $ref: "#/components/responses/Unauthorized" + "403": + $ref: "#/components/responses/Forbidden" + "404": + description: Token not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "422": + $ref: "#/components/responses/ValidationError" + /v1/tokenization/validate: + post: + tags: [tokenization] + summary: Validate token + security: + - bearerAuth: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/ValidateTokenRequest" + responses: + "200": + description: Validation result + content: + application/json: + schema: + $ref: "#/components/schemas/ValidateTokenResponse" + "401": + $ref: "#/components/responses/Unauthorized" + "403": + $ref: "#/components/responses/Forbidden" + "422": + $ref: "#/components/responses/ValidationError" + /v1/tokenization/revoke: + post: + tags: [tokenization] + summary: Revoke token + security: + - bearerAuth: [] + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/RevokeTokenRequest" + responses: + "204": + description: Revoked + "401": + $ref: "#/components/responses/Unauthorized" + "403": + $ref: "#/components/responses/Forbidden" + "404": + description: Token not found + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + "422": + $ref: "#/components/responses/ValidationError" /v1/audit-logs: get: tags: [audit] @@ -420,3 +639,110 @@ components: created_at: type: string format: date-time + TokenizationKeyCreateRequest: + type: object + properties: + name: + type: string + format_type: + type: string + enum: [uuid, numeric, luhn-preserving, alphanumeric] + is_deterministic: + type: boolean + algorithm: + type: string + enum: [aes-gcm, chacha20-poly1305] + required: [name, format_type, algorithm] + TokenizationKeyRotateRequest: + type: object + properties: + format_type: + type: string + enum: [uuid, numeric, luhn-preserving, alphanumeric] + is_deterministic: + type: boolean + algorithm: + type: string + enum: [aes-gcm, chacha20-poly1305] + required: [format_type, algorithm] + TokenizationKeyResponse: + type: object + properties: + id: + type: string + format: uuid + name: + type: string + version: + type: integer + format_type: + type: string + is_deterministic: + type: boolean + created_at: + type: string + format: date-time + required: [id, name, version, format_type, is_deterministic, created_at] + TokenizeRequest: + type: object + properties: + plaintext: + type: string + description: Base64-encoded plaintext + metadata: + type: object + additionalProperties: true + ttl: + type: integer + minimum: 1 + required: [plaintext] + TokenizeResponse: + type: object + properties: + token: + type: string + metadata: + type: object + additionalProperties: true + created_at: + type: string + format: date-time + expires_at: + type: string + format: date-time + nullable: true + required: [token, created_at] + DetokenizeRequest: + type: object + properties: + token: + type: string + required: [token] + DetokenizeResponse: + type: object + properties: + plaintext: + type: string + description: Base64-encoded plaintext + metadata: + type: object + additionalProperties: true + required: [plaintext] + ValidateTokenRequest: + type: object + properties: + token: + type: string + required: [token] + ValidateTokenResponse: + type: object + properties: + valid: + type: boolean + required: [valid] + RevokeTokenRequest: + type: object + 
properties: + token: + type: string + required: [token] diff --git a/docs/operations/failure-playbooks.md b/docs/operations/failure-playbooks.md index 242bd7b..6e7242a 100644 --- a/docs/operations/failure-playbooks.md +++ b/docs/operations/failure-playbooks.md @@ -1,6 +1,6 @@ # ๐Ÿš‘ Failure Playbooks -> Last updated: 2026-02-14 +> Last updated: 2026-02-18 Use this page for fast incident triage on common API failures. @@ -43,6 +43,33 @@ Triage steps: 3. Confirm encrypt/decrypt still work after rotation 4. Update automation to avoid repeated create for existing names +## 404/422 on Tokenization Detokenize + +Symptoms: + +- `POST /v1/tokenization/detokenize` returns `404 Not Found` or `422 Unprocessable Entity` + +Triage steps: + +1. Confirm token was produced by `POST /v1/tokenization/keys/:name/tokenize` +2. Confirm request shape uses JSON body `{"token":"..."}` (not URL path token) +3. Check if token is expired (`ttl`) or revoked +4. Validate caller has `decrypt` capability on `/v1/tokenization/detokenize` +5. If expired tokens accumulate, run cleanup routine (`clean-expired-tokens`) + +## 409 on Tokenization Key Create + +Symptoms: + +- `POST /v1/tokenization/keys` returns `409 Conflict` + +Triage steps: + +1. Treat conflict as "key already initialized" +2. Call `POST /v1/tokenization/keys/:name/rotate` for a new active version +3. Confirm tokenize/detokenize paths remain healthy after rotation +4. Update automation to avoid repeated create for existing names + ## Quick Commands ```bash @@ -63,5 +90,7 @@ curl -s "http://localhost:8080/v1/audit-logs?limit=50&offset=0" \ - [Troubleshooting](../getting-started/troubleshooting.md) - [Policies cookbook](../api/policies.md) +- [Policy smoke tests](policy-smoke-tests.md) - [Transit API](../api/transit.md) +- [Tokenization API](../api/tokenization.md) - [Production operations](production.md) diff --git a/docs/operations/monitoring.md b/docs/operations/monitoring.md index 4529977..fa32728 100644 --- a/docs/operations/monitoring.md +++ b/docs/operations/monitoring.md @@ -1,6 +1,6 @@ # ๐Ÿ“Š Monitoring -> Last updated: 2026-02-16 +> Last updated: 2026-02-18 This document describes the metrics instrumentation and monitoring capabilities in the Secrets application. @@ -115,7 +115,7 @@ Compatibility note: **Description:** Total number of business operations executed **Labels:** -- `domain` - Business domain (auth, secrets, transit) +- `domain` - Business domain (auth, secrets, transit, tokenization) - `operation` - Operation name (e.g., client_create, secret_get, transit_encrypt) - `status` - Operation result (success, error) @@ -133,7 +133,7 @@ secrets_operations_total{domain="transit",operation="transit_key_rotate",status= **Description:** Duration of business operations in seconds **Labels:** -- `domain` - Business domain (auth, secrets, transit) +- `domain` - Business domain (auth, secrets, transit, tokenization) - `operation` - Operation name - `status` - Operation result (success, error) @@ -155,9 +155,14 @@ secrets_operation_duration_seconds_count{domain="auth",operation="client_create" **Labels:** - `method` - HTTP method (GET, POST, PUT, DELETE) -- `path` - Route pattern (e.g., /v1/secrets/*path) +- `path` - Route pattern (e.g., `/v1/secrets/*path`, `/v1/tokenization/keys/:name/tokenize`) - `status_code` - HTTP status code (200, 404, 500, etc.) 
+Route-template note:
+
+- OpenAPI pages use `{name}` parameter syntax
+- Runtime HTTP metrics typically expose Gin-style patterns like `:name` and wildcard `*path`
+
 **Example:**
 
 ```prometheus
@@ -221,6 +226,19 @@ secrets_http_request_duration_seconds_count{method="GET",path="/v1/secrets/*path
 | `transit_encrypt` | Encrypt data with transit key |
 | `transit_decrypt` | Decrypt data with transit key |
 
+### Tokenization Domain
+
+| Operation | Description |
+|-----------|-------------|
+| `tokenization_key_create` | Create new tokenization key |
+| `tokenization_key_rotate` | Rotate tokenization key to new version |
+| `tokenization_key_delete` | Delete tokenization key |
+| `tokenize` | Generate token for plaintext |
+| `detokenize` | Resolve token back to plaintext |
+| `validate` | Validate token lifecycle state |
+| `revoke` | Revoke token |
+| `cleanup_expired` | Delete expired tokens older than retention |
+
 ## Prometheus Configuration
 
 ### Scrape Configuration
@@ -262,6 +280,39 @@ rate(secrets_operations_total{status="error"}[5m]) / rate(secrets_operations_tot
 topk(5, rate(secrets_operation_duration_seconds_sum[5m]) / rate(secrets_operation_duration_seconds_count[5m]))
 ```
 
+### Tokenization-focused Queries
+
+**Detokenize error rate (5m):**
+
+```promql
+rate(secrets_operations_total{domain="tokenization",operation="detokenize",status="error"}[5m])
+/
+rate(secrets_operations_total{domain="tokenization",operation="detokenize"}[5m])
+```
+
+**Tokenization p95 latency (tokenize path):**
+
+```promql
+histogram_quantile(
+  0.95,
+  sum by (le) (
+    rate(secrets_http_request_duration_seconds_bucket{path="/v1/tokenization/keys/:name/tokenize"}[5m])
+  )
+)
+```
+
+**Expired-token cleanup rate (successful cleanup runs per second, since the counter counts operations rather than deleted rows):**
+
+```promql
+rate(secrets_operations_total{domain="tokenization",operation="cleanup_expired",status="success"}[15m])
+```
+
+### SLO Starters (Tokenization)
+
+- `POST /v1/tokenization/keys/:name/tokenize` latency: p95 < 300 ms
+- `POST /v1/tokenization/detokenize` latency: p95 < 400 ms
+- Tokenization server errors: < 0.2% across tokenization operations
+
 ## Grafana Dashboard
 
 ### Recommended Panels
diff --git a/docs/operations/policy-smoke-tests.md b/docs/operations/policy-smoke-tests.md
new file mode 100644
index 0000000..8c46a56
--- /dev/null
+++ b/docs/operations/policy-smoke-tests.md
@@ -0,0 +1,122 @@
+# 🧪 Policy Smoke Tests
+
+> Last updated: 2026-02-18
+
+Use this page to quickly validate authorization behavior after policy changes.
+ +## Why this exists + +- Catch capability drift before production rollout +- Prove least-privilege policies actually enforce intended boundaries +- Provide repeatable checks for CI/CD or release validation + +## Prerequisites + +- Running Secrets API +- `curl` and `jq` +- One client expected to be allowed and one expected to be denied for the target path + +```bash +export BASE_URL="http://localhost:8080" +export ALLOW_CLIENT_ID="" +export ALLOW_CLIENT_SECRET="" +export DENY_CLIENT_ID="" +export DENY_CLIENT_SECRET="" +``` + +## 1) Issue test tokens + +```bash +ALLOW_TOKEN=$(curl -s -X POST "$BASE_URL/v1/token" \ + -H "Content-Type: application/json" \ + -d "{\"client_id\":\"$ALLOW_CLIENT_ID\",\"client_secret\":\"$ALLOW_CLIENT_SECRET\"}" | jq -r .token) + +DENY_TOKEN=$(curl -s -X POST "$BASE_URL/v1/token" \ + -H "Content-Type: application/json" \ + -d "{\"client_id\":\"$DENY_CLIENT_ID\",\"client_secret\":\"$DENY_CLIENT_SECRET\"}" | jq -r .token) +``` + +## 2) Capability checks + +Secrets read check (`decrypt` required): + +```bash +ALLOW_STATUS=$(curl -s -o /tmp/allow-read.json -w "%{http_code}" \ + "$BASE_URL/v1/secrets/app/prod/smoke-policy" \ + -H "Authorization: Bearer $ALLOW_TOKEN") + +DENY_STATUS=$(curl -s -o /tmp/deny-read.json -w "%{http_code}" \ + "$BASE_URL/v1/secrets/app/prod/smoke-policy" \ + -H "Authorization: Bearer $DENY_TOKEN") + +echo "allowed status=$ALLOW_STATUS denied status=$DENY_STATUS" +test "$ALLOW_STATUS" = "200" +test "$DENY_STATUS" = "403" +``` + +Transit encrypt check (`encrypt` required): + +```bash +ALLOW_STATUS=$(curl -s -o /tmp/allow-transit.json -w "%{http_code}" -X POST \ + "$BASE_URL/v1/transit/keys/payment/encrypt" \ + -H "Authorization: Bearer $ALLOW_TOKEN" \ + -H "Content-Type: application/json" \ + -d '{"plaintext":"c21va2UtcG9saWN5"}') + +DENY_STATUS=$(curl -s -o /tmp/deny-transit.json -w "%{http_code}" -X POST \ + "$BASE_URL/v1/transit/keys/payment/encrypt" \ + -H "Authorization: Bearer $DENY_TOKEN" \ + -H "Content-Type: application/json" \ + -d '{"plaintext":"c21va2UtcG9saWN5"}') + +echo "allowed status=$ALLOW_STATUS denied status=$DENY_STATUS" +test "$ALLOW_STATUS" = "200" +test "$DENY_STATUS" = "403" +``` + +Tokenization detokenize check (`decrypt` required): + +```bash +ALLOW_STATUS=$(curl -s -o /tmp/allow-detokenize.json -w "%{http_code}" -X POST \ + "$BASE_URL/v1/tokenization/detokenize" \ + -H "Authorization: Bearer $ALLOW_TOKEN" \ + -H "Content-Type: application/json" \ + -d '{"token":"tok_sample"}') + +DENY_STATUS=$(curl -s -o /tmp/deny-detokenize.json -w "%{http_code}" -X POST \ + "$BASE_URL/v1/tokenization/detokenize" \ + -H "Authorization: Bearer $DENY_TOKEN" \ + -H "Content-Type: application/json" \ + -d '{"token":"tok_sample"}') + +echo "allowed status=$ALLOW_STATUS denied status=$DENY_STATUS" +# allowed may be 200 or 404 depending on sample token existence +test "$ALLOW_STATUS" = "200" -o "$ALLOW_STATUS" = "404" +test "$DENY_STATUS" = "403" +``` + +## 3) Audit verification + +```bash +curl -s "$BASE_URL/v1/audit-logs?limit=100" \ + -H "Authorization: Bearer $ALLOW_TOKEN" \ + | jq '.audit_logs[] | select(.metadata.allowed == false) | {path, capability, client_id, created_at}' +``` + +Expected: + +- denied requests appear with `metadata.allowed=false` +- denied client has expected path/capability mismatches only + +## CI-friendly pattern + +- Keep smoke checks idempotent +- Assert expected status pairs (allow vs deny) +- Run after policy deployment but before traffic cutover + +## See also + +- [Capability 
matrix](../api/capability-matrix.md) +- [Policies cookbook](../api/policies.md) +- [Failure playbooks](failure-playbooks.md) +- [Troubleshooting](../getting-started/troubleshooting.md) diff --git a/docs/operations/production.md b/docs/operations/production.md index 8cb0df2..a2bcfbc 100644 --- a/docs/operations/production.md +++ b/docs/operations/production.md @@ -1,6 +1,6 @@ # ๐Ÿญ Production Deployment Guide -> Last updated: 2026-02-16 +> Last updated: 2026-02-18 This guide covers baseline production hardening and operations for Secrets. @@ -11,8 +11,9 @@ This guide covers baseline production hardening and operations for Secrets. - [3) Database Operations](#3-database-operations) - [4) Key Rotation Schedule](#4-key-rotation-schedule) - [5) Observability and Monitoring](#5-observability-and-monitoring) -- [6) Incident Response Checklist](#6-incident-response-checklist) -- [7) Go-Live Checklist](#7-go-live-checklist) +- [6) Retention Defaults](#6-retention-defaults) +- [7) Incident Response Checklist](#7-incident-response-checklist) +- [8) Go-Live Checklist](#8-go-live-checklist) ## 1) TLS and Reverse Proxy @@ -42,6 +43,7 @@ Minimal reverse proxy checklist: - Monitor connection pool metrics and error rates - Run migrations before rolling out new app versions - Define and execute audit log retention cleanup on a fixed cadence +- Define and execute expired token cleanup on a fixed cadence when tokenization is enabled Backup/restore checklist: @@ -60,6 +62,16 @@ Audit log retention routine (recommended monthly): ./bin/app clean-audit-logs --days 90 --format text ``` +Token retention routine (recommended monthly for tokenization workloads): + +```bash +# 1) Preview expired tokens older than 30 days +./bin/app clean-expired-tokens --days 30 --dry-run --format json + +# 2) Execute deletion +./bin/app clean-expired-tokens --days 30 --format text +``` + ## 4) Key Rotation Schedule - Rotate KEKs on a fixed cadence (for example every 90 days) @@ -114,7 +126,16 @@ SLO examples (starting point): - Secrets read/write latency (`GET/POST /v1/secrets/*`): p95 < 500 ms - Server error budget: 5xx < 0.1% of total requests -## 6) Incident Response Checklist +## 6) Retention Defaults + +| Dataset | Suggested retention | Cleanup command | Cadence | +| --- | --- | --- | --- | +| Audit logs | 90 days | `clean-audit-logs --days 90` | Monthly | +| Expired tokens | 30 days | `clean-expired-tokens --days 30` | Monthly | + +Adjust retention to match your compliance and incident-response requirements. + +## 7) Incident Response Checklist 1. Identify affected client/key/path scope 2. Revoke/deactivate compromised clients @@ -124,7 +145,7 @@ SLO examples (starting point): 6. Review audit logs for lateral movement or unusual access 7. 
Record timeline and remediation actions
 
-## 7) Go-Live Checklist
+## 8) Go-Live Checklist
 
 - [ ] HTTPS/TLS enabled and verified
 - [ ] DB backups and restore drill validated
@@ -139,6 +160,7 @@ SLO examples (starting point):
 
 - [Key management operations](key-management.md)
 - [Monitoring](monitoring.md)
+- [Policy smoke tests](policy-smoke-tests.md)
 - [Environment variables](../configuration/environment-variables.md)
 - [Security model](../concepts/security-model.md)
 - [Troubleshooting](../getting-started/troubleshooting.md)
diff --git a/docs/releases/v0.4.0.md b/docs/releases/v0.4.0.md
new file mode 100644
index 0000000..c21ae67
--- /dev/null
+++ b/docs/releases/v0.4.0.md
@@ -0,0 +1,81 @@
+# 🚀 Secrets v0.4.0 Release Notes
+
+> Release date: 2026-02-18
+
+This release adds tokenization capabilities for format-preserving data protection,
+including API endpoints, CLI operations, persistence, and observability.
+
+## Highlights
+
+- Added tokenization API under `/v1/tokenization/*`
+- Added tokenization key lifecycle: create, rotate, delete
+- Added token lifecycle: tokenize, detokenize, validate, revoke
+- Added deterministic mode support for repeatable token generation
+- Added token format support: `uuid`, `numeric`, `luhn-preserving`, `alphanumeric`
+- Added expired-token maintenance command: `clean-expired-tokens`
+
+## API Additions
+
+New endpoints:
+
+- `POST /v1/tokenization/keys`
+- `POST /v1/tokenization/keys/{name}/rotate`
+- `DELETE /v1/tokenization/keys/{id}`
+- `POST /v1/tokenization/keys/{name}/tokenize`
+- `POST /v1/tokenization/detokenize`
+- `POST /v1/tokenization/validate`
+- `POST /v1/tokenization/revoke`
+
+## CLI Additions
+
+- `create-tokenization-key --name --format [--deterministic] [--algorithm ]`
+- `rotate-tokenization-key --name --format [--deterministic] [--algorithm ]`
+- `clean-expired-tokens --days [--dry-run] [--format text|json]`
+
+## Data Model and Migrations
+
+Added migration `000002_add_tokenization` for PostgreSQL and MySQL:
+
+- `tokenization_keys` table for versioned key metadata
+- `tokenization_tokens` table for token-to-ciphertext mapping and lifecycle fields
+
+## Observability
+
+Added tokenization business operations metrics in the `tokenization` domain,
+including key and token lifecycle operations.
+
+## Runtime and Compatibility
+
+- API baseline remains v1 (`/v1/*`)
+- Local development targets: Linux and macOS
+- CI baseline: Go `1.25.5`, PostgreSQL `16-alpine`, MySQL `8.0`
+- Compatibility targets: PostgreSQL `12+`, MySQL `8.0+`
+
+## Upgrade Notes
+
+- Non-breaking addition: tokenization capability under API v1
+- Existing auth, secrets, transit, and audit behavior remains compatible
+- Run database migrations before using tokenization endpoints or CLI commands
+
+## Upgrade Checklist
+
+1. Deploy binaries/images with `v0.4.0`
+2. Run DB migrations (`app migrate`) before serving traffic
+3. Verify baseline health (`GET /health`, `GET /ready`)
+4. Create a tokenization key (`create-tokenization-key` or `POST /v1/tokenization/keys`)
+5. Run round-trip check: tokenize -> detokenize -> validate -> revoke
+6. Schedule retention cleanup for expired tokens (`clean-expired-tokens`)
+
+## Rollback Notes
+
+- `000002_add_tokenization` is an additive schema migration and is expected to remain applied during app rollback.
+- Rolling back binaries/images to pre-`v0.4.0` can leave tokenization tables unused but present.
+- Avoid destructive schema rollback in production unless you have a validated backup/restore plan.
+- If rollback is required, keep existing data and disable tokenization traffic paths operationally until re-upgrade. + +## See also + +- [Tokenization API](../api/tokenization.md) +- [CLI commands reference](../cli/commands.md) +- [Production operations](../operations/production.md) +- [API compatibility policy](../api/versioning-policy.md) diff --git a/docs/tools/check_docs_metadata.py b/docs/tools/check_docs_metadata.py new file mode 100644 index 0000000..85f29ca --- /dev/null +++ b/docs/tools/check_docs_metadata.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python3 + +import json +import re +from pathlib import Path + + +def require_contains(path: Path, needle: str) -> None: + content = path.read_text(encoding="utf-8") + if needle not in content: + raise ValueError(f"{path} missing required text: {needle}") + + +def main() -> None: + metadata_path = Path("docs/metadata.json") + metadata = json.loads(metadata_path.read_text(encoding="utf-8")) + + current_release = metadata["current_release"] + api_version = metadata["api_version"] + + require_contains(Path("README.md"), current_release) + require_contains(Path("docs/README.md"), current_release) + + openapi = Path("docs/openapi.yaml").read_text(encoding="utf-8") + if f"version: {api_version}" not in openapi: + raise ValueError( + "docs/openapi.yaml version does not match docs/metadata.json api_version" + ) + + api_pages = sorted(Path("docs/api").glob("*.md")) + missing = [] + marker = f"> Applies to: API {api_version}" + for page in api_pages: + content = page.read_text(encoding="utf-8") + if marker not in content: + missing.append(str(page)) + + if missing: + raise ValueError("API pages missing applies-to marker: " + ", ".join(missing)) + + # Ensure docs index points to metadata source. + require_contains(Path("docs/README.md"), "docs/metadata.json") + + # Soft check: release notes for current release should exist. + release_file = Path(f"docs/releases/{current_release}.md") + if not release_file.exists(): + raise ValueError(f"Missing release notes file: {release_file}") + + # Keep date shape simple for maintainers. 
+ if not re.match(r"^\d{4}-\d{2}-\d{2}$", metadata["last_docs_refresh"]): + raise ValueError("docs/metadata.json last_docs_refresh must be YYYY-MM-DD") + + print("docs metadata checks passed") + + +if __name__ == "__main__": + main() diff --git a/internal/app/di.go b/internal/app/di.go index e4f05d2..4ce19bc 100644 --- a/internal/app/di.go +++ b/internal/app/di.go @@ -24,6 +24,9 @@ import ( secretsHTTP "github.com/allisson/secrets/internal/secrets/http" secretsRepository "github.com/allisson/secrets/internal/secrets/repository" secretsUseCase "github.com/allisson/secrets/internal/secrets/usecase" + tokenizationHTTP "github.com/allisson/secrets/internal/tokenization/http" + tokenizationRepository "github.com/allisson/secrets/internal/tokenization/repository" + tokenizationUseCase "github.com/allisson/secrets/internal/tokenization/usecase" transitHTTP "github.com/allisson/secrets/internal/transit/http" transitRepository "github.com/allisson/secrets/internal/transit/repository" transitUseCase "github.com/allisson/secrets/internal/transit/usecase" @@ -53,68 +56,82 @@ type Container struct { tokenService authService.TokenService // Repositories - kekRepository cryptoUseCase.KekRepository - dekRepository secretsUseCase.DekRepository - secretRepository secretsUseCase.SecretRepository - clientRepository authUseCase.ClientRepository - tokenRepository authUseCase.TokenRepository - auditLogRepository authUseCase.AuditLogRepository - transitKeyRepository transitUseCase.TransitKeyRepository - transitDekRepository transitUseCase.DekRepository + kekRepository cryptoUseCase.KekRepository + dekRepository secretsUseCase.DekRepository + secretRepository secretsUseCase.SecretRepository + clientRepository authUseCase.ClientRepository + tokenRepository authUseCase.TokenRepository + auditLogRepository authUseCase.AuditLogRepository + transitKeyRepository transitUseCase.TransitKeyRepository + transitDekRepository transitUseCase.DekRepository + tokenizationKeyRepository tokenizationUseCase.TokenizationKeyRepository + tokenizationTokenRepository tokenizationUseCase.TokenRepository + tokenizationDekRepository tokenizationUseCase.DekRepository // Use Cases - kekUseCase cryptoUseCase.KekUseCase - secretUseCase secretsUseCase.SecretUseCase - clientUseCase authUseCase.ClientUseCase - tokenUseCase authUseCase.TokenUseCase - auditLogUseCase authUseCase.AuditLogUseCase - transitKeyUseCase transitUseCase.TransitKeyUseCase + kekUseCase cryptoUseCase.KekUseCase + secretUseCase secretsUseCase.SecretUseCase + clientUseCase authUseCase.ClientUseCase + tokenUseCase authUseCase.TokenUseCase + auditLogUseCase authUseCase.AuditLogUseCase + transitKeyUseCase transitUseCase.TransitKeyUseCase + tokenizationKeyUseCase tokenizationUseCase.TokenizationKeyUseCase + tokenizationUseCase tokenizationUseCase.TokenizationUseCase // HTTP Handlers - clientHandler *authHTTP.ClientHandler - tokenHandler *authHTTP.TokenHandler - auditLogHandler *authHTTP.AuditLogHandler - secretHandler *secretsHTTP.SecretHandler - transitKeyHandler *transitHTTP.TransitKeyHandler - cryptoHandler *transitHTTP.CryptoHandler + clientHandler *authHTTP.ClientHandler + tokenHandler *authHTTP.TokenHandler + auditLogHandler *authHTTP.AuditLogHandler + secretHandler *secretsHTTP.SecretHandler + transitKeyHandler *transitHTTP.TransitKeyHandler + cryptoHandler *transitHTTP.CryptoHandler + tokenizationKeyHandler *tokenizationHTTP.TokenizationKeyHandler + tokenizationHandler *tokenizationHTTP.TokenizationHandler // Servers and Workers httpServer *http.Server // Initialization 
flags and mutex for thread-safety - mu sync.Mutex - loggerInit sync.Once - dbInit sync.Once - masterKeyChainInit sync.Once - txManagerInit sync.Once - metricsProviderInit sync.Once - businessMetricsInit sync.Once - aeadManagerInit sync.Once - keyManagerInit sync.Once - secretServiceInit sync.Once - tokenServiceInit sync.Once - kekRepositoryInit sync.Once - dekRepositoryInit sync.Once - secretRepositoryInit sync.Once - clientRepositoryInit sync.Once - tokenRepositoryInit sync.Once - auditLogRepositoryInit sync.Once - transitKeyRepositoryInit sync.Once - transitDekRepositoryInit sync.Once - kekUseCaseInit sync.Once - secretUseCaseInit sync.Once - clientUseCaseInit sync.Once - tokenUseCaseInit sync.Once - auditLogUseCaseInit sync.Once - transitKeyUseCaseInit sync.Once - clientHandlerInit sync.Once - tokenHandlerInit sync.Once - auditLogHandlerInit sync.Once - secretHandlerInit sync.Once - transitKeyHandlerInit sync.Once - cryptoHandlerInit sync.Once - httpServerInit sync.Once - initErrors map[string]error + mu sync.Mutex + loggerInit sync.Once + dbInit sync.Once + masterKeyChainInit sync.Once + txManagerInit sync.Once + metricsProviderInit sync.Once + businessMetricsInit sync.Once + aeadManagerInit sync.Once + keyManagerInit sync.Once + secretServiceInit sync.Once + tokenServiceInit sync.Once + kekRepositoryInit sync.Once + dekRepositoryInit sync.Once + secretRepositoryInit sync.Once + clientRepositoryInit sync.Once + tokenRepositoryInit sync.Once + auditLogRepositoryInit sync.Once + transitKeyRepositoryInit sync.Once + transitDekRepositoryInit sync.Once + tokenizationKeyRepositoryInit sync.Once + tokenizationTokenRepositoryInit sync.Once + tokenizationDekRepositoryInit sync.Once + kekUseCaseInit sync.Once + secretUseCaseInit sync.Once + clientUseCaseInit sync.Once + tokenUseCaseInit sync.Once + auditLogUseCaseInit sync.Once + transitKeyUseCaseInit sync.Once + tokenizationKeyUseCaseInit sync.Once + tokenizationUseCaseInit sync.Once + clientHandlerInit sync.Once + tokenHandlerInit sync.Once + auditLogHandlerInit sync.Once + secretHandlerInit sync.Once + transitKeyHandlerInit sync.Once + cryptoHandlerInit sync.Once + tokenizationKeyHandlerInit sync.Once + tokenizationHandlerInit sync.Once + httpServerInit sync.Once + initErrors map[string]error } // NewContainer creates a new dependency injection container with the provided configuration. @@ -720,6 +737,16 @@ func (c *Container) initHTTPServer() (*http.Server, error) { return nil, fmt.Errorf("failed to get crypto handler: %w", err) } + tokenizationKeyHandler, err := c.TokenizationKeyHandler() + if err != nil { + return nil, fmt.Errorf("failed to get tokenization key handler: %w", err) + } + + tokenizationHandler, err := c.TokenizationHandler() + if err != nil { + return nil, fmt.Errorf("failed to get tokenization handler: %w", err) + } + tokenUseCase, err := c.TokenUseCase() if err != nil { return nil, fmt.Errorf("failed to get token use case: %w", err) @@ -746,6 +773,8 @@ func (c *Container) initHTTPServer() (*http.Server, error) { secretHandler, transitKeyHandler, cryptoHandler, + tokenizationKeyHandler, + tokenizationHandler, tokenUseCase, tokenService, auditLogUseCase, @@ -1309,3 +1338,289 @@ func (c *Container) initCryptoHandler() (*transitHTTP.CryptoHandler, error) { return transitHTTP.NewCryptoHandler(transitKeyUseCase, auditLogUseCase, logger), nil } + +// TokenizationKeyRepository returns the tokenization key repository. 
+func (c *Container) TokenizationKeyRepository() (tokenizationUseCase.TokenizationKeyRepository, error) { + var err error + c.tokenizationKeyRepositoryInit.Do(func() { + c.tokenizationKeyRepository, err = c.initTokenizationKeyRepository() + if err != nil { + c.initErrors["tokenizationKeyRepository"] = err + } + }) + if err != nil { + return nil, err + } + return c.tokenizationKeyRepository, c.initErrors["tokenizationKeyRepository"] +} + +// TokenizationTokenRepository returns the tokenization token repository. +func (c *Container) TokenizationTokenRepository() (tokenizationUseCase.TokenRepository, error) { + var err error + c.tokenizationTokenRepositoryInit.Do(func() { + c.tokenizationTokenRepository, err = c.initTokenizationTokenRepository() + if err != nil { + c.initErrors["tokenizationTokenRepository"] = err + } + }) + if err != nil { + return nil, err + } + return c.tokenizationTokenRepository, c.initErrors["tokenizationTokenRepository"] +} + +// TokenizationDekRepository returns the DEK repository for tokenization use case. +func (c *Container) TokenizationDekRepository() (tokenizationUseCase.DekRepository, error) { + var err error + c.tokenizationDekRepositoryInit.Do(func() { + c.tokenizationDekRepository, err = c.initTokenizationDekRepository() + if err != nil { + c.initErrors["tokenizationDekRepository"] = err + } + }) + if err != nil { + return nil, err + } + return c.tokenizationDekRepository, c.initErrors["tokenizationDekRepository"] +} + +// TokenizationKeyUseCase returns the tokenization key use case. +func (c *Container) TokenizationKeyUseCase() (tokenizationUseCase.TokenizationKeyUseCase, error) { + var err error + c.tokenizationKeyUseCaseInit.Do(func() { + c.tokenizationKeyUseCase, err = c.initTokenizationKeyUseCase() + if err != nil { + c.initErrors["tokenizationKeyUseCase"] = err + } + }) + if err != nil { + return nil, err + } + return c.tokenizationKeyUseCase, c.initErrors["tokenizationKeyUseCase"] +} + +// TokenizationUseCase returns the tokenization use case. +func (c *Container) TokenizationUseCase() (tokenizationUseCase.TokenizationUseCase, error) { + var err error + c.tokenizationUseCaseInit.Do(func() { + c.tokenizationUseCase, err = c.initTokenizationUseCase() + if err != nil { + c.initErrors["tokenizationUseCase"] = err + } + }) + if err != nil { + return nil, err + } + return c.tokenizationUseCase, c.initErrors["tokenizationUseCase"] +} + +// TokenizationKeyHandler returns the tokenization key HTTP handler. +func (c *Container) TokenizationKeyHandler() (*tokenizationHTTP.TokenizationKeyHandler, error) { + var err error + c.tokenizationKeyHandlerInit.Do(func() { + c.tokenizationKeyHandler, err = c.initTokenizationKeyHandler() + if err != nil { + c.initErrors["tokenizationKeyHandler"] = err + } + }) + if err != nil { + return nil, err + } + return c.tokenizationKeyHandler, c.initErrors["tokenizationKeyHandler"] +} + +// TokenizationHandler returns the tokenization HTTP handler. +func (c *Container) TokenizationHandler() (*tokenizationHTTP.TokenizationHandler, error) { + var err error + c.tokenizationHandlerInit.Do(func() { + c.tokenizationHandler, err = c.initTokenizationHandler() + if err != nil { + c.initErrors["tokenizationHandler"] = err + } + }) + if err != nil { + return nil, err + } + return c.tokenizationHandler, c.initErrors["tokenizationHandler"] +} + +// initTokenizationKeyRepository creates the tokenization key repository. 
+func (c *Container) initTokenizationKeyRepository() (tokenizationUseCase.TokenizationKeyRepository, error) { + db, err := c.DB() + if err != nil { + return nil, fmt.Errorf("failed to get database for tokenization key repository: %w", err) + } + + switch c.config.DBDriver { + case "postgres": + return tokenizationRepository.NewPostgreSQLTokenizationKeyRepository(db), nil + case "mysql": + return tokenizationRepository.NewMySQLTokenizationKeyRepository(db), nil + default: + return nil, fmt.Errorf("unsupported database driver: %s", c.config.DBDriver) + } +} + +// initTokenizationTokenRepository creates the tokenization token repository. +func (c *Container) initTokenizationTokenRepository() (tokenizationUseCase.TokenRepository, error) { + db, err := c.DB() + if err != nil { + return nil, fmt.Errorf("failed to get database for tokenization token repository: %w", err) + } + + switch c.config.DBDriver { + case "postgres": + return tokenizationRepository.NewPostgreSQLTokenRepository(db), nil + case "mysql": + return tokenizationRepository.NewMySQLTokenRepository(db), nil + default: + return nil, fmt.Errorf("unsupported database driver: %s", c.config.DBDriver) + } +} + +// initTokenizationDekRepository creates the DEK repository for tokenization use case. +func (c *Container) initTokenizationDekRepository() (tokenizationUseCase.DekRepository, error) { + db, err := c.DB() + if err != nil { + return nil, fmt.Errorf("failed to get database for tokenization dek repository: %w", err) + } + + switch c.config.DBDriver { + case "postgres": + return cryptoRepository.NewPostgreSQLDekRepository(db), nil + case "mysql": + return cryptoRepository.NewMySQLDekRepository(db), nil + default: + return nil, fmt.Errorf("unsupported database driver: %s", c.config.DBDriver) + } +} + +// initTokenizationKeyUseCase creates the tokenization key use case. +func (c *Container) initTokenizationKeyUseCase() (tokenizationUseCase.TokenizationKeyUseCase, error) { + txManager, err := c.TxManager() + if err != nil { + return nil, fmt.Errorf("failed to get tx manager for tokenization key use case: %w", err) + } + + tokenizationKeyRepository, err := c.TokenizationKeyRepository() + if err != nil { + return nil, fmt.Errorf( + "failed to get tokenization key repository for tokenization key use case: %w", + err, + ) + } + + dekRepository, err := c.TokenizationDekRepository() + if err != nil { + return nil, fmt.Errorf("failed to get dek repository for tokenization key use case: %w", err) + } + + keyManager := c.KeyManager() + + kekChain, err := c.loadKekChain() + if err != nil { + return nil, fmt.Errorf("failed to load kek chain for tokenization key use case: %w", err) + } + + baseUseCase := tokenizationUseCase.NewTokenizationKeyUseCase( + txManager, + tokenizationKeyRepository, + dekRepository, + keyManager, + kekChain, + ) + + // Wrap with metrics if enabled + if c.config.MetricsEnabled { + businessMetrics, err := c.BusinessMetrics() + if err != nil { + return nil, fmt.Errorf("failed to get business metrics for tokenization key use case: %w", err) + } + return tokenizationUseCase.NewTokenizationKeyUseCaseWithMetrics(baseUseCase, businessMetrics), nil + } + + return baseUseCase, nil +} + +// initTokenizationUseCase creates the tokenization use case. 
+func (c *Container) initTokenizationUseCase() (tokenizationUseCase.TokenizationUseCase, error) { + txManager, err := c.TxManager() + if err != nil { + return nil, fmt.Errorf("failed to get tx manager for tokenization use case: %w", err) + } + + tokenizationKeyRepository, err := c.TokenizationKeyRepository() + if err != nil { + return nil, fmt.Errorf("failed to get tokenization key repository for tokenization use case: %w", err) + } + + tokenRepository, err := c.TokenizationTokenRepository() + if err != nil { + return nil, fmt.Errorf("failed to get token repository for tokenization use case: %w", err) + } + + dekRepository, err := c.TokenizationDekRepository() + if err != nil { + return nil, fmt.Errorf("failed to get dek repository for tokenization use case: %w", err) + } + + aeadManager := c.AEADManager() + + keyManager := c.KeyManager() + + hashService := tokenizationUseCase.NewSHA256HashService() + + kekChain, err := c.loadKekChain() + if err != nil { + return nil, fmt.Errorf("failed to load kek chain for tokenization use case: %w", err) + } + + baseUseCase := tokenizationUseCase.NewTokenizationUseCase( + txManager, + tokenizationKeyRepository, + tokenRepository, + dekRepository, + aeadManager, + keyManager, + hashService, + kekChain, + ) + + // Wrap with metrics if enabled + if c.config.MetricsEnabled { + businessMetrics, err := c.BusinessMetrics() + if err != nil { + return nil, fmt.Errorf("failed to get business metrics for tokenization use case: %w", err) + } + return tokenizationUseCase.NewTokenizationUseCaseWithMetrics(baseUseCase, businessMetrics), nil + } + + return baseUseCase, nil +} + +// initTokenizationKeyHandler creates the tokenization key HTTP handler. +func (c *Container) initTokenizationKeyHandler() (*tokenizationHTTP.TokenizationKeyHandler, error) { + tokenizationKeyUseCase, err := c.TokenizationKeyUseCase() + if err != nil { + return nil, fmt.Errorf( + "failed to get tokenization key use case for tokenization key handler: %w", + err, + ) + } + + logger := c.Logger() + + return tokenizationHTTP.NewTokenizationKeyHandler(tokenizationKeyUseCase, logger), nil +} + +// initTokenizationHandler creates the tokenization HTTP handler. 
+func (c *Container) initTokenizationHandler() (*tokenizationHTTP.TokenizationHandler, error) { + tokenizationUseCase, err := c.TokenizationUseCase() + if err != nil { + return nil, fmt.Errorf("failed to get tokenization use case for tokenization handler: %w", err) + } + + logger := c.Logger() + + return tokenizationHTTP.NewTokenizationHandler(tokenizationUseCase, logger), nil +} diff --git a/internal/http/server.go b/internal/http/server.go index 7408511..9331f70 100644 --- a/internal/http/server.go +++ b/internal/http/server.go @@ -25,6 +25,7 @@ import ( authUseCase "github.com/allisson/secrets/internal/auth/usecase" "github.com/allisson/secrets/internal/metrics" secretsHTTP "github.com/allisson/secrets/internal/secrets/http" + tokenizationHTTP "github.com/allisson/secrets/internal/tokenization/http" transitHTTP "github.com/allisson/secrets/internal/transit/http" ) @@ -61,6 +62,8 @@ func (s *Server) SetupRouter( secretHandler *secretsHTTP.SecretHandler, transitKeyHandler *transitHTTP.TransitKeyHandler, cryptoHandler *transitHTTP.CryptoHandler, + tokenizationKeyHandler *tokenizationHTTP.TokenizationKeyHandler, + tokenizationHandler *tokenizationHTTP.TokenizationHandler, tokenUseCase authUseCase.TokenUseCase, tokenService authService.TokenService, auditLogUseCase authUseCase.AuditLogUseCase, @@ -195,6 +198,56 @@ func (s *Server) SetupRouter( ) } } + + // Tokenization endpoints + tokenization := v1.Group("/tokenization") + tokenization.Use(authMiddleware) // All tokenization routes require authentication + { + keys := tokenization.Group("/keys") + { + // Create new tokenization key + keys.POST("", + authHTTP.AuthorizationMiddleware(authDomain.WriteCapability, auditLogUseCase, s.logger), + tokenizationKeyHandler.CreateHandler, + ) + + // Rotate tokenization key to new version + keys.POST("/:name/rotate", + authHTTP.AuthorizationMiddleware(authDomain.RotateCapability, auditLogUseCase, s.logger), + tokenizationKeyHandler.RotateHandler, + ) + + // Delete tokenization key + keys.DELETE("/:id", + authHTTP.AuthorizationMiddleware(authDomain.DeleteCapability, auditLogUseCase, s.logger), + tokenizationKeyHandler.DeleteHandler, + ) + + // Tokenize plaintext with tokenization key + keys.POST("/:name/tokenize", + authHTTP.AuthorizationMiddleware(authDomain.EncryptCapability, auditLogUseCase, s.logger), + tokenizationHandler.TokenizeHandler, + ) + } + + // Detokenize token to retrieve plaintext + tokenization.POST("/detokenize", + authHTTP.AuthorizationMiddleware(authDomain.DecryptCapability, auditLogUseCase, s.logger), + tokenizationHandler.DetokenizeHandler, + ) + + // Validate token existence and validity + tokenization.POST("/validate", + authHTTP.AuthorizationMiddleware(authDomain.ReadCapability, auditLogUseCase, s.logger), + tokenizationHandler.ValidateHandler, + ) + + // Revoke token to prevent further detokenization + tokenization.POST("/revoke", + authHTTP.AuthorizationMiddleware(authDomain.DeleteCapability, auditLogUseCase, s.logger), + tokenizationHandler.RevokeHandler, + ) + } } s.router = router diff --git a/internal/testutil/database.go b/internal/testutil/database.go index 9332e50..b3955cd 100644 --- a/internal/testutil/database.go +++ b/internal/testutil/database.go @@ -76,7 +76,7 @@ func CleanupPostgresDB(t *testing.T, db *sql.DB) { // Truncate tables in reverse order to respect foreign key constraints _, err := db.Exec( - "TRUNCATE TABLE audit_logs, transit_keys, secrets, deks, keks, tokens, clients RESTART IDENTITY CASCADE", + "TRUNCATE TABLE audit_logs, transit_keys, secrets, 
tokenization_tokens, tokenization_keys, deks, keks, tokens, clients RESTART IDENTITY CASCADE", ) require.NoError(t, err, "failed to truncate postgres tables") } @@ -99,6 +99,12 @@ func CleanupMySQLDB(t *testing.T, db *sql.DB) { _, err = db.Exec("TRUNCATE TABLE secrets") require.NoError(t, err, "failed to truncate secrets table") + _, err = db.Exec("TRUNCATE TABLE tokenization_tokens") + require.NoError(t, err, "failed to truncate tokenization_tokens table") + + _, err = db.Exec("TRUNCATE TABLE tokenization_keys") + require.NoError(t, err, "failed to truncate tokenization_keys table") + _, err = db.Exec("TRUNCATE TABLE deks") require.NoError(t, err, "failed to truncate deks table") diff --git a/internal/tokenization/domain/const.go b/internal/tokenization/domain/const.go new file mode 100644 index 0000000..d88a2d7 --- /dev/null +++ b/internal/tokenization/domain/const.go @@ -0,0 +1,32 @@ +// Package domain defines core tokenization domain models for data tokenization. +// Supports multiple token formats (UUID, Numeric, Luhn-preserving, Alphanumeric) with configurable deterministic behavior. +package domain + +import ( + "errors" +) + +// FormatType defines the token format type. +type FormatType string + +const ( + FormatUUID FormatType = "uuid" + FormatNumeric FormatType = "numeric" + FormatLuhnPreserving FormatType = "luhn-preserving" + FormatAlphanumeric FormatType = "alphanumeric" +) + +// Validate checks if the format type is valid. +func (f FormatType) Validate() error { + switch f { + case FormatUUID, FormatNumeric, FormatLuhnPreserving, FormatAlphanumeric: + return nil + default: + return errors.New("invalid format type") + } +} + +// String returns the string representation of the format type. +func (f FormatType) String() string { + return string(f) +} diff --git a/internal/tokenization/domain/const_test.go b/internal/tokenization/domain/const_test.go new file mode 100644 index 0000000..a4708e5 --- /dev/null +++ b/internal/tokenization/domain/const_test.go @@ -0,0 +1,93 @@ +package domain + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestFormatType_Validate(t *testing.T) { + tests := []struct { + name string + formatType FormatType + expectError bool + }{ + { + name: "Valid_UUID", + formatType: FormatUUID, + expectError: false, + }, + { + name: "Valid_Numeric", + formatType: FormatNumeric, + expectError: false, + }, + { + name: "Valid_LuhnPreserving", + formatType: FormatLuhnPreserving, + expectError: false, + }, + { + name: "Valid_Alphanumeric", + formatType: FormatAlphanumeric, + expectError: false, + }, + { + name: "Invalid_UnknownFormat", + formatType: FormatType("unknown"), + expectError: true, + }, + { + name: "Invalid_EmptyString", + formatType: FormatType(""), + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := tt.formatType.Validate() + if tt.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestFormatType_String(t *testing.T) { + tests := []struct { + name string + formatType FormatType + expected string + }{ + { + name: "UUID", + formatType: FormatUUID, + expected: "uuid", + }, + { + name: "Numeric", + formatType: FormatNumeric, + expected: "numeric", + }, + { + name: "LuhnPreserving", + formatType: FormatLuhnPreserving, + expected: "luhn-preserving", + }, + { + name: "Alphanumeric", + formatType: FormatAlphanumeric, + expected: "alphanumeric", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := 
tt.formatType.String() + assert.Equal(t, tt.expected, result) + }) + } +} diff --git a/internal/tokenization/domain/errors.go b/internal/tokenization/domain/errors.go new file mode 100644 index 0000000..49dd674 --- /dev/null +++ b/internal/tokenization/domain/errors.go @@ -0,0 +1,31 @@ +package domain + +import ( + "github.com/allisson/secrets/internal/errors" +) + +var ( + // ErrTokenizationKeyNotFound indicates the tokenization key was not found. + ErrTokenizationKeyNotFound = errors.Wrap(errors.ErrNotFound, "tokenization key not found") + + // ErrTokenizationKeyAlreadyExists indicates a tokenization key with the same name and version already exists. + ErrTokenizationKeyAlreadyExists = errors.Wrap(errors.ErrConflict, "tokenization key already exists") + + // ErrTokenNotFound indicates the token was not found. + ErrTokenNotFound = errors.Wrap(errors.ErrNotFound, "token not found") + + // ErrTokenExpired indicates the token has expired. + ErrTokenExpired = errors.Wrap(errors.ErrInvalidInput, "token has expired") + + // ErrTokenRevoked indicates the token has been revoked. + ErrTokenRevoked = errors.Wrap(errors.ErrInvalidInput, "token has been revoked") + + // ErrInvalidFormatType indicates an invalid token format type was provided. + ErrInvalidFormatType = errors.Wrap(errors.ErrInvalidInput, "invalid format type") + + // ErrInvalidTokenLength indicates the token length is invalid for the specified format. + ErrInvalidTokenLength = errors.Wrap(errors.ErrInvalidInput, "invalid token length for format") + + // ErrValueTooLong indicates the value exceeds the maximum allowed length. + ErrValueTooLong = errors.Wrap(errors.ErrInvalidInput, "value exceeds maximum length") +) diff --git a/internal/tokenization/domain/token.go b/internal/tokenization/domain/token.go new file mode 100644 index 0000000..1fc23c0 --- /dev/null +++ b/internal/tokenization/domain/token.go @@ -0,0 +1,40 @@ +package domain + +import ( + "time" + + "github.com/google/uuid" +) + +// Token represents a tokenization mapping between a token and its encrypted original value. +// Supports optional expiration, revocation, and metadata for display purposes. +type Token struct { + ID uuid.UUID + TokenizationKeyID uuid.UUID + Token string + ValueHash *string + Ciphertext []byte + Nonce []byte + Metadata map[string]any + CreatedAt time.Time + ExpiresAt *time.Time + RevokedAt *time.Time +} + +// IsExpired checks if the token has expired. All time comparisons use UTC. +func (t *Token) IsExpired() bool { + if t.ExpiresAt == nil { + return false + } + return time.Now().UTC().After(t.ExpiresAt.UTC()) +} + +// IsRevoked checks if the token has been revoked. +func (t *Token) IsRevoked() bool { + return t.RevokedAt != nil +} + +// IsValid checks if the token is valid (not expired and not revoked). 
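+// For example, a token whose ExpiresAt lies in the past is invalid even if it
+// was never revoked, and a revoked token is invalid even with no expiration
+// set.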
+func (t *Token) IsValid() bool { + return !t.IsExpired() && !t.IsRevoked() +} diff --git a/internal/tokenization/domain/token_test.go b/internal/tokenization/domain/token_test.go new file mode 100644 index 0000000..c98ce27 --- /dev/null +++ b/internal/tokenization/domain/token_test.go @@ -0,0 +1,152 @@ +package domain + +import ( + "testing" + "time" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" +) + +func TestToken_IsExpired(t *testing.T) { + now := time.Now().UTC() + past := now.Add(-1 * time.Hour) + future := now.Add(1 * time.Hour) + + tests := []struct { + name string + token *Token + expectExp bool + }{ + { + name: "NoExpiration_NotExpired", + token: &Token{ + ID: uuid.Must(uuid.NewV7()), + ExpiresAt: nil, + }, + expectExp: false, + }, + { + name: "FutureExpiration_NotExpired", + token: &Token{ + ID: uuid.Must(uuid.NewV7()), + ExpiresAt: &future, + }, + expectExp: false, + }, + { + name: "PastExpiration_Expired", + token: &Token{ + ID: uuid.Must(uuid.NewV7()), + ExpiresAt: &past, + }, + expectExp: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := tt.token.IsExpired() + assert.Equal(t, tt.expectExp, result) + }) + } +} + +func TestToken_IsRevoked(t *testing.T) { + now := time.Now().UTC() + + tests := []struct { + name string + token *Token + expectRevo bool + }{ + { + name: "NotRevoked", + token: &Token{ + ID: uuid.Must(uuid.NewV7()), + RevokedAt: nil, + }, + expectRevo: false, + }, + { + name: "Revoked", + token: &Token{ + ID: uuid.Must(uuid.NewV7()), + RevokedAt: &now, + }, + expectRevo: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := tt.token.IsRevoked() + assert.Equal(t, tt.expectRevo, result) + }) + } +} + +func TestToken_IsValid(t *testing.T) { + now := time.Now().UTC() + past := now.Add(-1 * time.Hour) + future := now.Add(1 * time.Hour) + + tests := []struct { + name string + token *Token + expectValid bool + }{ + { + name: "ValidToken_NotExpiredNotRevoked", + token: &Token{ + ID: uuid.Must(uuid.NewV7()), + ExpiresAt: nil, + RevokedAt: nil, + }, + expectValid: true, + }, + { + name: "ValidToken_FutureExpirationNotRevoked", + token: &Token{ + ID: uuid.Must(uuid.NewV7()), + ExpiresAt: &future, + RevokedAt: nil, + }, + expectValid: true, + }, + { + name: "InvalidToken_Expired", + token: &Token{ + ID: uuid.Must(uuid.NewV7()), + ExpiresAt: &past, + RevokedAt: nil, + }, + expectValid: false, + }, + { + name: "InvalidToken_Revoked", + token: &Token{ + ID: uuid.Must(uuid.NewV7()), + ExpiresAt: nil, + RevokedAt: &now, + }, + expectValid: false, + }, + { + name: "InvalidToken_ExpiredAndRevoked", + token: &Token{ + ID: uuid.Must(uuid.NewV7()), + ExpiresAt: &past, + RevokedAt: &now, + }, + expectValid: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := tt.token.IsValid() + assert.Equal(t, tt.expectValid, result) + }) + } +} diff --git a/internal/tokenization/domain/tokenization_key.go b/internal/tokenization/domain/tokenization_key.go new file mode 100644 index 0000000..74d7b28 --- /dev/null +++ b/internal/tokenization/domain/tokenization_key.go @@ -0,0 +1,20 @@ +package domain + +import ( + "time" + + "github.com/google/uuid" +) + +// TokenizationKey represents a versioned tokenization key configuration. +// Each key defines the token format and deterministic behavior for tokenization operations. 
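+// Version is incremented on rotation, DekID links the key to the data
+// encryption key protecting its token values, and a non-nil DeletedAt marks
+// the key as soft-deleted.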
+type TokenizationKey struct { + ID uuid.UUID + Name string + Version uint + FormatType FormatType + IsDeterministic bool + DekID uuid.UUID + CreatedAt time.Time + DeletedAt *time.Time +} diff --git a/internal/tokenization/http/dto/request.go b/internal/tokenization/http/dto/request.go new file mode 100644 index 0000000..94799da --- /dev/null +++ b/internal/tokenization/http/dto/request.go @@ -0,0 +1,184 @@ +// Package dto provides data transfer objects for HTTP request and response handling. +package dto + +import ( + "fmt" + + validation "github.com/jellydator/validation" + + cryptoDomain "github.com/allisson/secrets/internal/crypto/domain" + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" + customValidation "github.com/allisson/secrets/internal/validation" +) + +// CreateTokenizationKeyRequest contains the parameters for creating a new tokenization key. +type CreateTokenizationKeyRequest struct { + Name string `json:"name"` + FormatType string `json:"format_type"` // "uuid", "numeric", "luhn-preserving", "alphanumeric" + IsDeterministic bool `json:"is_deterministic"` // If true, same value produces same token + Algorithm string `json:"algorithm"` // "aes-gcm" or "chacha20-poly1305" +} + +// Validate checks if the create tokenization key request is valid. +func (r *CreateTokenizationKeyRequest) Validate() error { + return validation.ValidateStruct(r, + validation.Field(&r.Name, + validation.Required, + customValidation.NotBlank, + validation.Length(1, 255), + ), + validation.Field(&r.FormatType, + validation.Required, + customValidation.NotBlank, + validation.By(validateFormatType), + ), + validation.Field(&r.Algorithm, + validation.Required, + customValidation.NotBlank, + validation.By(validateAlgorithm), + ), + ) +} + +// RotateTokenizationKeyRequest contains the parameters for rotating a tokenization key. +type RotateTokenizationKeyRequest struct { + FormatType string `json:"format_type"` // "uuid", "numeric", "luhn-preserving", "alphanumeric" + IsDeterministic bool `json:"is_deterministic"` // If true, same value produces same token + Algorithm string `json:"algorithm"` // "aes-gcm" or "chacha20-poly1305" +} + +// Validate checks if the rotate tokenization key request is valid. +func (r *RotateTokenizationKeyRequest) Validate() error { + return validation.ValidateStruct(r, + validation.Field(&r.FormatType, + validation.Required, + customValidation.NotBlank, + validation.By(validateFormatType), + ), + validation.Field(&r.Algorithm, + validation.Required, + customValidation.NotBlank, + validation.By(validateAlgorithm), + ), + ) +} + +// TokenizeRequest contains the parameters for tokenizing a value. +type TokenizeRequest struct { + Plaintext string `json:"plaintext"` // Base64-encoded plaintext + Metadata map[string]any `json:"metadata,omitempty"` + TTL *int `json:"ttl,omitempty"` // Time-to-live in seconds (optional) +} + +// Validate checks if the tokenize request is valid. +func (r *TokenizeRequest) Validate() error { + return validation.ValidateStruct(r, + validation.Field(&r.Plaintext, + validation.Required, + customValidation.NotBlank, + customValidation.Base64, + ), + validation.Field(&r.TTL, + validation.When(r.TTL != nil, validation.Min(1)), + ), + ) +} + +// DetokenizeRequest contains the parameters for detokenizing a value. +type DetokenizeRequest struct { + Token string `json:"token"` +} + +// Validate checks if the detokenize request is valid. 
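+// A minimal valid request body (the token value is illustrative):
+//
+//	{"token": "tok_1234567890"}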
+func (r *DetokenizeRequest) Validate() error { + return validation.ValidateStruct(r, + validation.Field(&r.Token, + validation.Required, + customValidation.NotBlank, + ), + ) +} + +// ValidateTokenRequest contains the parameters for validating a token. +type ValidateTokenRequest struct { + Token string `json:"token"` +} + +// Validate checks if the validate token request is valid. +func (r *ValidateTokenRequest) Validate() error { + return validation.ValidateStruct(r, + validation.Field(&r.Token, + validation.Required, + customValidation.NotBlank, + ), + ) +} + +// RevokeTokenRequest contains the parameters for revoking a token. +type RevokeTokenRequest struct { + Token string `json:"token"` +} + +// Validate checks if the revoke token request is valid. +func (r *RevokeTokenRequest) Validate() error { + return validation.ValidateStruct(r, + validation.Field(&r.Token, + validation.Required, + customValidation.NotBlank, + ), + ) +} + +// validateFormatType validates that the format type is supported. +func validateFormatType(value interface{}) error { + formatType, ok := value.(string) + if !ok { + return validation.NewError("validation_format_type", "must be a string") + } + + _, err := ParseFormatType(formatType) + return err +} + +// ParseFormatType converts a string to a tokenizationDomain.FormatType. +// Returns an error if the format type is not supported. +func ParseFormatType(formatType string) (tokenizationDomain.FormatType, error) { + switch formatType { + case "uuid": + return tokenizationDomain.FormatUUID, nil + case "numeric": + return tokenizationDomain.FormatNumeric, nil + case "luhn-preserving": + return tokenizationDomain.FormatLuhnPreserving, nil + case "alphanumeric": + return tokenizationDomain.FormatAlphanumeric, nil + default: + return "", fmt.Errorf( + "invalid format type: must be 'uuid', 'numeric', 'luhn-preserving', or 'alphanumeric'", + ) + } +} + +// validateAlgorithm validates that the algorithm is supported. +func validateAlgorithm(value interface{}) error { + alg, ok := value.(string) + if !ok { + return validation.NewError("validation_algorithm_type", "must be a string") + } + + _, err := ParseAlgorithm(alg) + return err +} + +// ParseAlgorithm converts a string to a cryptoDomain.Algorithm. +// Returns an error if the algorithm is not supported. 
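+// For example, ParseAlgorithm("aes-gcm") yields cryptoDomain.AESGCM, while
+// "AES-GCM" is rejected: matching is deliberately case-sensitive, mirroring
+// ParseFormatType above.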
+func ParseAlgorithm(alg string) (cryptoDomain.Algorithm, error) { + switch alg { + case "aes-gcm": + return cryptoDomain.AESGCM, nil + case "chacha20-poly1305": + return cryptoDomain.ChaCha20, nil + default: + return "", fmt.Errorf("invalid algorithm: must be 'aes-gcm' or 'chacha20-poly1305'") + } +} diff --git a/internal/tokenization/http/dto/request_test.go b/internal/tokenization/http/dto/request_test.go new file mode 100644 index 0000000..f3ccdeb --- /dev/null +++ b/internal/tokenization/http/dto/request_test.go @@ -0,0 +1,479 @@ +package dto + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + cryptoDomain "github.com/allisson/secrets/internal/crypto/domain" + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" +) + +func TestCreateTokenizationKeyRequest_Validate(t *testing.T) { + t.Run("Success_ValidRequest_UUID", func(t *testing.T) { + req := CreateTokenizationKeyRequest{ + Name: "test-key", + FormatType: "uuid", + IsDeterministic: false, + Algorithm: "aes-gcm", + } + + err := req.Validate() + assert.NoError(t, err) + }) + + t.Run("Success_ValidRequest_Numeric", func(t *testing.T) { + req := CreateTokenizationKeyRequest{ + Name: "numeric-key", + FormatType: "numeric", + IsDeterministic: true, + Algorithm: "chacha20-poly1305", + } + + err := req.Validate() + assert.NoError(t, err) + }) + + t.Run("Success_ValidRequest_LuhnPreserving", func(t *testing.T) { + req := CreateTokenizationKeyRequest{ + Name: "luhn-key", + FormatType: "luhn-preserving", + IsDeterministic: true, + Algorithm: "aes-gcm", + } + + err := req.Validate() + assert.NoError(t, err) + }) + + t.Run("Success_ValidRequest_Alphanumeric", func(t *testing.T) { + req := CreateTokenizationKeyRequest{ + Name: "alpha-key", + FormatType: "alphanumeric", + IsDeterministic: false, + Algorithm: "chacha20-poly1305", + } + + err := req.Validate() + assert.NoError(t, err) + }) + + t.Run("Error_MissingName", func(t *testing.T) { + req := CreateTokenizationKeyRequest{ + Name: "", + FormatType: "uuid", + IsDeterministic: false, + Algorithm: "aes-gcm", + } + + err := req.Validate() + assert.Error(t, err) + }) + + t.Run("Error_BlankName", func(t *testing.T) { + req := CreateTokenizationKeyRequest{ + Name: " ", + FormatType: "uuid", + IsDeterministic: false, + Algorithm: "aes-gcm", + } + + err := req.Validate() + assert.Error(t, err) + }) + + t.Run("Error_MissingFormatType", func(t *testing.T) { + req := CreateTokenizationKeyRequest{ + Name: "test-key", + FormatType: "", + IsDeterministic: false, + Algorithm: "aes-gcm", + } + + err := req.Validate() + assert.Error(t, err) + }) + + t.Run("Error_InvalidFormatType", func(t *testing.T) { + req := CreateTokenizationKeyRequest{ + Name: "test-key", + FormatType: "invalid-format", + IsDeterministic: false, + Algorithm: "aes-gcm", + } + + err := req.Validate() + assert.Error(t, err) + }) + + t.Run("Error_MissingAlgorithm", func(t *testing.T) { + req := CreateTokenizationKeyRequest{ + Name: "test-key", + FormatType: "uuid", + IsDeterministic: false, + Algorithm: "", + } + + err := req.Validate() + assert.Error(t, err) + }) + + t.Run("Error_InvalidAlgorithm", func(t *testing.T) { + req := CreateTokenizationKeyRequest{ + Name: "test-key", + FormatType: "uuid", + IsDeterministic: false, + Algorithm: "invalid-algorithm", + } + + err := req.Validate() + assert.Error(t, err) + }) +} + +func TestRotateTokenizationKeyRequest_Validate(t *testing.T) { + t.Run("Success_ValidRequest_UUID", func(t *testing.T) { + req := RotateTokenizationKeyRequest{ + FormatType: "uuid", + 
IsDeterministic: false, + Algorithm: "aes-gcm", + } + + err := req.Validate() + assert.NoError(t, err) + }) + + t.Run("Success_ValidRequest_Numeric", func(t *testing.T) { + req := RotateTokenizationKeyRequest{ + FormatType: "numeric", + IsDeterministic: true, + Algorithm: "chacha20-poly1305", + } + + err := req.Validate() + assert.NoError(t, err) + }) + + t.Run("Error_MissingFormatType", func(t *testing.T) { + req := RotateTokenizationKeyRequest{ + FormatType: "", + IsDeterministic: false, + Algorithm: "aes-gcm", + } + + err := req.Validate() + assert.Error(t, err) + }) + + t.Run("Error_InvalidFormatType", func(t *testing.T) { + req := RotateTokenizationKeyRequest{ + FormatType: "invalid-format", + IsDeterministic: false, + Algorithm: "aes-gcm", + } + + err := req.Validate() + assert.Error(t, err) + }) + + t.Run("Error_MissingAlgorithm", func(t *testing.T) { + req := RotateTokenizationKeyRequest{ + FormatType: "uuid", + IsDeterministic: false, + Algorithm: "", + } + + err := req.Validate() + assert.Error(t, err) + }) + + t.Run("Error_InvalidAlgorithm", func(t *testing.T) { + req := RotateTokenizationKeyRequest{ + FormatType: "uuid", + IsDeterministic: false, + Algorithm: "invalid-algorithm", + } + + err := req.Validate() + assert.Error(t, err) + }) +} + +func TestTokenizeRequest_Validate(t *testing.T) { + ttl := 3600 + + t.Run("Success_ValidRequest_WithTTL", func(t *testing.T) { + req := TokenizeRequest{ + Plaintext: "SGVsbG8gV29ybGQ=", // "Hello World" in base64 + Metadata: map[string]any{"key": "value"}, + TTL: &ttl, + } + + err := req.Validate() + assert.NoError(t, err) + }) + + t.Run("Success_ValidRequest_WithoutTTL", func(t *testing.T) { + req := TokenizeRequest{ + Plaintext: "SGVsbG8gV29ybGQ=", + Metadata: map[string]any{"key": "value"}, + TTL: nil, + } + + err := req.Validate() + assert.NoError(t, err) + }) + + t.Run("Success_ValidRequest_WithoutMetadata", func(t *testing.T) { + req := TokenizeRequest{ + Plaintext: "SGVsbG8gV29ybGQ=", + Metadata: nil, + TTL: &ttl, + } + + err := req.Validate() + assert.NoError(t, err) + }) + + t.Run("Error_MissingPlaintext", func(t *testing.T) { + req := TokenizeRequest{ + Plaintext: "", + Metadata: map[string]any{"key": "value"}, + TTL: &ttl, + } + + err := req.Validate() + assert.Error(t, err) + }) + + t.Run("Error_BlankPlaintext", func(t *testing.T) { + req := TokenizeRequest{ + Plaintext: " ", + Metadata: map[string]any{"key": "value"}, + TTL: &ttl, + } + + err := req.Validate() + assert.Error(t, err) + }) + + t.Run("Error_InvalidBase64", func(t *testing.T) { + req := TokenizeRequest{ + Plaintext: "not-valid-base64!!!", + Metadata: map[string]any{"key": "value"}, + TTL: &ttl, + } + + err := req.Validate() + assert.Error(t, err) + }) + + t.Run("Error_NegativeTTL", func(t *testing.T) { + negativeTTL := -1 + req := TokenizeRequest{ + Plaintext: "SGVsbG8gV29ybGQ=", + Metadata: map[string]any{"key": "value"}, + TTL: &negativeTTL, + } + + err := req.Validate() + assert.Error(t, err) + }) +} + +func TestDetokenizeRequest_Validate(t *testing.T) { + t.Run("Success_ValidRequest", func(t *testing.T) { + req := DetokenizeRequest{ + Token: "tok_1234567890", + } + + err := req.Validate() + assert.NoError(t, err) + }) + + t.Run("Error_MissingToken", func(t *testing.T) { + req := DetokenizeRequest{ + Token: "", + } + + err := req.Validate() + assert.Error(t, err) + }) + + t.Run("Error_BlankToken", func(t *testing.T) { + req := DetokenizeRequest{ + Token: " ", + } + + err := req.Validate() + assert.Error(t, err) + }) +} + +func 
TestValidateTokenRequest_Validate(t *testing.T) { + t.Run("Success_ValidRequest", func(t *testing.T) { + req := ValidateTokenRequest{ + Token: "tok_1234567890", + } + + err := req.Validate() + assert.NoError(t, err) + }) + + t.Run("Error_MissingToken", func(t *testing.T) { + req := ValidateTokenRequest{ + Token: "", + } + + err := req.Validate() + assert.Error(t, err) + }) + + t.Run("Error_BlankToken", func(t *testing.T) { + req := ValidateTokenRequest{ + Token: " ", + } + + err := req.Validate() + assert.Error(t, err) + }) +} + +func TestRevokeTokenRequest_Validate(t *testing.T) { + t.Run("Success_ValidRequest", func(t *testing.T) { + req := RevokeTokenRequest{ + Token: "tok_1234567890", + } + + err := req.Validate() + assert.NoError(t, err) + }) + + t.Run("Error_MissingToken", func(t *testing.T) { + req := RevokeTokenRequest{ + Token: "", + } + + err := req.Validate() + assert.Error(t, err) + }) + + t.Run("Error_BlankToken", func(t *testing.T) { + req := RevokeTokenRequest{ + Token: " ", + } + + err := req.Validate() + assert.Error(t, err) + }) +} + +func TestParseFormatType(t *testing.T) { + t.Run("Success_UUID", func(t *testing.T) { + formatType, err := ParseFormatType("uuid") + assert.NoError(t, err) + assert.Equal(t, tokenizationDomain.FormatUUID, formatType) + }) + + t.Run("Success_Numeric", func(t *testing.T) { + formatType, err := ParseFormatType("numeric") + assert.NoError(t, err) + assert.Equal(t, tokenizationDomain.FormatNumeric, formatType) + }) + + t.Run("Success_LuhnPreserving", func(t *testing.T) { + formatType, err := ParseFormatType("luhn-preserving") + assert.NoError(t, err) + assert.Equal(t, tokenizationDomain.FormatLuhnPreserving, formatType) + }) + + t.Run("Success_Alphanumeric", func(t *testing.T) { + formatType, err := ParseFormatType("alphanumeric") + assert.NoError(t, err) + assert.Equal(t, tokenizationDomain.FormatAlphanumeric, formatType) + }) + + t.Run("Error_InvalidFormatType", func(t *testing.T) { + _, err := ParseFormatType("invalid") + assert.Error(t, err) + assert.Contains(t, err.Error(), "invalid format type") + }) + + t.Run("Error_EmptyFormatType", func(t *testing.T) { + _, err := ParseFormatType("") + assert.Error(t, err) + assert.Contains(t, err.Error(), "invalid format type") + }) + + t.Run("Error_CaseSensitive", func(t *testing.T) { + _, err := ParseFormatType("UUID") + assert.Error(t, err) + }) +} + +func TestParseAlgorithm(t *testing.T) { + t.Run("Success_AESGCM", func(t *testing.T) { + alg, err := ParseAlgorithm("aes-gcm") + assert.NoError(t, err) + assert.Equal(t, cryptoDomain.AESGCM, alg) + }) + + t.Run("Success_ChaCha20", func(t *testing.T) { + alg, err := ParseAlgorithm("chacha20-poly1305") + assert.NoError(t, err) + assert.Equal(t, cryptoDomain.ChaCha20, alg) + }) + + t.Run("Error_InvalidAlgorithm", func(t *testing.T) { + _, err := ParseAlgorithm("invalid") + assert.Error(t, err) + assert.Contains(t, err.Error(), "invalid algorithm") + }) + + t.Run("Error_EmptyAlgorithm", func(t *testing.T) { + _, err := ParseAlgorithm("") + assert.Error(t, err) + assert.Contains(t, err.Error(), "invalid algorithm") + }) + + t.Run("Error_CaseSensitive", func(t *testing.T) { + _, err := ParseAlgorithm("AES-GCM") + assert.Error(t, err) + }) +} + +func TestValidateFormatType(t *testing.T) { + t.Run("Success_ValidString", func(t *testing.T) { + err := validateFormatType("uuid") + assert.NoError(t, err) + }) + + t.Run("Error_InvalidType", func(t *testing.T) { + err := validateFormatType(123) + assert.Error(t, err) + assert.Contains(t, err.Error(), "must be a 
string") + }) + + t.Run("Error_InvalidFormatType", func(t *testing.T) { + err := validateFormatType("invalid") + assert.Error(t, err) + }) +} + +func TestValidateAlgorithm(t *testing.T) { + t.Run("Success_ValidString", func(t *testing.T) { + err := validateAlgorithm("aes-gcm") + assert.NoError(t, err) + }) + + t.Run("Error_InvalidType", func(t *testing.T) { + err := validateAlgorithm(123) + assert.Error(t, err) + assert.Contains(t, err.Error(), "must be a string") + }) + + t.Run("Error_InvalidAlgorithm", func(t *testing.T) { + err := validateAlgorithm("invalid") + assert.Error(t, err) + }) +} diff --git a/internal/tokenization/http/dto/response.go b/internal/tokenization/http/dto/response.go new file mode 100644 index 0000000..7c7010e --- /dev/null +++ b/internal/tokenization/http/dto/response.go @@ -0,0 +1,59 @@ +// Package dto provides data transfer objects for HTTP request and response handling. +package dto + +import ( + "time" + + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" +) + +// TokenizationKeyResponse represents a tokenization key in API responses. +type TokenizationKeyResponse struct { + ID string `json:"id"` + Name string `json:"name"` + Version uint `json:"version"` + FormatType string `json:"format_type"` + IsDeterministic bool `json:"is_deterministic"` + CreatedAt time.Time `json:"created_at"` +} + +// MapTokenizationKeyToResponse converts a domain tokenization key to an API response. +func MapTokenizationKeyToResponse(key *tokenizationDomain.TokenizationKey) TokenizationKeyResponse { + return TokenizationKeyResponse{ + ID: key.ID.String(), + Name: key.Name, + Version: key.Version, + FormatType: string(key.FormatType), + IsDeterministic: key.IsDeterministic, + CreatedAt: key.CreatedAt, + } +} + +// TokenizeResponse represents the result of tokenizing a value. +type TokenizeResponse struct { + Token string `json:"token"` + Metadata map[string]any `json:"metadata,omitempty"` + CreatedAt time.Time `json:"created_at"` + ExpiresAt *time.Time `json:"expires_at,omitempty"` +} + +// MapTokenToTokenizeResponse converts a domain token to a tokenize API response. +func MapTokenToTokenizeResponse(token *tokenizationDomain.Token) TokenizeResponse { + return TokenizeResponse{ + Token: token.Token, + Metadata: token.Metadata, + CreatedAt: token.CreatedAt, + ExpiresAt: token.ExpiresAt, + } +} + +// DetokenizeResponse represents the result of detokenizing a token. +type DetokenizeResponse struct { + Plaintext string `json:"plaintext"` // Base64-encoded plaintext + Metadata map[string]any `json:"metadata,omitempty"` +} + +// ValidateTokenResponse represents the result of validating a token. 
+type ValidateTokenResponse struct { + Valid bool `json:"valid"` +} diff --git a/internal/tokenization/http/dto/response_test.go b/internal/tokenization/http/dto/response_test.go new file mode 100644 index 0000000..c88dc42 --- /dev/null +++ b/internal/tokenization/http/dto/response_test.go @@ -0,0 +1,260 @@ +package dto + +import ( + "testing" + "time" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" +) + +func TestMapTokenizationKeyToResponse(t *testing.T) { + t.Run("Success_MapAllFields_UUID", func(t *testing.T) { + id := uuid.Must(uuid.NewV7()) + now := time.Now().UTC() + + key := &tokenizationDomain.TokenizationKey{ + ID: id, + Name: "test-key", + Version: 1, + FormatType: tokenizationDomain.FormatUUID, + IsDeterministic: false, + DekID: uuid.Must(uuid.NewV7()), + CreatedAt: now, + } + + response := MapTokenizationKeyToResponse(key) + + assert.Equal(t, id.String(), response.ID) + assert.Equal(t, "test-key", response.Name) + assert.Equal(t, uint(1), response.Version) + assert.Equal(t, "uuid", response.FormatType) + assert.False(t, response.IsDeterministic) + assert.Equal(t, now, response.CreatedAt) + }) + + t.Run("Success_MapAllFields_Numeric", func(t *testing.T) { + id := uuid.Must(uuid.NewV7()) + now := time.Now().UTC() + + key := &tokenizationDomain.TokenizationKey{ + ID: id, + Name: "numeric-key", + Version: 2, + FormatType: tokenizationDomain.FormatNumeric, + IsDeterministic: true, + DekID: uuid.Must(uuid.NewV7()), + CreatedAt: now, + } + + response := MapTokenizationKeyToResponse(key) + + assert.Equal(t, id.String(), response.ID) + assert.Equal(t, "numeric-key", response.Name) + assert.Equal(t, uint(2), response.Version) + assert.Equal(t, "numeric", response.FormatType) + assert.True(t, response.IsDeterministic) + assert.Equal(t, now, response.CreatedAt) + }) + + t.Run("Success_MapAllFields_LuhnPreserving", func(t *testing.T) { + id := uuid.Must(uuid.NewV7()) + now := time.Now().UTC() + + key := &tokenizationDomain.TokenizationKey{ + ID: id, + Name: "luhn-key", + Version: 3, + FormatType: tokenizationDomain.FormatLuhnPreserving, + IsDeterministic: true, + DekID: uuid.Must(uuid.NewV7()), + CreatedAt: now, + } + + response := MapTokenizationKeyToResponse(key) + + assert.Equal(t, id.String(), response.ID) + assert.Equal(t, "luhn-key", response.Name) + assert.Equal(t, uint(3), response.Version) + assert.Equal(t, "luhn-preserving", response.FormatType) + assert.True(t, response.IsDeterministic) + assert.Equal(t, now, response.CreatedAt) + }) + + t.Run("Success_MapAllFields_Alphanumeric", func(t *testing.T) { + id := uuid.Must(uuid.NewV7()) + now := time.Now().UTC() + + key := &tokenizationDomain.TokenizationKey{ + ID: id, + Name: "alpha-key", + Version: 4, + FormatType: tokenizationDomain.FormatAlphanumeric, + IsDeterministic: false, + DekID: uuid.Must(uuid.NewV7()), + CreatedAt: now, + } + + response := MapTokenizationKeyToResponse(key) + + assert.Equal(t, id.String(), response.ID) + assert.Equal(t, "alpha-key", response.Name) + assert.Equal(t, uint(4), response.Version) + assert.Equal(t, "alphanumeric", response.FormatType) + assert.False(t, response.IsDeterministic) + assert.Equal(t, now, response.CreatedAt) + }) +} + +func TestMapTokenToTokenizeResponse(t *testing.T) { + t.Run("Success_WithMetadataAndExpiration", func(t *testing.T) { + now := time.Now().UTC() + expiresAt := now.Add(time.Hour) + valueHash := "hash" + + token := &tokenizationDomain.Token{ + ID: 
uuid.Must(uuid.NewV7()), + TokenizationKeyID: uuid.Must(uuid.NewV7()), + Token: "tok_1234567890", + Ciphertext: []byte("encrypted"), + Nonce: []byte("nonce"), + ValueHash: &valueHash, + Metadata: map[string]any{"key": "value", "count": 42}, + CreatedAt: now, + ExpiresAt: &expiresAt, + RevokedAt: nil, + } + + response := MapTokenToTokenizeResponse(token) + + assert.Equal(t, "tok_1234567890", response.Token) + assert.NotNil(t, response.Metadata) + assert.Equal(t, "value", response.Metadata["key"]) + assert.Equal(t, 42, response.Metadata["count"]) + assert.Equal(t, now, response.CreatedAt) + assert.NotNil(t, response.ExpiresAt) + assert.Equal(t, expiresAt, *response.ExpiresAt) + }) + + t.Run("Success_WithoutMetadata", func(t *testing.T) { + now := time.Now().UTC() + expiresAt := now.Add(time.Hour) + valueHash := "hash" + + token := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: uuid.Must(uuid.NewV7()), + Token: "tok_9876543210", + Ciphertext: []byte("encrypted"), + Nonce: []byte("nonce"), + ValueHash: &valueHash, + Metadata: nil, + CreatedAt: now, + ExpiresAt: &expiresAt, + RevokedAt: nil, + } + + response := MapTokenToTokenizeResponse(token) + + assert.Equal(t, "tok_9876543210", response.Token) + assert.Nil(t, response.Metadata) + assert.Equal(t, now, response.CreatedAt) + assert.NotNil(t, response.ExpiresAt) + assert.Equal(t, expiresAt, *response.ExpiresAt) + }) + + t.Run("Success_WithoutExpiration", func(t *testing.T) { + now := time.Now().UTC() + valueHash := "hash" + + token := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: uuid.Must(uuid.NewV7()), + Token: "tok_permanent", + Ciphertext: []byte("encrypted"), + Nonce: []byte("nonce"), + ValueHash: &valueHash, + Metadata: map[string]any{"type": "permanent"}, + CreatedAt: now, + ExpiresAt: nil, + RevokedAt: nil, + } + + response := MapTokenToTokenizeResponse(token) + + assert.Equal(t, "tok_permanent", response.Token) + assert.NotNil(t, response.Metadata) + assert.Equal(t, "permanent", response.Metadata["type"]) + assert.Equal(t, now, response.CreatedAt) + assert.Nil(t, response.ExpiresAt) + }) + + t.Run("Success_EmptyMetadataMap", func(t *testing.T) { + now := time.Now().UTC() + valueHash := "hash" + + token := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: uuid.Must(uuid.NewV7()), + Token: "tok_empty_metadata", + Ciphertext: []byte("encrypted"), + Nonce: []byte("nonce"), + ValueHash: &valueHash, + Metadata: map[string]any{}, + CreatedAt: now, + ExpiresAt: nil, + RevokedAt: nil, + } + + response := MapTokenToTokenizeResponse(token) + + assert.Equal(t, "tok_empty_metadata", response.Token) + assert.NotNil(t, response.Metadata) + assert.Empty(t, response.Metadata) + assert.Equal(t, now, response.CreatedAt) + assert.Nil(t, response.ExpiresAt) + }) +} + +func TestDetokenizeResponse(t *testing.T) { + t.Run("Success_CreateResponse_WithMetadata", func(t *testing.T) { + response := DetokenizeResponse{ + Plaintext: "SGVsbG8gV29ybGQ=", + Metadata: map[string]any{"key": "value"}, + } + + assert.Equal(t, "SGVsbG8gV29ybGQ=", response.Plaintext) + assert.NotNil(t, response.Metadata) + assert.Equal(t, "value", response.Metadata["key"]) + }) + + t.Run("Success_CreateResponse_WithoutMetadata", func(t *testing.T) { + response := DetokenizeResponse{ + Plaintext: "SGVsbG8gV29ybGQ=", + Metadata: nil, + } + + assert.Equal(t, "SGVsbG8gV29ybGQ=", response.Plaintext) + assert.Nil(t, response.Metadata) + }) +} + +func TestValidateTokenResponse(t *testing.T) { + 
t.Run("Success_ValidToken", func(t *testing.T) { + response := ValidateTokenResponse{ + Valid: true, + } + + assert.True(t, response.Valid) + }) + + t.Run("Success_InvalidToken", func(t *testing.T) { + response := ValidateTokenResponse{ + Valid: false, + } + + assert.False(t, response.Valid) + }) +} diff --git a/internal/tokenization/http/test_helpers.go b/internal/tokenization/http/test_helpers.go new file mode 100644 index 0000000..0e1aaf7 --- /dev/null +++ b/internal/tokenization/http/test_helpers.go @@ -0,0 +1,29 @@ +// Package http provides HTTP handlers for tokenization key management and token operations. +package http + +import ( + "bytes" + "encoding/json" + "io" + "net/http/httptest" + + "github.com/gin-gonic/gin" +) + +// createTestContext creates a test Gin context with the given request. +func createTestContext(method, path string, body interface{}) (*gin.Context, *httptest.ResponseRecorder) { + w := httptest.NewRecorder() + c, _ := gin.CreateTestContext(w) + + var bodyReader io.Reader + if body != nil { + bodyBytes, _ := json.Marshal(body) + bodyReader = bytes.NewReader(bodyBytes) + } + + req := httptest.NewRequest(method, path, bodyReader) + req.Header.Set("Content-Type", "application/json") + c.Request = req + + return c, w +} diff --git a/internal/tokenization/http/tokenization_handler.go b/internal/tokenization/http/tokenization_handler.go new file mode 100644 index 0000000..2a98e0b --- /dev/null +++ b/internal/tokenization/http/tokenization_handler.go @@ -0,0 +1,202 @@ +// Package http provides HTTP handlers for tokenization key management and token operations. +package http + +import ( + "encoding/base64" + "fmt" + "log/slog" + "net/http" + "time" + + "github.com/gin-gonic/gin" + + cryptoDomain "github.com/allisson/secrets/internal/crypto/domain" + "github.com/allisson/secrets/internal/httputil" + "github.com/allisson/secrets/internal/tokenization/http/dto" + tokenizationUseCase "github.com/allisson/secrets/internal/tokenization/usecase" + customValidation "github.com/allisson/secrets/internal/validation" +) + +// TokenizationHandler handles HTTP requests for tokenization operations. +// Coordinates tokenize, detokenize, validate, and revoke operations with TokenizationUseCase. +type TokenizationHandler struct { + tokenizationUseCase tokenizationUseCase.TokenizationUseCase + logger *slog.Logger +} + +// NewTokenizationHandler creates a new tokenization handler with required dependencies. +func NewTokenizationHandler( + tokenizationUseCase tokenizationUseCase.TokenizationUseCase, + logger *slog.Logger, +) *TokenizationHandler { + return &TokenizationHandler{ + tokenizationUseCase: tokenizationUseCase, + logger: logger, + } +} + +// TokenizeHandler generates a token for the given plaintext value using the named key. +// POST /v1/tokenization/keys/:name/tokenize - Requires EncryptCapability. +// In deterministic mode, returns existing token if the value has been tokenized before. +// Returns 201 Created with token and metadata. 
+func (h *TokenizationHandler) TokenizeHandler(c *gin.Context) { + var req dto.TokenizeRequest + + // Parse and bind JSON + if err := c.ShouldBindJSON(&req); err != nil { + httputil.HandleValidationErrorGin(c, err, h.logger) + return + } + + // Validate request + if err := req.Validate(); err != nil { + httputil.HandleValidationErrorGin(c, customValidation.WrapValidationError(err), h.logger) + return + } + + // Get key name from URL parameter + keyName := c.Param("name") + if keyName == "" { + httputil.HandleValidationErrorGin(c, + fmt.Errorf("key name is required in URL path"), + h.logger) + return + } + + // Decode base64 plaintext + plaintext, err := base64.StdEncoding.DecodeString(req.Plaintext) + if err != nil { + httputil.HandleValidationErrorGin(c, + fmt.Errorf("plaintext must be valid base64"), + h.logger) + return + } + + // Calculate expiration time if TTL is provided + var expiresAt *time.Time + if req.TTL != nil { + expiry := time.Now().UTC().Add(time.Duration(*req.TTL) * time.Second) + expiresAt = &expiry + } + + // Call use case + token, err := h.tokenizationUseCase.Tokenize( + c.Request.Context(), + keyName, + plaintext, + req.Metadata, + expiresAt, + ) + if err != nil { + httputil.HandleErrorGin(c, err, h.logger) + return + } + + // Return response + response := dto.MapTokenToTokenizeResponse(token) + c.JSON(http.StatusCreated, response) +} + +// DetokenizeHandler retrieves the original plaintext value for a given token. +// POST /v1/tokenization/detokenize - Requires DecryptCapability. +// Returns 200 OK with base64-encoded plaintext and metadata. +func (h *TokenizationHandler) DetokenizeHandler(c *gin.Context) { + var req dto.DetokenizeRequest + + // Parse and bind JSON + if err := c.ShouldBindJSON(&req); err != nil { + httputil.HandleValidationErrorGin(c, err, h.logger) + return + } + + // Validate request + if err := req.Validate(); err != nil { + httputil.HandleValidationErrorGin(c, customValidation.WrapValidationError(err), h.logger) + return + } + + // Call use case + plaintext, metadata, err := h.tokenizationUseCase.Detokenize( + c.Request.Context(), + req.Token, + ) + if err != nil { + httputil.HandleErrorGin(c, err, h.logger) + return + } + // SECURITY: Zero plaintext from memory after encoding + defer cryptoDomain.Zero(plaintext) + + // Encode plaintext as base64 for JSON response + plaintextB64 := base64.StdEncoding.EncodeToString(plaintext) + + // Return response + response := dto.DetokenizeResponse{ + Plaintext: plaintextB64, + Metadata: metadata, + } + c.JSON(http.StatusOK, response) +} + +// ValidateHandler checks if a token exists and is valid (not expired or revoked). +// POST /v1/tokenization/validate - Requires ReadCapability. +// Returns 200 OK with validation result. 
+func (h *TokenizationHandler) ValidateHandler(c *gin.Context) { + var req dto.ValidateTokenRequest + + // Parse and bind JSON + if err := c.ShouldBindJSON(&req); err != nil { + httputil.HandleValidationErrorGin(c, err, h.logger) + return + } + + // Validate request + if err := req.Validate(); err != nil { + httputil.HandleValidationErrorGin(c, customValidation.WrapValidationError(err), h.logger) + return + } + + // Call use case + isValid, err := h.tokenizationUseCase.Validate( + c.Request.Context(), + req.Token, + ) + if err != nil { + httputil.HandleErrorGin(c, err, h.logger) + return + } + + // Return response + response := dto.ValidateTokenResponse{ + Valid: isValid, + } + c.JSON(http.StatusOK, response) +} + +// RevokeHandler marks a token as revoked, preventing further detokenization. +// POST /v1/tokenization/revoke - Requires DeleteCapability. +// Returns 204 No Content on success. +func (h *TokenizationHandler) RevokeHandler(c *gin.Context) { + var req dto.RevokeTokenRequest + + // Parse and bind JSON + if err := c.ShouldBindJSON(&req); err != nil { + httputil.HandleValidationErrorGin(c, err, h.logger) + return + } + + // Validate request + if err := req.Validate(); err != nil { + httputil.HandleValidationErrorGin(c, customValidation.WrapValidationError(err), h.logger) + return + } + + // Call use case + if err := h.tokenizationUseCase.Revoke(c.Request.Context(), req.Token); err != nil { + httputil.HandleErrorGin(c, err, h.logger) + return + } + + // Return 204 No Content + c.Data(http.StatusNoContent, "application/json", nil) +} diff --git a/internal/tokenization/http/tokenization_handler_test.go b/internal/tokenization/http/tokenization_handler_test.go new file mode 100644 index 0000000..c149215 --- /dev/null +++ b/internal/tokenization/http/tokenization_handler_test.go @@ -0,0 +1,525 @@ +package http + +import ( + "bytes" + "encoding/base64" + "encoding/json" + "errors" + "io" + "log/slog" + "net/http" + "testing" + "time" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" + "github.com/allisson/secrets/internal/tokenization/http/dto" + "github.com/allisson/secrets/internal/tokenization/usecase/mocks" +) + +// setupTestTokenizationHandler creates a test handler with mocked dependencies. 
+func setupTestTokenizationHandler(t *testing.T) (*TokenizationHandler, *mocks.MockTokenizationUseCase) { + t.Helper() + + gin.SetMode(gin.TestMode) + + mockTokenizationUseCase := mocks.NewMockTokenizationUseCase(t) + logger := slog.New(slog.NewTextHandler(io.Discard, nil)) + + handler := NewTokenizationHandler(mockTokenizationUseCase, logger) + + return handler, mockTokenizationUseCase +} + +func TestTokenizationHandler_TokenizeHandler(t *testing.T) { + t.Run("Success_TokenizeValue", func(t *testing.T) { + handler, mockUseCase := setupTestTokenizationHandler(t) + + plaintext := []byte("test-value") + plaintextB64 := base64.StdEncoding.EncodeToString(plaintext) + metadata := map[string]any{"last4": "alue"} + ttl := 3600 + + request := dto.TokenizeRequest{ + Plaintext: plaintextB64, + Metadata: metadata, + TTL: &ttl, + } + + expectedToken := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: uuid.Must(uuid.NewV7()), + Token: "tok_123456", + Ciphertext: []byte("encrypted"), + Nonce: []byte("nonce"), + Metadata: metadata, + CreatedAt: time.Now().UTC(), + ExpiresAt: func() *time.Time { t := time.Now().UTC().Add(1 * time.Hour); return &t }(), + } + + mockUseCase.EXPECT(). + Tokenize( + mock.Anything, + "test-key", + plaintext, + metadata, + mock.MatchedBy(func(expiresAt *time.Time) bool { + return expiresAt != nil + }), + ). + Return(expectedToken, nil). + Once() + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys/test-key/tokenize", request) + c.Params = gin.Params{{Key: "name", Value: "test-key"}} + + handler.TokenizeHandler(c) + + assert.Equal(t, http.StatusCreated, w.Code) + + var response dto.TokenizeResponse + err := json.Unmarshal(w.Body.Bytes(), &response) + assert.NoError(t, err) + assert.Equal(t, "tok_123456", response.Token) + assert.Equal(t, metadata, response.Metadata) + assert.NotNil(t, response.ExpiresAt) + }) + + t.Run("Success_TokenizeWithoutTTL", func(t *testing.T) { + handler, mockUseCase := setupTestTokenizationHandler(t) + + plaintext := []byte("test-value") + plaintextB64 := base64.StdEncoding.EncodeToString(plaintext) + + request := dto.TokenizeRequest{ + Plaintext: plaintextB64, + Metadata: nil, + TTL: nil, + } + + expectedToken := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: uuid.Must(uuid.NewV7()), + Token: "tok_123456", + Ciphertext: []byte("encrypted"), + Nonce: []byte("nonce"), + CreatedAt: time.Now().UTC(), + ExpiresAt: nil, + } + + mockUseCase.EXPECT(). + Tokenize( + mock.Anything, + "test-key", + plaintext, + mock.Anything, + mock.Anything, + ). + Return(expectedToken, nil). 
+ Once() + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys/test-key/tokenize", request) + c.Params = gin.Params{{Key: "name", Value: "test-key"}} + + handler.TokenizeHandler(c) + + assert.Equal(t, http.StatusCreated, w.Code) + + var response dto.TokenizeResponse + err := json.Unmarshal(w.Body.Bytes(), &response) + assert.NoError(t, err) + assert.Equal(t, "tok_123456", response.Token) + assert.Nil(t, response.ExpiresAt) + }) + + t.Run("Error_InvalidJSON", func(t *testing.T) { + handler, _ := setupTestTokenizationHandler(t) + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys/test-key/tokenize", nil) + c.Request.Body = io.NopCloser(bytes.NewReader([]byte("invalid json"))) + c.Params = gin.Params{{Key: "name", Value: "test-key"}} + + handler.TokenizeHandler(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + }) + + t.Run("Error_MissingPlaintext", func(t *testing.T) { + handler, _ := setupTestTokenizationHandler(t) + + request := dto.TokenizeRequest{ + Plaintext: "", + Metadata: nil, + TTL: nil, + } + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys/test-key/tokenize", request) + c.Params = gin.Params{{Key: "name", Value: "test-key"}} + + handler.TokenizeHandler(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + }) + + t.Run("Error_InvalidBase64", func(t *testing.T) { + handler, _ := setupTestTokenizationHandler(t) + + request := dto.TokenizeRequest{ + Plaintext: "not-valid-base64!!!", + Metadata: nil, + TTL: nil, + } + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys/test-key/tokenize", request) + c.Params = gin.Params{{Key: "name", Value: "test-key"}} + + handler.TokenizeHandler(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + }) + + t.Run("Error_MissingKeyName", func(t *testing.T) { + handler, _ := setupTestTokenizationHandler(t) + + plaintext := []byte("test-value") + plaintextB64 := base64.StdEncoding.EncodeToString(plaintext) + + request := dto.TokenizeRequest{ + Plaintext: plaintextB64, + } + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys//tokenize", request) + c.Params = gin.Params{{Key: "name", Value: ""}} + + handler.TokenizeHandler(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + }) + + t.Run("Error_KeyNotFound", func(t *testing.T) { + handler, mockUseCase := setupTestTokenizationHandler(t) + + plaintext := []byte("test-value") + plaintextB64 := base64.StdEncoding.EncodeToString(plaintext) + + request := dto.TokenizeRequest{ + Plaintext: plaintextB64, + } + + mockUseCase.EXPECT(). + Tokenize(mock.Anything, "nonexistent-key", plaintext, mock.Anything, mock.Anything). + Return(nil, tokenizationDomain.ErrTokenizationKeyNotFound). + Once() + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys/nonexistent-key/tokenize", request) + c.Params = gin.Params{{Key: "name", Value: "nonexistent-key"}} + + handler.TokenizeHandler(c) + + assert.Equal(t, http.StatusNotFound, w.Code) + }) +} + +func TestTokenizationHandler_DetokenizeHandler(t *testing.T) { + t.Run("Success_Detokenize", func(t *testing.T) { + handler, mockUseCase := setupTestTokenizationHandler(t) + + plaintext := []byte("original-value") + plaintextCopy := make([]byte, len(plaintext)) + copy(plaintextCopy, plaintext) + metadata := map[string]any{"last4": "alue"} + + request := dto.DetokenizeRequest{ + Token: "tok_123456", + } + + mockUseCase.EXPECT(). + Detokenize(mock.Anything, "tok_123456"). + Return(plaintext, metadata, nil). 
+ Once() + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/detokenize", request) + + handler.DetokenizeHandler(c) + + assert.Equal(t, http.StatusOK, w.Code) + + var response dto.DetokenizeResponse + err := json.Unmarshal(w.Body.Bytes(), &response) + assert.NoError(t, err) + + decodedPlaintext, err := base64.StdEncoding.DecodeString(response.Plaintext) + assert.NoError(t, err) + assert.Equal(t, plaintextCopy, decodedPlaintext) + assert.Equal(t, metadata, response.Metadata) + }) + + t.Run("Error_InvalidJSON", func(t *testing.T) { + handler, _ := setupTestTokenizationHandler(t) + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/detokenize", nil) + c.Request.Body = io.NopCloser(bytes.NewReader([]byte("invalid json"))) + + handler.DetokenizeHandler(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + }) + + t.Run("Error_MissingToken", func(t *testing.T) { + handler, _ := setupTestTokenizationHandler(t) + + request := dto.DetokenizeRequest{ + Token: "", + } + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/detokenize", request) + + handler.DetokenizeHandler(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + }) + + t.Run("Error_TokenNotFound", func(t *testing.T) { + handler, mockUseCase := setupTestTokenizationHandler(t) + + request := dto.DetokenizeRequest{ + Token: "tok_nonexistent", + } + + mockUseCase.EXPECT(). + Detokenize(mock.Anything, "tok_nonexistent"). + Return(nil, nil, tokenizationDomain.ErrTokenNotFound). + Once() + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/detokenize", request) + + handler.DetokenizeHandler(c) + + assert.Equal(t, http.StatusNotFound, w.Code) + }) + + t.Run("Error_TokenExpired", func(t *testing.T) { + handler, mockUseCase := setupTestTokenizationHandler(t) + + request := dto.DetokenizeRequest{ + Token: "tok_expired", + } + + mockUseCase.EXPECT(). + Detokenize(mock.Anything, "tok_expired"). + Return(nil, nil, tokenizationDomain.ErrTokenExpired). + Once() + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/detokenize", request) + + handler.DetokenizeHandler(c) + + assert.Equal(t, http.StatusUnprocessableEntity, w.Code) + }) + + t.Run("Error_TokenRevoked", func(t *testing.T) { + handler, mockUseCase := setupTestTokenizationHandler(t) + + request := dto.DetokenizeRequest{ + Token: "tok_revoked", + } + + mockUseCase.EXPECT(). + Detokenize(mock.Anything, "tok_revoked"). + Return(nil, nil, tokenizationDomain.ErrTokenRevoked). + Once() + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/detokenize", request) + + handler.DetokenizeHandler(c) + + assert.Equal(t, http.StatusUnprocessableEntity, w.Code) + }) +} + +func TestTokenizationHandler_ValidateHandler(t *testing.T) { + t.Run("Success_ValidToken", func(t *testing.T) { + handler, mockUseCase := setupTestTokenizationHandler(t) + + request := dto.ValidateTokenRequest{ + Token: "tok_valid", + } + + mockUseCase.EXPECT(). + Validate(mock.Anything, "tok_valid"). + Return(true, nil). + Once() + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/validate", request) + + handler.ValidateHandler(c) + + assert.Equal(t, http.StatusOK, w.Code) + + var response dto.ValidateTokenResponse + err := json.Unmarshal(w.Body.Bytes(), &response) + assert.NoError(t, err) + assert.True(t, response.Valid) + }) + + t.Run("Success_InvalidToken", func(t *testing.T) { + handler, mockUseCase := setupTestTokenizationHandler(t) + + request := dto.ValidateTokenRequest{ + Token: "tok_invalid", + } + + mockUseCase.EXPECT(). 
+ Validate(mock.Anything, "tok_invalid"). + Return(false, nil). + Once() + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/validate", request) + + handler.ValidateHandler(c) + + assert.Equal(t, http.StatusOK, w.Code) + + var response dto.ValidateTokenResponse + err := json.Unmarshal(w.Body.Bytes(), &response) + assert.NoError(t, err) + assert.False(t, response.Valid) + }) + + t.Run("Error_InvalidJSON", func(t *testing.T) { + handler, _ := setupTestTokenizationHandler(t) + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/validate", nil) + c.Request.Body = io.NopCloser(bytes.NewReader([]byte("invalid json"))) + + handler.ValidateHandler(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + }) + + t.Run("Error_MissingToken", func(t *testing.T) { + handler, _ := setupTestTokenizationHandler(t) + + request := dto.ValidateTokenRequest{ + Token: "", + } + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/validate", request) + + handler.ValidateHandler(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + }) + + t.Run("Error_UseCaseError", func(t *testing.T) { + handler, mockUseCase := setupTestTokenizationHandler(t) + + request := dto.ValidateTokenRequest{ + Token: "tok_test", + } + + dbError := errors.New("database error") + + mockUseCase.EXPECT(). + Validate(mock.Anything, "tok_test"). + Return(false, dbError). + Once() + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/validate", request) + + handler.ValidateHandler(c) + + assert.Equal(t, http.StatusInternalServerError, w.Code) + }) +} + +func TestTokenizationHandler_RevokeHandler(t *testing.T) { + t.Run("Success_RevokeToken", func(t *testing.T) { + handler, mockUseCase := setupTestTokenizationHandler(t) + + request := dto.RevokeTokenRequest{ + Token: "tok_revoke", + } + + mockUseCase.EXPECT(). + Revoke(mock.Anything, "tok_revoke"). + Return(nil). + Once() + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/revoke", request) + + handler.RevokeHandler(c) + + assert.Equal(t, http.StatusNoContent, w.Code) + assert.Empty(t, w.Body.String()) + }) + + t.Run("Error_InvalidJSON", func(t *testing.T) { + handler, _ := setupTestTokenizationHandler(t) + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/revoke", nil) + c.Request.Body = io.NopCloser(bytes.NewReader([]byte("invalid json"))) + + handler.RevokeHandler(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + }) + + t.Run("Error_MissingToken", func(t *testing.T) { + handler, _ := setupTestTokenizationHandler(t) + + request := dto.RevokeTokenRequest{ + Token: "", + } + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/revoke", request) + + handler.RevokeHandler(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + }) + + t.Run("Error_TokenNotFound", func(t *testing.T) { + handler, mockUseCase := setupTestTokenizationHandler(t) + + request := dto.RevokeTokenRequest{ + Token: "tok_nonexistent", + } + + mockUseCase.EXPECT(). + Revoke(mock.Anything, "tok_nonexistent"). + Return(tokenizationDomain.ErrTokenNotFound). + Once() + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/revoke", request) + + handler.RevokeHandler(c) + + assert.Equal(t, http.StatusNotFound, w.Code) + }) + + t.Run("Error_UseCaseError", func(t *testing.T) { + handler, mockUseCase := setupTestTokenizationHandler(t) + + request := dto.RevokeTokenRequest{ + Token: "tok_test", + } + + dbError := errors.New("database error") + + mockUseCase.EXPECT(). + Revoke(mock.Anything, "tok_test"). + Return(dbError). 
+			Once()
+
+		c, w := createTestContext(http.MethodPost, "/v1/tokenization/revoke", request)
+
+		handler.RevokeHandler(c)
+
+		assert.Equal(t, http.StatusInternalServerError, w.Code)
+	})
+}
diff --git a/internal/tokenization/http/tokenization_key_handler.go b/internal/tokenization/http/tokenization_key_handler.go
new file mode 100644
index 0000000..df2cee4
--- /dev/null
+++ b/internal/tokenization/http/tokenization_key_handler.go
@@ -0,0 +1,164 @@
+// Package http provides HTTP handlers for tokenization key management and token operations.
+package http
+
+import (
+	"fmt"
+	"log/slog"
+	"net/http"
+
+	"github.com/gin-gonic/gin"
+	"github.com/google/uuid"
+
+	"github.com/allisson/secrets/internal/httputil"
+	"github.com/allisson/secrets/internal/tokenization/http/dto"
+	tokenizationUseCase "github.com/allisson/secrets/internal/tokenization/usecase"
+	customValidation "github.com/allisson/secrets/internal/validation"
+)
+
+// TokenizationKeyHandler handles HTTP requests for tokenization key management operations.
+// Coordinates key creation, rotation, and deletion with TokenizationKeyUseCase.
+type TokenizationKeyHandler struct {
+	keyUseCase tokenizationUseCase.TokenizationKeyUseCase
+	logger     *slog.Logger
+}
+
+// NewTokenizationKeyHandler creates a new tokenization key handler with required dependencies.
+func NewTokenizationKeyHandler(
+	keyUseCase tokenizationUseCase.TokenizationKeyUseCase,
+	logger *slog.Logger,
+) *TokenizationKeyHandler {
+	return &TokenizationKeyHandler{
+		keyUseCase: keyUseCase,
+		logger:     logger,
+	}
+}
+
+// CreateHandler creates a new tokenization key with version 1.
+// POST /v1/tokenization/keys - Requires WriteCapability.
+// Returns 201 Created with key details.
+func (h *TokenizationKeyHandler) CreateHandler(c *gin.Context) {
+	var req dto.CreateTokenizationKeyRequest
+
+	// Parse and bind JSON
+	if err := c.ShouldBindJSON(&req); err != nil {
+		httputil.HandleValidationErrorGin(c, err, h.logger)
+		return
+	}
+
+	// Validate request
+	if err := req.Validate(); err != nil {
+		httputil.HandleValidationErrorGin(c, customValidation.WrapValidationError(err), h.logger)
+		return
+	}
+
+	// Parse format type and algorithm
+	formatType, err := dto.ParseFormatType(req.FormatType)
+	if err != nil {
+		httputil.HandleValidationErrorGin(c, err, h.logger)
+		return
+	}
+
+	algorithm, err := dto.ParseAlgorithm(req.Algorithm)
+	if err != nil {
+		httputil.HandleValidationErrorGin(c, err, h.logger)
+		return
+	}
+
+	// Call use case
+	key, err := h.keyUseCase.Create(
+		c.Request.Context(),
+		req.Name,
+		formatType,
+		req.IsDeterministic,
+		algorithm,
+	)
+	if err != nil {
+		httputil.HandleErrorGin(c, err, h.logger)
+		return
+	}
+
+	// Return response
+	response := dto.MapTokenizationKeyToResponse(key)
+	c.JSON(http.StatusCreated, response)
+}
+
+// RotateHandler creates a new version of an existing tokenization key.
+// POST /v1/tokenization/keys/:name/rotate - Requires RotateCapability.
+// Returns 201 Created with new key version.
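+//
+// Illustrative request body (field names assume snake_case JSON tags on
+// dto.RotateTokenizationKeyRequest; values mirror the handler tests):
+//
+//	{"format_type": "numeric", "is_deterministic": true, "algorithm": "aes-gcm"}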
+func (h *TokenizationKeyHandler) RotateHandler(c *gin.Context) { + var req dto.RotateTokenizationKeyRequest + + // Parse and bind JSON + if err := c.ShouldBindJSON(&req); err != nil { + httputil.HandleValidationErrorGin(c, err, h.logger) + return + } + + // Validate request + if err := req.Validate(); err != nil { + httputil.HandleValidationErrorGin(c, customValidation.WrapValidationError(err), h.logger) + return + } + + // Get key name from URL parameter + keyName := c.Param("name") + if keyName == "" { + httputil.HandleValidationErrorGin(c, + fmt.Errorf("key name is required in URL path"), + h.logger) + return + } + + // Parse format type and algorithm + formatType, err := dto.ParseFormatType(req.FormatType) + if err != nil { + httputil.HandleValidationErrorGin(c, err, h.logger) + return + } + + algorithm, err := dto.ParseAlgorithm(req.Algorithm) + if err != nil { + httputil.HandleValidationErrorGin(c, err, h.logger) + return + } + + // Call use case + key, err := h.keyUseCase.Rotate( + c.Request.Context(), + keyName, + formatType, + req.IsDeterministic, + algorithm, + ) + if err != nil { + httputil.HandleErrorGin(c, err, h.logger) + return + } + + // Return response + response := dto.MapTokenizationKeyToResponse(key) + c.JSON(http.StatusCreated, response) +} + +// DeleteHandler soft-deletes a tokenization key by ID. +// DELETE /v1/tokenization/keys/:id - Requires DeleteCapability. +// Returns 204 No Content on success. +func (h *TokenizationKeyHandler) DeleteHandler(c *gin.Context) { + // Parse and validate UUID + keyID, err := uuid.Parse(c.Param("id")) + if err != nil { + httputil.HandleValidationErrorGin(c, + fmt.Errorf("invalid key ID format: must be a valid UUID"), + h.logger) + return + } + + // Call use case + if err := h.keyUseCase.Delete(c.Request.Context(), keyID); err != nil { + httputil.HandleErrorGin(c, err, h.logger) + return + } + + // Return 204 No Content + c.Data(http.StatusNoContent, "application/json", nil) +} diff --git a/internal/tokenization/http/tokenization_key_handler_test.go b/internal/tokenization/http/tokenization_key_handler_test.go new file mode 100644 index 0000000..7b1ece3 --- /dev/null +++ b/internal/tokenization/http/tokenization_key_handler_test.go @@ -0,0 +1,413 @@ +package http + +import ( + "bytes" + "encoding/json" + "errors" + "io" + "log/slog" + "net/http" + "testing" + "time" + + "github.com/gin-gonic/gin" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + + cryptoDomain "github.com/allisson/secrets/internal/crypto/domain" + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" + "github.com/allisson/secrets/internal/tokenization/http/dto" + "github.com/allisson/secrets/internal/tokenization/usecase/mocks" +) + +// setupTestKeyHandler creates a test handler with mocked dependencies. 
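+// As with setupTestTokenizationHandler, Gin is switched to test mode and logs
+// are discarded so subtests only observe status codes and response bodies.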
+func setupTestKeyHandler(t *testing.T) (*TokenizationKeyHandler, *mocks.MockTokenizationKeyUseCase) { + t.Helper() + + gin.SetMode(gin.TestMode) + + mockKeyUseCase := mocks.NewMockTokenizationKeyUseCase(t) + logger := slog.New(slog.NewTextHandler(io.Discard, nil)) + + handler := NewTokenizationKeyHandler(mockKeyUseCase, logger) + + return handler, mockKeyUseCase +} + +func TestTokenizationKeyHandler_CreateHandler(t *testing.T) { + t.Run("Success_CreateKeyWithUUID", func(t *testing.T) { + handler, mockUseCase := setupTestKeyHandler(t) + + keyID := uuid.Must(uuid.NewV7()) + request := dto.CreateTokenizationKeyRequest{ + Name: "test-key", + FormatType: "uuid", + IsDeterministic: false, + Algorithm: "aes-gcm", + } + + expectedKey := &tokenizationDomain.TokenizationKey{ + ID: keyID, + Name: "test-key", + Version: 1, + FormatType: tokenizationDomain.FormatUUID, + IsDeterministic: false, + DekID: uuid.Must(uuid.NewV7()), + CreatedAt: time.Now().UTC(), + } + + mockUseCase.EXPECT(). + Create(mock.Anything, "test-key", tokenizationDomain.FormatUUID, false, cryptoDomain.AESGCM). + Return(expectedKey, nil). + Once() + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys", request) + + handler.CreateHandler(c) + + assert.Equal(t, http.StatusCreated, w.Code) + + var response dto.TokenizationKeyResponse + err := json.Unmarshal(w.Body.Bytes(), &response) + assert.NoError(t, err) + assert.Equal(t, keyID.String(), response.ID) + assert.Equal(t, "test-key", response.Name) + assert.Equal(t, uint(1), response.Version) + assert.Equal(t, "uuid", response.FormatType) + assert.False(t, response.IsDeterministic) + }) + + t.Run("Success_CreateKeyWithLuhnPreserving", func(t *testing.T) { + handler, mockUseCase := setupTestKeyHandler(t) + + keyID := uuid.Must(uuid.NewV7()) + request := dto.CreateTokenizationKeyRequest{ + Name: "cc-tokenizer", + FormatType: "luhn-preserving", + IsDeterministic: true, + Algorithm: "chacha20-poly1305", + } + + expectedKey := &tokenizationDomain.TokenizationKey{ + ID: keyID, + Name: "cc-tokenizer", + Version: 1, + FormatType: tokenizationDomain.FormatLuhnPreserving, + IsDeterministic: true, + DekID: uuid.Must(uuid.NewV7()), + CreatedAt: time.Now().UTC(), + } + + mockUseCase.EXPECT(). + Create( + mock.Anything, + "cc-tokenizer", + tokenizationDomain.FormatLuhnPreserving, + true, + cryptoDomain.ChaCha20, + ). + Return(expectedKey, nil). 
+ Once() + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys", request) + + handler.CreateHandler(c) + + assert.Equal(t, http.StatusCreated, w.Code) + + var response dto.TokenizationKeyResponse + err := json.Unmarshal(w.Body.Bytes(), &response) + assert.NoError(t, err) + assert.Equal(t, keyID.String(), response.ID) + assert.Equal(t, "cc-tokenizer", response.Name) + assert.Equal(t, "luhn-preserving", response.FormatType) + assert.True(t, response.IsDeterministic) + }) + + t.Run("Error_InvalidJSON", func(t *testing.T) { + handler, _ := setupTestKeyHandler(t) + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys", nil) + c.Request.Body = io.NopCloser(bytes.NewReader([]byte("invalid json"))) + + handler.CreateHandler(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + + var response map[string]interface{} + err := json.Unmarshal(w.Body.Bytes(), &response) + assert.NoError(t, err) + assert.Equal(t, "validation_error", response["error"]) + }) + + t.Run("Error_MissingName", func(t *testing.T) { + handler, _ := setupTestKeyHandler(t) + + request := dto.CreateTokenizationKeyRequest{ + Name: "", + FormatType: "uuid", + IsDeterministic: false, + Algorithm: "aes-gcm", + } + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys", request) + + handler.CreateHandler(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + + var response map[string]interface{} + err := json.Unmarshal(w.Body.Bytes(), &response) + assert.NoError(t, err) + assert.Equal(t, "validation_error", response["error"]) + }) + + t.Run("Error_InvalidFormatType", func(t *testing.T) { + handler, _ := setupTestKeyHandler(t) + + request := dto.CreateTokenizationKeyRequest{ + Name: "test-key", + FormatType: "invalid-format", + IsDeterministic: false, + Algorithm: "aes-gcm", + } + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys", request) + + handler.CreateHandler(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + + var response map[string]interface{} + err := json.Unmarshal(w.Body.Bytes(), &response) + assert.NoError(t, err) + assert.Equal(t, "validation_error", response["error"]) + }) + + t.Run("Error_InvalidAlgorithm", func(t *testing.T) { + handler, _ := setupTestKeyHandler(t) + + request := dto.CreateTokenizationKeyRequest{ + Name: "test-key", + FormatType: "uuid", + IsDeterministic: false, + Algorithm: "invalid-alg", + } + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys", request) + + handler.CreateHandler(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + + var response map[string]interface{} + err := json.Unmarshal(w.Body.Bytes(), &response) + assert.NoError(t, err) + assert.Equal(t, "validation_error", response["error"]) + }) + + t.Run("Error_KeyAlreadyExists", func(t *testing.T) { + handler, mockUseCase := setupTestKeyHandler(t) + + request := dto.CreateTokenizationKeyRequest{ + Name: "existing-key", + FormatType: "uuid", + IsDeterministic: false, + Algorithm: "aes-gcm", + } + + mockUseCase.EXPECT(). + Create(mock.Anything, "existing-key", tokenizationDomain.FormatUUID, false, cryptoDomain.AESGCM). + Return(nil, tokenizationDomain.ErrTokenizationKeyAlreadyExists). 
+ Once() + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys", request) + + handler.CreateHandler(c) + + assert.Equal(t, http.StatusConflict, w.Code) + }) +} + +func TestTokenizationKeyHandler_RotateHandler(t *testing.T) { + t.Run("Success_RotateKey", func(t *testing.T) { + handler, mockUseCase := setupTestKeyHandler(t) + + keyID := uuid.Must(uuid.NewV7()) + request := dto.RotateTokenizationKeyRequest{ + FormatType: "numeric", + IsDeterministic: true, + Algorithm: "aes-gcm", + } + + expectedKey := &tokenizationDomain.TokenizationKey{ + ID: keyID, + Name: "existing-key", + Version: 2, + FormatType: tokenizationDomain.FormatNumeric, + IsDeterministic: true, + DekID: uuid.Must(uuid.NewV7()), + CreatedAt: time.Now().UTC(), + } + + mockUseCase.EXPECT(). + Rotate( + mock.Anything, + "existing-key", + tokenizationDomain.FormatNumeric, + true, + cryptoDomain.AESGCM, + ). + Return(expectedKey, nil). + Once() + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys/existing-key/rotate", request) + c.Params = gin.Params{{Key: "name", Value: "existing-key"}} + + handler.RotateHandler(c) + + assert.Equal(t, http.StatusCreated, w.Code) + + var response dto.TokenizationKeyResponse + err := json.Unmarshal(w.Body.Bytes(), &response) + assert.NoError(t, err) + assert.Equal(t, keyID.String(), response.ID) + assert.Equal(t, "existing-key", response.Name) + assert.Equal(t, uint(2), response.Version) + assert.Equal(t, "numeric", response.FormatType) + assert.True(t, response.IsDeterministic) + }) + + t.Run("Error_MissingKeyNameInURL", func(t *testing.T) { + handler, _ := setupTestKeyHandler(t) + + request := dto.RotateTokenizationKeyRequest{ + FormatType: "uuid", + IsDeterministic: false, + Algorithm: "aes-gcm", + } + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys//rotate", request) + c.Params = gin.Params{{Key: "name", Value: ""}} + + handler.RotateHandler(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + }) + + t.Run("Error_InvalidJSON", func(t *testing.T) { + handler, _ := setupTestKeyHandler(t) + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys/test-key/rotate", nil) + c.Request.Body = io.NopCloser(bytes.NewReader([]byte("invalid json"))) + c.Params = gin.Params{{Key: "name", Value: "test-key"}} + + handler.RotateHandler(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + }) + + t.Run("Error_KeyNotFound", func(t *testing.T) { + handler, mockUseCase := setupTestKeyHandler(t) + + request := dto.RotateTokenizationKeyRequest{ + FormatType: "uuid", + IsDeterministic: false, + Algorithm: "aes-gcm", + } + + mockUseCase.EXPECT(). + Rotate( + mock.Anything, + "nonexistent-key", + tokenizationDomain.FormatUUID, + false, + cryptoDomain.AESGCM, + ). + Return(nil, tokenizationDomain.ErrTokenizationKeyNotFound). + Once() + + c, w := createTestContext(http.MethodPost, "/v1/tokenization/keys/nonexistent-key/rotate", request) + c.Params = gin.Params{{Key: "name", Value: "nonexistent-key"}} + + handler.RotateHandler(c) + + assert.Equal(t, http.StatusNotFound, w.Code) + }) +} + +func TestTokenizationKeyHandler_DeleteHandler(t *testing.T) { + t.Run("Success_DeleteKey", func(t *testing.T) { + handler, mockUseCase := setupTestKeyHandler(t) + + keyID := uuid.Must(uuid.NewV7()) + + mockUseCase.EXPECT(). + Delete(mock.Anything, keyID). + Return(nil). 
+ Once() + + c, w := createTestContext(http.MethodDelete, "/v1/tokenization/keys/"+keyID.String(), nil) + c.Params = gin.Params{{Key: "id", Value: keyID.String()}} + + handler.DeleteHandler(c) + + assert.Equal(t, http.StatusNoContent, w.Code) + assert.Empty(t, w.Body.String()) + }) + + t.Run("Error_InvalidUUID", func(t *testing.T) { + handler, _ := setupTestKeyHandler(t) + + c, w := createTestContext(http.MethodDelete, "/v1/tokenization/keys/invalid-uuid", nil) + c.Params = gin.Params{{Key: "id", Value: "invalid-uuid"}} + + handler.DeleteHandler(c) + + assert.Equal(t, http.StatusBadRequest, w.Code) + + var response map[string]interface{} + err := json.Unmarshal(w.Body.Bytes(), &response) + assert.NoError(t, err) + assert.Equal(t, "validation_error", response["error"]) + assert.Contains(t, response["message"], "invalid key ID format") + }) + + t.Run("Error_KeyNotFound", func(t *testing.T) { + handler, mockUseCase := setupTestKeyHandler(t) + + keyID := uuid.Must(uuid.NewV7()) + + mockUseCase.EXPECT(). + Delete(mock.Anything, keyID). + Return(tokenizationDomain.ErrTokenizationKeyNotFound). + Once() + + c, w := createTestContext(http.MethodDelete, "/v1/tokenization/keys/"+keyID.String(), nil) + c.Params = gin.Params{{Key: "id", Value: keyID.String()}} + + handler.DeleteHandler(c) + + assert.Equal(t, http.StatusNotFound, w.Code) + }) + + t.Run("Error_UseCaseError", func(t *testing.T) { + handler, mockUseCase := setupTestKeyHandler(t) + + keyID := uuid.Must(uuid.NewV7()) + dbError := errors.New("database error") + + mockUseCase.EXPECT(). + Delete(mock.Anything, keyID). + Return(dbError). + Once() + + c, w := createTestContext(http.MethodDelete, "/v1/tokenization/keys/"+keyID.String(), nil) + c.Params = gin.Params{{Key: "id", Value: keyID.String()}} + + handler.DeleteHandler(c) + + assert.Equal(t, http.StatusInternalServerError, w.Code) + }) +} diff --git a/internal/tokenization/repository/mysql_repository.go b/internal/tokenization/repository/mysql_repository.go new file mode 100644 index 0000000..da87602 --- /dev/null +++ b/internal/tokenization/repository/mysql_repository.go @@ -0,0 +1,462 @@ +package repository + +import ( + "context" + "database/sql" + "encoding/json" + "errors" + "time" + + "github.com/google/uuid" + + "github.com/allisson/secrets/internal/database" + apperrors "github.com/allisson/secrets/internal/errors" + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" +) + +// MySQLTokenizationKeyRepository implements tokenization key persistence for MySQL databases. +type MySQLTokenizationKeyRepository struct { + db *sql.DB +} + +// Create inserts a new tokenization key into the MySQL database. 
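+// UUID fields are serialized with MarshalBinary into 16-byte values, on the
+// assumption that the migration declares id and dek_id as BINARY(16) columns.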
+func (m *MySQLTokenizationKeyRepository) Create( + ctx context.Context, + key *tokenizationDomain.TokenizationKey, +) error { + querier := database.GetTx(ctx, m.db) + + query := `INSERT INTO tokenization_keys (id, name, version, format_type, is_deterministic, dek_id, created_at, deleted_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?)` + + id, err := key.ID.MarshalBinary() + if err != nil { + return apperrors.Wrap(err, "failed to marshal tokenization key id") + } + + dekID, err := key.DekID.MarshalBinary() + if err != nil { + return apperrors.Wrap(err, "failed to marshal dek id") + } + + _, err = querier.ExecContext( + ctx, + query, + id, + key.Name, + key.Version, + key.FormatType, + key.IsDeterministic, + dekID, + key.CreatedAt, + key.DeletedAt, + ) + if err != nil { + return apperrors.Wrap(err, "failed to create tokenization key") + } + return nil +} + +// Delete soft-deletes a tokenization key by setting its deleted_at timestamp. +func (m *MySQLTokenizationKeyRepository) Delete(ctx context.Context, keyID uuid.UUID) error { + querier := database.GetTx(ctx, m.db) + + query := `UPDATE tokenization_keys SET deleted_at = NOW() WHERE id = ?` + + id, err := keyID.MarshalBinary() + if err != nil { + return apperrors.Wrap(err, "failed to marshal tokenization key id") + } + + _, err = querier.ExecContext(ctx, query, id) + if err != nil { + return apperrors.Wrap(err, "failed to delete tokenization key") + } + + return nil +} + +// GetByName retrieves the latest non-deleted version of a tokenization key by name. +func (m *MySQLTokenizationKeyRepository) GetByName( + ctx context.Context, + name string, +) (*tokenizationDomain.TokenizationKey, error) { + querier := database.GetTx(ctx, m.db) + + query := `SELECT id, name, version, format_type, is_deterministic, dek_id, created_at, deleted_at + FROM tokenization_keys + WHERE name = ? AND deleted_at IS NULL + ORDER BY version DESC + LIMIT 1` + + var key tokenizationDomain.TokenizationKey + var id, dekID []byte + var formatType string + + err := querier.QueryRowContext(ctx, query, name).Scan( + &id, + &key.Name, + &key.Version, + &formatType, + &key.IsDeterministic, + &dekID, + &key.CreatedAt, + &key.DeletedAt, + ) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, tokenizationDomain.ErrTokenizationKeyNotFound + } + return nil, apperrors.Wrap(err, "failed to get tokenization key by name") + } + + if err := key.ID.UnmarshalBinary(id); err != nil { + return nil, apperrors.Wrap(err, "failed to unmarshal tokenization key id") + } + + if err := key.DekID.UnmarshalBinary(dekID); err != nil { + return nil, apperrors.Wrap(err, "failed to unmarshal dek id") + } + + key.FormatType = tokenizationDomain.FormatType(formatType) + return &key, nil +} + +// Get retrieves a tokenization key by its ID. +func (m *MySQLTokenizationKeyRepository) Get( + ctx context.Context, + keyID uuid.UUID, +) (*tokenizationDomain.TokenizationKey, error) { + querier := database.GetTx(ctx, m.db) + + query := `SELECT id, name, version, format_type, is_deterministic, dek_id, created_at, deleted_at + FROM tokenization_keys + WHERE id = ? 
AND deleted_at IS NULL` + + id, err := keyID.MarshalBinary() + if err != nil { + return nil, apperrors.Wrap(err, "failed to marshal tokenization key id") + } + + var key tokenizationDomain.TokenizationKey + var keyIDBinary, dekID []byte + var formatType string + + err = querier.QueryRowContext(ctx, query, id).Scan( + &keyIDBinary, + &key.Name, + &key.Version, + &formatType, + &key.IsDeterministic, + &dekID, + &key.CreatedAt, + &key.DeletedAt, + ) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, tokenizationDomain.ErrTokenizationKeyNotFound + } + return nil, apperrors.Wrap(err, "failed to get tokenization key by id") + } + + if err := key.ID.UnmarshalBinary(keyIDBinary); err != nil { + return nil, apperrors.Wrap(err, "failed to unmarshal tokenization key id") + } + + if err := key.DekID.UnmarshalBinary(dekID); err != nil { + return nil, apperrors.Wrap(err, "failed to unmarshal dek id") + } + + key.FormatType = tokenizationDomain.FormatType(formatType) + return &key, nil +} + +// GetByNameAndVersion retrieves a specific version of a tokenization key by name and version. +func (m *MySQLTokenizationKeyRepository) GetByNameAndVersion( + ctx context.Context, + name string, + version uint, +) (*tokenizationDomain.TokenizationKey, error) { + querier := database.GetTx(ctx, m.db) + + query := `SELECT id, name, version, format_type, is_deterministic, dek_id, created_at, deleted_at + FROM tokenization_keys + WHERE name = ? AND version = ? AND deleted_at IS NULL` + + var key tokenizationDomain.TokenizationKey + var id, dekID []byte + var formatType string + + err := querier.QueryRowContext(ctx, query, name, version).Scan( + &id, + &key.Name, + &key.Version, + &formatType, + &key.IsDeterministic, + &dekID, + &key.CreatedAt, + &key.DeletedAt, + ) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, tokenizationDomain.ErrTokenizationKeyNotFound + } + return nil, apperrors.Wrap(err, "failed to get tokenization key by name and version") + } + + if err := key.ID.UnmarshalBinary(id); err != nil { + return nil, apperrors.Wrap(err, "failed to unmarshal tokenization key id") + } + + if err := key.DekID.UnmarshalBinary(dekID); err != nil { + return nil, apperrors.Wrap(err, "failed to unmarshal dek id") + } + + key.FormatType = tokenizationDomain.FormatType(formatType) + return &key, nil +} + +// NewMySQLTokenizationKeyRepository creates a new MySQL tokenization key repository instance. +func NewMySQLTokenizationKeyRepository(db *sql.DB) *MySQLTokenizationKeyRepository { + return &MySQLTokenizationKeyRepository{db: db} +} + +// MySQLTokenRepository implements token persistence for MySQL databases. +type MySQLTokenRepository struct { + db *sql.DB +} + +// Create inserts a new token mapping into the MySQL database. 
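+// Metadata is JSON-encoded when present; a nil map is inserted as SQL NULL,
+// which is why the read paths only unmarshal a non-empty metadata column.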
+func (m *MySQLTokenRepository) Create( + ctx context.Context, + token *tokenizationDomain.Token, +) error { + querier := database.GetTx(ctx, m.db) + + // Convert metadata to JSON + var metadataJSON []byte + var err error + if token.Metadata != nil { + metadataJSON, err = json.Marshal(token.Metadata) + if err != nil { + return apperrors.Wrap(err, "failed to marshal metadata") + } + } + + query := `INSERT INTO tokenization_tokens + (id, tokenization_key_id, token, value_hash, ciphertext, nonce, metadata, created_at, expires_at, revoked_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)` + + id, err := token.ID.MarshalBinary() + if err != nil { + return apperrors.Wrap(err, "failed to marshal token id") + } + + keyID, err := token.TokenizationKeyID.MarshalBinary() + if err != nil { + return apperrors.Wrap(err, "failed to marshal tokenization key id") + } + + _, err = querier.ExecContext( + ctx, + query, + id, + keyID, + token.Token, + token.ValueHash, + token.Ciphertext, + token.Nonce, + metadataJSON, + token.CreatedAt, + token.ExpiresAt, + token.RevokedAt, + ) + if err != nil { + return apperrors.Wrap(err, "failed to create token") + } + return nil +} + +// GetByToken retrieves a token mapping by its token string. +func (m *MySQLTokenRepository) GetByToken( + ctx context.Context, + tokenStr string, +) (*tokenizationDomain.Token, error) { + querier := database.GetTx(ctx, m.db) + + query := `SELECT id, tokenization_key_id, token, value_hash, ciphertext, nonce, metadata, created_at, expires_at, revoked_at + FROM tokenization_tokens + WHERE token = ?` + + var token tokenizationDomain.Token + var id, keyID []byte + var metadataJSON []byte + + err := querier.QueryRowContext(ctx, query, tokenStr).Scan( + &id, + &keyID, + &token.Token, + &token.ValueHash, + &token.Ciphertext, + &token.Nonce, + &metadataJSON, + &token.CreatedAt, + &token.ExpiresAt, + &token.RevokedAt, + ) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, tokenizationDomain.ErrTokenNotFound + } + return nil, apperrors.Wrap(err, "failed to get token by token string") + } + + if err := token.ID.UnmarshalBinary(id); err != nil { + return nil, apperrors.Wrap(err, "failed to unmarshal token id") + } + + if err := token.TokenizationKeyID.UnmarshalBinary(keyID); err != nil { + return nil, apperrors.Wrap(err, "failed to unmarshal tokenization key id") + } + + // Parse metadata if present + if len(metadataJSON) > 0 { + if err := json.Unmarshal(metadataJSON, &token.Metadata); err != nil { + return nil, apperrors.Wrap(err, "failed to unmarshal metadata") + } + } + + return &token, nil +} + +// GetByValueHash retrieves a token by its value hash (for deterministic mode). +func (m *MySQLTokenRepository) GetByValueHash( + ctx context.Context, + keyID uuid.UUID, + valueHash string, +) (*tokenizationDomain.Token, error) { + querier := database.GetTx(ctx, m.db) + + query := `SELECT id, tokenization_key_id, token, value_hash, ciphertext, nonce, metadata, created_at, expires_at, revoked_at + FROM tokenization_tokens + WHERE tokenization_key_id = ? 
AND value_hash = ?` + + keyIDBinary, err := keyID.MarshalBinary() + if err != nil { + return nil, apperrors.Wrap(err, "failed to marshal tokenization key id") + } + + var token tokenizationDomain.Token + var id, tokenKeyID []byte + var metadataJSON []byte + + err = querier.QueryRowContext(ctx, query, keyIDBinary, valueHash).Scan( + &id, + &tokenKeyID, + &token.Token, + &token.ValueHash, + &token.Ciphertext, + &token.Nonce, + &metadataJSON, + &token.CreatedAt, + &token.ExpiresAt, + &token.RevokedAt, + ) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, tokenizationDomain.ErrTokenNotFound + } + return nil, apperrors.Wrap(err, "failed to get token by value hash") + } + + if err := token.ID.UnmarshalBinary(id); err != nil { + return nil, apperrors.Wrap(err, "failed to unmarshal token id") + } + + if err := token.TokenizationKeyID.UnmarshalBinary(tokenKeyID); err != nil { + return nil, apperrors.Wrap(err, "failed to unmarshal tokenization key id") + } + + // Parse metadata if present + if len(metadataJSON) > 0 { + if err := json.Unmarshal(metadataJSON, &token.Metadata); err != nil { + return nil, apperrors.Wrap(err, "failed to unmarshal metadata") + } + } + + return &token, nil +} + +// Revoke marks a token as revoked by setting its revoked_at timestamp. +func (m *MySQLTokenRepository) Revoke(ctx context.Context, tokenStr string) error { + querier := database.GetTx(ctx, m.db) + + query := `UPDATE tokenization_tokens SET revoked_at = NOW() WHERE token = ?` + + result, err := querier.ExecContext(ctx, query, tokenStr) + if err != nil { + return apperrors.Wrap(err, "failed to revoke token") + } + + rowsAffected, err := result.RowsAffected() + if err != nil { + return apperrors.Wrap(err, "failed to get rows affected") + } + + if rowsAffected == 0 { + return tokenizationDomain.ErrTokenNotFound + } + + return nil +} + +// DeleteExpired deletes tokens that expired before the specified timestamp. +// Returns the number of deleted tokens. Uses transaction support via database.GetTx(). +// All timestamps are expected in UTC. +func (m *MySQLTokenRepository) DeleteExpired(ctx context.Context, olderThan time.Time) (int64, error) { + if olderThan.IsZero() { + return 0, apperrors.New("olderThan timestamp cannot be zero") + } + + querier := database.GetTx(ctx, m.db) + + query := `DELETE FROM tokenization_tokens WHERE expires_at < ?` + + result, err := querier.ExecContext(ctx, query, olderThan) + if err != nil { + return 0, apperrors.Wrap(err, "failed to delete expired tokens") + } + + rowsAffected, err := result.RowsAffected() + if err != nil { + return 0, apperrors.Wrap(err, "failed to get rows affected") + } + + return rowsAffected, nil +} + +// CountExpired counts tokens that expired before the specified timestamp without deleting them. +// Returns the count of matching tokens. Uses transaction support via database.GetTx(). +// All timestamps are expected in UTC. +func (m *MySQLTokenRepository) CountExpired(ctx context.Context, olderThan time.Time) (int64, error) { + if olderThan.IsZero() { + return 0, apperrors.New("olderThan timestamp cannot be zero") + } + + querier := database.GetTx(ctx, m.db) + + query := `SELECT COUNT(*) FROM tokenization_tokens WHERE expires_at < ?` + + var count int64 + err := querier.QueryRowContext(ctx, query, olderThan).Scan(&count) + if err != nil { + return 0, apperrors.Wrap(err, "failed to count expired tokens") + } + + return count, nil +} + +// NewMySQLTokenRepository creates a new MySQL token repository instance. 
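+//
+// Illustrative wiring, assuming an already-opened *sql.DB (the actual wiring
+// lives in internal/app/di.go):
+//
+//	keyRepo := repository.NewMySQLTokenizationKeyRepository(db)
+//	tokenRepo := repository.NewMySQLTokenRepository(db)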
+func NewMySQLTokenRepository(db *sql.DB) *MySQLTokenRepository { + return &MySQLTokenRepository{db: db} +} diff --git a/internal/tokenization/repository/mysql_repository_test.go b/internal/tokenization/repository/mysql_repository_test.go new file mode 100644 index 0000000..6101dca --- /dev/null +++ b/internal/tokenization/repository/mysql_repository_test.go @@ -0,0 +1,441 @@ +package repository + +import ( + "context" + "database/sql" + "testing" + "time" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + cryptoDomain "github.com/allisson/secrets/internal/crypto/domain" + cryptoRepository "github.com/allisson/secrets/internal/crypto/repository" + "github.com/allisson/secrets/internal/testutil" + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" +) + +// createKekAndDekMySQL creates a KEK and DEK for MySQL testing and returns their IDs +func createKekAndDekMySQL(t *testing.T, db *sql.DB) (kekID uuid.UUID, dekID uuid.UUID) { + t.Helper() + + ctx := context.Background() + + // Create KEK + kekID = uuid.Must(uuid.NewV7()) + kekRepo := cryptoRepository.NewMySQLKekRepository(db) + kek := &cryptoDomain.Kek{ + ID: kekID, + MasterKeyID: "master-key-1", + Algorithm: cryptoDomain.AESGCM, + EncryptedKey: []byte("encrypted-kek-data"), + Nonce: []byte("kek-nonce-12345"), + Version: 1, + CreatedAt: time.Now().UTC(), + } + err := kekRepo.Create(ctx, kek) + require.NoError(t, err) + + // Create DEK + dekID = uuid.Must(uuid.NewV7()) + dekRepo := cryptoRepository.NewMySQLDekRepository(db) + dek := &cryptoDomain.Dek{ + ID: dekID, + KekID: kekID, + Algorithm: cryptoDomain.AESGCM, + EncryptedKey: []byte("encrypted-dek-data"), + Nonce: []byte("dek-nonce-12345"), + CreatedAt: time.Now().UTC(), + } + err = dekRepo.Create(ctx, dek) + require.NoError(t, err) + + return kekID, dekID +} + +// createTokenizationKeyMySQL creates a tokenization key for MySQL testing and returns its ID +func createTokenizationKeyMySQL(t *testing.T, db *sql.DB) uuid.UUID { + t.Helper() + + ctx := context.Background() + _, dekID := createKekAndDekMySQL(t, db) + + keyRepo := NewMySQLTokenizationKeyRepository(db) + keyID := uuid.Must(uuid.NewV7()) + tokKey := &tokenizationDomain.TokenizationKey{ + ID: keyID, + Name: "test-key", + Version: 1, + FormatType: tokenizationDomain.FormatUUID, + IsDeterministic: false, + DekID: dekID, + CreatedAt: time.Now().UTC(), + DeletedAt: nil, + } + err := keyRepo.Create(ctx, tokKey) + require.NoError(t, err) + + return keyID +} + +func TestNewMySQLTokenizationKeyRepository(t *testing.T) { + db := testutil.SetupMySQLDB(t) + defer testutil.TeardownDB(t, db) + + repo := NewMySQLTokenizationKeyRepository(db) + assert.NotNil(t, repo) + assert.IsType(t, &MySQLTokenizationKeyRepository{}, repo) +} + +func TestMySQLTokenizationKeyRepository_Create(t *testing.T) { + db := testutil.SetupMySQLDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupMySQLDB(t, db) + + repo := NewMySQLTokenizationKeyRepository(db) + ctx := context.Background() + + // Create DEK dependency + _, dekID := createKekAndDekMySQL(t, db) + + key := &tokenizationDomain.TokenizationKey{ + ID: uuid.Must(uuid.NewV7()), + Name: "test-key", + Version: 1, + FormatType: tokenizationDomain.FormatUUID, + IsDeterministic: false, + DekID: dekID, + CreatedAt: time.Now().UTC(), + DeletedAt: nil, + } + + err := repo.Create(ctx, key) + require.NoError(t, err) + + // Verify by fetching + retrieved, err := repo.GetByName(ctx, key.Name) + require.NoError(t, err) + 
assert.Equal(t, key.ID, retrieved.ID) + assert.Equal(t, key.Name, retrieved.Name) + assert.Equal(t, key.Version, retrieved.Version) + assert.Equal(t, key.FormatType, retrieved.FormatType) + assert.Equal(t, key.IsDeterministic, retrieved.IsDeterministic) +} + +func TestMySQLTokenizationKeyRepository_GetByName(t *testing.T) { + db := testutil.SetupMySQLDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupMySQLDB(t, db) + + repo := NewMySQLTokenizationKeyRepository(db) + ctx := context.Background() + + // Create DEK dependency + _, dekID := createKekAndDekMySQL(t, db) + + // Create first version + key1 := &tokenizationDomain.TokenizationKey{ + ID: uuid.Must(uuid.NewV7()), + Name: "test-key", + Version: 1, + FormatType: tokenizationDomain.FormatNumeric, + IsDeterministic: true, + DekID: dekID, + CreatedAt: time.Now().UTC(), + DeletedAt: nil, + } + err := repo.Create(ctx, key1) + require.NoError(t, err) + + // Create second version (newer) + time.Sleep(time.Millisecond) + key2 := &tokenizationDomain.TokenizationKey{ + ID: uuid.Must(uuid.NewV7()), + Name: "test-key", + Version: 2, + FormatType: tokenizationDomain.FormatNumeric, + IsDeterministic: true, + DekID: dekID, + CreatedAt: time.Now().UTC(), + DeletedAt: nil, + } + err = repo.Create(ctx, key2) + require.NoError(t, err) + + // GetByName should return the latest (highest version) + retrieved, err := repo.GetByName(ctx, "test-key") + require.NoError(t, err) + assert.Equal(t, key2.ID, retrieved.ID) + assert.Equal(t, uint(2), retrieved.Version) +} + +func TestMySQLTokenizationKeyRepository_Delete(t *testing.T) { + db := testutil.SetupMySQLDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupMySQLDB(t, db) + + repo := NewMySQLTokenizationKeyRepository(db) + ctx := context.Background() + + // Create DEK dependency + _, dekID := createKekAndDekMySQL(t, db) + + key := &tokenizationDomain.TokenizationKey{ + ID: uuid.Must(uuid.NewV7()), + Name: "delete-test", + Version: 1, + FormatType: tokenizationDomain.FormatUUID, + IsDeterministic: false, + DekID: dekID, + CreatedAt: time.Now().UTC(), + DeletedAt: nil, + } + + err := repo.Create(ctx, key) + require.NoError(t, err) + + // Delete the key + err = repo.Delete(ctx, key.ID) + require.NoError(t, err) + + // Verify soft delete - key should not be found + _, err = repo.Get(ctx, key.ID) + assert.Error(t, err) + assert.Contains(t, err.Error(), "tokenization key not found") +} + +func TestNewMySQLTokenRepository(t *testing.T) { + db := testutil.SetupMySQLDB(t) + defer testutil.TeardownDB(t, db) + + repo := NewMySQLTokenRepository(db) + assert.NotNil(t, repo) + assert.IsType(t, &MySQLTokenRepository{}, repo) +} + +func TestMySQLTokenRepository_Create(t *testing.T) { + db := testutil.SetupMySQLDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupMySQLDB(t, db) + + tokenRepo := NewMySQLTokenRepository(db) + ctx := context.Background() + + // Create tokenization key dependency + keyID := createTokenizationKeyMySQL(t, db) + + valueHash := "test-hash" + token := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_test123", + ValueHash: &valueHash, + Ciphertext: []byte("encrypted"), + Nonce: []byte("nonce123"), + Metadata: map[string]any{"key": "value"}, + CreatedAt: time.Now().UTC(), + ExpiresAt: nil, + RevokedAt: nil, + } + + err := tokenRepo.Create(ctx, token) + require.NoError(t, err) + + // Verify by fetching + retrieved, err := tokenRepo.GetByToken(ctx, token.Token) + require.NoError(t, err) + assert.Equal(t, token.ID, 
retrieved.ID) + assert.Equal(t, token.Token, retrieved.Token) + assert.Equal(t, token.Ciphertext, retrieved.Ciphertext) +} + +func TestMySQLTokenRepository_GetByValueHash(t *testing.T) { + db := testutil.SetupMySQLDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupMySQLDB(t, db) + + repo := NewMySQLTokenRepository(db) + ctx := context.Background() + + keyID := createTokenizationKeyMySQL(t, db) + valueHash := "deterministic-hash" + + token := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_deterministic", + ValueHash: &valueHash, + Ciphertext: []byte("encrypted"), + Nonce: []byte("nonce"), + Metadata: nil, + CreatedAt: time.Now().UTC(), + ExpiresAt: nil, + RevokedAt: nil, + } + + err := repo.Create(ctx, token) + require.NoError(t, err) + + // Retrieve by value hash + retrieved, err := repo.GetByValueHash(ctx, keyID, valueHash) + require.NoError(t, err) + assert.Equal(t, token.ID, retrieved.ID) + assert.Equal(t, token.Token, retrieved.Token) +} + +func TestMySQLTokenRepository_Revoke(t *testing.T) { + db := testutil.SetupMySQLDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupMySQLDB(t, db) + + repo := NewMySQLTokenRepository(db) + ctx := context.Background() + + keyID := createTokenizationKeyMySQL(t, db) + + token := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_revoke_test", + ValueHash: nil, + Ciphertext: []byte("encrypted"), + Nonce: []byte("nonce"), + Metadata: nil, + CreatedAt: time.Now().UTC(), + ExpiresAt: nil, + RevokedAt: nil, + } + + err := repo.Create(ctx, token) + require.NoError(t, err) + + // Revoke the token + err = repo.Revoke(ctx, token.Token) + require.NoError(t, err) + + // Verify revocation + retrieved, err := repo.GetByToken(ctx, token.Token) + require.NoError(t, err) + assert.NotNil(t, retrieved.RevokedAt) +} + +func TestMySQLTokenRepository_CountExpired(t *testing.T) { + db := testutil.SetupMySQLDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupMySQLDB(t, db) + + repo := NewMySQLTokenRepository(db) + ctx := context.Background() + + keyID := createTokenizationKeyMySQL(t, db) + pastTime := time.Now().UTC().Add(-2 * time.Hour) + + // Create expired token + expiredToken := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_expired", + ValueHash: nil, + Ciphertext: []byte("encrypted"), + Nonce: []byte("nonce"), + Metadata: nil, + CreatedAt: time.Now().UTC(), + ExpiresAt: &pastTime, + RevokedAt: nil, + } + err := repo.Create(ctx, expiredToken) + require.NoError(t, err) + + // Create non-expired token + futureTime := time.Now().UTC().Add(2 * time.Hour) + validToken := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_valid", + ValueHash: nil, + Ciphertext: []byte("encrypted"), + Nonce: []byte("nonce"), + Metadata: nil, + CreatedAt: time.Now().UTC(), + ExpiresAt: &futureTime, + RevokedAt: nil, + } + err = repo.Create(ctx, validToken) + require.NoError(t, err) + + // Count expired tokens (check before current time minus 1 hour) + beforeTimestamp := time.Now().UTC().Add(-1 * time.Hour) + count, err := repo.CountExpired(ctx, beforeTimestamp) + require.NoError(t, err) + assert.Equal(t, int64(1), count) +} + +func TestMySQLTokenRepository_DeleteExpired(t *testing.T) { + db := testutil.SetupMySQLDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupMySQLDB(t, db) + + repo := NewMySQLTokenRepository(db) + ctx := 
context.Background() + + keyID := createTokenizationKeyMySQL(t, db) + pastTime := time.Now().UTC().Add(-2 * time.Hour) + + // Create expired token + expiredToken := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_to_delete", + ValueHash: nil, + Ciphertext: []byte("encrypted"), + Nonce: []byte("nonce"), + Metadata: nil, + CreatedAt: time.Now().UTC(), + ExpiresAt: &pastTime, + RevokedAt: nil, + } + err := repo.Create(ctx, expiredToken) + require.NoError(t, err) + + // Delete expired tokens (before current time minus 1 hour) + beforeTimestamp := time.Now().UTC().Add(-1 * time.Hour) + count, err := repo.DeleteExpired(ctx, beforeTimestamp) + require.NoError(t, err) + assert.Equal(t, int64(1), count) + + // Verify deletion + _, err = repo.GetByToken(ctx, expiredToken.Token) + assert.Error(t, err) + assert.Contains(t, err.Error(), "token not found") +} + +func TestMySQLTokenRepository_CountExpired_ZeroTime(t *testing.T) { + db := testutil.SetupMySQLDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupMySQLDB(t, db) + + repo := NewMySQLTokenRepository(db) + ctx := context.Background() + + // Test with zero time + count, err := repo.CountExpired(ctx, time.Time{}) + assert.Error(t, err) + assert.Equal(t, int64(0), count) + assert.Contains(t, err.Error(), "olderThan timestamp cannot be zero") +} + +func TestMySQLTokenRepository_DeleteExpired_ZeroTime(t *testing.T) { + db := testutil.SetupMySQLDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupMySQLDB(t, db) + + repo := NewMySQLTokenRepository(db) + ctx := context.Background() + + // Test with zero time + count, err := repo.DeleteExpired(ctx, time.Time{}) + assert.Error(t, err) + assert.Equal(t, int64(0), count) + assert.Contains(t, err.Error(), "olderThan timestamp cannot be zero") +} diff --git a/internal/tokenization/repository/postgresql_repository.go b/internal/tokenization/repository/postgresql_repository.go new file mode 100644 index 0000000..d42f0c2 --- /dev/null +++ b/internal/tokenization/repository/postgresql_repository.go @@ -0,0 +1,384 @@ +// Package repository implements data persistence for tokenization key and token management. +// Supports versioning, soft deletion, deterministic token lookups, and dual database support (PostgreSQL and MySQL). +package repository + +import ( + "context" + "database/sql" + "encoding/json" + "errors" + "time" + + "github.com/google/uuid" + + "github.com/allisson/secrets/internal/database" + apperrors "github.com/allisson/secrets/internal/errors" + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" +) + +// PostgreSQLTokenizationKeyRepository implements tokenization key persistence for PostgreSQL databases. +type PostgreSQLTokenizationKeyRepository struct { + db *sql.DB +} + +// Create inserts a new tokenization key into the PostgreSQL database. 
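+// Unlike the MySQL variant, uuid.UUID values are bound directly, on the
+// assumption that the PostgreSQL schema uses native uuid columns.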
+func (p *PostgreSQLTokenizationKeyRepository) Create( + ctx context.Context, + key *tokenizationDomain.TokenizationKey, +) error { + querier := database.GetTx(ctx, p.db) + + query := `INSERT INTO tokenization_keys (id, name, version, format_type, is_deterministic, dek_id, created_at, deleted_at) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8)` + + _, err := querier.ExecContext( + ctx, + query, + key.ID, + key.Name, + key.Version, + key.FormatType, + key.IsDeterministic, + key.DekID, + key.CreatedAt, + key.DeletedAt, + ) + if err != nil { + return apperrors.Wrap(err, "failed to create tokenization key") + } + return nil +} + +// Delete soft-deletes a tokenization key by setting its deleted_at timestamp. +func (p *PostgreSQLTokenizationKeyRepository) Delete(ctx context.Context, keyID uuid.UUID) error { + querier := database.GetTx(ctx, p.db) + + query := `UPDATE tokenization_keys SET deleted_at = NOW() WHERE id = $1` + + _, err := querier.ExecContext(ctx, query, keyID) + if err != nil { + return apperrors.Wrap(err, "failed to delete tokenization key") + } + + return nil +} + +// GetByName retrieves the latest non-deleted version of a tokenization key by name. +func (p *PostgreSQLTokenizationKeyRepository) GetByName( + ctx context.Context, + name string, +) (*tokenizationDomain.TokenizationKey, error) { + querier := database.GetTx(ctx, p.db) + + query := `SELECT id, name, version, format_type, is_deterministic, dek_id, created_at, deleted_at + FROM tokenization_keys + WHERE name = $1 AND deleted_at IS NULL + ORDER BY version DESC + LIMIT 1` + + var key tokenizationDomain.TokenizationKey + var formatType string + + err := querier.QueryRowContext(ctx, query, name).Scan( + &key.ID, + &key.Name, + &key.Version, + &formatType, + &key.IsDeterministic, + &key.DekID, + &key.CreatedAt, + &key.DeletedAt, + ) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, tokenizationDomain.ErrTokenizationKeyNotFound + } + return nil, apperrors.Wrap(err, "failed to get tokenization key by name") + } + + key.FormatType = tokenizationDomain.FormatType(formatType) + return &key, nil +} + +// Get retrieves a tokenization key by its ID. +func (p *PostgreSQLTokenizationKeyRepository) Get( + ctx context.Context, + keyID uuid.UUID, +) (*tokenizationDomain.TokenizationKey, error) { + querier := database.GetTx(ctx, p.db) + + query := `SELECT id, name, version, format_type, is_deterministic, dek_id, created_at, deleted_at + FROM tokenization_keys + WHERE id = $1 AND deleted_at IS NULL` + + var key tokenizationDomain.TokenizationKey + var formatType string + + err := querier.QueryRowContext(ctx, query, keyID).Scan( + &key.ID, + &key.Name, + &key.Version, + &formatType, + &key.IsDeterministic, + &key.DekID, + &key.CreatedAt, + &key.DeletedAt, + ) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, tokenizationDomain.ErrTokenizationKeyNotFound + } + return nil, apperrors.Wrap(err, "failed to get tokenization key by id") + } + + key.FormatType = tokenizationDomain.FormatType(formatType) + return &key, nil +} + +// GetByNameAndVersion retrieves a specific version of a tokenization key by name and version. 
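+// Rotation inserts a new row per version rather than updating in place, so
+// pinned-version reads continue to resolve after a rotate bumps the latest
+// version.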
+func (p *PostgreSQLTokenizationKeyRepository) GetByNameAndVersion( + ctx context.Context, + name string, + version uint, +) (*tokenizationDomain.TokenizationKey, error) { + querier := database.GetTx(ctx, p.db) + + query := `SELECT id, name, version, format_type, is_deterministic, dek_id, created_at, deleted_at + FROM tokenization_keys + WHERE name = $1 AND version = $2 AND deleted_at IS NULL` + + var key tokenizationDomain.TokenizationKey + var formatType string + + err := querier.QueryRowContext(ctx, query, name, version).Scan( + &key.ID, + &key.Name, + &key.Version, + &formatType, + &key.IsDeterministic, + &key.DekID, + &key.CreatedAt, + &key.DeletedAt, + ) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, tokenizationDomain.ErrTokenizationKeyNotFound + } + return nil, apperrors.Wrap(err, "failed to get tokenization key by name and version") + } + + key.FormatType = tokenizationDomain.FormatType(formatType) + return &key, nil +} + +// NewPostgreSQLTokenizationKeyRepository creates a new PostgreSQL tokenization key repository instance. +func NewPostgreSQLTokenizationKeyRepository(db *sql.DB) *PostgreSQLTokenizationKeyRepository { + return &PostgreSQLTokenizationKeyRepository{db: db} +} + +// PostgreSQLTokenRepository implements token persistence for PostgreSQL databases. +type PostgreSQLTokenRepository struct { + db *sql.DB +} + +// Create inserts a new token mapping into the PostgreSQL database. +func (p *PostgreSQLTokenRepository) Create( + ctx context.Context, + token *tokenizationDomain.Token, +) error { + querier := database.GetTx(ctx, p.db) + + // Convert metadata to JSONB + var metadataJSON []byte + var err error + if token.Metadata != nil { + metadataJSON, err = json.Marshal(token.Metadata) + if err != nil { + return apperrors.Wrap(err, "failed to marshal metadata") + } + } + + query := `INSERT INTO tokenization_tokens + (id, tokenization_key_id, token, value_hash, ciphertext, nonce, metadata, created_at, expires_at, revoked_at) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)` + + _, err = querier.ExecContext( + ctx, + query, + token.ID, + token.TokenizationKeyID, + token.Token, + token.ValueHash, + token.Ciphertext, + token.Nonce, + metadataJSON, + token.CreatedAt, + token.ExpiresAt, + token.RevokedAt, + ) + if err != nil { + return apperrors.Wrap(err, "failed to create token") + } + return nil +} + +// GetByToken retrieves a token mapping by its token string. 
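+// Returns tokenizationDomain.ErrTokenNotFound when no row matches the token string.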
+func (p *PostgreSQLTokenRepository) GetByToken( + ctx context.Context, + tokenStr string, +) (*tokenizationDomain.Token, error) { + querier := database.GetTx(ctx, p.db) + + query := `SELECT id, tokenization_key_id, token, value_hash, ciphertext, nonce, metadata, created_at, expires_at, revoked_at + FROM tokenization_tokens + WHERE token = $1` + + var token tokenizationDomain.Token + var metadataJSON []byte + + err := querier.QueryRowContext(ctx, query, tokenStr).Scan( + &token.ID, + &token.TokenizationKeyID, + &token.Token, + &token.ValueHash, + &token.Ciphertext, + &token.Nonce, + &metadataJSON, + &token.CreatedAt, + &token.ExpiresAt, + &token.RevokedAt, + ) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, tokenizationDomain.ErrTokenNotFound + } + return nil, apperrors.Wrap(err, "failed to get token by token string") + } + + // Parse metadata if present + if len(metadataJSON) > 0 { + if err := json.Unmarshal(metadataJSON, &token.Metadata); err != nil { + return nil, apperrors.Wrap(err, "failed to unmarshal metadata") + } + } + + return &token, nil +} + +// GetByValueHash retrieves a token by its value hash (for deterministic mode). +func (p *PostgreSQLTokenRepository) GetByValueHash( + ctx context.Context, + keyID uuid.UUID, + valueHash string, +) (*tokenizationDomain.Token, error) { + querier := database.GetTx(ctx, p.db) + + query := `SELECT id, tokenization_key_id, token, value_hash, ciphertext, nonce, metadata, created_at, expires_at, revoked_at + FROM tokenization_tokens + WHERE tokenization_key_id = $1 AND value_hash = $2` + + var token tokenizationDomain.Token + var metadataJSON []byte + + err := querier.QueryRowContext(ctx, query, keyID, valueHash).Scan( + &token.ID, + &token.TokenizationKeyID, + &token.Token, + &token.ValueHash, + &token.Ciphertext, + &token.Nonce, + &metadataJSON, + &token.CreatedAt, + &token.ExpiresAt, + &token.RevokedAt, + ) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, tokenizationDomain.ErrTokenNotFound + } + return nil, apperrors.Wrap(err, "failed to get token by value hash") + } + + // Parse metadata if present + if len(metadataJSON) > 0 { + if err := json.Unmarshal(metadataJSON, &token.Metadata); err != nil { + return nil, apperrors.Wrap(err, "failed to unmarshal metadata") + } + } + + return &token, nil +} + +// Revoke marks a token as revoked by setting its revoked_at timestamp. +func (p *PostgreSQLTokenRepository) Revoke(ctx context.Context, tokenStr string) error { + querier := database.GetTx(ctx, p.db) + + query := `UPDATE tokenization_tokens SET revoked_at = NOW() WHERE token = $1` + + result, err := querier.ExecContext(ctx, query, tokenStr) + if err != nil { + return apperrors.Wrap(err, "failed to revoke token") + } + + rowsAffected, err := result.RowsAffected() + if err != nil { + return apperrors.Wrap(err, "failed to get rows affected") + } + + if rowsAffected == 0 { + return tokenizationDomain.ErrTokenNotFound + } + + return nil +} + +// DeleteExpired deletes tokens that expired before the specified timestamp. +// Returns the number of deleted tokens. Uses transaction support via database.GetTx(). +// All timestamps are expected in UTC. 
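+//
+// Illustrative call (the 30-day retention window is an example, not a project default):
+//
+//	count, err := repo.DeleteExpired(ctx, time.Now().UTC().AddDate(0, 0, -30))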
+func (p *PostgreSQLTokenRepository) DeleteExpired(ctx context.Context, olderThan time.Time) (int64, error) { + if olderThan.IsZero() { + return 0, apperrors.New("olderThan timestamp cannot be zero") + } + + querier := database.GetTx(ctx, p.db) + + query := `DELETE FROM tokenization_tokens WHERE expires_at < $1` + + result, err := querier.ExecContext(ctx, query, olderThan) + if err != nil { + return 0, apperrors.Wrap(err, "failed to delete expired tokens") + } + + rowsAffected, err := result.RowsAffected() + if err != nil { + return 0, apperrors.Wrap(err, "failed to get rows affected") + } + + return rowsAffected, nil +} + +// CountExpired counts tokens that expired before the specified timestamp without deleting them. +// Returns the count of matching tokens. Uses transaction support via database.GetTx(). +// All timestamps are expected in UTC. +func (p *PostgreSQLTokenRepository) CountExpired(ctx context.Context, olderThan time.Time) (int64, error) { + if olderThan.IsZero() { + return 0, apperrors.New("olderThan timestamp cannot be zero") + } + + querier := database.GetTx(ctx, p.db) + + query := `SELECT COUNT(*) FROM tokenization_tokens WHERE expires_at < $1` + + var count int64 + err := querier.QueryRowContext(ctx, query, olderThan).Scan(&count) + if err != nil { + return 0, apperrors.Wrap(err, "failed to count expired tokens") + } + + return count, nil +} + +// NewPostgreSQLTokenRepository creates a new PostgreSQL token repository instance. +func NewPostgreSQLTokenRepository(db *sql.DB) *PostgreSQLTokenRepository { + return &PostgreSQLTokenRepository{db: db} +} diff --git a/internal/tokenization/repository/postgresql_repository_test.go b/internal/tokenization/repository/postgresql_repository_test.go new file mode 100644 index 0000000..e072588 --- /dev/null +++ b/internal/tokenization/repository/postgresql_repository_test.go @@ -0,0 +1,441 @@ +package repository + +import ( + "context" + "database/sql" + "testing" + "time" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + cryptoDomain "github.com/allisson/secrets/internal/crypto/domain" + cryptoRepository "github.com/allisson/secrets/internal/crypto/repository" + "github.com/allisson/secrets/internal/testutil" + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" +) + +// createKekAndDek creates a KEK and DEK for testing and returns their IDs +func createKekAndDek(t *testing.T, db *sql.DB) (kekID uuid.UUID, dekID uuid.UUID) { + t.Helper() + + ctx := context.Background() + + // Create KEK + kekID = uuid.Must(uuid.NewV7()) + kekRepo := cryptoRepository.NewPostgreSQLKekRepository(db) + kek := &cryptoDomain.Kek{ + ID: kekID, + MasterKeyID: "master-key-1", + Algorithm: cryptoDomain.AESGCM, + EncryptedKey: []byte("encrypted-kek-data"), + Nonce: []byte("kek-nonce-12345"), + Version: 1, + CreatedAt: time.Now().UTC(), + } + err := kekRepo.Create(ctx, kek) + require.NoError(t, err) + + // Create DEK + dekID = uuid.Must(uuid.NewV7()) + dekRepo := cryptoRepository.NewPostgreSQLDekRepository(db) + dek := &cryptoDomain.Dek{ + ID: dekID, + KekID: kekID, + Algorithm: cryptoDomain.AESGCM, + EncryptedKey: []byte("encrypted-dek-data"), + Nonce: []byte("dek-nonce-12345"), + CreatedAt: time.Now().UTC(), + } + err = dekRepo.Create(ctx, dek) + require.NoError(t, err) + + return kekID, dekID +} + +// createTokenizationKey creates a tokenization key for testing and returns its ID +func createTokenizationKey(t *testing.T, db *sql.DB) uuid.UUID { + t.Helper() + + ctx := 
context.Background() + _, dekID := createKekAndDek(t, db) + + keyRepo := NewPostgreSQLTokenizationKeyRepository(db) + keyID := uuid.Must(uuid.NewV7()) + tokKey := &tokenizationDomain.TokenizationKey{ + ID: keyID, + Name: "test-key", + Version: 1, + FormatType: tokenizationDomain.FormatUUID, + IsDeterministic: false, + DekID: dekID, + CreatedAt: time.Now().UTC(), + DeletedAt: nil, + } + err := keyRepo.Create(ctx, tokKey) + require.NoError(t, err) + + return keyID +} + +func TestNewPostgreSQLTokenizationKeyRepository(t *testing.T) { + db := testutil.SetupPostgresDB(t) + defer testutil.TeardownDB(t, db) + + repo := NewPostgreSQLTokenizationKeyRepository(db) + assert.NotNil(t, repo) + assert.IsType(t, &PostgreSQLTokenizationKeyRepository{}, repo) +} + +func TestPostgreSQLTokenizationKeyRepository_Create(t *testing.T) { + db := testutil.SetupPostgresDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupPostgresDB(t, db) + + repo := NewPostgreSQLTokenizationKeyRepository(db) + ctx := context.Background() + + // Create DEK dependency + _, dekID := createKekAndDek(t, db) + + key := &tokenizationDomain.TokenizationKey{ + ID: uuid.Must(uuid.NewV7()), + Name: "test-key", + Version: 1, + FormatType: tokenizationDomain.FormatUUID, + IsDeterministic: false, + DekID: dekID, + CreatedAt: time.Now().UTC(), + DeletedAt: nil, + } + + err := repo.Create(ctx, key) + require.NoError(t, err) + + // Verify by fetching + retrieved, err := repo.GetByName(ctx, key.Name) + require.NoError(t, err) + assert.Equal(t, key.ID, retrieved.ID) + assert.Equal(t, key.Name, retrieved.Name) + assert.Equal(t, key.Version, retrieved.Version) + assert.Equal(t, key.FormatType, retrieved.FormatType) + assert.Equal(t, key.IsDeterministic, retrieved.IsDeterministic) +} + +func TestPostgreSQLTokenizationKeyRepository_GetByName(t *testing.T) { + db := testutil.SetupPostgresDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupPostgresDB(t, db) + + repo := NewPostgreSQLTokenizationKeyRepository(db) + ctx := context.Background() + + // Create DEK dependency + _, dekID := createKekAndDek(t, db) + + // Create first version + key1 := &tokenizationDomain.TokenizationKey{ + ID: uuid.Must(uuid.NewV7()), + Name: "test-key", + Version: 1, + FormatType: tokenizationDomain.FormatNumeric, + IsDeterministic: true, + DekID: dekID, + CreatedAt: time.Now().UTC(), + DeletedAt: nil, + } + err := repo.Create(ctx, key1) + require.NoError(t, err) + + // Create second version (newer) + time.Sleep(time.Millisecond) + key2 := &tokenizationDomain.TokenizationKey{ + ID: uuid.Must(uuid.NewV7()), + Name: "test-key", + Version: 2, + FormatType: tokenizationDomain.FormatNumeric, + IsDeterministic: true, + DekID: dekID, + CreatedAt: time.Now().UTC(), + DeletedAt: nil, + } + err = repo.Create(ctx, key2) + require.NoError(t, err) + + // GetByName should return the latest (highest version) + retrieved, err := repo.GetByName(ctx, "test-key") + require.NoError(t, err) + assert.Equal(t, key2.ID, retrieved.ID) + assert.Equal(t, uint(2), retrieved.Version) +} + +func TestPostgreSQLTokenizationKeyRepository_Delete(t *testing.T) { + db := testutil.SetupPostgresDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupPostgresDB(t, db) + + repo := NewPostgreSQLTokenizationKeyRepository(db) + ctx := context.Background() + + // Create DEK dependency + _, dekID := createKekAndDek(t, db) + + key := &tokenizationDomain.TokenizationKey{ + ID: uuid.Must(uuid.NewV7()), + Name: "delete-test", + Version: 1, + FormatType: tokenizationDomain.FormatUUID, + 
IsDeterministic: false, + DekID: dekID, + CreatedAt: time.Now().UTC(), + DeletedAt: nil, + } + + err := repo.Create(ctx, key) + require.NoError(t, err) + + // Delete the key + err = repo.Delete(ctx, key.ID) + require.NoError(t, err) + + // Verify soft delete - key should not be found + _, err = repo.Get(ctx, key.ID) + assert.Error(t, err) + assert.Contains(t, err.Error(), "tokenization key not found") +} + +func TestNewPostgreSQLTokenRepository(t *testing.T) { + db := testutil.SetupPostgresDB(t) + defer testutil.TeardownDB(t, db) + + repo := NewPostgreSQLTokenRepository(db) + assert.NotNil(t, repo) + assert.IsType(t, &PostgreSQLTokenRepository{}, repo) +} + +func TestPostgreSQLTokenRepository_Create(t *testing.T) { + db := testutil.SetupPostgresDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupPostgresDB(t, db) + + tokenRepo := NewPostgreSQLTokenRepository(db) + ctx := context.Background() + + // Create tokenization key dependency + keyID := createTokenizationKey(t, db) + + valueHash := "test-hash" + token := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_test123", + ValueHash: &valueHash, + Ciphertext: []byte("encrypted"), + Nonce: []byte("nonce123"), + Metadata: map[string]any{"key": "value"}, + CreatedAt: time.Now().UTC(), + ExpiresAt: nil, + RevokedAt: nil, + } + + err := tokenRepo.Create(ctx, token) + require.NoError(t, err) + + // Verify by fetching + retrieved, err := tokenRepo.GetByToken(ctx, token.Token) + require.NoError(t, err) + assert.Equal(t, token.ID, retrieved.ID) + assert.Equal(t, token.Token, retrieved.Token) + assert.Equal(t, token.Ciphertext, retrieved.Ciphertext) +} + +func TestPostgreSQLTokenRepository_GetByValueHash(t *testing.T) { + db := testutil.SetupPostgresDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupPostgresDB(t, db) + + repo := NewPostgreSQLTokenRepository(db) + ctx := context.Background() + + keyID := createTokenizationKey(t, db) + valueHash := "deterministic-hash" + + token := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_deterministic", + ValueHash: &valueHash, + Ciphertext: []byte("encrypted"), + Nonce: []byte("nonce"), + Metadata: nil, + CreatedAt: time.Now().UTC(), + ExpiresAt: nil, + RevokedAt: nil, + } + + err := repo.Create(ctx, token) + require.NoError(t, err) + + // Retrieve by value hash + retrieved, err := repo.GetByValueHash(ctx, keyID, valueHash) + require.NoError(t, err) + assert.Equal(t, token.ID, retrieved.ID) + assert.Equal(t, token.Token, retrieved.Token) +} + +func TestPostgreSQLTokenRepository_Revoke(t *testing.T) { + db := testutil.SetupPostgresDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupPostgresDB(t, db) + + repo := NewPostgreSQLTokenRepository(db) + ctx := context.Background() + + keyID := createTokenizationKey(t, db) + + token := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_revoke_test", + ValueHash: nil, + Ciphertext: []byte("encrypted"), + Nonce: []byte("nonce"), + Metadata: nil, + CreatedAt: time.Now().UTC(), + ExpiresAt: nil, + RevokedAt: nil, + } + + err := repo.Create(ctx, token) + require.NoError(t, err) + + // Revoke the token + err = repo.Revoke(ctx, token.Token) + require.NoError(t, err) + + // Verify revocation + retrieved, err := repo.GetByToken(ctx, token.Token) + require.NoError(t, err) + assert.NotNil(t, retrieved.RevokedAt) +} + +func TestPostgreSQLTokenRepository_CountExpired(t *testing.T) { + db := 
testutil.SetupPostgresDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupPostgresDB(t, db) + + repo := NewPostgreSQLTokenRepository(db) + ctx := context.Background() + + keyID := createTokenizationKey(t, db) + pastTime := time.Now().UTC().Add(-2 * time.Hour) + + // Create expired token + expiredToken := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_expired", + ValueHash: nil, + Ciphertext: []byte("encrypted"), + Nonce: []byte("nonce"), + Metadata: nil, + CreatedAt: time.Now().UTC(), + ExpiresAt: &pastTime, + RevokedAt: nil, + } + err := repo.Create(ctx, expiredToken) + require.NoError(t, err) + + // Create non-expired token + futureTime := time.Now().UTC().Add(2 * time.Hour) + validToken := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_valid", + ValueHash: nil, + Ciphertext: []byte("encrypted"), + Nonce: []byte("nonce"), + Metadata: nil, + CreatedAt: time.Now().UTC(), + ExpiresAt: &futureTime, + RevokedAt: nil, + } + err = repo.Create(ctx, validToken) + require.NoError(t, err) + + // Count expired tokens (check before current time minus 1 hour) + beforeTimestamp := time.Now().UTC().Add(-1 * time.Hour) + count, err := repo.CountExpired(ctx, beforeTimestamp) + require.NoError(t, err) + assert.Equal(t, int64(1), count) +} + +func TestPostgreSQLTokenRepository_DeleteExpired(t *testing.T) { + db := testutil.SetupPostgresDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupPostgresDB(t, db) + + repo := NewPostgreSQLTokenRepository(db) + ctx := context.Background() + + keyID := createTokenizationKey(t, db) + pastTime := time.Now().UTC().Add(-2 * time.Hour) + + // Create expired token + expiredToken := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: keyID, + Token: "tok_to_delete", + ValueHash: nil, + Ciphertext: []byte("encrypted"), + Nonce: []byte("nonce"), + Metadata: nil, + CreatedAt: time.Now().UTC(), + ExpiresAt: &pastTime, + RevokedAt: nil, + } + err := repo.Create(ctx, expiredToken) + require.NoError(t, err) + + // Delete expired tokens (before current time minus 1 hour) + beforeTimestamp := time.Now().UTC().Add(-1 * time.Hour) + count, err := repo.DeleteExpired(ctx, beforeTimestamp) + require.NoError(t, err) + assert.Equal(t, int64(1), count) + + // Verify deletion + _, err = repo.GetByToken(ctx, expiredToken.Token) + assert.Error(t, err) + assert.Contains(t, err.Error(), "token not found") +} + +func TestPostgreSQLTokenRepository_CountExpired_ZeroTime(t *testing.T) { + db := testutil.SetupPostgresDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupPostgresDB(t, db) + + repo := NewPostgreSQLTokenRepository(db) + ctx := context.Background() + + // Test with zero time + count, err := repo.CountExpired(ctx, time.Time{}) + assert.Error(t, err) + assert.Equal(t, int64(0), count) + assert.Contains(t, err.Error(), "olderThan timestamp cannot be zero") +} + +func TestPostgreSQLTokenRepository_DeleteExpired_ZeroTime(t *testing.T) { + db := testutil.SetupPostgresDB(t) + defer testutil.TeardownDB(t, db) + defer testutil.CleanupPostgresDB(t, db) + + repo := NewPostgreSQLTokenRepository(db) + ctx := context.Background() + + // Test with zero time + count, err := repo.DeleteExpired(ctx, time.Time{}) + assert.Error(t, err) + assert.Equal(t, int64(0), count) + assert.Contains(t, err.Error(), "olderThan timestamp cannot be zero") +} diff --git a/internal/tokenization/service/alphanumeric_generator.go 
b/internal/tokenization/service/alphanumeric_generator.go new file mode 100644 index 0000000..4a980e7 --- /dev/null +++ b/internal/tokenization/service/alphanumeric_generator.go @@ -0,0 +1,62 @@ +package service + +import ( + "crypto/rand" + "errors" + "fmt" + "math/big" +) + +const alphanumericChars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" + +type alphanumericGenerator struct{} + +// NewAlphanumericGenerator creates a new alphanumeric token generator. Generates +// cryptographically secure random alphanumeric tokens using [A-Za-z0-9]. +func NewAlphanumericGenerator() TokenGenerator { + return &alphanumericGenerator{} +} + +// Generate creates a cryptographically secure random alphanumeric token of the specified length. +// Uses characters from [A-Za-z0-9]. Returns an error if length is less than 1 or greater than 255. +func (g *alphanumericGenerator) Generate(length int) (string, error) { + if length < 1 { + return "", errors.New("length must be at least 1") + } + if length > 255 { + return "", errors.New("length must not exceed 255") + } + + token := make([]byte, length) + charsLen := big.NewInt(int64(len(alphanumericChars))) + + for i := 0; i < length; i++ { + n, err := rand.Int(rand.Reader, charsLen) + if err != nil { + return "", fmt.Errorf("failed to generate random character: %w", err) + } + token[i] = alphanumericChars[n.Int64()] + } + + return string(token), nil +} + +// Validate checks if the token contains only alphanumeric characters [A-Za-z0-9]. +func (g *alphanumericGenerator) Validate(token string) error { + if len(token) == 0 { + return errors.New("token cannot be empty") + } + + for _, c := range token { + if !isAlphanumeric(c) { + return errors.New("token must contain only alphanumeric characters [A-Za-z0-9]") + } + } + + return nil +} + +// isAlphanumeric checks if a character is alphanumeric [A-Za-z0-9]. 
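+// Comparisons are against ASCII ranges only, so any non-ASCII rune returns false.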
+func isAlphanumeric(c rune) bool { + return (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9') +} diff --git a/internal/tokenization/service/alphanumeric_generator_test.go b/internal/tokenization/service/alphanumeric_generator_test.go new file mode 100644 index 0000000..15cd6bf --- /dev/null +++ b/internal/tokenization/service/alphanumeric_generator_test.go @@ -0,0 +1,225 @@ +package service + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestAlphanumericGenerator_Generate(t *testing.T) { + gen := NewAlphanumericGenerator() + + tests := []struct { + name string + length int + expectError bool + validateToken bool + }{ + { + name: "Success_Length1", + length: 1, + expectError: false, + validateToken: true, + }, + { + name: "Success_Length32", + length: 32, + expectError: false, + validateToken: true, + }, + { + name: "Success_Length64", + length: 64, + expectError: false, + validateToken: true, + }, + { + name: "Success_Length255", + length: 255, + expectError: false, + validateToken: true, + }, + { + name: "Error_LengthZero", + length: 0, + expectError: true, + }, + { + name: "Error_NegativeLength", + length: -1, + expectError: true, + }, + { + name: "Error_LengthTooLarge", + length: 256, + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + token, err := gen.Generate(tt.length) + + if tt.expectError { + assert.Error(t, err) + return + } + + assert.NoError(t, err) + assert.Len(t, token, tt.length) + + if tt.validateToken { + // Verify all characters are alphanumeric + for _, c := range token { + isValid := (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9') + assert.True(t, isValid, "character %c is not alphanumeric", c) + } + } + }) + } +} + +func TestAlphanumericGenerator_Validate(t *testing.T) { + gen := NewAlphanumericGenerator() + + tests := []struct { + name string + token string + expectError bool + }{ + { + name: "Valid_Uppercase", + token: "ABCDEFGHIJKLMNOPQRSTUVWXYZ", + expectError: false, + }, + { + name: "Valid_Lowercase", + token: "abcdefghijklmnopqrstuvwxyz", + expectError: false, + }, + { + name: "Valid_Digits", + token: "0123456789", + expectError: false, + }, + { + name: "Valid_Mixed", + token: "aB3dE5fG7h", + expectError: false, + }, + { + name: "Invalid_Empty", + token: "", + expectError: true, + }, + { + name: "Invalid_ContainsHyphen", + token: "abc-def", + expectError: true, + }, + { + name: "Invalid_ContainsUnderscore", + token: "abc_def", + expectError: true, + }, + { + name: "Invalid_ContainsSpaces", + token: "abc def", + expectError: true, + }, + { + name: "Invalid_ContainsSpecialChars", + token: "abc@def!", + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := gen.Validate(tt.token) + if tt.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestAlphanumericGenerator_Randomness(t *testing.T) { + gen := NewAlphanumericGenerator() + + // Generate multiple tokens and ensure they're different + tokens := make(map[string]bool) + length := 32 + + for i := 0; i < 100; i++ { + token, err := gen.Generate(length) + assert.NoError(t, err) + tokens[token] = true + } + + // With 32-character alphanumeric tokens, we should have 100 unique values + assert.Equal(t, 100, len(tokens), "expected all tokens to be unique") +} + +func TestAlphanumericGenerator_CharacterDistribution(t *testing.T) { + gen := NewAlphanumericGenerator() + + // Generate a large token to 
check character distribution (within limit) + length := 255 + token, err := gen.Generate(length) + assert.NoError(t, err) + + // Count character types + uppercaseCount := 0 + lowercaseCount := 0 + digitCount := 0 + + for _, c := range token { + switch { + case c >= 'A' && c <= 'Z': + uppercaseCount++ + case c >= 'a' && c <= 'z': + lowercaseCount++ + case c >= '0' && c <= '9': + digitCount++ + } + } + + // Verify we have a mix of all character types (probabilistic test) + assert.Greater(t, uppercaseCount, 0, "should contain uppercase letters") + assert.Greater(t, lowercaseCount, 0, "should contain lowercase letters") + assert.Greater(t, digitCount, 0, "should contain digits") + + // Verify counts sum to total length (if we have any characters) + if uppercaseCount+lowercaseCount+digitCount > 0 { + assert.Equal(t, length, uppercaseCount+lowercaseCount+digitCount) + } +} + +func TestIsAlphanumeric(t *testing.T) { + tests := []struct { + name string + char rune + expected bool + }{ + {name: "Uppercase_A", char: 'A', expected: true}, + {name: "Uppercase_Z", char: 'Z', expected: true}, + {name: "Lowercase_a", char: 'a', expected: true}, + {name: "Lowercase_z", char: 'z', expected: true}, + {name: "Digit_0", char: '0', expected: true}, + {name: "Digit_9", char: '9', expected: true}, + {name: "Space", char: ' ', expected: false}, + {name: "Hyphen", char: '-', expected: false}, + {name: "Underscore", char: '_', expected: false}, + {name: "At", char: '@', expected: false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := isAlphanumeric(tt.char) + assert.Equal(t, tt.expected, result) + }) + } +} diff --git a/internal/tokenization/service/interface.go b/internal/tokenization/service/interface.go new file mode 100644 index 0000000..d77d9cd --- /dev/null +++ b/internal/tokenization/service/interface.go @@ -0,0 +1,9 @@ +// Package service provides token generation services for various formats. +// Supports UUID, numeric, Luhn-preserving, and alphanumeric token generation. +package service + +// TokenGenerator defines the interface for token generation. +type TokenGenerator interface { + Generate(length int) (string, error) + Validate(token string) error +} diff --git a/internal/tokenization/service/luhn_generator.go b/internal/tokenization/service/luhn_generator.go new file mode 100644 index 0000000..753b458 --- /dev/null +++ b/internal/tokenization/service/luhn_generator.go @@ -0,0 +1,120 @@ +package service + +import ( + "crypto/rand" + "errors" + "fmt" + "math/big" +) + +type luhnGenerator struct{} + +// NewLuhnGenerator creates a new Luhn algorithm compliant token generator. Generates +// cryptographically secure random numeric tokens that pass Luhn validation (used for payment cards). +func NewLuhnGenerator() TokenGenerator { + return &luhnGenerator{} +} + +// Generate creates a Luhn algorithm compliant numeric token of the specified length. +// The last digit is calculated as the Luhn check digit. Returns an error if length is less than 2. 
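+// For a 16-digit request, 15 digits are drawn at random and the 16th is the
+// computed check digit, so the output scans like a payment card number.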
+func (g *luhnGenerator) Generate(length int) (string, error) { + if length < 2 { + return "", errors.New("length must be at least 2 for Luhn tokens") + } + if length > 255 { + return "", errors.New("length must not exceed 255") + } + + // Generate random digits for all positions except the last one + digits := make([]int, length) + for i := 0; i < length-1; i++ { + n, err := rand.Int(rand.Reader, big.NewInt(10)) + if err != nil { + return "", fmt.Errorf("failed to generate random digit: %w", err) + } + digits[i] = int(n.Int64()) + } + + // Calculate and append the Luhn check digit + digits[length-1] = calculateLuhnCheckDigit(digits[:length-1]) + + // Convert to string + token := make([]byte, length) + for i, d := range digits { + token[i] = byte('0' + d) + } + + return string(token), nil +} + +// Validate checks if the token is Luhn algorithm compliant. +func (g *luhnGenerator) Validate(token string) error { + if len(token) < 2 { + return errors.New("token must be at least 2 characters for Luhn validation") + } + + // Check if all characters are numeric + digits := make([]int, len(token)) + for i, c := range token { + if c < '0' || c > '9' { + return errors.New("token must contain only numeric characters") + } + digits[i] = int(c - '0') + } + + // Validate using Luhn algorithm + if !validateLuhn(digits) { + return errors.New("token failed Luhn validation") + } + + return nil +} + +// calculateLuhnCheckDigit calculates the Luhn check digit for the given digits. +// The digits slice should NOT include the check digit position. +func calculateLuhnCheckDigit(digits []int) int { + sum := 0 + length := len(digits) + + // Process digits from right to left (excluding the check digit position) + for i := 0; i < length; i++ { + digit := digits[length-1-i] + + // Double every second digit from the right + if i%2 == 0 { + digit *= 2 + if digit > 9 { + digit -= 9 + } + } + + sum += digit + } + + // Calculate check digit + checkDigit := (10 - (sum % 10)) % 10 + return checkDigit +} + +// validateLuhn validates a complete number (including check digit) using the Luhn algorithm. 
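+// Worked example: for 79927398713, summing digits right to left while doubling
+// every second digit (and subtracting 9 from any product above 9) gives 70;
+// since 70 % 10 == 0, the number is Luhn-valid.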
+func validateLuhn(digits []int) bool { + sum := 0 + length := len(digits) + + // Process all digits from right to left + for i := 0; i < length; i++ { + digit := digits[length-1-i] + + // Double every second digit from the right (skipping the check digit itself) + if i%2 == 1 { + digit *= 2 + if digit > 9 { + digit -= 9 + } + } + + sum += digit + } + + return sum%10 == 0 +} diff --git a/internal/tokenization/service/luhn_generator_test.go b/internal/tokenization/service/luhn_generator_test.go new file mode 100644 index 0000000..46832a5 --- /dev/null +++ b/internal/tokenization/service/luhn_generator_test.go @@ -0,0 +1,233 @@ +package service + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestLuhnGenerator_Generate(t *testing.T) { + gen := NewLuhnGenerator() + + tests := []struct { + name string + length int + expectError bool + validateToken bool + }{ + { + name: "Success_Length2", + length: 2, + expectError: false, + validateToken: true, + }, + { + name: "Success_Length16_CreditCard", + length: 16, + expectError: false, + validateToken: true, + }, + { + name: "Success_Length19_AmexCard", + length: 19, + expectError: false, + validateToken: true, + }, + { + name: "Error_LengthOne", + length: 1, + expectError: true, + }, + { + name: "Error_LengthZero", + length: 0, + expectError: true, + }, + { + name: "Error_LengthTooLarge", + length: 256, + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + token, err := gen.Generate(tt.length) + + if tt.expectError { + assert.Error(t, err) + return + } + + assert.NoError(t, err) + assert.Len(t, token, tt.length) + + if tt.validateToken { + // Verify it's numeric + for _, c := range token { + assert.True(t, c >= '0' && c <= '9', "character %c is not a digit", c) + } + + // Verify it passes Luhn validation + err := gen.Validate(token) + assert.NoError(t, err, "generated token should pass Luhn validation") + } + }) + } +} + +func TestLuhnGenerator_Validate(t *testing.T) { + gen := NewLuhnGenerator() + + tests := []struct { + name string + token string + expectError bool + }{ + { + name: "Valid_KnownLuhnNumber_79927398713", + token: "79927398713", + expectError: false, + }, + { + name: "Valid_KnownLuhnNumber_4532015112830366", + token: "4532015112830366", + expectError: false, + }, + { + name: "Valid_SimpleCase_18", + token: "18", + expectError: false, + }, + { + name: "Invalid_KnownInvalidNumber", + token: "4532015112830367", + expectError: true, + }, + { + name: "Invalid_Empty", + token: "", + expectError: true, + }, + { + name: "Invalid_SingleDigit", + token: "5", + expectError: true, + }, + { + name: "Invalid_ContainsLetters", + token: "453201511283036a", + expectError: true, + }, + { + name: "Invalid_ContainsSpaces", + token: "4532 0151 1283 0366", + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := gen.Validate(tt.token) + if tt.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestCalculateLuhnCheckDigit(t *testing.T) { + tests := []struct { + name string + digits []int + expectedDigit int + }{ + { + name: "SimpleCase_1", + digits: []int{1}, + expectedDigit: 8, + }, + { + name: "SimpleCase_79927398713", + digits: []int{7, 9, 9, 2, 7, 3, 9, 8, 7, 1}, + expectedDigit: 3, + }, + { + name: "CreditCard_453201511283036", + digits: []int{4, 5, 3, 2, 0, 1, 5, 1, 1, 2, 8, 3, 0, 3, 6}, + expectedDigit: 6, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + 
checkDigit := calculateLuhnCheckDigit(tt.digits) + assert.Equal(t, tt.expectedDigit, checkDigit) + }) + } +} + +func TestValidateLuhn(t *testing.T) { + tests := []struct { + name string + digits []int + expected bool + }{ + { + name: "Valid_18", + digits: []int{1, 8}, + expected: true, + }, + { + name: "Valid_79927398713", + digits: []int{7, 9, 9, 2, 7, 3, 9, 8, 7, 1, 3}, + expected: true, + }, + { + name: "Valid_4532015112830366", + digits: []int{4, 5, 3, 2, 0, 1, 5, 1, 1, 2, 8, 3, 0, 3, 6, 6}, + expected: true, + }, + { + name: "Invalid_17", + digits: []int{1, 7}, + expected: false, + }, + { + name: "Invalid_79927398712", + digits: []int{7, 9, 9, 2, 7, 3, 9, 8, 7, 1, 2}, + expected: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := validateLuhn(tt.digits) + assert.Equal(t, tt.expected, result) + }) + } +} + +func TestLuhnGenerator_Randomness(t *testing.T) { + gen := NewLuhnGenerator() + + // Generate multiple tokens and ensure they're different and all pass Luhn validation + tokens := make(map[string]bool) + length := 16 + + for i := 0; i < 100; i++ { + token, err := gen.Generate(length) + assert.NoError(t, err) + + // Verify Luhn compliance + err = gen.Validate(token) + assert.NoError(t, err, "token %s should pass Luhn validation", token) + + tokens[token] = true + } + + // With 16-digit tokens, we should have 100 unique values + assert.Equal(t, 100, len(tokens), "expected all tokens to be unique") +} diff --git a/internal/tokenization/service/numeric_generator.go b/internal/tokenization/service/numeric_generator.go new file mode 100644 index 0000000..bc8c9c0 --- /dev/null +++ b/internal/tokenization/service/numeric_generator.go @@ -0,0 +1,53 @@ +package service + +import ( + "crypto/rand" + "errors" + "fmt" + "math/big" +) + +type numericGenerator struct{} + +// NewNumericGenerator creates a new numeric token generator. Generates cryptographically +// secure random numeric strings of the specified length. +func NewNumericGenerator() TokenGenerator { + return &numericGenerator{} +} + +// Generate creates a cryptographically secure random numeric token of the specified length. +// Returns an error if length is less than 1 or greater than 255. +func (g *numericGenerator) Generate(length int) (string, error) { + if length < 1 { + return "", errors.New("length must be at least 1") + } + if length > 255 { + return "", errors.New("length must not exceed 255") + } + + digits := make([]byte, length) + for i := 0; i < length; i++ { + n, err := rand.Int(rand.Reader, big.NewInt(10)) + if err != nil { + return "", fmt.Errorf("failed to generate random digit: %w", err) + } + digits[i] = byte('0' + n.Int64()) + } + + return string(digits), nil +} + +// Validate checks if the token contains only numeric characters. 
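+// Leading zeros are accepted because tokens are treated as strings, not integers.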
+func (g *numericGenerator) Validate(token string) error { + if len(token) == 0 { + return errors.New("token cannot be empty") + } + + for _, c := range token { + if c < '0' || c > '9' { + return errors.New("token must contain only numeric characters") + } + } + + return nil +} diff --git a/internal/tokenization/service/numeric_generator_test.go b/internal/tokenization/service/numeric_generator_test.go new file mode 100644 index 0000000..74891e4 --- /dev/null +++ b/internal/tokenization/service/numeric_generator_test.go @@ -0,0 +1,153 @@ +package service + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNumericGenerator_Generate(t *testing.T) { + gen := NewNumericGenerator() + + tests := []struct { + name string + length int + expectError bool + validateToken bool + }{ + { + name: "Success_Length1", + length: 1, + expectError: false, + validateToken: true, + }, + { + name: "Success_Length16", + length: 16, + expectError: false, + validateToken: true, + }, + { + name: "Success_Length32", + length: 32, + expectError: false, + validateToken: true, + }, + { + name: "Success_Length255", + length: 255, + expectError: false, + validateToken: true, + }, + { + name: "Error_LengthZero", + length: 0, + expectError: true, + }, + { + name: "Error_NegativeLength", + length: -1, + expectError: true, + }, + { + name: "Error_LengthTooLarge", + length: 256, + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + token, err := gen.Generate(tt.length) + + if tt.expectError { + assert.Error(t, err) + return + } + + assert.NoError(t, err) + assert.Len(t, token, tt.length) + + if tt.validateToken { + // Verify all characters are digits + for _, c := range token { + assert.True(t, c >= '0' && c <= '9', "character %c is not a digit", c) + } + } + }) + } +} + +func TestNumericGenerator_Validate(t *testing.T) { + gen := NewNumericGenerator() + + tests := []struct { + name string + token string + expectError bool + }{ + { + name: "Valid_SingleDigit", + token: "5", + expectError: false, + }, + { + name: "Valid_MultipleDigits", + token: "1234567890", + expectError: false, + }, + { + name: "Valid_LeadingZeros", + token: "0001234", + expectError: false, + }, + { + name: "Invalid_Empty", + token: "", + expectError: true, + }, + { + name: "Invalid_ContainsLetters", + token: "123abc456", + expectError: true, + }, + { + name: "Invalid_ContainsSpecialChars", + token: "123-456", + expectError: true, + }, + { + name: "Invalid_ContainsSpaces", + token: "123 456", + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := gen.Validate(tt.token) + if tt.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestNumericGenerator_Randomness(t *testing.T) { + gen := NewNumericGenerator() + + // Generate multiple tokens and ensure they're different (probabilistic test) + tokens := make(map[string]bool) + length := 16 + + for i := 0; i < 100; i++ { + token, err := gen.Generate(length) + assert.NoError(t, err) + tokens[token] = true + } + + // With 16-digit tokens, we should have 100 unique values + assert.Equal(t, 100, len(tokens), "expected all tokens to be unique") +} diff --git a/internal/tokenization/service/token_generator_factory.go b/internal/tokenization/service/token_generator_factory.go new file mode 100644 index 0000000..3410963 --- /dev/null +++ b/internal/tokenization/service/token_generator_factory.go @@ -0,0 +1,21 @@ +package service + +import ( + 
tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" +) + +// NewTokenGenerator creates a new token generator based on the specified format type. +func NewTokenGenerator(formatType tokenizationDomain.FormatType) (TokenGenerator, error) { + switch formatType { + case tokenizationDomain.FormatUUID: + return NewUUIDGenerator(), nil + case tokenizationDomain.FormatNumeric: + return NewNumericGenerator(), nil + case tokenizationDomain.FormatLuhnPreserving: + return NewLuhnGenerator(), nil + case tokenizationDomain.FormatAlphanumeric: + return NewAlphanumericGenerator(), nil + default: + return nil, tokenizationDomain.ErrInvalidFormatType + } +} diff --git a/internal/tokenization/service/token_generator_factory_test.go b/internal/tokenization/service/token_generator_factory_test.go new file mode 100644 index 0000000..b4dd033 --- /dev/null +++ b/internal/tokenization/service/token_generator_factory_test.go @@ -0,0 +1,95 @@ +package service + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" +) + +func TestNewTokenGenerator(t *testing.T) { + tests := []struct { + name string + formatType tokenizationDomain.FormatType + expectError bool + expectedType string + }{ + { + name: "Success_UUID", + formatType: tokenizationDomain.FormatUUID, + expectError: false, + expectedType: "*service.uuidGenerator", + }, + { + name: "Success_Numeric", + formatType: tokenizationDomain.FormatNumeric, + expectError: false, + expectedType: "*service.numericGenerator", + }, + { + name: "Success_LuhnPreserving", + formatType: tokenizationDomain.FormatLuhnPreserving, + expectError: false, + expectedType: "*service.luhnGenerator", + }, + { + name: "Success_Alphanumeric", + formatType: tokenizationDomain.FormatAlphanumeric, + expectError: false, + expectedType: "*service.alphanumericGenerator", + }, + { + name: "Error_InvalidFormatType", + formatType: tokenizationDomain.FormatType("invalid"), + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gen, err := NewTokenGenerator(tt.formatType) + + if tt.expectError { + assert.Error(t, err) + assert.Nil(t, gen) + assert.ErrorIs(t, err, tokenizationDomain.ErrInvalidFormatType) + } else { + assert.NoError(t, err) + assert.NotNil(t, gen) + } + }) + } +} + +func TestNewTokenGenerator_FunctionalTest(t *testing.T) { + // Test that each generator can actually generate tokens + formatTypes := []tokenizationDomain.FormatType{ + tokenizationDomain.FormatUUID, + tokenizationDomain.FormatNumeric, + tokenizationDomain.FormatLuhnPreserving, + tokenizationDomain.FormatAlphanumeric, + } + + for _, formatType := range formatTypes { + t.Run("Generate_"+formatType.String(), func(t *testing.T) { + gen, err := NewTokenGenerator(formatType) + assert.NoError(t, err) + assert.NotNil(t, gen) + + // Generate a token + length := 16 + if formatType == tokenizationDomain.FormatUUID { + length = 0 // UUID ignores length + } + + token, err := gen.Generate(length) + assert.NoError(t, err) + assert.NotEmpty(t, token) + + // Validate the generated token + err = gen.Validate(token) + assert.NoError(t, err) + }) + } +} diff --git a/internal/tokenization/service/uuid_generator.go b/internal/tokenization/service/uuid_generator.go new file mode 100644 index 0000000..c06aecb --- /dev/null +++ b/internal/tokenization/service/uuid_generator.go @@ -0,0 +1,32 @@ +package service + +import ( + "errors" + + "github.com/google/uuid" +) + +type uuidGenerator 
struct{} + +// NewUUIDGenerator creates a new UUID token generator. Generates UUIDv7 tokens. +func NewUUIDGenerator() TokenGenerator { + return &uuidGenerator{} +} + +// Generate creates a new UUIDv7 token. The length parameter is ignored for UUID generation. +func (g *uuidGenerator) Generate(length int) (string, error) { + id, err := uuid.NewV7() + if err != nil { + return "", err + } + return id.String(), nil +} + +// Validate checks if the token is a valid UUID format. +func (g *uuidGenerator) Validate(token string) error { + _, err := uuid.Parse(token) + if err != nil { + return errors.New("invalid UUID format") + } + return nil +} diff --git a/internal/tokenization/service/uuid_generator_test.go b/internal/tokenization/service/uuid_generator_test.go new file mode 100644 index 0000000..8ba0117 --- /dev/null +++ b/internal/tokenization/service/uuid_generator_test.go @@ -0,0 +1,78 @@ +package service + +import ( + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" +) + +func TestUUIDGenerator_Generate(t *testing.T) { + gen := NewUUIDGenerator() + + t.Run("Success_GeneratesValidUUID", func(t *testing.T) { + token, err := gen.Generate(0) // length parameter is ignored + assert.NoError(t, err) + assert.NotEmpty(t, token) + + // Validate it's a proper UUID + _, err = uuid.Parse(token) + assert.NoError(t, err) + }) + + t.Run("Success_GeneratesUniqueTokens", func(t *testing.T) { + token1, err1 := gen.Generate(0) + token2, err2 := gen.Generate(0) + + assert.NoError(t, err1) + assert.NoError(t, err2) + assert.NotEqual(t, token1, token2, "tokens should be unique") + }) +} + +func TestUUIDGenerator_Validate(t *testing.T) { + gen := NewUUIDGenerator() + + tests := []struct { + name string + token string + expectError bool + }{ + { + name: "Valid_UUIDv4", + token: "550e8400-e29b-41d4-a716-446655440000", + expectError: false, + }, + { + name: "Valid_UUIDv7", + token: uuid.Must(uuid.NewV7()).String(), + expectError: false, + }, + { + name: "Invalid_NotUUID", + token: "not-a-uuid", + expectError: true, + }, + { + name: "Invalid_Empty", + token: "", + expectError: true, + }, + { + name: "Invalid_PartialUUID", + token: "550e8400-e29b-41d4", + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := gen.Validate(tt.token) + if tt.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } +} diff --git a/internal/tokenization/usecase/hash_service.go b/internal/tokenization/usecase/hash_service.go new file mode 100644 index 0000000..fff7f72 --- /dev/null +++ b/internal/tokenization/usecase/hash_service.go @@ -0,0 +1,24 @@ +package usecase + +import ( + "crypto/sha256" + "encoding/hex" +) + +// HashService provides cryptographic hashing for deterministic token lookups. +type HashService interface { + Hash(value []byte) string +} + +type sha256HashService struct{} + +// NewSHA256HashService creates a new SHA-256 hash service. +func NewSHA256HashService() HashService { + return &sha256HashService{} +} + +// Hash computes the SHA-256 hash of the input value and returns it as a hex string. 
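+// For example, Hash([]byte("4111111111111111")) always yields the same
+// 64-character hex digest, which is what makes deterministic token lookups
+// by value_hash possible.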
+func (s *sha256HashService) Hash(value []byte) string { + hash := sha256.Sum256(value) + return hex.EncodeToString(hash[:]) +} diff --git a/internal/tokenization/usecase/interface.go b/internal/tokenization/usecase/interface.go new file mode 100644 index 0000000..a9b142b --- /dev/null +++ b/internal/tokenization/usecase/interface.go @@ -0,0 +1,105 @@ +// Package usecase defines interfaces and implementations for tokenization use cases. +// Provides format-preserving token generation with configurable deterministic behavior and full lifecycle management. +package usecase + +import ( + "context" + "time" + + "github.com/google/uuid" + + cryptoDomain "github.com/allisson/secrets/internal/crypto/domain" + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" +) + +// DekRepository defines the interface for DEK persistence operations. +type DekRepository interface { + Create(ctx context.Context, dek *cryptoDomain.Dek) error + Get(ctx context.Context, dekID uuid.UUID) (*cryptoDomain.Dek, error) +} + +// TokenizationKeyRepository defines the interface for tokenization key persistence. +type TokenizationKeyRepository interface { + Create(ctx context.Context, key *tokenizationDomain.TokenizationKey) error + Delete(ctx context.Context, keyID uuid.UUID) error + Get(ctx context.Context, keyID uuid.UUID) (*tokenizationDomain.TokenizationKey, error) + GetByName(ctx context.Context, name string) (*tokenizationDomain.TokenizationKey, error) + GetByNameAndVersion( + ctx context.Context, + name string, + version uint, + ) (*tokenizationDomain.TokenizationKey, error) +} + +// TokenRepository defines the interface for token mapping persistence. +type TokenRepository interface { + Create(ctx context.Context, token *tokenizationDomain.Token) error + GetByToken(ctx context.Context, token string) (*tokenizationDomain.Token, error) + GetByValueHash(ctx context.Context, keyID uuid.UUID, valueHash string) (*tokenizationDomain.Token, error) + Revoke(ctx context.Context, token string) error + + // DeleteExpired deletes tokens that expired before the specified timestamp. + // Returns the number of deleted tokens. Uses transaction support via database.GetTx(). + // All timestamps are expected in UTC. + DeleteExpired(ctx context.Context, olderThan time.Time) (int64, error) + + // CountExpired counts tokens that expired before the specified timestamp without deleting them. + // Returns the count of matching tokens. Uses transaction support via database.GetTx(). + // All timestamps are expected in UTC. + CountExpired(ctx context.Context, olderThan time.Time) (int64, error) +} + +// TokenizationKeyUseCase defines the interface for tokenization key management operations. +type TokenizationKeyUseCase interface { + // Create generates a new tokenization key with version 1 and an associated DEK. + // The key name must be unique. + Create( + ctx context.Context, + name string, + formatType tokenizationDomain.FormatType, + isDeterministic bool, + alg cryptoDomain.Algorithm, + ) (*tokenizationDomain.TokenizationKey, error) + + // Rotate creates a new version of an existing tokenization key by incrementing the version number. + // Generates a new DEK for the new version while preserving old versions for detokenization. + Rotate( + ctx context.Context, + name string, + formatType tokenizationDomain.FormatType, + isDeterministic bool, + alg cryptoDomain.Algorithm, + ) (*tokenizationDomain.TokenizationKey, error) + + // Delete soft deletes a tokenization key and all its versions by key ID. 
+ Delete(ctx context.Context, keyID uuid.UUID) error +} + +// TokenizationUseCase defines the interface for token generation and management operations. +type TokenizationUseCase interface { + // Tokenize generates a token for the given plaintext value using the latest version of the named key. + // In deterministic mode, returns the existing token if the value has been tokenized before. + // Metadata is optional display data (e.g., last 4 digits, expiry date) stored unencrypted. + Tokenize( + ctx context.Context, + keyName string, + plaintext []byte, + metadata map[string]any, + expiresAt *time.Time, + ) (*tokenizationDomain.Token, error) + + // Detokenize retrieves the original plaintext value for a given token. + // Returns ErrTokenNotFound if token doesn't exist, ErrTokenExpired if expired, ErrTokenRevoked if revoked. + // Security Note: Callers MUST zero the returned plaintext after use: cryptoDomain.Zero(plaintext). + Detokenize(ctx context.Context, token string) (plaintext []byte, metadata map[string]any, err error) + + // Validate checks if a token exists and is valid (not expired or revoked). + Validate(ctx context.Context, token string) (bool, error) + + // Revoke marks a token as revoked, preventing further detokenization. + Revoke(ctx context.Context, token string) error + + // CleanupExpired deletes tokens that expired more than the specified number of days ago. + // Returns the number of deleted tokens. Use dryRun=true to preview count without deletion. + CleanupExpired(ctx context.Context, days int, dryRun bool) (int64, error) +} diff --git a/internal/tokenization/usecase/mocks/mocks.go b/internal/tokenization/usecase/mocks/mocks.go new file mode 100644 index 0000000..44116e1 --- /dev/null +++ b/internal/tokenization/usecase/mocks/mocks.go @@ -0,0 +1,1651 @@ +// Code generated by mockery; DO NOT EDIT. +// github.com/vektra/mockery +// template: testify + +package mocks + +import ( + "context" + "time" + + "github.com/allisson/secrets/internal/crypto/domain" + domain0 "github.com/allisson/secrets/internal/tokenization/domain" + "github.com/google/uuid" + mock "github.com/stretchr/testify/mock" +) + +// NewMockHashService creates a new instance of MockHashService. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
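+//
+// Minimal usage sketch in a test (the plaintext and digest values are illustrative):
+//
+//	m := NewMockHashService(t)
+//	m.EXPECT().Hash([]byte("value")).Return("deadbeef")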
+func NewMockHashService(t interface { + mock.TestingT + Cleanup(func()) +}) *MockHashService { + mock := &MockHashService{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} + +// MockHashService is an autogenerated mock type for the HashService type +type MockHashService struct { + mock.Mock +} + +type MockHashService_Expecter struct { + mock *mock.Mock +} + +func (_m *MockHashService) EXPECT() *MockHashService_Expecter { + return &MockHashService_Expecter{mock: &_m.Mock} +} + +// Hash provides a mock function for the type MockHashService +func (_mock *MockHashService) Hash(value []byte) string { + ret := _mock.Called(value) + + if len(ret) == 0 { + panic("no return value specified for Hash") + } + + var r0 string + if returnFunc, ok := ret.Get(0).(func([]byte) string); ok { + r0 = returnFunc(value) + } else { + r0 = ret.Get(0).(string) + } + return r0 +} + +// MockHashService_Hash_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Hash' +type MockHashService_Hash_Call struct { + *mock.Call +} + +// Hash is a helper method to define mock.On call +// - value []byte +func (_e *MockHashService_Expecter) Hash(value interface{}) *MockHashService_Hash_Call { + return &MockHashService_Hash_Call{Call: _e.mock.On("Hash", value)} +} + +func (_c *MockHashService_Hash_Call) Run(run func(value []byte)) *MockHashService_Hash_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 []byte + if args[0] != nil { + arg0 = args[0].([]byte) + } + run( + arg0, + ) + }) + return _c +} + +func (_c *MockHashService_Hash_Call) Return(s string) *MockHashService_Hash_Call { + _c.Call.Return(s) + return _c +} + +func (_c *MockHashService_Hash_Call) RunAndReturn(run func(value []byte) string) *MockHashService_Hash_Call { + _c.Call.Return(run) + return _c +} + +// NewMockDekRepository creates a new instance of MockDekRepository. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewMockDekRepository(t interface { + mock.TestingT + Cleanup(func()) +}) *MockDekRepository { + mock := &MockDekRepository{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} + +// MockDekRepository is an autogenerated mock type for the DekRepository type +type MockDekRepository struct { + mock.Mock +} + +type MockDekRepository_Expecter struct { + mock *mock.Mock +} + +func (_m *MockDekRepository) EXPECT() *MockDekRepository_Expecter { + return &MockDekRepository_Expecter{mock: &_m.Mock} +} + +// Create provides a mock function for the type MockDekRepository +func (_mock *MockDekRepository) Create(ctx context.Context, dek *domain.Dek) error { + ret := _mock.Called(ctx, dek) + + if len(ret) == 0 { + panic("no return value specified for Create") + } + + var r0 error + if returnFunc, ok := ret.Get(0).(func(context.Context, *domain.Dek) error); ok { + r0 = returnFunc(ctx, dek) + } else { + r0 = ret.Error(0) + } + return r0 +} + +// MockDekRepository_Create_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Create' +type MockDekRepository_Create_Call struct { + *mock.Call +} + +// Create is a helper method to define mock.On call +// - ctx context.Context +// - dek *domain.Dek +func (_e *MockDekRepository_Expecter) Create(ctx interface{}, dek interface{}) *MockDekRepository_Create_Call { + return &MockDekRepository_Create_Call{Call: _e.mock.On("Create", ctx, dek)} +} + +func (_c *MockDekRepository_Create_Call) Run(run func(ctx context.Context, dek *domain.Dek)) *MockDekRepository_Create_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 *domain.Dek + if args[1] != nil { + arg1 = args[1].(*domain.Dek) + } + run( + arg0, + arg1, + ) + }) + return _c +} + +func (_c *MockDekRepository_Create_Call) Return(err error) *MockDekRepository_Create_Call { + _c.Call.Return(err) + return _c +} + +func (_c *MockDekRepository_Create_Call) RunAndReturn(run func(ctx context.Context, dek *domain.Dek) error) *MockDekRepository_Create_Call { + _c.Call.Return(run) + return _c +} + +// Get provides a mock function for the type MockDekRepository +func (_mock *MockDekRepository) Get(ctx context.Context, dekID uuid.UUID) (*domain.Dek, error) { + ret := _mock.Called(ctx, dekID) + + if len(ret) == 0 { + panic("no return value specified for Get") + } + + var r0 *domain.Dek + var r1 error + if returnFunc, ok := ret.Get(0).(func(context.Context, uuid.UUID) (*domain.Dek, error)); ok { + return returnFunc(ctx, dekID) + } + if returnFunc, ok := ret.Get(0).(func(context.Context, uuid.UUID) *domain.Dek); ok { + r0 = returnFunc(ctx, dekID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*domain.Dek) + } + } + if returnFunc, ok := ret.Get(1).(func(context.Context, uuid.UUID) error); ok { + r1 = returnFunc(ctx, dekID) + } else { + r1 = ret.Error(1) + } + return r0, r1 +} + +// MockDekRepository_Get_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Get' +type MockDekRepository_Get_Call struct { + *mock.Call +} + +// Get is a helper method to define mock.On call +// - ctx context.Context +// - dekID uuid.UUID +func (_e *MockDekRepository_Expecter) Get(ctx interface{}, dekID interface{}) *MockDekRepository_Get_Call { + return &MockDekRepository_Get_Call{Call: _e.mock.On("Get", ctx, dekID)} +} + +func (_c *MockDekRepository_Get_Call) Run(run func(ctx context.Context, dekID uuid.UUID)) 
*MockDekRepository_Get_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 uuid.UUID + if args[1] != nil { + arg1 = args[1].(uuid.UUID) + } + run( + arg0, + arg1, + ) + }) + return _c +} + +func (_c *MockDekRepository_Get_Call) Return(dek *domain.Dek, err error) *MockDekRepository_Get_Call { + _c.Call.Return(dek, err) + return _c +} + +func (_c *MockDekRepository_Get_Call) RunAndReturn(run func(ctx context.Context, dekID uuid.UUID) (*domain.Dek, error)) *MockDekRepository_Get_Call { + _c.Call.Return(run) + return _c +} + +// NewMockTokenizationKeyRepository creates a new instance of MockTokenizationKeyRepository. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. +func NewMockTokenizationKeyRepository(t interface { + mock.TestingT + Cleanup(func()) +}) *MockTokenizationKeyRepository { + mock := &MockTokenizationKeyRepository{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} + +// MockTokenizationKeyRepository is an autogenerated mock type for the TokenizationKeyRepository type +type MockTokenizationKeyRepository struct { + mock.Mock +} + +type MockTokenizationKeyRepository_Expecter struct { + mock *mock.Mock +} + +func (_m *MockTokenizationKeyRepository) EXPECT() *MockTokenizationKeyRepository_Expecter { + return &MockTokenizationKeyRepository_Expecter{mock: &_m.Mock} +} + +// Create provides a mock function for the type MockTokenizationKeyRepository +func (_mock *MockTokenizationKeyRepository) Create(ctx context.Context, key *domain0.TokenizationKey) error { + ret := _mock.Called(ctx, key) + + if len(ret) == 0 { + panic("no return value specified for Create") + } + + var r0 error + if returnFunc, ok := ret.Get(0).(func(context.Context, *domain0.TokenizationKey) error); ok { + r0 = returnFunc(ctx, key) + } else { + r0 = ret.Error(0) + } + return r0 +} + +// MockTokenizationKeyRepository_Create_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Create' +type MockTokenizationKeyRepository_Create_Call struct { + *mock.Call +} + +// Create is a helper method to define mock.On call +// - ctx context.Context +// - key *domain0.TokenizationKey +func (_e *MockTokenizationKeyRepository_Expecter) Create(ctx interface{}, key interface{}) *MockTokenizationKeyRepository_Create_Call { + return &MockTokenizationKeyRepository_Create_Call{Call: _e.mock.On("Create", ctx, key)} +} + +func (_c *MockTokenizationKeyRepository_Create_Call) Run(run func(ctx context.Context, key *domain0.TokenizationKey)) *MockTokenizationKeyRepository_Create_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 *domain0.TokenizationKey + if args[1] != nil { + arg1 = args[1].(*domain0.TokenizationKey) + } + run( + arg0, + arg1, + ) + }) + return _c +} + +func (_c *MockTokenizationKeyRepository_Create_Call) Return(err error) *MockTokenizationKeyRepository_Create_Call { + _c.Call.Return(err) + return _c +} + +func (_c *MockTokenizationKeyRepository_Create_Call) RunAndReturn(run func(ctx context.Context, key *domain0.TokenizationKey) error) *MockTokenizationKeyRepository_Create_Call { + _c.Call.Return(run) + return _c +} + +// Delete provides a mock function for the type MockTokenizationKeyRepository +func (_mock *MockTokenizationKeyRepository) 
Delete(ctx context.Context, keyID uuid.UUID) error { + ret := _mock.Called(ctx, keyID) + + if len(ret) == 0 { + panic("no return value specified for Delete") + } + + var r0 error + if returnFunc, ok := ret.Get(0).(func(context.Context, uuid.UUID) error); ok { + r0 = returnFunc(ctx, keyID) + } else { + r0 = ret.Error(0) + } + return r0 +} + +// MockTokenizationKeyRepository_Delete_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Delete' +type MockTokenizationKeyRepository_Delete_Call struct { + *mock.Call +} + +// Delete is a helper method to define mock.On call +// - ctx context.Context +// - keyID uuid.UUID +func (_e *MockTokenizationKeyRepository_Expecter) Delete(ctx interface{}, keyID interface{}) *MockTokenizationKeyRepository_Delete_Call { + return &MockTokenizationKeyRepository_Delete_Call{Call: _e.mock.On("Delete", ctx, keyID)} +} + +func (_c *MockTokenizationKeyRepository_Delete_Call) Run(run func(ctx context.Context, keyID uuid.UUID)) *MockTokenizationKeyRepository_Delete_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 uuid.UUID + if args[1] != nil { + arg1 = args[1].(uuid.UUID) + } + run( + arg0, + arg1, + ) + }) + return _c +} + +func (_c *MockTokenizationKeyRepository_Delete_Call) Return(err error) *MockTokenizationKeyRepository_Delete_Call { + _c.Call.Return(err) + return _c +} + +func (_c *MockTokenizationKeyRepository_Delete_Call) RunAndReturn(run func(ctx context.Context, keyID uuid.UUID) error) *MockTokenizationKeyRepository_Delete_Call { + _c.Call.Return(run) + return _c +} + +// Get provides a mock function for the type MockTokenizationKeyRepository +func (_mock *MockTokenizationKeyRepository) Get(ctx context.Context, keyID uuid.UUID) (*domain0.TokenizationKey, error) { + ret := _mock.Called(ctx, keyID) + + if len(ret) == 0 { + panic("no return value specified for Get") + } + + var r0 *domain0.TokenizationKey + var r1 error + if returnFunc, ok := ret.Get(0).(func(context.Context, uuid.UUID) (*domain0.TokenizationKey, error)); ok { + return returnFunc(ctx, keyID) + } + if returnFunc, ok := ret.Get(0).(func(context.Context, uuid.UUID) *domain0.TokenizationKey); ok { + r0 = returnFunc(ctx, keyID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*domain0.TokenizationKey) + } + } + if returnFunc, ok := ret.Get(1).(func(context.Context, uuid.UUID) error); ok { + r1 = returnFunc(ctx, keyID) + } else { + r1 = ret.Error(1) + } + return r0, r1 +} + +// MockTokenizationKeyRepository_Get_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Get' +type MockTokenizationKeyRepository_Get_Call struct { + *mock.Call +} + +// Get is a helper method to define mock.On call +// - ctx context.Context +// - keyID uuid.UUID +func (_e *MockTokenizationKeyRepository_Expecter) Get(ctx interface{}, keyID interface{}) *MockTokenizationKeyRepository_Get_Call { + return &MockTokenizationKeyRepository_Get_Call{Call: _e.mock.On("Get", ctx, keyID)} +} + +func (_c *MockTokenizationKeyRepository_Get_Call) Run(run func(ctx context.Context, keyID uuid.UUID)) *MockTokenizationKeyRepository_Get_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 uuid.UUID + if args[1] != nil { + arg1 = args[1].(uuid.UUID) + } + run( + arg0, + arg1, + ) + }) + return _c +} + +func (_c *MockTokenizationKeyRepository_Get_Call) Return(tokenizationKey 
*domain0.TokenizationKey, err error) *MockTokenizationKeyRepository_Get_Call { + _c.Call.Return(tokenizationKey, err) + return _c +} + +func (_c *MockTokenizationKeyRepository_Get_Call) RunAndReturn(run func(ctx context.Context, keyID uuid.UUID) (*domain0.TokenizationKey, error)) *MockTokenizationKeyRepository_Get_Call { + _c.Call.Return(run) + return _c +} + +// GetByName provides a mock function for the type MockTokenizationKeyRepository +func (_mock *MockTokenizationKeyRepository) GetByName(ctx context.Context, name string) (*domain0.TokenizationKey, error) { + ret := _mock.Called(ctx, name) + + if len(ret) == 0 { + panic("no return value specified for GetByName") + } + + var r0 *domain0.TokenizationKey + var r1 error + if returnFunc, ok := ret.Get(0).(func(context.Context, string) (*domain0.TokenizationKey, error)); ok { + return returnFunc(ctx, name) + } + if returnFunc, ok := ret.Get(0).(func(context.Context, string) *domain0.TokenizationKey); ok { + r0 = returnFunc(ctx, name) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*domain0.TokenizationKey) + } + } + if returnFunc, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = returnFunc(ctx, name) + } else { + r1 = ret.Error(1) + } + return r0, r1 +} + +// MockTokenizationKeyRepository_GetByName_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetByName' +type MockTokenizationKeyRepository_GetByName_Call struct { + *mock.Call +} + +// GetByName is a helper method to define mock.On call +// - ctx context.Context +// - name string +func (_e *MockTokenizationKeyRepository_Expecter) GetByName(ctx interface{}, name interface{}) *MockTokenizationKeyRepository_GetByName_Call { + return &MockTokenizationKeyRepository_GetByName_Call{Call: _e.mock.On("GetByName", ctx, name)} +} + +func (_c *MockTokenizationKeyRepository_GetByName_Call) Run(run func(ctx context.Context, name string)) *MockTokenizationKeyRepository_GetByName_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 string + if args[1] != nil { + arg1 = args[1].(string) + } + run( + arg0, + arg1, + ) + }) + return _c +} + +func (_c *MockTokenizationKeyRepository_GetByName_Call) Return(tokenizationKey *domain0.TokenizationKey, err error) *MockTokenizationKeyRepository_GetByName_Call { + _c.Call.Return(tokenizationKey, err) + return _c +} + +func (_c *MockTokenizationKeyRepository_GetByName_Call) RunAndReturn(run func(ctx context.Context, name string) (*domain0.TokenizationKey, error)) *MockTokenizationKeyRepository_GetByName_Call { + _c.Call.Return(run) + return _c +} + +// GetByNameAndVersion provides a mock function for the type MockTokenizationKeyRepository +func (_mock *MockTokenizationKeyRepository) GetByNameAndVersion(ctx context.Context, name string, version uint) (*domain0.TokenizationKey, error) { + ret := _mock.Called(ctx, name, version) + + if len(ret) == 0 { + panic("no return value specified for GetByNameAndVersion") + } + + var r0 *domain0.TokenizationKey + var r1 error + if returnFunc, ok := ret.Get(0).(func(context.Context, string, uint) (*domain0.TokenizationKey, error)); ok { + return returnFunc(ctx, name, version) + } + if returnFunc, ok := ret.Get(0).(func(context.Context, string, uint) *domain0.TokenizationKey); ok { + r0 = returnFunc(ctx, name, version) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*domain0.TokenizationKey) + } + } + if returnFunc, ok := ret.Get(1).(func(context.Context, string, 
uint) error); ok { + r1 = returnFunc(ctx, name, version) + } else { + r1 = ret.Error(1) + } + return r0, r1 +} + +// MockTokenizationKeyRepository_GetByNameAndVersion_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetByNameAndVersion' +type MockTokenizationKeyRepository_GetByNameAndVersion_Call struct { + *mock.Call +} + +// GetByNameAndVersion is a helper method to define mock.On call +// - ctx context.Context +// - name string +// - version uint +func (_e *MockTokenizationKeyRepository_Expecter) GetByNameAndVersion(ctx interface{}, name interface{}, version interface{}) *MockTokenizationKeyRepository_GetByNameAndVersion_Call { + return &MockTokenizationKeyRepository_GetByNameAndVersion_Call{Call: _e.mock.On("GetByNameAndVersion", ctx, name, version)} +} + +func (_c *MockTokenizationKeyRepository_GetByNameAndVersion_Call) Run(run func(ctx context.Context, name string, version uint)) *MockTokenizationKeyRepository_GetByNameAndVersion_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 string + if args[1] != nil { + arg1 = args[1].(string) + } + var arg2 uint + if args[2] != nil { + arg2 = args[2].(uint) + } + run( + arg0, + arg1, + arg2, + ) + }) + return _c +} + +func (_c *MockTokenizationKeyRepository_GetByNameAndVersion_Call) Return(tokenizationKey *domain0.TokenizationKey, err error) *MockTokenizationKeyRepository_GetByNameAndVersion_Call { + _c.Call.Return(tokenizationKey, err) + return _c +} + +func (_c *MockTokenizationKeyRepository_GetByNameAndVersion_Call) RunAndReturn(run func(ctx context.Context, name string, version uint) (*domain0.TokenizationKey, error)) *MockTokenizationKeyRepository_GetByNameAndVersion_Call { + _c.Call.Return(run) + return _c +} + +// NewMockTokenRepository creates a new instance of MockTokenRepository. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewMockTokenRepository(t interface { + mock.TestingT + Cleanup(func()) +}) *MockTokenRepository { + mock := &MockTokenRepository{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} + +// MockTokenRepository is an autogenerated mock type for the TokenRepository type +type MockTokenRepository struct { + mock.Mock +} + +type MockTokenRepository_Expecter struct { + mock *mock.Mock +} + +func (_m *MockTokenRepository) EXPECT() *MockTokenRepository_Expecter { + return &MockTokenRepository_Expecter{mock: &_m.Mock} +} + +// CountExpired provides a mock function for the type MockTokenRepository +func (_mock *MockTokenRepository) CountExpired(ctx context.Context, olderThan time.Time) (int64, error) { + ret := _mock.Called(ctx, olderThan) + + if len(ret) == 0 { + panic("no return value specified for CountExpired") + } + + var r0 int64 + var r1 error + if returnFunc, ok := ret.Get(0).(func(context.Context, time.Time) (int64, error)); ok { + return returnFunc(ctx, olderThan) + } + if returnFunc, ok := ret.Get(0).(func(context.Context, time.Time) int64); ok { + r0 = returnFunc(ctx, olderThan) + } else { + r0 = ret.Get(0).(int64) + } + if returnFunc, ok := ret.Get(1).(func(context.Context, time.Time) error); ok { + r1 = returnFunc(ctx, olderThan) + } else { + r1 = ret.Error(1) + } + return r0, r1 +} + +// MockTokenRepository_CountExpired_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CountExpired' +type MockTokenRepository_CountExpired_Call struct { + *mock.Call +} + +// CountExpired is a helper method to define mock.On call +// - ctx context.Context +// - olderThan time.Time +func (_e *MockTokenRepository_Expecter) CountExpired(ctx interface{}, olderThan interface{}) *MockTokenRepository_CountExpired_Call { + return &MockTokenRepository_CountExpired_Call{Call: _e.mock.On("CountExpired", ctx, olderThan)} +} + +func (_c *MockTokenRepository_CountExpired_Call) Run(run func(ctx context.Context, olderThan time.Time)) *MockTokenRepository_CountExpired_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 time.Time + if args[1] != nil { + arg1 = args[1].(time.Time) + } + run( + arg0, + arg1, + ) + }) + return _c +} + +func (_c *MockTokenRepository_CountExpired_Call) Return(n int64, err error) *MockTokenRepository_CountExpired_Call { + _c.Call.Return(n, err) + return _c +} + +func (_c *MockTokenRepository_CountExpired_Call) RunAndReturn(run func(ctx context.Context, olderThan time.Time) (int64, error)) *MockTokenRepository_CountExpired_Call { + _c.Call.Return(run) + return _c +} + +// Create provides a mock function for the type MockTokenRepository +func (_mock *MockTokenRepository) Create(ctx context.Context, token *domain0.Token) error { + ret := _mock.Called(ctx, token) + + if len(ret) == 0 { + panic("no return value specified for Create") + } + + var r0 error + if returnFunc, ok := ret.Get(0).(func(context.Context, *domain0.Token) error); ok { + r0 = returnFunc(ctx, token) + } else { + r0 = ret.Error(0) + } + return r0 +} + +// MockTokenRepository_Create_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Create' +type MockTokenRepository_Create_Call struct { + *mock.Call +} + +// Create is a helper method to define mock.On call +// - ctx context.Context +// - token *domain0.Token +func (_e *MockTokenRepository_Expecter) Create(ctx interface{}, token interface{}) 
*MockTokenRepository_Create_Call { + return &MockTokenRepository_Create_Call{Call: _e.mock.On("Create", ctx, token)} +} + +func (_c *MockTokenRepository_Create_Call) Run(run func(ctx context.Context, token *domain0.Token)) *MockTokenRepository_Create_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 *domain0.Token + if args[1] != nil { + arg1 = args[1].(*domain0.Token) + } + run( + arg0, + arg1, + ) + }) + return _c +} + +func (_c *MockTokenRepository_Create_Call) Return(err error) *MockTokenRepository_Create_Call { + _c.Call.Return(err) + return _c +} + +func (_c *MockTokenRepository_Create_Call) RunAndReturn(run func(ctx context.Context, token *domain0.Token) error) *MockTokenRepository_Create_Call { + _c.Call.Return(run) + return _c +} + +// DeleteExpired provides a mock function for the type MockTokenRepository +func (_mock *MockTokenRepository) DeleteExpired(ctx context.Context, olderThan time.Time) (int64, error) { + ret := _mock.Called(ctx, olderThan) + + if len(ret) == 0 { + panic("no return value specified for DeleteExpired") + } + + var r0 int64 + var r1 error + if returnFunc, ok := ret.Get(0).(func(context.Context, time.Time) (int64, error)); ok { + return returnFunc(ctx, olderThan) + } + if returnFunc, ok := ret.Get(0).(func(context.Context, time.Time) int64); ok { + r0 = returnFunc(ctx, olderThan) + } else { + r0 = ret.Get(0).(int64) + } + if returnFunc, ok := ret.Get(1).(func(context.Context, time.Time) error); ok { + r1 = returnFunc(ctx, olderThan) + } else { + r1 = ret.Error(1) + } + return r0, r1 +} + +// MockTokenRepository_DeleteExpired_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteExpired' +type MockTokenRepository_DeleteExpired_Call struct { + *mock.Call +} + +// DeleteExpired is a helper method to define mock.On call +// - ctx context.Context +// - olderThan time.Time +func (_e *MockTokenRepository_Expecter) DeleteExpired(ctx interface{}, olderThan interface{}) *MockTokenRepository_DeleteExpired_Call { + return &MockTokenRepository_DeleteExpired_Call{Call: _e.mock.On("DeleteExpired", ctx, olderThan)} +} + +func (_c *MockTokenRepository_DeleteExpired_Call) Run(run func(ctx context.Context, olderThan time.Time)) *MockTokenRepository_DeleteExpired_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 time.Time + if args[1] != nil { + arg1 = args[1].(time.Time) + } + run( + arg0, + arg1, + ) + }) + return _c +} + +func (_c *MockTokenRepository_DeleteExpired_Call) Return(n int64, err error) *MockTokenRepository_DeleteExpired_Call { + _c.Call.Return(n, err) + return _c +} + +func (_c *MockTokenRepository_DeleteExpired_Call) RunAndReturn(run func(ctx context.Context, olderThan time.Time) (int64, error)) *MockTokenRepository_DeleteExpired_Call { + _c.Call.Return(run) + return _c +} + +// GetByToken provides a mock function for the type MockTokenRepository +func (_mock *MockTokenRepository) GetByToken(ctx context.Context, token string) (*domain0.Token, error) { + ret := _mock.Called(ctx, token) + + if len(ret) == 0 { + panic("no return value specified for GetByToken") + } + + var r0 *domain0.Token + var r1 error + if returnFunc, ok := ret.Get(0).(func(context.Context, string) (*domain0.Token, error)); ok { + return returnFunc(ctx, token) + } + if returnFunc, ok := ret.Get(0).(func(context.Context, string) *domain0.Token); ok { + r0 = 
returnFunc(ctx, token) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*domain0.Token) + } + } + if returnFunc, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = returnFunc(ctx, token) + } else { + r1 = ret.Error(1) + } + return r0, r1 +} + +// MockTokenRepository_GetByToken_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetByToken' +type MockTokenRepository_GetByToken_Call struct { + *mock.Call +} + +// GetByToken is a helper method to define mock.On call +// - ctx context.Context +// - token string +func (_e *MockTokenRepository_Expecter) GetByToken(ctx interface{}, token interface{}) *MockTokenRepository_GetByToken_Call { + return &MockTokenRepository_GetByToken_Call{Call: _e.mock.On("GetByToken", ctx, token)} +} + +func (_c *MockTokenRepository_GetByToken_Call) Run(run func(ctx context.Context, token string)) *MockTokenRepository_GetByToken_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 string + if args[1] != nil { + arg1 = args[1].(string) + } + run( + arg0, + arg1, + ) + }) + return _c +} + +func (_c *MockTokenRepository_GetByToken_Call) Return(token1 *domain0.Token, err error) *MockTokenRepository_GetByToken_Call { + _c.Call.Return(token1, err) + return _c +} + +func (_c *MockTokenRepository_GetByToken_Call) RunAndReturn(run func(ctx context.Context, token string) (*domain0.Token, error)) *MockTokenRepository_GetByToken_Call { + _c.Call.Return(run) + return _c +} + +// GetByValueHash provides a mock function for the type MockTokenRepository +func (_mock *MockTokenRepository) GetByValueHash(ctx context.Context, keyID uuid.UUID, valueHash string) (*domain0.Token, error) { + ret := _mock.Called(ctx, keyID, valueHash) + + if len(ret) == 0 { + panic("no return value specified for GetByValueHash") + } + + var r0 *domain0.Token + var r1 error + if returnFunc, ok := ret.Get(0).(func(context.Context, uuid.UUID, string) (*domain0.Token, error)); ok { + return returnFunc(ctx, keyID, valueHash) + } + if returnFunc, ok := ret.Get(0).(func(context.Context, uuid.UUID, string) *domain0.Token); ok { + r0 = returnFunc(ctx, keyID, valueHash) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*domain0.Token) + } + } + if returnFunc, ok := ret.Get(1).(func(context.Context, uuid.UUID, string) error); ok { + r1 = returnFunc(ctx, keyID, valueHash) + } else { + r1 = ret.Error(1) + } + return r0, r1 +} + +// MockTokenRepository_GetByValueHash_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetByValueHash' +type MockTokenRepository_GetByValueHash_Call struct { + *mock.Call +} + +// GetByValueHash is a helper method to define mock.On call +// - ctx context.Context +// - keyID uuid.UUID +// - valueHash string +func (_e *MockTokenRepository_Expecter) GetByValueHash(ctx interface{}, keyID interface{}, valueHash interface{}) *MockTokenRepository_GetByValueHash_Call { + return &MockTokenRepository_GetByValueHash_Call{Call: _e.mock.On("GetByValueHash", ctx, keyID, valueHash)} +} + +func (_c *MockTokenRepository_GetByValueHash_Call) Run(run func(ctx context.Context, keyID uuid.UUID, valueHash string)) *MockTokenRepository_GetByValueHash_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 uuid.UUID + if args[1] != nil { + arg1 = args[1].(uuid.UUID) + } + var arg2 string + if args[2] != nil { + arg2 = 
args[2].(string) + } + run( + arg0, + arg1, + arg2, + ) + }) + return _c +} + +func (_c *MockTokenRepository_GetByValueHash_Call) Return(token *domain0.Token, err error) *MockTokenRepository_GetByValueHash_Call { + _c.Call.Return(token, err) + return _c +} + +func (_c *MockTokenRepository_GetByValueHash_Call) RunAndReturn(run func(ctx context.Context, keyID uuid.UUID, valueHash string) (*domain0.Token, error)) *MockTokenRepository_GetByValueHash_Call { + _c.Call.Return(run) + return _c +} + +// Revoke provides a mock function for the type MockTokenRepository +func (_mock *MockTokenRepository) Revoke(ctx context.Context, token string) error { + ret := _mock.Called(ctx, token) + + if len(ret) == 0 { + panic("no return value specified for Revoke") + } + + var r0 error + if returnFunc, ok := ret.Get(0).(func(context.Context, string) error); ok { + r0 = returnFunc(ctx, token) + } else { + r0 = ret.Error(0) + } + return r0 +} + +// MockTokenRepository_Revoke_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Revoke' +type MockTokenRepository_Revoke_Call struct { + *mock.Call +} + +// Revoke is a helper method to define mock.On call +// - ctx context.Context +// - token string +func (_e *MockTokenRepository_Expecter) Revoke(ctx interface{}, token interface{}) *MockTokenRepository_Revoke_Call { + return &MockTokenRepository_Revoke_Call{Call: _e.mock.On("Revoke", ctx, token)} +} + +func (_c *MockTokenRepository_Revoke_Call) Run(run func(ctx context.Context, token string)) *MockTokenRepository_Revoke_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 string + if args[1] != nil { + arg1 = args[1].(string) + } + run( + arg0, + arg1, + ) + }) + return _c +} + +func (_c *MockTokenRepository_Revoke_Call) Return(err error) *MockTokenRepository_Revoke_Call { + _c.Call.Return(err) + return _c +} + +func (_c *MockTokenRepository_Revoke_Call) RunAndReturn(run func(ctx context.Context, token string) error) *MockTokenRepository_Revoke_Call { + _c.Call.Return(run) + return _c +} + +// NewMockTokenizationKeyUseCase creates a new instance of MockTokenizationKeyUseCase. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewMockTokenizationKeyUseCase(t interface { + mock.TestingT + Cleanup(func()) +}) *MockTokenizationKeyUseCase { + mock := &MockTokenizationKeyUseCase{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} + +// MockTokenizationKeyUseCase is an autogenerated mock type for the TokenizationKeyUseCase type +type MockTokenizationKeyUseCase struct { + mock.Mock +} + +type MockTokenizationKeyUseCase_Expecter struct { + mock *mock.Mock +} + +func (_m *MockTokenizationKeyUseCase) EXPECT() *MockTokenizationKeyUseCase_Expecter { + return &MockTokenizationKeyUseCase_Expecter{mock: &_m.Mock} +} + +// Create provides a mock function for the type MockTokenizationKeyUseCase +func (_mock *MockTokenizationKeyUseCase) Create(ctx context.Context, name string, formatType domain0.FormatType, isDeterministic bool, alg domain.Algorithm) (*domain0.TokenizationKey, error) { + ret := _mock.Called(ctx, name, formatType, isDeterministic, alg) + + if len(ret) == 0 { + panic("no return value specified for Create") + } + + var r0 *domain0.TokenizationKey + var r1 error + if returnFunc, ok := ret.Get(0).(func(context.Context, string, domain0.FormatType, bool, domain.Algorithm) (*domain0.TokenizationKey, error)); ok { + return returnFunc(ctx, name, formatType, isDeterministic, alg) + } + if returnFunc, ok := ret.Get(0).(func(context.Context, string, domain0.FormatType, bool, domain.Algorithm) *domain0.TokenizationKey); ok { + r0 = returnFunc(ctx, name, formatType, isDeterministic, alg) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*domain0.TokenizationKey) + } + } + if returnFunc, ok := ret.Get(1).(func(context.Context, string, domain0.FormatType, bool, domain.Algorithm) error); ok { + r1 = returnFunc(ctx, name, formatType, isDeterministic, alg) + } else { + r1 = ret.Error(1) + } + return r0, r1 +} + +// MockTokenizationKeyUseCase_Create_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Create' +type MockTokenizationKeyUseCase_Create_Call struct { + *mock.Call +} + +// Create is a helper method to define mock.On call +// - ctx context.Context +// - name string +// - formatType domain0.FormatType +// - isDeterministic bool +// - alg domain.Algorithm +func (_e *MockTokenizationKeyUseCase_Expecter) Create(ctx interface{}, name interface{}, formatType interface{}, isDeterministic interface{}, alg interface{}) *MockTokenizationKeyUseCase_Create_Call { + return &MockTokenizationKeyUseCase_Create_Call{Call: _e.mock.On("Create", ctx, name, formatType, isDeterministic, alg)} +} + +func (_c *MockTokenizationKeyUseCase_Create_Call) Run(run func(ctx context.Context, name string, formatType domain0.FormatType, isDeterministic bool, alg domain.Algorithm)) *MockTokenizationKeyUseCase_Create_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 string + if args[1] != nil { + arg1 = args[1].(string) + } + var arg2 domain0.FormatType + if args[2] != nil { + arg2 = args[2].(domain0.FormatType) + } + var arg3 bool + if args[3] != nil { + arg3 = args[3].(bool) + } + var arg4 domain.Algorithm + if args[4] != nil { + arg4 = args[4].(domain.Algorithm) + } + run( + arg0, + arg1, + arg2, + arg3, + arg4, + ) + }) + return _c +} + +func (_c *MockTokenizationKeyUseCase_Create_Call) Return(tokenizationKey *domain0.TokenizationKey, err error) *MockTokenizationKeyUseCase_Create_Call { + _c.Call.Return(tokenizationKey, err) + return _c +} + +func (_c 
*MockTokenizationKeyUseCase_Create_Call) RunAndReturn(run func(ctx context.Context, name string, formatType domain0.FormatType, isDeterministic bool, alg domain.Algorithm) (*domain0.TokenizationKey, error)) *MockTokenizationKeyUseCase_Create_Call { + _c.Call.Return(run) + return _c +} + +// Delete provides a mock function for the type MockTokenizationKeyUseCase +func (_mock *MockTokenizationKeyUseCase) Delete(ctx context.Context, keyID uuid.UUID) error { + ret := _mock.Called(ctx, keyID) + + if len(ret) == 0 { + panic("no return value specified for Delete") + } + + var r0 error + if returnFunc, ok := ret.Get(0).(func(context.Context, uuid.UUID) error); ok { + r0 = returnFunc(ctx, keyID) + } else { + r0 = ret.Error(0) + } + return r0 +} + +// MockTokenizationKeyUseCase_Delete_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Delete' +type MockTokenizationKeyUseCase_Delete_Call struct { + *mock.Call +} + +// Delete is a helper method to define mock.On call +// - ctx context.Context +// - keyID uuid.UUID +func (_e *MockTokenizationKeyUseCase_Expecter) Delete(ctx interface{}, keyID interface{}) *MockTokenizationKeyUseCase_Delete_Call { + return &MockTokenizationKeyUseCase_Delete_Call{Call: _e.mock.On("Delete", ctx, keyID)} +} + +func (_c *MockTokenizationKeyUseCase_Delete_Call) Run(run func(ctx context.Context, keyID uuid.UUID)) *MockTokenizationKeyUseCase_Delete_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 uuid.UUID + if args[1] != nil { + arg1 = args[1].(uuid.UUID) + } + run( + arg0, + arg1, + ) + }) + return _c +} + +func (_c *MockTokenizationKeyUseCase_Delete_Call) Return(err error) *MockTokenizationKeyUseCase_Delete_Call { + _c.Call.Return(err) + return _c +} + +func (_c *MockTokenizationKeyUseCase_Delete_Call) RunAndReturn(run func(ctx context.Context, keyID uuid.UUID) error) *MockTokenizationKeyUseCase_Delete_Call { + _c.Call.Return(run) + return _c +} + +// Rotate provides a mock function for the type MockTokenizationKeyUseCase +func (_mock *MockTokenizationKeyUseCase) Rotate(ctx context.Context, name string, formatType domain0.FormatType, isDeterministic bool, alg domain.Algorithm) (*domain0.TokenizationKey, error) { + ret := _mock.Called(ctx, name, formatType, isDeterministic, alg) + + if len(ret) == 0 { + panic("no return value specified for Rotate") + } + + var r0 *domain0.TokenizationKey + var r1 error + if returnFunc, ok := ret.Get(0).(func(context.Context, string, domain0.FormatType, bool, domain.Algorithm) (*domain0.TokenizationKey, error)); ok { + return returnFunc(ctx, name, formatType, isDeterministic, alg) + } + if returnFunc, ok := ret.Get(0).(func(context.Context, string, domain0.FormatType, bool, domain.Algorithm) *domain0.TokenizationKey); ok { + r0 = returnFunc(ctx, name, formatType, isDeterministic, alg) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*domain0.TokenizationKey) + } + } + if returnFunc, ok := ret.Get(1).(func(context.Context, string, domain0.FormatType, bool, domain.Algorithm) error); ok { + r1 = returnFunc(ctx, name, formatType, isDeterministic, alg) + } else { + r1 = ret.Error(1) + } + return r0, r1 +} + +// MockTokenizationKeyUseCase_Rotate_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Rotate' +type MockTokenizationKeyUseCase_Rotate_Call struct { + *mock.Call +} + +// Rotate is a helper method to define mock.On call +// - ctx context.Context +// - name 
string +// - formatType domain0.FormatType +// - isDeterministic bool +// - alg domain.Algorithm +func (_e *MockTokenizationKeyUseCase_Expecter) Rotate(ctx interface{}, name interface{}, formatType interface{}, isDeterministic interface{}, alg interface{}) *MockTokenizationKeyUseCase_Rotate_Call { + return &MockTokenizationKeyUseCase_Rotate_Call{Call: _e.mock.On("Rotate", ctx, name, formatType, isDeterministic, alg)} +} + +func (_c *MockTokenizationKeyUseCase_Rotate_Call) Run(run func(ctx context.Context, name string, formatType domain0.FormatType, isDeterministic bool, alg domain.Algorithm)) *MockTokenizationKeyUseCase_Rotate_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 string + if args[1] != nil { + arg1 = args[1].(string) + } + var arg2 domain0.FormatType + if args[2] != nil { + arg2 = args[2].(domain0.FormatType) + } + var arg3 bool + if args[3] != nil { + arg3 = args[3].(bool) + } + var arg4 domain.Algorithm + if args[4] != nil { + arg4 = args[4].(domain.Algorithm) + } + run( + arg0, + arg1, + arg2, + arg3, + arg4, + ) + }) + return _c +} + +func (_c *MockTokenizationKeyUseCase_Rotate_Call) Return(tokenizationKey *domain0.TokenizationKey, err error) *MockTokenizationKeyUseCase_Rotate_Call { + _c.Call.Return(tokenizationKey, err) + return _c +} + +func (_c *MockTokenizationKeyUseCase_Rotate_Call) RunAndReturn(run func(ctx context.Context, name string, formatType domain0.FormatType, isDeterministic bool, alg domain.Algorithm) (*domain0.TokenizationKey, error)) *MockTokenizationKeyUseCase_Rotate_Call { + _c.Call.Return(run) + return _c +} + +// NewMockTokenizationUseCase creates a new instance of MockTokenizationUseCase. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. +// The first argument is typically a *testing.T value. 
+func NewMockTokenizationUseCase(t interface { + mock.TestingT + Cleanup(func()) +}) *MockTokenizationUseCase { + mock := &MockTokenizationUseCase{} + mock.Mock.Test(t) + + t.Cleanup(func() { mock.AssertExpectations(t) }) + + return mock +} + +// MockTokenizationUseCase is an autogenerated mock type for the TokenizationUseCase type +type MockTokenizationUseCase struct { + mock.Mock +} + +type MockTokenizationUseCase_Expecter struct { + mock *mock.Mock +} + +func (_m *MockTokenizationUseCase) EXPECT() *MockTokenizationUseCase_Expecter { + return &MockTokenizationUseCase_Expecter{mock: &_m.Mock} +} + +// CleanupExpired provides a mock function for the type MockTokenizationUseCase +func (_mock *MockTokenizationUseCase) CleanupExpired(ctx context.Context, days int, dryRun bool) (int64, error) { + ret := _mock.Called(ctx, days, dryRun) + + if len(ret) == 0 { + panic("no return value specified for CleanupExpired") + } + + var r0 int64 + var r1 error + if returnFunc, ok := ret.Get(0).(func(context.Context, int, bool) (int64, error)); ok { + return returnFunc(ctx, days, dryRun) + } + if returnFunc, ok := ret.Get(0).(func(context.Context, int, bool) int64); ok { + r0 = returnFunc(ctx, days, dryRun) + } else { + r0 = ret.Get(0).(int64) + } + if returnFunc, ok := ret.Get(1).(func(context.Context, int, bool) error); ok { + r1 = returnFunc(ctx, days, dryRun) + } else { + r1 = ret.Error(1) + } + return r0, r1 +} + +// MockTokenizationUseCase_CleanupExpired_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CleanupExpired' +type MockTokenizationUseCase_CleanupExpired_Call struct { + *mock.Call +} + +// CleanupExpired is a helper method to define mock.On call +// - ctx context.Context +// - days int +// - dryRun bool +func (_e *MockTokenizationUseCase_Expecter) CleanupExpired(ctx interface{}, days interface{}, dryRun interface{}) *MockTokenizationUseCase_CleanupExpired_Call { + return &MockTokenizationUseCase_CleanupExpired_Call{Call: _e.mock.On("CleanupExpired", ctx, days, dryRun)} +} + +func (_c *MockTokenizationUseCase_CleanupExpired_Call) Run(run func(ctx context.Context, days int, dryRun bool)) *MockTokenizationUseCase_CleanupExpired_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 int + if args[1] != nil { + arg1 = args[1].(int) + } + var arg2 bool + if args[2] != nil { + arg2 = args[2].(bool) + } + run( + arg0, + arg1, + arg2, + ) + }) + return _c +} + +func (_c *MockTokenizationUseCase_CleanupExpired_Call) Return(n int64, err error) *MockTokenizationUseCase_CleanupExpired_Call { + _c.Call.Return(n, err) + return _c +} + +func (_c *MockTokenizationUseCase_CleanupExpired_Call) RunAndReturn(run func(ctx context.Context, days int, dryRun bool) (int64, error)) *MockTokenizationUseCase_CleanupExpired_Call { + _c.Call.Return(run) + return _c +} + +// Detokenize provides a mock function for the type MockTokenizationUseCase +func (_mock *MockTokenizationUseCase) Detokenize(ctx context.Context, token string) ([]byte, map[string]any, error) { + ret := _mock.Called(ctx, token) + + if len(ret) == 0 { + panic("no return value specified for Detokenize") + } + + var r0 []byte + var r1 map[string]any + var r2 error + if returnFunc, ok := ret.Get(0).(func(context.Context, string) ([]byte, map[string]any, error)); ok { + return returnFunc(ctx, token) + } + if returnFunc, ok := ret.Get(0).(func(context.Context, string) []byte); ok { + r0 = returnFunc(ctx, token) + } else { + if 
ret.Get(0) != nil { + r0 = ret.Get(0).([]byte) + } + } + if returnFunc, ok := ret.Get(1).(func(context.Context, string) map[string]any); ok { + r1 = returnFunc(ctx, token) + } else { + if ret.Get(1) != nil { + r1 = ret.Get(1).(map[string]any) + } + } + if returnFunc, ok := ret.Get(2).(func(context.Context, string) error); ok { + r2 = returnFunc(ctx, token) + } else { + r2 = ret.Error(2) + } + return r0, r1, r2 +} + +// MockTokenizationUseCase_Detokenize_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Detokenize' +type MockTokenizationUseCase_Detokenize_Call struct { + *mock.Call +} + +// Detokenize is a helper method to define mock.On call +// - ctx context.Context +// - token string +func (_e *MockTokenizationUseCase_Expecter) Detokenize(ctx interface{}, token interface{}) *MockTokenizationUseCase_Detokenize_Call { + return &MockTokenizationUseCase_Detokenize_Call{Call: _e.mock.On("Detokenize", ctx, token)} +} + +func (_c *MockTokenizationUseCase_Detokenize_Call) Run(run func(ctx context.Context, token string)) *MockTokenizationUseCase_Detokenize_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 string + if args[1] != nil { + arg1 = args[1].(string) + } + run( + arg0, + arg1, + ) + }) + return _c +} + +func (_c *MockTokenizationUseCase_Detokenize_Call) Return(plaintext []byte, metadata map[string]any, err error) *MockTokenizationUseCase_Detokenize_Call { + _c.Call.Return(plaintext, metadata, err) + return _c +} + +func (_c *MockTokenizationUseCase_Detokenize_Call) RunAndReturn(run func(ctx context.Context, token string) ([]byte, map[string]any, error)) *MockTokenizationUseCase_Detokenize_Call { + _c.Call.Return(run) + return _c +} + +// Revoke provides a mock function for the type MockTokenizationUseCase +func (_mock *MockTokenizationUseCase) Revoke(ctx context.Context, token string) error { + ret := _mock.Called(ctx, token) + + if len(ret) == 0 { + panic("no return value specified for Revoke") + } + + var r0 error + if returnFunc, ok := ret.Get(0).(func(context.Context, string) error); ok { + r0 = returnFunc(ctx, token) + } else { + r0 = ret.Error(0) + } + return r0 +} + +// MockTokenizationUseCase_Revoke_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Revoke' +type MockTokenizationUseCase_Revoke_Call struct { + *mock.Call +} + +// Revoke is a helper method to define mock.On call +// - ctx context.Context +// - token string +func (_e *MockTokenizationUseCase_Expecter) Revoke(ctx interface{}, token interface{}) *MockTokenizationUseCase_Revoke_Call { + return &MockTokenizationUseCase_Revoke_Call{Call: _e.mock.On("Revoke", ctx, token)} +} + +func (_c *MockTokenizationUseCase_Revoke_Call) Run(run func(ctx context.Context, token string)) *MockTokenizationUseCase_Revoke_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 string + if args[1] != nil { + arg1 = args[1].(string) + } + run( + arg0, + arg1, + ) + }) + return _c +} + +func (_c *MockTokenizationUseCase_Revoke_Call) Return(err error) *MockTokenizationUseCase_Revoke_Call { + _c.Call.Return(err) + return _c +} + +func (_c *MockTokenizationUseCase_Revoke_Call) RunAndReturn(run func(ctx context.Context, token string) error) *MockTokenizationUseCase_Revoke_Call { + _c.Call.Return(run) + return _c +} + +// Tokenize provides a mock function for the type 
MockTokenizationUseCase +func (_mock *MockTokenizationUseCase) Tokenize(ctx context.Context, keyName string, plaintext []byte, metadata map[string]any, expiresAt *time.Time) (*domain0.Token, error) { + ret := _mock.Called(ctx, keyName, plaintext, metadata, expiresAt) + + if len(ret) == 0 { + panic("no return value specified for Tokenize") + } + + var r0 *domain0.Token + var r1 error + if returnFunc, ok := ret.Get(0).(func(context.Context, string, []byte, map[string]any, *time.Time) (*domain0.Token, error)); ok { + return returnFunc(ctx, keyName, plaintext, metadata, expiresAt) + } + if returnFunc, ok := ret.Get(0).(func(context.Context, string, []byte, map[string]any, *time.Time) *domain0.Token); ok { + r0 = returnFunc(ctx, keyName, plaintext, metadata, expiresAt) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*domain0.Token) + } + } + if returnFunc, ok := ret.Get(1).(func(context.Context, string, []byte, map[string]any, *time.Time) error); ok { + r1 = returnFunc(ctx, keyName, plaintext, metadata, expiresAt) + } else { + r1 = ret.Error(1) + } + return r0, r1 +} + +// MockTokenizationUseCase_Tokenize_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Tokenize' +type MockTokenizationUseCase_Tokenize_Call struct { + *mock.Call +} + +// Tokenize is a helper method to define mock.On call +// - ctx context.Context +// - keyName string +// - plaintext []byte +// - metadata map[string]any +// - expiresAt *time.Time +func (_e *MockTokenizationUseCase_Expecter) Tokenize(ctx interface{}, keyName interface{}, plaintext interface{}, metadata interface{}, expiresAt interface{}) *MockTokenizationUseCase_Tokenize_Call { + return &MockTokenizationUseCase_Tokenize_Call{Call: _e.mock.On("Tokenize", ctx, keyName, plaintext, metadata, expiresAt)} +} + +func (_c *MockTokenizationUseCase_Tokenize_Call) Run(run func(ctx context.Context, keyName string, plaintext []byte, metadata map[string]any, expiresAt *time.Time)) *MockTokenizationUseCase_Tokenize_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 string + if args[1] != nil { + arg1 = args[1].(string) + } + var arg2 []byte + if args[2] != nil { + arg2 = args[2].([]byte) + } + var arg3 map[string]any + if args[3] != nil { + arg3 = args[3].(map[string]any) + } + var arg4 *time.Time + if args[4] != nil { + arg4 = args[4].(*time.Time) + } + run( + arg0, + arg1, + arg2, + arg3, + arg4, + ) + }) + return _c +} + +func (_c *MockTokenizationUseCase_Tokenize_Call) Return(token *domain0.Token, err error) *MockTokenizationUseCase_Tokenize_Call { + _c.Call.Return(token, err) + return _c +} + +func (_c *MockTokenizationUseCase_Tokenize_Call) RunAndReturn(run func(ctx context.Context, keyName string, plaintext []byte, metadata map[string]any, expiresAt *time.Time) (*domain0.Token, error)) *MockTokenizationUseCase_Tokenize_Call { + _c.Call.Return(run) + return _c +} + +// Validate provides a mock function for the type MockTokenizationUseCase +func (_mock *MockTokenizationUseCase) Validate(ctx context.Context, token string) (bool, error) { + ret := _mock.Called(ctx, token) + + if len(ret) == 0 { + panic("no return value specified for Validate") + } + + var r0 bool + var r1 error + if returnFunc, ok := ret.Get(0).(func(context.Context, string) (bool, error)); ok { + return returnFunc(ctx, token) + } + if returnFunc, ok := ret.Get(0).(func(context.Context, string) bool); ok { + r0 = returnFunc(ctx, token) + } else { + r0 = 
ret.Get(0).(bool) + } + if returnFunc, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = returnFunc(ctx, token) + } else { + r1 = ret.Error(1) + } + return r0, r1 +} + +// MockTokenizationUseCase_Validate_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Validate' +type MockTokenizationUseCase_Validate_Call struct { + *mock.Call +} + +// Validate is a helper method to define mock.On call +// - ctx context.Context +// - token string +func (_e *MockTokenizationUseCase_Expecter) Validate(ctx interface{}, token interface{}) *MockTokenizationUseCase_Validate_Call { + return &MockTokenizationUseCase_Validate_Call{Call: _e.mock.On("Validate", ctx, token)} +} + +func (_c *MockTokenizationUseCase_Validate_Call) Run(run func(ctx context.Context, token string)) *MockTokenizationUseCase_Validate_Call { + _c.Call.Run(func(args mock.Arguments) { + var arg0 context.Context + if args[0] != nil { + arg0 = args[0].(context.Context) + } + var arg1 string + if args[1] != nil { + arg1 = args[1].(string) + } + run( + arg0, + arg1, + ) + }) + return _c +} + +func (_c *MockTokenizationUseCase_Validate_Call) Return(b bool, err error) *MockTokenizationUseCase_Validate_Call { + _c.Call.Return(b, err) + return _c +} + +func (_c *MockTokenizationUseCase_Validate_Call) RunAndReturn(run func(ctx context.Context, token string) (bool, error)) *MockTokenizationUseCase_Validate_Call { + _c.Call.Return(run) + return _c +} diff --git a/internal/tokenization/usecase/tokenization_key_metrics_decorator.go b/internal/tokenization/usecase/tokenization_key_metrics_decorator.go new file mode 100644 index 0000000..e6c10a7 --- /dev/null +++ b/internal/tokenization/usecase/tokenization_key_metrics_decorator.go @@ -0,0 +1,89 @@ +package usecase + +import ( + "context" + "time" + + "github.com/google/uuid" + + cryptoDomain "github.com/allisson/secrets/internal/crypto/domain" + "github.com/allisson/secrets/internal/metrics" + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" +) + +// tokenizationKeyUseCaseWithMetrics decorates TokenizationKeyUseCase with metrics instrumentation. +type tokenizationKeyUseCaseWithMetrics struct { + next TokenizationKeyUseCase + metrics metrics.BusinessMetrics +} + +// NewTokenizationKeyUseCaseWithMetrics wraps a TokenizationKeyUseCase with metrics recording. +func NewTokenizationKeyUseCaseWithMetrics( + useCase TokenizationKeyUseCase, + m metrics.BusinessMetrics, +) TokenizationKeyUseCase { + return &tokenizationKeyUseCaseWithMetrics{ + next: useCase, + metrics: m, + } +} + +// Create records metrics for tokenization key creation operations. +func (t *tokenizationKeyUseCaseWithMetrics) Create( + ctx context.Context, + name string, + formatType tokenizationDomain.FormatType, + isDeterministic bool, + alg cryptoDomain.Algorithm, +) (*tokenizationDomain.TokenizationKey, error) { + start := time.Now() + key, err := t.next.Create(ctx, name, formatType, isDeterministic, alg) + + status := "success" + if err != nil { + status = "error" + } + + t.metrics.RecordOperation(ctx, "tokenization", "tokenization_key_create", status) + t.metrics.RecordDuration(ctx, "tokenization", "tokenization_key_create", time.Since(start), status) + + return key, err +} + +// Rotate records metrics for tokenization key rotation operations. 
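As a usage sketch (not part of the diff itself): the decorator returns the same TokenizationKeyUseCase interface it wraps, so composition is a one-liner wherever the use case is built. `buildKeyUseCase` below is a hypothetical helper for illustration; the real wiring lives in internal/app/di.go, which this patch also updates.

```go
package app

import (
	"github.com/allisson/secrets/internal/metrics"
	"github.com/allisson/secrets/internal/tokenization/usecase"
)

// buildKeyUseCase is a hypothetical illustration: wrapping the base use case
// with the metrics decorator yields the same TokenizationKeyUseCase interface,
// so HTTP handlers and CLI commands never see the instrumentation layer.
func buildKeyUseCase(base usecase.TokenizationKeyUseCase, m metrics.BusinessMetrics) usecase.TokenizationKeyUseCase {
	return usecase.NewTokenizationKeyUseCaseWithMetrics(base, m)
}
```

Each wrapped call records one operation counter and one duration sample per invocation, labeled "success" or "error" based on the returned error, as the Create method above shows.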
+func (t *tokenizationKeyUseCaseWithMetrics) Rotate( + ctx context.Context, + name string, + formatType tokenizationDomain.FormatType, + isDeterministic bool, + alg cryptoDomain.Algorithm, +) (*tokenizationDomain.TokenizationKey, error) { + start := time.Now() + key, err := t.next.Rotate(ctx, name, formatType, isDeterministic, alg) + + status := "success" + if err != nil { + status = "error" + } + + t.metrics.RecordOperation(ctx, "tokenization", "tokenization_key_rotate", status) + t.metrics.RecordDuration(ctx, "tokenization", "tokenization_key_rotate", time.Since(start), status) + + return key, err +} + +// Delete records metrics for tokenization key deletion operations. +func (t *tokenizationKeyUseCaseWithMetrics) Delete(ctx context.Context, tokenizationKeyID uuid.UUID) error { + start := time.Now() + err := t.next.Delete(ctx, tokenizationKeyID) + + status := "success" + if err != nil { + status = "error" + } + + t.metrics.RecordOperation(ctx, "tokenization", "tokenization_key_delete", status) + t.metrics.RecordDuration(ctx, "tokenization", "tokenization_key_delete", time.Since(start), status) + + return err +} diff --git a/internal/tokenization/usecase/tokenization_key_metrics_decorator_test.go b/internal/tokenization/usecase/tokenization_key_metrics_decorator_test.go new file mode 100644 index 0000000..052d683 --- /dev/null +++ b/internal/tokenization/usecase/tokenization_key_metrics_decorator_test.go @@ -0,0 +1,336 @@ +package usecase + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + + cryptoDomain "github.com/allisson/secrets/internal/crypto/domain" + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" + tokenizationMocks "github.com/allisson/secrets/internal/tokenization/usecase/mocks" +) + +func TestNewTokenizationKeyUseCaseWithMetrics(t *testing.T) { + mockUseCase := tokenizationMocks.NewMockTokenizationKeyUseCase(t) + mockMetrics := &mockBusinessMetrics{} + + decorator := NewTokenizationKeyUseCaseWithMetrics(mockUseCase, mockMetrics) + + assert.NotNil(t, decorator) + assert.IsType(t, &tokenizationKeyUseCaseWithMetrics{}, decorator) +} + +func TestTokenizationKeyUseCaseWithMetrics_Create(t *testing.T) { + tests := []struct { + name string + setupMocks func(*tokenizationMocks.MockTokenizationKeyUseCase, *mockBusinessMetrics) + keyName string + formatType tokenizationDomain.FormatType + isDeterministic bool + algorithm cryptoDomain.Algorithm + expectedKey *tokenizationDomain.TokenizationKey + expectedErr error + expectedStatus string + }{ + { + name: "Success_RecordsSuccessMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationKeyUseCase, mockMetrics *mockBusinessMetrics) { + key := &tokenizationDomain.TokenizationKey{ + ID: uuid.New(), + Name: "test-key", + Version: 1, + FormatType: tokenizationDomain.FormatUUID, + IsDeterministic: false, + CreatedAt: time.Now().UTC(), + } + mockUseCase.EXPECT(). + Create(mock.Anything, "test-key", tokenizationDomain.FormatUUID, false, cryptoDomain.AESGCM). + Return(key, nil). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "tokenization_key_create", "success"). + Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "tokenization_key_create", mock.AnythingOfType("time.Duration"), "success"). 
+ Once() + }, + keyName: "test-key", + formatType: tokenizationDomain.FormatUUID, + isDeterministic: false, + algorithm: cryptoDomain.AESGCM, + expectedKey: &tokenizationDomain.TokenizationKey{ + ID: uuid.UUID{}, + Name: "test-key", + Version: 1, + FormatType: tokenizationDomain.FormatUUID, + IsDeterministic: false, + }, + expectedErr: nil, + expectedStatus: "success", + }, + { + name: "Success_DeterministicKey_RecordsSuccessMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationKeyUseCase, mockMetrics *mockBusinessMetrics) { + key := &tokenizationDomain.TokenizationKey{ + ID: uuid.New(), + Name: "deterministic-key", + Version: 1, + FormatType: tokenizationDomain.FormatAlphanumeric, + IsDeterministic: true, + CreatedAt: time.Now().UTC(), + } + mockUseCase.EXPECT(). + Create(mock.Anything, "deterministic-key", tokenizationDomain.FormatAlphanumeric, true, cryptoDomain.ChaCha20). + Return(key, nil). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "tokenization_key_create", "success"). + Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "tokenization_key_create", mock.AnythingOfType("time.Duration"), "success"). + Once() + }, + keyName: "deterministic-key", + formatType: tokenizationDomain.FormatAlphanumeric, + isDeterministic: true, + algorithm: cryptoDomain.ChaCha20, + expectedKey: &tokenizationDomain.TokenizationKey{ + ID: uuid.UUID{}, + Name: "deterministic-key", + Version: 1, + FormatType: tokenizationDomain.FormatAlphanumeric, + IsDeterministic: true, + }, + expectedErr: nil, + expectedStatus: "success", + }, + { + name: "Error_RecordsErrorMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationKeyUseCase, mockMetrics *mockBusinessMetrics) { + mockUseCase.EXPECT(). + Create(mock.Anything, "test-key", tokenizationDomain.FormatUUID, false, cryptoDomain.AESGCM). + Return(nil, errors.New("key creation failed")). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "tokenization_key_create", "error"). + Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "tokenization_key_create", mock.AnythingOfType("time.Duration"), "error"). 
+ Once() + }, + keyName: "test-key", + formatType: tokenizationDomain.FormatUUID, + isDeterministic: false, + algorithm: cryptoDomain.AESGCM, + expectedKey: nil, + expectedErr: errors.New("key creation failed"), + expectedStatus: "error", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + mockUseCase := tokenizationMocks.NewMockTokenizationKeyUseCase(t) + mockMetrics := &mockBusinessMetrics{} + tt.setupMocks(mockUseCase, mockMetrics) + + decorator := NewTokenizationKeyUseCaseWithMetrics(mockUseCase, mockMetrics) + + key, err := decorator.Create( + context.Background(), + tt.keyName, + tt.formatType, + tt.isDeterministic, + tt.algorithm, + ) + + if tt.expectedErr != nil { + assert.Error(t, err) + assert.Nil(t, key) + } else { + assert.NoError(t, err) + assert.NotNil(t, key) + assert.Equal(t, tt.expectedKey.Name, key.Name) + assert.Equal(t, tt.expectedKey.FormatType, key.FormatType) + assert.Equal(t, tt.expectedKey.IsDeterministic, key.IsDeterministic) + } + + mockMetrics.AssertExpectations(t) + mockUseCase.AssertExpectations(t) + }) + } +} + +func TestTokenizationKeyUseCaseWithMetrics_Rotate(t *testing.T) { + tests := []struct { + name string + setupMocks func(*tokenizationMocks.MockTokenizationKeyUseCase, *mockBusinessMetrics) + keyName string + formatType tokenizationDomain.FormatType + isDeterministic bool + algorithm cryptoDomain.Algorithm + expectedKey *tokenizationDomain.TokenizationKey + expectedErr error + expectedStatus string + }{ + { + name: "Success_RecordsSuccessMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationKeyUseCase, mockMetrics *mockBusinessMetrics) { + key := &tokenizationDomain.TokenizationKey{ + ID: uuid.New(), + Name: "test-key", + Version: 2, + FormatType: tokenizationDomain.FormatUUID, + IsDeterministic: false, + CreatedAt: time.Now().UTC(), + } + mockUseCase.EXPECT(). + Rotate(mock.Anything, "test-key", tokenizationDomain.FormatUUID, false, cryptoDomain.AESGCM). + Return(key, nil). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "tokenization_key_rotate", "success"). + Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "tokenization_key_rotate", mock.AnythingOfType("time.Duration"), "success"). + Once() + }, + keyName: "test-key", + formatType: tokenizationDomain.FormatUUID, + isDeterministic: false, + algorithm: cryptoDomain.AESGCM, + expectedKey: &tokenizationDomain.TokenizationKey{ + ID: uuid.UUID{}, + Name: "test-key", + Version: 2, + FormatType: tokenizationDomain.FormatUUID, + IsDeterministic: false, + }, + expectedErr: nil, + expectedStatus: "success", + }, + { + name: "Error_RecordsErrorMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationKeyUseCase, mockMetrics *mockBusinessMetrics) { + mockUseCase.EXPECT(). + Rotate(mock.Anything, "test-key", tokenizationDomain.FormatUUID, false, cryptoDomain.AESGCM). + Return(nil, errors.New("rotation failed")). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "tokenization_key_rotate", "error"). + Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "tokenization_key_rotate", mock.AnythingOfType("time.Duration"), "error"). 
+				Once()
+			},
+			keyName:         "test-key",
+			formatType:      tokenizationDomain.FormatUUID,
+			isDeterministic: false,
+			algorithm:       cryptoDomain.AESGCM,
+			expectedKey:     nil,
+			expectedErr:     errors.New("rotation failed"),
+			expectedStatus:  "error",
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			mockUseCase := tokenizationMocks.NewMockTokenizationKeyUseCase(t)
+			mockMetrics := &mockBusinessMetrics{}
+			tt.setupMocks(mockUseCase, mockMetrics)
+
+			decorator := NewTokenizationKeyUseCaseWithMetrics(mockUseCase, mockMetrics)
+
+			key, err := decorator.Rotate(
+				context.Background(),
+				tt.keyName,
+				tt.formatType,
+				tt.isDeterministic,
+				tt.algorithm,
+			)
+
+			if tt.expectedErr != nil {
+				assert.Error(t, err)
+				assert.Nil(t, key)
+			} else {
+				assert.NoError(t, err)
+				assert.NotNil(t, key)
+				assert.Equal(t, tt.expectedKey.Name, key.Name)
+				assert.Equal(t, tt.expectedKey.Version, key.Version)
+			}
+
+			mockMetrics.AssertExpectations(t)
+			mockUseCase.AssertExpectations(t)
+		})
+	}
+}
+
+func TestTokenizationKeyUseCaseWithMetrics_Delete(t *testing.T) {
+	tests := []struct {
+		name           string
+		expectedErr    error
+		expectedStatus string
+	}{
+		{
+			name:           "Success_RecordsSuccessMetrics",
+			expectedErr:    nil,
+			expectedStatus: "success",
+		},
+		{
+			name:           "Error_RecordsErrorMetrics",
+			expectedErr:    errors.New("key not found"),
+			expectedStatus: "error",
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			mockUseCase := tokenizationMocks.NewMockTokenizationKeyUseCase(t)
+			mockMetrics := &mockBusinessMetrics{}
+			// Expectations are set inline rather than via a table-level setupMocks
+			// closure: each run needs a fresh key ID that is shared between the mock
+			// expectation and the call under test.
+			testKeyID := uuid.New()
+			mockUseCase.EXPECT().
+				Delete(mock.Anything, testKeyID).
+				Return(tt.expectedErr).
+				Once()
+			mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "tokenization_key_delete", tt.expectedStatus).
+				Once()
+			mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "tokenization_key_delete", mock.AnythingOfType("time.Duration"), tt.expectedStatus).
+ Once() + + decorator := NewTokenizationKeyUseCaseWithMetrics(mockUseCase, mockMetrics) + + err := decorator.Delete(context.Background(), testKeyID) + + if tt.expectedErr != nil { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + + mockMetrics.AssertExpectations(t) + mockUseCase.AssertExpectations(t) + }) + } +} diff --git a/internal/tokenization/usecase/tokenization_key_usecase.go b/internal/tokenization/usecase/tokenization_key_usecase.go new file mode 100644 index 0000000..257952b --- /dev/null +++ b/internal/tokenization/usecase/tokenization_key_usecase.go @@ -0,0 +1,179 @@ +package usecase + +import ( + "context" + "time" + + "github.com/google/uuid" + + cryptoDomain "github.com/allisson/secrets/internal/crypto/domain" + cryptoService "github.com/allisson/secrets/internal/crypto/service" + "github.com/allisson/secrets/internal/database" + apperrors "github.com/allisson/secrets/internal/errors" + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" +) + +// tokenizationKeyUseCase implements TokenizationKeyUseCase for managing tokenization keys. +type tokenizationKeyUseCase struct { + txManager database.TxManager + tokenizationKeyRepo TokenizationKeyRepository + dekRepo DekRepository + keyManager cryptoService.KeyManager + kekChain *cryptoDomain.KekChain +} + +// getKek retrieves a KEK from the chain by its ID. +func (t *tokenizationKeyUseCase) getKek(kekID uuid.UUID) (*cryptoDomain.Kek, error) { + kek, ok := t.kekChain.Get(kekID) + if !ok { + return nil, cryptoDomain.ErrKekNotFound + } + return kek, nil +} + +// Create generates and persists a new tokenization key with version 1. +// Returns ErrTokenizationKeyAlreadyExists if a key with the same name already exists. +func (t *tokenizationKeyUseCase) Create( + ctx context.Context, + name string, + formatType tokenizationDomain.FormatType, + isDeterministic bool, + alg cryptoDomain.Algorithm, +) (*tokenizationDomain.TokenizationKey, error) { + // Validate format type + if err := formatType.Validate(); err != nil { + return nil, tokenizationDomain.ErrInvalidFormatType + } + + // Check if tokenization key with version 1 already exists + existingKey, err := t.tokenizationKeyRepo.GetByNameAndVersion(ctx, name, 1) + if err != nil && !apperrors.Is(err, tokenizationDomain.ErrTokenizationKeyNotFound) { + return nil, err + } + if existingKey != nil { + return nil, tokenizationDomain.ErrTokenizationKeyAlreadyExists + } + + // Get active KEK from chain + activeKek, err := t.getKek(t.kekChain.ActiveKekID()) + if err != nil { + return nil, err + } + + // Create DEK encrypted with active KEK + dek, err := t.keyManager.CreateDek(activeKek, alg) + if err != nil { + return nil, err + } + + // Persist DEK to database + if err := t.dekRepo.Create(ctx, &dek); err != nil { + return nil, err + } + + // Create tokenization key with version 1 + tokenizationKey := &tokenizationDomain.TokenizationKey{ + ID: uuid.Must(uuid.NewV7()), + Name: name, + Version: 1, + FormatType: formatType, + IsDeterministic: isDeterministic, + DekID: dek.ID, + CreatedAt: time.Now().UTC(), + } + + // Persist tokenization key + if err := t.tokenizationKeyRepo.Create(ctx, tokenizationKey); err != nil { + return nil, err + } + + return tokenizationKey, nil +} + +// Rotate creates a new version of an existing tokenization key. 
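+// Tokens minted under earlier versions remain detokenizable: each token stores the ID of
+// the key version that produced it, so its original DEK can still be resolved.
+//
+// Illustrative call (a sketch; the variable names are hypothetical):
+//
+//	key, err := uc.Rotate(ctx, "payment-cards", tokenizationDomain.FormatLuhnPreserving, true, cryptoDomain.AESGCM)
+//	// key.Version is the previous version + 1, or 1 when no key with that name existed.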
+func (t *tokenizationKeyUseCase) Rotate( + ctx context.Context, + name string, + formatType tokenizationDomain.FormatType, + isDeterministic bool, + alg cryptoDomain.Algorithm, +) (*tokenizationDomain.TokenizationKey, error) { + // Validate format type + if err := formatType.Validate(); err != nil { + return nil, tokenizationDomain.ErrInvalidFormatType + } + + var newKey *tokenizationDomain.TokenizationKey + + err := t.txManager.WithTx(ctx, func(txCtx context.Context) error { + // Get latest tokenization key version + currentKey, err := t.tokenizationKeyRepo.GetByName(txCtx, name) + if err != nil { + // If key doesn't exist, create first version + if apperrors.Is(err, tokenizationDomain.ErrTokenizationKeyNotFound) { + newKey, err = t.Create(txCtx, name, formatType, isDeterministic, alg) + return err + } + return err + } + + // Get active KEK from chain + activeKek, err := t.getKek(t.kekChain.ActiveKekID()) + if err != nil { + return err + } + + // Create new DEK encrypted with active KEK + dek, err := t.keyManager.CreateDek(activeKek, alg) + if err != nil { + return err + } + + // Persist new DEK + if err := t.dekRepo.Create(txCtx, &dek); err != nil { + return err + } + + // Create new tokenization key with incremented version + newKey = &tokenizationDomain.TokenizationKey{ + ID: uuid.Must(uuid.NewV7()), + Name: name, + Version: currentKey.Version + 1, + FormatType: formatType, + IsDeterministic: isDeterministic, + DekID: dek.ID, + CreatedAt: time.Now().UTC(), + } + + // Persist new tokenization key + return t.tokenizationKeyRepo.Create(txCtx, newKey) + }) + + if err != nil { + return nil, err + } + + return newKey, nil +} + +// Delete soft-deletes a tokenization key by setting its deleted_at timestamp. +func (t *tokenizationKeyUseCase) Delete(ctx context.Context, keyID uuid.UUID) error { + return t.tokenizationKeyRepo.Delete(ctx, keyID) +} + +// NewTokenizationKeyUseCase creates a new tokenization key use case instance. +func NewTokenizationKeyUseCase( + txManager database.TxManager, + tokenizationKeyRepo TokenizationKeyRepository, + dekRepo DekRepository, + keyManager cryptoService.KeyManager, + kekChain *cryptoDomain.KekChain, +) TokenizationKeyUseCase { + return &tokenizationKeyUseCase{ + txManager: txManager, + tokenizationKeyRepo: tokenizationKeyRepo, + dekRepo: dekRepo, + keyManager: keyManager, + kekChain: kekChain, + } +} diff --git a/internal/tokenization/usecase/tokenization_key_usecase_test.go b/internal/tokenization/usecase/tokenization_key_usecase_test.go new file mode 100644 index 0000000..84bf4bb --- /dev/null +++ b/internal/tokenization/usecase/tokenization_key_usecase_test.go @@ -0,0 +1,615 @@ +package usecase + +import ( + "context" + "errors" + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + + cryptoDomain "github.com/allisson/secrets/internal/crypto/domain" + cryptoServiceMocks "github.com/allisson/secrets/internal/crypto/service/mocks" + databaseMocks "github.com/allisson/secrets/internal/database/mocks" + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" + tokenizationMocks "github.com/allisson/secrets/internal/tokenization/usecase/mocks" +) + +// createKekChain creates a test KEK chain for tokenization key tests. 
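+// The single KEK in the chain has its plaintext Key field pre-populated, so tests can
+// exercise DEK creation without decrypting the KEK from a real master key.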
+func createKekChain(masterKey *cryptoDomain.MasterKey) *cryptoDomain.KekChain { + // Create a test KEK with plaintext key populated + kek := &cryptoDomain.Kek{ + ID: uuid.Must(uuid.NewV7()), + MasterKeyID: masterKey.ID, + Algorithm: cryptoDomain.AESGCM, + EncryptedKey: make([]byte, 32), + Key: make([]byte, 32), // Plaintext KEK key for testing + Nonce: make([]byte, 12), + Version: 1, + } + + // Create KEK chain with the test KEK (newest first) + kekChain := cryptoDomain.NewKekChain([]*cryptoDomain.Kek{kek}) + + return kekChain +} + +// getActiveKek is a helper to get the active KEK from a chain. +func getActiveKek(kekChain *cryptoDomain.KekChain) *cryptoDomain.Kek { + activeID := kekChain.ActiveKekID() + kek, ok := kekChain.Get(activeID) + if !ok { + panic("active KEK not found in chain") + } + return kek +} + +// TestTokenizationKeyUseCase_Create tests the Create method. +func TestTokenizationKeyUseCase_Create(t *testing.T) { + ctx := context.Background() + + t.Run("Success_CreateKeyWithUUIDFormat", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + + // Create test data + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + activeKek := getActiveKek(kekChain) + dek := cryptoDomain.Dek{ + ID: uuid.Must(uuid.NewV7()), + KekID: activeKek.ID, + Algorithm: cryptoDomain.AESGCM, + EncryptedKey: []byte("encrypted-dek"), + Nonce: []byte("nonce"), + } + + // Setup expectations + mockTokenizationKeyRepo.EXPECT(). + GetByNameAndVersion(ctx, "test-key", uint(1)). + Return(nil, tokenizationDomain.ErrTokenizationKeyNotFound). + Once() + + mockKeyManager.EXPECT(). + CreateDek(activeKek, cryptoDomain.AESGCM). + Return(dek, nil). + Once() + + mockDekRepo.EXPECT(). + Create(ctx, mock.MatchedBy(func(d *cryptoDomain.Dek) bool { + return d.ID == dek.ID && d.KekID == dek.KekID + })). + Return(nil). + Once() + + mockTokenizationKeyRepo.EXPECT(). + Create(ctx, mock.MatchedBy(func(key *tokenizationDomain.TokenizationKey) bool { + return key.Name == "test-key" && + key.FormatType == tokenizationDomain.FormatUUID && + key.Version == 1 && + key.IsDeterministic == false && + key.DekID == dek.ID + })). + Return(nil). 
+ Once() + + // Execute + uc := NewTokenizationKeyUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockDekRepo, + mockKeyManager, + kekChain, + ) + key, err := uc.Create(ctx, "test-key", tokenizationDomain.FormatUUID, false, cryptoDomain.AESGCM) + + // Assert + assert.NoError(t, err) + assert.NotNil(t, key) + assert.Equal(t, "test-key", key.Name) + assert.Equal(t, tokenizationDomain.FormatUUID, key.FormatType) + assert.Equal(t, uint(1), key.Version) + assert.False(t, key.IsDeterministic) + }) + + t.Run("Success_CreateKeyWithLuhnPreservingDeterministic", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + + // Create test data + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + activeKek := getActiveKek(kekChain) + dek := cryptoDomain.Dek{ + ID: uuid.Must(uuid.NewV7()), + KekID: activeKek.ID, + Algorithm: cryptoDomain.ChaCha20, + EncryptedKey: []byte("encrypted-dek"), + Nonce: []byte("nonce"), + } + + // Setup expectations + mockTokenizationKeyRepo.EXPECT(). + GetByNameAndVersion(ctx, "payment-cards", uint(1)). + Return(nil, tokenizationDomain.ErrTokenizationKeyNotFound). + Once() + + mockKeyManager.EXPECT(). + CreateDek(activeKek, cryptoDomain.ChaCha20). + Return(dek, nil). + Once() + + mockDekRepo.EXPECT(). + Create(ctx, mock.Anything). + Return(nil). + Once() + + mockTokenizationKeyRepo.EXPECT(). + Create(ctx, mock.MatchedBy(func(key *tokenizationDomain.TokenizationKey) bool { + return key.Name == "payment-cards" && + key.FormatType == tokenizationDomain.FormatLuhnPreserving && + key.Version == 1 && + key.IsDeterministic == true + })). + Return(nil). + Once() + + // Execute + uc := NewTokenizationKeyUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockDekRepo, + mockKeyManager, + kekChain, + ) + key, err := uc.Create( + ctx, + "payment-cards", + tokenizationDomain.FormatLuhnPreserving, + true, + cryptoDomain.ChaCha20, + ) + + // Assert + assert.NoError(t, err) + assert.NotNil(t, key) + assert.Equal(t, "payment-cards", key.Name) + assert.Equal(t, tokenizationDomain.FormatLuhnPreserving, key.FormatType) + assert.True(t, key.IsDeterministic) + }) + + t.Run("Error_KeyManagerCreateDekFails", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + + // Create test data + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + expectedError := errors.New("key manager error") + + // Setup expectations + mockTokenizationKeyRepo.EXPECT(). + GetByNameAndVersion(ctx, "test-key", uint(1)). + Return(nil, tokenizationDomain.ErrTokenizationKeyNotFound). + Once() + + mockKeyManager.EXPECT(). + CreateDek(mock.Anything, mock.Anything). + Return(cryptoDomain.Dek{}, expectedError). 
+ Once() + + // Execute + uc := NewTokenizationKeyUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockDekRepo, + mockKeyManager, + kekChain, + ) + key, err := uc.Create(ctx, "test-key", tokenizationDomain.FormatUUID, false, cryptoDomain.AESGCM) + + // Assert + assert.Error(t, err) + assert.Nil(t, key) + assert.Equal(t, expectedError, err) + }) + + t.Run("Error_DekRepositoryCreateFails", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + + // Create test data + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + activeKek := getActiveKek(kekChain) + dek := cryptoDomain.Dek{ + ID: uuid.Must(uuid.NewV7()), + KekID: activeKek.ID, + Algorithm: cryptoDomain.AESGCM, + EncryptedKey: []byte("encrypted-dek"), + Nonce: []byte("nonce"), + } + + expectedError := errors.New("database error") + + // Setup expectations + mockTokenizationKeyRepo.EXPECT(). + GetByNameAndVersion(ctx, "test-key", uint(1)). + Return(nil, tokenizationDomain.ErrTokenizationKeyNotFound). + Once() + + mockKeyManager.EXPECT(). + CreateDek(mock.Anything, mock.Anything). + Return(dek, nil). + Once() + + mockDekRepo.EXPECT(). + Create(ctx, mock.Anything). + Return(expectedError). + Once() + + // Execute + uc := NewTokenizationKeyUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockDekRepo, + mockKeyManager, + kekChain, + ) + key, err := uc.Create(ctx, "test-key", tokenizationDomain.FormatUUID, false, cryptoDomain.AESGCM) + + // Assert + assert.Error(t, err) + assert.Nil(t, key) + assert.Equal(t, expectedError, err) + }) + + t.Run("Error_TokenizationKeyRepositoryCreateFails", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + + // Create test data + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + activeKek := getActiveKek(kekChain) + dek := cryptoDomain.Dek{ + ID: uuid.Must(uuid.NewV7()), + KekID: activeKek.ID, + Algorithm: cryptoDomain.AESGCM, + EncryptedKey: []byte("encrypted-dek"), + Nonce: []byte("nonce"), + } + + expectedError := errors.New("key already exists") + + // Setup expectations + mockTokenizationKeyRepo.EXPECT(). + GetByNameAndVersion(ctx, "test-key", uint(1)). + Return(nil, tokenizationDomain.ErrTokenizationKeyNotFound). + Once() + + mockKeyManager.EXPECT(). + CreateDek(mock.Anything, mock.Anything). + Return(dek, nil). + Once() + + mockDekRepo.EXPECT(). + Create(ctx, mock.Anything). + Return(nil). + Once() + + mockTokenizationKeyRepo.EXPECT(). + Create(ctx, mock.Anything). + Return(expectedError). + Once() + + // Execute + uc := NewTokenizationKeyUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockDekRepo, + mockKeyManager, + kekChain, + ) + key, err := uc.Create(ctx, "test-key", tokenizationDomain.FormatUUID, false, cryptoDomain.AESGCM) + + // Assert + assert.Error(t, err) + assert.Nil(t, key) + assert.Equal(t, expectedError, err) + }) +} + +// TestTokenizationKeyUseCase_Rotate tests the Rotate method. 
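+// Covers both the version-increment path for an existing key and the fallback that
+// creates version 1 when no key with the given name exists yet.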
+func TestTokenizationKeyUseCase_Rotate(t *testing.T) { + ctx := context.Background() + + t.Run("Success_RotateKey", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + + // Create test data + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + existingKey := &tokenizationDomain.TokenizationKey{ + ID: uuid.Must(uuid.NewV7()), + Name: "test-key", + FormatType: tokenizationDomain.FormatNumeric, + Version: 1, + IsDeterministic: true, + DekID: uuid.Must(uuid.NewV7()), + } + + activeKek := getActiveKek(kekChain) + dek := cryptoDomain.Dek{ + ID: uuid.Must(uuid.NewV7()), + KekID: activeKek.ID, + Algorithm: cryptoDomain.AESGCM, + EncryptedKey: []byte("encrypted-dek"), + Nonce: []byte("nonce"), + } + + // Setup expectations + mockTxManager.EXPECT(). + WithTx(ctx, mock.AnythingOfType("func(context.Context) error")). + Run(func(ctx context.Context, fn func(context.Context) error) { + // Execute the transaction function + _ = fn(ctx) + }). + Return(nil). + Once() + + mockTokenizationKeyRepo.EXPECT(). + GetByName(mock.Anything, "test-key"). + Return(existingKey, nil). + Once() + + mockKeyManager.EXPECT(). + CreateDek(activeKek, cryptoDomain.AESGCM). + Return(dek, nil). + Once() + + mockDekRepo.EXPECT(). + Create(mock.Anything, mock.Anything). + Return(nil). + Once() + + mockTokenizationKeyRepo.EXPECT(). + Create(mock.Anything, mock.MatchedBy(func(key *tokenizationDomain.TokenizationKey) bool { + return key.Name == "test-key" && + key.FormatType == tokenizationDomain.FormatNumeric && + key.Version == 2 && // Version incremented + key.IsDeterministic == true && + key.DekID == dek.ID + })). + Return(nil). + Once() + + // Execute + uc := NewTokenizationKeyUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockDekRepo, + mockKeyManager, + kekChain, + ) + key, err := uc.Rotate(ctx, "test-key", tokenizationDomain.FormatNumeric, true, cryptoDomain.AESGCM) + + // Assert + assert.NoError(t, err) + assert.NotNil(t, key) + assert.Equal(t, "test-key", key.Name) + assert.Equal(t, uint(2), key.Version) + }) + + t.Run("Success_CreateFirstKeyWhenNoneExist", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + + // Create test data + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + activeKek := getActiveKek(kekChain) + dek := cryptoDomain.Dek{ + ID: uuid.Must(uuid.NewV7()), + KekID: activeKek.ID, + Algorithm: cryptoDomain.AESGCM, + EncryptedKey: []byte("encrypted-dek"), + Nonce: []byte("nonce"), + } + + // Setup expectations + mockTxManager.EXPECT(). + WithTx(ctx, mock.AnythingOfType("func(context.Context) error")). + Run(func(ctx context.Context, fn func(context.Context) error) { + // Execute the transaction function + _ = fn(ctx) + }). + Return(nil). + Once() + + mockTokenizationKeyRepo.EXPECT(). + GetByName(mock.Anything, "new-key"). + Return(nil, tokenizationDomain.ErrTokenizationKeyNotFound). 
+ Once() + + // Expectations for Create() call within transaction + mockTokenizationKeyRepo.EXPECT(). + GetByNameAndVersion(mock.Anything, "new-key", uint(1)). + Return(nil, tokenizationDomain.ErrTokenizationKeyNotFound). + Once() + + mockKeyManager.EXPECT(). + CreateDek(activeKek, cryptoDomain.AESGCM). + Return(dek, nil). + Once() + + mockDekRepo.EXPECT(). + Create(mock.Anything, mock.Anything). + Return(nil). + Once() + + mockTokenizationKeyRepo.EXPECT(). + Create(mock.Anything, mock.MatchedBy(func(key *tokenizationDomain.TokenizationKey) bool { + return key.Name == "new-key" && + key.FormatType == tokenizationDomain.FormatUUID && + key.Version == 1 + })). + Return(nil). + Once() + + // Execute + uc := NewTokenizationKeyUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockDekRepo, + mockKeyManager, + kekChain, + ) + key, err := uc.Rotate(ctx, "new-key", tokenizationDomain.FormatUUID, false, cryptoDomain.AESGCM) + + // Assert + assert.NoError(t, err) + assert.NotNil(t, key) + assert.Equal(t, "new-key", key.Name) + assert.Equal(t, uint(1), key.Version) + }) +} + +// TestTokenizationKeyUseCase_Delete tests the Delete method. +func TestTokenizationKeyUseCase_Delete(t *testing.T) { + ctx := context.Background() + + t.Run("Success_DeleteKey", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + + // Create test data + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + keyID := uuid.Must(uuid.NewV7()) + + // Setup expectations + mockTokenizationKeyRepo.EXPECT(). + Delete(ctx, keyID). + Return(nil). + Once() + + // Execute + uc := NewTokenizationKeyUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockDekRepo, + mockKeyManager, + kekChain, + ) + err := uc.Delete(ctx, keyID) + + // Assert + assert.NoError(t, err) + }) + + t.Run("Error_DeleteFails", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + + // Create test data + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + keyID := uuid.Must(uuid.NewV7()) + expectedError := errors.New("database error") + + // Setup expectations + mockTokenizationKeyRepo.EXPECT(). + Delete(ctx, keyID). + Return(expectedError). 
+ Once() + + // Execute + uc := NewTokenizationKeyUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockDekRepo, + mockKeyManager, + kekChain, + ) + err := uc.Delete(ctx, keyID) + + // Assert + assert.Error(t, err) + assert.Equal(t, expectedError, err) + }) +} diff --git a/internal/tokenization/usecase/tokenization_metrics_decorator.go b/internal/tokenization/usecase/tokenization_metrics_decorator.go new file mode 100644 index 0000000..ad0cac7 --- /dev/null +++ b/internal/tokenization/usecase/tokenization_metrics_decorator.go @@ -0,0 +1,119 @@ +package usecase + +import ( + "context" + "time" + + "github.com/allisson/secrets/internal/metrics" + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" +) + +// tokenizationUseCaseWithMetrics decorates TokenizationUseCase with metrics instrumentation. +type tokenizationUseCaseWithMetrics struct { + next TokenizationUseCase + metrics metrics.BusinessMetrics +} + +// NewTokenizationUseCaseWithMetrics wraps a TokenizationUseCase with metrics recording. +func NewTokenizationUseCaseWithMetrics( + useCase TokenizationUseCase, + m metrics.BusinessMetrics, +) TokenizationUseCase { + return &tokenizationUseCaseWithMetrics{ + next: useCase, + metrics: m, + } +} + +// Tokenize records metrics for token generation operations. +func (t *tokenizationUseCaseWithMetrics) Tokenize( + ctx context.Context, + keyName string, + plaintext []byte, + metadata map[string]any, + expiresAt *time.Time, +) (*tokenizationDomain.Token, error) { + start := time.Now() + token, err := t.next.Tokenize(ctx, keyName, plaintext, metadata, expiresAt) + + status := "success" + if err != nil { + status = "error" + } + + t.metrics.RecordOperation(ctx, "tokenization", "tokenize", status) + t.metrics.RecordDuration(ctx, "tokenization", "tokenize", time.Since(start), status) + + return token, err +} + +// Detokenize records metrics for token detokenization operations. +func (t *tokenizationUseCaseWithMetrics) Detokenize( + ctx context.Context, + token string, +) (plaintext []byte, metadata map[string]any, err error) { + start := time.Now() + plaintext, metadata, err = t.next.Detokenize(ctx, token) + + status := "success" + if err != nil { + status = "error" + } + + t.metrics.RecordOperation(ctx, "tokenization", "detokenize", status) + t.metrics.RecordDuration(ctx, "tokenization", "detokenize", time.Since(start), status) + + return plaintext, metadata, err +} + +// Validate records metrics for token validation operations. +func (t *tokenizationUseCaseWithMetrics) Validate(ctx context.Context, token string) (bool, error) { + start := time.Now() + valid, err := t.next.Validate(ctx, token) + + status := "success" + if err != nil { + status = "error" + } + + t.metrics.RecordOperation(ctx, "tokenization", "validate", status) + t.metrics.RecordDuration(ctx, "tokenization", "validate", time.Since(start), status) + + return valid, err +} + +// Revoke records metrics for token revocation operations. +func (t *tokenizationUseCaseWithMetrics) Revoke(ctx context.Context, token string) error { + start := time.Now() + err := t.next.Revoke(ctx, token) + + status := "success" + if err != nil { + status = "error" + } + + t.metrics.RecordOperation(ctx, "tokenization", "revoke", status) + t.metrics.RecordDuration(ctx, "tokenization", "revoke", time.Since(start), status) + + return err +} + +// CleanupExpired records metrics for expired token cleanup operations. 
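+// A minimal wiring sketch (assuming a concrete BusinessMetrics implementation is
+// available; names are hypothetical):
+//
+//	uc := NewTokenizationUseCaseWithMetrics(baseUseCase, businessMetrics)
+//	deleted, err := uc.CleanupExpired(ctx, 30, false) // records cleanup_expired operation and duration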
+func (t *tokenizationUseCaseWithMetrics) CleanupExpired( + ctx context.Context, + days int, + dryRun bool, +) (int64, error) { + start := time.Now() + count, err := t.next.CleanupExpired(ctx, days, dryRun) + + status := "success" + if err != nil { + status = "error" + } + + t.metrics.RecordOperation(ctx, "tokenization", "cleanup_expired", status) + t.metrics.RecordDuration(ctx, "tokenization", "cleanup_expired", time.Since(start), status) + + return count, err +} diff --git a/internal/tokenization/usecase/tokenization_metrics_decorator_test.go b/internal/tokenization/usecase/tokenization_metrics_decorator_test.go new file mode 100644 index 0000000..58275b2 --- /dev/null +++ b/internal/tokenization/usecase/tokenization_metrics_decorator_test.go @@ -0,0 +1,441 @@ +package usecase + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" + tokenizationMocks "github.com/allisson/secrets/internal/tokenization/usecase/mocks" +) + +// mockBusinessMetrics is a mock implementation of metrics.BusinessMetrics for testing. +type mockBusinessMetrics struct { + mock.Mock +} + +func (m *mockBusinessMetrics) RecordOperation(ctx context.Context, domain, operation, status string) { + m.Called(ctx, domain, operation, status) +} + +func (m *mockBusinessMetrics) RecordDuration( + ctx context.Context, + domain, operation string, + duration time.Duration, + status string, +) { + m.Called(ctx, domain, operation, duration, status) +} + +func TestNewTokenizationUseCaseWithMetrics(t *testing.T) { + mockUseCase := tokenizationMocks.NewMockTokenizationUseCase(t) + mockMetrics := &mockBusinessMetrics{} + + decorator := NewTokenizationUseCaseWithMetrics(mockUseCase, mockMetrics) + + assert.NotNil(t, decorator) + assert.IsType(t, &tokenizationUseCaseWithMetrics{}, decorator) +} + +func TestTokenizationUseCaseWithMetrics_Tokenize(t *testing.T) { + tests := []struct { + name string + setupMocks func(*tokenizationMocks.MockTokenizationUseCase, *mockBusinessMetrics) + keyName string + plaintext []byte + metadata map[string]any + expiresAt *time.Time + expectedToken *tokenizationDomain.Token + expectedErr error + expectedStatus string + }{ + { + name: "Success_RecordsSuccessMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationUseCase, mockMetrics *mockBusinessMetrics) { + hash := "hash" + token := &tokenizationDomain.Token{ + ID: uuid.New(), + Token: "test-token", + ValueHash: &hash, + } + mockUseCase.EXPECT(). + Tokenize(mock.Anything, "test-key", []byte("plaintext"), mock.Anything, mock.Anything). + Return(token, nil). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "tokenize", "success").Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "tokenize", mock.AnythingOfType("time.Duration"), "success"). + Once() + }, + keyName: "test-key", + plaintext: []byte("plaintext"), + metadata: map[string]any{"key": "value"}, + expiresAt: nil, + expectedToken: &tokenizationDomain.Token{ + ID: uuid.UUID{}, + Token: "test-token", + ValueHash: nil, + }, + expectedErr: nil, + expectedStatus: "success", + }, + { + name: "Error_RecordsErrorMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationUseCase, mockMetrics *mockBusinessMetrics) { + mockUseCase.EXPECT(). + Tokenize(mock.Anything, "test-key", []byte("plaintext"), mock.Anything, mock.Anything). 
+ Return(nil, errors.New("tokenization failed")). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "tokenize", "error").Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "tokenize", mock.AnythingOfType("time.Duration"), "error"). + Once() + }, + keyName: "test-key", + plaintext: []byte("plaintext"), + metadata: map[string]any{"key": "value"}, + expiresAt: nil, + expectedToken: nil, + expectedErr: errors.New("tokenization failed"), + expectedStatus: "error", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + mockUseCase := tokenizationMocks.NewMockTokenizationUseCase(t) + mockMetrics := &mockBusinessMetrics{} + tt.setupMocks(mockUseCase, mockMetrics) + + decorator := NewTokenizationUseCaseWithMetrics(mockUseCase, mockMetrics) + + token, err := decorator.Tokenize( + context.Background(), + tt.keyName, + tt.plaintext, + tt.metadata, + tt.expiresAt, + ) + + if tt.expectedErr != nil { + assert.Error(t, err) + assert.Nil(t, token) + } else { + assert.NoError(t, err) + assert.NotNil(t, token) + } + + mockMetrics.AssertExpectations(t) + mockUseCase.AssertExpectations(t) + }) + } +} + +func TestTokenizationUseCaseWithMetrics_Detokenize(t *testing.T) { + tests := []struct { + name string + setupMocks func(*tokenizationMocks.MockTokenizationUseCase, *mockBusinessMetrics) + token string + expectedPlaintext []byte + expectedMetadata map[string]any + expectedErr error + expectedStatus string + }{ + { + name: "Success_RecordsSuccessMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationUseCase, mockMetrics *mockBusinessMetrics) { + mockUseCase.EXPECT(). + Detokenize(mock.Anything, "test-token"). + Return([]byte("plaintext"), map[string]any{"key": "value"}, nil). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "detokenize", "success"). + Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "detokenize", mock.AnythingOfType("time.Duration"), "success"). + Once() + }, + token: "test-token", + expectedPlaintext: []byte("plaintext"), + expectedMetadata: map[string]any{"key": "value"}, + expectedErr: nil, + expectedStatus: "success", + }, + { + name: "Error_RecordsErrorMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationUseCase, mockMetrics *mockBusinessMetrics) { + mockUseCase.EXPECT(). + Detokenize(mock.Anything, "invalid-token"). + Return(nil, nil, errors.New("token not found")). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "detokenize", "error").Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "detokenize", mock.AnythingOfType("time.Duration"), "error"). 
+ Once() + }, + token: "invalid-token", + expectedPlaintext: nil, + expectedMetadata: nil, + expectedErr: errors.New("token not found"), + expectedStatus: "error", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + mockUseCase := tokenizationMocks.NewMockTokenizationUseCase(t) + mockMetrics := &mockBusinessMetrics{} + tt.setupMocks(mockUseCase, mockMetrics) + + decorator := NewTokenizationUseCaseWithMetrics(mockUseCase, mockMetrics) + + plaintext, metadata, err := decorator.Detokenize(context.Background(), tt.token) + + if tt.expectedErr != nil { + assert.Error(t, err) + assert.Nil(t, plaintext) + assert.Nil(t, metadata) + } else { + assert.NoError(t, err) + assert.Equal(t, tt.expectedPlaintext, plaintext) + assert.Equal(t, tt.expectedMetadata, metadata) + } + + mockMetrics.AssertExpectations(t) + mockUseCase.AssertExpectations(t) + }) + } +} + +func TestTokenizationUseCaseWithMetrics_Validate(t *testing.T) { + tests := []struct { + name string + setupMocks func(*tokenizationMocks.MockTokenizationUseCase, *mockBusinessMetrics) + token string + expectedValid bool + expectedErr error + expectedStatus string + }{ + { + name: "Success_ValidToken_RecordsSuccessMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationUseCase, mockMetrics *mockBusinessMetrics) { + mockUseCase.EXPECT(). + Validate(mock.Anything, "valid-token"). + Return(true, nil). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "validate", "success").Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "validate", mock.AnythingOfType("time.Duration"), "success"). + Once() + }, + token: "valid-token", + expectedValid: true, + expectedErr: nil, + expectedStatus: "success", + }, + { + name: "Success_InvalidToken_RecordsSuccessMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationUseCase, mockMetrics *mockBusinessMetrics) { + mockUseCase.EXPECT(). + Validate(mock.Anything, "invalid-token"). + Return(false, nil). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "validate", "success").Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "validate", mock.AnythingOfType("time.Duration"), "success"). + Once() + }, + token: "invalid-token", + expectedValid: false, + expectedErr: nil, + expectedStatus: "success", + }, + { + name: "Error_RecordsErrorMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationUseCase, mockMetrics *mockBusinessMetrics) { + mockUseCase.EXPECT(). + Validate(mock.Anything, "error-token"). + Return(false, errors.New("validation error")). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "validate", "error").Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "validate", mock.AnythingOfType("time.Duration"), "error"). 
+ Once() + }, + token: "error-token", + expectedValid: false, + expectedErr: errors.New("validation error"), + expectedStatus: "error", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + mockUseCase := tokenizationMocks.NewMockTokenizationUseCase(t) + mockMetrics := &mockBusinessMetrics{} + tt.setupMocks(mockUseCase, mockMetrics) + + decorator := NewTokenizationUseCaseWithMetrics(mockUseCase, mockMetrics) + + valid, err := decorator.Validate(context.Background(), tt.token) + + if tt.expectedErr != nil { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + assert.Equal(t, tt.expectedValid, valid) + + mockMetrics.AssertExpectations(t) + mockUseCase.AssertExpectations(t) + }) + } +} + +func TestTokenizationUseCaseWithMetrics_Revoke(t *testing.T) { + tests := []struct { + name string + setupMocks func(*tokenizationMocks.MockTokenizationUseCase, *mockBusinessMetrics) + token string + expectedErr error + expectedStatus string + }{ + { + name: "Success_RecordsSuccessMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationUseCase, mockMetrics *mockBusinessMetrics) { + mockUseCase.EXPECT(). + Revoke(mock.Anything, "test-token"). + Return(nil). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "revoke", "success").Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "revoke", mock.AnythingOfType("time.Duration"), "success"). + Once() + }, + token: "test-token", + expectedErr: nil, + expectedStatus: "success", + }, + { + name: "Error_RecordsErrorMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationUseCase, mockMetrics *mockBusinessMetrics) { + mockUseCase.EXPECT(). + Revoke(mock.Anything, "invalid-token"). + Return(errors.New("token not found")). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "revoke", "error").Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "revoke", mock.AnythingOfType("time.Duration"), "error"). + Once() + }, + token: "invalid-token", + expectedErr: errors.New("token not found"), + expectedStatus: "error", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + mockUseCase := tokenizationMocks.NewMockTokenizationUseCase(t) + mockMetrics := &mockBusinessMetrics{} + tt.setupMocks(mockUseCase, mockMetrics) + + decorator := NewTokenizationUseCaseWithMetrics(mockUseCase, mockMetrics) + + err := decorator.Revoke(context.Background(), tt.token) + + if tt.expectedErr != nil { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + + mockMetrics.AssertExpectations(t) + mockUseCase.AssertExpectations(t) + }) + } +} + +func TestTokenizationUseCaseWithMetrics_CleanupExpired(t *testing.T) { + tests := []struct { + name string + setupMocks func(*tokenizationMocks.MockTokenizationUseCase, *mockBusinessMetrics) + days int + dryRun bool + expectedCount int64 + expectedErr error + expectedStatus string + }{ + { + name: "Success_RecordsSuccessMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationUseCase, mockMetrics *mockBusinessMetrics) { + mockUseCase.EXPECT(). + CleanupExpired(mock.Anything, 30, false). + Return(int64(10), nil). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "cleanup_expired", "success"). + Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "cleanup_expired", mock.AnythingOfType("time.Duration"), "success"). 
+ Once() + }, + days: 30, + dryRun: false, + expectedCount: 10, + expectedErr: nil, + expectedStatus: "success", + }, + { + name: "Success_DryRun_RecordsSuccessMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationUseCase, mockMetrics *mockBusinessMetrics) { + mockUseCase.EXPECT(). + CleanupExpired(mock.Anything, 7, true). + Return(int64(5), nil). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "cleanup_expired", "success"). + Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "cleanup_expired", mock.AnythingOfType("time.Duration"), "success"). + Once() + }, + days: 7, + dryRun: true, + expectedCount: 5, + expectedErr: nil, + expectedStatus: "success", + }, + { + name: "Error_RecordsErrorMetrics", + setupMocks: func(mockUseCase *tokenizationMocks.MockTokenizationUseCase, mockMetrics *mockBusinessMetrics) { + mockUseCase.EXPECT(). + CleanupExpired(mock.Anything, 30, false). + Return(int64(0), errors.New("cleanup failed")). + Once() + mockMetrics.On("RecordOperation", mock.Anything, "tokenization", "cleanup_expired", "error"). + Once() + mockMetrics.On("RecordDuration", mock.Anything, "tokenization", "cleanup_expired", mock.AnythingOfType("time.Duration"), "error"). + Once() + }, + days: 30, + dryRun: false, + expectedCount: 0, + expectedErr: errors.New("cleanup failed"), + expectedStatus: "error", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + mockUseCase := tokenizationMocks.NewMockTokenizationUseCase(t) + mockMetrics := &mockBusinessMetrics{} + tt.setupMocks(mockUseCase, mockMetrics) + + decorator := NewTokenizationUseCaseWithMetrics(mockUseCase, mockMetrics) + + count, err := decorator.CleanupExpired(context.Background(), tt.days, tt.dryRun) + + if tt.expectedErr != nil { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + assert.Equal(t, tt.expectedCount, count) + + mockMetrics.AssertExpectations(t) + mockUseCase.AssertExpectations(t) + }) + } +} diff --git a/internal/tokenization/usecase/tokenization_usecase.go b/internal/tokenization/usecase/tokenization_usecase.go new file mode 100644 index 0000000..057c753 --- /dev/null +++ b/internal/tokenization/usecase/tokenization_usecase.go @@ -0,0 +1,276 @@ +// Package usecase implements tokenization business logic. +// +// Coordinates token generation, encryption, and lifecycle management with configurable +// deterministic behavior. Uses TxManager for transactional consistency. +package usecase + +import ( + "context" + "time" + + "github.com/google/uuid" + + cryptoDomain "github.com/allisson/secrets/internal/crypto/domain" + cryptoService "github.com/allisson/secrets/internal/crypto/service" + "github.com/allisson/secrets/internal/database" + apperrors "github.com/allisson/secrets/internal/errors" + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" + tokenizationService "github.com/allisson/secrets/internal/tokenization/service" +) + +// tokenizationUseCase implements TokenizationUseCase for managing tokenization operations. +type tokenizationUseCase struct { + txManager database.TxManager + tokenizationRepo TokenizationKeyRepository + tokenRepo TokenRepository + dekRepo DekRepository + aeadManager cryptoService.AEADManager + keyManager cryptoService.KeyManager + hashService HashService + kekChain *cryptoDomain.KekChain +} + +// getKek retrieves a KEK from the chain by its ID. 
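+// Returns cryptoDomain.ErrKekNotFound when the given ID is not present in the chain.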
+func (t *tokenizationUseCase) getKek(kekID uuid.UUID) (*cryptoDomain.Kek, error) {
+	kek, ok := t.kekChain.Get(kekID)
+	if !ok {
+		return nil, cryptoDomain.ErrKekNotFound
+	}
+	return kek, nil
+}
+
+// Tokenize generates a token for the given plaintext value using the latest version of the named key.
+// In deterministic mode, returns the existing token when the value has been tokenized before and
+// that token is still valid; an expired or revoked match is replaced with a new token.
+// Metadata is optional display data (e.g., last 4 digits) stored unencrypted.
+func (t *tokenizationUseCase) Tokenize(
+	ctx context.Context,
+	keyName string,
+	plaintext []byte,
+	metadata map[string]any,
+	expiresAt *time.Time,
+) (*tokenizationDomain.Token, error) {
+	// Get latest tokenization key version
+	tokenizationKey, err := t.tokenizationRepo.GetByName(ctx, keyName)
+	if err != nil {
+		return nil, err
+	}
+
+	// In deterministic mode, check if token already exists for this value
+	if tokenizationKey.IsDeterministic {
+		valueHash := t.hashService.Hash(plaintext)
+		existingToken, err := t.tokenRepo.GetByValueHash(ctx, tokenizationKey.ID, valueHash)
+		if err != nil && !apperrors.Is(err, tokenizationDomain.ErrTokenNotFound) {
+			return nil, err
+		}
+		if existingToken != nil {
+			// Return existing valid token
+			if existingToken.IsValid() {
+				return existingToken, nil
+			}
+			// Existing token is expired or revoked - proceed to create new token
+		}
+	}
+
+	// Get DEK by tokenization key's DekID
+	dek, err := t.dekRepo.Get(ctx, tokenizationKey.DekID)
+	if err != nil {
+		return nil, err
+	}
+
+	// Get KEK for decrypting DEK
+	kek, err := t.getKek(dek.KekID)
+	if err != nil {
+		return nil, err
+	}
+
+	// Decrypt DEK with KEK
+	dekKey, err := t.keyManager.DecryptDek(dek, kek)
+	if err != nil {
+		return nil, err
+	}
+	defer cryptoDomain.Zero(dekKey)
+
+	// Create AEAD cipher with decrypted DEK
+	cipher, err := t.aeadManager.CreateCipher(dekKey, dek.Algorithm)
+	if err != nil {
+		return nil, err
+	}
+
+	// Encrypt plaintext
+	ciphertext, nonce, err := cipher.Encrypt(plaintext, nil)
+	if err != nil {
+		return nil, apperrors.Wrap(err, "failed to encrypt plaintext")
+	}
+
+	// Generate token using appropriate generator
+	generator, err := tokenizationService.NewTokenGenerator(tokenizationKey.FormatType)
+	if err != nil {
+		return nil, err
+	}
+
+	// For format-preserving tokens, use plaintext length as hint
+	tokenLength := len(plaintext)
+	tokenValue, err := generator.Generate(tokenLength)
+	if err != nil {
+		return nil, apperrors.Wrap(err, "failed to generate token")
+	}
+
+	// Create token record
+	token := &tokenizationDomain.Token{
+		ID:                uuid.Must(uuid.NewV7()),
+		TokenizationKeyID: tokenizationKey.ID,
+		Token:             tokenValue,
+		ValueHash:         nil,
+		Ciphertext:        ciphertext,
+		Nonce:             nonce,
+		Metadata:          metadata,
+		CreatedAt:         time.Now().UTC(),
+		ExpiresAt:         expiresAt,
+		RevokedAt:         nil,
+	}
+
+	// In deterministic mode, store value hash for lookup
+	if tokenizationKey.IsDeterministic {
+		valueHash := t.hashService.Hash(plaintext)
+		token.ValueHash = &valueHash
+	}
+
+	// Persist token
+	if err := t.tokenRepo.Create(ctx, token); err != nil {
+		return nil, err
+	}
+
+	return token, nil
+}
+
+// Detokenize retrieves the original plaintext value for a given token.
+// Returns ErrTokenNotFound if the token doesn't exist, ErrTokenExpired if expired, ErrTokenRevoked if revoked.
+// Security Note: Callers MUST zero the returned plaintext after use: cryptoDomain.Zero(plaintext).
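+//
+// Illustrative caller (a sketch; variable names are hypothetical):
+//
+//	plaintext, metadata, err := uc.Detokenize(ctx, tokenValue)
+//	if err != nil {
+//		return err
+//	}
+//	defer cryptoDomain.Zero(plaintext)
+//	_ = metadata // unencrypted display data, e.g. last 4 digits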
+func (t *tokenizationUseCase) Detokenize( + ctx context.Context, + token string, +) (plaintext []byte, metadata map[string]any, err error) { + // Get token record + tokenRecord, err := t.tokenRepo.GetByToken(ctx, token) + if err != nil { + return nil, nil, err + } + + // Validate token is not expired + if tokenRecord.IsExpired() { + return nil, nil, tokenizationDomain.ErrTokenExpired + } + + // Validate token is not revoked + if tokenRecord.IsRevoked() { + return nil, nil, tokenizationDomain.ErrTokenRevoked + } + + // Get tokenization key to retrieve its DekID + tokenizationKey, err := t.tokenizationRepo.Get(ctx, tokenRecord.TokenizationKeyID) + if err != nil { + return nil, nil, err + } + + // Get DEK + dek, err := t.dekRepo.Get(ctx, tokenizationKey.DekID) + if err != nil { + return nil, nil, err + } + + // Get KEK for decrypting DEK + kek, err := t.getKek(dek.KekID) + if err != nil { + return nil, nil, err + } + + // Decrypt DEK with KEK + dekKey, err := t.keyManager.DecryptDek(dek, kek) + if err != nil { + return nil, nil, err + } + defer cryptoDomain.Zero(dekKey) + + // Create AEAD cipher with decrypted DEK + cipher, err := t.aeadManager.CreateCipher(dekKey, dek.Algorithm) + if err != nil { + return nil, nil, err + } + + // Decrypt ciphertext with nonce + plaintext, err = cipher.Decrypt(tokenRecord.Ciphertext, tokenRecord.Nonce, nil) + if err != nil { + return nil, nil, cryptoDomain.ErrDecryptionFailed + } + + return plaintext, tokenRecord.Metadata, nil +} + +// Validate checks if a token exists and is valid (not expired or revoked). +func (t *tokenizationUseCase) Validate(ctx context.Context, token string) (bool, error) { + // Get token record + tokenRecord, err := t.tokenRepo.GetByToken(ctx, token) + if err != nil { + if apperrors.Is(err, tokenizationDomain.ErrTokenNotFound) { + return false, nil + } + return false, err + } + + // Check if token is valid + return tokenRecord.IsValid(), nil +} + +// Revoke marks a token as revoked, preventing further detokenization. +func (t *tokenizationUseCase) Revoke(ctx context.Context, token string) error { + // Verify token exists first + _, err := t.tokenRepo.GetByToken(ctx, token) + if err != nil { + return err + } + + // Revoke the token + return t.tokenRepo.Revoke(ctx, token) +} + +// CleanupExpired deletes tokens that expired more than the specified number of days ago. +// Returns the number of deleted tokens. Use dryRun=true to preview count without deletion. +func (t *tokenizationUseCase) CleanupExpired(ctx context.Context, days int, dryRun bool) (int64, error) { + if days < 0 { + return 0, apperrors.New("days must be non-negative") + } + + // Calculate the cutoff timestamp (days ago from now in UTC) + cutoff := time.Now().UTC().AddDate(0, 0, -days) + + if dryRun { + // In dry run mode, count expired tokens without deleting + return t.tokenRepo.CountExpired(ctx, cutoff) + } + + // Delete expired tokens + return t.tokenRepo.DeleteExpired(ctx, cutoff) +} + +// NewTokenizationUseCase creates a new TokenizationUseCase with injected dependencies. 
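+// All collaborators except kekChain are interfaces, which keeps the use case mockable in
+// tests. A wiring sketch (argument names are hypothetical):
+//
+//	uc := NewTokenizationUseCase(txManager, keyRepo, tokenRepo, dekRepo,
+//		aeadManager, keyManager, hashService, kekChain)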
+func NewTokenizationUseCase( + txManager database.TxManager, + tokenizationRepo TokenizationKeyRepository, + tokenRepo TokenRepository, + dekRepo DekRepository, + aeadManager cryptoService.AEADManager, + keyManager cryptoService.KeyManager, + hashService HashService, + kekChain *cryptoDomain.KekChain, +) TokenizationUseCase { + return &tokenizationUseCase{ + txManager: txManager, + tokenizationRepo: tokenizationRepo, + tokenRepo: tokenRepo, + dekRepo: dekRepo, + aeadManager: aeadManager, + keyManager: keyManager, + hashService: hashService, + kekChain: kekChain, + } +} diff --git a/internal/tokenization/usecase/tokenization_usecase_test.go b/internal/tokenization/usecase/tokenization_usecase_test.go new file mode 100644 index 0000000..e4b8a13 --- /dev/null +++ b/internal/tokenization/usecase/tokenization_usecase_test.go @@ -0,0 +1,1680 @@ +package usecase + +import ( + "context" + "errors" + "testing" + "time" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + + cryptoDomain "github.com/allisson/secrets/internal/crypto/domain" + cryptoServiceMocks "github.com/allisson/secrets/internal/crypto/service/mocks" + databaseMocks "github.com/allisson/secrets/internal/database/mocks" + tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain" + tokenizationMocks "github.com/allisson/secrets/internal/tokenization/usecase/mocks" +) + +// TestTokenizationUseCase_Tokenize tests the Tokenize method. +func TestTokenizationUseCase_Tokenize(t *testing.T) { + ctx := context.Background() + + t.Run("Success_NonDeterministicMode", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + // Create test data + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + activeKek := getActiveKek(kekChain) + dekID := uuid.Must(uuid.NewV7()) + tokenizationKeyID := uuid.Must(uuid.NewV7()) + + tokenizationKey := &tokenizationDomain.TokenizationKey{ + ID: tokenizationKeyID, + DekID: dekID, + Name: "test-key", + FormatType: tokenizationDomain.FormatUUID, + IsDeterministic: false, + Version: 1, + } + + dek := &cryptoDomain.Dek{ + ID: dekID, + KekID: activeKek.ID, + Algorithm: cryptoDomain.AESGCM, + EncryptedKey: []byte("encrypted-dek"), + Nonce: []byte("nonce"), + } + + dekKey := make([]byte, 32) + plaintext := []byte("test-value") + ciphertext := []byte("encrypted-value") + nonce := []byte("test-nonce") + metadata := map[string]any{"last4": "alue"} + expiresAt := time.Now().UTC().Add(24 * time.Hour) + + // Create mock cipher + mockCipher := cryptoServiceMocks.NewMockAEAD(t) + + // Setup expectations + mockTokenizationKeyRepo.EXPECT(). + GetByName(ctx, "test-key"). + Return(tokenizationKey, nil). + Once() + + mockDekRepo.EXPECT(). + Get(ctx, dekID). + Return(dek, nil). + Once() + + mockKeyManager.EXPECT(). + DecryptDek(dek, activeKek). + Return(dekKey, nil). + Once() + + mockAEADManager.EXPECT(). + CreateCipher(dekKey, cryptoDomain.AESGCM). + Return(mockCipher, nil). + Once() + + mockCipher.EXPECT(). 
+ Encrypt(plaintext, mock.Anything). + Return(ciphertext, nonce, nil). + Once() + + mockTokenRepo.EXPECT(). + Create(ctx, mock.MatchedBy(func(token *tokenizationDomain.Token) bool { + return token.TokenizationKeyID == tokenizationKeyID && + len(token.Token) > 0 && + token.ValueHash == nil && + string(token.Ciphertext) == string(ciphertext) && + string(token.Nonce) == string(nonce) && + token.ExpiresAt.Equal(expiresAt) + })). + Return(nil). + Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + token, err := uc.Tokenize(ctx, "test-key", plaintext, metadata, &expiresAt) + + // Assert + assert.NoError(t, err) + assert.NotNil(t, token) + assert.Equal(t, tokenizationKeyID, token.TokenizationKeyID) + assert.NotEmpty(t, token.Token) + assert.Nil(t, token.ValueHash) + assert.Equal(t, ciphertext, token.Ciphertext) + assert.Equal(t, nonce, token.Nonce) + assert.Equal(t, metadata, token.Metadata) + assert.Equal(t, expiresAt, *token.ExpiresAt) + }) + + t.Run("Success_DeterministicMode_NewToken", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + // Create test data + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + activeKek := getActiveKek(kekChain) + dekID := uuid.Must(uuid.NewV7()) + tokenizationKeyID := uuid.Must(uuid.NewV7()) + + tokenizationKey := &tokenizationDomain.TokenizationKey{ + ID: tokenizationKeyID, + DekID: dekID, + Name: "test-key", + FormatType: tokenizationDomain.FormatLuhnPreserving, + IsDeterministic: true, + Version: 1, + } + + dek := &cryptoDomain.Dek{ + ID: dekID, + KekID: activeKek.ID, + Algorithm: cryptoDomain.AESGCM, + EncryptedKey: []byte("encrypted-dek"), + Nonce: []byte("nonce"), + } + + dekKey := make([]byte, 32) + plaintext := []byte("4111111111111111") + valueHash := "hash-of-plaintext" + ciphertext := []byte("encrypted-value") + nonce := []byte("test-nonce") + + // Create mock cipher + mockCipher := cryptoServiceMocks.NewMockAEAD(t) + + // Setup expectations + mockTokenizationKeyRepo.EXPECT(). + GetByName(ctx, "test-key"). + Return(tokenizationKey, nil). + Once() + + mockHashService.EXPECT(). + Hash(plaintext). + Return(valueHash). + Once() + + mockTokenRepo.EXPECT(). + GetByValueHash(ctx, tokenizationKeyID, valueHash). + Return(nil, tokenizationDomain.ErrTokenNotFound). + Once() + + mockDekRepo.EXPECT(). + Get(ctx, dekID). + Return(dek, nil). + Once() + + mockKeyManager.EXPECT(). + DecryptDek(dek, activeKek). + Return(dekKey, nil). + Once() + + mockAEADManager.EXPECT(). + CreateCipher(dekKey, cryptoDomain.AESGCM). + Return(mockCipher, nil). + Once() + + mockCipher.EXPECT(). + Encrypt(plaintext, mock.Anything). + Return(ciphertext, nonce, nil). + Once() + + mockHashService.EXPECT(). + Hash(plaintext). + Return(valueHash). + Once() + + mockTokenRepo.EXPECT(). 
+ Create(ctx, mock.MatchedBy(func(token *tokenizationDomain.Token) bool { + return token.TokenizationKeyID == tokenizationKeyID && + len(token.Token) > 0 && + token.ValueHash != nil && + *token.ValueHash == valueHash && + string(token.Ciphertext) == string(ciphertext) && + string(token.Nonce) == string(nonce) + })). + Return(nil). + Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + token, err := uc.Tokenize(ctx, "test-key", plaintext, nil, nil) + + // Assert + assert.NoError(t, err) + assert.NotNil(t, token) + assert.Equal(t, tokenizationKeyID, token.TokenizationKeyID) + assert.NotEmpty(t, token.Token) + assert.NotNil(t, token.ValueHash) + assert.Equal(t, valueHash, *token.ValueHash) + assert.Equal(t, ciphertext, token.Ciphertext) + assert.Equal(t, nonce, token.Nonce) + }) + + t.Run("Success_DeterministicMode_ExistingValidToken", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + // Create test data + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + tokenizationKeyID := uuid.Must(uuid.NewV7()) + plaintext := []byte("test-value") + valueHash := "hash-of-plaintext" + existingTokenValue := "existing-token-123" + + tokenizationKey := &tokenizationDomain.TokenizationKey{ + ID: tokenizationKeyID, + DekID: uuid.Must(uuid.NewV7()), + Name: "test-key", + FormatType: tokenizationDomain.FormatUUID, + IsDeterministic: true, + Version: 1, + } + + existingToken := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: tokenizationKeyID, + Token: existingTokenValue, + ValueHash: &valueHash, + Ciphertext: []byte("existing-ciphertext"), + Nonce: []byte("existing-nonce"), + CreatedAt: time.Now().UTC().Add(-1 * time.Hour), + ExpiresAt: nil, // No expiration + RevokedAt: nil, // Not revoked + } + + // Setup expectations + mockTokenizationKeyRepo.EXPECT(). + GetByName(ctx, "test-key"). + Return(tokenizationKey, nil). + Once() + + mockHashService.EXPECT(). + Hash(plaintext). + Return(valueHash). + Once() + + mockTokenRepo.EXPECT(). + GetByValueHash(ctx, tokenizationKeyID, valueHash). + Return(existingToken, nil). 
+ Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + token, err := uc.Tokenize(ctx, "test-key", plaintext, nil, nil) + + // Assert + assert.NoError(t, err) + assert.NotNil(t, token) + assert.Equal(t, existingToken.ID, token.ID) + assert.Equal(t, existingTokenValue, token.Token) + assert.Equal(t, valueHash, *token.ValueHash) + }) + + t.Run("Success_DeterministicMode_ExpiredTokenCreatesNew", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + // Create test data + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + activeKek := getActiveKek(kekChain) + dekID := uuid.Must(uuid.NewV7()) + tokenizationKeyID := uuid.Must(uuid.NewV7()) + plaintext := []byte("test-value") + valueHash := "hash-of-plaintext" + expiredTime := time.Now().UTC().Add(-1 * time.Hour) + + tokenizationKey := &tokenizationDomain.TokenizationKey{ + ID: tokenizationKeyID, + DekID: dekID, + Name: "test-key", + FormatType: tokenizationDomain.FormatUUID, + IsDeterministic: true, + Version: 1, + } + + expiredToken := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: tokenizationKeyID, + Token: "expired-token", + ValueHash: &valueHash, + Ciphertext: []byte("old-ciphertext"), + Nonce: []byte("old-nonce"), + CreatedAt: time.Now().UTC().Add(-2 * time.Hour), + ExpiresAt: &expiredTime, // Expired + RevokedAt: nil, + } + + dek := &cryptoDomain.Dek{ + ID: dekID, + KekID: activeKek.ID, + Algorithm: cryptoDomain.AESGCM, + EncryptedKey: []byte("encrypted-dek"), + Nonce: []byte("nonce"), + } + + dekKey := make([]byte, 32) + ciphertext := []byte("new-encrypted-value") + nonce := []byte("new-nonce") + + // Create mock cipher + mockCipher := cryptoServiceMocks.NewMockAEAD(t) + + // Setup expectations + mockTokenizationKeyRepo.EXPECT(). + GetByName(ctx, "test-key"). + Return(tokenizationKey, nil). + Once() + + mockHashService.EXPECT(). + Hash(plaintext). + Return(valueHash). + Once() + + mockTokenRepo.EXPECT(). + GetByValueHash(ctx, tokenizationKeyID, valueHash). + Return(expiredToken, nil). + Once() + + mockDekRepo.EXPECT(). + Get(ctx, dekID). + Return(dek, nil). + Once() + + mockKeyManager.EXPECT(). + DecryptDek(dek, activeKek). + Return(dekKey, nil). + Once() + + mockAEADManager.EXPECT(). + CreateCipher(dekKey, cryptoDomain.AESGCM). + Return(mockCipher, nil). + Once() + + mockCipher.EXPECT(). + Encrypt(plaintext, mock.Anything). + Return(ciphertext, nonce, nil). + Once() + + mockHashService.EXPECT(). + Hash(plaintext). + Return(valueHash). + Once() + + mockTokenRepo.EXPECT(). + Create(ctx, mock.MatchedBy(func(token *tokenizationDomain.Token) bool { + return token.TokenizationKeyID == tokenizationKeyID && + len(token.Token) > 0 && + token.ValueHash != nil && + *token.ValueHash == valueHash && + string(token.Ciphertext) == string(ciphertext) + })). + Return(nil). 
+ Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + token, err := uc.Tokenize(ctx, "test-key", plaintext, nil, nil) + + // Assert + assert.NoError(t, err) + assert.NotNil(t, token) + assert.NotEqual(t, expiredToken.ID, token.ID) // Should be a new token + assert.NotEqual(t, "expired-token", token.Token) + }) + + t.Run("Error_TokenizationKeyNotFound", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + // Setup expectations + mockTokenizationKeyRepo.EXPECT(). + GetByName(ctx, "nonexistent-key"). + Return(nil, tokenizationDomain.ErrTokenizationKeyNotFound). + Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + token, err := uc.Tokenize(ctx, "nonexistent-key", []byte("test"), nil, nil) + + // Assert + assert.Nil(t, token) + assert.Equal(t, tokenizationDomain.ErrTokenizationKeyNotFound, err) + }) + + t.Run("Error_DekNotFound", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + dekID := uuid.Must(uuid.NewV7()) + tokenizationKey := &tokenizationDomain.TokenizationKey{ + ID: uuid.Must(uuid.NewV7()), + DekID: dekID, + Name: "test-key", + FormatType: tokenizationDomain.FormatUUID, + IsDeterministic: false, + Version: 1, + } + + // Setup expectations + mockTokenizationKeyRepo.EXPECT(). + GetByName(ctx, "test-key"). + Return(tokenizationKey, nil). + Once() + + mockDekRepo.EXPECT(). + Get(ctx, dekID). + Return(nil, cryptoDomain.ErrDekNotFound). 
+ Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + token, err := uc.Tokenize(ctx, "test-key", []byte("test"), nil, nil) + + // Assert + assert.Nil(t, token) + assert.Equal(t, cryptoDomain.ErrDekNotFound, err) + }) + + t.Run("Error_KekNotFound", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + dekID := uuid.Must(uuid.NewV7()) + nonexistentKekID := uuid.Must(uuid.NewV7()) // KEK not in chain + + tokenizationKey := &tokenizationDomain.TokenizationKey{ + ID: uuid.Must(uuid.NewV7()), + DekID: dekID, + Name: "test-key", + FormatType: tokenizationDomain.FormatUUID, + IsDeterministic: false, + Version: 1, + } + + dek := &cryptoDomain.Dek{ + ID: dekID, + KekID: nonexistentKekID, // References KEK not in chain + Algorithm: cryptoDomain.AESGCM, + EncryptedKey: []byte("encrypted-dek"), + Nonce: []byte("nonce"), + } + + // Setup expectations + mockTokenizationKeyRepo.EXPECT(). + GetByName(ctx, "test-key"). + Return(tokenizationKey, nil). + Once() + + mockDekRepo.EXPECT(). + Get(ctx, dekID). + Return(dek, nil). + Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + token, err := uc.Tokenize(ctx, "test-key", []byte("test"), nil, nil) + + // Assert + assert.Nil(t, token) + assert.Equal(t, cryptoDomain.ErrKekNotFound, err) + }) + + t.Run("Error_EncryptionFails", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + activeKek := getActiveKek(kekChain) + dekID := uuid.Must(uuid.NewV7()) + + tokenizationKey := &tokenizationDomain.TokenizationKey{ + ID: uuid.Must(uuid.NewV7()), + DekID: dekID, + Name: "test-key", + FormatType: tokenizationDomain.FormatUUID, + IsDeterministic: false, + Version: 1, + } + + dek := &cryptoDomain.Dek{ + ID: dekID, + KekID: activeKek.ID, + Algorithm: cryptoDomain.AESGCM, + EncryptedKey: []byte("encrypted-dek"), + Nonce: []byte("nonce"), + } + + dekKey := make([]byte, 32) + plaintext := []byte("test-value") + + mockCipher := cryptoServiceMocks.NewMockAEAD(t) + encryptionError := errors.New("encryption failed") + + // Setup expectations + mockTokenizationKeyRepo.EXPECT(). 
+ GetByName(ctx, "test-key"). + Return(tokenizationKey, nil). + Once() + + mockDekRepo.EXPECT(). + Get(ctx, dekID). + Return(dek, nil). + Once() + + mockKeyManager.EXPECT(). + DecryptDek(dek, activeKek). + Return(dekKey, nil). + Once() + + mockAEADManager.EXPECT(). + CreateCipher(dekKey, cryptoDomain.AESGCM). + Return(mockCipher, nil). + Once() + + mockCipher.EXPECT(). + Encrypt(plaintext, mock.Anything). + Return(nil, nil, encryptionError). + Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + token, err := uc.Tokenize(ctx, "test-key", plaintext, nil, nil) + + // Assert + assert.Nil(t, token) + assert.Error(t, err) + assert.Contains(t, err.Error(), "failed to encrypt plaintext") + }) +} + +// TestTokenizationUseCase_Detokenize tests the Detokenize method. +func TestTokenizationUseCase_Detokenize(t *testing.T) { + ctx := context.Background() + + t.Run("Success_DetokenizeValid", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + // Create test data + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + activeKek := getActiveKek(kekChain) + dekID := uuid.Must(uuid.NewV7()) + tokenizationKeyID := uuid.Must(uuid.NewV7()) + tokenValue := "test-token-123" + plaintext := []byte("original-value") + ciphertext := []byte("encrypted-value") + nonce := []byte("test-nonce") + metadata := map[string]any{"last4": "alue"} + + tokenRecord := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: tokenizationKeyID, + Token: tokenValue, + Ciphertext: ciphertext, + Nonce: nonce, + Metadata: metadata, + CreatedAt: time.Now().UTC(), + ExpiresAt: nil, + RevokedAt: nil, + } + + tokenizationKey := &tokenizationDomain.TokenizationKey{ + ID: tokenizationKeyID, + DekID: dekID, + Name: "test-key", + FormatType: tokenizationDomain.FormatUUID, + IsDeterministic: false, + Version: 1, + } + + dek := &cryptoDomain.Dek{ + ID: dekID, + KekID: activeKek.ID, + Algorithm: cryptoDomain.AESGCM, + EncryptedKey: []byte("encrypted-dek"), + Nonce: []byte("dek-nonce"), + } + + dekKey := make([]byte, 32) + mockCipher := cryptoServiceMocks.NewMockAEAD(t) + + // Setup expectations + mockTokenRepo.EXPECT(). + GetByToken(ctx, tokenValue). + Return(tokenRecord, nil). + Once() + + mockTokenizationKeyRepo.EXPECT(). + Get(ctx, tokenizationKeyID). + Return(tokenizationKey, nil). + Once() + + mockDekRepo.EXPECT(). + Get(ctx, dekID). + Return(dek, nil). + Once() + + mockKeyManager.EXPECT(). + DecryptDek(dek, activeKek). + Return(dekKey, nil). + Once() + + mockAEADManager.EXPECT(). + CreateCipher(dekKey, cryptoDomain.AESGCM). + Return(mockCipher, nil). + Once() + + mockCipher.EXPECT(). + Decrypt(ciphertext, nonce, mock.Anything). + Return(plaintext, nil). 
+ Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + resultPlaintext, resultMetadata, err := uc.Detokenize(ctx, tokenValue) + + // Assert + assert.NoError(t, err) + assert.Equal(t, plaintext, resultPlaintext) + assert.Equal(t, metadata, resultMetadata) + }) + + t.Run("Error_TokenNotFound", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + // Setup expectations + mockTokenRepo.EXPECT(). + GetByToken(ctx, "nonexistent-token"). + Return(nil, tokenizationDomain.ErrTokenNotFound). + Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + plaintext, metadata, err := uc.Detokenize(ctx, "nonexistent-token") + + // Assert + assert.Nil(t, plaintext) + assert.Nil(t, metadata) + assert.Equal(t, tokenizationDomain.ErrTokenNotFound, err) + }) + + t.Run("Error_TokenExpired", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + expiredTime := time.Now().UTC().Add(-1 * time.Hour) + tokenValue := "expired-token" + + tokenRecord := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: uuid.Must(uuid.NewV7()), + Token: tokenValue, + Ciphertext: []byte("ciphertext"), + Nonce: []byte("nonce"), + CreatedAt: time.Now().UTC().Add(-2 * time.Hour), + ExpiresAt: &expiredTime, + RevokedAt: nil, + } + + // Setup expectations + mockTokenRepo.EXPECT(). + GetByToken(ctx, tokenValue). + Return(tokenRecord, nil). 
+ Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + plaintext, metadata, err := uc.Detokenize(ctx, tokenValue) + + // Assert + assert.Nil(t, plaintext) + assert.Nil(t, metadata) + assert.Equal(t, tokenizationDomain.ErrTokenExpired, err) + }) + + t.Run("Error_TokenRevoked", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + revokedTime := time.Now().UTC().Add(-30 * time.Minute) + tokenValue := "revoked-token" + + tokenRecord := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: uuid.Must(uuid.NewV7()), + Token: tokenValue, + Ciphertext: []byte("ciphertext"), + Nonce: []byte("nonce"), + CreatedAt: time.Now().UTC().Add(-1 * time.Hour), + ExpiresAt: nil, + RevokedAt: &revokedTime, + } + + // Setup expectations + mockTokenRepo.EXPECT(). + GetByToken(ctx, tokenValue). + Return(tokenRecord, nil). + Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + plaintext, metadata, err := uc.Detokenize(ctx, tokenValue) + + // Assert + assert.Nil(t, plaintext) + assert.Nil(t, metadata) + assert.Equal(t, tokenizationDomain.ErrTokenRevoked, err) + }) + + t.Run("Error_DecryptionFails", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + activeKek := getActiveKek(kekChain) + dekID := uuid.Must(uuid.NewV7()) + tokenizationKeyID := uuid.Must(uuid.NewV7()) + tokenValue := "test-token" + + tokenRecord := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: tokenizationKeyID, + Token: tokenValue, + Ciphertext: []byte("corrupted-ciphertext"), + Nonce: []byte("nonce"), + CreatedAt: time.Now().UTC(), + ExpiresAt: nil, + RevokedAt: nil, + } + + tokenizationKey := &tokenizationDomain.TokenizationKey{ + ID: tokenizationKeyID, + DekID: dekID, + Name: "test-key", + FormatType: tokenizationDomain.FormatUUID, + IsDeterministic: false, + Version: 1, + } + + dek := &cryptoDomain.Dek{ + ID: dekID, + KekID: activeKek.ID, + Algorithm: cryptoDomain.AESGCM, + EncryptedKey: []byte("encrypted-dek"), + Nonce: []byte("dek-nonce"), + } + + dekKey := make([]byte, 32) + mockCipher 
:= cryptoServiceMocks.NewMockAEAD(t) + decryptionError := errors.New("decryption failed") + + // Setup expectations + mockTokenRepo.EXPECT(). + GetByToken(ctx, tokenValue). + Return(tokenRecord, nil). + Once() + + mockTokenizationKeyRepo.EXPECT(). + Get(ctx, tokenizationKeyID). + Return(tokenizationKey, nil). + Once() + + mockDekRepo.EXPECT(). + Get(ctx, dekID). + Return(dek, nil). + Once() + + mockKeyManager.EXPECT(). + DecryptDek(dek, activeKek). + Return(dekKey, nil). + Once() + + mockAEADManager.EXPECT(). + CreateCipher(dekKey, cryptoDomain.AESGCM). + Return(mockCipher, nil). + Once() + + mockCipher.EXPECT(). + Decrypt(tokenRecord.Ciphertext, tokenRecord.Nonce, mock.Anything). + Return(nil, decryptionError). + Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + plaintext, metadata, err := uc.Detokenize(ctx, tokenValue) + + // Assert + assert.Nil(t, plaintext) + assert.Nil(t, metadata) + assert.Equal(t, cryptoDomain.ErrDecryptionFailed, err) + }) +} + +// TestTokenizationUseCase_Validate tests the Validate method. +func TestTokenizationUseCase_Validate(t *testing.T) { + ctx := context.Background() + + t.Run("Success_ValidToken", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + tokenValue := "valid-token" + tokenRecord := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: uuid.Must(uuid.NewV7()), + Token: tokenValue, + Ciphertext: []byte("ciphertext"), + Nonce: []byte("nonce"), + CreatedAt: time.Now().UTC(), + ExpiresAt: nil, + RevokedAt: nil, + } + + // Setup expectations + mockTokenRepo.EXPECT(). + GetByToken(ctx, tokenValue). + Return(tokenRecord, nil). 
+ Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + isValid, err := uc.Validate(ctx, tokenValue) + + // Assert + assert.NoError(t, err) + assert.True(t, isValid) + }) + + t.Run("Success_ExpiredToken", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + expiredTime := time.Now().UTC().Add(-1 * time.Hour) + tokenValue := "expired-token" + tokenRecord := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: uuid.Must(uuid.NewV7()), + Token: tokenValue, + Ciphertext: []byte("ciphertext"), + Nonce: []byte("nonce"), + CreatedAt: time.Now().UTC().Add(-2 * time.Hour), + ExpiresAt: &expiredTime, + RevokedAt: nil, + } + + // Setup expectations + mockTokenRepo.EXPECT(). + GetByToken(ctx, tokenValue). + Return(tokenRecord, nil). + Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + isValid, err := uc.Validate(ctx, tokenValue) + + // Assert + assert.NoError(t, err) + assert.False(t, isValid) + }) + + t.Run("Success_TokenNotFound", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + // Setup expectations + mockTokenRepo.EXPECT(). + GetByToken(ctx, "nonexistent-token"). + Return(nil, tokenizationDomain.ErrTokenNotFound). 
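+			// Validate maps a missing token to a negative result rather than
+			// an error: the assertions below expect (false, nil), not
+			// ErrTokenNotFound. Only unexpected repository failures surface
+			// as errors (see the next subtest).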
+ Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + isValid, err := uc.Validate(ctx, "nonexistent-token") + + // Assert + assert.NoError(t, err) + assert.False(t, isValid) + }) + + t.Run("Error_RepositoryError", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + dbError := errors.New("database error") + + // Setup expectations + mockTokenRepo.EXPECT(). + GetByToken(ctx, "test-token"). + Return(nil, dbError). + Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + isValid, err := uc.Validate(ctx, "test-token") + + // Assert + assert.False(t, isValid) + assert.Equal(t, dbError, err) + }) +} + +// TestTokenizationUseCase_Revoke tests the Revoke method. +func TestTokenizationUseCase_Revoke(t *testing.T) { + ctx := context.Background() + + t.Run("Success_RevokeToken", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + tokenValue := "token-to-revoke" + tokenRecord := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: uuid.Must(uuid.NewV7()), + Token: tokenValue, + Ciphertext: []byte("ciphertext"), + Nonce: []byte("nonce"), + CreatedAt: time.Now().UTC(), + ExpiresAt: nil, + RevokedAt: nil, + } + + // Setup expectations + mockTokenRepo.EXPECT(). + GetByToken(ctx, tokenValue). + Return(tokenRecord, nil). + Once() + + mockTokenRepo.EXPECT(). + Revoke(ctx, tokenValue). + Return(nil). 
+ Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + err := uc.Revoke(ctx, tokenValue) + + // Assert + assert.NoError(t, err) + }) + + t.Run("Error_TokenNotFound", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + // Setup expectations + mockTokenRepo.EXPECT(). + GetByToken(ctx, "nonexistent-token"). + Return(nil, tokenizationDomain.ErrTokenNotFound). + Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + err := uc.Revoke(ctx, "nonexistent-token") + + // Assert + assert.Equal(t, tokenizationDomain.ErrTokenNotFound, err) + }) + + t.Run("Error_RevokeFails", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + tokenValue := "test-token" + tokenRecord := &tokenizationDomain.Token{ + ID: uuid.Must(uuid.NewV7()), + TokenizationKeyID: uuid.Must(uuid.NewV7()), + Token: tokenValue, + Ciphertext: []byte("ciphertext"), + Nonce: []byte("nonce"), + CreatedAt: time.Now().UTC(), + ExpiresAt: nil, + RevokedAt: nil, + } + + dbError := errors.New("database error") + + // Setup expectations + mockTokenRepo.EXPECT(). + GetByToken(ctx, tokenValue). + Return(tokenRecord, nil). + Once() + + mockTokenRepo.EXPECT(). + Revoke(ctx, tokenValue). + Return(dbError). + Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + err := uc.Revoke(ctx, tokenValue) + + // Assert + assert.Equal(t, dbError, err) + }) +} + +// TestTokenizationUseCase_CleanupExpired tests the CleanupExpired method. 
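The CleanupExpired contract these subtests pin down is compact enough to sketch before the test bodies. The reconstruction below is inferred from the mock expectations that follow, not copied from the patch: the CountExpired/DeleteExpired repository methods, the cutoff arithmetic, and the error message come straight from the tests, while the receiver and field names reuse the constructor at the top of this file.

```go
// Sketch inferred from the tests below; not the patch's verbatim implementation.
func (u *tokenizationUseCase) CleanupExpired(ctx context.Context, days int, dryRun bool) (int64, error) {
	if days < 0 {
		return 0, errors.New("days must be non-negative")
	}
	// Tokens whose expires_at falls before the cutoff are eligible for cleanup.
	cutoff := time.Now().UTC().AddDate(0, 0, -days)
	if dryRun {
		// Dry-run mode only reports how many rows would be removed.
		return u.tokenRepo.CountExpired(ctx, cutoff)
	}
	return u.tokenRepo.DeleteExpired(ctx, cutoff)
}
```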
+func TestTokenizationUseCase_CleanupExpired(t *testing.T) { + ctx := context.Background() + + t.Run("Success_DryRunMode", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + // Setup expectations + mockTokenRepo.EXPECT(). + CountExpired(ctx, mock.MatchedBy(func(cutoff time.Time) bool { + // Verify cutoff is approximately 7 days ago + expectedCutoff := time.Now().UTC().AddDate(0, 0, -7) + // Allow 2 second variance for test execution time + return cutoff.After(expectedCutoff.Add(-2*time.Second)) && + cutoff.Before(expectedCutoff.Add(2*time.Second)) + })). + Return(int64(42), nil). + Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + count, err := uc.CleanupExpired(ctx, 7, true) + + // Assert + assert.NoError(t, err) + assert.Equal(t, int64(42), count) + }) + + t.Run("Success_DeleteMode", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + // Setup expectations + mockTokenRepo.EXPECT(). + DeleteExpired(ctx, mock.MatchedBy(func(cutoff time.Time) bool { + // Verify cutoff is approximately 30 days ago + expectedCutoff := time.Now().UTC().AddDate(0, 0, -30) + // Allow 2 second variance for test execution time + return cutoff.After(expectedCutoff.Add(-2*time.Second)) && + cutoff.Before(expectedCutoff.Add(2*time.Second)) + })). + Return(int64(100), nil). 
+ Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + count, err := uc.CleanupExpired(ctx, 30, false) + + // Assert + assert.NoError(t, err) + assert.Equal(t, int64(100), count) + }) + + t.Run("Error_NegativeDays", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + count, err := uc.CleanupExpired(ctx, -1, false) + + // Assert + assert.Equal(t, int64(0), count) + assert.Error(t, err) + assert.Contains(t, err.Error(), "days must be non-negative") + }) + + t.Run("Error_RepositoryError", func(t *testing.T) { + // Setup mocks + mockTxManager := databaseMocks.NewMockTxManager(t) + mockTokenizationKeyRepo := tokenizationMocks.NewMockTokenizationKeyRepository(t) + mockTokenRepo := tokenizationMocks.NewMockTokenRepository(t) + mockDekRepo := tokenizationMocks.NewMockDekRepository(t) + mockAEADManager := cryptoServiceMocks.NewMockAEADManager(t) + mockKeyManager := cryptoServiceMocks.NewMockKeyManager(t) + mockHashService := tokenizationMocks.NewMockHashService(t) + + masterKey := &cryptoDomain.MasterKey{ + ID: "test-master-key", + Key: make([]byte, 32), + } + kekChain := createKekChain(masterKey) + defer kekChain.Close() + + dbError := errors.New("database error") + + // Setup expectations + mockTokenRepo.EXPECT(). + DeleteExpired(ctx, mock.AnythingOfType("time.Time")). + Return(int64(0), dbError). 
+ Once() + + // Create use case + uc := NewTokenizationUseCase( + mockTxManager, + mockTokenizationKeyRepo, + mockTokenRepo, + mockDekRepo, + mockAEADManager, + mockKeyManager, + mockHashService, + kekChain, + ) + + // Execute + count, err := uc.CleanupExpired(ctx, 7, false) + + // Assert + assert.Equal(t, int64(0), count) + assert.Equal(t, dbError, err) + }) +} diff --git a/migrations/mysql/000002_add_tokenization.down.sql b/migrations/mysql/000002_add_tokenization.down.sql new file mode 100644 index 0000000..e935a2f --- /dev/null +++ b/migrations/mysql/000002_add_tokenization.down.sql @@ -0,0 +1,5 @@ +-- Drop tokenization_tokens table +DROP TABLE IF EXISTS tokenization_tokens; + +-- Drop tokenization_keys table +DROP TABLE IF EXISTS tokenization_keys; diff --git a/migrations/mysql/000002_add_tokenization.up.sql b/migrations/mysql/000002_add_tokenization.up.sql new file mode 100644 index 0000000..4dd6e36 --- /dev/null +++ b/migrations/mysql/000002_add_tokenization.up.sql @@ -0,0 +1,34 @@ +-- Create tokenization_keys table +CREATE TABLE IF NOT EXISTS tokenization_keys ( + id BINARY(16) PRIMARY KEY, + name VARCHAR(255) NOT NULL, + version INT NOT NULL, + format_type VARCHAR(50) NOT NULL, + is_deterministic BOOLEAN NOT NULL, + dek_id BINARY(16) NOT NULL, + created_at DATETIME(6) NOT NULL, + deleted_at DATETIME(6), + UNIQUE KEY unique_name_version (name, version), + CONSTRAINT fk_tokenization_keys_dek_id FOREIGN KEY (dek_id) REFERENCES deks(id), + INDEX idx_tokenization_keys_name (name, deleted_at) +); + +-- Create tokenization_tokens table +CREATE TABLE IF NOT EXISTS tokenization_tokens ( + id BINARY(16) PRIMARY KEY, + tokenization_key_id BINARY(16) NOT NULL, + token VARCHAR(255) NOT NULL, + value_hash VARCHAR(64), + ciphertext BLOB NOT NULL, + nonce BLOB NOT NULL, + metadata JSON, + created_at DATETIME(6) NOT NULL, + expires_at DATETIME(6), + revoked_at DATETIME(6), + UNIQUE KEY unique_token (token), + CONSTRAINT fk_tokenization_tokens_key_id FOREIGN KEY (tokenization_key_id) REFERENCES tokenization_keys(id), + INDEX idx_tokenization_tokens_key_id (tokenization_key_id), + INDEX idx_tokenization_tokens_value_hash (value_hash), + INDEX idx_tokenization_tokens_created_at (created_at), + INDEX idx_tokenization_tokens_expires_at (expires_at) +); diff --git a/migrations/postgresql/000002_add_tokenization.down.sql b/migrations/postgresql/000002_add_tokenization.down.sql new file mode 100644 index 0000000..7453930 --- /dev/null +++ b/migrations/postgresql/000002_add_tokenization.down.sql @@ -0,0 +1,10 @@ +-- Drop tokenization_tokens table +DROP INDEX IF EXISTS idx_tokenization_tokens_expires_at; +DROP INDEX IF EXISTS idx_tokenization_tokens_created_at; +DROP INDEX IF EXISTS idx_tokenization_tokens_value_hash; +DROP INDEX IF EXISTS idx_tokenization_tokens_key_id; +DROP TABLE IF EXISTS tokenization_tokens; + +-- Drop tokenization_keys table +DROP INDEX IF EXISTS idx_tokenization_keys_name; +DROP TABLE IF EXISTS tokenization_keys; diff --git a/migrations/postgresql/000002_add_tokenization.up.sql b/migrations/postgresql/000002_add_tokenization.up.sql new file mode 100644 index 0000000..8fd5c23 --- /dev/null +++ b/migrations/postgresql/000002_add_tokenization.up.sql @@ -0,0 +1,34 @@ +-- Create tokenization_keys table +CREATE TABLE IF NOT EXISTS tokenization_keys ( + id UUID PRIMARY KEY, + name TEXT NOT NULL, + version INTEGER NOT NULL, + format_type TEXT NOT NULL, + is_deterministic BOOLEAN NOT NULL, + dek_id UUID NOT NULL REFERENCES deks(id), + created_at TIMESTAMPTZ NOT NULL, + deleted_at TIMESTAMPTZ, 
+ UNIQUE (name, version) +); + +CREATE INDEX idx_tokenization_keys_name ON tokenization_keys(name) WHERE deleted_at IS NULL; + +-- Create tokenization_tokens table +CREATE TABLE IF NOT EXISTS tokenization_tokens ( + id UUID PRIMARY KEY, + tokenization_key_id UUID NOT NULL REFERENCES tokenization_keys(id), + token TEXT NOT NULL, + value_hash TEXT, + ciphertext BYTEA NOT NULL, + nonce BYTEA NOT NULL, + metadata JSONB, + created_at TIMESTAMPTZ NOT NULL, + expires_at TIMESTAMPTZ, + revoked_at TIMESTAMPTZ, + UNIQUE (token) +); + +CREATE INDEX idx_tokenization_tokens_key_id ON tokenization_tokens(tokenization_key_id); +CREATE INDEX idx_tokenization_tokens_value_hash ON tokenization_tokens(value_hash) WHERE value_hash IS NOT NULL; +CREATE INDEX idx_tokenization_tokens_created_at ON tokenization_tokens(created_at); +CREATE INDEX idx_tokenization_tokens_expires_at ON tokenization_tokens(expires_at) WHERE expires_at IS NOT NULL; diff --git a/test/integration/api_test.go b/test/integration/api_test.go index 927b744..6caf905 100644 --- a/test/integration/api_test.go +++ b/test/integration/api_test.go @@ -29,6 +29,7 @@ import ( cryptoDomain "github.com/allisson/secrets/internal/crypto/domain" secretsDTO "github.com/allisson/secrets/internal/secrets/http/dto" "github.com/allisson/secrets/internal/testutil" + tokenizationDTO "github.com/allisson/secrets/internal/tokenization/http/dto" transitDTO "github.com/allisson/secrets/internal/transit/http/dto" ) @@ -261,9 +262,9 @@ func teardownIntegrationTest(t *testing.T, ctx *integrationTestContext) { t.Logf("Integration test teardown complete for %s", ctx.dbDriver) } -// TestIntegration_AllEndpoints_HappyPath tests all API endpoints in a happy path scenario. -// This test validates the complete API functionality against both PostgreSQL and MySQL databases. -func TestIntegration_AllEndpoints_HappyPath(t *testing.T) { +// TestIntegration_Health_BasicChecks validates infrastructure health and readiness endpoints. +// Tests health check and database connectivity verification against both PostgreSQL and MySQL. 
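Before the reworked integration tests, one note on the PostgreSQL migration above: the partial indexes (`WHERE deleted_at IS NULL`, `WHERE value_hash IS NOT NULL`, `WHERE expires_at IS NOT NULL`) only help queries that repeat the same predicate. Below is a minimal sketch of the expired-token delete that `idx_tokenization_tokens_expires_at` is shaped to serve; the function and SQL text are illustrative, not the repository code elsewhere in this patch.

```go
package repository

import (
	"context"
	"database/sql"
	"time"
)

// deleteExpired is an illustrative sketch: repeating the index predicate
// (expires_at IS NOT NULL) lets the planner answer the scan from the small
// partial index instead of the whole tokenization_tokens table.
func deleteExpired(ctx context.Context, db *sql.DB, cutoff time.Time) (int64, error) {
	res, err := db.ExecContext(ctx,
		`DELETE FROM tokenization_tokens
		 WHERE expires_at IS NOT NULL AND expires_at < $1`,
		cutoff,
	)
	if err != nil {
		return 0, err
	}
	return res.RowsAffected()
}
```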
+func TestIntegration_Health_BasicChecks(t *testing.T) { // Skip if short mode (integration tests can be slow) if testing.Short() { t.Skip("Skipping integration test in short mode") @@ -283,21 +284,8 @@ func TestIntegration_AllEndpoints_HappyPath(t *testing.T) { ctx := setupIntegrationTest(t, tc.dbDriver) defer teardownIntegrationTest(t, ctx) - // Variables to store created resource IDs for later operations - var ( - secretPath = "/integration-test/password" - secretPathStored = "integration-test/password" // API stores without leading slash - transitKeyName = "integration-test-key" - transitKeyID uuid.UUID - newClientID uuid.UUID - plaintextValue = []byte("super-secret-value") - plaintextValueBase64 = base64.StdEncoding.EncodeToString(plaintextValue) - transitPlaintext = []byte("transit-test-data") - transitCiphertext string - ) - - // [1/15] Test GET /health - Health check endpoint - t.Run("01_Health", func(t *testing.T) { + // [1/2] Test GET /health - Health check endpoint + t.Run("01_HealthCheck", func(t *testing.T) { resp, body := ctx.makeRequest(t, http.MethodGet, "/health", nil, false) assert.Equal(t, http.StatusOK, resp.StatusCode) @@ -307,143 +295,71 @@ func TestIntegration_AllEndpoints_HappyPath(t *testing.T) { assert.Equal(t, "healthy", response["status"]) }) - // [2/15] Test POST /v1/token - Issue authentication token - t.Run("02_IssueToken", func(t *testing.T) { - requestBody := authDTO.IssueTokenRequest{ - ClientID: ctx.rootClient.ID.String(), - ClientSecret: ctx.rootSecret, - } - - resp, body := ctx.makeRequest(t, http.MethodPost, "/v1/token", requestBody, false) - assert.Equal(t, http.StatusCreated, resp.StatusCode) - - var response authDTO.IssueTokenResponse - err := json.Unmarshal(body, &response) - require.NoError(t, err) - assert.NotEmpty(t, response.Token) - assert.False(t, response.ExpiresAt.IsZero()) - - // Update token for subsequent requests - ctx.rootToken = response.Token - }) - - // [3/15] Test POST /v1/secrets/*path - Create/update secret - t.Run("03_CreateSecret", func(t *testing.T) { - requestBody := secretsDTO.CreateOrUpdateSecretRequest{ - Value: plaintextValueBase64, - } - - resp, body := ctx.makeRequest(t, http.MethodPost, "/v1/secrets"+secretPath, requestBody, true) - assert.Equal(t, http.StatusCreated, resp.StatusCode) - - var response secretsDTO.SecretResponse - err := json.Unmarshal(body, &response) - require.NoError(t, err) - assert.NotEmpty(t, response.ID) - assert.Equal(t, secretPathStored, response.Path) // API stores without leading slash - assert.Equal(t, uint(1), response.Version) - assert.Empty(t, response.Value) // Value not returned on create - }) - - // [4/15] Test GET /v1/secrets/*path - Read secret - t.Run("04_ReadSecret", func(t *testing.T) { - resp, body := ctx.makeRequest(t, http.MethodGet, "/v1/secrets"+secretPath, nil, true) + // [2/2] Test GET /ready - Readiness check endpoint + t.Run("02_ReadinessCheck", func(t *testing.T) { + resp, body := ctx.makeRequest(t, http.MethodGet, "/ready", nil, false) assert.Equal(t, http.StatusOK, resp.StatusCode) - var response secretsDTO.SecretResponse - err := json.Unmarshal(body, &response) - require.NoError(t, err) - assert.NotEmpty(t, response.ID) - assert.Equal(t, secretPathStored, response.Path) // API stores without leading slash - assert.Equal(t, uint(1), response.Version) - assert.Equal(t, plaintextValueBase64, response.Value) // Value returned on read - - // Verify the value decodes correctly - decoded, err := base64.StdEncoding.DecodeString(response.Value) - require.NoError(t, err) - 
assert.Equal(t, plaintextValue, decoded) - }) - - // [5/15] Test POST /v1/transit/keys - Create transit key - t.Run("05_CreateTransitKey", func(t *testing.T) { - requestBody := transitDTO.CreateTransitKeyRequest{ - Name: transitKeyName, - Algorithm: string(cryptoDomain.AESGCM), - } - - resp, body := ctx.makeRequest(t, http.MethodPost, "/v1/transit/keys", requestBody, true) - assert.Equal(t, http.StatusCreated, resp.StatusCode) - - var response transitDTO.TransitKeyResponse + var response map[string]string err := json.Unmarshal(body, &response) require.NoError(t, err) - assert.NotEmpty(t, response.ID) - assert.Equal(t, transitKeyName, response.Name) - assert.Equal(t, uint(1), response.Version) - - // Store transit key ID for later deletion - parsedID, err := uuid.Parse(response.ID) - require.NoError(t, err) - transitKeyID = parsedID + assert.Equal(t, "ready", response["status"]) }) - // [6/15] Test POST /v1/transit/keys/:name/encrypt - Encrypt with transit key - t.Run("06_TransitEncrypt", func(t *testing.T) { - requestBody := transitDTO.EncryptRequest{ - Plaintext: base64.StdEncoding.EncodeToString(transitPlaintext), - } + t.Logf("All 2 health endpoint tests passed for %s", tc.dbDriver) + }) + } +} - resp, body := ctx.makeRequest( - t, - http.MethodPost, - "/v1/transit/keys/"+transitKeyName+"/encrypt", - requestBody, - true, - ) - assert.Equal(t, http.StatusOK, resp.StatusCode) +// TestIntegration_Auth_CompleteFlow tests authentication, client management, and audit logging. +// Validates complete client lifecycle including token issuance, CRUD operations, and audit trails. +func TestIntegration_Auth_CompleteFlow(t *testing.T) { + // Skip if short mode (integration tests can be slow) + if testing.Short() { + t.Skip("Skipping integration test in short mode") + } - var response transitDTO.EncryptResponse - err := json.Unmarshal(body, &response) - require.NoError(t, err) - assert.NotEmpty(t, response.Ciphertext) - assert.Equal(t, uint(1), response.Version) + testCases := []struct { + name string + dbDriver string + }{ + {"PostgreSQL", "postgres"}, + {"MySQL", "mysql"}, + } - // Store ciphertext for decryption test - transitCiphertext = response.Ciphertext + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + // Setup + ctx := setupIntegrationTest(t, tc.dbDriver) + defer teardownIntegrationTest(t, ctx) - // Verify ciphertext format: : - assert.Contains(t, response.Ciphertext, ":") - }) + // Variables to store created resource IDs for later operations + var ( + newClientID uuid.UUID + ) - // [7/15] Test POST /v1/transit/keys/:name/decrypt - Decrypt with transit key - t.Run("07_TransitDecrypt", func(t *testing.T) { - requestBody := transitDTO.DecryptRequest{ - Ciphertext: transitCiphertext, + // [1/8] Test POST /v1/token - Issue authentication token + t.Run("01_IssueToken", func(t *testing.T) { + requestBody := authDTO.IssueTokenRequest{ + ClientID: ctx.rootClient.ID.String(), + ClientSecret: ctx.rootSecret, } - resp, body := ctx.makeRequest( - t, - http.MethodPost, - "/v1/transit/keys/"+transitKeyName+"/decrypt", - requestBody, - true, - ) - assert.Equal(t, http.StatusOK, resp.StatusCode) + resp, body := ctx.makeRequest(t, http.MethodPost, "/v1/token", requestBody, false) + assert.Equal(t, http.StatusCreated, resp.StatusCode) - var response transitDTO.DecryptResponse + var response authDTO.IssueTokenResponse err := json.Unmarshal(body, &response) require.NoError(t, err) - assert.NotEmpty(t, response.Plaintext) - assert.Equal(t, uint(1), response.Version) + assert.NotEmpty(t, 
response.Token) + assert.False(t, response.ExpiresAt.IsZero()) - // Verify decrypted value matches original - decoded, err := base64.StdEncoding.DecodeString(response.Plaintext) - require.NoError(t, err) - assert.Equal(t, transitPlaintext, decoded) + // Update token for subsequent requests + ctx.rootToken = response.Token }) - // [8/15] Test POST /v1/clients - Create new client - t.Run("08_CreateClient", func(t *testing.T) { + // [2/8] Test POST /v1/clients - Create new client + t.Run("02_CreateClient", func(t *testing.T) { requestBody := authDTO.CreateClientRequest{ Name: "Test Client", IsActive: true, @@ -473,8 +389,8 @@ func TestIntegration_AllEndpoints_HappyPath(t *testing.T) { newClientID = parsedID }) - // [9/15] Test GET /v1/clients/:id - Get client by ID - t.Run("09_GetClient", func(t *testing.T) { + // [3/8] Test GET /v1/clients/:id - Get client by ID + t.Run("03_GetClient", func(t *testing.T) { resp, body := ctx.makeRequest( t, http.MethodGet, @@ -493,8 +409,8 @@ func TestIntegration_AllEndpoints_HappyPath(t *testing.T) { assert.Len(t, response.Policies, 1) }) - // [10/15] Test PUT /v1/clients/:id - Update client - t.Run("10_UpdateClient", func(t *testing.T) { + // [4/8] Test PUT /v1/clients/:id - Update client + t.Run("04_UpdateClient", func(t *testing.T) { requestBody := authDTO.UpdateClientRequest{ Name: "Updated Test Client", IsActive: true, @@ -522,10 +438,11 @@ func TestIntegration_AllEndpoints_HappyPath(t *testing.T) { require.NoError(t, err) assert.Equal(t, "Updated Test Client", response.Name) assert.True(t, response.IsActive) + assert.Len(t, response.Policies, 1) }) - // [11/15] Test GET /v1/clients - List clients - t.Run("11_ListClients", func(t *testing.T) { + // [5/8] Test GET /v1/clients - List clients + t.Run("05_ListClients", func(t *testing.T) { resp, body := ctx.makeRequest(t, http.MethodGet, "/v1/clients", nil, true) assert.Equal(t, http.StatusOK, resp.StatusCode) @@ -533,33 +450,11 @@ func TestIntegration_AllEndpoints_HappyPath(t *testing.T) { err := json.Unmarshal(body, &response) require.NoError(t, err) assert.NotEmpty(t, response.Clients) - assert.GreaterOrEqual(t, len(response.Clients), 2) // At least root + new client - }) - - // [12/15] Test POST /v1/transit/keys/:name/rotate - Rotate transit key - t.Run("12_RotateTransitKey", func(t *testing.T) { - requestBody := transitDTO.RotateTransitKeyRequest{ - Algorithm: "aes-gcm", - } - - resp, body := ctx.makeRequest( - t, - http.MethodPost, - "/v1/transit/keys/"+transitKeyName+"/rotate", - requestBody, - true, - ) - assert.Equal(t, http.StatusOK, resp.StatusCode) - - var response transitDTO.TransitKeyResponse - err := json.Unmarshal(body, &response) - require.NoError(t, err) - assert.Equal(t, transitKeyName, response.Name) - assert.Equal(t, uint(2), response.Version) // Version should increment + assert.GreaterOrEqual(t, len(response.Clients), 2, "should have at least root + new client") }) - // [13/15] Test GET /v1/audit-logs - List audit logs - t.Run("13_ListAuditLogs", func(t *testing.T) { + // [6/8] Test GET /v1/audit-logs - List audit logs + t.Run("06_ListAuditLogs", func(t *testing.T) { resp, body := ctx.makeRequest(t, http.MethodGet, "/v1/audit-logs", nil, true) assert.Equal(t, http.StatusOK, resp.StatusCode) @@ -578,8 +473,8 @@ func TestIntegration_AllEndpoints_HappyPath(t *testing.T) { assert.NotEmpty(t, firstLog.Capability) }) - // [14/15] Test DELETE /v1/clients/:id - Delete client (soft delete) - t.Run("14_DeleteClient", func(t *testing.T) { + // [7/8] Test DELETE /v1/clients/:id - Delete client 
(soft delete) + t.Run("07_DeleteClient", func(t *testing.T) { resp, body := ctx.makeRequest( t, http.MethodDelete, @@ -589,9 +484,11 @@ func TestIntegration_AllEndpoints_HappyPath(t *testing.T) { ) assert.Equal(t, http.StatusNoContent, resp.StatusCode) assert.Empty(t, body) + }) - // Verify client is soft-deleted (IsActive = false) - resp, body = ctx.makeRequest( + // [8/8] Test GET /v1/clients/:id - Verify client is inactive after deletion + t.Run("08_VerifyClientInactive", func(t *testing.T) { + resp, body := ctx.makeRequest( t, http.MethodGet, "/v1/clients/"+newClientID.String(), @@ -606,12 +503,772 @@ func TestIntegration_AllEndpoints_HappyPath(t *testing.T) { assert.False(t, response.IsActive, "client should be inactive after deletion") }) - // [15/15] Test DELETE /v1/transit/keys/:id - Delete transit key - t.Run("15_DeleteTransitKey", func(t *testing.T) { + t.Logf("All 8 auth endpoint tests passed for %s", tc.dbDriver) + }) + } +} + +// TestIntegration_Secrets_CompleteFlow tests the secrets engine complete lifecycle. +// Validates secret creation, versioning, updates, version-specific reads, and deletion. +func TestIntegration_Secrets_CompleteFlow(t *testing.T) { + // Skip if short mode (integration tests can be slow) + if testing.Short() { + t.Skip("Skipping integration test in short mode") + } + + testCases := []struct { + name string + dbDriver string + }{ + {"PostgreSQL", "postgres"}, + {"MySQL", "mysql"}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + // Setup + ctx := setupIntegrationTest(t, tc.dbDriver) + defer teardownIntegrationTest(t, ctx) + + // Variables to store test data + var ( + secretPath = "/integration-test/password" + secretPathStored = "integration-test/password" // API stores without leading slash + plaintextValue1 = []byte("super-secret-value-v1") + plaintextValue2 = []byte("super-secret-value-v2-updated") + plaintextValue1Base64 = base64.StdEncoding.EncodeToString(plaintextValue1) + plaintextValue2Base64 = base64.StdEncoding.EncodeToString(plaintextValue2) + ) + + // [1/6] Test POST /v1/secrets/*path - Create secret (version 1) + t.Run("01_CreateSecret", func(t *testing.T) { + requestBody := secretsDTO.CreateOrUpdateSecretRequest{ + Value: plaintextValue1Base64, + } + + resp, body := ctx.makeRequest(t, http.MethodPost, "/v1/secrets"+secretPath, requestBody, true) + assert.Equal(t, http.StatusCreated, resp.StatusCode) + + var response secretsDTO.SecretResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.NotEmpty(t, response.ID) + assert.Equal(t, secretPathStored, response.Path) + assert.Equal(t, uint(1), response.Version) + assert.Empty(t, response.Value, "value should not be returned on create") + }) + + // [2/6] Test GET /v1/secrets/*path - Read secret + t.Run("02_ReadSecret", func(t *testing.T) { + resp, body := ctx.makeRequest(t, http.MethodGet, "/v1/secrets"+secretPath, nil, true) + assert.Equal(t, http.StatusOK, resp.StatusCode) + + var response secretsDTO.SecretResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.NotEmpty(t, response.ID) + assert.Equal(t, secretPathStored, response.Path) + assert.Equal(t, uint(1), response.Version) + assert.Equal(t, plaintextValue1Base64, response.Value) + + // Verify the value decodes correctly + decoded, err := base64.StdEncoding.DecodeString(response.Value) + require.NoError(t, err) + assert.Equal(t, plaintextValue1, decoded) + }) + + // [3/6] Test POST /v1/secrets/*path - Update secret (version 2) + 
t.Run("03_UpdateSecret", func(t *testing.T) { + requestBody := secretsDTO.CreateOrUpdateSecretRequest{ + Value: plaintextValue2Base64, + } + + resp, body := ctx.makeRequest(t, http.MethodPost, "/v1/secrets"+secretPath, requestBody, true) + assert.Equal(t, http.StatusCreated, resp.StatusCode) + + var response secretsDTO.SecretResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.NotEmpty(t, response.ID) + assert.Equal(t, secretPathStored, response.Path) + assert.Equal(t, uint(2), response.Version, "version should increment to 2") + assert.Empty(t, response.Value, "value should not be returned on create/update") + }) + + // [4/6] Test GET /v1/secrets/*path - Read updated secret (latest version) + t.Run("04_ReadUpdatedSecret", func(t *testing.T) { + resp, body := ctx.makeRequest(t, http.MethodGet, "/v1/secrets"+secretPath, nil, true) + assert.Equal(t, http.StatusOK, resp.StatusCode) + + var response secretsDTO.SecretResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.Equal(t, secretPathStored, response.Path) + assert.Equal(t, uint(2), response.Version, "should return latest version (v2)") + assert.Equal(t, plaintextValue2Base64, response.Value) + + // Verify the value decodes correctly + decoded, err := base64.StdEncoding.DecodeString(response.Value) + require.NoError(t, err) + assert.Equal(t, plaintextValue2, decoded, "should return updated value") + }) + + // [5/6] Test GET /v1/secrets/*path?version=1 - Read specific version + t.Run("05_ReadSecretVersion1", func(t *testing.T) { + resp, body := ctx.makeRequest( + t, + http.MethodGet, + "/v1/secrets"+secretPath+"?version=1", + nil, + true, + ) + assert.Equal(t, http.StatusOK, resp.StatusCode) + + var response secretsDTO.SecretResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.Equal(t, secretPathStored, response.Path) + assert.Equal(t, uint(1), response.Version, "should return version 1") + assert.Equal(t, plaintextValue1Base64, response.Value) + + // Verify the value decodes correctly + decoded, err := base64.StdEncoding.DecodeString(response.Value) + require.NoError(t, err) + assert.Equal(t, plaintextValue1, decoded, "should return original v1 value") + }) + + // [6/6] Test DELETE /v1/secrets/*path - Delete secret + t.Run("06_DeleteSecret", func(t *testing.T) { resp, body := ctx.makeRequest( t, http.MethodDelete, - "/v1/transit/keys/"+transitKeyID.String(), + "/v1/secrets"+secretPath, + nil, + true, + ) + assert.Equal(t, http.StatusNoContent, resp.StatusCode) + assert.Empty(t, body) + }) + + t.Logf("All 6 secrets endpoint tests passed for %s", tc.dbDriver) + }) + } +} + +// TestIntegration_Transit_CompleteFlow tests all transit encryption endpoints in a complete lifecycle. +// This test validates transit key creation, encryption/decryption, key rotation, and backward +// compatibility (decrypting old ciphertexts after rotation) across both database engines. 
+func TestIntegration_Transit_CompleteFlow(t *testing.T) { + // Skip if short mode (integration tests can be slow) + if testing.Short() { + t.Skip("Skipping integration test in short mode") + } + + testCases := []struct { + name string + dbDriver string + }{ + {"PostgreSQL", "postgres"}, + {"MySQL", "mysql"}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + // Setup + ctx := setupIntegrationTest(t, tc.dbDriver) + defer teardownIntegrationTest(t, ctx) + + // Variables to store created resource IDs and encrypted data for later operations + var ( + transitKeyName = "integration-test-transit-key" + transitKeyID uuid.UUID + plaintext1 = []byte("transit-test-data-1") + plaintext2 = []byte("transit-test-data-2") + ciphertext1 string // Encrypted with version 1 + ciphertext2 string // Encrypted with different plaintext + ciphertextV2 string // Encrypted with version 2 (after rotation) + ) + + // [1/8] Test POST /v1/transit/keys - Create transit key + t.Run("01_CreateTransitKey", func(t *testing.T) { + requestBody := transitDTO.CreateTransitKeyRequest{ + Name: transitKeyName, + Algorithm: string(cryptoDomain.AESGCM), + } + + resp, body := ctx.makeRequest(t, http.MethodPost, "/v1/transit/keys", requestBody, true) + assert.Equal(t, http.StatusCreated, resp.StatusCode) + + var response transitDTO.TransitKeyResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.NotEmpty(t, response.ID) + assert.Equal(t, transitKeyName, response.Name) + assert.Equal(t, uint(1), response.Version) + assert.NotEmpty(t, response.DekID) + assert.False(t, response.CreatedAt.IsZero()) + + // Store transit key ID for later deletion + parsedID, err := uuid.Parse(response.ID) + require.NoError(t, err) + transitKeyID = parsedID + }) + + // [2/8] Test POST /v1/transit/keys/:name/encrypt - Encrypt with transit key + t.Run("02_Encrypt", func(t *testing.T) { + requestBody := transitDTO.EncryptRequest{ + Plaintext: base64.StdEncoding.EncodeToString(plaintext1), + } + + resp, body := ctx.makeRequest( + t, + http.MethodPost, + "/v1/transit/keys/"+transitKeyName+"/encrypt", + requestBody, + true, + ) + assert.Equal(t, http.StatusOK, resp.StatusCode) + + var response transitDTO.EncryptResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.NotEmpty(t, response.Ciphertext) + assert.Equal(t, uint(1), response.Version) + + // Store ciphertext for decryption test + ciphertext1 = response.Ciphertext + + // Verify ciphertext format: <version>:<ciphertext> + assert.Contains(t, response.Ciphertext, ":") + }) + + // [3/8] Test POST /v1/transit/keys/:name/decrypt - Decrypt with transit key + t.Run("03_Decrypt", func(t *testing.T) { + requestBody := transitDTO.DecryptRequest{ + Ciphertext: ciphertext1, + } + + resp, body := ctx.makeRequest( + t, + http.MethodPost, + "/v1/transit/keys/"+transitKeyName+"/decrypt", + requestBody, + true, + ) + assert.Equal(t, http.StatusOK, resp.StatusCode) + + var response transitDTO.DecryptResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.NotEmpty(t, response.Plaintext) + assert.Equal(t, uint(1), response.Version) + + // Verify decrypted value matches original + decoded, err := base64.StdEncoding.DecodeString(response.Plaintext) + require.NoError(t, err) + assert.Equal(t, plaintext1, decoded) + }) + + // [4/8] Test POST /v1/transit/keys/:name/encrypt - Encrypt different value + t.Run("04_EncryptDifferentValue", func(t *testing.T) { + requestBody := transitDTO.EncryptRequest{ + Plaintext:
base64.StdEncoding.EncodeToString(plaintext2), + } + + resp, body := ctx.makeRequest( + t, + http.MethodPost, + "/v1/transit/keys/"+transitKeyName+"/encrypt", + requestBody, + true, + ) + assert.Equal(t, http.StatusOK, resp.StatusCode) + + var response transitDTO.EncryptResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.NotEmpty(t, response.Ciphertext) + assert.Equal(t, uint(1), response.Version) + + // Store second ciphertext + ciphertext2 = response.Ciphertext + + // Verify different plaintext produces different ciphertext + assert.NotEqual(t, ciphertext1, ciphertext2) + }) + + // [5/8] Test POST /v1/transit/keys/:name/rotate - Rotate transit key + t.Run("05_RotateTransitKey", func(t *testing.T) { + requestBody := transitDTO.RotateTransitKeyRequest{ + Algorithm: string(cryptoDomain.AESGCM), + } + + resp, body := ctx.makeRequest( + t, + http.MethodPost, + "/v1/transit/keys/"+transitKeyName+"/rotate", + requestBody, + true, + ) + assert.Equal(t, http.StatusOK, resp.StatusCode) + + var response transitDTO.TransitKeyResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.Equal(t, transitKeyName, response.Name) + assert.Equal(t, uint(2), response.Version) // Version should increment to 2 + }) + + // [6/8] Test POST /v1/transit/keys/:name/encrypt - Encrypt with rotated key (version 2) + t.Run("06_EncryptWithRotatedKey", func(t *testing.T) { + requestBody := transitDTO.EncryptRequest{ + Plaintext: base64.StdEncoding.EncodeToString(plaintext1), + } + + resp, body := ctx.makeRequest( + t, + http.MethodPost, + "/v1/transit/keys/"+transitKeyName+"/encrypt", + requestBody, + true, + ) + assert.Equal(t, http.StatusOK, resp.StatusCode) + + var response transitDTO.EncryptResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.NotEmpty(t, response.Ciphertext) + assert.Equal(t, uint(2), response.Version) // Should use new version 2 + + // Store version 2 ciphertext + ciphertextV2 = response.Ciphertext + + // Verify version 2 ciphertext is different from version 1 + assert.NotEqual(t, ciphertext1, ciphertextV2) + }) + + // [7/8] Test POST /v1/transit/keys/:name/decrypt - Decrypt old ciphertext (backward compatibility) + t.Run("07_DecryptOldCiphertext", func(t *testing.T) { + requestBody := transitDTO.DecryptRequest{ + Ciphertext: ciphertext1, // Use version 1 ciphertext + } + + resp, body := ctx.makeRequest( + t, + http.MethodPost, + "/v1/transit/keys/"+transitKeyName+"/decrypt", + requestBody, + true, + ) + assert.Equal(t, http.StatusOK, resp.StatusCode) + + var response transitDTO.DecryptResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.NotEmpty(t, response.Plaintext) + assert.Equal(t, uint(1), response.Version) // Should indicate version 1 was used + + // Verify decrypted value still matches original (backward compatibility) + decoded, err := base64.StdEncoding.DecodeString(response.Plaintext) + require.NoError(t, err) + assert.Equal(t, plaintext1, decoded) + }) + + // [8/8] Test DELETE /v1/transit/keys/:id - Delete transit key + t.Run("08_DeleteTransitKey", func(t *testing.T) { + resp, body := ctx.makeRequest( + t, + http.MethodDelete, + "/v1/transit/keys/"+transitKeyID.String(), + nil, + true, + ) + assert.Equal(t, http.StatusNoContent, resp.StatusCode) + assert.Empty(t, body) + }) + + t.Logf("All 8 transit endpoint tests passed for %s", tc.dbDriver) + }) + } +} + +// TestIntegration_Tokenization_CompleteFlow tests all tokenization endpoints in a complete lifecycle. 
+// This test validates tokenization functionality including deterministic/non-deterministic modes, +// token expiration, key rotation, and token lifecycle management across both database engines. +func TestIntegration_Tokenization_CompleteFlow(t *testing.T) { + // Skip if short mode (integration tests can be slow) + if testing.Short() { + t.Skip("Skipping integration test in short mode") + } + + testCases := []struct { + name string + dbDriver string + }{ + {"PostgreSQL", "postgres"}, + {"MySQL", "mysql"}, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + // Setup + ctx := setupIntegrationTest(t, tc.dbDriver) + defer teardownIntegrationTest(t, ctx) + + // Variables to store created resource IDs and tokens for later operations + var ( + tokenizationKeyName1 = "integration-test-key-uuid" + tokenizationKeyName2 = "integration-test-key-deterministic" + tokenizationKeyID1 uuid.UUID + tokenizationKeyID2 uuid.UUID + testToken string + deterministicToken1 string + deterministicToken2 string + plaintextValue = []byte("sensitive-credit-card-4532015112830366") + plaintextValueBase64 = base64.StdEncoding.EncodeToString(plaintextValue) + testMetadata = map[string]any{"user_id": "12345", "source": "integration-test"} + ) + + // [1/12] Test POST /v1/tokenization/keys - Create UUID format tokenization key + t.Run("01_CreateTokenizationKey_UUID", func(t *testing.T) { + requestBody := tokenizationDTO.CreateTokenizationKeyRequest{ + Name: tokenizationKeyName1, + FormatType: "uuid", + IsDeterministic: false, + Algorithm: "aes-gcm", + } + + resp, body := ctx.makeRequest( + t, + http.MethodPost, + "/v1/tokenization/keys", + requestBody, + true, + ) + assert.Equal(t, http.StatusCreated, resp.StatusCode) + + var response tokenizationDTO.TokenizationKeyResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.NotEmpty(t, response.ID) + assert.Equal(t, tokenizationKeyName1, response.Name) + assert.Equal(t, uint(1), response.Version) + assert.Equal(t, "uuid", response.FormatType) + assert.False(t, response.IsDeterministic) + assert.False(t, response.CreatedAt.IsZero()) + + // Store ID for later operations + parsedID, err := uuid.Parse(response.ID) + require.NoError(t, err) + tokenizationKeyID1 = parsedID + }) + + // [2/12] Test POST /v1/tokenization/keys/:name/tokenize - Tokenize with UUID format + t.Run("02_Tokenize_UUID", func(t *testing.T) { + requestBody := tokenizationDTO.TokenizeRequest{ + Plaintext: plaintextValueBase64, + Metadata: testMetadata, + } + + resp, body := ctx.makeRequest( + t, + http.MethodPost, + "/v1/tokenization/keys/"+tokenizationKeyName1+"/tokenize", + requestBody, + true, + ) + assert.Equal(t, http.StatusCreated, resp.StatusCode) + + var response tokenizationDTO.TokenizeResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.NotEmpty(t, response.Token) + assert.False(t, response.CreatedAt.IsZero()) + assert.Nil(t, response.ExpiresAt) // No TTL specified + assert.Equal(t, testMetadata, response.Metadata) + + // Verify token is in UUID format + _, err = uuid.Parse(response.Token) + assert.NoError(t, err, "token should be valid UUID format") + + // Store token for detokenization + testToken = response.Token + }) + + // [3/12] Test POST /v1/tokenization/detokenize - Detokenize UUID token + t.Run("03_Detokenize_UUID", func(t *testing.T) { + requestBody := tokenizationDTO.DetokenizeRequest{ + Token: testToken, + } + + resp, body := ctx.makeRequest( + t, + http.MethodPost, + "/v1/tokenization/detokenize", + 
requestBody, + true, + ) + assert.Equal(t, http.StatusOK, resp.StatusCode) + + var response tokenizationDTO.DetokenizeResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.NotEmpty(t, response.Plaintext) + assert.Equal(t, testMetadata, response.Metadata) + + // Verify decrypted value matches original + assert.Equal(t, plaintextValueBase64, response.Plaintext) + }) + + // [4/12] Test POST /v1/tokenization/validate - Validate active token + t.Run("04_ValidateToken_Valid", func(t *testing.T) { + requestBody := tokenizationDTO.ValidateTokenRequest{ + Token: testToken, + } + + resp, body := ctx.makeRequest( + t, + http.MethodPost, + "/v1/tokenization/validate", + requestBody, + true, + ) + assert.Equal(t, http.StatusOK, resp.StatusCode) + + var response tokenizationDTO.ValidateTokenResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.True(t, response.Valid, "token should be valid") + }) + + // [5/12] Test POST /v1/tokenization/revoke - Revoke token + t.Run("05_RevokeToken", func(t *testing.T) { + requestBody := tokenizationDTO.RevokeTokenRequest{ + Token: testToken, + } + + resp, body := ctx.makeRequest( + t, + http.MethodPost, + "/v1/tokenization/revoke", + requestBody, + true, + ) + assert.Equal(t, http.StatusNoContent, resp.StatusCode) + assert.Empty(t, body) + }) + + // [6/12] Test POST /v1/tokenization/validate - Validate revoked token + t.Run("06_ValidateToken_Revoked", func(t *testing.T) { + requestBody := tokenizationDTO.ValidateTokenRequest{ + Token: testToken, + } + + resp, body := ctx.makeRequest( + t, + http.MethodPost, + "/v1/tokenization/validate", + requestBody, + true, + ) + assert.Equal(t, http.StatusOK, resp.StatusCode) + + var response tokenizationDTO.ValidateTokenResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.False(t, response.Valid, "revoked token should be invalid") + }) + + // [7/12] Test POST /v1/tokenization/keys - Create deterministic tokenization key + t.Run("07_CreateTokenizationKey_Deterministic", func(t *testing.T) { + requestBody := tokenizationDTO.CreateTokenizationKeyRequest{ + Name: tokenizationKeyName2, + FormatType: "alphanumeric", + IsDeterministic: true, + Algorithm: "chacha20-poly1305", + } + + resp, body := ctx.makeRequest( + t, + http.MethodPost, + "/v1/tokenization/keys", + requestBody, + true, + ) + assert.Equal(t, http.StatusCreated, resp.StatusCode) + + var response tokenizationDTO.TokenizationKeyResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.NotEmpty(t, response.ID) + assert.Equal(t, tokenizationKeyName2, response.Name) + assert.Equal(t, uint(1), response.Version) + assert.Equal(t, "alphanumeric", response.FormatType) + assert.True(t, response.IsDeterministic) + + // Store ID for later operations + parsedID, err := uuid.Parse(response.ID) + require.NoError(t, err) + tokenizationKeyID2 = parsedID + }) + + // [8/12] Test POST /v1/tokenization/keys/:name/tokenize - Deterministic tokenization + t.Run("08_Tokenize_Deterministic_SameValue", func(t *testing.T) { + requestBody := tokenizationDTO.TokenizeRequest{ + Plaintext: plaintextValueBase64, + } + + // First tokenization + resp1, body1 := ctx.makeRequest( + t, + http.MethodPost, + "/v1/tokenization/keys/"+tokenizationKeyName2+"/tokenize", + requestBody, + true, + ) + assert.Equal(t, http.StatusCreated, resp1.StatusCode) + + var response1 tokenizationDTO.TokenizeResponse + err := json.Unmarshal(body1, &response1) + require.NoError(t, err) + assert.NotEmpty(t, 
response1.Token) + deterministicToken1 = response1.Token + + // Second tokenization with same plaintext + resp2, body2 := ctx.makeRequest( + t, + http.MethodPost, + "/v1/tokenization/keys/"+tokenizationKeyName2+"/tokenize", + requestBody, + true, + ) + assert.Equal(t, http.StatusCreated, resp2.StatusCode) + + var response2 tokenizationDTO.TokenizeResponse + err = json.Unmarshal(body2, &response2) + require.NoError(t, err) + assert.NotEmpty(t, response2.Token) + deterministicToken2 = response2.Token + + // Verify both tokens are identical (deterministic behavior) + assert.Equal(t, deterministicToken1, deterministicToken2, + "deterministic tokenization should produce same token for same plaintext") + }) + + // [9/12] Test POST /v1/tokenization/keys/:name/tokenize - Tokenize with TTL + t.Run("09_Tokenize_WithTTL", func(t *testing.T) { + ttlSeconds := 60 + requestBody := tokenizationDTO.TokenizeRequest{ + Plaintext: plaintextValueBase64, + TTL: &ttlSeconds, + } + + resp, body := ctx.makeRequest( + t, + http.MethodPost, + "/v1/tokenization/keys/"+tokenizationKeyName1+"/tokenize", + requestBody, + true, + ) + assert.Equal(t, http.StatusCreated, resp.StatusCode) + + var response tokenizationDTO.TokenizeResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.NotEmpty(t, response.Token) + assert.False(t, response.CreatedAt.IsZero()) + assert.NotNil(t, response.ExpiresAt, "ExpiresAt should be set when TTL is provided") + + // Verify ExpiresAt is approximately CreatedAt + TTL + expectedExpiry := response.CreatedAt.Add(time.Duration(ttlSeconds) * time.Second) + assert.WithinDuration(t, expectedExpiry, *response.ExpiresAt, 2*time.Second, + "ExpiresAt should be approximately CreatedAt + TTL") + }) + + // [10/12] Test POST /v1/tokenization/keys/:name/rotate - Rotate tokenization key + t.Run("10_RotateTokenizationKey", func(t *testing.T) { + requestBody := tokenizationDTO.RotateTokenizationKeyRequest{ + FormatType: "uuid", + IsDeterministic: false, + Algorithm: "chacha20-poly1305", // Rotate to different algorithm + } + + resp, body := ctx.makeRequest( + t, + http.MethodPost, + "/v1/tokenization/keys/"+tokenizationKeyName1+"/rotate", + requestBody, + true, + ) + assert.Equal(t, http.StatusCreated, resp.StatusCode) + + var response tokenizationDTO.TokenizationKeyResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.NotEmpty(t, response.ID) + assert.NotEqual( + t, + tokenizationKeyID1.String(), + response.ID, + "rotation creates new key with new ID", + ) + assert.Equal(t, tokenizationKeyName1, response.Name, "name should remain the same") + assert.Equal(t, uint(2), response.Version, "version should increment after rotation") + assert.Equal(t, "uuid", response.FormatType) + assert.False(t, response.IsDeterministic) + }) + + // [11/12] Test POST /v1/tokenization/keys/:name/tokenize - Tokenize with rotated key + t.Run("11_Tokenize_WithRotatedKey", func(t *testing.T) { + newPlaintext := []byte("new-data-after-rotation") + requestBody := tokenizationDTO.TokenizeRequest{ + Plaintext: base64.StdEncoding.EncodeToString(newPlaintext), + } + + resp, body := ctx.makeRequest( + t, + http.MethodPost, + "/v1/tokenization/keys/"+tokenizationKeyName1+"/tokenize", + requestBody, + true, + ) + assert.Equal(t, http.StatusCreated, resp.StatusCode) + + var response tokenizationDTO.TokenizeResponse + err := json.Unmarshal(body, &response) + require.NoError(t, err) + assert.NotEmpty(t, response.Token) + + // Verify token is in UUID format + _, err = 
uuid.Parse(response.Token) + assert.NoError(t, err, "token should be valid UUID format") + + // Verify we can detokenize with the rotated key + detokenizeRequest := tokenizationDTO.DetokenizeRequest{ + Token: response.Token, + } + + detokenizeResp, detokenizeBody := ctx.makeRequest( + t, + http.MethodPost, + "/v1/tokenization/detokenize", + detokenizeRequest, + true, + ) + assert.Equal(t, http.StatusOK, detokenizeResp.StatusCode) + + var detokenizeResponse tokenizationDTO.DetokenizeResponse + err = json.Unmarshal(detokenizeBody, &detokenizeResponse) + require.NoError(t, err) + assert.Equal(t, base64.StdEncoding.EncodeToString(newPlaintext), detokenizeResponse.Plaintext) + }) + + // [12/12] Test DELETE /v1/tokenization/keys/:id - Delete tokenization key + t.Run("12_DeleteTokenizationKey", func(t *testing.T) { + resp, body := ctx.makeRequest( + t, + http.MethodDelete, + "/v1/tokenization/keys/"+tokenizationKeyID2.String(), nil, true, ) @@ -619,7 +1276,7 @@ func TestIntegration_AllEndpoints_HappyPath(t *testing.T) { assert.Empty(t, body) }) - t.Logf("All 15 endpoint tests passed for %s", tc.dbDriver) + t.Logf("All 12 tokenization endpoint tests passed for %s", tc.dbDriver) }) } } From 47ff4dba4f3623951b3442f8fe05cdce5f37f2b0 Mon Sep 17 00:00:00 2001 From: Allisson Azevedo Date: Wed, 18 Feb 2026 20:03:14 -0300 Subject: [PATCH 2/2] fix: resolve all golangci-lint gosec warnings Addresses 12 gosec lint warnings by adding appropriate nolint directives for false positives and refactoring one type conversion for clarity. Changes by category: Test fixture false positives (9 files, G101): - Add `//nolint:gosec // test fixture data` to test database credentials and token hash strings in repository and use case tests - Files: internal/app/di_test.go, internal/testutil/database.go, internal/tokenization/service/*_test.go, internal/auth/repository/*_token_repository_test.go, internal/auth/usecase/token_usecase_test.go Domain model fields (4 files, G101): - Add `//nolint:gosec` to legitimate Secret/ClientSecret fields used in authentication flows (hashed secrets, not plaintext credentials) - Files: internal/auth/domain/client.go, internal/auth/domain/token.go, internal/auth/http/dto/request.go, internal/auth/http/dto/response.go Integer conversion safety (2 files, G115): - numeric_generator.go: Add nolint comment (conversion bounded 0-9 by design) - rules.go: Replace `string(rune(p.MinLength+48))` with `strconv.Itoa(p.MinLength)` for clearer intent and import strconv package HTTP test SSRF false positive (1 file, G107): - Add `//nolint:gosec // controlled test environment with localhost URLs` to integration test HTTP client - File: test/integration/api_test.go All suppressed warnings are legitimate false positives; the remaining findings were resolved by refactoring for improved clarity. No actual security vulnerabilities were present. Verification: `make lint` now passes with 0 issues.
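
For context on the numeric_generator.go suppression, a standalone sketch (illustrative only, not part of this diff; the 6-digit length is an assumed example) of why the byte conversion cannot overflow:

    package main

    import (
        "crypto/rand"
        "fmt"
        "math/big"
    )

    func main() {
        digits := make([]byte, 6)
        for i := range digits {
            // rand.Int returns a uniform value in [0, 10), so n.Int64() is
            // always 0-9 and the conversion to byte cannot overflow.
            n, err := rand.Int(rand.Reader, big.NewInt(10))
            if err != nil {
                panic(err)
            }
            digits[i] = byte('0' + n.Int64())
        }
        fmt.Println(string(digits))
    }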
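
The rules.go change also fixes a latent correctness bug, not just a lint smell: `string(rune(n+48))` only renders the intended text for single-digit values. A minimal demonstration (the value 12 is hypothetical, chosen to show the failure):

    package main

    import (
        "fmt"
        "strconv"
    )

    func main() {
        minLength := 12 // any MinLength > 9 triggers the bug
        // Before: shifts the code point by 48, yielding "<" (U+003C) for 12
        fmt.Println("password must be at least " + string(rune(minLength+48)) + " characters")
        // After: formats the decimal digits, yielding "12"
        fmt.Println("password must be at least " + strconv.Itoa(minLength) + " characters")
    }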
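
For the api_test.go suppression: gosec flags HTTP calls whose target URL is a runtime value rather than a constant, and a test helper that dials an ephemeral local server necessarily has that shape. A self-contained sketch of the flagged pattern (httptest is used here only for illustration; the real integration tests talk to the API server under test):

    package main

    import (
        "fmt"
        "io"
        "net/http"
        "net/http/httptest"
    )

    func main() {
        srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
            fmt.Fprint(w, "ok")
        }))
        defer srv.Close()

        // srv.URL is only known at runtime, so gosec treats it as tainted
        // input even though it always points at a loopback address in tests.
        resp, err := http.Get(srv.URL) //nolint:gosec // controlled test environment
        if err != nil {
            panic(err)
        }
        defer resp.Body.Close()
        body, _ := io.ReadAll(resp.Body)
        fmt.Println(string(body))
    }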
--- internal/app/di_test.go | 1 + internal/auth/domain/client.go | 2 +- internal/auth/domain/token.go | 2 +- internal/auth/http/dto/request.go | 2 +- internal/auth/http/dto/response.go | 2 +- internal/auth/repository/mysql_token_repository_test.go | 8 ++++++++ .../auth/repository/postgresql_token_repository_test.go | 5 +++++ internal/auth/usecase/token_usecase_test.go | 2 ++ internal/testutil/database.go | 4 +++- .../tokenization/service/alphanumeric_generator_test.go | 1 + internal/tokenization/service/numeric_generator.go | 1 + internal/tokenization/service/uuid_generator_test.go | 2 ++ internal/validation/rules.go | 3 ++- test/integration/api_test.go | 1 + 14 files changed, 30 insertions(+), 6 deletions(-) diff --git a/internal/app/di_test.go b/internal/app/di_test.go index 0a3d1db..8ea1def 100644 --- a/internal/app/di_test.go +++ b/internal/app/di_test.go @@ -12,6 +12,7 @@ import ( // TestNewContainer verifies that a new container can be created with a valid configuration. func TestNewContainer(t *testing.T) { + //nolint:gosec // test fixture data cfg := &config.Config{ LogLevel: "info", DBDriver: "postgres", diff --git a/internal/auth/domain/client.go b/internal/auth/domain/client.go index 3975e63..9f55668 100644 --- a/internal/auth/domain/client.go +++ b/internal/auth/domain/client.go @@ -23,7 +23,7 @@ type PolicyDocument struct { // Clients are used to authenticate API requests and enforce access control. type Client struct { ID uuid.UUID // Unique identifier (UUIDv7) - Secret string // Hashed client secret for authentication + Secret string //nolint:gosec // hashed client secret (not plaintext) Name string // Human-readable client name IsActive bool // Whether the client can authenticate Policies []PolicyDocument // Authorization policies for this client diff --git a/internal/auth/domain/token.go b/internal/auth/domain/token.go index a7a32a0..8c1ad19 100644 --- a/internal/auth/domain/token.go +++ b/internal/auth/domain/token.go @@ -21,7 +21,7 @@ type Token struct { // Used during authentication to verify client identity before generating tokens. type IssueTokenInput struct { ClientID uuid.UUID - ClientSecret string + ClientSecret string //nolint:gosec // authentication credential field } // IssueTokenOutput contains the newly issued authentication token and expiration. diff --git a/internal/auth/http/dto/request.go b/internal/auth/http/dto/request.go index 3bf2167..6cae892 100644 --- a/internal/auth/http/dto/request.go +++ b/internal/auth/http/dto/request.go @@ -75,7 +75,7 @@ func validatePolicyDocument(value interface{}) error { // IssueTokenRequest contains the parameters for issuing an authentication token. type IssueTokenRequest struct { ClientID string `json:"client_id"` - ClientSecret string `json:"client_secret"` + ClientSecret string `json:"client_secret"` //nolint:gosec // API authentication field } // Validate checks if the issue token request is valid. diff --git a/internal/auth/http/dto/response.go b/internal/auth/http/dto/response.go index 88f5c7f..8df83c8 100644 --- a/internal/auth/http/dto/response.go +++ b/internal/auth/http/dto/response.go @@ -11,7 +11,7 @@ import ( // SECURITY: The secret is only returned once and must be saved securely. type CreateClientResponse struct { ID string `json:"id"` - Secret string `json:"secret"` + Secret string `json:"secret"` //nolint:gosec // returned once on creation } // ClientResponse represents a client in API responses (excludes secret). 
diff --git a/internal/auth/repository/mysql_token_repository_test.go b/internal/auth/repository/mysql_token_repository_test.go index 6fbeee4..b1e536b 100644 --- a/internal/auth/repository/mysql_token_repository_test.go +++ b/internal/auth/repository/mysql_token_repository_test.go @@ -132,6 +132,7 @@ func TestMySQLTokenRepository_Create_MultipleTokens(t *testing.T) { tokenRepo := NewMySQLTokenRepository(db) // Create first token + //nolint:gosec // test fixture data token1 := &authDomain.Token{ ID: uuid.Must(uuid.NewV7()), TokenHash: "token-hash-mysql-1", @@ -147,6 +148,7 @@ func TestMySQLTokenRepository_Create_MultipleTokens(t *testing.T) { time.Sleep(time.Millisecond) // Ensure different timestamp for UUIDv7 // Create second token + //nolint:gosec // test fixture data token2 := &authDomain.Token{ ID: uuid.Must(uuid.NewV7()), TokenHash: "token-hash-mysql-2", @@ -243,6 +245,7 @@ func TestMySQLTokenRepository_Update_NonExistent(t *testing.T) { tokenRepo := NewMySQLTokenRepository(db) // Try to update a non-existent token + //nolint:gosec // test fixture data token := &authDomain.Token{ ID: uuid.Must(uuid.NewV7()), TokenHash: "hash-mysql", @@ -342,6 +345,7 @@ func TestMySQLTokenRepository_Create_WithTransaction(t *testing.T) { tokenRepo := NewMySQLTokenRepository(db) + //nolint:gosec // test fixture data token := &authDomain.Token{ ID: uuid.Must(uuid.NewV7()), TokenHash: "tx-test-hash-mysql", @@ -481,6 +485,7 @@ func TestMySQLTokenRepository_Get_WithTransaction(t *testing.T) { tokenRepo := NewMySQLTokenRepository(db) // Create a token outside transaction + //nolint:gosec // test fixture data token1 := &authDomain.Token{ ID: uuid.Must(uuid.NewV7()), TokenHash: "token-1-tx-mysql", @@ -499,6 +504,7 @@ func TestMySQLTokenRepository_Get_WithTransaction(t *testing.T) { // Create another token inside transaction time.Sleep(time.Millisecond) + //nolint:gosec // test fixture data token2 := &authDomain.Token{ ID: uuid.Must(uuid.NewV7()), TokenHash: "token-2-tx-mysql", @@ -685,6 +691,7 @@ func TestMySQLTokenRepository_GetByTokenHash_WithTransaction(t *testing.T) { tokenRepo := NewMySQLTokenRepository(db) // Create a token outside transaction + //nolint:gosec // test fixture data token1 := &authDomain.Token{ ID: uuid.Must(uuid.NewV7()), TokenHash: "token-hash-tx-mysql-1", @@ -703,6 +710,7 @@ func TestMySQLTokenRepository_GetByTokenHash_WithTransaction(t *testing.T) { // Create another token inside transaction time.Sleep(time.Millisecond) + //nolint:gosec // test fixture data token2 := &authDomain.Token{ ID: uuid.Must(uuid.NewV7()), TokenHash: "token-hash-tx-mysql-2", diff --git a/internal/auth/repository/postgresql_token_repository_test.go b/internal/auth/repository/postgresql_token_repository_test.go index 5f1423a..1feb092 100644 --- a/internal/auth/repository/postgresql_token_repository_test.go +++ b/internal/auth/repository/postgresql_token_repository_test.go @@ -147,6 +147,7 @@ func TestPostgreSQLTokenRepository_Create_MultipleTokens(t *testing.T) { time.Sleep(time.Millisecond) // Ensure different timestamp for UUIDv7 // Create second token + //nolint:gosec // test fixture data token2 := &authDomain.Token{ ID: uuid.Must(uuid.NewV7()), TokenHash: "token-hash-2", @@ -342,6 +343,7 @@ func TestPostgreSQLTokenRepository_Create_WithTransaction(t *testing.T) { tokenRepo := NewPostgreSQLTokenRepository(db) + //nolint:gosec // test fixture data token := &authDomain.Token{ ID: uuid.Must(uuid.NewV7()), TokenHash: "tx-test-hash", @@ -485,6 +487,7 @@ func TestPostgreSQLTokenRepository_Get_WithTransaction(t 
*testing.T) { // Create another token inside transaction time.Sleep(time.Millisecond) + //nolint:gosec // test fixture data token2 := &authDomain.Token{ ID: uuid.Must(uuid.NewV7()), TokenHash: "token-2-tx", @@ -659,6 +662,7 @@ func TestPostgreSQLTokenRepository_GetByTokenHash_WithTransaction(t *testing.T) tokenRepo := NewPostgreSQLTokenRepository(db) // Create a token outside transaction + //nolint:gosec // test fixture data token1 := &authDomain.Token{ ID: uuid.Must(uuid.NewV7()), TokenHash: "token-hash-tx-1", @@ -677,6 +681,7 @@ func TestPostgreSQLTokenRepository_GetByTokenHash_WithTransaction(t *testing.T) // Create another token inside transaction time.Sleep(time.Millisecond) + //nolint:gosec // test fixture data token2 := &authDomain.Token{ ID: uuid.Must(uuid.NewV7()), TokenHash: "token-hash-tx-2", diff --git a/internal/auth/usecase/token_usecase_test.go b/internal/auth/usecase/token_usecase_test.go index edc090b..9d91c63 100644 --- a/internal/auth/usecase/token_usecase_test.go +++ b/internal/auth/usecase/token_usecase_test.go @@ -505,6 +505,7 @@ func TestTokenUseCase_Authenticate(t *testing.T) { CreatedAt: time.Now().UTC().Add(-1 * time.Hour), } + //nolint:gosec // test fixture data client := &authDomain.Client{ ID: clientID, Secret: "$argon2id$v=19$m=65536,t=3,p=4$test-hash", //nolint:gosec // test fixture @@ -712,6 +713,7 @@ func TestTokenUseCase_Authenticate(t *testing.T) { CreatedAt: time.Now().UTC().Add(-1 * time.Hour), } + //nolint:gosec // test fixture data client := &authDomain.Client{ ID: clientID, Secret: "$argon2id$v=19$m=65536,t=3,p=4$test-hash", //nolint:gosec // test fixture diff --git a/internal/testutil/database.go b/internal/testutil/database.go index b3955cd..669a316 100644 --- a/internal/testutil/database.go +++ b/internal/testutil/database.go @@ -19,8 +19,10 @@ import ( ) const ( + //nolint:gosec // test database credentials PostgresTestDSN = "postgres://testuser:testpassword@localhost:5433/testdb?sslmode=disable" - MySQLTestDSN = "testuser:testpassword@tcp(localhost:3307)/testdb?parseTime=true&multiStatements=true" + //nolint:gosec // test database credentials + MySQLTestDSN = "testuser:testpassword@tcp(localhost:3307)/testdb?parseTime=true&multiStatements=true" ) // SetupPostgresDB creates a new PostgreSQL database connection and runs migrations. 
diff --git a/internal/tokenization/service/alphanumeric_generator_test.go b/internal/tokenization/service/alphanumeric_generator_test.go index 15cd6bf..2cd8e7c 100644 --- a/internal/tokenization/service/alphanumeric_generator_test.go +++ b/internal/tokenization/service/alphanumeric_generator_test.go @@ -102,6 +102,7 @@ func TestAlphanumericGenerator_Validate(t *testing.T) { token: "0123456789", expectError: false, }, + //nolint:gosec // test token string { name: "Valid_Mixed", token: "aB3dE5fG7h", diff --git a/internal/tokenization/service/numeric_generator.go b/internal/tokenization/service/numeric_generator.go index bc8c9c0..6c2b15a 100644 --- a/internal/tokenization/service/numeric_generator.go +++ b/internal/tokenization/service/numeric_generator.go @@ -31,6 +31,7 @@ func (g *numericGenerator) Generate(length int) (string, error) { if err != nil { return "", fmt.Errorf("failed to generate random digit: %w", err) } + //nolint:gosec // n is bounded [0,9] by big.NewInt(10), safe conversion digits[i] = byte('0' + n.Int64()) } diff --git a/internal/tokenization/service/uuid_generator_test.go b/internal/tokenization/service/uuid_generator_test.go index 8ba0117..a6bd781 100644 --- a/internal/tokenization/service/uuid_generator_test.go +++ b/internal/tokenization/service/uuid_generator_test.go @@ -38,6 +38,7 @@ func TestUUIDGenerator_Validate(t *testing.T) { token string expectError bool }{ + //nolint:gosec // test UUID string { name: "Valid_UUIDv4", token: "550e8400-e29b-41d4-a716-446655440000", @@ -58,6 +59,7 @@ func TestUUIDGenerator_Validate(t *testing.T) { token: "", expectError: true, }, + //nolint:gosec // test UUID string { name: "Invalid_PartialUUID", token: "550e8400-e29b-41d4", diff --git a/internal/validation/rules.go b/internal/validation/rules.go index f21b888..8370da8 100644 --- a/internal/validation/rules.go +++ b/internal/validation/rules.go @@ -3,6 +3,7 @@ package validation import ( "regexp" + "strconv" "strings" "unicode" @@ -43,7 +44,7 @@ func (p PasswordStrength) Validate(value interface{}) error { if len(s) < p.MinLength { return validation.NewError( "validation_password_min_length", - "password must be at least "+string(rune(p.MinLength+48))+" characters", + "password must be at least "+strconv.Itoa(p.MinLength)+" characters", ) } diff --git a/test/integration/api_test.go b/test/integration/api_test.go index 6caf905..602b5aa 100644 --- a/test/integration/api_test.go +++ b/test/integration/api_test.go @@ -73,6 +73,7 @@ func (ctx *integrationTestContext) makeRequest( } client := &http.Client{Timeout: 10 * time.Second} + //nolint:gosec // controlled test environment with localhost URLs resp, err := client.Do(req) require.NoError(t, err, "failed to perform request")