Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 40 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,43 @@ jobs:
exit 1
fi

- name: API docs consistency guard (PRs)
if: github.event_name == 'pull_request'
run: |
set -euo pipefail

CHANGED_FILES="$(git diff --name-only "${{ github.event.pull_request.base.sha }}" "${{ github.sha }}")"
if [ -z "$CHANGED_FILES" ]; then
exit 0
fi

api_changed=false
docs_changed=false

while IFS= read -r file; do
[ -z "$file" ] && continue

case "$file" in
internal/*/http/*.go|cmd/app/commands/*.go|migrations/*/*.sql)
api_changed=true
;;
esac

case "$file" in
docs/api/*|docs/openapi.yaml|docs/examples/*|docs/operations/*|docs/getting-started/*|docs/cli/commands.md|docs/releases/*|docs/CHANGELOG.md|docs/README.md|docs/metadata.json|README.md)
docs_changed=true
;;
esac
done <<EOF
$CHANGED_FILES
EOF

if [ "$api_changed" = true ] && [ "$docs_changed" = false ]; then
echo "API/runtime changes detected but no related docs updates found"
echo "Update docs/api, openapi/examples/runbooks/release notes as needed"
exit 1
fi

- name: Markdown lint
uses: DavidAnson/markdownlint-cli2-action@v20
with:
Expand All @@ -65,6 +102,9 @@ jobs:
- name: Example shape checks
run: python3 docs/tools/check_example_shapes.py

- name: Docs metadata checks
run: python3 docs/tools/check_docs_metadata.py

- name: Markdown link check (offline)
uses: lycheeverse/lychee-action@v2
with:
Expand Down
8 changes: 8 additions & 0 deletions .mockery.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -39,3 +39,11 @@ packages:
DekRepository: {}
TransitKeyRepository: {}
TransitKeyUseCase: {}
github.com/allisson/secrets/internal/tokenization/usecase:
interfaces:
DekRepository: {}
TokenizationKeyRepository: {}
TokenizationKeyUseCase: {}
TokenRepository: {}
TokenizationUseCase: {}
HashService: {}
7 changes: 6 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
.PHONY: help build run test lint clean migrate-up migrate-down docker-build docker-run mocks docs-lint docs-check-examples
.PHONY: help build run test lint clean migrate-up migrate-down docker-build docker-run mocks docs-lint docs-check-examples docs-check-metadata

APP_NAME := app
BINARY_DIR := bin
Expand Down Expand Up @@ -74,10 +74,15 @@ docs-check-examples: ## Validate JSON shapes used by docs examples
@echo "Running docs example shape checks..."
@python3 docs/tools/check_example_shapes.py

docs-check-metadata: ## Validate docs metadata and API markers
@echo "Running docs metadata checks..."
@python3 docs/tools/check_docs_metadata.py

docs-lint: ## Run markdown lint and offline link checks
@echo "Running markdownlint-cli2..."
@docker run --rm -v "$(PWD):/workdir" -w /workdir davidanson/markdownlint-cli2:v0.18.1 README.md "docs/**/*.md" ".github/pull_request_template.md"
@$(MAKE) docs-check-examples
@$(MAKE) docs-check-metadata
@echo "Running lychee offline link checks..."
@docker run --rm -v "$(PWD):/input" lycheeverse/lychee:latest --offline --include-fragments --no-progress "/input/README.md" "/input/docs/**/*.md" "/input/.github/pull_request_template.md"

Expand Down
21 changes: 14 additions & 7 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,13 @@ Secrets is inspired by **HashiCorp Vault** ❤️, but it is intentionally **muc
The default way to run Secrets is the published Docker image:

```bash
docker pull allisson/secrets:v0.3.0
docker pull allisson/secrets:v0.4.0
```

Use pinned tags for reproducible setups. `latest` is also available for fast iteration.

The release and API metadata referenced throughout the docs is sourced from `docs/metadata.json`.

Then follow the Docker setup guide in [docs/getting-started/docker.md](docs/getting-started/docker.md).

⚠️ After rotating a master key or KEK, restart API server instances so they load the updated key material.
Expand All @@ -27,12 +29,13 @@ Then follow the Docker setup guide in [docs/getting-started/docker.md](docs/gett
1. 🐳 **Run with Docker image (recommended)**: [docs/getting-started/docker.md](docs/getting-started/docker.md)
2. 💻 **Run locally for development**: [docs/getting-started/local-development.md](docs/getting-started/local-development.md)

## 🆕 What's New in v0.3.0
## 🆕 What's New in v0.4.0

- 📊 OpenTelemetry metrics with Prometheus-compatible export at `GET /metrics`
- ⚙️ Runtime metrics controls via `METRICS_ENABLED` and `METRICS_NAMESPACE`
- 📈 HTTP and business-operation metrics for auth, secrets, and transit flows
- 📘 New monitoring operations guide: [docs/operations/monitoring.md](docs/operations/monitoring.md)
- 🎫 Tokenization API for format-preserving token workflows (`/v1/tokenization/*`)
- 🧰 New tokenization CLI commands: `create-tokenization-key`, `rotate-tokenization-key`, `clean-expired-tokens`
- 🗄️ Tokenization persistence migrations for PostgreSQL and MySQL (`000002_add_tokenization`)
- 📈 Tokenization business-operation metrics added to observability
- 📘 New release notes: [docs/releases/v0.4.0.md](docs/releases/v0.4.0.md)

## 📚 Docs Map

Expand All @@ -43,7 +46,7 @@ Then follow the Docker setup guide in [docs/getting-started/docker.md](docs/gett
- 🧰 **Troubleshooting**: [docs/getting-started/troubleshooting.md](docs/getting-started/troubleshooting.md)
- ✅ **Smoke test script**: [docs/getting-started/smoke-test.md](docs/getting-started/smoke-test.md)
- 🧪 **CLI commands reference**: [docs/cli/commands.md](docs/cli/commands.md)
- 🚀 **v0.3.0 release notes**: [docs/releases/v0.3.0.md](docs/releases/v0.3.0.md)
- 🚀 **v0.4.0 release notes**: [docs/releases/v0.4.0.md](docs/releases/v0.4.0.md)

- **By Topic**
- ⚙️ **Environment variables**: [docs/configuration/environment-variables.md](docs/configuration/environment-variables.md)
Expand All @@ -62,8 +65,10 @@ Then follow the Docker setup guide in [docs/getting-started/docker.md](docs/gett
- 🔐 **Auth API**: [docs/api/authentication.md](docs/api/authentication.md)
- 👤 **Clients API**: [docs/api/clients.md](docs/api/clients.md)
- 📘 **Policy cookbook**: [docs/api/policies.md](docs/api/policies.md)
- 🗂️ **Capability matrix**: [docs/api/capability-matrix.md](docs/api/capability-matrix.md)
- 📦 **Secrets API**: [docs/api/secrets.md](docs/api/secrets.md)
- 🚄 **Transit API**: [docs/api/transit.md](docs/api/transit.md)
- 🎫 **Tokenization API**: [docs/api/tokenization.md](docs/api/tokenization.md)
- 📜 **Audit logs API**: [docs/api/audit-logs.md](docs/api/audit-logs.md)
- 🧩 **API versioning policy**: [docs/api/versioning-policy.md](docs/api/versioning-policy.md)

Expand All @@ -79,6 +84,7 @@ All detailed guides include practical use cases and copy/paste-ready examples.

- 🔐 Envelope encryption (`Master Key -> KEK -> DEK -> Secret Data`)
- 🚄 Transit encryption (`/v1/transit/keys/*`) for encrypt/decrypt as a service (decrypt input uses `<version>:<base64-ciphertext>`; see [Transit API docs](docs/api/transit.md), [create vs rotate](docs/api/transit.md#create-vs-rotate), and [error matrix](docs/api/transit.md#endpoint-error-matrix))
- 🎫 Tokenization API (`/v1/tokenization/*`) for token generation, detokenization, validation, and revocation
- 👤 Token-based authentication and policy-based authorization
- 📦 Versioned secrets by path (`/v1/secrets/*path`)
- 📜 Audit logs with request correlation (`request_id`) and filtering
Expand All @@ -92,6 +98,7 @@ All detailed guides include practical use cases and copy/paste-ready examples.
- Clients: `GET/POST /v1/clients`, `GET/PUT/DELETE /v1/clients/:id`
- Secrets: `POST/GET/DELETE /v1/secrets/*path`
- Transit: `POST /v1/transit/keys`, `POST /v1/transit/keys/:name/rotate`, `POST /v1/transit/keys/:name/encrypt`, `POST /v1/transit/keys/:name/decrypt`, `DELETE /v1/transit/keys/:id` ([create vs rotate](docs/api/transit.md#create-vs-rotate), [error matrix](docs/api/transit.md#endpoint-error-matrix))
- Tokenization: `POST /v1/tokenization/keys`, `POST /v1/tokenization/keys/:name/rotate`, `DELETE /v1/tokenization/keys/:id`, `POST /v1/tokenization/keys/:name/tokenize`, `POST /v1/tokenization/detokenize`, `POST /v1/tokenization/validate`, `POST /v1/tokenization/revoke`
- Audit logs: `GET /v1/audit-logs`
- Metrics: `GET /metrics` (available when `METRICS_ENABLED=true`)

Expand Down
92 changes: 92 additions & 0 deletions cmd/app/commands/clean_expired_tokens.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
package commands

import (
"context"
"encoding/json"
"fmt"
"log/slog"
"os"

"github.com/allisson/secrets/internal/app"
"github.com/allisson/secrets/internal/config"
)

// RunCleanExpiredTokens deletes expired tokens older than the specified number of days.
// Supports dry-run mode to preview the deletion count, and both "text" and "json"
// output formats (any value other than "json" falls back to text output).
//
// Requirements: Database must be migrated and accessible.
func RunCleanExpiredTokens(ctx context.Context, days int, dryRun bool, format string) error {
	// Reject negative retention windows. Zero is accepted here; presumably it
	// means "every token that is already expired" — TODO confirm against
	// TokenizationUseCase.CleanupExpired semantics.
	if days < 0 {
		return fmt.Errorf("days must be a non-negative number, got: %d", days)
	}

	// Load configuration
	cfg := config.Load()

	// Create DI container
	container := app.NewContainer(cfg)

	// Get logger from container
	logger := container.Logger()
	logger.Info("cleaning expired tokens",
		slog.Int("days", days),
		slog.Bool("dry_run", dryRun),
	)

	// Ensure cleanup on exit
	defer closeContainer(container, logger)

	// Get tokenization use case from container
	tokenizationUseCase, err := container.TokenizationUseCase()
	if err != nil {
		return fmt.Errorf("failed to initialize tokenization use case: %w", err)
	}

	// Execute deletion (or count-only when dryRun is true)
	count, err := tokenizationUseCase.CleanupExpired(ctx, days, dryRun)
	if err != nil {
		return fmt.Errorf("failed to cleanup expired tokens: %w", err)
	}

	// Output result based on format
	if format == "json" {
		outputCleanExpiredJSON(count, days, dryRun)
	} else {
		outputCleanExpiredText(count, days, dryRun)
	}

	logger.Info("cleanup completed",
		slog.Int64("count", count),
		slog.Int("days", days),
		slog.Bool("dry_run", dryRun),
	)

	return nil
}

// outputCleanExpiredText prints the cleanup result to stdout in a
// human-readable sentence; dry-run mode uses a "would delete" phrasing.
func outputCleanExpiredText(count int64, days int, dryRun bool) {
	prefix := "Successfully deleted"
	if dryRun {
		prefix = "Dry-run mode: Would delete"
	}
	fmt.Printf("%s %d expired token(s) older than %d day(s)\n", prefix, count, days)
}

// outputCleanExpiredJSON prints the cleanup result to stdout as indented JSON
// for machine consumption. On a marshal failure it reports the error to stderr
// and prints nothing to stdout.
func outputCleanExpiredJSON(count int64, days int, dryRun bool) {
	// `any` is the idiomatic spelling of interface{} (Go 1.18+; this codebase
	// already requires >=1.21 for log/slog).
	result := map[string]any{
		"count":   count,
		"days":    days,
		"dry_run": dryRun,
	}

	jsonBytes, err := json.MarshalIndent(result, "", " ")
	if err != nil {
		fmt.Fprintf(os.Stderr, "failed to marshal JSON: %v\n", err)
		return
	}

	fmt.Println(string(jsonBytes))
}
75 changes: 75 additions & 0 deletions cmd/app/commands/create_tokenization_key.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
package commands

import (
"context"
"fmt"
"log/slog"

"github.com/allisson/secrets/internal/app"
"github.com/allisson/secrets/internal/config"
)

// RunCreateTokenizationKey creates a new tokenization key with the specified
// parameters. Should be run during initial setup or when adding new
// tokenization formats.
//
// Requirements: Database must be migrated, MASTER_KEYS and ACTIVE_MASTER_KEY_ID must be set.
func RunCreateTokenizationKey(
	ctx context.Context,
	name string,
	formatType string,
	isDeterministic bool,
	algorithmStr string,
) error {
	// Build configuration and the DI container it feeds.
	conf := config.Load()
	di := app.NewContainer(conf)

	// Announce the operation with its parameters before doing any work.
	lg := di.Logger()
	lg.Info("creating new tokenization key",
		slog.String("name", name),
		slog.String("format_type", formatType),
		slog.Bool("is_deterministic", isDeterministic),
		slog.String("algorithm", algorithmStr),
	)

	// Release container resources regardless of how we exit.
	defer closeContainer(di, lg)

	// Translate the CLI string arguments into domain values.
	parsedFormat, err := parseFormatType(formatType)
	if err != nil {
		return err
	}

	parsedAlgorithm, err := parseAlgorithm(algorithmStr)
	if err != nil {
		return err
	}

	// Resolve the use case responsible for key management.
	keyUseCase, err := di.TokenizationKeyUseCase()
	if err != nil {
		return fmt.Errorf("failed to initialize tokenization key use case: %w", err)
	}

	// Persist the new tokenization key.
	created, err := keyUseCase.Create(ctx, name, parsedFormat, isDeterministic, parsedAlgorithm)
	if err != nil {
		return fmt.Errorf("failed to create tokenization key: %w", err)
	}

	lg.Info("tokenization key created successfully",
		slog.String("id", created.ID.String()),
		slog.String("name", created.Name),
		slog.String("format_type", string(created.FormatType)),
		slog.Uint64("version", uint64(created.Version)),
		slog.Bool("is_deterministic", created.IsDeterministic),
		slog.String("algorithm", string(parsedAlgorithm)),
	)

	return nil
}
22 changes: 22 additions & 0 deletions cmd/app/commands/helpers.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,13 @@ package commands

import (
"context"
"fmt"
"log/slog"

"github.com/golang-migrate/migrate/v4"

"github.com/allisson/secrets/internal/app"
tokenizationDomain "github.com/allisson/secrets/internal/tokenization/domain"
)

// closeContainer closes all resources in the container and logs any errors.
Expand All @@ -28,3 +30,23 @@ func closeMigrate(migrate *migrate.Migrate, logger *slog.Logger) {
)
}
}

// parseFormatType converts a format type string into its
// tokenizationDomain.FormatType equivalent. It returns an error listing the
// accepted values when the string is not recognized.
func parseFormatType(formatType string) (tokenizationDomain.FormatType, error) {
	lookup := map[string]tokenizationDomain.FormatType{
		"uuid":            tokenizationDomain.FormatUUID,
		"numeric":         tokenizationDomain.FormatNumeric,
		"luhn-preserving": tokenizationDomain.FormatLuhnPreserving,
		"alphanumeric":    tokenizationDomain.FormatAlphanumeric,
	}

	ft, ok := lookup[formatType]
	if !ok {
		return "", fmt.Errorf(
			"invalid format type: %s (valid options: uuid, numeric, luhn-preserving, alphanumeric)",
			formatType,
		)
	}
	return ft, nil
}
Loading
Loading